diff --git a/.flakeheaven.baseline b/.flakeheaven.baseline deleted file mode 100644 index a7ff2d9d7..000000000 --- a/.flakeheaven.baseline +++ /dev/null @@ -1,3346 +0,0 @@ -02bfa4ebbcf73a180864c0e92f2e087d -e1d6ad6643d9bca28461a543f93bb563 -e1d6ad6643d9bca28461a543f93bb563 -e1d6ad6643d9bca28461a543f93bb563 -e1d6ad6643d9bca28461a543f93bb563 -e1d6ad6643d9bca28461a543f93bb563 -e1d6ad6643d9bca28461a543f93bb563 -e1d6ad6643d9bca28461a543f93bb563 -e1d6ad6643d9bca28461a543f93bb563 -c403d5ea40c87e6995c923352e1bc901 -c403d5ea40c87e6995c923352e1bc901 -7a5c7b92de180e0fb660b997a74fcb89 -87ba1bea569a5a91a7157de5b9f5d601 -aab7186e650f53424b93b56303ce04a9 -69ed0689a188dc779fd57f439d5582c1 -a4ad81d4f4a0b77f7544eab637c7b729 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -78cc668c3e569e9dd131031ee15082f6 -bb48f929af2f533dd5ba854f821d12b5 -43e9422f6cde96f6b16f4bbc361f339e -43e9422f6cde96f6b16f4bbc361f339e -ec1580a7c0a8c3e537230547fa73fb96 -cb224219ba93647438826f0d294e9194 -3ece5b4e8a4023dbee935da331de3896 -3ece5b4e8a4023dbee935da331de3896 -25488aeee566d7de91f717bbefd065c6 -4bf36bc9fca97906a8da7dd79411635b -25358f8a027dbc2c89dcab7bf92f8253 -25358f8a027dbc2c89dcab7bf92f8253 -8ddd82b83b34174bb022454f5fe13fcc -b5edbb42c1380aa8b2c397642015136b -94d60784d56418948e598ca47189d0c2 -095dd7013ba88b16cee3c1bac279bb60 -095dd7013ba88b16cee3c1bac279bb60 -e465827579612b245f3037766ed339cc -e465827579612b245f3037766ed339cc -39f3556b4e17160078c6cb6b95980962 
-39f3556b4e17160078c6cb6b95980962 -d397f989d8595dd9f5f66acfcf9e9693 -2110ed0b9310ded230da3ffc4302c4fe -e40b2fcd7d6809d50805027bb8921aae -e40b2fcd7d6809d50805027bb8921aae -989178c8406a99a041c1a46de5785b8a -e83c0ba0000e981b2eb9f18eef1ec4d7 -ccba42eefccc29ff938c2f77a30204ab -58828144dd7dc0a98b102c411e27a220 -3f1182db7d8cf7d92eef0e884a4c748d -470cb5f4e7bdf663976f60bd5294373d -d4b1bbd60d2652a7c3196d697a6d8668 -24d6caa7a8ed30ed6f5cb768a7ca27e6 -152adc4b33f359bd323b204558130b70 -5b906abec1e8a8512c0a66f0cf7768a2 -d451a965d3fffb3d50d148880d5a4e30 -bff83c5f99961de789074753d229b525 -82aabf52eb5e76e0b7a33f7099440d02 -527dbf2b858f2ebccf209b56bd042210 -5103f191965e3639710d6c3fc35aa620 -2f222fe129f0f789aadb3bd7fba660cb -2f222fe129f0f789aadb3bd7fba660cb -2f222fe129f0f789aadb3bd7fba660cb -552161f7a580fd4525530c60a96c576c -695730637845e71f8c0edf2642b3ef9d -6b2bbe99d6c8755569d6f7ff9e77670c -6b2bbe99d6c8755569d6f7ff9e77670c -fa76801e1e8bfa3f1ab7717a9aa69c97 -94c513cfacec7d2caf4da7e26c554c33 -f66a8614d23d8ff9bd906f2450444d15 -f01e0337d7b8cc27a6ef996fc2de39fe -cff358f69962e6dde3a3029adae44a69 -0bbefead1c3d2e1b8284e798d960895b -320fb55a531ce146a6661e4691ab46a2 -0cd5e41a53ad70da4638d270fe07c9d9 -fe55700768f2e69eba89b660b07243f5 -bd420727a8732edb96a09a1267e33398 -9c49c38a2a97083a86432c37f429c53a -817a3d1f0af2f8c6baf4f8ef49469001 -1c0c4b6db586fbaa8e890f5946a22603 -db38e1b8a29824e30940ee65a7b2c691 -fcc4bc66d35cab26c99061d9e86a0a72 -b39115b7eb7992ac338302ce73aeddb4 -1a9877256e12872045c88f6c461e9bf2 -1a9877256e12872045c88f6c461e9bf2 -1a9877256e12872045c88f6c461e9bf2 -6454f65386f0f41e88eada8937385cc3 -eaa569e765bd0a3c90598bb4862f00a3 -f0aa10bec478544510ba0048e956ff2d -48bc99b1292d6e604e6d190fc78f1b4d -32d093484ee2f309428e6ba90594246f -419d8026ce8043d0efaa8cbc7e9d20d1 -f7bad65d1f18a4da6f6c5da3b3b75ba1 -aeb54ad7d30cf2ee7aca979cf27310c2 -e8e4a45a0fc039ec50779fce81979d4b -c802108ee95a6da8cfa039c55d3de1de -31bd081bd8d0441c855ac542656ac0d5 -988e9d2010b1a35be8ebd77d9afa4427 
-9b7a1c644cb49bb7933f40cd536ff294 -9b7a1c644cb49bb7933f40cd536ff294 -430b6b44c9b4380adfeb0ae6a5064da3 -67a954ae03e29ec7e963bf77e7b3ad52 -456cbac04d56cf0d96cbe15209557457 -b4c8849aab788d768fc4fc7b763ef778 -b4c8849aab788d768fc4fc7b763ef778 -c6eafb4d358d55f1d68cf8c6956ff4fb -3c4cf9241c33816f74ea2f4ad28f4692 -339aef4f491a0e158b170f6134ecc774 -1c8e06f6eb2320f04d8629da33b51286 -cb6e8a899e634f98dd20e3fe1bf489c4 -1c8e06f6eb2320f04d8629da33b51286 -7505fbbbbaa320e8f536a18012e3fdf7 -339aef4f491a0e158b170f6134ecc774 -363639b1bd7c03a58df42f6569f72a38 -1c8e06f6eb2320f04d8629da33b51286 -ee7645bafc36743fe675cb6fabe6baf0 -95352e9831ca551bf8bdf9cd75bf7e8b -95352e9831ca551bf8bdf9cd75bf7e8b -ee7645bafc36743fe675cb6fabe6baf0 -6278f82840d864acaa2112ac469f64f5 -a8227d982c062f7db87a2447709f7272 -ac57c2b93bcb1360057a4b0bf6bbf88b -e235ced7a2ade75ecd95ece3c089f70a -ac57c2b93bcb1360057a4b0bf6bbf88b -e235ced7a2ade75ecd95ece3c089f70a -8ef104e789d9f59eef10183acc98d147 -9596cf02234a3b81f337da1ea56a99a5 -20cbd14ea6f850b6a162a377213a591a -ba55e3b210f61ac0c6f3d7c4021e2dcb -d15cd703ca8083f015f8da72702f107d -ec80fe6a217a90157b7dc2b06a1a3ccd -ec80fe6a217a90157b7dc2b06a1a3ccd -0ab7ed0b9ed39e2ccfe152779bb65d23 -85be0d73b118c246eadd869c93306321 -ad4db3c9efb4fd4cb1120501181234d5 -fb90319c9aab74f2f367433af5d6977e -8854eb92ec9a8a32df218b67e2e67c06 -f209943c360336a77d1c5ad382ed36ac -8eec1ed15fc9860cc54f3cb8cc3bae14 -4e6caefd7a36256c85b6bd060c487874 -1a7455b39475c2d6d42c7ed5d9b5e442 -018ecde721e80f96ec5a36ff22be1701 -018ecde721e80f96ec5a36ff22be1701 -d4d876d406a25635f689cfac5483e695 -11edce5d28cd3617c13e25fff7c98a7e -31129d60ecf98b9fb8c7fa019743e65e -46847c0e64bfdf77c6fc2da7fdc927d5 -368d2d48d36719059830b23d9977ce47 -5e5108623e37b3c16e470fc1b4b13783 -6871a23d564faa086441c4f809a27afe -bef4c0dc0fa5c5ae53aefa032481d7c4 -535929d165665dd6e52d9e4928a35489 -3d8074548aa126fb5360ff99bd3d98ee -5809d6c113e4f1eb82652afa73c977a1 -0169ccbcc18f4a36eb4d3bc5c6b1f156 -7c303a177091bb216a0c2b54d3d4cb74 
-b3b7345a28829e45830ee0d9faab8335 -4e4da7e3ef00baeebcdae70d3c04560e -b1095ae73f66eac30377499be7572788 -bef4c0dc0fa5c5ae53aefa032481d7c4 -21fbf0cc6c302ebaa2ac7a885e572b18 -701ef1bef994ca1fc5967d8777e6fd13 -f56d204e8811afd7c93b7a283d96331d -8e4110b530f252507918708d500c1881 -0856ce31d8034d7b680dd02d2e5451e4 -8e4110b530f252507918708d500c1881 -1708845e00e90b8b97f93a2e30be2e76 -8e4110b530f252507918708d500c1881 -b79935ca718796811eeaa777334a335d -f7fcfc527c270d27d55fb56cb89d01ba -a07981b4632b9e907c214c44f65997b3 -a07981b4632b9e907c214c44f65997b3 -a07981b4632b9e907c214c44f65997b3 -08b07caa07922c6cbfa7d93e03340610 -08b07caa07922c6cbfa7d93e03340610 -08b07caa07922c6cbfa7d93e03340610 -f76001f3b60a3d169c1e88d990982014 -d63f735db11b6dafe22a29df690c5220 -8e4110b530f252507918708d500c1881 -e6fccbe37a91772c3816a11fb32978c2 -c4b6a25bf53aadade73f8cfc5aa319db -1a9876507719709d6e3916e2be5ec5a3 -3b66cd1a864ea5d9a294c2e87c6a4886 -c73b2de5cc8a2a55082a2747797aa81c -0e173679afcb4d2ce9f4ecd2ccd76fe2 -a00ecb51cf884822d410800da7a78c66 -48b540612ac4e3d10514f3ef99e25e5f -5b25f1f551b4ec6ac938049baa5c5735 -7957b70085ec9efe5adbe622b0abd6af -16704602eb2120028ade1465055b1767 -6e474e664f02ba912e03522da0a9a503 -bf0bc4ca0de52a5f23aae14483b1d162 -bf0bc4ca0de52a5f23aae14483b1d162 -1b9f8df5304153bba2282160b16bbf07 -1b9f8df5304153bba2282160b16bbf07 -4af9703f051b2ad1c5ee8c077255ddfb -4af9703f051b2ad1c5ee8c077255ddfb -67fa8bb65c0d44868a547e69c116120f -888bb7807040ae2b7ecf98f05cf6339d -3f27f7150c1bf781f5515320ad1e8316 -b12a610cbcfcb74d1a6038a3c749b72f -18d648fe6a7a72c21efaf7f6b5f31f85 -f9ac4c5cbd9f404808cd12713c73d403 -282aa95a20d526d9581b35313e0fcbd9 -3b08303f347665eb6fc26a2a8e32c0f2 -f454050b15099febf230892203da252a -77777c6f2856bbd15dfac01219cc28a5 -f2ed321f0ac6466c2e6a9a140d2c2b23 -1c968c81321f0794fce67a7e165fa5e5 -f7f2398714ab5e29c66f36cc5bc1e855 -d7262f35ed262a168c5550ced232712b -fb3caa62e8ec0f7118346e346751a415 -93a4ea0f064fc809e74d20c5d9a148c2 -81ca49d972d531242457cd086ca56b5b 
-3413a97ea9707b24a9eae71294fe6144 -29f2d646e1166c5c258eb4e3083f6568 -d209866c97bb6d5f73e0ded9160e7408 -9b02a92bbbb998a0491c76d3878f9371 -5ea4daa4054f5196092ba13794ffcfc4 -d38e3a16f6f9c9f56ec86f87496f168f -dd15c9d40f598e91638ec257ce539be3 -e8898781c77e400283468a8fcb1da81e -0182274426ec1622a5913d5c72d04671 -50dab38771e1624e6175a08938403c1d -a7739076e6c48a539c56e301e4c62778 -f2d91d93f37e3bc16044f6bdd7096035 -7ca717aa3aa6455ec449d243aa7165b8 -e10796c94b1b3480b4b602d6df8bf47c -6fba4bb8c0ee4502b6cb9e442b2ea74a -9031b2d01b29ffcf95ae1a08b5209297 -72daf6e155e0a3b8936826efad4fe1f8 -a6dfc2dec42906d04617d8992fbfa200 -02ee7a8683cd7fda36c21bf6eebb64f1 -57e9ed34d236f1ae443ad5c11428d7a7 -edff7917a5b3fedcdaa9d8df5590642a -eb54f08191a7e6c6bcfbd3a2fd082cd3 -6cdb3950c13bcc61029e29d0a3020cb4 -c11b80dea83b3ecf87571cd5758b399f -3b0a8b81752ecdaff535decee7351d31 -905fdcea32d3db7b6c5cba67ba5bb9ba -3b0a8b81752ecdaff535decee7351d31 -1443ebfc7f25bffc508474c5e457fe0b -0872e60c56866c8283212754e6832728 -7f06aa32deda3b500b892e676f1d678b -3b0a8b81752ecdaff535decee7351d31 -a940f5345cb75a4f6f45f31d4a268a77 -de1ab3627794b0aa4036fab067d50ac3 -3b0a8b81752ecdaff535decee7351d31 -47093552a9e95bc7cfae8f72ac3726b4 -6724834cffb2cb0fe95eff30dd7de5ea -18c1154aa3c93345ca8374b92071ccc3 -75a92d096505d76d4963f318b575a8a6 -df78e8ba9084a882b1cf763e459ffe80 -2174aa2ffa909b575aaeca93f2e58bf9 -5ecc51b79cd81a94762cc4a4b9a163c1 -2565399c058b0782c2fd331b6649a430 -cc10acdb5de7f373bb27079dce65d08e -966241cd83aaa64d6551145b48cb5b5c -c01c0a25f0e04b6accaab308bc4830ef -0c317aaee05ee9885a677b084d03a673 -6b09a7430655a9ce2db1413594b409a2 -af4701a8fcac4599e1596a6e0d38bce5 -402d706f500d2835c85fc6ac3119a51a -0fe3a73815cbf45694fca1acdb5f06af -52fc1c42562f1ec0139043adf5465419 -343809d3d4cd73a7279fd1e782eceae8 -8a196acb20c1043126cea99b111b57e0 -fe72201b035b7482b2697467dc5b2e7d -e2c02a8044e325f07af8e41348acd4ad -55de13c619e907ae0bfc0a99cc8302a0 -184ee28fd63f663984d90bb870de05d8 -c8c4a53910df4a2158bc6aa1f0f38a07 
-1561e2d291802cd38fcf8de24756b715 -9b29df8ff3f1a0a571a85257bc08aa72 -43578be9d7b36b65d69fb5c41b451458 -189bbe389a84f18350373b43a9605859 -4fe482d784f10f92cc322b41848c02f8 -6f6fb44dd40f91118d14cb060c492e48 -efa9c10399db1a96474699e8454affdd -844d33e4fd782e4aa371c31b8c721ae2 -811ce6a0b234aaac925af41f2d48c481 -392cb7311c61d7af3570ea6badde6583 -39684840e958884d24ff76dc26846e1b -1351a1b9a7110d965d50c1bcbd012c92 -6efd2738b1d31aec6bd21882b1b89a5e -916c01a99eddb8d279467309370a1299 -da633cec86214ff5d075990cafc05e89 -3eed8bd52486594dd82a3253ddde0f7d -81f65c10f97c7b97c175cb999719e983 -b3a171098378ad7b52f69e3972b94966 -b814e1f19f2e8d5fa643b44ff4d4764b -6cb7ce994e7f9761162914e6e5b295ee -9402f6516885ff2fac323a4b39b5b52a -f016c7d070102c0ecaaf28ba3c87b957 -cee4045cae3bc6ebf9b2ced73e89b637 -aab3b9ec74a741f2e613f06ed3bbefa2 -830c353b4d4470705cb574d07ca19467 -2efd6f737c2b258f5d94b5cea113db68 -fe20e952b6669a1475c40d7f5d4b4177 -d0e637f40f77f150c7bdb79b0d6b6011 -1ce5024c1dd5c6b06fbe74a6b3bf3274 -f01f6ea194df567984273f27ded1b90b -cdc5c3f3111fe362c9b39745eb44e9e9 -3b5fac2c9f8d8028a43c36ccbd6943c6 -8a42787d91c737b706956fe429d15926 -e328aad6a786f9613579480662cc59c8 -e7423f88c570b471f66e49d595d1d3e1 -063bc5878e421bf96094e06235bd5cab -1d1af4e193a0fa60b92d51c041c5d8bc -d2cda5239834d31e22b0c51c6711c1b9 -0200f01e75fc78f6018b23c07b189e0c -1d5250458f75f6815fcfe72ed82e7fec -5a3f9bbf3d75c30c74ceec8dd982d9f8 -a4efcf5cf9a90baaa3694b5371348c3d -cac6d68473167cc439f4656c2727c21b -5e0fbcc1ccb1825ea187eec7900fa965 -6f8462f89077b4fca68d5209d43d441e -6f8462f89077b4fca68d5209d43d441e -46ada918ce47dae10e5fd3137894d1cc -8c65ff7badc12aa16c01bfb489c55078 -41cb1148d556d13cae96f1fd735a8e6f -36790afde55d23f4eecd542919e4c20b -e3feb3d81b399aa6f245c169a3df545c -725c4bd7e5cf2d880cd6d7b22144b2f1 -6748f78c700712fd20ac3a37705a88f3 -45837b6f3639fe679588164674287434 -45837b6f3639fe679588164674287434 -45837b6f3639fe679588164674287434 -45837b6f3639fe679588164674287434 -9e9989a6bdfe1b1a87d44fc6656a4f9a 
-675ec6e6347b867d11b2d17f3ea499cf -2e17acc1ddda658635cae5bb253b4830 -73e2f0be5fcae51b3aec9e4a23a9b982 -1f19c17388b5c7f36ea438e42eb47ae4 -8b8126a6e9570d63ee23133cd987d2f9 -6c3f247f4a4628d8296d98d15b427848 -b16d485893a0b20e0a115061b2d3d99f -b16d485893a0b20e0a115061b2d3d99f -04fb9687187e1b1ae1c202a4f1ae02b8 -da29570f0e4050dabab685e951cbf6d8 -d4beb607ce7e8da5380d100ecd32eaad -189f38f2a6bf23a43b830b3302cf03b8 -da29570f0e4050dabab685e951cbf6d8 -103b6b8b39465a8e7f2dfbbd576360f6 -bcf7b82ba6da43272e03e2c73b2aaaf2 -bcf7b82ba6da43272e03e2c73b2aaaf2 -4b6cf6f1e047440cc5b0d054fef37af9 -4b6cf6f1e047440cc5b0d054fef37af9 -afc7e0d8812c6adeab68482dc82b7484 -afc7e0d8812c6adeab68482dc82b7484 -afc7e0d8812c6adeab68482dc82b7484 -0b38362e083f2c0b87904d59da015d3e -6d8e15c1f21f55881cff5d9574e556d4 -d7097752bf764532b2d756b057c97a6b -b75c43c75c1ed9b0ceb3775787278ffd -7d9e5c347b7c77f4baf839ab3c16bd1f -a089337d30ae884accfeb6b0e189a6c2 -b93771011f071c8a07345f7ff2a59c64 -aa0ace620b759e42f8d62f6641685b19 -2f89d838dceed009745c92daa6e1eb0c -3ee21070299236f79967ccb9024fc2c6 -a328e9e4192d6dd2c82340e591d76b8e -7e37537b3276e188a0c30c6650f335f2 -b43dea1220b7d325c3a71f75a8c1f66a -8ca5c6a8fb129806a8cac1fed958ab99 -b08386e6a16b4a428fa2bc4fbd579d77 -41705bcc697f78bb0684e208db5c9a47 -42e69605a4573c53bf05c11e8a7d9fd7 -cb58e942aa0be400aa529e3df164fedc -db2c0be876b8f025871d0d30917105cf -b2fc51b456e1dcab86c41b057769e4b1 -0d3bd1946036016f6703fb5e474e8e12 -1823d0e6fe44210bb5f65d89725c685b -2df0842909c14d807bcc5a1cb070f855 -96c53dd7058428c9927ad8bf5afc87d2 -8a7a86d53b113d36f9c3a02a84443407 -d82cb07c7d31ffaf65d0bfb61c9b05d2 -b646c77403578f61d0011d7830a0f65a -2d965bc1bedde97856687a536437629c -c683371d5f4f8af92ed0da91ec450475 -6235b9aaeb8efa3a4be6ca30e8cf9cf0 -4ecb2b50ca6e15616181a12355a191bb -4853f1f0d51afd7c274728c4cf2402dd -277eb48e9f9743920e9fe54e821a2945 -277eb48e9f9743920e9fe54e821a2945 -277eb48e9f9743920e9fe54e821a2945 -d1b0f17f9b9a1f60dbfd0af9a5727ab7 -84df9599c78e0ff282718dbdd1014798 
-c94e4eafd6bca67a1ee2bda86a140310 -abb6e5057780df2e968ef5b390bece5f -f0e4193eab344314effa9e5b30b0c8b5 -6f51642ac60a8521595ea523d9ddfc44 -d3fedc9bd600736a679574ca13910b0a -704405a671c8b1932f0ab3431d81449f -f2fddf65960cf04c1561d2aa0127acb6 -ca2008804d2bd50d27edccf8e1199cae -d266b6695b06a96f156fe8f26b089b09 -3d9c1e31251ddc118ac9375f60dedb88 -8e93e3e5c8b7a311367470a3d9b910e2 -c2c68b4f02c45be9491b51d32d607ca3 -0ce0f210e94f5f8515509729a5b48131 -85d227cb23f810a5037654f11b186061 -db96459b800a1234d01101760b371bd4 -778e0097f09cb0b2510ad87d87f89b80 -cc8fe929adc220158b8ac42fac7862a4 -5f34eea78758c0334e3a1249f89a720f -c0aa9476e62e7509df29500767572fb1 -bb15f406598d2b301907fbb1ec772a57 -380f371e11c2ac1f63e9d5177a8600cf -4ae73150e95ad5653ff5cf57c0d54c22 -39c4429019b3f72be9b29232b73d66ae -abbf7a27784459989045792ccdcd62fa -393960add50c6562cc1fc0a916398a17 -73826fd6177c013c1eefba32b545cd0e -fcb5bbc48d40008b0390ac5b5e790085 -2b67cf8608f232e1a5979584179a2853 -22dd9bd2c9311ea170f8b28ce5c0e85c -de5cdc690de2e81720cdae18d614f208 -4b677af684e0d0500f691b8632b370b8 -86964061962b8af0ec27a1165bed150e -abae14b1af5585c6eea93030fb9a9e0e -1e3b1c7b6c3b45a6f644cc0ebb557d83 -717f9e771c1ffa4bdbdbb032399706c1 -6aa73ce4e279863d1697314ecb1979aa -20c1cb52a258dfbe016934794ce5639c -c690da0bc3d6aa3473dbbed0ba9840da -13f35fbaa368ad2c2f214f044e49169b -df08a9f97c764a619bb028f5873ebd05 -29e80b3be0ca3905cc6210f22b9654e1 -f079dbc96392018e58a0e9084843119b -1f63ef87ef691b34bbb9857393c903f3 -0857b47063309dd3881ed7e60c1b51f6 -216ee6cb982c96d17d0e3d359c225e0f -2b7a5d3bd4f7c2703e20dfd6eeb5bbee -978d8b48bef4c3328386375f846a3320 -b3bf1b3045d2a1d6f09cfb85466312b7 -f296a9f29bfd6ab647d5ded3eeee482f -41a4778b14e291f53609404e8f8ee00b -70b9dc5a11e6a2cf46ece9cf2efbc33e -69078faaa1dd5d4746d5f81372d15773 -a9fa7797d184eede8bf9a48674a68e4c -bd689eec2ca4416ef053996def314964 -082da34f24637dc7051756df640e320b -bbb87fee2f5d0ec190dfeaa1b46c5f98 -216194dabe9175ecc45e46df86e82652 -9fbf70bb7346ba08e333f17759695d16 
-c021a824fc69463947e2515e396590ca -480e896b9b9e2e6cf2f0bbb283728c0a -6546a4c758c862a01a25be6b297b89a5 -2a08a325e426ba9532b28369e093e8cc -fdbad34375ce9add515495d8dfad520f -4506d4d424232164c5ea0815c7e3d5c7 -f36ac950acb8c24a4e041babf357d65c -7e45dc5af1fef923dd71f161e5da5512 -93ce6a9cb29489250ff2516cf690d38a -37134ff8527d98323c8f834a276c18b3 -26917320eefcb2a08f2cc8183d470692 -ac1f936c311360856120cfc1be765e34 -76ab9b583c2495ec974308eaf4fe7a7a -f1de536c5adc3525105eea377cd765d1 -317f067c71342732361e7e0f863b139a -9a0e65e17f68eadf2a887f232e9a33a2 -568debe767efb48de1e62185e7daf457 -a404c2d63344a74b126a9924e7f2a99c -7417c9b9b18a34519762ba1bd220e31e -f6f6d0b50ccd1c5c3035ba777bfb2d02 -9083a6ee97e7b50166b7086aca88c6b5 -f6f33f55df3ec8681586eebf8104e53d -1d1648207053c4fcb8c9ebdbcb6ab0f3 -8af48e5fbe46254b247c985ec189eefa -3be08431f04433b26088e52936905774 -266f499b79295d6f86e782c9b0c81639 -7764bff1dae66586240d9610ae6abe91 -5c546e70070e5ae797834ebc806c3597 -c500724e993fa77b1fff2cdeeddaae47 -05bd1b8558a204bbe28a0b024caad20d -3a1f71abf1718b4839e9dfc471e7b591 -385995a71e53a6f1d0789bc576a519e7 -353da241656432947d246a81a8773031 -e54e9f0e13f0bbafd06cb5cf3a982212 -72e4f60c16caaf79c1e25e9f9f675669 -6d629ebb1ebbccb9a07b30a4abfbf3b2 -41e28eaa7d8189ad10b0fca9986e1d1f -6cf9ae80314eb4a89932ad9ff43ec83b -2e52ac37468d67adfc201681f56104b8 -02fe71317edaed25013f5d2dfbcac591 -07b638194fb40b4e06f65193f800c71a -29121d3c73d6e669ad6309bf1f40d2a1 -a1369bc1772ff41a6c63c2c7f56816bb -03ea0ca9c4ab1339585038ef280049ee -3b992983997648446b0ee420d450d3e3 -70f1653ab7aa55049ee584169331c57a -c92715ff43ea1b7bdcc36f041b715a27 -a5470197ad19272f73ef08fbb31bade7 -9823f3f9801393ceac83b0fe683951d9 -7b7705b1a424aef3e8ad5731425b69da -3e615898b5e7e2fe775613496665b4b1 -57f08359ff19bfaa1eaf486c2f09feb9 -d3c7db389ee604e3336d69ac2ea79140 -ffed16c60f237c37f7ca54e857e2b5d9 -fcf02b905648ef26e79f479ce99f8e0d -e5533ba95294e62236cd285a955836c9 -35eb84d02c111cae4ac38e4fe09b2f32 -3710733dcb415f433943b73cf72e1a80 
-8dd610dc366b7b35805519378dc70fda -9e238c66285593232bba54c1e65a87b7 -ae4002bd36987f8faca131f57392f0e6 -a8a5db8b71cd8561d49eba1fe5ca8f2d -5a70e2244fcf4de54c9a0aa7eb3aa5ee -b7165f1b5095693eb789ba4a393ffed9 -ccef9331a2078a2ea0b9ebfed427a7bb -449fc16c5b592706ec6af91b863ea0b5 -bdeb3784589724fd0cac74bf8d52c660 -7e6bb915c67eafd3efd1d15cb3a1a2f4 -e12b120bedb29a448f093f4d8993b472 -a6e9fd4c15e40850efca61eb9d43b4c7 -ed20cc9f994d43a008e16682f2c0d18c -f97d533dd392bdbe433f5d51af223c27 -9091dc26402e09a73393db5be67f2e96 -3216cf360436b623217989bf8364b179 -ba08fbd8d5a76c2e23ecccc9105548c9 -a322025503f6b9e7dd907e48e4b84bb8 -329222582d9ccb9f2f108d6448ea4f97 -e3500d073dbfacc49652661a3f3ef2a1 -9f2bb8069291b290fd185db1fb578d3c -591715128735bf55c867a547fced6522 -ba4e3603c1a2fc866c384906767369b3 -41544181884455303e8c6ee4c0fc5a2c -09b1a54267016d50025b9fe465c81968 -8a3fcb17540477252ea0a8e4c56623bf -6343d5e5c135475ef984b2de68f086a2 -d11444794acc36ba2ac3e866fdfb5004 -e5bb8060d0a0acad1a0e9b53c1d04b00 -fd42b198dd69564fa85c1b3c690c1759 -d27124e264ed58c5582792aacfb4489e -9d0dc766a02d4913ce45b25f3545cdd3 -70652c85362797f2f060618fdb18a34f -2e9cb09627b37eaac4aef4e8f23e9c93 -bf343d92a5fdeb15fe1480206cdbb716 -465fb111d0b5412543baaac1cb1d0c8a -b5994cb76669c18a9e0421c40afe2baa -17d9dd2bf018a085e7d19b9510be1c61 -60d5ce2fbacfcb9fd2bcff99ecfdbc39 -5c9cbb436dea4631ffa7e04c660ef72b -7d7eb3d80e2ce323a2a7f00623061342 -dcfadaa643675c8a7ee9b305f1953663 -13bae45a674cf6279c92d39bee44eb4f -9caf609b9f849018ce9a87f384185b07 -1a9ef48f7d3d2bbebe60d8ecbc0c1c5a -66cc32cee3b276106e3bf9b1af6660fe -86ff9d50df8bf9b99fd6effb0ce3e26a -51d463277cd5616df735fa68d8cd3f2d -1272f5804bd5cc611a6ad781c2ec9fcb -c0e708e7e4666be8a69b0c45095df01d -5f39867334db22ec63a3e8a5d929e0f8 -f7a5250c99544ac30a1f81f52a963f27 -27772b3abe781241f9f593a7c7241bcf -bb82e47237be7ae5c9316e59dc649c04 -c4ef8dbd00b759fe700f1cf0cb13e8d0 -d5a433080aea64c8b19e5925d920eea5 -fc6b03d06a3aada98d690f58b55c2412 -5c7ef807ad61215a10863f2b71501e9c 
-23699e5777d4e864b6e5bc940c361a1e -f32d36adcd7fd1ebd9c9d0548f5cddb8 -485a8e133bf64914adf42c82debd22c6 -5b22f118ac9f243bd7450dde74af7a1b -664e23929a337585984e889d165b1ead -89d532458f93ce4a0dc199098442ea69 -565193b177659837f896ac05f3380108 -6163e06ba179c6c91ee5c9da95a60819 -94e7917261639c94057b01e6a3cd0532 -3447be1efe38270740ab8a3784fc0f42 -6068c6d53ec9e3b039d5c04c5663ca28 -18dc1efe611c58117e858d54e66d7220 -74d0dee294ff65a767b2523f81dfd304 -c2d21ac567b93aab151d3f5746d29f13 -d1d6d5fccd419af62d12d9e17b44cdd4 -4723a88b766b35c6d5b9ef0733fd653a -b6bd3d404448d68ac36d5d989752d6b0 -c0010c7cb2a50b56df0dd99655840df6 -da8673cdf4961996bc9b567494549f53 -e9009c57de69e0b3c7a60e43ebfd2515 -25349f7de7513c9131403050f113f549 -404b5d2b78aeb351beee9b1bfdd3fcc6 -fb948dfb2a627950d2928a94981ab0ac -302a005ef58b03774cfe4998a062a157 -00718736e9ea11ecfbb48135c0a116fd -21a6597e21bcc69243bdb9d894b81f3a -53ca8af575c54126a1338daebc6a7908 -565b6769cba2295e188fdcc83d5c45a8 -a485a4a4f34ebed20c6bd2c1c0a34223 -1134eb4900778fb0fd7e0bb996aa9047 -160d1a88173e3800de35033cccd7e5eb -d082d7f240fb4a41a91410ec83cfe022 -435debd512f444a2da0799136842ebba -4d291128c759d92170ff3c3fc8cd998a -42c640ba7c3bf25bc08808099cd354fe -4d1b11374a451b0e6546521eb6b37c83 -ba8b8cae45672a57f5617e4bb22c9bcd -240a8a818f39eb638875acc154948c6e -62b21e36d9b5afd0ac95d5a1c704e99e -0e5be376e2d78619b6bb36cca9755988 -24a710d9252ae11f9471d61cea06bc41 -0f06fb8bf4e3bd7adeaa64bba4e53961 -8014711365f079fe20cb19d0c14c2002 -775d193cac7935373f8d70bb478e7695 -2a9d51c749222c5e70d92c775a6819b5 -6ec4f439e6e3355f3508612cf9196524 -dc2c6d688a1c673b24eb9683c029d942 -225e5640857b9fa0507a689fb72ddbf3 -0f008306dbaae0688940d08fc86c96b8 -20a763dc383abb5548446fcd4e349f32 -119404468c389cdbad9f4dede0f5d699 -73198c45d273567ff30962182fe432d1 -cf42fcec04e5226724eebaa6e9f91824 -036cdb21435c7c817a63ed112c2ffa7e -e4f42ade9580102df74ea17616ffab29 -5689fe00672ed9d94ce97b0736148ccd -65526102d996e422d46bd81423ec7978 -a2c74d7a676d0128c0d03f3f7beb6214 
-702d2bc023d79a0b7ea763ec8537ed57 -78b861f07c1bddbcfb7e91e80747b9b9 -2a565694d16c9a3a2df305b39777205e -7df29046055189e074880777fb86a3dd -cda07858b5b3fc023c9e242001ee329f -3446b928fa5581e7c494764be54ba46e -d2055c3cb98d4bf5a3f519560382f1a3 -51bbd91937799221bcf8c47e425528fe -9360fcd8159133660addd664ecefbee5 -f89d5ee0ab9a52a556264f9355e7c3e7 -d52ed42475f37497aaaee7989994d63e -c1a9b530b057ebb6d19ff3d163b15b46 -127d6e301c24b502b310f3e321bf917f -5978c0c52253f59794d92892e80ac836 -4d57afa11ebe2e8d58c996fb7ae698fa -9efcadc2ffbdc05e9b774f1379530e50 -df68dfe1d8e4408d36ce0a9bb8a6a2c6 -838d7aff1a802186b4686272aa568e41 -814085e590ff72ad2d021fff5304d469 -316d34612938970982b5f80de904371d -ff0a3def2326a1a5c9c0625784047fc2 -66c715fe518b39ad2971f5f8dd466ad0 -9429bdb5e2f615a483117653bb52c8c1 -012a70313f50f5ad54176b53f78275b9 -a97e5d5428ec7f7b6d0a0f83c5b2e624 -9f6b5940f216ea1b1c072e98c4d43193 -b04b6e50b3b84a94e0641f5d40eceb96 -d36811281040e84f920360aefce2d28a -f4da8619534656ae3707077bae58fc9d -287b7f2b6b782b6122b29244aeaac450 -be48f38c0ed91c0555be55534f542ee8 -d7fd8176094465da0d1294f0a3dc5548 -d9fe333caaeba7c1d9c8583c14d4d9da -ab6d34196c7b870f82439c2256f4ae96 -52ea9abd867c5e095e2e35d6bb4a8276 -8b21e1873919f215db953a0e2b2f3ae3 -a98fabd67f085473dd82d52a6e318a47 -50e8a20f198e4c407f11392d0620a3b3 -60bd37e175b846a5f92e2b02fa32b821 -1c730a5929a818a310b0b6647929198d -989ffd7123a20dec5c6c42d014ca5d2f -08234ab189689d1b731c105815dcc6c5 -63504033d40738c1c2a0938291186023 -a5538e5196db05b8ac55c79cb9e4e10c -52ed181042b7e7ddc09ef1cef3e545dc -2a471359612b9e930198171b908546db -da4d18b75e7936dcb1027d086d7ac882 -976e6b5db6d886a2ab93fbfcab47eb53 -12bfc5cbc7519065242e004b923ba95f -cc437b2ef45ed4d7c6d2b2d2c6983305 -ad9fbc5e71c82a9b888c0501c3e19eea -8e0b6014c26377fa015695205a973c12 -72a01ac5a942f3142454721a8d3a90d8 -a1599925df64de99019284ee57645bb3 -9f09a6b2d31b5c795bd606eabb7e17bf -fd900fbcdea88ab373886a9ae629d18f -ed305311205d435d3f13ba912ca8f37f -b8b2ab7fb776a87dce6b3de0da20dada 
-7df4431dc487ccc3bc02c77e9ed53b1c -d9841b882ee2c696a1c55d21d9687aef -006c6f5d6979e8493fad0c5baf3f2db3 -ec7c1921cece481439f083c798b39533 -924a00c5a7bc0ae61a34350b6fb635bb -90d3163da9f9540b0b73723cd88564f0 -89dfaa38d628eec31eb735b634215a5f -56dcdb1b81ad1e3b751738f71f195db2 -8eda1c4753914f1f424567de4d0e0c79 -e9dd7a0379e2d027abebf08ef1ed98bd -4fbd32e1c347735181b17732d6feeb06 -eeb88be10ad30676fb676dd85edc0e25 -a7c77b614e0cb40f2dd2c99659a6a802 -47ca60d9cc97db06782fa0907a5c2748 -e3e339364d4b4fc5de282b206caa36c7 -e0d78888a77c70665b7c9150fd91f421 -baaa7fa0f48a73193a905690eb06cbdc -dbb05ada9322b6b1b89b3f08207bc36b -e03f3f73db3105dbea40a0950c07d817 -7ebf9ea0967b564b5e81dcf8743407e9 -19bbdd54e5ecd265e461ed78cca7fc0f -ab9a1475ad0b83b1da0718bd5e08eedb -769df21b6c0f89f409c2958ffa0c17a9 -97e295d0388fe644c309d6f6a4a4cb9c -30f9c3937e88d5af6b37dcb079ccbbaf -32f3237df50cda39a6467dace5dbe759 -908280911420c5a4134c4a7d9f925291 -9f0220e25658ad402aec2b4a0265e881 -b947a8ca834db6f4df7d514063aca680 -384ebe49938532fc5125d3d36a75651f -ce210b1028d04de5c784a6eca8f96ebd -f242687cd03096a1430c0c4b52d30f35 -63a55b8bbe367715d332ad778bd42a88 -71601a4bc9063baab98f26960a061268 -b48312a4bb1c92110f7dfd1f637ce372 -9b038795127bf37417cdd19ed5a68d0b -aa3c3ccf87820c0a59142d34221d7e3a -c57b17a3d438405fffab529c939048b7 -4cd208e3e73aed06c0e9749326de1d5c -0649aa52aa2e26c866a980c962f11783 -647f3a7b78516d823c9ff91c3d4b0886 -fe9509c93649691a2098ca456f8b7fb3 -eab45d3b92723cdda9841ccd9fc1fb5b -b289334884e51b9ad637be82b99253f9 -6c4adccd6d95ef6435cac832d1a01488 -7325f55518af8c37211abf7e50ce28f0 -47d40901f4f395bfa720dd47005076b3 -d875f8ea850e4e184d17e4c3e59a3980 -c577b721af2e3c34de19dcfb0ac7cbb2 -890ab00c6a0f3de6898f2168ce4d09e4 -c71a88b4289a7f96d17f62c2de6c3984 -3d987377ccc9695907dbe85d449e7eb2 -fa0d6b2b28a0ba2c9d0d5353be0e9104 -49c5a7bd67c5ce9be364dc22b62db0e3 -ff215f4203c9c0fcb955e10b4e6aacab -a3412c4f1698e7da3426f6ad69f2e083 -cf72d13b4fed5b1b18ddac734bda47dc -97ac66a910cb53b2a6081ed58b1f147d 
-c08af689b375044d8f35b99203f16cb8 -590499d8586689ad80842ccad380d205 -37c4e04ac814bb759962e066a8f39a32 -f7285b436a6e783cd7c1835fa0d7ec8a -7813c19e91efabf35ddeddd7d4874bac -75e0e2ab92298c23da09c288b6920333 -fd5f3ea39c593323313efa58430d0afc -493837463839be87927470f4e5687611 -e22839d03d22f5346083966ee68f5d0c -5cfc391632930cdb18b5c3e5793e38ee -c4ac1c479d6bf5d547a682fdac62f117 -a89b44eb19cb7a41688b40a469c17743 -1531a6a77623aadb9b2b74095d980d16 -3f7b215b36c2c0742cb7955e738a7ed3 -37224e4d6b6205d986a27ab65632a55d -6a096de9a530dd2087aabd392d519ba4 -dd9a64a4e451f38489fe436afa447853 -fcfe7667028e79945a0075ff5b7bb7ef -25c593810782f7c6cc2456c200a8fcdd -9c2b6b2196f31494bdb9e40266c2d392 -f73a320903d01ee50b689fc776ab6be3 -9132c5f89c50a031b650749107a35fe5 -00bd2927f67a99af2a70eae5f24c61a4 -a1dbecf045ce18a82e3ca318d84d53d6 -24fd1e0def74f5ec32ae3eec02ab5538 -db5f07c621b293fc3cb111381b31157f -8ebd230f3d0b5d465f19c1cfeab79c05 -7e19542140778f8c2ed99be1a2884add -dd3ccb05e6c9b3625123e97e2b67aa45 -c68c4ed016db7695aae6ad3ddb2a2a15 -1dfce3c71b9b74209b660e8caf65e62f -ddc373adc5b81e643ec06cc79cf51069 -24b620ba809ef4ec1bf69b578c004fba -8aabe5a700b9f788e58cbe2e674aaf3b -95cde79d6049b0baa702a202ea46c21e -8342a3341db0bf966804f65e8d60e027 -9281be8dec4373f07416fb61cae5b4d5 -2d844fc589831431d243bb2a92b801ff -770812b718b5bde5560f1ea384410936 -0a479839dbbd406523ef744cba2e2029 -3bdf67fd4ddeed76c50ffa0a72c75964 -f27f0ba5554e6520d3ba3988ba5275ff -2b82a61e7d280287937b5098f8211c5c -416b2f47d7e02b060c00b0d14f65ca6f -77ad2ac2ca9b210abbabc913fbe6c819 -a4fad9ed017a6ec9f117b7dbf3345670 -fafe036842cad31249f873871cb3b48f -28b590419f9c99eabe7d2f2b3b5cf1bc -035a0c74e2969c355bbd05b212dd5e27 -1af83a2e6776b58290149fde965a7623 -0e46dff95dcd20cdf092d37bd63ad7ed -de5f56e542b4bc1722756406ca7fb789 -b79364ba8b9110d730631d5a9f2b0cf4 -182d34bcd31039d8363ee06828410538 -3762f08632b0041db7620e2539fce2a6 -f55b75f8fff6340f15c412b495a6af57 -ef9bb0cf1cff35fcff0ba2b5da02f4f9 -9f38064f313aafde44b8b39aa80dbb2c 
-31613b4132403ce80879b49481b19315 -b9ddafa6ff917df8cca333857fee5231 -99824d2ce671a36c350e9990bd84ce10 -ec8afc0ba1e0981fa6c7481a0a403eee -dae211ed31494c4dfd1d2432bd55da1f -3896611c6023ffd409bbf6e1ebe68d3a -128dcbfe5829b2d554bc538a99624b17 -811f7ae52fe387f8c60e0ba650d6f394 -d84ad888dce6b00295c5c003afdb9ec2 -1afeaae623c40a6a41b94936617b5c86 -a1d98478f09a846bfaa1f8e82f8b5219 -2f55f66e71dc7b1926173b1037376517 -c066b18a63e6947a181ca50aaf41667d -4fb3edf23b7a14fc09bf651ea4602d31 -fda2b48b228d61417075f4d6e53210b1 -0faca68c37d2df90cb6436147c9ac6ed -cd83a873a2bc442d8daa1687d0b0b7c2 -71d69c5ff8ca2b291990c7b7ebd77e20 -e191fef054cfb6b6c51cb9b31445d81e -875ff0f391bc67fe5446be8932c25e8f -0aee38cf4898bca12fe5a3cad72f52a7 -2c66a29ff75959297d28802a8ae37d85 -858616bd34f55deb63debe6b7ed05c0f -3957594eb6447596b350adb841853ad4 -ba372ca7ea9b3ff080614b6e2693d7da -44d0cb109dfb4bd10855d0bd121e9b3b -ecb01c7787057019d153705165d5101c -201fd15ed8c048314b394b562beca6dd -f865276aed86c49c7b8647073d585e7f -a5f4741769ad01be3600c502a57dd240 -91ebf8fc25624da6d4573e13fc79922d -b3c502c6cdd4554be14d262cce101a91 -92ab47253d4ebefdb5d24ea3adbd81ea -718c577c3ca423f6de9ea0e73f402fe7 -12e0477020d73d9d27c4a0e967e5f331 -082aff7922c1762a46b0f1adf0665fc1 -75657ceed900241f597f3931e53ae691 -36375493897588d41e589c94f175fdef -f4eb0141c8f60af6450b403353459392 -578983f8068f5770d6fa15bdd23be66a -412059d28251272aa47d9ea708a72624 -4327e366d8b81200da409fd03fe4f0ac -0b6c658304de986daee2cd87aedd1317 -1116aaec13f21ae1b83e9ae29accd8e0 -6b3091dbb94ff3c8d0787387703ae83e -0f99dac04fd625ac62c9794533e9c796 -5058af6aa1c03d04783d332325c242a5 -4cc572d93844fa1338f97441c6478cd0 -d16950c543344128a433e2a35684c6e8 -f853b197f2f478a2a0d58210a44a57f3 -2d7088c5b352905bdb5f63949af8cb8b -0206a26a419d8324d2d13b9d30cb9c98 -2f54ad0882bef5d66a7d1f0e924c0715 -b4eafa0e8a66bf1e3a777fddc337520f -a65aeefeeb2c9d5250f9b6de1ceae2c6 -122fe9dd450ed27070806e8879b8de8c -b40d509899de354e740c6f5c0d965f97 -4cf7210b1dff6378186158027b7fe5f2 
-287f96b8f49651095e30ce857a8f554b -3116734fc645e6ab2ab7335f5f886881 -36a676db4bd13d0af6760df5420aad1f -1a1f1878c814e2c06a6a60194de99e93 -3b2a5a5d245fa09564a69ff062776fe0 -44adaab6517b268633bfe3b73bc077ef -045d2bda2ea4077dfdaf70da44d89a73 -0bf2b8091a99184651c835c64d443dcb -f4d26ab0fec6335b4db91a1cc7787c7c -dbc9d5d8848927b2d57806b1935bf787 -9ae440d902ee0d75b0a922a7e1f77d2a -72cb8a11c57b4d03bb96ad0fdb9cf5a4 -63794f1b0c9154c5cbe39686370aea23 -7990d509d0d667c1d27c0a35fa8b4ac3 -6cc809fce55b900ef2ae5294360174df -d084e57f9b6f2bf26a8b407f8a06c3e4 -f54de080f44d93ddc7573682e97130fa -cee0b6ad85080509fdfaac64d364132e -9db5d4d766f047520cdac3a0c432eab4 -45fcd47a52f38b9b748786a76af52d8a -2332006187011ee009856ab10888aede -eb46d25eccca44bd3b2473f07f274e6f -88fb4c374e1441f9e7476007cd4b2d8e -388031af90ba0275bd0a14772a6a3710 -f8a17b16a7e0b445dfbf086a080d139f -bb65f46fc3d27f2b60e68ed2c807dae4 -38fefd2a83e4af63e0681025efc91188 -bb7fe4511ab06ec032f413d0b7fc03dd -e84cc50c5cf0ddc372f10e5f3b3ad4e6 -52641d68ea4225a85365cad2e8b9f00e -a71e603226b90ce7991ca17590ce918e -d5bffa4bc2040fb22be4874c872c9127 -3ab8f580cbe71b76654fb92712f816e3 -3cdf4c05b2b7745820b381e14a08b04b -a0e2179ae36824b4a3fcf155f054ae88 -29390f95ee8aab59d270b6bca3459c0d -ccbb1d9c2fa74ab9cb6e5d26681723ee -50a66919e8e024735de810f8e756c4f4 -0ac93ec8b52a1fdeeb35da7be387fb33 -f35a4d362cc19d20c77c22eadcf1ae28 -89153007bd82614d823cc2f16cfc020f -3f72590499a60e016349a9fcb10a28ca -a2d5b02192d4e4b474474b08990cc7e1 -34cf571e9681cc11740d5a012c62a9a7 -cad9017e4bdcc1655139764ff578a808 -0f41cabe04ec9fc973acbbe65ef6921f -dc31f97a26336e45252235ecc77c67c8 -9c24cb9a97a543082c29ab2a6f3f0953 -895753386f7113ed98c8b2c537ead1d9 -5346f6f91ec26f9e25f99c2c13bfad6e -8c5c9088cac7dee7ab949bc51bcbe140 -0a91ef997f6f0c18118234940a884bf1 -66c2031f5d8f1b43b9ccd6a7f934b9cf -0268992117b7cc96f77110fa58e3b2ce -263f1a143f5c227390cb5690a50b37b2 -e9ebba96d21bcd51293843411a02ff94 -077320ea16e32c5922d31f276d7cbdf9 -e054c96ff2a5e93d73d52b2fca032e9a 
-b4af2191ceec5aeb427f8acb66b37f66 -d661fa3085db53f667435fb32fae895f -e24e5a28bfa566bd90c8b7a8328ccee7 -61be061264a4f1b2c9b4b7229136f436 -eb1f601a779bf014bf81481ab38c0161 -30941409f34a77eef99a0aebeb19bc94 -8e7ec1f57d3a66431ecdd2389f9620ca -1c2bc2717bef6ce17c8bbed59c913c4b -4d5388d8d4768cfa4dc539b014b9ad47 -d098d4fdfa5e0d36e2066586a4fd74b5 -1410da8001bf22ac7a93a58bb7d45e77 -f80b332f61fad3121bf9f172995c8904 -4d380fedb168765b35ac46a5e40b1273 -02927cb6c7d389cac9557ec1d5ae3d04 -15dc1be76f0bf63f8bad1a553e80f0e3 -ad3038a1e0c7e71e42ce6a218469def4 -94e43ce38f1dc7d875be1cd0de4550a2 -4c6f79ebca7e8436fd2ed0306f51e6f0 -bdd2d9a73171c05dc349aa233f32d8bc -541c404d549c8174864eb11305fe2e36 -c1acc2074341e9c0287e9c7e64c9498b -c54a23898b1917530688762af1e671b9 -e8bebffba2b149b251d3e0d1d04a9f40 -5f1ecceefacb1014399582ae5cf97d87 -481e7a819ee5b5c42b469adca81104a5 -0f9335699ead73bd0d3d77a39e1e48ce -171a0006cf48a1773920b1e5c73c4d56 -1f8d04bdb499744d620e149bf143435e -6de1ea035335307ffed095b6cb6ef031 -5694b08e9b7bf517264fc0aa44c4f22f -0c33bcaf31605a96bd52388dbb99156f -92d57470a4a81fab5bc855a5642c6ccb -45f250fa3cb7eb64254ad0987e81409c -4fc869fd3a8e4c30a231503f3a3b20d4 -3668eef202251c4f95b0eb177235bb03 -845e9804ccf2e699b4eb8532829e0c6d -4c08b2bf81c1b72d961a575dc415c595 -c0e5b2a4104860555edd343da69ec4bb -0f5683cfffba4cf783090a60023d7657 -bd9d0d7b6dc4783f464f51d3ffceb2d0 -2a28c085fe1e1a5eb5f0e82586da55f6 -41f85290ec1738184e4a4877bd24e918 -d3867ddab5abd3b4cd9732b965ee4420 -9094db5758e7ed7cc555c44567b00b5c -0a6176f3184caad2995f0e352ac83430 -4f97753f016a0d0072a8affb3a3068e1 -4b0454d85a638755d8d3e0afd829da11 -9eb127ff23e31011afd25c4e34d57a3a -4071d6c7a18317373cfc64623f4151cf -ad8a41927ecb2134ac729de61ee682cb -8b70dbf0efcd2cb7ef338d597f96ac85 -eb6048a0025a72a334d672a8c3e3223e -29983120e02cbd005cf8dbc027e50276 -265d6ea73031124edfdfe209478b16c9 -2e971e1096f90ba264335a995474338a -f34c09a240145c1b4c0ab1eb2a9c3578 -9c069787fd2a49e836fade4c7ddfec52 -ed7dcfca13e129c34819b2060eaa0522 
-3f4be1dd13befaf4529fda9d694096b8 -4a456e431fd896bcd885e6656dcb2902 -14a5f32d6dafbf0cc840bc02f0915a6e -0757f9887129fd1af65b14209d2b2b1c -ae7247ab73bcf45d40f799bc8f4ed1ed -ce0dc429bca2675a954c2946f19b54a0 -bd77d598872e9fe163bbbc7d5a77ab69 -4001072532101684a1b2f5355414af2b -86f43d2a75dd6b7babec9a41221bbe3a -18784b10a7fce47338f8da8e7939092c -4802332bb23734135bfa8f773612e42c -873d69ba44f8b0f0686952bbf2ca1dcf -e1d80205da641c4b4c8d1004abc54438 -5775b12f0a6035c7216cd71f54481f84 -ff09612f743986e8654ce1cb7847c6c9 -0693d49d540f035817fd3b5bf59b67ae -823b260b859148e28e28f89f3e8b1bab -108dfb03e10677801a97036fab47d78a -e16fb13102eeb830955f08f2ad021342 -258b8004dee864f36b3c2a592caed625 -9a4d083f0be3aa61b25fb5447e94d469 -bf33e73264e77aca70dcd352460662ba -6506e18af818297e59700eddb0e201d3 -2d1acd65c416bcadea0e59c10af4016a -b9e2b93b35b22b11392f4eaf70e9fed7 -f0f72e817848ff7dd7806c5d1b2bfdb9 -e017c17f1510c28eabe3c5f14d471459 -84f4743a7f35c7cc1f1427801c05dca0 -fcf0a85108b10fbfdae6b47c924e43cc -191dc0c518fdf6aaf00e0167d3fa6eb0 -6d9a13530387e964607176ae927112a0 -9c4526a28015464a3a96a48f5c0409d8 -08c0203ee83191f5f8acb8ffa1d8bdd9 -b0824c849090498144200d7f499a5e37 -6963ccd15e67e766acca940794de6904 -e6fd6b4fb78422ce25b20eb22bc70d6c -45ae2a21448f08409c30751093295159 -cc4ad22951452566a5888a52eb424f8a -ded3ebfeb352d284af5a719b39a3d757 -700820c90a638b442556e4547b950cfd -634f3059a7dc8d19343c3bea1f38fe1c -669fbe1c46bc03317e64363233f8d497 -3c013f53111e6fb12943ff5b7532cfa8 -5086fc2891a848030ffaabf8df0eb753 -2705c650503b2003e6e9dfc8fc9a2b3d -d2bf28a09f0b9596816a9efc7bba91c8 -957390d6e79434864f5493c89b754707 -d15ca4199c8ea5b929995990fd63203e -6c906da908be6f741f2b089ee62ae5a8 -f5f8fa9450dc7decb6c7250b29874bfd -addd2d5f6954a71c29e8bbfdebd72495 -df77eda1623846a7e1688a202add3079 -e339c224de45e3c35ec76ae8499d4446 -0868d98453cf29165bb1402c4f75c6d6 -f624c62f823c87148e3d98f8383a3a96 -9e254126c5593f7c89d8ae1a86389936 -d6857f6c40ef91dbe30cd3e1dcf387c3 -049b6cdd156ee05094e2a1e8952c3745 
-da25638328dffd35804c0288e245552a -91ea3e2f0ccca7b9ad5d04f52bb542c7 -d1da628ef7c9b543e862b898a04aafb6 -9069db6fc1e23bd9c33542e543706245 -baadac4786369678f09bff15fa0f5074 -ee43ffe5f57473ca3688fd1c56ad7766 -d6a5b1406807dcf52fb310b0ccf1c89f -76592fbad5f1afb86685b0a980869cb7 -8e67683fe9bce52297720f49614ffb5e -f19b6030376338e366d6d4b40d78614d -5acc3731f31cc06b3871df4efea21b51 -fc691cf493f4ba53ce8ac6ca3d648f87 -90f0ee8506912e89ccc0b4d08220a5fc -92eb835b243faf9cb5e5276331e20a58 -e7e8e7048ab094b45543a382c7344fce -90313a39bc7e5bedf83edfca10397835 -72028a95871bcaf192c13fc3dbce8972 -463898b7ba549b32ab52507ffc70fdb7 -1bf3d5e38751c2426699b726834ce371 -be15e2c8c55c113e4bfe35195b7288c7 -fa5e42e5f659bdc7a0e4b42f2fe8e60d -6604f54347024d87301b8bf790571d8f -9d693ed39653bf140f044cb6b6da0988 -fdd4bbc8d12bc4a0b690461defddd1e3 -b9c4cf8d6ea549c2ede531d579eab1e2 -ece0c9826a92597f4984fd716c56c1e0 -b32c5989e2a6474f43ccd14053a31ff8 -904fc3621a3cbc8b7eb7c42cd73f14a8 -8def9dfa67073267f9c3fab9bb4d8cba -b48aa7d9ec577e080dda366bcc0f8ed3 -0bea6ecc701ae9cbed0e1567c3da4b5f -7b29c262bc95be6ff46aa514adc4d8e9 -08443a78d553d06fa1f4917e8d6877f9 -84d06335bbb1a946a10c21636b512965 -6e5570e1acf3a8dddb6281021ccd1c46 -f1542f49a177e1391925952ceb95e04d -fa04c3039078ff169518c73dd80a72af -2449c0030726b7ac1c70c20e23184275 -0b1659a0517906d78795e14115680685 -2f4af2581b5a10dd753260dbd4ae5b64 -701c23b6307fad63e52bf862141a8eec -74b746e74bf1b324731a47f851402559 -2136e15510dd6e197e4d3448f04755d0 -a25f949f64b6889a517b438db7461671 -349fc337e07a9f7fe03d86ddd6d1fbc0 -5060ae67e836c5102dae0889769a7463 -a294587cfe5bda82571baaf24e1a4fbb -a557523693c60f06043749aa84b59645 -d28059f4cccc1085bc49451be40342ef -0c4a4ab8c5e7916e619beafa8ecf69dc -bac578c694fbe521f5aa32705467ebc9 -15f18248521eff3ff636099c6100f455 -6fd860d41d3e1c1fd021b0f98e39f1b5 -feb1ed3134a5a25b377d69de4a3ac516 -7f8d25520cad5df8ce03792451d3e618 -7597eff532a1b91997ac461c8c03fb3e -ea9b57225d6e64d346af9beb90e78b80 -024d853cf64b4a34075600ca0412ee72 
-d7e387f83a400fb39f3e9f4cb4048c4d -1f2d8ac58deec98ec32562b1d64da66f -440d37eb168280093d479c0ea1ca131d -91d2e5d6f9767a65ce520b17a3ac323c -08381e37ac9b04329177f6e57eb7ec6e -cdbabdd823f8a2085bbfdbc9c600fde2 -60a4ca7bf8d279892731c751eb85fa0f -18615e6a97a9ca8ed6b1068e21e06c50 -39f85492b5817c682f2fc0e6b037875b -b6dc520246ebb892d60df211a6187c88 -5590469929677755c5f66eec894debf9 -dd47f772a0441ca21ae142ccf23f8b41 -5e244d0ea290399088563964d429c3c9 -2136a399b51f446c6e6a06db54b49d8e -027ff0a8ca0d93698865664051b15ac6 -2a075a2ae2957be7f5227f168f7e4d28 -3e9ae158574279f017e0430f4a294e61 -6528b9b7888ad329ee1948002ada9291 -caf4fb0eddc60b908c45d0c6c5298618 -581e584b0999ae9e32126309c2c58886 -470eed6f9352f1ab0c519e885422795d -e3509b1de07871ed718abd86c84dea97 -e32d45a07299e1d12f35c3dbabbeb4af -239d079939cfea3916644c84f1552dfa -a975c6fd07194ae943bfa5c9b8030ed3 -a62846a6ae5e2006a3fc29ee69b16d0b -dbf1f2a25ec847d30ed896d7e0300237 -4741868214db42d01be59d948eed79c6 -d556d11ac23b2e638bfcd184dea7101e -4553168488fbfecb79a1b79263160016 -e54075bdf1865743041fb1da9c483ad0 -3390de249fd46c5dbeede1aa92ae80cc -48525e99bfc7541f2bdab1e5e013526c -4822ca765cb768bf9a929289814eb430 -f1b90bf3d3e96c37686224c83721aec0 -5573a55bc89bfce32fddb0134a5ee123 -9b3f59a6e2af6ad34425077289240e72 -5e48069324232f63b1b15e7c58e9e2b7 -d4f667e3c6a823a3ce67ca7ded309545 -e4b94ea5b6ba2cf13893eecb18598d40 -9fbd46d11b38453e2d6377cccf1faa5f -90088524fe96c554eebeb2f8d9145a0c -3c3cd41154e1d2955b5acc683ec6c8d0 -214dcfe0e62783518ab78ad5f04f6913 -e1cae3055475bd966c861eabba07fede -4de39fa3ada06801144fb068e4f7cf0f -e10b401de968e2cc25dc5509ad484d60 -94ccfd47968687f69b2694dc61f96cac -2dec66cc27daa04eb7fe267279160656 -dbe3713e4ddae4a099c1605318ff6d34 -64fe94371cfff752fa04df4c6e1397fe -a6460cb34624335e7788cdc4b8d6f77a -4d4906758ec983bd50bc0d7021850ef0 -e71febe9056d7b6180e69167c9c8df99 -ccfe9aed43a9029a0825e0bf56b7a81b -c15704827ced826eeb6f968083e6d98e -414631c344e1675079f9123573f492ef -3c91c648a62048bba3fdb55b3dd713a0 
-183d9ea7f3c3a2fc9a33ef3885d3c282 -df92c4a29b9f9ae1b74fbea9eb396922 -abe8323a41c7f3dd830010137254591a -e0ed12e7bbcdb2c529fcab4039a494fa -05fa84f55e79021d1b54463d04e1a32a -18e02efaba96acd03474a0322933ca1c -43e5eb7a00aa9d78f26eaf62bd658585 -b1ef869397df762636822d478688bf3c -88c3fd8f5cded32d0d2bd03d43f9f4bc -ec02d697be4dad9a95caacb0e32ffb1c -1f23b96009e21e800bbbc3ad7a889bc4 -4072ca99316be65c82a41149132d1a53 -6072e96df19cf6d84bfe17db205ed553 -110cae122030bda352570f54c60fd7be -013047e3124551c6a08d0477d7c2c875 -6b55e22e358dfece5996aadacd9e27fc -49f827a3ca7784070287657162e9b617 -751c9ae1ebeb7b9de4a3d76fede17fd0 -7acf0ddfcfb461ee060bf610e5106e02 -197118f9421c1d143b8380035054799e -17b54e486f04627dcb2ad0b89917d14b -b4621a4d259db0c84d2bfac00b493681 -d704086f0f783c37e435d6f3c8648113 -d9503babab4724013c4a037d386d5096 -86482feac21fd6a0020238dd28d59ce8 -c7853bb74cb3c39428c725ccb53e1e55 -0e2c836a6e360b471f4c292ae1d0aa48 -516837ad61fb56fb1a8ac9c7b8f12a89 -75f0bb1a17c1a7a54f285401ec52117b -2d0c2fc8604228fcba70a4f9f3dfde50 -2734e8d3c526f989ed3e5e151394d227 -7d0148cc875ece18cbbe53320cc0c5e4 -e15c846d8287b6c1025dcd8b459ba1be -63cde5d9ee7f729599b1114c3ead2888 -5de5cb10358dd46196e675a54d67ace3 -2670757eef13f7445d624f16964bc836 -07f88c16f67528e19fe04f81cd1fd0cd -57f4ad3dc7aeabb83f4f4746da974cbb -08a6123efed219d7864890939392765b -974271a835f303b1b2d1ebd2b176bb4b -a8318404fd7d8b4c85babf91e50f2eba -32369af79890c96cb9c9e7a8f141c969 -f207cecc8aaed537c7bc3209a291bfc3 -7e6ab7b3c07df391e0eb19e3bc8ac744 -37dc8c9288bbd83c99dcaaf01da77af3 -44683edd78970e0b62dc292908052024 -312600a78e9dae61603219effad35330 -e740ce55f23fc4682dc0394a662cc12a -b48f936e6fa8e0205a82121befac9030 -88cb74815e7e221dbd83e97b80e6d5ff -efb5ac474e99810eb68fd159d6525837 -8495d40b73b3912d6a46617972a591dd -ffc24958c3805caa89836f5951d2cf28 -003d6da5081c6824f30b503670e1e5d0 -f2814a3cc47180a8a75b2c4e52a32fb0 -9010b41bafd4b62a309dbbebf4e1f68d -6073b4e2adba01810ca58f3c208d3ed7 -76c794200bf27b4842462919d6d03c5d 
-777a49e9557096ba55dca543b7da9a58 -48696b7b935ffb6c9ec32143d9284549 -92c00ca64f195ef4d7853aa13afa4e3f -e8a5b218c80f8f7abe6d0d68d1194308 -d5f59751222d112aad10f49b35dcd603 -1bcf97606a2473718821bd8f44243018 -295fcc6cc2cdf48202505b336b86808d -4eaa995c58307d1d7bf04c752c6b9d72 -94c5f873e267e10bef70bc797763d5ac -636e370e3fb357141c3688d18ffb23d6 -d5e3b3d7890f7d52e8677f7fb2f73be0 -49e5c90a5023e3d28d58ae2de0664c29 -dc3fd234d5610b9a8cf395c1afd1d892 -af1f9a6644044f44dc23e8748693670b -adfcdc9a0bc9145c4e9775ad9ad9bbb6 -8580e6a9c5cb6e614dae87c276a8b68a -8b82f782f68a5cb0ff152747a3a4a078 -31fc2447f2b0c9e62dba4005e34be51e -7690ff1c2f22fcaf8c305115c5a73b1e -9ff053b55914d132b3b888d006c5f923 -84ea1f1c1f7b6d5c715eff2363d03227 -3785780088a85ac08ee2b3d9308b3615 -d3f1dda7b6b86b3c8462524b809683ab -8311a6c5321d845b8a15c36df34c4ff8 -449559317cce672b25cedf28ac6ab32b -8a167345aae7a48d01051c33b5926e35 -6d19ddbd44179d984dcb301c990ea0d3 -995e3250d12697cd79cb958cfb931a24 -1eb73fa22af0d4d19fe69463c8386ca5 -07a0a71323fd8f8a128fac091157386b -68259521fb0d85ddb9232f89a1a6f072 -4557a8583af8d595d11bb8525484abb9 -3178c73ed64fb9d76595bb6ecd4fce2b -b52937f3996f6d88eec508b14c04d929 -c3a65b3e87e5df1a338b30385360e41d -0a3f85728782fb87782cc4ce0351e19b -f6124a844743cec252273e28b164239b -ac867ef6321d59ebf03b2c4820bd97d1 -8ff37d8039173b46b334fa2cf3c61696 -5dbe7ce3c94e687db86836d4b70a6799 -4bf96a6e9cf4672fd46435a6452b1c1d -443d75855b28a6cb2a054bf4a88faafb -b40af5331e319e8ca69880b72fa0023e -f77a81606ec26373eb4e973cb288c548 -0c722decb700e040f301ac41fec779b1 -2076633413f9360fd3d6c788e1b4bfbf -202a39bd4c2c82e53a9ac457b64a4141 -0c1efe8dec15178ca3354b5eccc96189 -6a4cb5a6509e649b07ab87e7f11ec97c -f42c76235ec39b0168f5dd5c01904d4c -3195a9198751c5bb9e9bf153a77bcd78 -a048cd6bb5980eadf008ca996879c99a -fafeb0c394684f2584c6c333d686fce8 -b61216a285911e31963784c4b425eec1 -877c0acc68dc23d619714da89aef387d -dd0bf131f32a707ec70ed7b7347cfb79 -d9ae265d998422550dbaa6fa7f86e85a -400539aa062d79e73293d28e81e20ecc 
-8a0d37b68770a87aa30e5d1b581aca28 -3655df5b497de48f4a3dff9197e10055 -f461f82a30ac3bcb72a938d207aec632 -bd39971758c884f4f475bf81aa16d6e5 -42b8ca813c002eb32a94a7b5a267d367 -36ea4ec630e79dccbda6ed2bb100d4af -6614ad9d5a1d91faf5cbc5499a3c8027 -5de43aca73fe7f876938870379b955b5 -6737b1b45265491d957d089ec2ef917c -a686c6abda74bfb0ec68e3e7ecdd54f7 -2288d3ae38cc4372b215f26340e4d381 -05661fa6c7eeab56d45552fee1a87af4 -3eb9bc81f39fea61165d2c2004b4896c -3ebb73ccd260430456daefdbf8df742f -a06baa6da58160c7a92abfc72388321e -163f27e0cb24f46d153493fa4c2602db -7abeacb499c2e02e086e1c38b9c04c1c -ab9f5c505b8fbaa2b31fa25ce507c014 -2fcf80b297eb6ae7ea038791ca378db3 -679042a25601f56fc6d255565fd9b863 -8ee12922673ff311a0268632bf84eb6d -83f466c4c3060cd97564878b01503000 -8123d69783a1dde07e0ad03f300ada8d -ab761643dd6df6b99275fdb2150d2690 -d41bca889ccee0bb1adacbae8419eb36 -6c45a24b98c1496b645a6adb530ce05f -4b6c89605f8a09982a9b1cc4206dff3c -11c7cfba0a4b6c96bb79c47d589e9686 -8f7b3a60ac7015ef357d9b3af826f2b2 -d5e75e04c334c9543e66df4967f558d1 -60494dbcba199530fb0ebbf1dd9caf2d -655286e2f441c758a8f91f6ce9701406 -17bfabe1526bff315b4b80fc982d3745 -308075a9badc0ca2f64750c5228f2e7c -ada51ef80ecb5cdf8cd54f0353727074 -207cf679d83747c464294cd40f2fe509 -3afdd791d031bd39d7aff0a6fc616821 -baf21da4d1f941efe4974ca6a6a50577 -18a9194e6d9e1021e75e31d22323c816 -991c7567cb9af094e0c2ede6a00fc8e9 -ea9e944867fca3b0b3955ca7e6533f18 -49c8b28e2084eb39e58a75213d1420ea -6ad374662814cd2017eaa2e9161c1e0d -83072121a5c41ddea36d07d0af40e828 -3ad745c70a1c548088d93946a918abfa -2fd2d4c8cefa171133c48a0e9654290f -74ad43dc5f823b3b60504e721f399a25 -6f04cd765ab7ca7b117c9ff075e93fb5 -7d1e6a46082de244bcb4a14d94f1599d -16d558e0ed6d398dd88df227c336ccac -442783394dad3c53f42e3254598ce1b0 -f22cabfd1872fd7675824fe78d8b01ef -84435d7189fc98e882099b15ae4b8d38 -7b3159f89d8cdae918cfd16e451ca677 -00580c32a0deee6597cd62ceb8850435 -3a31cc8cb89908a7349b5e975db628d1 -f39362c1037accae98f7abf70a4da95a -962b32833f9a5849039e746b11b38324 
-07ebb177d1dac81f9a5a9db4aac66508 -3520c21f170aa9de4af09e62538a1030 -0daba984e8adca5c27808b73b5ea1342 -5644125fe66d2ca32657f03bdadb4183 -77d185f70d04725300a9f6d39be4ffb0 -efaa79d33fe8817f2d960d86c339e669 -7c6a1e873d7a02c576c7ff2a3e3d5e4b -55d1264dca001ae84171a9c8c28524ff -03b7d965ed82303a903104251f06fc5e -c43a0b6db81e89eb8df1b9479e9f689f -1b2e1f85bd58c429a8729f04fd1d8f5a -31ebe99f6479dae792e0ecbbd8002c3e -4de9d7657e31fe7ed8e2ce15b6d9ccc9 -88c84ade75a5b0be95b540d545f41024 -3ae2d3020b71af84992d3610328415af -2b9cb84b86fdfe6c7a276a54de74da50 -ddf9d4e16e2b440caa8ab2a45c1eca3c -db9e75e7ed9d4d12f1fcd918608f0f5c -4375c6a2bfc2537855007342a5ab44a7 -9e7418ad984a4dc7a98b65e1164ac76b -a95c4473cdfd928817c9518e026d65b4 -980396eedf67bc72dea881311d1cba04 -6e1fa869e12c3f02245ad7131329fb51 -1fdc092e6a7bb3a8d12cd24a188a9570 -10f94802461dc186c1b026ed632143bd -b8c1b9c41973f6114c8513bac4ac5893 -96380f8439197f1ad78694490253a9d1 -628497e8a6e1d7dc031788d6fea7de9a -0c44b530eb02f1c754842d9e00a511ff -6f045e8b8338efb47b23a9f3cedba154 -d3da3f6c7f8554b7b0fe7bc74d23e4e0 -8c5b53c1a5501d2e461eb6423c718251 -2a9879a8728609b89186d812d916727d -2261341270fc9607d1df09227d382141 -81f1f98059cbe221fe70fcbecfedb440 -215aae163a962232883924262b0111a6 -97668a2ea015430d2915598fd6a9b5b3 -46b48d67c85f7192855b41d49d731aed -785fffcec3342604e1ec230c3fd69034 -296a4764c711023b7650c8b29f412e8e -56fc501c1830d8f260278d72ce48e9e9 -978ff18f1a64116efe4d9c2036572b05 -67c4e02a17d9a8963541d50a32e87170 -d08c7f5ec8aa8580a2d9347f974f865c -4d90f8a3f225a1d50648da927e6d6cb8 -2c98363cf146969b7c61fe81b26873c0 -cedad2bf33be9f7bca8800f8dfea9468 -76bfffb59c11624803b5badeae8d474b -b57104e5e784797824e7aea08e2e43aa -706e5cf859e37e59754605fe63344849 -f54e4dc6857c1414c7cbfe48408057ae -0c13675bb911638dff263ea002091c17 -014977008d5a55f506128aec5c750e86 -500c4941e846c4e9fa94079c5d3efaf9 -7e6b91c943d4b38ed21f2bbb571f434e -e1b24741343a23b70b51bb28b0dd2b50 -1bbf1b85c018a78666641c22f380efe4 -993178a2affb5b893a2c3c1fa1299fd5 
-206d7a2b6ec0a4feed627f38a02c4d72 -27073ef05f86e315ad094195e62c9c35 -fc23a276682fcaea70b28985d0b40b32 -3471c235fc83c3d1177ed31c4c463172 -95561b739fa153300830e5074d5e8995 -acf67c49595cfb480b2909b402f06af6 -1c833234c0e86f765467773845199599 -5e4302a2e37c809a58452ac31f12fc62 -eb97a7cc00b9b57ccd9ef224269865a0 -c5bedf8087e3b4638df1f0e0fbcdd64c -505b69aba1b8f6f92e0c87bb2156359d -68a670433a927806ccbe67fd60bbe741 -88cc8c56694d05360864c3b626ba5fee -a39b634ee0e6cd2649e0a4e4b32bdd34 -be30ac785e827b181eacfc9471d4ecfe -124cde04f57f28bb4f48a91125d94285 -e8e792104eb9e5cd04de291b513b29fe -362e6beba5e662d057bdcd70434c2e98 -8b78d548d095812a2b161894a928bf79 -e4c3adf3d2caadf52285e5e4596e215b -fe4033f15a94bd9f4f3eaec7f3d6e7c4 -e8679eb742d6e8f1675f201f03a9004e -7c273ef319c9e19d5986eddcff10cd0e -5dd7e5758a8e8701495cbc622ff113a2 -b01ce83338f89a76e876ec397e79fa54 -9aba290db2d39a5086676321dcc78e1b -26357be2adf06316adff82aaa99eb78d -438421ca34072f257066ef45dfc5cf57 -a271d29c2c9aedb31f4d1482d28e2516 -79a9e612eeff79f4259fc5d857e5468b -3661caab922f8168e4866866a3fdddc5 -a0520894519d0aeac7f0f1d6b71d1a65 -bbfae3fa555d47d8a7816a3997a19010 -21b01b2ef04037b2fa98ed50e8ab2685 -a1091e50f6a33fb9b9c6ab20b164ac48 -88c4ffb4d70b20b6785a348152235c9d -d0b99c3ab0dbe527b824b3bc8d9624dc -4d60d4c3c4924e3857b603971d54bfdd -fe02a09ea51c552645156dc3a43c5a45 -1c325cb4c22f04dd3f6ec12c5b1e7e58 -6fb52c94ddb17017ded914d4428fbf1c -a096baca72b226b6ddc54fc63c56f560 -df8c5ad3d9fc37da87915db15c29600f -58c3de8048c08839f8d52290d0f1a014 -cb8138d497ac65afacf61e9530007dff -842b2b2856c0bf0f2962f1adbcfee443 -01a64d3ddd1ff2505c6ccb2f5de3c757 -b97ea275288fd3a3f36bb984ae170b15 -c4e3784029caed973f9471e60c3340f5 -c784cec550404b352a94477fee5e9c7e -e3f22bd2aaca681aba5905e32bb1d3f8 -4cb013e99ca7673bd8aab688414f5ae6 -0f675eba6b73356a01ec4af99d8cff73 -31a7708d6188f2c28240d4ae3b1f9b21 -5c597d600f19ccdb782e14590eaf6169 -50008eb2ef0156073480178275976564 -68735f85c1994790331e3fc43ce59b01 -f7c79dcceb64b37a33edbe3fee7b958b 
-d09dd99f5974f7a98799821505c962ab -ee07551f07b4a89d0a3ba45f36b0445b -3bb6ac44e20fe45580d64b7c0e64e735 -5274fa7155ded2c9a2261472a0a64721 -06c80dd4b937f1e702ebba62604e485d -7a6bec21e4d8759d3feb8b11c9e79463 -9b096cc87a0b72844c63322f29014f6e -9c9fd8819854074c2df00d6b8b2eda87 -285827d44524822b05d657ea93b1633d -289df4d10906b7ecf56df68a1451c292 -241d5f6b44b27547297a5c5d30eb6356 -d53d21443eb2cb5ab00cc1109ed8187e -fda379009a5c62850b3f96658faf5204 -2e8d1730a566feb2b0b69c2cbea33f45 -92ab9e607ccb40696f24fcea5000ecaf -64666ad3a5d2c2d0978791ae40e19613 -9b908c30fece87a0314ad4bce60926a9 -3273889790beb0bb98088857d566ab53 -f9944c5a712d0695b03ed36eab786e06 -028f3ac62a5f2e577e49fe69ddf5aed5 -cb3d0a949bdb727c492775cc659f2109 -2bc5c0ddbfd37454a111e81cff16da56 -c6a352301cb1de583befdf6934f6345c -17e74b5e4dadbfd83fca98137745b1ba -636272e4d4ecf69e32fcbabaa93f4116 -1d68759028e8161cd0a97fd877c6461d -662b9c285a9796a6e411c1fcb2de243b -67fd5a3bd94833059c9ae2286903b061 -b743d93e91cc404e6148bc716236da0d -8c2729369f40a09be781cb4ddbc33e88 -1308595614c24977c6120e5ea44d9e02 -a661e2458691f8158ba1d2edbc56275a -808ee7d40d16d38733bf7978a77b4aca -c5b3de74ee0f51270e40eabfa759c70a -1ef7879ad44d4a69057fd3e15fab0e0e -f9ef68481fd68a449489a5c1c678b186 -97114d7e5865779a39856a3ebf3e9889 -6c405042557354948409db1a0587056a -e03a07f191a1e2f89c1712e6dc7cb866 -d4211395823b3b88eb5b1d261e564e76 -d466c5b1a12a89ec86b3520e352ab0e2 -2cea38aeee67ad5cb18e2f16fac1e971 -9e9d3272bb4f50f360a62ac084b62e38 -021209a397ae21125d2f1522f83693dd -efae6b14e4d8a50f402d91bc8c5860fd -9646c5974387204ca47da476413093d3 -b407bb17b59b7b9c80eca1eca2c43ee3 -eeca017c9faa25ac919bd262bd1c3879 -e5e6a9c2bc3e189c96b4228fa9cbceab -81fd06562e327a5b29820eedfbcea089 -c0f92cdfb2389db655b4a171e3506b7f -a56f055d1d1a725b1cc4fbf759a637d4 -bd38090a28ce35cf16773d350be0851a -d08b353d78bf9378bd76a4b82e043132 -fb921f63866162af0184f39ae6475153 -2d3bb674c370ee5e0bad16cd56a8754c -4d9ce302e4ab6d0fb99aaed297433a6d -0300bce20ec28e85831594e6f0f1afce 
-76fb998fa19d75695bd761e6a8e3dc47 -8e423f7b9543c0882f55588969f827e9 -c20012841827f0fdaaf93bb45c2a7d72 -d8228bc95684eeb618fa8a75faffe1f5 -524dc3fe89430bb79a157aa0800c2271 -cf7753bc7c9197030bc451d15a5a8fc6 -4dd056e3782051f4db2d9f6b7fa5d9c2 -69a4b55513ac05f301abb645e65717aa -ff388baee7e786830a76b89c79fbac98 -f0e1ac8cb8cbfd305469ed0ebfb95f06 -23c906acf42387a6a322dd8ad39eb986 -2492fef0f94ed53dd1a6e7a7af4a06c4 -30111cb424cfe0fd8788309526e468bb -8a6a147c0c31e8d1ec8bf5e8d8dfc3d7 -f155d54630ac9d997dbb69e42d73a8ee -61bd781296c75c0eef3df6c76b50d222 -6de1d54edcc02ec8ea3ef336403b5ce3 -6af43d9f3c297a2420544c1b20209feb -c0bfded11a31914e4f8bfe64a55cdb79 -95ed88881a9e5076786c964267720e79 -5fe621ce6dcdca0c028c7e42e0fac503 -04161d67b341f86db3fcae2b01c4d8be -c2594f52f373a2e54f60a256aec780c2 -8d8a7b080cdf66b0b6cf19e0042a024a -d0923c304e9b69b3fd3c650075385923 -e4f269d703c47667cb55d6448252bdad -8b0cee426b2aa8bfe7fe05eb6028fa8e -f0aa05826aa7ec93412f714c03f58c4d -720c40e91f91987d78a564cb2f229248 -ff419baeeb0c9e8e4f33d7a77ebecd9d -9e53f3fcc034fc2ff1df5132cc46609b -8a06c3365e1ca715c88328854b178636 -87bb91ea2d189007bb0542410957621a -db208ce9e24087938b515a19399ce602 -98143601b72e60ce1deb35b8fe7ef2e6 -588a397435948ec5cc5ac1fee280a883 -f490c1487a399f315ff18ecb011283c6 -cc638a843e20f4de5533d639a6f36758 -a98bc613e11d87f0ba8770071d4c76b0 -0dc5550cb7bba563772f1a09518d2688 -891430570b3e4a40d6de6fb86c5a1902 -4a64ca8c2a6f1bc71394f8c28dac80a1 -370a61eca8ea48fec4cddcc3db90b722 -2a550657e9bb235e49618a7ffd4e62f6 -0d385f69e651460973dc2bc25d8b7676 -e3ecf3101e2a83ac5c838bfd80fac9e5 -ed8b16a45ca803d0894789a728a4282d -5583cafd79521747ae9b8e98770a9be8 -98a1495d87154ed117b6b58a30526c60 -a81832b6d1412f2939599a74f945b54f -0706af34f7f53aae9bdbe4f8427aa59f -c1ed0342f6699e48afb31439e5c00767 -b2c467288dc8d895262b62fe6e2bc613 -d960b5a39d5e21497c4d0894b242a582 -eb8ff1c90ba607e0ecb7ee5bc82fac39 -82fc97233071ec9cc839ccd5399ea433 -e49a7d5b728d2ef002d261c53b4c8315 -6adf383bd97af29680b2b2f6db2f5f3f 
-98c8aa53a033d5b25294b35c83a51325 -0325c6080a0734299fb0a32a9552bc4b -87663e9ed96e8d9ad5cb939c8280d438 -76b43278da113cb4eaf8e47d5df65b13 -8a2936e065cb12e766fc5fdb1518ad04 -4a5d3de497752fae803c3fa4d088feb7 -7aba0ee4e82b268942f1f0955870eb35 -34cbeb44c0e05b23de39138a7f26ebd2 -9a95e24c9497278f4d766a00476bf66d -7b1a701928585606035c669aa62bd466 -0938b6e6099a82f7b10e44eac57a1567 -7aac21f8713a1cf39844d33eb4c54893 -3f7f3a888cd588632e11d30302df8f83 -f82416efd00c88e17ec48ac77748e490 -c51ddcca2348f9412160ea3416fb0140 -76aa1a579fe64cfedf76b26ba7180ee1 -144886af02feb1866b83963626eff0ff -844fc86b72e7465eb24382d6e4f39f41 -3bda897f1e9ffe78735d2e8eb276ea91 -b0b583515b99ecd636b78681a779cce3 -7076c9da86b0c34430ee3ac0e034d73f -44ceae9d5cd0202393ffe1ed7398da9a -19e87105c73010922313462a06e5cfc5 -5c92dd097535b3cc55fd362c5db0f4fe -ba740b55d0ba4757ffa27df409769b08 -06fd02946e301fcbfab4bac27feaf601 -a1c014e0aebd00e0c686199435293f86 -cab2e7d0690636d174c2693bc04b11ee -c59516b849ebb22ad9f0130e1b012e2b -dd4b7085a2af43348ec94c6820bbbf39 -4a320c809a9d13f2a6622146e0218f83 -9acd1388ac341a9f4fb873d8fd9c7310 -1d2a79a89242c95c1666e0dd87d83973 -d19706f127a83fbfd05d03623f55a7d3 -d77bfea1e56e238697164652552cf8b9 -118847020261406e183e413a6b3c7c7a -58e4d194da17b526e6b9e2438e904b0f -f1429fc3018f3b90070bd750a79c91a4 -025d9252c5034d371c7a1f9d4a5ca8df -9e9fd1b904c7eae68c2e15de644ba498 -46e8c2392c78835502d756f52b6a3921 -824ef1306c7e6b9e40b798668b1730d5 -b58cd97806f9c2bca912c058e1b180a4 -4d74e6e278301b5410b19912e0fc9f56 -5db23e7b124c9a75a5cd76a8e1d67d78 -6f568461466ac41bd029aa9400c17cb2 -e97662b1d8db841aeb7e18125cb94a38 -b60fbf48ec234cdddb158819e137abfd -1eda46bc5e46dec0c8176e647bb9a5b2 -42ac429ec11c997859a88c67c3a7a463 -0e19e5a6bb44dbfc48a503747f69c633 -b8c92ec40e263238be07f736c6cf461e -144c18b607ff83bb13acdfe440f3f24b -f9a9cc83227c631099889a6b6d53df3c -c1f43334a99231377a46d4e0b56697ee -a7921852e14d98afa118a29039acecce -c22c67686076fb88349da152c27a9530 -271c894b47ba5a254340239c0f25d88c 
-766148e457bbb4170e0d2c6483aecbe6 -2fe71084d1c766ca2a2418a53a761907 -c052735fb0f986a6004f540da9c81566 -f0bbfd1286b2e153dcfe0ffdb1a3c278 -8bc4014356886799b82b23ab7bffbb5b -4d45b3987b59453a40be1e268a7c06ea -c25f252168468c48dcb10a173d987a86 -7c5a49eb7c7515ae1d051ffed2f88a48 -cea4edeeb8dd00c05e5ffa562089b307 -da6276baa9a55af9a824864bfcaa600a -60636e13352e1f0d373cc95c62b86922 -4fa317450ee801e05ba822754b073b41 -ec1c4a1e7c71ad8817dd07f66b3acd2d -3a1017bdd580996b1324396582f4f0f2 -03963841a3df6dcee6403d2896de5471 -f2b73df3a3791ea17f996c5993569d94 -ab3d670fa0fbf5f88a1e63e2e5172ab9 -f3725adab364d33088d53ccbb2fa9e63 -702b4f43484567845e5f0ce07411a031 -aceb8ebdae2ada04e0bcfe237b69f9a9 -2a84054e93e52c9c6076aa1993826f5a -df19daacfbf69a86e291b5a9e550f167 -3c097d671a99174f432449cb14ad615c -e7bb6deedc5524fd820b82b12f97af19 -6ae56c2227bec0f4b563d881a5566bfe -8d228724e1ea476132993a6e50dac2e4 -e708ca2c0229ef3effb3c35d290aa6f4 -9197a5fefe5d636cfd82ddddb8cd0a2d -29e60b40d7b4cf8c69bb4be0fe121c9e -0bfbe4f5cd5b61901c30e82268ef73d0 -7b058f0cc7d0e8ce34ff04db1d70ed3f -ff78ae16cfbcd1946b72fc92e8b18d43 -c7ad0e23b8c193e933a22aaddb944f17 -42e9db26898d3af5624ee1f7e72d09e2 -9e50a7c6057d10d14fba288e6e53357a -97343d8b5375ec3bbd429d72ffbf6dda -92ee1e312d3134492c1f9a6146022f3b -4bae89e8347168dad2674aed4b01d65f -3ce36cd26f107f02eeefe7b7ec303709 -3d14319fecdc5900dec3ae534c41da98 -8dc4011ac3dcb49d2e725a6b15f7f900 -fda8380799e04d7c236d23532ce2f50d -f5e72c062ea2fbe27914e68a89ccf932 -1bdb376598e9d4c350b3d7723d0ab883 -a7358eb93f95a3823ff7c3a92b33c828 -69c3b468d44effd2f0a350865daaf1fe -25037ebe245f0cc4ae3df8802f8fdb28 -a9c2956cedd18cfd65fa6f5e17809cb3 -d97b4364a19013c53ec88f43d41d67f2 -07f6d96cb2b9c70b332e867d17ba87a1 -4d8d1d80d3cc51762db6a2258c392b75 -49b77d0c9ed39ec55fff0f2352c7913a -88f2e016010cdd6166dec01468500c44 -1eff0199ce1432b3de7836c8fd0e9eb3 -45870ff6af69514a0570843e250a3668 -30172163a0a56aa438abd82d09da9215 -d98adeaf681d2a8a8d5b8c5f40868d7f -5ecae1aac4af19347884064789ce24c7 
-cdeed26ca76d8bf147cff6341b605cc2 -423cf2dc180e364079fe118aa3fd3b79 -2fb8f1a9b9c47c26e49dc113fcc780c1 -91ddb583844357937cc66af184a99f14 -a21b41d387da1d960675d2bcf010a834 -f012519255d833479ed3dda872496e95 -57a6ba8a05cc69815229f7b3b8eda974 -585e8155961e29265b27648980ce1942 -46f68efe43c946ecc28b012cef4d8bc7 -9e22bced3f55633ae96f37de0e5cc876 -40ade34090de9aea5b3f904f151c0a24 -9ab13a689ce0c70a1cdb476742622c7c -14e41eead02bdf95e9244adda3e4ac17 -c8887062b86fba191cc5eb71ee74a7c4 -a10eb29fee7b710e9ae95daf3f77752a -20a6dc72dd1a21eccac435b6ddf56de6 -1e28b7f24ac36e7fd17deadf73177f8a -e1348aa8d4446497876e4d7aaa0c1e82 -5da02590bafb64c9c8d395e061a30955 -13bcd4e0b92d7245a65a3799f1762f59 -c7f1372208c3ab8a6fd73140e3e83117 -b4998870dba177709ba62471f579a0fa -a140509f33480bc3398c95cadad3f076 -f523d5dc4972c40f12dbb5e77c7d18a5 -3f09521ed6d8e6dcd303eaf473f2f488 -ea138667d50204faa4f3d5f8e5fd9f33 -488c60667ae37ff93d21229e1d2db99f -b7aedc10700556c2e0e6eb57fc2917ad -7789fa309add0ea82c660c102cebe486 -bbc842ca20412fed529bdbf265111294 -af5effae425ac4429c5e4b44aac1a5f3 -2f2eb7e2bb465fc18640380597f99acd -cf99f6fa720c80bf8abe42bb1eb817bd -4cc18abbf9ee4d7a10854debde80a014 -0ddca504f8a07785025950c80ea15807 -2117d047e7e57a4c9e962747122397a2 -289fba2897e25113b7a8667d6c02f4e7 -78205b14e3689011244b974557ac2db3 -b98995c64fa72b111a322aa22742f86f -80f0f569167ebe8f4c927b139cf4af0a -8563d3953d2bbd45a7fc03893f29152f -3e7d7f935466c2e9d74726ff2d24bd69 -59f2160f3a5b04d389759f896a12a4cc -40c7bd0aeafd69d5d46f9601aa279fc1 -eec732a1ac48946734d5dcca545f6095 -32f26d5db6c42c0ed242b4fbff78e493 -f50717a667b45cfff8e4d18c91acdd3e -74f2cb74b108106be97a7d1ece83ea8f -e936a6359385a4e9bc8cf9d9c1279f34 -d318e4c89254438533b85813c9732b3a -05a2802de287f7b58f03eb43be0ce0e9 -ac3b3cca307e889ec2c0c5fc6ba1419c -59d810157b7dd8b7faea5216858e600b -f34e5c0d7d3cc4a83235de09cfe292ce -7e59c28036784c7a6c9a70e3d39ed417 -312915d6ef990c3945833fd0cb54c397 -c8d260cf3e95e9c5849d793696d53104 -fb8c06d94248e1ee3e950e5114b7a1cb 
-ac5759d302a086b26fdcae08eac93751 -46c99c20567b9270cc9a0513c5b4afa8 -d615cf9ec2069c745e7aadd12f551e61 -aa37968897a4bc6f5a9cc9b54bbeb607 -9a41f06a3173fec0e79712af9bcacce1 -041d420aa73ee144af6913b564442a79 -a0fbbc6428dafeed594aac802c7c561b -60cde240c490b4bcd072fcfd41837e0a -736ec39890e3198392d7b0c16558156e -5ec2974755c4b55ae21f43ef824b5471 -ab1c7e204e63fc1f888bcff2bae31df8 -783dd3b2c95b5d1696e9e0a0d53ae5a0 -704db8d5251d7150e670002f44bafb95 -c334204bec86b88b84e7ffb2c052ea9a -72686dd4f5734f35dc04b5bd309e2a7a -38a4cbf7f4db062d9012e743e9e460c2 -b5ae6c3e2673f4b721932ffc8523d5a0 -867c6d33f9e87c6bd372b86ae8ccb4b5 -3b472454f40c0a9092e72f97577f9e75 -bc10202d40441b639c22da716f86cd69 -aa8da2dd0af32ca3ae2fddfdda350992 -66cf5f5e5d7916150b3993b9cf158ce5 -20a69b68a5bef700134c56e473313a6a -79da6887a4a48dc4a10ac81e5df82862 -bba72302aae752e76b76e62aaf0c1698 -e8c6009dd893b5590c10b675ec83896e -1a096a3eb9653e7d5582e7962782dc1a -fc4de94aa0a74aced4bfb5649fb41b93 -d6dc00909815fccbc29d38648f648212 -9e132ea239400634aa9e17888afe88c3 -eb705116c07b3a1c2383d9a16c22b371 -9f0f2672fe9e289853e460098f0f526e -cc7241927b967ad64c63d86fdefbff12 -78f060574328882a10bf60731644513b -a9c213e2b9e9807ce4aaf3597ed7eeb8 -ada32e991a1c14cac170eeb774dad3cf -6829915139fc745a388b3ebe7019c52b -96401fe4bff61a69c8d12307750f6ceb -c8811fd651eca41f0ec0be49ac09c7ef -74efaa8d0661cea38e91e2ab1993b4a8 -fe0e30092932c79df1eba1721ae47f99 -ea28b409722f8b49102bae089ab089df -1dd43020abc7b004fd32659ad09b3950 -69e46d885d02587ea789d0c33901d8f1 -e983edde15524a5c45c436d29a947308 -ad81ab80abc9d74f46664cfe1bc0ad51 -a774b776213cd1b08fff1e078cc75863 -1292c47781ba6f5f9d4964e5f4aa7db3 -29067aea6d92ad6fe0619f01efebfcf5 -1a740892dc1244e5ee0612d5a2b359fa -fd087aae9f8d632f3ede8bdf71174719 -cb42ac3054e48b8ec7bb599cd6c2c567 -5f6f8cdebfa3119258c7a2d91b4b7eeb -a4cb2c46f32a64d4748b5b5c0b28986a -bb7fc69fc0a1f053b2498aa1d7daae28 -39e0cf8f40e24185dfff926e721a0081 -1688f53487e0edfe4f56a635d5b9c69b -a5b67c4be287e31d66e5846adad09871 
-8275e330f51d1d8e03371f13405ab993 -e64f87bddd9b82276f60f8409dbbcb6f -85c7a60d29c60a5ea024bbcab7d3a1ea -30d514b1334e01bae45ec1797abe0b44 -c8e24fba9d54f7604847fd691a8e9417 -d782bf30995cb904bacbfc0675cee9a1 -05c40d29fcc418d13fd25d053d39a185 -9497d88e67a9a9a51d308a633d388233 -15eb3bf61b11dbdbd2fbeabc186105e2 -a2265e0b6553ed570877e2ee4fbae183 -50000e98f36e43c9ee0d85894a4fe691 -a0f44fc812e799d0d5a076195f3a3fee -9d8eefd509057d9a7ad6fab110ad7a5d -c19b696ab3b9ab71e8e6094e4272e738 -f3b941e52d21d734432949650b74c1a9 -3f0977eba0e772856f8e8d648e054f86 -d3714a1589d08b9efa24e125e53ce963 -e0135b2900ad1a77b087c680f820628e -d8248067c44b12df16de93e4ef0893c0 -4a857c461332ceec41a5a978728b4df2 -de775898a43b412d52914e1d7f9661e3 -b88faa6a21634fb364ece8e92740a5e7 -626a9a4ab74a8dc382d16a36840cd58d -a7518d60d99329290f7de6f384d03197 -2aabad8dd5ed7378e65d18220bc45dd9 -0098dd049286b05a7cc5095a4364fad3 -f1cff0881af1359832e347fd648dbd42 -8ff67160a3105fbd9e3a23f2e3c04d45 -e55a6e42848266b84ccfe00da3712c8a -dbdd6763264d4ba4b385fa8146a33104 -bd51fa378b3a4c0599bc1426b18c711a -76fc625c65d27845da63fc9734259173 -ffd2367f87b3ccc85d67cb753894e456 -005422de9118654f92dd385b2ca1297c -e1b50f2e2e3daeb816891df2aa6de437 -6fd903c0edd145c38a9165e090e0f45e -cd485991665b2d26fab6d83173f08d6f -7c3f157f55a48c9d7eb6611d78e3cca6 -f9a2efc75b0f539ac8a5b0073ad4fb90 -ff6682e92d86e4191d3e974a2094f98b -949f2257232bb0bc7a1cfe25225a5079 -9de2eb71bd23040cb48594a0d672aaa6 -f3766cd5ba87d9b6e26198ed536808a0 -c72eb7f93a454b56a9baaabce3a3c0d8 -c8c0d3751b7a4af73709b0bfdf694add -99a05a6278f125f3e2f60a9577ecbd94 -2c33bdee2942840de77e4bafe11802e4 -7737979b48dc9113c74681838b8076f3 -a40bec1bce4728d2f0ae775821c4b91e -2102c0928c5ca756c9d3de49d47dc774 -575ab8a6e092a26b07fc903ac7d8a5e3 -418c8bc2b627d6fd1b2882bfe4cc27db -ae1d2ad68cda2f8702dc231f4e3fbeec -cbbef281acd011e35c7228bcde8e77b0 -9bef168bb41dcc2ab1bf160c14f46402 -a2641723969f8ef19d565e13d370375a -5fcfbceb0207d9c6f1318b9a7e607b28 -be71f237a96e5df077916d5016f97faf 
-9fbf7d34793b2f7560791f8d3508fbbf -8c0688f9936897bec0cd4e75a16f902c -f32ba6813200dd60781a732df4ca8c51 -c280ee7755b4f6ceb0ac59cf77ea3273 -f3a8b34ad2bdc0019a4ee670681442c4 -5d675eda5f36a96e3257634b08af6397 -1d4e9589f9852c8ee14b0d1cd59f0902 -2a8ec8af5aacfb0faaa88fa00b5792f9 -45ad2a49f5856e3795a8077c52c57052 -277342b586de42f2885d28460b3be509 -fc138567213de756ac83591b94f55f09 -63cf0791b83860d68c61ee6199fd6769 -1fcf3dca7f8ab38654ac7cced961d836 -7744209a3bdc98a0470106ed5f205d33 -ffca4b857f0fee9c99ab2549509c13ef -e0c32a4f4225c74dfc177c0446adc7ac -d74591609db45dc24e89d75d4c14937d -6a5a742790e04bbf6824701b9447ed3c -9a095ec54fba4beaad5d086aa5db5cc6 -4d707e83df0bc5eb8f63afbfdc8fda16 -0c00c82f75092b8614263ff0c60f4268 -4c171b83d17f934423660837cf742ae3 -283dee2f64e7250ee397d7e9c2e31349 -7a435aaef73548093fd1c525bff69863 -ba4a2e5ed06aed7bfef5d04b24484854 -7e7958024a1ef5809c3bf0be87e4a82c -d98319bcd8c46be916e8cfb7323270ab -fd5fe6d70ac1bd511d3e127e1d08d3d3 -8dbeb9a59b1042ef59bcce91aa311872 -b87ed3760ed9a5f8f9d91f92d2bbc47c -323e705406cf5e0916ee76bbd5518ed8 -2e5c94faa801fd09a5e24c330f12b719 -5fa7659dc94f176b17ea7fe634cfbc13 -060236d6b369b85e94283452f37a0fc1 -e2148a391d879c3219e15c6ef01ec3a6 -63eddaac520c7238ecf5c0a9fdbe233a -799c65c14943a600fd7158378f110f16 -be5922278eb99be8c6c6ec9c9349b008 -8674ba7ebb52b1dfd88fb25e6f86003d -3b105ae6a37afc2f33a7e6ecc560a947 -9ecbd01fec5b0c55c377792ef4b0b596 -5d3336f6eefa3591148a14760ebd7f4b -37da01606c4515150d98e5037879f398 -8310340a8d464791ddcf20014954b915 -03fcedd3cc06aab5789fe22ae05f0009 -b9b80a45b5725e4ad7818cec4bdcb79a -9a298be13210bd488fd4f15c187d3d44 -86024b33686895c11ef169b53672b308 -1275c93751ea6d3c2fc68239a3576510 -cccbb8a6b350bc326dd5d3fc30938e3c -221c23b54946e7f48e9f4b379cc5b71f -525e32228e2bb7a6bc2768d4110876f6 -bc67a9c7ca8b93437ae2cb8df08d83e7 -cc2930307639c93441c9928944ac3ee1 -6549693790875b9e356c5a38ccb82a01 -6a499d27e389d135dd5d272c79f7539c -6ab41b489a1b47451a8d3765ab44af30 -a709e64f2400cb3466ff14b89ae2259c 
-bbd737a054ea83440809ef6a47678cbc -d3fa2cf3793e02d2dbd71e3a87296462 -3e3f1e2b5e856abac7984e49c1d26882 -048818899b224bd791f3eef3db78ae11 -9c157512fa4d3fba71b96ee8ce6ee2d0 -851ab72b6394aa2b4255be6c957a51e5 -84951d8053ccccb5860e8b6a9bc6677c -c4953249572583b1cc4956d8b5b25edb -f25715a3a3d8fd0205c0cc8270a73367 -59027f9809069020e2d9d3c16b130d41 -73d87d7368e38e6806984eecd5824d49 -ed8c7d3bc90dde0237f0e2e339aed8cf -bb36e18aa0a1bccea8bfa8cc4860d2ab -7d5ed8bda7e496a359a8798df4ba3d57 -b8e97749be6ad20957c43aa13ab1d67b -a100e7cb39584d0501a8c01d773bfbff -c79f9cb04e04b72d585c626899ad0c58 -4994da7ca635ba6204a30734195fb7b6 -cef6bb02a1499077ed98695641d9f33b -2c00c7b403051257a88d972e70b427b8 -d6809dab84ecb35c08f7bed999001e90 -7e7c874d266bd7e1e526c7e975522fd8 -4c97da63d2ea9bd765551a548e080ad0 -a440a864fa16b64444937f1c1f45d3e5 -b0cfa55ae914f743fb29f05903546929 -41c2556f12064ed4df3e04cebe029859 -34311f2280ae65e2734b9db6ac065da4 -a1dd0b672a0c74faa2ab24a1a8dffc88 -bed4c4f4d9d6457fefdfeafe94dfb94e -209f867d1ffc3cb5f849b7f71625cc46 -e388103e53821dd6c35aa387b461aee4 -fee4cdd0d627fc0caa7fd92a48b15c88 -bcb197f9f204f12d6b0133c85a594877 -bebb327e3c81be225b3fdda5bce0cd1d -4e48ccecd0e6e2f69c56ddaa7e6d8d89 -4a1474ee374f52a2b0dd4ea45bfb2af0 -d434d0a821bd5c0b3f630f112ce2bc8c -616055cdda33b1c9fff1a3125a8ea6d3 -bdde349f8d868a9836de95539b4e0347 -0ce0150c442504f52424859065948eb3 -ca3860f1ba114276a854d0384b1a8436 -5f30b26eb01da983c62995e66964a328 -a29ea667da35ae5a0a61a85e7ddf6733 -640ea578763c3c7efb00b237bcf8b302 -6a8267f79baed0e8c6b999c7002ef7aa -e73d249c49269d5abad0a123dc77b189 -4481fa30c7da43a3d78ab80c1fd50875 -e78719e418100098766de89f0db13d36 -6e508ef9fd36c20f1106de7fba5b1ca8 -c0e7e5903b8f23b62265cedd355c10c4 -d352507b68196bb5e3abc65b807b01d4 -c0fa851cce19148a2a5ec5cf0caf6795 -ba55ccb5023eebe2df0a0c24d694f1cf -8615a8289f16c0dc054770b3372e1528 -6c4ed3099359b06b5ee114060cb8129a -2f3dde3eb00e3917a0bde17821e3bbc7 -8f068ecd78a8a4673ae0d71cde5fd008 -92f38dc26155c9254606c9edc0da3617 
-4ab6cc466760a593af5859bfd4742ebb -723a56a108a2348147f922740f60af3f -c2e1d125a4a5d8638c0ebef88058e0da -5ddb844f6fc42a69d47f43e1b77b416e -311b6dfec47c0b6ecf5f28e0af1c1dd0 -72bded5c4f738a231f4b81e8b25be228 -3350f9ff96167ad52d2e10b482f55a50 -6632194986393eaa1d93fe97680dc016 -7243dca81ecf03678d8192eb0fd57b1f -c8cee474331cc8a28c56644daa7b8d9c -47a929e06c1bd4572db40d94ce9098b7 -1ae3366f3ed1f00dcf09dbad8a26a65a -3fcd77045151cb7e25b37bbca19289d5 -b13bc854973dd3116abc1ac5770faff0 -c4faca8f710180583ae4a0f76295e1cf -ed9c9d63db6d6bc1349fea6922e6f5f1 -a8ee5a7da85495647c3cfdbdcb4dd81b -4a29f228e6646c2414516528ceddfe43 -f9be40d611436ff7ba1f609d8d4494ed -6a329cd22f09d9ee01b51c09f259ec3d -06db500c432097049a22b67616c1b897 -4225c6eff2766d557bdfc646bf464e8a -32f100f5d95eb3499b74b1b9d0dfbb26 -0485bca60a660bea1db51696e1acfe9d -138eef7d3455500e779916d536ecd152 -35f3e7bb1e290891a2f632db419db812 -8f95085f44667d3894f33af4695c2b36 -bc4b0033ff97d2246943a5bbd6aec32a -16c69859e328648f8e8e5e75482db196 -969739f8a29aef301a8da49d9c60479a -17a0c897f9634daba439189efb0c79e3 -de6dea610ad8a0f5ee99e92fcfeb74c8 -63e71e67015493f8ecbd9327c1aefb50 -f66569c33264e70a1b6f2fe8bb699fba -e5779d5b3185847ab2fae79a67bb9e58 -d4c50c83b6537361681985b25b7ab693 -2f2a0d3629ccfb134981952e8ad8ee97 -b7d5188b5839431104b7d279ad365e20 -f8f14fbb2910f26b18751560aaf50724 -13314114d9a21e8610254cb818d3123d -e6510aea691ae15d3dacfbe0c7b530ec -458b0c4737e75be4ad1deb7f37258039 -2b9a96cd9cef3fe0e39c8b861325bfcd -f8b40542d0088942ae1fa669e4c29d83 -14b862cea7d5ef883296e11f75bfbfb4 -618a8efd90dfdead4da995cac2d8d266 -1c5df91fdc346a96383eb7af02dfce41 -a36459eab2e24363a3e5cecd4a8a37f2 -785cbeaa7d9a1516fb10cc56ed6e0318 -9dec340b1ef8622a1ee5b2d99bca0fd7 -32b5c279054a46e6f0221de02e8c9992 -41e5586b654dac41dc605a815e339ce8 -ca762683e12eb570f4f31042aec69346 -5d2b0c52dfedd0f3cf2b428e91b0454d -c948c8634f008dcc91a2d6df06bceb9d -4955e45686a1ecba9dca2ea15894ee8b -3ea92ca286ec9fb7cc4bc6ece4f20926 -27b704808e9d00346fab488ab0805ed9 
-d42ad3de3db884da2ba89a2a28ae2568 -9619598039391d30070784bce3647b61 -0fe871d49b352dde1564cc69f60c7e04 -d73055e518bd6c8c03123cf923b7bd43 -49724325c73921470cfeca44e4dab82b -9ed754287433d15d950e86333f75f721 -2caf1907423819b87ca68e90f5db1c1d -3251542188a001642879c3c740d4bbf7 -e353f5af686eba4a59134d6eb2934567 -41dbb488f339116654ff207ef5042763 -463d8f7858ffaea9bf7b01b8e9bd2800 -b6aebd17dc8b9da112d33e20f2dcc735 -4a261930870bb10428bf79497ccf3532 -80c2efd695cb6fb8c4993a9d7d6047e2 -d628f6454b041b1ebe4a49a2b2d3c12d -deda4059fea055069a247eca9b75fc4a -6d5e25a7a87f55fb03011b7d03b1a3c8 -35c605af6c6dd9d56f560892e2406cb9 -e7c57641ea6f777f8748e7ab916994d1 -872db753431a63e2e2da50d6f2be8216 -301c4e6fcf92c113407e122385edceba -dc87f50d13b7b53aa33a20c0175202c1 -42c06c2d47ddc931efea88a95e77bb99 -3d52a9988bdc50e994caf4bbea56b973 -36307ac8aa1b7db94e12ad0db761b326 -18adb7cf12244a28cccd69974288fe31 -3d7fbc9b6f56b5a8cdc4c0706d3ddca5 -05067ab3bcbc2d749ae8492383ac3fe9 -7a34b670c8626bef637477452a1df416 -707c4f0523c5476666d053a397556f2c -650cb4b26c0928737f030c88eea0116e -435bf82e318d8086d43290312e39212d -a75a53240eb4f161c8acca14a57281ed -e9c7dca005bdf54968d12cb698e1914b -e2a3ac54a4c2763b7c669a4e86bb6d36 -020af662735bd562302038580c192ef2 -07b141f1318c2e2fb6d41f45c0661901 -04b2836d31e70d1908857e824fe42485 -ec7e31b75383342d87ed15dc170119b6 -c343cc2054eb86a1bcb8b13b5269eee6 -ad1cc895e5bfaa60e8275b6030f6d5a0 -53d53d82c47192403a8d2075b2e62ce9 -acc67908ab1e804e49d6c0c4587d0cb6 -b1ea78f104db739fe1ffc3d4360c32e3 -791ab5ff8a133d719e096bd463202a5d -b9b763ac35e45d094f818a3576b688d4 -96864058ce292747f50821845fb6c8a6 -f25d60ac6a4bfcc34fc3d1a677140e3e -fa57d761afd5a8ecbeaedc32f21f0a94 -8323700f59e4b6f54469c89606ae229f -1593eca641d2ac3924fbd3b719902f76 -65e99291a83528dbb045511e33e8a969 -6c086a67fa3a4c3f6441c9dfd6e537f2 -da4c320652c97c4652eabbf1037a731e -d5b04937b0c1530d18300cec8f096138 -8c616450bf7883ca1ce032e318e073ce -ec0b7d089687647e45c9096be0b59b96 -b0704acf09d49d0d937422423c4a6c75 
-4a4e76d21f91a277fa8aa6bfb311e795 -8e1897019641bf39dfd8ccbd3015a458 -e7810fb8d1e69f04966af28582508622 -f71827138f9be02da3d496cab0b56fc6 -742b0106dea5ae5c8a57573d6d364db4 -29502c77a6ac54f0690fcd0f06357e77 -37a6eabf2bd835ec7200219100cfde9f -2012d4486cb4a9d09c117f7f2be0d517 -0e2a64adb8ce6f65fdb26075d083c4af -fe9084540da455409e6f763caa69dde3 -8b17e7785359dcde8a4322d7d565082d -74623eae52358ed2650dcc5ed7e75050 -de47d9035b30435fc3c77aa4d5197aab -5e34ab3252d49303fcc7b869d4d8bd1e -86968c7fdcdc9fe186cbea0efb83a0ed -2dff8824647f0f7f230c8b3748868fd6 -88127e8a143797102f23c3ad129b743f -c7ebc3ec90ef2ea2b75d4475f16dc232 -5189c42b01c52710e91244bf4f46a29d -8cb8a29a83e8a1960d90c51406c84131 -ff12226a40bfae4b7a2cf6b6fab86dd5 -968ffe35732fb2180c8e7a061629aa48 -87c963da7e6faea42e538ace5f082e7e -8e2f9b48632d4eaa3a9d568e964d58b2 -d1fb0d01fac3bcefa4f4d78994937eca -4d23bad91290f8b012996108d4ed56f7 -f281afaf11f90a0c19fd788e8ac910cc -6a7625e9ac0d3f20135b49c2c3840a27 -d241d21514b84ca684758d77ece9fee7 -c6aa49ccad874af8cf2ffc999605a7ca -36156b5b7c193a50e07d35c3d3885b01 -88cdf8d7702ce8fb3cdc5a2a909aa5c7 -d84683f70f9011ff9dcd3c4ec40d35a7 -8f85d1d2d1a71437b4d5ab1c9dea8df8 -d40118f8ea7de579cfa9b0646f67ccb6 -5005975c5daf8c56ed841526ae87f4ba -d8b51076854b8c792f79860f1ffd95bb -53fbe83cfb97223dc405a58a85df7fae -40cdf5684038dee6acec3413c8945d2a -2c62f19c081550751bcc677e7dba3eff -adcbe2395b95494f98796b2786ba1d56 -81e1a9e9fa3b6725e67b71b7b8bf0273 -c0ebb52c84e8a1dd5f4f1011cb879eee -e539548575a5f132b5152206f1e06455 -b00e12fba49177ed0b00e6167d7cfb64 -4462cc8703cd6646f816ae93d96d41eb -3c1ad147576b96438618232b1749aa3a -b8bf352101a10bfdcc5d5538c79625dc -88f1664d06fe6876d0cf6efc9543ec1b -27800e8316c693553cf2d8ece9b03963 -dff0a32e73971fa20f821dcd61e3e548 -dad1cfcd2587568c7a0a59b2a6713dc2 -9ecaab6c1b87fce310f84e07f0803556 -1f542a5647f4a54a364367317021ce1a -86a527d9ac33d76f9c98a47246126bb9 -2ab54f74550cbc82d5f3dde51ec74356 -8f7fb6a2e2243ee6a5f462f3f06478f5 -c43d50ea9c04c7be493642ac27cc189a 
-9b1090fd819907311a7c56b81f5b9285 -7d4eb311f9d72eca9eeb9173fe7a889b -be1237acfca56e0ee3f3dd3a4490b046 -edfda01d3e946e5e814a761db93f25b8 -054d37cd67fccbb0c7fe85ca5a073e9a -34a0a544a73c8e2593f7e3d2ef035fc6 -187df0b818179ab5cd0995c9fca81540 -2e8c794797652244247bdeb5e19742ab -8d1eabf17aafb9ec44ed02690eb6ad33 -f732074caa197fc8a0410e1d9d1eff62 -eff41077e2a633de08e7a8559538c45c -074666d185879072fec32004f93653e4 -14cea8e4695f450161651563decdda25 -f3cefc4f814ec4dde352bb9a2651b185 -22df10ed6b7cfe91ef8a6358e49ed21c -8e5d413e306311edea9d6d181eddd090 -9a19d88569f37f3983188659e29846ed -7694d46074c317326d610446cb39e69a -5f6b401b89c4467777ac03aac82594c2 -a79233b14cf794c20c5ecee236b36ed5 -ef6afe66ebdc862030e84c4295102d75 -2fa4c044e56d6b887bb701441008b886 -d462440967f5e9f3a90743e38d2f5f6d -dc6c51fb5c256c4ad657b3e2ec389741 -a7e173116e0c233a77482c54c95a2ed5 -a0b016dd851ea6c18c28f4572be498d5 -1573919efe84ae315369fb90d2c319fb -3e0bd23078432146a1b183d09bcfc890 -0a2ed991901b799e11bfe8e099de51b8 -86a6667c053c38fdbb38b8b76ed741df -6ef0a90abec633b310149a7c58012ec9 -bdc7167cc7e6f31682457e5fcd2a7753 -9105d414fc3c58e3be60b7dd6f9acc7b -191e6668ad1a0e4ab2b66ac0de10b767 -dab0df1e1ca512509a325a9e2e11f80e -4970503387ef67041489904b579b8f51 -a384ba6b616cb3e038bdbd0c33d3853c -f873297aade22a5816d26ab284079768 -3f8d41f7b7e05e29eb1bb9c3fa448613 -a95d6bf75717cc21215e118ff0f29d13 -5523dbab924c7b13af26b2d9356a0a93 -6e2374cbc8b5a5bcd9ede9946df16264 -64d69bad568ded9d644c2c73a4e315f1 -96f04c3834d7c89ab531427ba3ed7d01 -deca09b16f8f4a18b4d7cabc436228db -c8ecc878a9169aa0ba70568312589d3b -e786f547c37a16258d6713c34c6086e0 -f964b917266974790c44496376e60c00 -b6beda3d86f70ecece9f3b76b0d12321 -3886441b2a124e9b08865c2161628aa7 -072b8df5b1e11439efe403970e25f039 -55047e2deec552392d6bf34ba83530c7 -47299a4b0351325fe86c5c5218b5db0e -455922571ab92960e8aca8cbf8076e02 -3560f456da1ab150321a04cdd6fe379e -bb3ad92effa9ec805224f8fe8d89cb38 -384f4f21d7eb5b5f1aeb51646d473f2b -c84b586d4cbf588bb53e4dd953393e7a 
-b71ead42cef9ed860b9707ee999321b3 -deb0b193faaf0d10cb00fe4d500bfb63 -f05f51f8608536f28a120fdf494b7789 -30b62999cc6d8d108ab3276c0b766624 -8af0e1b6de2cc7e79309911ccb476617 -80c482bab6d86e0d08378168116dbdc9 -73b3f078389197d4f9eaed644e207c55 -ba302c0661a1e8023a2267d862535ef9 -196b2a007b1145aff2d8a72e7e8ec916 -5669c64e6a0307f49be2a1f08f42ae52 -25ad2ef8869ba7356c130fa0370ec642 -96382794536f4412db5bfa41428b3238 -68823372365a183f5ed09176d2d07ddb -be0f684a3a68d3ec91a42fbf8899fd6d -a5a5321aabba342bd620f12a6f5079fb -ff676627178c703cc6b24a77dd01698d -08e154544ad30aa104a5511d79d2ee52 -48cb50e819d1f4dca4458f585ffdaf41 -717318aeafc556f3e3486159a090cacb -378d3978f6f035808f451b79fc02ba61 -0b6c5689b63b79bcfd3192ee52f1f367 -912dbea984447119d8169282807257b8 -1aa41e08e0f78ee35c0e934adb257d4d -2f78f2ce38e92233661b4c4d8067b6a8 -9233bbddca7c8aeaf009d0b1e493f0ae -96ee7076fd0479d5c00e031228953d16 -a9c1b7ccc98dfde321cbf8bb49b540ca -0e2ac09229ade4dd05411cd436fd0c65 -4c3fa253d7996818a8a950ad07c21d90 -8a5df5aab3e04698d05a59b106d5c75f -0688a20ba8a7635ac922b067c6178bd2 -c8241e4985d05438a1ef965518741ccf -9fb1d547c8577fdd72f386f040c01c4a -b3c3ba52eda97a84d3ec310195a418f4 -d73ffc00718fff0438c447b83c182c2c -9130b51f67665166b6826924abeff6c5 -f281a9e275071dea74c57ae08fc7f36b -fdbe1f4b7c2b6efc4fdab4dc2e354bce -fb57c52316a24cfb67150a15719b9c60 -7aaaa59152ce1275ee2926b8b3659705 -3d8b2fad60c1aa3fe959971181363b73 -a6e1e31874a7e985c0fa68fba5b01442 -b3712a5e2e3a0e7ed60cc746b8b33ef2 -68dcec92d4da8f652b8c1ac7deccf4a2 -6e3c0991579ae41cece596c70fb23c81 -3d4c4949d0b9fc0bddfa6f6cd2def0cc -c52ec58b22d7f85209c250cc67b1418b -9daa55c99db717e8b7c2f5c6bef61aa4 -45590f2f05a9fcd68b5ca378c2176615 -e921fb334da0d9c2cc46e39e1fad059d -d7ae2f6b5df3a23a3605dc9a6dd268da -b8fb659cf403ea7288327cd1ae9a9358 -ccfacc7b1f9d5d3b8a523a1272cc18ec -d3a75c3978477b41f22a2ae66dd88133 -f457467551ca7e451b17bcc629623757 -345f04f8b517f8afa1a71198f975d2c7 -ec608f690d61ec97d44ea8395d83193f -68d71e108552316c3dcbd7aed3d579d2 
-22c3ce0fc5a9a932b91fb2739e5056d1 -6c96bb6fd320096b61e6532c8179ffbe -055dba46ba199649b4e66238a2a256dd -77e9a746704db2bfce21cc3e4e28c519 -1b129ef26b6a0b193d9188d665fd0904 -bdd2675e9412e30d1a14fb4e7e89c467 -d6ce358ccc1ccc2b2494e06cf37662e7 -ffe8b3beb7e3ce19405631bf008ccb24 -74c2b25548b84c6bb6314e8be03ac794 -9b3a2fd41c935b60e14933844d503357 -8be799be5da76c3fbc2e838b499e674a -4f7ce423702f6e6e30bf7d2811c11fb9 -c83abadfe48b5d3f21ded7076a80ba2d -420208f74fb6e6577f4c19e422085318 -058a09311b50e8c30a9d5084d9a8f5ba -1bf1e3995bc715c7e5470824b8e5c584 -b17e5c658abac5d043714148be7d6847 -5949ee79f2c68d75decb2fe3213474f7 -d4e380f87e0d3c099dbac18d19ce7a3e -83e91e64e631987c3e07691c2614541c -28e9e0239f0f87955b4a987c70858ee2 -3a6877045316b1b09280eb43b650db35 -d4e380f87e0d3c099dbac18d19ce7a3e -f3c207e809951fee275b05e609fc3eff -f3c207e809951fee275b05e609fc3eff -dd200104426a26eb1e625d225b1470c5 -a90fa85635f9b23eea3b93f882bb8221 -a03fe9152991e14d350bc5920083f79c -511516136b14e2595a65bcb0ae21cbb5 -27479185ed68fa204196a5aadc62d63c -df4c9682c97a32970dc3836189038306 -61af9cd0bf6b476db37463e0c5c36ced -64d182ff178cfb6ce7301b131b5a1bea -4074d1fb7265403efb164ffe90590209 -d0e320816bfdfd594a570a50daf38e8c -aec5f3c91f62becf027b2d72c06fac08 -eabd52798a1e1a661e0c90851215b128 -bc399ac815b1e15a8e40d5dca5b97634 -4694f60bfb5ee31a49dfeb6263e97a36 -bede66eb7eadee08839ced6f63393020 -bd31d822a8ee49825caf2cddba4077d2 -755fef33c219192b4785cc08621c047e -755fef33c219192b4785cc08621c047e -bb7bdf72feaf25b60819d22194039559 -6e0fb521192b98aa308efe61f9701f5a -6ae15fc9ef48c56d5f3c95bb02b694d4 -d65675abc2f44ae254bebb26a735271a -b2d55f585ebbc9d112366a3e036e2f90 -1211faf785866641e0a3aae40c289e83 -40534bf9268174d7236a1c5f20365f03 -443be9d67db3adaeac7435652827eab6 -647e08768841fcc2b9dda573cb2d64c0 -8c6d3430704268841502326974e5b603 -68cbdb1f2bffbe1596ee89329ac2d4bf -68cbdb1f2bffbe1596ee89329ac2d4bf -9850d34b85291deee274c968e492ec9f -c9a7f6e7aa8ea9d0e3e7b850be118dca -2ffb42c7fa7ad14811bb7cd3742bc31a 
-9f96e7b4cd16bb8e866b07a03abaa72b -19a687e0407f38dfabf240d3837aa80f -d0dec75358b23194d71bcc7d0724c12e -0d979cfac799056ce984fb3181b9f957 -3209f80ce9f98f024b4b37999ed7c002 -544f6c0bb806e370a5d2d3baf962ac99 -e9e64663f9f5f9fe68424b6a7f7bb417 -c7953b4214f66402bb55c48ac17bf3cd -d3dccab4a3d9f4ac3fa47b35a03ca67c -7320444b24282afdffb83562a9d2c909 -984babace2d0d936cf64a494a6dca13a -0198f6bc935b70610b28680123024fa0 -b997f5096a8e877b9ed3a8a57dfbc39f -5f0ca2440d4404852f9ce2959e0621f8 -a81355708907203e40330bc7b73c18f4 -a81355708907203e40330bc7b73c18f4 -f5954f6e3261c0b0ef4881dd721d123f -dae281167e7281f7f0160ec9f133b328 -08a75c4e682542d58cf4fe8ffdd0fa49 -bb28cf566ebe9e0f8616a97b5d6991a0 -1e043c75a7764e20d190721469af2b82 -247818b20e302ed2c3a0feb8f727b960 -9b42e8afe47028ad040916d62cb1c3fd -66a4a2c2cc57e6b20037c808451d6d14 -cef837c743585e660a948095eb64108d -52a274eb25cbe8131c0212755c1e91df -1dce7c6a79eb395991f08d6d63cfc314 -e8740c2fadd9c65151eb09170d42234c -7e0d5c5a837c37c08998d76c2dfb24a5 -05e3e53bb4669938c3cf316d14fd3744 -93fb2cc9b5ccd978113afbed3f59676e -93fb2cc9b5ccd978113afbed3f59676e -ba58171f91e5c2e492c9ea1e6ccf9224 -d653422a23b9059eb71d6cdf33f61f3c -88b9a2634c8b7cbaa573e5143ec84e6d -13bca70748947cb29ddf84a7b972ddbb -9a165c3ce1665c21da4bcab0f021d835 -9e05fd0f0e7f6d640b4c3a5cdd36a93c -3db4f9356e1048b2482e97cb300d7a9a -6dde2bc8d99828bfd06a83ecd63fa43a -903eb17fb4802eac5aae074197b68b68 -7e3ae1597363af122b0bdab87424153a -8fdf0e40c69b914bdb299a3a49aea15a -63a39aacc83bc50d9147cc7676d313b4 -40435fc51f146eca2af7386881ed10de -a7e6abedb63f41978e77420e7553e412 -aa9193e6332b867bbbf75a2621aa7082 -f144bee51f131d80ebfb77154c40bd2d -461ce63a42cb121c4e1b536beff7a360 -39491efcf885701c0d269d002fbf4fc4 -a8f7e98045a940c66875cac6a6a6c589 -ff82c3a660186218fc31134531dac1f4 -c1d379e20939d73f796b11754098e07b -861b84ead6cd580357980f93abd0aa9e -a8c4fca447d4c7fa02a07246bdfb7cd6 -fc376d4e4c8a4b8305ef1fe2c226c6a9 -76f64ceb9c84db1e143d0bea4976997c -d735b9e659e62ac8c174dac4eae003ab 
-e5a377321b7cefc0498587c030170e44 -56e9e50fcb8eb69a6b6acd5af3549504 -4d1d61a69d7adbe387061f3e8651017e -f97f8f26bcc695071e7d512b6a894000 -a7d61d7c42fd802c61799ac3c4987f3c -64558404b7c3feec28ac3b850a21e906 -028e7fdbcedec648a6eae2845dd36959 -6c64384915b3f026b16bd4ab169481e7 -471dd4058dcf191c8ba81011e16b7346 -c44d293adf075ab6655595447ebe24b4 -b367c53628f86d625fa0ff3fea04ab92 -ad3712e3c2775fd6d07c32ba48e8852e -9e6b0468e6425da47d765eef30a661f0 -29b4edb534e59e3d02426a96f322cb67 -451c1feda0dfe9ebe0dafeae37a0fc07 -cccbccb47eb5966c0dc0e4a2855f0dfa -a66e6ce755930f76d14318d39b064bf6 -a66e6ce755930f76d14318d39b064bf6 -6fa23da1f6a4183b45131238924d1493 -3639b1cd35e1d40ed93d706c89dfc61f -161c3001ed09a1af76962319411fe35a -409f6e59bd4062d1b0322a2326e29cf5 -c761b422cddac5831125bd158815d30b -2f0c6e4302d927bed384fca7826b7c3f -2f0c6e4302d927bed384fca7826b7c3f -2f0c6e4302d927bed384fca7826b7c3f -90a46c605d27689433c73773ce6d7190 -705d0a9d183edc4288d37386322688b3 -37dc7b4db4db970d7d4db6bb4238f76e -db657ee14fe088eb07e5d685b6f08701 -124eec24586c1599972ac401e5fb0bdc -096d706ae1287aafbeb159d84f65a540 -a1630e38b6f00e279440d348bec3f8a5 -23d6520c4bfbaaa8d096ed3c0c1500c9 -9128bdd31a2174a061388426c474b343 -712e44c131f81ac5a01ee9f41733a049 -99be1fbc155cdaf5b7951d8c249b32f9 -3668c9401afc4df9825f351b048c5d0a -b227341f54e80a4d4981e0c744d2821d -930f2d5e18f4366d8c1133019a458135 -6923b413fe2536c272f95e2b4464c602 -eb4bdd3eeabbdb278cd2fd009ef46154 -d1797cf0b18083c3f5eec72048b64e24 -19dff617b8848582a6d1cdd50041d59c -d0894be7bdafdb3c3be66b98b3026405 -ed290f996c26958808b1cff264073ef4 -c48341e57ca6265059a49e6ec54ecf8c -ee257a76d622bb488d5dfb99614c3f13 -81579da6d6b7fbb41788eb481c38b637 -54a7f812bcfbe431d95f90d42cb5a8fd -6e4de39bc33bec7564d77d05ebe4e423 -481815ad31f6223f533488207f97730c -27f7b8309ecb5636923e22dbd50a92b0 -eed584a3be06a47d489c7880b792f5ef -df205cd0dba3a61c18425552541f2f32 -6fa9403fe86b93302aa5cceccb92128e -d61ce3e66559e16d6848f38400cc219b -590729a5f5b5d0eb05994a825b91f130 
-fbeb988052b928df83efd8959efcf1ab -2e72e9f500818c1149a755e17522b9c3 -4cfcbb7e75bb2eeee6aa411ac5bc082e -2b0cc1fee5fb2c8d3128d33e55390a35 -287faaaca2315e671bf442e8dc09918a -b093adfbee63c2c7b56505af2a261c9e -e8af7a1baf52ce11746903a956f735ef -206bca2db9800e1ad9d0ed1e9677444e -ec18653bce53848bc0f2bee67f9f5030 -5c70b2672ebad25001bd88b489768c59 -d7347b1e7a5029312cfba490c28ece50 -d75bfeaf7092a43a5f18d4ffbef5ab3e -05258587a874e21cc2ad8e2f798881d0 -49e69141c30969a1c885627b672752c6 -48f86132a7e0dd9684e5b0d52ac7346a -eecd3b6bb21a95b82ed0f2ab277aaa73 -1c6103338d6c0ae625ca5b0c6e5a3625 -9607fcd3b93109ee78d7c01102ebbc8d -633dab28ff262929d6ecd8bc81b13a37 -6b1866a15d71ebb8dbe56efce8acafb0 -6c845a9a8a2cc016d081a9c0bd285f7c -e0287aead1edb2bef9434dfdfc1934bf -a6a1b3be3aafb5b8896b192e1435d087 -72c4b2cce70f9fd5ddade6afa0fb0e02 -85d1b27dc15b6521ac24b4b56fc6a56b -813358725edb9f43fac9da196e5c89ad -be192fa2449ed5575584dd9fde3d817d -d1806541307821299da40ffeb9dac4e3 -d02dab5c9a587e805be5e2dff1cba846 -d02dab5c9a587e805be5e2dff1cba846 -6b51081ebc259e60224daf9f3e910377 -d3bcb0938b2a00acf1ae3a0d44206f06 -6d28d27dc0a7fcc041e59ef5cd4af687 -df95f6f6ba6c6f5e5202d4e3b19ab7ff -847b3b715176dc9c84a994bcb54c91b0 -c68fa44305b8217617a7617e5b643947 -a3540350858fab9575b7ef3188d05a17 -f1ed96095291347dd6701f140487c02c -916a4c69b5d580ea691948b46b5de8c5 -7f6b191c5350c77c749f07c0c20bba75 -7f6b191c5350c77c749f07c0c20bba75 -6d28d27dc0a7fcc041e59ef5cd4af687 -a566bb90c7e8d96ba2aec2e5898a4e9b -d5741927130f6bffdf057e20a49829b2 -e6b3db7a76da825a733de114dec0c38b -19d81406d9f1d498721693eba11817d8 -4847c386997a9066911a0d47f8d0f175 -037ef315924daa158006bd510bb5d69e -50a29b24548411c75c583299cc5dabe1 -8cd18645a722bbdeec884bc338da6205 -7d62051c7b45f190bb0294bf94cb18f8 -5134da1f09686a22ca509b49658c41d8 -3ecc9546799fdd76dd68b5b2330e3b5f -9da4a1e4aa047e4f6c6ee29b5b14d964 -9da4a1e4aa047e4f6c6ee29b5b14d964 -307ccae0fb02611a679d306de448172d -08a1392659f38eb025e357e28b9cfa83 -830a9ec245c6360a67b48caad8e0850c 
-298439ac5ff3c6d523325fc9dae5be3a -9932907c5f159e97c4b0ff1abaf6c8d6 -8ebb45da714c80f1fda9f839d959af41 -79fea9c8f9f064c19a2b34d8581bbf87 -f76887a8237d51be3a39d28f302a9c79 -ca8f6eb8ffda15fd660c4ddcf88b383b -30e229b28cc4513a15ae434cfb454c3f -5d2062d4e96281501696da1c3d9241e5 -83c560e6425ebe443b785af22615113c -db860c611aed8021041923e6f6050be7 -011523d91608791f36f0e13a3296ea03 -bac96d830325ea250b6335ad4b7dcd43 -9d65c4cc21f341d9152835578a25417c -99924b99ea69590e1b21c17d94335333 -f2a2b24c5a0306c89fe20c1d0d671301 -1122e7bfd1c550c409f7349031f60c7c -a5c77afe7e3bee604d8a71f4908c3a95 -85fff261fa2d8211bcbfa912c90f0ead -48c2575613d919414599a77b22ba3bfb -e40c74586731cc9886564e1f5c6134ed -e8c027accdb5856c50ac1611828ecc4d -a3c6e07b674114d1d7ddcf1f5dbea126 -c1d1a712596bfd1aab289e255b03ef06 -c546da402eb6f42d3595219b2bc2f0e6 -bef101b46cbb442d6bcd2a95330db6fa -37fae99d82ae6a2f903e20467e7dcf1f -e7b9b0bccdb221ac40ef0faa32d56cfb -14ce615388a4dc83e5ce3c60f67bfdda -1b5ed7c4346b70d26cf3eade83846e8b -633a4102c38267c962d440e254a104a4 -4ac3e640a9e1839c58173e526feff3b3 -d15c43f169a9d74943bc087a6b62f185 -36a0496055cc039f8066e0df44cdaed0 -11d0a2c10e6c38c38220596ab3685b1f -f674b42d1fe86a6428182f572c5564ca -2285d61d30202b4ad5f09c5dcd30d2e2 -6f7330c9a60d10e3ef4b8f44862a1f8d -0343a6a33dc1b28469dbf1ce521159cb -a208da325c94d2856a4ce43e0178dca4 -a208da325c94d2856a4ce43e0178dca4 -5b4dc5180c25ec5c22cc270c11234d24 -2be515e2d709344823ae258675db108c -b677dfa915913145c4cec679e39dbe26 -baeb59b36288bb2f7c288e864a2991ef -f28a65a0184d293c82b2d3b6a17f20ce -3f5eb01aadd58502984fe7b7b23e30af -784d80e69ff940ad0e4f75e46c6099b3 -74f3806e4d5e1ec2d827ddb6259fc330 -9b158da4ce02b0b8522062a43edae034 -7640236f7a52348da18e5363701b41f6 -8dc7d2aa264938ccb301fa162254b4f0 -526c0d6d4d7742738930f16792ce5ead -5c9ab0c9570a763011d26f094be14ad0 -90dca90e3e66a25a7d1fbf6ef5d1577b -4cf8366a1ef9156d57f93bcc3115ca03 -eccceb9a86827e1f29a5ae9fa88eace8 -68f7993cd1104324b3cbcd441a2f4022 -521ec44dd25f611ab3e37ce95dfab2e6 
-16c57d2bb175c1d6e3754286872a533a -74630f4ce36893e453325556312bb718 -f6a8975eacb2d12b89c22a65fd4f450a -6730228bec0b9fd8ce4406af5cd03016 -1d48f1940efa787850956457b23541da -f9ec7c207ce40910f3a2aa7146a8aa81 -9fe5eae4d151326da5941573d48e5f73 -70aec9161c49db50f6b06112d1594dfe -cd74140cf262339bb688a579d8383c13 -f9ff6f3533ecb1490ee180867fd9f9dd -5c9e1e5fae0c0207236249d002fedf30 -1ca6ccf656f14c73ebf80e5f7fe353f3 -d4bc39d6b5c3f910dec3632274ae405d -90a87cae6376533400caff4c75b10f9a -812defe7d39489f45fde11865d411b7b -71ad3c13c7a25efaa4ce6c2c9dabda96 -fa35d4a1c1f1b0d314df39dab9b74f2a -472ee51d0dc86f3ef5aec1ea9e4b99bd -8436e9ad7aa7382d82c9e067c13731b1 -a657cf1867ac702f26290eeab5dd23a3 -46b64939a28533fd61ccbd9a68ef6588 -a75089878fa75d7563b723b62771803f -7b50abc03830eba33b664eae0a2c6a7a -ab033e8c68a3fc1ec7359ecfac59b890 -83367997ee831fe7413a736f9c29e9e5 -190ba28a3d9497e439c2d854150ce8e8 -0398c261fdf8c5f79a77981a37dd6b68 -47f776aa984421c165753bda4fadb473 -f36aa2ee5a8c3d15ab282a434cefa52a -eeb24eb86011e6960bf4b1bb634a2546 -20e1721fe69856debca45ab1203b35b3 -e9882406251ebc46a342bd712266df55 -b0c6af7ca5b728e61987a15640ea5924 -ab5a705e7d721920b1bdd03e53ed4204 -02d2ce640b0f098d5a6ae4caefbad42e -48b1c5d81e03b9f31f1c8d43442c1d21 -2efe5a4870feb00f800bd82244eb7039 -f05298a17463d876afd8ee949dab803f -dabf723f35ea9666aa3e1fa7cfd45792 -b0c994e60000d1121ecf420c6cb28bea -5720a6e50c09d84e9151a72495c61dfe -9c3760c98381d8a78263d96e92d863ad -0e5c4fa294582672b76e2f2c260567da -5b92496ab7dee424f66d45be555a2d5e -fc14795b10b1ef105b86239ccbfa7f9d -e79b2250e1ff2793cb67e5f9a758b459 -93a377a5831ae4c11527d40c7938dc26 -5862d74e173d0e5a9d672cf0313d5bf6 -c5201caaa174144e1f1ae7f6c53d3a2b -de4884d3566c53d45abbdea337e94b85 -b29d7ee5638f30eb64c3660b230d81ef -6ac699c8e1fcc9b909dac76b2b5df5a2 -96a6105b6b89ddacac6ba19f010ddc22 -3df5f299f4d9f07ec36ce4279650f619 -81547ef21c776c99d4b497e3ba4dd890 -1d14734247c8c7915449ced82b6b74f7 -3df5f299f4d9f07ec36ce4279650f619 -81547ef21c776c99d4b497e3ba4dd890 
-1d14734247c8c7915449ced82b6b74f7 -2cfcbc0e65298dccb2b742b4003e003c -7e5e2f2431265f55f3f7c3e9c3f4b536 -986d657e168519e2731b67b0311176e6 -6f0bdc9d76c41c5dba41a53f92d86137 -cc0a0dd3cbbdaadee4dd1084b7eaa4fd -4f8fe31ba34f97f3ab805e0ad4552565 -78dec3964b00566ca71fa226db080038 -78dec3964b00566ca71fa226db080038 -78dec3964b00566ca71fa226db080038 -78dec3964b00566ca71fa226db080038 -78dec3964b00566ca71fa226db080038 -a705cab8e5c00b55ccf8f25d8d4ab6b4 -180e643d8c9d270d902732a6af949d0b -497a39ee92255a0fe2b1da90f74c0098 -2ae3ff9c414473a7137942b49beb2f8b -c5447219c5cc8166fff5907402824a9a -7023db51c8b3f908ae03960d1b469954 -b450a61f65f48fb14f754b8ad2e865db -a74ce65be925d1999a02f15cede17e21 -7efd03a02b56f28cc8f7f1595966f006 -d734e5d50770a2c07750327482c64b19 -cb13fc73793263cd53c6e238e54b857b -478ccd240cf45e79caee7c88114a1493 -478ccd240cf45e79caee7c88114a1493 -cc526d8486766fc96d8cd739c8ab9631 -0ed5d6a0ac1ececde9455f583104e4f7 -072ad2438cbb03101d5d43d18e08a214 -534a0bb57715ec67b86f308749845d2b -a848083eab81b3ed9763db6f47d3534a -eaf9e2ce791d44195c3b6d12e033cbaa -edd0e27ddd6a354483bc2217228c1e02 -6299450216db9321d7b6aead1dafb568 -b45c2f1e60f53d9588a9db0246437d0e -79deee214d0c7f858a78b366ff0b352f -71400fddfde8944298ee767426f5f529 -b547dae3e28795e5e03e636bcb2a9006 -558b30a82fe89962ce9accdc5b68dfd6 -cefe307394325bca8aabe694eb218f93 -c454083d73a52ddadcbef6fd03f05e27 -713117784406c8528c8c3d51c0e0b3db -12d040b46698fc1031f2d186cee1dd42 -3d006821b0edc7e0e3425062ecbc57ed -ed929d2eb560b2e4125c6176efa50cc7 -8f5a9182e8b6f867913a423a9e8c07a3 -deffcf5145db9262415520b0d0e15014 -a82578fac7784bf2de365755cd5572ed -cbd4c85ed4cb1ec34b53901fc656cf97 -d1ed69ab7f09d9b6e91bb30731211919 -289612ab083deb5b8eed37a6f2fc4c0b -6c975597f9fb54831eafbf887b4f8dac -1dc118b0c0bf2c095fc5728290347aa9 -8b39d0746395f579226f79521dcb2c79 -75f099b2150d4bb6dd63d1394758bd33 -f48a5e0b383c4a3e5488e846a7ccba05 -24de635dc4c8ae130226121fdcb8acd9 -b4aa5b55b968e19e551f5e7dd8c6f2a3 -8422af62ac9f18f4f7270ee351722fc9 
-c6fc1a24ef5bbcf0ae4566babefe9338 -98aa37f1c320462ab0b68c6b8448fe6c -9a4348268682cf154d3f44736e90085a -1645c1bdf7608c24ff954e173831a955 -ffa5cdce14f04f794d8b0841c0b75b08 -6935237d22a6ec5c3df24f1434fd6860 -f292176275b7c5f1da1a22806a84da7b -909131cbc7dc09b26dcf7afc754553c5 -9a940e0add3aea3a21b63751b91fb655 -78ffe3f2810d203d0dc5f385f03a340a -2a928c30e9a912ff0702096dabf14d7f -91095ce6bb828b1fbf59500160f861b7 -a4c7837a28622a74f2202b8b0063e416 -c5cb59b186483ab7134abff94f1a5fbb -eff70be6e3458495786255268abb0088 -10bafc802e7475c562a450956a590257 -942c8bbf8bc14d13f628394eb4658810 -0474ee5842866899668f8c1f89a46fa6 -8e746dd06caee01950f78f8fa3124215 -bbade15c9cd703663a19ba35ab896691 -bbade15c9cd703663a19ba35ab896691 -bbade15c9cd703663a19ba35ab896691 -41b18bb1c86b628b0bc71d3ecaa87cbc -5129ab9a632d1f679c1c19f0d2daf14d -b1fffcc4a54e24e1aa8883e1d97c1185 -6e9b5a2a04f9cd5197a83d6a1ba81568 -08d7328e633644510fd52368e9574bdb -2251a7d35f6379efbea5daf529cff01b -6d161615d224576a09d0183d4d4853d3 -6d161615d224576a09d0183d4d4853d3 -961c45fee8dc70473208517af84b21ac -e5ecbd3888a0989b57d963257590fa46 -501c7102bd0394b9bed19bc414004011 -44feedb18cf3e051a71c421aea1c0c76 -d23169c99ffeab5194ce3e7a95643abb -89a5f7e158417c65bf063f46e0594bf0 -b0b9af82e496c9341c020f565465cf01 -d23169c99ffeab5194ce3e7a95643abb -89a5f7e158417c65bf063f46e0594bf0 -75a3fff2ec70c94a9c82693ed12ef897 -3fe63656ccdadcc4450e65ef0223cb79 -6b270918e9fa0daa483f246014078ca3 -45132d031f670f4c3f7d54bf97dcef9e -75a3fff2ec70c94a9c82693ed12ef897 -3fe63656ccdadcc4450e65ef0223cb79 -e43b43b48993f236a4fd6c5f182fd711 -45132d031f670f4c3f7d54bf97dcef9e -0dc6fb793eee26cf7c05199eae56eb36 -0dc6fb793eee26cf7c05199eae56eb36 -0dc6fb793eee26cf7c05199eae56eb36 -0dc6fb793eee26cf7c05199eae56eb36 -0dc6fb793eee26cf7c05199eae56eb36 -0dc6fb793eee26cf7c05199eae56eb36 -1681a2add0e4568b0c7d68a36ffc512f -c307aa98d7417adcea33631bd028e921 -83c6f8e69fa3eed265b57682fc9a0caa -a5ed7b3dba6eda970d18d78201075780 -6a15270b7ae92d6e4b88d215a445988b 
-435d2f882fbfd322bca4daee4d3389a2 -1d72f2fa2ca6ed9fc9427cce20eef4b7 -8ea6d4237266804f883d8273ca21d84a -a7c0955f29957170fa018ad611fc5164 -b77f6032d0bef6c71db1379697554b18 -b77f6032d0bef6c71db1379697554b18 -ef297aa09423db3099b3ecd74c90c990 -2a064b4df37571820a6c4f9e4cf43f0e -3cdcfe6b66c96b67cee0c5dd39abbe8f -7a1968dfcde0660583abe436bbba2958 -cca54d4af9dc407ce14555d9e76ea684 -3cdcfe6b66c96b67cee0c5dd39abbe8f -00ef87d961eebb5c172e6c9c60de1124 -59cf334b0dd8958b23c6a025d6c8a805 -7b05f7805f4bda4823dd8120139c84c7 -e0eb95ec6c63ef543c7c702143346e44 -6f510cfe0371a74351798f8fb80698b5 -cedc6e9735c1cca95b6a2bdd3c8cc4d7 -e972e25a26bfb36ab9cbb80af7f31983 -d1455755c33fac892ee713ac79e4a966 -51b6399b335e3e8856e41c1887c241a3 -cef80860328f3ea1486f3c02849c4c7c -a2e6f8b73f2580caaad05d7be2963205 -487fac2e0557923c798995aa59fca160 -487fac2e0557923c798995aa59fca160 -40136d88ec73a3d511239994b3cc2dc5 -8dc215404d9f625114506d14af40a23c -58412e45aa1d6771e1cccd2c63014505 -910ece49eb818c5deb7f6725e3c4fd01 -73adba2dac2dfe0a745c9ec9bc9a0c4d -ddeead2931b77ba75be5f1d4712a8cfe -3402b53e726539547caeeffe7ac6ab35 -0a38ae7b09bd5beb6782b69a986bc434 -fba0af1574b458943ee176fcd1824618 -e09e4f39bf7b4fa7b01826a0624ec014 -4a7096bd2a494280a72b27d2f20434d4 -ea9e5af80ae62273728430cd83778f76 -cdaa7db62df4f13493fe3f67ca0697f3 -78341824d4eab934c31567bbcbf60ca2 -63f3be44f7f64593cc1dc0e11c726222 -659ef9f3643387acecd4009dadc01531 -887c65dd24a017d9ab6c79254684098c -4f0bab91c7bc21c56f697f805cc32783 -4f0bab91c7bc21c56f697f805cc32783 -4f0bab91c7bc21c56f697f805cc32783 -fcdb74bd532586087ad06c460e0539d3 -fcdb74bd532586087ad06c460e0539d3 -fcdb74bd532586087ad06c460e0539d3 -887344bcdf6c994364e413a31b657872 -8f7ea15d384c1f7905a02ff448d5f897 -f13b7f26ebc77faedc504b6cd499668e -1b49c308b25623a8867c0bcc112c7604 -6dcbb1ffeac2ee299b346d6eb075c930 -c184375918e207348d7e7227c9fc2459 -ce81eac23923ae598e500a4b66f80d62 -9725f0be61dfd55e86d0261c1c27d62e -b93cb4f987f96743867d75569902ad13 -9ed9c582d4e1ff03f53051d3781ab314 
-fb461f1b7314fb810f4739e94ef09052 -b0d8c38afcbefd7aa8e6b6cf677904c1 -efe4ca957c09bb25175a03e5317355f4 -a4a5abe80c60caccdec5bef4c60f4b02 -05b919a841468a3f89866800aa8c7e85 -180c36481d75cf4b5bc1077ff4169a9a -cdbefc832933f39a05b3218a169bb3bb -4332c0ea98d2bf2ca0c05d7149fc8e59 -2ca819ea3736e6009ba3202a8cb7a214 -97762c0c32a11f12b8796ad7b510ad53 -9c9612e6965d95d8542004653d92fe68 -bd064f712d66cdac35c8222c18bbaac0 -92b3601dd66d91842454a213fd3c2e05 -5cec4706fbcab7add9b6495c3df6e3ed -ded51d71d5fe36b240b2fe6a51eee3a8 -45dee3f070fba9707daa2f24706c979a -48844a7c31dd8b5f54340ea8d7c5ff06 -acbea947a3ffdfe13ffb99e9efd7917a -2c868aee081553ca11c17e0d1b53e915 -e231c3f90355cf457c35fd7ec9b868ef -d821c7b7f0d306ed1942330ae5afe632 -d821c7b7f0d306ed1942330ae5afe632 -2fd6e04e992ad94d89e8e70e04488d7e -04c7440e18a04908dd8e062c6e7ef7cf -ba668e12f22476b0592db4da121ba05a -54a5d1a2181ea740a8a8a07c9b500151 -da8fe922f843660558db67d58d24980b -14c3558e147a879337c1e59bac8ba45e -9ec2e262845fd21fe195e0ad8ef173c6 -c89716b8be6c84791b8922e1d5c9eef1 -02c465b0d144c0ccb22c61bc55a179f9 -1541d729003856a45a7a5b78d1806324 -87dd929111c6bb1d3ca25e1a0be8418f -34b103f5dea81798803d4de2aa4d46d8 -4e2b88e7d75661f91e1df5802cf05747 -4e0463c06aefe0121eb9a4106037d193 -b660b1a454ddfcc2b1f885ae4e3d5428 -05d0282d5688055316f9a5e10d3862fd -a443f686e140eff4d9f3753e2da7df24 -2fcbb97ac149c410626f850d88fc79fa -ff3fa2f0a709546a4e6f17b2f9b719db -c83078d3d633b8269f3a84e30fa8d821 -fc42b86f617473cca56cc9cd0553d919 -1bc0f965220b3302117c37d567d3c55a -9305f6b43432e8c4c046da55e110c90f -702918bc4a1ced100af03632bbd9cc22 -702918bc4a1ced100af03632bbd9cc22 -702918bc4a1ced100af03632bbd9cc22 -6e48182c69e12706f85cd971b5608014 -b3831a1787344b2b548a94ba1abee690 -277990cd24264d2fcdeb84e8bc406c0d -15d79cacbd55b87b7a895b609813a5b7 -31102deadb74097b21cd9a0d9a05ecbe -94a81d227f6556926787ec4deddcb72a -32eb3279bc92a8dc5c8d8da2e1e59676 -15d79cacbd55b87b7a895b609813a5b7 -31102deadb74097b21cd9a0d9a05ecbe -b280386591c0bad9f1db2864283c8b49 
-881bed5e35f9d319f74aa12d3aa256cc -1ce00c5b34d536c0bebc74d42dc56792 -fd3552e251932660f0d013d0891d1744 -0e9235f50e2b07c1c71431b1ed756f8c -4a4a173a601e3302e13bc9b46d8536b7 -64b08806be91f6da5e1ca8e37bdfa827 -436515fd2f6308fbd1e9266965a6fe70 -f329468ba06e180417d04819840c18a2 -1fce8fad6ce4d28fab5f8c9a7a3dbab4 -2521ecc2ea4e3d1ac49c07e00e5bdf2e -f9b2528fa4db7d08fb29133b6abe7c40 -1a446d64fa35b5cedac50b9fa40bb6f5 -96f3dd530587e20b1127f9a2642899aa -cfdc52262e30f5f17d7bc10e2fde366c -fddf3d2ba81e55ffbbc3a8086c6a0b44 -4a805c2ffb894ff78a54db84f9a07ee1 -607ad8efd6c7c04d5a26315aea8328b3 -63ea10efcc25cb3979489c4cf524a408 -0dfef2ba0cd911247b408bd8240b1abd -13effbb08d818f9c39cb419910ce3a80 -b4e1c87c5a807f4a9b3cf7b6bf3de8fb -4160d54165955ba030fb93b0f6319b69 -ec4cf36930c22ac8d08638c87d8812c5 -e3d7492914a65d5d5b7cc2336f549dea -4426783fda306de1dae91d15ff87d942 -5fdeaacee3091698b35115ee74d4150d -8b5d23aa94800a610c25d2ea1d7d1f59 -b8a47df1e05098dd5a7fea9501e2b00f -ad80a1bdacb0fb5d7b5eede3a0925df0 -7efd669da0cf83b32cb935238b8ed49d -7efd669da0cf83b32cb935238b8ed49d -2a6236404eec221cfc5b6c6a1647f7c2 -a19f6d6b08a44710d18f26fc0081af02 -9096098733241f05f65f098eb9960be3 -65d5d6e3d6f8969a7b727aaaf92ea4ab -c709efa2d909dc4dcc456c855815cb82 -a3fb51e202c6b5ce50c3c44d173767da -a3fb51e202c6b5ce50c3c44d173767da -d0395506e4053f4738c17fd3ce04e8f7 -bd9b6c514590c7b563e9118807dd852a -bf815ba34d9875b6122b6af5ac40d648 -f993b50af8300c963cd6151520fe4336 -da74e781282d987a7e8d8691f9f26a9e -586ed9fdb8a5970924d44e4285d1e003 -9af2ea27f71aa10fbca9696389837ca8 -4dfab241c9563b0b6c2c7533154a3c49 -537ae1a403f7f03cf127d2f8bbe3583f -f63e9485b618859a5c35ce48061fa4fe -6acf1a598d9c2914b6e8bf667940d43e -f8db8610f49dd93cffbf38e9cc239a1b -d06d003e44fc4d547265173589bbc070 -1c660169dea36f461ca75fa26662d8ef -212aa361076152d4149216777df53e9d -3beb80320a5d1c6c37243acf92b3434e -653e922092749c987e7ba33e3a63682c -278235ffc409c8496d504adc7acbe2c5 -278235ffc409c8496d504adc7acbe2c5 -6b7432180bd507e13b8b0c2f6d992427 
-83d35f2e3c0d2c2c568565c572da3029 -83d35f2e3c0d2c2c568565c572da3029 -df69ecadc4f53d87648bf657f14ce9ea -34f516657589fad84c41bc9020f88dbf -80ce0d1760f91c104cb3e9d8d807cc3f -579dd2a5e32a74bbbe60b0deef7a8c30 -6ba934953c2fe8a91cf72c33833ba100 -1f112b607a20ee86e135ba8a90010e3e -c0d660f49a350497cc2968a9a160eaa4 -7b28b0661ce8837d6bf8b617d469b9ef -155271a832497142dc8262e92a987f1d -e0937139729e3eca462d3426a2585d93 -e0937139729e3eca462d3426a2585d93 -e0937139729e3eca462d3426a2585d93 -3f84892efb119ef1b596e0805c8e23e2 -57dd9221254b008584266673bee908c6 -064f16203d1484c922002c9490b32ac9 -1e8386c286f2a5d6a3d2b4a1bd90e1e4 -76071791d23914843ff3e0ad765a3458 -ed185bdf15d449fe12a7b833fb312fb6 -3d26cc73514d8c969c05dbb4ff71c68c -1b9bfe6af6458be2e8bc5ca60853ee75 -cab243b41a6a5693310c4f82bc77c78f -c3a387ce17d5e95f577b7a5b814a131f -4a0cc877fa002af394993b974d12e105 -c077e83fe8597aad1f56c8716778daec -dcac0e07759e574441af2ebffb380555 -63a824a836acc307509481a1146da438 -6e626f4b0c74007dd84be83fc16bd442 -23662427a18c57e3abc69e9955b00c48 -c9f310987b75133cdb6bed0e2987df12 -29d1dd19865f5264963962fae9f92a2c -2e95f59b99e471644c107eece7180739 -6913603e91629db7f3c03b5d2e0348cd -df5965fc2526cf6b0fb31ae63e713d84 -64824f04a2c49d563c8f46cb1a00dd02 -1b01894827e78ea0a091d1895af9d829 -f02b80fe96f36f66e9892007f85e2792 -8c865e11724cd9ebbbd865fc76fd14e5 -d4ac51e8c4ae7a66f8bdba8255f1cc76 -5b1a56da6b48992c8600c436c373fbd6 -ba591a89447486425f8340a98a918cb0 -606cca093add7afb92f6897b907e78aa -8902e01ff69775a518e5dd30d1ebb3e2 -efcfeceade46eb69dae026f4a1f2196e -4f139d03b6b16239e0dee3015a8a7b86 -af881dfee10a6d16d527b09bf3d3c1d9 -7c1ac612df81c7321ccf3430ab944fe1 -5658f2cbfe6b5f15fc5c4563df80e421 -7789307a708904bb819ed720c5e47e97 -4ca90dc21afc383e117baf8625b27c24 -12bccf95d188f1e0887509daca2075c8 -daf3e3decd9f109a8de4a823c19e5aa2 -67dd0e79853602cf32d27768d6c19f7f -90451a8c33d076dfe3f646c964a5fe7a -27219b3932fb0092b48a457b6c836c99 -27219b3932fb0092b48a457b6c836c99 -27219b3932fb0092b48a457b6c836c99 
-b428fc6cb72f38511a76ecff3f40f1e4 -97577978bc308dc7dd674f885b6a3541 -f306cbb064de0297530db0ef298f0e51 -1f72e875a4b34cb302cd08fcea2bf6f6 -d81d2672f16e0ea0d330522b0dda7496 -f25d913504f5752629eadd18cd3d7486 -f25d913504f5752629eadd18cd3d7486 -f25d913504f5752629eadd18cd3d7486 -1cc56b13e09df9f53653321687d44f61 -6e1b3a41e70e7972539dbc4577167a5e -f15897fc994b2de6d431f1e69cde1e4a -8fcdbaf4912afb3e20c798426a39417c -aada61eb7ba9a1e7802c5bb457610f32 -aada61eb7ba9a1e7802c5bb457610f32 -aada61eb7ba9a1e7802c5bb457610f32 -aada61eb7ba9a1e7802c5bb457610f32 -aada61eb7ba9a1e7802c5bb457610f32 -aada61eb7ba9a1e7802c5bb457610f32 -7babb7b402cd2046fde8ac647d2beaa0 -7babb7b402cd2046fde8ac647d2beaa0 -7babb7b402cd2046fde8ac647d2beaa0 -7babb7b402cd2046fde8ac647d2beaa0 -7babb7b402cd2046fde8ac647d2beaa0 -7babb7b402cd2046fde8ac647d2beaa0 -7babb7b402cd2046fde8ac647d2beaa0 -7babb7b402cd2046fde8ac647d2beaa0 -7669b12bffac2ac7e10eaedf46b16cf8 -7669b12bffac2ac7e10eaedf46b16cf8 -7669b12bffac2ac7e10eaedf46b16cf8 -7669b12bffac2ac7e10eaedf46b16cf8 -7669b12bffac2ac7e10eaedf46b16cf8 -7669b12bffac2ac7e10eaedf46b16cf8 -7669b12bffac2ac7e10eaedf46b16cf8 -7669b12bffac2ac7e10eaedf46b16cf8 -bcc27ccfbd2eff23bbfde60aa04a36dc -bcc27ccfbd2eff23bbfde60aa04a36dc -bcc27ccfbd2eff23bbfde60aa04a36dc -bcc27ccfbd2eff23bbfde60aa04a36dc -bcc27ccfbd2eff23bbfde60aa04a36dc -bcc27ccfbd2eff23bbfde60aa04a36dc -bcc27ccfbd2eff23bbfde60aa04a36dc -8be183418dc0c0f663bf49e9e33db9d9 -8be183418dc0c0f663bf49e9e33db9d9 -8be183418dc0c0f663bf49e9e33db9d9 -8be183418dc0c0f663bf49e9e33db9d9 -8be183418dc0c0f663bf49e9e33db9d9 -8be183418dc0c0f663bf49e9e33db9d9 -5d5cb72034974822a3907324adac7b51 -5d5cb72034974822a3907324adac7b51 -5d5cb72034974822a3907324adac7b51 -5d5cb72034974822a3907324adac7b51 -5d5cb72034974822a3907324adac7b51 -5d5cb72034974822a3907324adac7b51 -5d5cb72034974822a3907324adac7b51 -5d5cb72034974822a3907324adac7b51 -09429fca7b9d258b0dcfc59705daac4e -09429fca7b9d258b0dcfc59705daac4e -09429fca7b9d258b0dcfc59705daac4e 
-09429fca7b9d258b0dcfc59705daac4e -09429fca7b9d258b0dcfc59705daac4e -09429fca7b9d258b0dcfc59705daac4e -09429fca7b9d258b0dcfc59705daac4e -09429fca7b9d258b0dcfc59705daac4e -7af2242284a4efbfbfd8d61097f9720a -7af2242284a4efbfbfd8d61097f9720a -7af2242284a4efbfbfd8d61097f9720a -7af2242284a4efbfbfd8d61097f9720a -7af2242284a4efbfbfd8d61097f9720a -7af2242284a4efbfbfd8d61097f9720a -7af2242284a4efbfbfd8d61097f9720a -3d74c76ea592240dc7f86a61c81ee390 -3d74c76ea592240dc7f86a61c81ee390 -caf2d7c2f1f13b95c51227eeb23d1367 -caf2d7c2f1f13b95c51227eeb23d1367 -8e7aaf5a15da26ebbf0df9c0881410c2 -5320fb2b2e5be60382eda047ae48b851 -29894e8a8437d2565154d74db1425db4 -58d88545246bcb5c703663d0ecf8ef26 -58d88545246bcb5c703663d0ecf8ef26 -58d88545246bcb5c703663d0ecf8ef26 -58d88545246bcb5c703663d0ecf8ef26 -58d88545246bcb5c703663d0ecf8ef26 -89ceb9c2c202fb5f260452ed8ce448d9 -0b8cbb034d390a465a25b726091e1a8a -d99252bee244bb7732fdce84fbdda7a7 -55aa26927a141169d16478d130607fec -0175faf03d504d8c71bbd1977a76f6bc -0d375ec5642b3409b56204808eaa0808 -cc0b80a60e568c14f1d25928e6399674 -da762d24c3b3caa1726f61a763407627 -d115676bea409dcb72080e34b81d6281 -55a90925f8aa3bd935c9e58645c6c43d -b100e6d0d1c1088b7dae3fa405b268d9 -99b6fe5fa6603b6fe87cd6c03308fc5f -d26052ffc35abde23a06b594d14923e4 -60efd066d62746fb99b84d4ef4c6a372 -1a7807129fd2ba3db17fa5b0a95cb399 -f880e19dd9cb9bb50968588035930bf7 -3952f0bd64aa980cd06509722c9efc41 -49d1f305c222698ae30df3865550d90f -e0e1ec165159ad87bc9cb492e3a61934 -30b780ada4eb2eea405c9931208a1641 -f49c44c24e9060f8deed04b974d2ea8e -8f432a3018c827316e092ad4cb3c813d -5a2913b31cd46d78e72e20325e72849a -13f9fbb7f0af9509de10623f8b84abc8 -65831d14db1a86b35060fd61691836eb -aca3674e73bc097ef2eaeac9c714d627 -dabe8bcf86579efe9dcda98530dcda74 -2324f98f56f699f9dbac3b9a6a28c042 -f2eb3809126c5e44ef9c197245389103 -a1ff303d0282418b33163a711cb9a6f1 -838f9bd591580b66a9c1d3151eec86ba -541f9e42f6b21ada7c00eec3fdb133e8 -c7ce93516d5e62615047589df30b79fa -779936a99a40c590806c26c3ee5d1875 
-565e90c5c4fedb44ca7ec1d3164935b3 -7a77beffcec10af2414ad46014874ee5 -e716658c3c57f842a0918d39f23d5212 -8c29e28940da914d4be2bd9e95e9fa52 -2718b18cc1c85a7c31d8b5978fa9b86e -f230030e4c1679cd320c1df2b08b4578 -a670767edda00f65f0ff3fd5e8ae33c0 -1cfe9bceee64986fb9b7ecdd88817c55 -6e63627f1569ab3f45777511d60c849f -993a8d8a34a79bfc00f1c2836aa95771 -a5eb7a17f26412f933d969f934bafeb8 -601b6d90345f78740ac93acb60ba4a04 -6dbd9f9683176868c239e875ed05240d -a80f541d0e887ec7c87895bf5155a64b -4aaaf970d89bcf0e8086e764a6721aec -bca1d744d5d5d66479975ed22743f7e8 -6f738092d055c3f04a83473b388fbadb -15fcbb918dac6af3c19d00bc95e6f8c1 -26eb704c99353a2f865a84af08d6486d -fb4d0bc985502c07e5d20a9cf4ada1ab -9fa1b61943b737b92da4f09051215ebf -82f3878fb9f8c604756155fc8f382833 -524c45cb84f4eaed7b0f2f3c3ffd3792 -9c3e8a69212a81af2eebdc3adbba12e7 -0dc7323c9b9e3bc4bf202b68d264651e -06452af2ad3188a9eef706741934bca7 -fa893f1a6a26481c90957e31321f4283 -3cdbaf9b9695e92794c4bf1b06213798 -7b9e7056d30e0a2456174fce31c10e5f -7b9e7056d30e0a2456174fce31c10e5f -7b9e7056d30e0a2456174fce31c10e5f -bfd0686b679318be77824bf238edf0d8 -062f6b46877fee62406e9c4eaf430b67 -0bf0de038477dcb60ea3cb3cb102f91d -34b389079bdfc376d50d9cf1ea373a81 -8cc4090b2d1d5cdc38ed1977faff3681 -778ba1412f0f47692a4b4d89af482efa -79e5b1841b49189e1b953978f260f0ac -9cebac7f046fcfd1553c1de2d2072485 -b5a93c9c13fd8d380728c1022c957701 -19a21d9c88861594f89943e894b6f517 -a31a398476ea2d0d96018f207a53378c -f90474dcf93c7319148888569e7b6ed1 -598274d8f099623d265319cc9117c967 -19a21d9c88861594f89943e894b6f517 -f5abc93e761b27672a82dcbd474c9d82 -0f9b73ac0b50fb67440b249e8f3caf5e -bc6bf5d33c770da78f6bb2ac7bbd0d41 -8847503ebaf681600bd90547c395c346 -61c2a39e0e4b1c1d9299ffb9685f5deb -cdc4c9619b1f779e0f3d290ca38a9b0a -a3056b41a641372acca0db7074f0ca15 -a46d53bd61ec2e0938cf907663af9215 -c7cfe750e00b219bb69712ac5fadd9e4 -acb2b7dec15dbecb823ba4789aa7865c -2741f418a2367f72e9a908d0b4c8d330 -0ea64f578cfa66d4daea90c664a374f5 -bea105416d9ead16b0bcc231d9c96aea 
-83dcf8462895f0f82783abb44c40cb6a -94062c3aa1962687a0f0db38306f3277 -f5ad361cb1f0cbfe78025c3e5d8a4817 -53f3b2d078f8c41abaf68155a90441c9 -83dcf8462895f0f82783abb44c40cb6a -635e71ee943805d7586d3acf999cdec1 -e8ffcd074c9db58745bda56dc982bcb4 -eb3f3bed1f08a99503883f1c68fcb7b4 -730722bcef09f212b5b216f03e19da06 -b8761c79db3af75d60abae2e7e6b4d1c -25781e6e2a1aa7f4adbca662e78e0ab7 -a6f99a208bb061b35c4d180e2470d591 -756a858e003ed7991facc4b0ec5b1f58 -e24579ae1cff57510a6ca6e8d4e766d1 -b9c1c0614f62016c5acb300cf3a29a12 -2b4fbad244eb926a9a8ba56ba95f5a04 -2a8e2153026e5ad4f4a9ca11fb45cc76 -56a8511b1a4e79ef3a1a67761a0e5cbe -e39aa74716f5417976498e65eec48d67 -9abf15c73b2b8ce9d0cbb1457ef48710 -e85576f37656797f2c82359c7e474700 -4a7d566702f76961e07371752ba2fef7 -a54254002457fda1b0712a84e7586ebe -433fcf61841eb411036035056bbd627d -332d470bf14822493411e8897ca7e249 -a622f4798b28f0eb10f764159ff08e64 -f2bef2736f9b459ee6865600efcbc9fb -11ffbed6380ecea0a1bd4f7f8d6045b2 -11ffbed6380ecea0a1bd4f7f8d6045b2 -11ffbed6380ecea0a1bd4f7f8d6045b2 -11ffbed6380ecea0a1bd4f7f8d6045b2 -3142801e5a564e4342496a4edbcd1747 -b52064f3b967f63d708aa7c217d30fe0 -ca71b63d83b52cc7997898ee0fc5d34b -f87b4c85ed7ff51c4d80cc862f189f8d -15efc1db72b2f3286725c0dc14821050 -7a9a0d77c4d4e11fedee577f9137e038 -b66796b625282fe452c4fd263a78c1a1 -49a0f22ab0a75cfeb9eb32243ee0804b -1098f52459e389bcc4f276a453be3bcf -72a7e62a56b43b97675861156ef30a1c -590a5a438c79911cef20ce4ed0223f6e -5dfc261fc4ffe677c0c69180457c932a -95aff72690a0a9acea1d9e692bfc2d42 -371a5a2a366aa841fda399fae1d288a6 -25c77369014bf8bb5a87d8b97e4dc7ba -dfb6dded5ae057a4a791a75df503e6ca -326dca490d5f883c2aee98b3c7eb7a35 -daa7782e19d24a7ed0990f0f23381caa -f0aaea941d597aa4961c0a70fc475963 -89976b55874e6b25404b3e68f0020ef0 -44b732fee2b205d169a263a8d4b4a4ee -0989040d40a1ba597d6d42fd2d8805b5 -148a1d102461befea8192e2e3e2490ab -27ad6c1c07616eec0234cc1ce01ab105 -84f843e6176cf18d8c2139b2cd21f3ef -cf707dbfe633a8ddabfca09724c0af3a -7db7aa0825ce98369600a5818406a854 
-f0cc8e9e1519cce022d1476471f77a0c -1a27e47f9a7da6bb3872d815c75dd30c -e01e14459d59cdd7e1c9fed31ac7e30a -e01e14459d59cdd7e1c9fed31ac7e30a -e01e14459d59cdd7e1c9fed31ac7e30a -09491d37337215b5fe848db14d086919 -2861a231db3fd7eeac9d1d094c601406 -db7810c857aca451796b53e2dd2f6d29 -82dec601e930b42434fd1ef35cc61a93 -01b3c6ac3436e32874b6e4c59033f821 -94af3e23f34dfab689864bdc09c98f85 -25588ebe294463477591f5b9f4082b7e -132e9b1fd48b5c44e3d631ccc4fac69d -14f0efcd9f02dc2db67e78658f958418 -7443f8c253fe4425025efdc678914702 -132e9b1fd48b5c44e3d631ccc4fac69d -2ea67fe64ba29e53e2454eda7de7c3bd -903cc1b8f72ed866745f0a82f4c40a0e -e44e33463a58946325a38de6d2f5f8bb -c079d0fdfe37578fa1598f6c44adbb48 -8c5e4ab8086a8d3bbb0479b57c8ed652 -3f26ef5d64cafa3db1b37d7a585b6b88 -ef8489248e832b14fcd8e5ac38640609 -e73f514e27f90df63cb1349965d83d22 -1bd53a1242e5b4fc114a349bf613b895 -20f4cb441b6e83fbadd99fc786325008 -20f4cb441b6e83fbadd99fc786325008 -c4d2eec97ee47137454321263a570a8d -46d03c56f9cbf2f241af8928d1039bc6 -e1c5cdf9b0ed191e801d77a611262fd5 -cfd6e5849d34ebd9f5339292f4ffd280 -47ab16e679b07c8bdf868b48992d2cc3 -2e9a9b7a56846bffe2f82df1edcff5a9 -12ca6687c5a8eea726dbb1890f652a1e -a71726eb74df01b2eca3416773c2ded1 -0e595b959927548645d35f29ca720454 -8e20344a30d8f64d4e4a687069436b9c -f262de20893311d30928a68a391ac8c0 -8cfefad9da9c757779e82517e17ab03e -cc236a90229e61f71899675fc44208bc -99432d58534736ef86fbbdd64760fe62 -9b63079240bff6d9945447b5288485a4 -471bc91c0331c68e4e1683e3a88a3740 -e1c5cdf9b0ed191e801d77a611262fd5 -fa273fbab92e12d13c448d33f057a015 -337925db8879fc20cc86b1907990484d -e1c5cdf9b0ed191e801d77a611262fd5 -e1c5cdf9b0ed191e801d77a611262fd5 -cfed85f06a173bc7481ce57e631a395f -b89943ef2300de767ab76e83c4bfa44f -a71726eb74df01b2eca3416773c2ded1 -545f0951f5e2e2503401e7d002b659e0 -fcc3886eed23b63b5774f89426881f32 -b89943ef2300de767ab76e83c4bfa44f -a71726eb74df01b2eca3416773c2ded1 -545f0951f5e2e2503401e7d002b659e0 -a71726eb74df01b2eca3416773c2ded1 -a71726eb74df01b2eca3416773c2ded1 
-a71726eb74df01b2eca3416773c2ded1 -0e595b959927548645d35f29ca720454 -8e20344a30d8f64d4e4a687069436b9c -f262de20893311d30928a68a391ac8c0 -8cfefad9da9c757779e82517e17ab03e -cc236a90229e61f71899675fc44208bc -99432d58534736ef86fbbdd64760fe62 -9b63079240bff6d9945447b5288485a4 -a71726eb74df01b2eca3416773c2ded1 -0e595b959927548645d35f29ca720454 -8e20344a30d8f64d4e4a687069436b9c -f262de20893311d30928a68a391ac8c0 -8cfefad9da9c757779e82517e17ab03e -cc236a90229e61f71899675fc44208bc -8fc267bdc29f65c2ee5de103db177723 -9b63079240bff6d9945447b5288485a4 -02a4bff9579365064895e9c1b8e7849b -d1dc8f7204e8107257c1459a0afbdab0 -1935f3218e44ccba65c3c00b1c9990d6 -1f0aec03d2b7e667e9cd9be0afe9aa8f -d92a56bad7867dd55dd3023b34c442b6 -f44a0045efa2e19d2cb8623682d21c46 -d92a56bad7867dd55dd3023b34c442b6 -869189c733cd0535e66c97f9f3ca131d -408d30d0423b841a372c316ac19a8f19 -68f1508a4e1c8189fce9ad1defb615c9 -c2fcc68c762410593ca16cf9e883e8f2 -b243f952dcbb4cde6d712c1feea199cc -8ecfa94c49657d420dfd2ef82e7a6d4c -bd1ee3bc5ae92d3ec1b3b621d35cf196 -712e9dd80cb22b9b8719a6a645f5b0ac -e7900f9f6e35c97c09b1fbc00692e2ab -ce9377744dcd5f543eddd876c6fba9c8 -84aa495df484876afb61277c713afcf4 -75a9249760e4b5e302fbac6129ffe179 -2923cee6cfceab6546fe1aad58e6e11f -49a55b966a0ebf5c0d5c8ea1bf380c44 -d2db2cb417607525705bab5951ca9336 -c4c73a649ae80b826aff1a2693d29374 -c35e3b543b97274a1385403274384e6a -2c9ac891fb3a2c174c95d8b382b199a4 -6a4f971b73008f104c73758faacaabec -f7883b126fc32509d45577b5cac7170c -18c15ae4aa8264fa6ab1856e64cdd22f -9878650d44af48d3f3765fe79ea03ee5 -e3462c7cd965bb50239f8b9eaed73393 -657914c1b31c633ce51cd7b5490a0882 -32532dbfd9478479aff43864a83f913c -5857b9b414ab8dfa1ab84326c5694a91 -9bcf6b17f195a1d609049a2d2a5d1608 -92d3fbc7d0a57bf073bd8c7e28d69a53 -5c4faf19dac0ed11172a13c978afbbe3 -df5571f77a2a65596048565154d2f3e4 -25bf88700bef29cecbf15e4e1eaa5cd1 -aaf6e43e21f4a67634ca0917c83d5e4d -30d9166e2f884b793cc1add4603462bf -5bf1a5481ccf7b9d758cba4009bd4ac2 -5bf1a5481ccf7b9d758cba4009bd4ac2 
-5bf1a5481ccf7b9d758cba4009bd4ac2 -5bf1a5481ccf7b9d758cba4009bd4ac2 -bb8c4b3ccfb6a50ad2f5f08a10b565a3 -9bfb99fafd5ef401368fd341717f28e7 -cec5d12e18190e3b6cdd6202c4a255d4 -29c10a7bdf51ff4eee0848ee48bf63ce -155944c4b3c86e9f9353410f5e09b7b9 -e2f882eb592cb80d1a3f331bda096e6a -cb68b7c74d24a31f5f8455634fa1cf2e -63baf267b7c1c3e0cd00875b3679d4b5 -598df03daef1783e8542fd3e99c8068a -a07e2644b704b058de1428222bb65dd3 -c8f22be983f32eb0c92757fd61e8b976 -4a736b7d2c8cb7dae9629c91dc9fe307 -04623f4956ff7f28d1b2a1a367b6a82c -14b09fb90e9ef05ad3abfab93b886607 -43691faee1dbe46e4f2e818a121750ae -1fa68787df902f7a22b55386840701e6 -628b0f91f2107f749c26f2bff0fcf46e -628b0f91f2107f749c26f2bff0fcf46e -cf361951674cb5130b67c8af59815d65 -cf361951674cb5130b67c8af59815d65 -cf361951674cb5130b67c8af59815d65 -ebbfeb573ce0332a905c5962735a20e2 -3fcf2ccdfce324754a9a5defdb1cbcfc -fa2852f4e11b363c55dfbef3ab961579 -eb063952fdb13984202b5aac5f45122c -eb063952fdb13984202b5aac5f45122c -eb063952fdb13984202b5aac5f45122c -eb063952fdb13984202b5aac5f45122c -8d022c643571756358153de2b759503c -8d022c643571756358153de2b759503c -a9e7a1bc3d560310d5155b768b4f473d -5ba6da71189662806bafc3c7dac1f2c6 -5ba6da71189662806bafc3c7dac1f2c6 -2b128bc46fecb137e30f04fdbe47bdf8 -4aeb20a805f0c6999eaa1bf260ebd487 -5c88ac81279b6fdc0a8355b22e4108f4 -5da48fcb090e863ce2ea127caefdb387 -0d038ec5f3d7f0237b65bd7dc7e7497c -1b009a27b28ee1ad6009ce9ce04dabf4 -e06a538da7da42929f158fc9728a18a6 -89093aadd2007052b4c5a1f0d76e04f6 -9f8a9f84cf3243c6c5076d90b50478b0 -3d5d341bdf6f091cf931c7bcddb15388 diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 98d040bec..649578c19 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -5,11 +5,19 @@ updates: schedule: interval: weekly open-pull-requests-limit: 10 + # todo: change this to widen when Dependabot adds widen back to Pip ecosystem + # see https://github.com/dependabot/dependabot-core/pull/10194 + versioning-strategy: auto ignore: - dependency-name: sphinx versions: - 3.4.3 - 3.5.2 
+ # Ignore all black updates, because we use a pinned version we don't want to change + - dependency-name: black + # Ignore types-setuptools patch-level updates, because they issue too many! + - dependency-name: types-setuptools + update-types: ["version-update:semver-patch"] - package-ecosystem: github-actions directory: "/" schedule: @@ -22,6 +30,7 @@ updates: directory: docker/latest/ schedule: interval: weekly + versioning-strategy: auto - package-ecosystem: docker directory: docker/unstable/ schedule: @@ -30,3 +39,4 @@ updates: directory: devtools/ schedule: interval: weekly + versioning-strategy: auto diff --git a/.github/workflows/docker-images.yaml b/.github/workflows/docker-images.yaml index 81c73673c..a23deff35 100644 --- a/.github/workflows/docker-images.yaml +++ b/.github/workflows/docker-images.yaml @@ -37,11 +37,11 @@ jobs: packages: read runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 1 - name: Install Task - uses: arduino/setup-task@v1 + uses: arduino/setup-task@v2 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - name: Install poetry @@ -62,18 +62,18 @@ jobs: contents: read runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 1 - name: Install Task - uses: arduino/setup-task@v1 + uses: arduino/setup-task@v2 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - name: Install poetry run: | pip install -r devtools/requirements-poetry.in - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} diff --git a/.github/workflows/validate.yaml b/.github/workflows/validate.yaml index 8044ef02f..b4318a959 100644 --- a/.github/workflows/validate.yaml +++ b/.github/workflows/validate.yaml @@ -38,11 +38,11 @@ jobs: - python-version: "3.8" os: ubuntu-latest extensive-tests: true + PREPARATION: "sudo apt-get install -y libxml2-dev libxslt-dev" suffix: "-min" 
TOXENV_SUFFIX: "-min" - python-version: "3.9" os: ubuntu-latest - TOX_EXTRA_COMMAND: "- isort --check-only --diff ." TOXENV_SUFFIX: "-docs" - python-version: "3.10" os: ubuntu-latest @@ -50,16 +50,15 @@ jobs: TOXENV_SUFFIX: "-lxml" - python-version: "3.11" os: ubuntu-latest - TOX_EXTRA_COMMAND: "flake8 --exit-zero rdflib" TOXENV_SUFFIX: "-docs" PREPARATION: "sudo apt-get install -y firejail" extensive-tests: true TOX_TEST_HARNESS: "firejail --net=none --" TOX_PYTEST_EXTRA_ARGS: "-m 'not webtest'" steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Cache XDG_CACHE_HOME - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ${{ env.XDG_CACHE_HOME }} key: ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/pyproject.toml', '**/poetry.lock', '**/with-fuseki.sh', '**/*requirements*.txt', '**/*requirements*.in') }} @@ -67,19 +66,19 @@ jobs: ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}- ${{ github.job }}-xdg-v1-${{ matrix.os }}- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install poetry run: | pip install -r devtools/requirements-poetry.in - - uses: actions/setup-java@v3 + - uses: actions/setup-java@v4 if: ${{ matrix.extensive-tests }} with: distribution: "temurin" java-version: "17" - name: Install Task - uses: arduino/setup-task@v1 + uses: arduino/setup-task@v2 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - name: Run preparation @@ -103,12 +102,12 @@ jobs: TOX_PYTEST_EXTRA_ARGS: ${{ matrix.TOX_PYTEST_EXTRA_ARGS }} TOX_TEST_HARNESS: ${{ matrix.TOX_TEST_HARNESS }} TOX_EXTRA_COMMAND: ${{ matrix.TOX_EXTRA_COMMAND }} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: ${{ (success() || failure()) }} with: name: ${{ matrix.python-version }}-${{ matrix.os }}${{matrix.suffix}}-mypy-junit-xml path: test_reports/${{ 
matrix.python-version }}-${{ matrix.os }}${{matrix.suffix}}-mypy-junit.xml - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: ${{ (success() || failure()) }} with: name: ${{ matrix.python-version }}-${{ matrix.os }}${{matrix.suffix}}-pytest-junit-xml @@ -121,12 +120,12 @@ jobs: fail-fast: false matrix: include: - - task: "gha:flake8" + - task: "gha:lint" python-version: 3.8 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Cache XDG_CACHE_HOME - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ${{ env.XDG_CACHE_HOME }} key: ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}-${{ hashFiles('**/pyproject.toml', '**/poetry.lock', '**/with-fuseki.sh', '**/*requirements*.txt', '**/*requirements*.in') }} @@ -134,14 +133,14 @@ jobs: ${{ github.job }}-xdg-v1-${{ matrix.os }}-${{ matrix.python-version }}- ${{ github.job }}-xdg-v1-${{ matrix.os }}- - name: Set up Python ${{env.DEFAULT_PYTHON}} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install poetry run: | pip install -r devtools/requirements-poetry.in - name: Install Task - uses: arduino/setup-task@v1 + uses: arduino/setup-task@v2 with: repo-token: ${{ secrets.GITHUB_TOKEN }} - name: Run task diff --git a/.mailmap b/.mailmap index d953d228f..06db195ac 100644 --- a/.mailmap +++ b/.mailmap @@ -65,9 +65,10 @@ Maurizio Nagni kusamau Michel Pelletier michel Mikael Nilsson mikael Nathan Maynes Nathan M -Nicholas J. Car Nicholas Car -Nicholas J. Car Nicholas Car -Nicholas J. Car nicholascar +Nicholas J. Car Nicholas Car +Nicholas J. Car Nicholas Car +Nicholas J. Car nicholascar +Nicholas J. 
Car Nicholas Car Niklas Lindström lindstream Niklas Lindström Niklas Lindstrom Olivier Grisel ogrisel @@ -98,4 +99,4 @@ William Waites wwaites William Waites ww@epsilon.styx.org Whit Morriss whit Zach Lûster kernc -Zach Lûster Kernc \ No newline at end of file +Zach Lûster Kernc diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5f0c147ca..8a74122cc 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,26 +6,22 @@ ci: # https://pre-commit.com/#adding-pre-commit-plugins-to-your-project repos: - - repo: https://github.com/pycqa/isort - rev: 5.11.5 + - repo: https://github.com/astral-sh/ruff-pre-commit + # WARNING: Ruff version should be the same as in `pyproject.toml` + rev: v0.5.4 hooks: - - id: isort - # This is here to defer file selection to isort which will do it based on - # black config. - pass_filenames: false - require_serial: true - args: ["."] - - repo: https://github.com/psf/black - # WARNING: version should be the same as in `pyproject.toml` - # Using git ref spec because of https://github.com/psf/black/issues/2493 - rev: 'refs/tags/23.3.0:refs/tags/23.3.0' + - id: ruff + args: ["--fix"] + - repo: https://github.com/psf/black-pre-commit-mirror + # WARNING: Black version should be the same as in `pyproject.toml` + rev: "24.4.2" hooks: - id: black pass_filenames: false require_serial: true args: ["."] - repo: https://github.com/python-poetry/poetry - rev: 1.5.0 + rev: 1.8.3 hooks: - id: poetry-check - id: poetry-lock diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 07bdc9db8..357ab543b 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -21,9 +21,10 @@ build: # the readthedocs environment. 
- pip install -r devtools/requirements-poetry.in post_install: - - poetry config virtualenvs.create false - - poetry install --only=main --only=docs --extras=html - - poetry env info + - poetry export --only=main --only=docs --extras=html -o requirements.txt + - pip install --no-cache-dir -r requirements.txt + - pip install . + - python -c "from rdflib import Graph; print(Graph)" sphinx: fail_on_warning: true diff --git a/CHANGELOG.md b/CHANGELOG.md index 666be380f..78c596974 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,437 @@ -# 2023-03-26 RELEASE 6.3.2 +## 2023-08-02 RELEASE 7.0.0 -## fix: `ROUND`, `ENCODE_FOR_URI` and `SECONDS` SPARQL functions (#2314) +This is a major release with relatively slight breaking changes, new +features and bug fixes. + +The most notable breaking change relates to how RDFLib handles the +`publicID` parameter of the `Graph.parse` and `Dataset.parse` methods. +Most users should not be affected by this change. + +Instructions on adapting existing code to the breaking changes can be +found in the upgrade guide from Version 6 to Version 7 which should be +available [here](https://rdflib.readthedocs.io/en/stable/). + +It is likely that the next couple of RDFLib releases will all be major +versions, mostly because there are some more shortcomings of RDFLib's +public interface that should be addressed. + +If you use RDFLib, please consider keeping an eye on +[discussions](https://github.com/RDFLib/rdflib/discussions?discussions_q=label%3A%22feedback+wanted%22), +issues and pull-requests labelled with ["feedback +wanted"](https://github.com/RDFLib/rdflib/labels/feedback%20wanted). + +A big thanks to everyone who contributed to this release. + +### BREAKING CHANGE: don't use `publicID` as the name for the default graph. (#2406) + +Commit [4b96e9d](https://github.com/RDFLib/rdflib/commit/4b96e9d), closes [#2406](https://github.com/RDFLib/rdflib/issues/2406). 
+ + +When parsing data into a `ConjunctiveGraph` or `Dataset`, the triples in the +default graphs in the sources were loaded into a graph named `publicID`. + +This behaviour has been changed, and now the triples from the default graph in +source RDF documents will be loaded into `ConjunctiveGraph.default_context` or +`Dataset.default_context`. + +The `publicID` parameter to `ConjunctiveGraph.parse` and `Dataset.parse` +constructors will now only be used as the base URI for relative URI resolution. + +- Fixes https://github.com/RDFLib/rdflib/issues/2404 +- Fixes https://github.com/RDFLib/rdflib/issues/2375 +- Fixes https://github.com/RDFLib/rdflib/issues/436 +- Fixes https://github.com/RDFLib/rdflib/issues/1804 + +### BREAKING CHANGE: drop support for python 3.7 (#2436) + +Commit [1e5f56b](https://github.com/RDFLib/rdflib/commit/1e5f56b), closes [#2436](https://github.com/RDFLib/rdflib/issues/2436). + + +Python 3.7 will be end-of-life on the 27th of June 2023 and the next release of +RDFLib will be a new major version. + +This changes the minimum supported version of Python to 3.8.1 as some of the +dependencies we use are not too fond of python 3.8.0. This change also removes +all accommodations for older python versions. + +### feat: add `curie` method to `NamespaceManager` (#2365) + +Commit [f200722](https://github.com/RDFLib/rdflib/commit/f200722), closes [#2365](https://github.com/RDFLib/rdflib/issues/2365). + + +Added a `curie` method to `NamespaceManager`, which can be used to generate a +CURIE from a URI. + +Other changes: + +- Fixed `NamespaceManager.expand_curie` to work with CURIES that have blank + prefixes (e.g. `:something`), which are valid according to [CURIE Syntax + 1.0](https://www.w3.org/TR/2010/NOTE-curie-20101216/). +- Added a test to confirm . + +Fixes . 
+ + +### feat: add optional `target_graph` argument to `Graph.cbd` and use it for DESCRIBE queries (#2322) + +Commit [81d13d4](https://github.com/RDFLib/rdflib/commit/81d13d4), closes [#2322](https://github.com/RDFLib/rdflib/issues/2322). + + +Add optional keyword only `target_graph` argument to `rdflib.graph.Graph.cbd` and use this new argument in `evalDescribeQuery`. + +This makes it possible to compute a concise bounded description without creating a new graph to hold the result, and also without potentially having to copy it to another final graph. + +### feat: Don't generate prefixes for unknown URIs (#2467) + +Commit [bd797ac](https://github.com/RDFLib/rdflib/commit/bd797ac). + + +When serializing RDF graphs, URIs with unknown prefixes were assigned a +namespace like `ns1:`. While the result would be smaller files, it does +result in output that is not as readable. + +This change removes this automatic assignment of namespace prefixes. + +This is somewhat of an aesthetic choice, eventually we should have more +flexibility in this regard so that users can exercise more control over +how URIs in unknown namespaces are handled. + +With this change, users can still manually create namespace prefixes for +URIs in unknown namespaces, but before it there was no way to avoid the +undesired behaviour, so this seems like the better default. + + +### feat: Longturtle improvements (#2500) + +Commit [5ee8bd7](https://github.com/RDFLib/rdflib/commit/5ee8bd7), closes [#2500](https://github.com/RDFLib/rdflib/issues/2500). + +Improved the output of the longturtle serializer. + +### fix: SPARQL count with optionals (#2448) + +Commit [46ff6cf](https://github.com/RDFLib/rdflib/commit/46ff6cf), closes [#2448](https://github.com/RDFLib/rdflib/issues/2448). + + +Change SPARQL count aggregate to ignore optional that are unbound +instead of raising an exception when they are encountered. 
+ +### fix: `GROUP_CONCAT` handling of empty separator (issue) (#2474) + +Commit [e94c252](https://github.com/RDFLib/rdflib/commit/e94c252), closes [#2474](https://github.com/RDFLib/rdflib/issues/2474). + + +`GROUP_CONCAT` was handling an empty separator (i.e. `""`) incorrectly, +it would handle it as if the separator were not set, so essentially it was +treated as a single space (i.e. `" "`). + +This change fixes it so that an empty separator with `GROUP_CONCAT` +results in a value with nothing between concatenated values. + + +Fixes + + +### fix: add `NORMALIZE_LITERALS` to `rdflib.__all__` (#2489) + +Commit [6981c28](https://github.com/RDFLib/rdflib/commit/6981c28), closes [#2489](https://github.com/RDFLib/rdflib/issues/2489). + + +This gets Sphinx to generate documentation for it, and also clearly +indicates that it can be used from outside the module. + +- Fixes + + +### fix: bugs with `rdflib.extras.infixowl` (#2390) + +Commit [cd0b442](https://github.com/RDFLib/rdflib/commit/cd0b442), closes [#2390](https://github.com/RDFLib/rdflib/issues/2390). + + +Fix the following issues in `rdflib.extras.infixowl`: +- getting and setting of max cardinality only considered identifiers and not other RDF terms. +- The return value of `manchesterSyntax` was wrong for some cases. +- The way that `BooleanClass` was generating its string representation (i.e. `BooleanClass.__repr__`) was wrong for some cases. + +Other changes: +- Added an example for using infixowl to create an ontology. +- Updated infixowl tests. +- Updated infixowl documentation. + +This code is based on code from: +- + + +### fix: correct imports and `__all__` (#2340) + +Commit [7df77cd](https://github.com/RDFLib/rdflib/commit/7df77cd), closes [#2340](https://github.com/RDFLib/rdflib/issues/2340). + + +Disable +[`implicit_reexport`](https://mypy.readthedocs.io/en/stable/config_file.html#confval-implicit_reexport) +and eliminate all errors reported by mypy after this. 
+ +This helps ensure that import statements import from the right module and that +the `__all__` variable is correct. + + +### fix: dbpedia URL to use https instead of http (#2444) + +Commit [ef25896](https://github.com/RDFLib/rdflib/commit/ef25896), closes [#2444](https://github.com/RDFLib/rdflib/issues/2444). + + +The URL for the service keyword had the http address for the dbpedia endpoint, which no longer works. Changing it to https as that works. + + +### fix: eliminate bare `except:` (#2350) + +Commit [4ea1436](https://github.com/RDFLib/rdflib/commit/4ea1436), closes [#2350](https://github.com/RDFLib/rdflib/issues/2350). + + +Replace bare `except:` with `except Exception`, there are some cases where it +can be narrowed further, but this is already an improvement over the current +situation. + +This is somewhat pursuant to eliminating +[flakeheaven](https://github.com/flakeheaven/flakeheaven), as it no longer +supports the latest version of flake8 +[[ref](https://github.com/flakeheaven/flakeheaven/issues/132)]. But it also is +just the right thing to do as bare exceptions can cause problems. + + +### fix: eliminate file intermediary in translate algebra (#2267) + +Commit [ae6b859](https://github.com/RDFLib/rdflib/commit/ae6b859), closes [#2267](https://github.com/RDFLib/rdflib/issues/2267). + + +Previously, `rdflib.plugins.sparql.algebra.translateAlgebra()` maintained state via a file, with a fixed filename `query.txt`. With this change, use of that file is eliminated; state is now maintained in memory so that multiple concurrent `translateAlgebra()` calls, for example, should no longer interfere with each other. + +The change is accomplished with no change to the client interface. Basically, the actual functionality has been moved into a class, which is instantiated and used as needed (once per call to `algrebra.translateAlgebra()`). 
+ + +### fix: eliminate some mutable default arguments in SPARQL code (#2301) + +Commit [89982f8](https://github.com/RDFLib/rdflib/commit/89982f8), closes [#2301](https://github.com/RDFLib/rdflib/issues/2301). + + +This change eliminates some situations where a mutable object (i.e., a dictionary) was used as the default value for functions in the `rdflib.plugins.sparql.processor` module and related code. It replaces these situations with `typing.Optinal` that defaults to None, and is then handled within the function. Luckily, some of the code that the SPARQL Processor relied on already had this style, meaning not a lot of changes had to be made. + +This change also makes a small update to the logic in the SPARQL Processor's query function to simplify the if/else statement. This better mirrors the implementation in the `UpdateProcessor`. + + +### fix: formatting of SequencePath and AlternativePath (#2504) + +Commit [9c73581](https://github.com/RDFLib/rdflib/commit/9c73581), closes [#2504](https://github.com/RDFLib/rdflib/issues/2504). + + +These path types were formatted without parentheses even if they +contained multiple elements, resulting in string representations that +did not accurately represent the path. + +This change fixes the formatting so that the string representations are +enclosed in parentheses when necessary. + +- Fixes . + + +### fix: handling of `rdf:HTML` literals (#2490) + +Commit [588286b](https://github.com/RDFLib/rdflib/commit/588286b), closes [#2490](https://github.com/RDFLib/rdflib/issues/2490). + + +Previously, without `html5lib` installed, literals with`rdf:HTML` +datatypes were treated as +[ill-typed](https://www.w3.org/TR/rdf11-concepts/#section-Graph-Literal), +even if they were not ill-typed. 
+ +With this change, if `html5lib` is not installed, literals with the +`rdf:HTML` datatype will not be treated as ill-typed, and will have +`None` as their `ill_typed` attribute value, which means that it is +unknown whether they are ill-typed or not. + +This change also fixes the mapping from `rdf:HTML` literal values to +lexical forms. + +Other changes: + +- Add tests for `rdflib.NORMALIZE_LITERALS` to ensure it behaves + correctly. + +Related issues: + +- Fixes + + +### fix: HTTP 308 Permanent Redirect status code handling (#2389) + +Commit [e0b3152](https://github.com/RDFLib/rdflib/commit/e0b3152), closes [#2389](https://github.com/RDFLib/rdflib/issues/2389) [/docs.python.org/3.11/whatsnew/changelog.html#id128](https://github.com//docs.python.org/3.11/whatsnew/changelog.html/issues/id128). + + +Change the handling of HTTP status code 308 to behave more like +`urllib.request.HTTPRedirectHandler`, most critically, the new 308 handling will +create a new `urllib.request.Request` object with the new URL, which will +prevent state from being carried over from the original request. + +One case where this is important is when the domain name changes, for example, +when the original URL is `http://www.w3.org/ns/adms.ttl` and the redirect URL is +`https://uri.semic.eu/w3c/ns/adms.ttl`. With the previous behaviour, the redirect +would contain a `Host` header with the value `www.w3.org` instead of +`uri.semic.eu` because the `Host` header is placed in +`Request.unredirected_hdrs` and takes precedence over the `Host` header in +`Request.headers`. + +Other changes: +- Only handle HTTP status code 308 on Python versions before 3.11 as Python 3.11 + + will handle 308 by default [[ref](https://docs.python.org/3.11/whatsnew/changelog.html#id128)]. +- Move code which uses `http://www.w3.org/ns/adms.ttl` and + `http://www.w3.org/ns/adms.rdf` out of `test_guess_format_for_parse` into a + separate parameterized test, which instead uses the embedded http server. 
+ + This allows the test to fully control the `Content-Type` header in the + response instead of relying on the value that the server is sending. + + This is needed because the server is sending `Content-Type: text/plain` for + the `adms.ttl` file, which is not a valid RDF format, and the test is + expecting `Content-Type: text/turtle`. + +Fixes: +- . + +### fix: lexical-to-value mapping of rdf:HTML literals (#2483) + +Commit [53aaf02](https://github.com/RDFLib/rdflib/commit/53aaf02), closes [#2483](https://github.com/RDFLib/rdflib/issues/2483). + + +Use strict mode when parsing `rdf:HTML` literals. This ensures that when +[lexical-to-value +mapping](https://www.w3.org/TR/rdf11-concepts/#dfn-lexical-to-value-mapping) +(i.e. parsing) of a literal with `rdf:HTML` data type occurs, a value will +only be assigned if the lexical form is a valid HTML5 fragment. +Otherwise, i.e. for invalid fragments, no value will be associated with +the literal +[[ref](https://www.w3.org/TR/rdf11-concepts/#section-Graph-Literal)] and +the literal will be ill-typed. + + +### fix: TriG handling of GRAPH keyword without a graph ID (#2469) + +Commit [8c9608b](https://github.com/RDFLib/rdflib/commit/8c9608b), closes [#2469](https://github.com/RDFLib/rdflib/issues/2469) [/www.w3.org/2013/TriGTests/#trig-graph-bad-01](https://github.com//www.w3.org/2013/TriGTests//issues/trig-graph-bad-01). + + +The RDF 1.1 TriG grammar only allows the `GRAPH` keyword if it +is followed by a graph identifier +[[ref](https://www.w3.org/TR/trig/#grammar-production-block)]. + +This change enforces this rule so that the + + test passes. + +### fix: TriG parser error handling for nested graphs (#2468) + +Commit [afea615](https://github.com/RDFLib/rdflib/commit/afea615), closes [#2468](https://github.com/RDFLib/rdflib/issues/2468) [/www.w3.org/2013/TriGTests/#trig-graph-bad-07](https://github.com//www.w3.org/2013/TriGTests//issues/trig-graph-bad-07). + + +Raise an error when nested graphs occur in TriG. 
+ +With this change, the test passes. + +### fix: typing errors from dmypy (#2451) + +Commit [10f9ebe](https://github.com/RDFLib/rdflib/commit/10f9ebe), closes [#2451](https://github.com/RDFLib/rdflib/issues/2451). + + +Fix various typing errors that are reported when running with `dmypy`, +the mypy daemon. + +Also add a task for running `dmypy` to the Taskfile that can be selected +as the default mypy variant by setting the `MYPY_VARIANT` environment +variable to `dmypy`. + + +### fix: widen `Graph.__contains__` type-hints to accept `Path` values (#2323) + +Commit [1c45ec4](https://github.com/RDFLib/rdflib/commit/1c45ec4), closes [#2323](https://github.com/RDFLib/rdflib/issues/2323). + + +Change the type-hints for `Graph.__contains__` to also accept `Path` +values as the parameter is passed to the `Graph.triples` function, +which accepts `Path` values. + + +### docs: Add CITATION.cff file (#2502) + +Commit [ad5c0e1](https://github.com/RDFLib/rdflib/commit/ad5c0e1), closes [#2502](https://github.com/RDFLib/rdflib/issues/2502). + + +The `CITATION.cff` file provides release metadata which is used by +Zenodo and other software and systems. + +This file's content is best-effort, and pull requests with improvements +are welcome and will affect future releases. + + +### docs: add guidelines for breaking changes (#2402) + +Commit [cad367e](https://github.com/RDFLib/rdflib/commit/cad367e), closes [#2402](https://github.com/RDFLib/rdflib/issues/2402). + + +Add guidelines on how breaking changes should be approached. + +The guidelines take a very pragmatic approach with known downsides, but this +seems like the best compromise given the current situation. 
+ +For prior discussion on this point see: +- https://github.com/RDFLib/rdflib/discussions/2395 +- https://github.com/RDFLib/rdflib/pull/2108 +- https://github.com/RDFLib/rdflib/discussions/1841 + + +### docs: fix comment that doesn't describe behavior (#2443) + +Commit [4e42d10](https://github.com/RDFLib/rdflib/commit/4e42d10), closes [#2443](https://github.com/RDFLib/rdflib/issues/2443). + + +Comment refers to a person that knows bob and the code would return a name, +but this would only work if the triple `person foaf:name bob .` is part of the dataset + +As this is a very uncommon way to model a `foaf:knows` the code was +adjusted to match the description. + +### docs: recommend making an issue before making an enhancement (#2391) + +Commit [63b082c](https://github.com/RDFLib/rdflib/commit/63b082c), closes [#2391](https://github.com/RDFLib/rdflib/issues/2391). + + +Suggest that contributors first make an issue to get in principle +agreement for pull requests before making the pull request. + +Enhancements can be controversial, and we may reject the enhancement +sometimes, even if the code is good, as it may just not be deemed +important enough to increase the maintenance burden of RDFLib. + +Other changes: +- Updated the checklist in the pull request template to be more accurate to + current practice. +- Improved grammar and writing in the pull request template, contribution guide + and developers guide. + + +### docs: remove unicode string form in rdflib/term.py (#2384) + +Commit [ddcc4eb](https://github.com/RDFLib/rdflib/commit/ddcc4eb), closes [#2384](https://github.com/RDFLib/rdflib/issues/2384). + + +The use of Unicode literals is an artefact of Python 2 and is incorrect in Python 3. + +Doctests for docstrings using Unicode literals only pass because [ALLOW_UNICODE](https://docs.pytest.org/en/7.1.x/how-to/doctest.html#using-doctest-options) +is set, but this option should be disabled as RDFLib does not support Python 2 any more. 
+ +This partially resolves . + + +## 2023-03-26 RELEASE 6.3.2 + +### fix: `ROUND`, `ENCODE_FOR_URI` and `SECONDS` SPARQL functions (#2314) Commit [af17916](https://github.com/RDFLib/rdflib/commit/af17916), closes [#2314](https://github.com/RDFLib/rdflib/issues/2314). @@ -14,7 +445,7 @@ This change corrects these issues. - Closes . -## fix: add `__hash__` and `__eq__` back to `rdflib.paths.Path` (#2292) +### fix: add `__hash__` and `__eq__` back to `rdflib.paths.Path` (#2292) Commit [fe1a8f8](https://github.com/RDFLib/rdflib/commit/fe1a8f8), closes [#2292](https://github.com/RDFLib/rdflib/issues/2292). @@ -32,7 +463,7 @@ All path related tests are also moved into one file. - Closes . -## fix: Add `to_dict` method to the JSON-LD `Context` class. (#2310) +### fix: Add `to_dict` method to the JSON-LD `Context` class. (#2310) Commit [d7883eb](https://github.com/RDFLib/rdflib/commit/d7883eb), closes [#2310](https://github.com/RDFLib/rdflib/issues/2310). @@ -43,7 +474,7 @@ This change adds the method. - Closes . -## fix: add the `wgs` namespace binding back (#2294) +### fix: add the `wgs` namespace binding back (#2294) Commit [adf8eb2](https://github.com/RDFLib/rdflib/commit/adf8eb2), closes [#2294](https://github.com/RDFLib/rdflib/issues/2294). @@ -54,7 +485,7 @@ This change adds it back. - Closes . -## fix: change the prefix for `https://schema.org/` back to `schema` (#2312) +### fix: change the prefix for `https://schema.org/` back to `schema` (#2312) Commit [3faa01b](https://github.com/RDFLib/rdflib/commit/3faa01b), closes [#2312](https://github.com/RDFLib/rdflib/issues/2312). @@ -66,7 +497,7 @@ prefix would no longer have the same behaviour. This change changes the prefix back to `schema`. -## fix: include docs and examples in the sdist tarball (#2289) +### fix: include docs and examples in the sdist tarball (#2289) Commit [394fb50](https://github.com/RDFLib/rdflib/commit/394fb50), closes [#2289](https://github.com/RDFLib/rdflib/issues/2289). 
@@ -81,7 +512,7 @@ A `test:sdist` task is also added to `Taskfile.yml` which uses the sdists to run pytest and build docs. -## fix: IRI to URI conversion (#2304) +### fix: IRI to URI conversion (#2304) Commit [dfa4054](https://github.com/RDFLib/rdflib/commit/dfa4054), closes [#2304](https://github.com/RDFLib/rdflib/issues/2304). @@ -103,7 +534,7 @@ though there are some differences. - Closes . -## fix: JSON-LD context construction from a `dict` (#2306) +### fix: JSON-LD context construction from a `dict` (#2306) Commit [832e693](https://github.com/RDFLib/rdflib/commit/832e693), closes [#2306](https://github.com/RDFLib/rdflib/issues/2306). @@ -118,7 +549,7 @@ input is used to construct a JSON-LD context. - Closes . -## fix: reference to global inside `get_target_namespace_elements` (#2311) +### fix: reference to global inside `get_target_namespace_elements` (#2311) Commit [4da67f9](https://github.com/RDFLib/rdflib/commit/4da67f9), closes [#2311](https://github.com/RDFLib/rdflib/issues/2311). @@ -130,7 +561,7 @@ that instead referencing the argument passed to the function. - Closes . -## fix: restore the 6.1.1 default bound namespaces (#2313) +### fix: restore the 6.1.1 default bound namespaces (#2313) Commit [57bb428](https://github.com/RDFLib/rdflib/commit/57bb428), closes [#2313](https://github.com/RDFLib/rdflib/issues/2313). @@ -147,7 +578,7 @@ To bind a reduced set of namespaces, the `bind_namespaces` parameter of - Closes . -## test: add `webtest` marker to tests that use the internet (#2295) +### test: add `webtest` marker to tests that use the internet (#2295) Commit [cfe6e37](https://github.com/RDFLib/rdflib/commit/cfe6e37), closes [#2295](https://github.com/RDFLib/rdflib/issues/2295). @@ -162,14 +593,14 @@ running the tests inside `firejail --net=none`. - Closes . 
-## chore: Update CONTRIBUTORS from commit history (#2305) +### chore: Update CONTRIBUTORS from commit history (#2305) Commit [1ab4fc0](https://github.com/RDFLib/rdflib/commit/1ab4fc0), closes [#2305](https://github.com/RDFLib/rdflib/issues/2305). This ensures contributors are credited. Also added .mailmap to fix early misattributed contributions. -## docs: fix typo in NamespaceManager documentation (#2291) +### docs: fix typo in NamespaceManager documentation (#2291) Commit [7a05c15](https://github.com/RDFLib/rdflib/commit/7a05c15), closes [#2291](https://github.com/RDFLib/rdflib/issues/2291). @@ -177,14 +608,14 @@ Commit [7a05c15](https://github.com/RDFLib/rdflib/commit/7a05c15), closes [#2291 Changed `cdterms` to `dcterms`, see for more info. -# 2023-03-18 RELEASE 6.3.1 +## 2023-03-18 RELEASE 6.3.1 This is a patch release that includes a singular user facing fix, which is the inclusion of the `test` directory in the `sdist` release artifact. The following sections describe the changes included in this version. -## build: explicitly specify `packages` in `pyproject.toml` (#2280) +### build: explicitly specify `packages` in `pyproject.toml` (#2280) Commit [334787b](https://github.com/RDFLib/rdflib/commit/334787b), closes [#2280](https://github.com/RDFLib/rdflib/issues/2280). @@ -196,14 +627,14 @@ and possibly other reasons. More changes may follow in a similar vein. -## build: include test in sdist (#2282) +### build: include test in sdist (#2282) Commit [e3884b7](https://github.com/RDFLib/rdflib/commit/e3884b7), closes [#2282](https://github.com/RDFLib/rdflib/issues/2282). A perhaps minor regression from earlier versions is that the sdist does not include the test folder, which makes it harder for downstreams to use a single source of truth to build and test a reliable package. This restores the test folder for sdists. 
-## docs: don't use kroki (#2284) +### docs: don't use kroki (#2284) Commit [bea782f](https://github.com/RDFLib/rdflib/commit/bea782f), closes [#2284](https://github.com/RDFLib/rdflib/issues/2284). @@ -217,18 +648,18 @@ I also added a task to the Taskfile to re-generate the SVG images from the PlantUML sources by calling docker. -# 2023-03-16 RELEASE 6.3.0 +## 2023-03-16 RELEASE 6.3.0 This is a minor release that includes bug fixes and features. -## Important Information +### Important Information - RDFLib will drop support for Python 3.7 when it becomes EOL on 2023-06-27, this will not be considered a breaking change, and RDFLib's major version number will not be changed solely on the basis of Python 3.7 support being dropped. -## User facing changes +### User facing changes This section lists changes that have a potential impact on users of RDFLib, changes with no user impact are not included in this section. @@ -435,7 +866,7 @@ changes with no user impact are not included in this section. Closed [issue #1844](https://github.com/RDFLib/rdflib/issues/1844). [PR #2267](https://github.com/RDFLib/rdflib/pull/2270). -## PRs merged since last release +### PRs merged since last release * fix: validation issues with examples [PR #2269](https://github.com/RDFLib/rdflib/pull/2269) @@ -609,11 +1040,11 @@ changes with no user impact are not included in this section. [PR #1968](https://github.com/RDFLib/rdflib/pull/1968) -# 2022-07-16 RELEASE 6.2.0 +## 2022-07-16 RELEASE 6.2.0 This is a minor release that includes bug fixes and features. -## User facing changes +### User facing changes This section lists changes that have a potential impact on users of RDFLib, changes with no user impact are not included in this section. @@ -804,7 +1235,7 @@ be a graph. 
- `rdflib.term` - `rdflib.parser` -## PRs merged since last release +### PRs merged since last release * Fallback to old `Store.bind` signature on `TypeError` [PR #2018](https://github.com/RDFLib/rdflib/pull/2018) @@ -1219,8 +1650,8 @@ be a graph. * Fix issue #936 HAVING clause with variable comparison not correctly evaluated [PR #1093](https://github.com/RDFLib/rdflib/pull/1093) -2021-12-20 RELEASE 6.1.1 -======================== +## 2021-12-20 RELEASE 6.1.1 + Better testing and tidier code. This is a semi-major release that: @@ -1329,12 +1760,12 @@ PRs merged since last release: * Export DCMITYPE [PR #1433](https://github.com/RDFLib/rdflib/pull/1433) -2021-12-20 RELEASE 6.1.0 -======================== +## 2021-12-20 RELEASE 6.1.0 + A slightly messed-up release of what is now 6.1.1. Do not use! -2021-10-10 RELEASE 6.0.2 -======================== +## 2021-10-10 RELEASE 6.0.2 + Minor release to add OWL.rational & OWL.real which are needed to allow the OWL-RL package to use only rdflib namespaces, not it's own versions. * Add owl:rational and owl:real to match standard. @@ -1356,8 +1787,8 @@ A few other small things have been added, see the following merged PRs list: [PR #1407](https://github.com/RDFLib/rdflib/pull/1407) -2021-09-17 RELEASE 6.0.1 -======================== +## 2021-09-17 RELEASE 6.0.1 + Minor release to fix a few small errors, in particular with JSON-LD parsing & serializing integration from rdflib-jsonld. Also, a few other niceties, such as allowing graph `add()`, `remove()` etc. to be chainable. * Add test for adding JSON-LD to guess_format() @@ -1379,8 +1810,8 @@ Minor release to fix a few small errors, in particular with JSON-LD parsing & se * Iterate over dataset return quads [PR #1382](https://github.com/RDFLib/rdflib/pull/1382) -2021-07-20 RELEASE 6.0.0 -======================== +## 2021-07-20 RELEASE 6.0.0 + 6.0.0 is a major stable release that drops support for Python 2 and Python 3 < 3.7. 
Type hinting is now present in much of the toolkit as a result. @@ -1644,8 +2075,8 @@ _**All PRs merged since 5.0.0:**_ [PR #1006](https://github.com/RDFLib/rdflib/pull/1006) -2020-04-18 RELEASE 5.0.0 -======================== +## 2020-04-18 RELEASE 5.0.0 + 5.0.0 is a major stable release and is the last release to support Python 2 & 3.4. 5.0.0 is mostly backwards- compatible with 4.2.2 and is intended for long-term, bug fix only support. @@ -1679,8 +2110,7 @@ _**All PRs merged since 5.0.0RC1:**_ [PR #1004](https://github.com/RDFLib/rdflib/pull/1004) -2020-04-04 RELEASE 5.0.0RC1 -=========================== +## 2020-04-04 RELEASE 5.0.0RC1 After more than three years, RDFLib 5.0.0rc1 is finally released. @@ -1864,13 +2294,12 @@ _**All issues closed and PRs merged since 4.2.2:**_ -2017-01-29 RELEASE 4.2.2 -======================== +## 2017-01-29 RELEASE 4.2.2 This is a bug-fix release, and the last release in the 4.X.X series. -Bug fixes: ----------- +### Bug fixes: + * SPARQL bugs fixed: * Fix for filters in sub-queries [#693](https://github.com/RDFLib/rdflib/pull/693) @@ -1985,8 +2414,8 @@ Bug fixes: [#573](https://github.com/RDFLib/rdflib/pull/573) -Enhancements: -------------- +### Enhancements: + * Added support for Python 3.5+ [#526](https://github.com/RDFLib/rdflib/pull/526) * More aliases for common formats (nt, turtle) @@ -2042,8 +2471,8 @@ Enhancements: [#529](https://github.com/RDFLib/rdflib/pull/529) -Cleanups: ---------- +### Cleanups: + * cleanup: SPARQL Prologue and Query new style classes [#566](https://github.com/RDFLib/rdflib/pull/566) * Reduce amount of warnings, especially closing opened file pointers @@ -2073,8 +2502,8 @@ Cleanups: [#593](https://github.com/RDFLib/rdflib/pull/593) -Testing improvements: ---------------------- +### Testing improvements: + * updating deprecated testing syntax [#697](https://github.com/RDFLib/rdflib/pull/697) * make test 375 more portable (use sys.executable rather than python) @@ -2090,8 +2519,8 @@ Testing 
improvements: [#598](https://github.com/RDFLib/rdflib/pull/598) -Doc improvements: ------------------ +### Doc improvements: + * Update list of builtin serialisers in docstring [#621](https://github.com/RDFLib/rdflib/pull/621) * Update reference to "Emulating container types" @@ -2106,13 +2535,12 @@ Doc improvements: -2015-08-12 RELEASE 4.2.1 -======================== +## 2015-08-12 RELEASE 4.2.1 This is a bug-fix release. -Minor enhancements: -------------------- +### Minor enhancements: + * Added a Networkx connector [#471](https://github.com/RDFLib/rdflib/pull/471), [#507](https://github.com/RDFLib/rdflib/pull/507) @@ -2124,8 +2552,8 @@ Minor enhancements: * Batch commits for `SPARQLUpdateStore` [#486](https://github.com/RDFLib/rdflib/pull/486) -Bug fixes: ----------- +### Bug fixes: + * Fixed bnode collision bug [#506](https://github.com/RDFLib/rdflib/pull/506), [#496](https://github.com/RDFLib/rdflib/pull/496), @@ -2167,8 +2595,8 @@ Bug fixes: * slight performance increase for graph.all_nodes() [#458](https://github.com/RDFLib/rdflib/pull/458) -Testing improvements: ---------------------- +### Testing improvements: + * travis: migrate to docker container infrastructure [#508](https://github.com/RDFLib/rdflib/pull/508) * test for narrow python builds (chars > 0xFFFF) (related to @@ -2189,8 +2617,7 @@ Testing improvements: [#472](https://github.com/RDFLib/rdflib/pull/472) -2015-02-19 RELEASE 4.2.0 -======================== +## 2015-02-19 RELEASE 4.2.0 This is a new minor version of RDFLib including a handful of new features: @@ -2219,8 +2646,7 @@ This is a new minor version of RDFLib including a handful of new features: * Python 3.4 fully supported [#418](https://github.com/RDFLib/rdflib/pull/418) -Minor enhancements & bugs fixed: --------------------------------- +### Minor enhancements & bugs fixed: * Fixed double invocation of 2to3 [#437](https://github.com/RDFLib/rdflib/pull/437) @@ -2251,16 +2677,14 @@ Minor enhancements & bugs fixed: 
[#383](https://github.com/RDFLib/rdflib/pull/383) -2014-04-15 RELEASE 4.1.2 -======================== +## 2014-04-15 RELEASE 4.1.2 This is a bug-fix release. * Fixed unicode/str bug in py3 for rdfpipe [#375](https://github.com/RDFLib/rdflib/issues/375) -2014-03-03 RELEASE 4.1.1 -======================== +## 2014-03-03 RELEASE 4.1.1 This is a bug-fix release. @@ -2285,8 +2709,7 @@ This will be the last RDFLib release to support python 2.5. * A bug in the manchester OWL syntax was fixed [#355](https://github.com/RDFLib/rdflib/issues/355) -2013-12-31 RELEASE 4.1 -====================== +## 2013-12-31 RELEASE 4.1 This is a new minor version RDFLib, which includes a handful of new features: @@ -2312,8 +2735,7 @@ This is a new minor version RDFLib, which includes a handful of new features: * After long deprecation, ```BackwardCompatibleGraph``` was removed. -Minor enhancements/bugs fixed: ------------------------------- +### Minor enhancements/bugs fixed: * Many code samples in the documentation were fixed thanks to @PuckCh @@ -2356,8 +2778,7 @@ Minor enhancements/bugs fixed: * Introduced ordering of ```QuotedGraphs``` [#291](https://github.com/RDFLib/rdflib/issues/291) -2013-05-22 RELEASE 4.0.1 -======================== +## 2013-05-22 RELEASE 4.0.1 Following RDFLib tradition, some bugs snuck into the 4.0 release. This is a bug-fixing release: @@ -2384,8 +2805,7 @@ This is a bug-fixing release: * Moved HTML5Lib dependency to the recently released 1.0b1 which support python3 -2013-05-16 RELEASE 4.0 -====================== +## 2013-05-16 RELEASE 4.0 This release includes several major changes: @@ -2447,8 +2867,7 @@ This release includes several major changes: for most features have been added. 
-Minor Changes: --------------- +### Minor Changes: * String operations on URIRefs return new URIRefs: ([#258](https://github.com/RDFLib/rdflib/issues/258)) ```py @@ -2468,8 +2887,7 @@ Minor Changes: * Several bugs in the TriG serializer were fixed * Several bugs in the NQuads parser were fixed -2013-03-01 RELEASE 3.4 -====================== +## 2013-03-01 RELEASE 3.4 This release introduced new parsers for structured data in HTML. In particular formats: hturtle, rdfa, mdata and an auto-detecting @@ -2496,8 +2914,7 @@ with any cpython from 2.5 through 3.3. * allow read-only sleepycat stores * language tag parsing in N3/Turtle fixes to allow several subtags. -2012-10-10 RELEASE 3.2.3 -======================== +## 2012-10-10 RELEASE 3.2.3 Almost identical to 3.2.2 A stupid bug snuck into 3.2.2, and querying graphs were broken. @@ -2505,8 +2922,7 @@ A stupid bug snuck into 3.2.2, and querying graphs were broken. * Fixes broken querying ([#234](https://github.com/RDFLib/rdflib/issues/234)) * graph.transitiveClosure now works with loops ([#206](https://github.com/RDFLib/rdflib/issues/206)) -2012-09-25 RELEASE 3.2.2 -======================== +## 2012-09-25 RELEASE 3.2.2 This is mainly a maintenance release. @@ -2528,8 +2944,7 @@ Changes: * Illegal BNode IDs removed from NT output: ([#212](https://github.com/RDFLib/rdflib/issues/212)) * and more minor bug fixes that had no issues -2012-04-24 RELEASE 3.2.1 -======================== +## 2012-04-24 RELEASE 3.2.1 This is mainly a maintenance release. 
@@ -2544,8 +2959,7 @@ Changes: * Consistent toPyton() for all node objects ([#174](https://github.com/RDFLib/rdflib/issues/174)) * Better random BNode ID in multi-thread environments ([#185](https://github.com/RDFLib/rdflib/issues/185)) -2012-01-19 RELEASE 3.2.0 -======================== +## 2012-01-19 RELEASE 3.2.0 Major changes: * Thanks to Thomas Kluyver, rdflib now works under python3, @@ -2569,8 +2983,7 @@ Major changes: serialize(format="pretty-xml") fails on cyclic links -2011-03-17 RELEASE 3.1.0 -======================== +## 2011-03-17 RELEASE 3.1.0 Fixed a range of minor issues: @@ -2655,8 +3068,7 @@ Fixed a range of minor issues: Store SPARQL Support -2010-05-13 RELEASE 3.0.0 -======================== +## 2010-05-13 RELEASE 3.0.0 Working test suite with all tests passing. @@ -2696,8 +3108,7 @@ Fixed interoperability issue with plugin module. Fixed use of Deprecated functionality. -2009-03-30 RELEASE 2.4.1 -======================== +## 2009-03-30 RELEASE 2.4.1 Fixed Literal comparison case involving Literal's with datatypes of XSD.base64Binary. @@ -2712,8 +3123,7 @@ Fixed Literal repr to handle apostrophes correctly (issue #28). Fixed Literal's repr to be consistent with its ```__init__``` (issue #33). -2007-04-04 RELEASE 2.4.0 -======================== +## 2007-04-04 RELEASE 2.4.0 Improved Literal comparison / equality @@ -2884,8 +3294,7 @@ Added BerkeleyDB store implementation. Merged TextIndex from michel-events branch. -2006-10-15 RELEASE 2.3.3 -======================== +## 2006-10-15 RELEASE 2.3.3 Added TriXParser, N3Serializer and TurtleSerializer. @@ -2920,8 +3329,7 @@ various patches for the testsuite - http://rdflib.net/pipermail/dev/2006-September/000069.html -2006-08-01 RELEASE 2.3.2 -======================== +## 2006-08-01 RELEASE 2.3.2 Added SPARQL query support. @@ -2963,8 +3371,7 @@ Added more test cases. Cleaned up source code to follow pep8 / pep257. 
-2006-02-27 RELEASE 2.3.1 -======================== +## 2006-02-27 RELEASE 2.3.1 Added save method to BackwardCompatibleGraph so that example.py etc work again. @@ -2997,8 +3404,7 @@ Fixed RDF/XML serializer so that it does not choke on n3 bits (rather it'll just ignore them) -2005-12-23 RELEASE 2.3.0 -======================== +## 2005-12-23 RELEASE 2.3.0 See http://rdflib.net/2.3.0/ for most up-to-date release notes @@ -3033,8 +3439,7 @@ all executed, the store will remain in an invalid state, but it provides Atomicity in the best case scenario. -2005-10-10 RELEASE 2.2.3 -======================== +## 2005-10-10 RELEASE 2.2.3 Fixed BerkeleyDB backend to commit after an add and remove. This should help just a bit with those unclean @@ -3051,8 +3456,7 @@ now supports an empty constructor and an open method that takes a configuration string. -2005-09-10 RELEASE 2.2.2 -======================== +## 2005-09-10 RELEASE 2.2.2 Applied patch from inkel to add encoding argument to all serialization related methods. @@ -3077,16 +3481,14 @@ Fixed setup.py so that install does not try to uninstall command if one needs to uninstall. -2005-08-25 RELEASE 2.2.1 -======================== +## 2005-08-25 RELEASE 2.2.1 Fixed issue regarding Python2.3 compatibility. Fixed minor issue with URIRef's absolute method. -2005-08-12 RELEASE 2.1.4 -======================== +## 2005-08-12 RELEASE 2.1.4 Added optional base argument to URIRef. @@ -3101,8 +3503,7 @@ Included pyparsing (pyparsing.sourceforge.net) for sparql parsing. Added attribute support to namespaces. -2005-06-28 RELEASE 2.1.3 -======================== +## 2005-06-28 RELEASE 2.1.3 Added Ivan's sparql-p implementation. @@ -3118,8 +3519,7 @@ Fixed BNode's n3 serialization bug (recently introduced). Fixed a collections related bug. -2005-05-13 RELEASE 2.1.2 -======================== +## 2005-05-13 RELEASE 2.1.2 Added patch from Sidnei da Silva that adds a sqlobject based backend. 
@@ -3129,8 +3529,7 @@ Fixed bug in RDF/XML parser where empty collections where causing exceptions. -2005-05-01 RELEASE 2.1.1 -======================== +## 2005-05-01 RELEASE 2.1.1 Fixed a number of bugs relating to 2.0 backward compatibility. @@ -3143,8 +3542,7 @@ Added check_context to Graph. Added patch the improves IOMemory implementation. -2005-04-12 RELEASE 2.1.0 -======================== +## 2005-04-12 RELEASE 2.1.0 Merged TripleStore and InformationStore into Graph. @@ -3163,8 +3561,7 @@ Added notion of NamespaceManager. Added couple new backends, IOMemory and ZODB. -2005-03-19 RELEASE 2.0.6 -======================== +## 2005-03-19 RELEASE 2.0.6 Added pretty-xml serializer (inlines BNodes where possible, typed nodes, Collections). @@ -3182,16 +3579,14 @@ Changed InMemoryBackend to update third index in the same style it does the first two. -2005-01-08 RELEASE 2.0.5 -======================== +## 2005-01-08 RELEASE 2.0.5 Added publicID argument to Store's load method. Added RDF and RDFS to top level rdflib package. -2004-10-14 RELEASE 2.0.4 -======================== +## 2004-10-14 RELEASE 2.0.4 Removed unfinished functionality. @@ -3201,8 +3596,7 @@ defined for the rdf namespace (causing an assertion to fail). Fixed bug in serializer where nodeIDs were not valid NCNames. -2004-04-21 RELEASE 2.0.3 -======================== +## 2004-04-21 RELEASE 2.0.3 Added missing "from __future__ import generators" statement to InformationStore. @@ -3221,8 +3615,7 @@ Context now goes through InformationStore (was bypassing it going directly to backend). -2004-03-22 RELEASE 2.0.2 -======================== +## 2004-03-22 RELEASE 2.0.2 Improved performance of Identifier equality tests. @@ -3238,8 +3631,7 @@ Fixed bug recently introduced into InformationStore's remove method. -2004-03-15 RELEASE 2.0.1 -======================== +## 2004-03-15 RELEASE 2.0.1 Fixed a bug in the SleepyCatBackend multi threaded concurrency support. 
(Tested fairly extensively under the following @@ -3259,8 +3651,7 @@ responsible for implementing ```__len__```. Context objects now have a identifier property. -2004-03-10 RELEASE 2.0.0 -======================== +## 2004-03-10 RELEASE 2.0.0 Fixed a few bugs in the SleepyCatBackend multi process concurrency support. @@ -3274,8 +3665,7 @@ Added ```__iadd__``` method to Store in support of store += another_store. -2004-01-04 RELEASE 1.3.2 -======================== +## 2004-01-04 RELEASE 1.3.2 Added a serialization dispatcher. @@ -3287,8 +3677,7 @@ Backends are now more pluggable ... -2003-10-14 RELEASE 1.3.1 -======================== +## 2003-10-14 RELEASE 1.3.1 Fixed bug in serializer where triples where only getting serialized the first time. @@ -3306,8 +3695,7 @@ Changed rdf:RDF to be optional to conform with latest spec. Fixed handling of XMLLiterals -2003-04-40 RELEASE 1.3.0 -======================== +## 2003-04-40 RELEASE 1.3.0 Removed bag_id support and added it to OLD_TERMS. @@ -3321,8 +3709,7 @@ Added a KDTreeStore and RedlandStore backends. Added a StoreTester. -2003-02-28 RELEASE 1.2.4 -======================== +## 2003-02-28 RELEASE 1.2.4 Fixed bug in SCBackend where language and datatype information where being ignored. @@ -3335,8 +3722,7 @@ async_load now adds more http header and error information to the InformationStore. -2003-02-11 RELEASE 1.2.3 -======================== +## 2003-02-11 RELEASE 1.2.3 Fixed bug in load methods where relative URLs where not being absolutized correctly on Windows. @@ -3345,8 +3731,7 @@ Fixed serializer so that it throws an exception when trying to serialize a graph with a predicate that can not be split. -2003-02-07 RELEASE 1.2.2 -======================== +## 2003-02-07 RELEASE 1.2.2 Added an exists method to the BackwardCompatibility mixin. @@ -3355,8 +3740,7 @@ to the BackwardCompatility mixin for TripleStores that take an s, p, o as opposed to an (s, p, o). 
-2003-02-03 RELEASE 1.2.1 -======================== +## 2003-02-03 RELEASE 1.2.1 Added support for parsing XMLLiterals. @@ -3368,8 +3752,7 @@ Fixed remaining rdfcore test cases that where not passing. Fixed windows bug in AbstractInformationStore's run method. -2003-01-02 RELEASE 1.2.0 -======================== +## 2003-01-02 RELEASE 1.2.0 Added systemID, line #, and column # to error messages. @@ -3381,8 +3764,7 @@ Added an asynchronous load method, methods for scheduling context updates, and a run method. -2002-12-16 RELEASE 1.1.5 -======================== +## 2002-12-16 RELEASE 1.1.5 Introduction of InformationStore, a TripleStore with the addition of context support. @@ -3394,8 +3776,7 @@ Fixed bug in parser that was introduced in last release regaurding unqualified names. -2002-12-10 RELEASE 1.1.4 -======================== +## 2002-12-10 RELEASE 1.1.4 Interface realigned with last stable release. @@ -3416,8 +3797,7 @@ rdflib.BTreeTripleStore.BTreeTripleStore. Minor reorganization of mix-in classes. -2002-12-03 RELEASE 1.1.3 -======================== +## 2002-12-03 RELEASE 1.1.3 BNodes now created with a more unique identifier so BNodes from different sessions do not collide. @@ -3432,8 +3812,7 @@ type for ```__getitem__```. Instead there is now a get(predicate, default) method. -2002-11-21 RELEASE 1.1.2 -======================== +## 2002-11-21 RELEASE 1.1.2 Fixed Literal's ```__eq__``` method so that Literal('foo')=='foo' etc. @@ -3441,8 +3820,7 @@ Fixed Resource's ```__setitem__``` method so that it does not raise a dictionary changed size while iterating exception. -2002-11-09 RELEASE 1.1.1 -======================== +## 2002-11-09 RELEASE 1.1.1 Resource is now a special kind of URIRef @@ -3451,10 +3829,9 @@ return type in default case. -2002-11-05 RELEASE 1.1.0 -======================== +## 2002-11-05 RELEASE 1.1.0 -# A new development branch +### A new development branch Cleaned up interface and promoted it to SIR: Simple Interface for RDF. 
@@ -3472,11 +3849,9 @@ Cleaned up interfaces for load/parse: removed generate_path from loadsave andrenamed parse_URI to parse. -2002-10-08 RELEASE 0.9.6 -======================== +## 2002-10-08 RELEASE 0.9.6 - -# The end of a development branch +### The end of a development branch BNode can now be created with specified value. @@ -3498,9 +3873,7 @@ This functionality is still experimental Consecutive Collections now parse correctly. -2002-08-06 RELEASE 0.9.5 -======================== - +## 2002-08-06 RELEASE 0.9.5 Added support for rdf:parseType="Collection" @@ -3520,9 +3893,7 @@ Added a little more to example.py Removed generate_uri since we have BNodes now. -2002-07-29 RELEASE 0.9.4 -======================== - +## 2002-07-29 RELEASE 0.9.4 Added support for proposed rdf:nodeID to both the parser and serializer. @@ -3533,9 +3904,7 @@ possible. Added partial support for XML Literal parseTypes. -2002-07-16 RELEASE 0.9.3 -======================== - +## 2002-07-16 RELEASE 0.9.3 Fixed bug where bNodes where being created for nested property elements when they where not supposed to be. @@ -3548,9 +3917,7 @@ Added missing check for valid attribute names in the case of production 5.18 of latest WD spec. -2002-07-05 RELEASE 0.9.2 -======================== - +## 2002-07-05 RELEASE 0.9.2 Added missing constants for SUBPROPERTYOF, ISDEFINEDBY. @@ -3559,9 +3926,7 @@ Added test case for running all of the rdf/xml test cases. Reimplemented rdf/xml parser to conform to latest WD. -2002-06-10 RELEASE 0.9.1 -======================== - +## 2002-06-10 RELEASE 0.9.1 There is now a remove and a remove_triples (no more overloaded remove). @@ -3584,7 +3949,6 @@ Added N-Triples parser. Added ```__len__``` and ```__eq__``` methods to store interface. -2002-06-04 RELEASE 0.9.0 -======================== +## 2002-06-04 RELEASE 0.9.0 Initial release after being split from redfootlib. 
diff --git a/CITATION.cff b/CITATION.cff new file mode 100644 index 000000000..2e5095a5b --- /dev/null +++ b/CITATION.cff @@ -0,0 +1,75 @@ +cff-version: 1.2.0 +message: "If you use this software, please cite it as below." +authors: +- family-names: "Krech" + given-names: "Daniel" +- family-names: "Grimnes" + given-names: "Gunnar AAstrand" +- family-names: "Higgins" + given-names: "Graham" +- family-names: "Hees" + given-names: "Jörn" + orcid: "https://orcid.org/0000-0002-0084-8998" +- family-names: "Aucamp" + given-names: "Iwan" + orcid: "https://orcid.org/0000-0002-7325-3231" +- family-names: "Lindström" + given-names: "Niklas" +- family-names: "Arndt" + given-names: "Natanael" + orcid: "https://orcid.org/0000-0002-8130-8677" +- family-names: "Sommer" + given-names: "Ashley" + orcid: "https://orcid.org/0000-0003-0590-0131" +- family-names: "Chuc" + given-names: "Edmond" + orcid: "https://orcid.org/0000-0002-6047-9864" +- family-names: "Herman" + given-names: "Ivan" + orcid: "https://orcid.org/0000-0003-0782-2704" +- family-names: "Nelson" + given-names: "Alex" +- family-names: "McCusker" + given-names: "Jamie" + orcid: "https://orcid.org/0000-0003-1085-6059" +- family-names: "Gillespie" + given-names: "Tom" +- family-names: "Kluyver" + given-names: "Thomas" + orcid: "https://orcid.org/0000-0003-4020-6364" +- family-names: "Ludwig" + given-names: "Florian" +- family-names: "Champin" + given-names: "Pierre-Antoine" + orcid: "https://orcid.org/0000-0001-7046-4474" +- family-names: "Watts" + given-names: "Mark" +- family-names: "Holzer" + given-names: "Urs" +- family-names: "Summers" + given-names: "Ed" +- family-names: "Morriss" + given-names: "Whit" +- family-names: "Winston" + given-names: "Donny" +- family-names: "Perttula" + given-names: "Drew" +- family-names: "Kovacevic" + given-names: "Filip" + orcid: "https://orcid.org/0000-0002-2854-0434" +- family-names: "Chateauneu" + given-names: "Remi" + orcid: "https://orcid.org/0000-0002-7505-8149" +- family-names: 
"Solbrig" + given-names: "Harold" + orcid: "https://orcid.org/0000-0002-5928-3071" +- family-names: "Cogrel" + given-names: "Benjamin" + orcid: "https://orcid.org/0000-0002-7566-4077" +- family-names: "Stuart" + given-names: "Veyndan" +title: "RDFLib" +version: 7.0.0 +date-released: 2023-08-02 +url: "https://github.com/RDFLib/rdflib" +doi: 10.5281/zenodo.6845245 diff --git a/README.md b/README.md index c654a2700..d48f211f6 100644 --- a/README.md +++ b/README.md @@ -43,8 +43,10 @@ Help with maintenance of all of the RDFLib family of packages is always welcome ## Versions & Releases -* `7.0.0a0` current `main` branch and supports Python 3.8.1+ only. -* `6.x.y` current release and support Python 3.7+ only. Many improvements over 5.0.0 +* `7.1.0a0` current `main` branch. +* `7.x.y` current release, supports Python 3.8.1+ only. + * see [Releases](https://github.com/RDFLib/rdflib/releases) +* `6.x.y` supports Python 3.7+ only. Many improvements over 5.0.0 * see [Releases](https://github.com/RDFLib/rdflib/releases) * `5.x.y` supports Python 2.7 and 3.4+ and is [mostly backwards compatible with 4.2.2](https://rdflib.readthedocs.io/en/stable/upgrade4to5.html). @@ -58,10 +60,14 @@ The stable release of RDFLib may be installed with Python's package management t $ pip install rdflib +Some features of RDFLib require optional dependencies which may be installed using *pip* extras: + + $ pip install rdflib[berkeleydb,networkx,html,lxml] + Alternatively manually download the package from the Python Package Index (PyPI) at https://pypi.python.org/pypi/rdflib -The current version of RDFLib is 6.3.2, see the ``CHANGELOG.md`` file for what's new in this release. +The current version of RDFLib is 7.0.0, see the ``CHANGELOG.md`` file for what's new in this release. 
### Installation of the current main branch (for developers) diff --git a/Taskfile.yml b/Taskfile.yml index b2febc570..2b9582f5f 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -35,6 +35,7 @@ vars: PIP_COMPILE: pip-compile DOCKER: docker OCI_REFERENCE: ghcr.io/rdflib/rdflib + MYPY_VARIANT: '{{ env "MYPY_VARIANT" | default "mypy" }}' tasks: install:system-deps: desc: Install system dependencies @@ -110,17 +111,10 @@ tasks: desc: Run tests cmds: - '{{.TEST_HARNESS}}{{.RUN_PREFIX}} pytest {{if (mustFromJson .WITH_COVERAGE)}}--cov --cov-report={{end}} {{.CLI_ARGS}}' - flake8: - desc: Run flake8 + ruff: + desc: Run ruff cmds: - - | - if {{.VENV_PYTHON}} -c 'import importlib.util; exit(0 if importlib.util.find_spec("flakeheaven") is not None else 1)' - then - 1>&2 echo "running flakeheaven" - {{.VENV_PYTHON}} -m flakeheaven lint {{.CLI_ARGS}} - else - 1>&2 echo "skipping flakeheaven as it is not installed, likely because python version is older than 3.8" - fi + - '{{.RUN_PREFIX}} ruff check {{if (mustFromJson (.FIX | default "false"))}}--fix {{end}}{{.CLI_ARGS | default "."}}' black: desc: Run black cmds: @@ -130,24 +124,30 @@ tasks: cmds: - '{{.VENV_PYTHON}} -m isort {{if (mustFromJson (.CHECK | default "false"))}}--check --diff {{end}}{{.CLI_ARGS | default "."}}' mypy: + desc: Run mypy + cmds: + - task: "mypy:{{ .MYPY_VARIANT }}" + mypy:mypy: desc: Run mypy cmds: - "{{.VENV_PYTHON}} -m mypy --show-error-context --show-error-codes {{.CLI_ARGS}}" - + mypy:dmypy: + desc: Run dmypy + cmds: + - "{{.RUN_PREFIX}} dmypy run {{.CLI_ARGS}}" lint:fix: desc: Fix auto-fixable linting errors cmds: - - task: isort + - task: ruff + vars: { FIX: "true" } - task: black lint: desc: Perform linting cmds: - - task: isort - vars: { CHECK: true } - task: black - vars: { CHECK: true } - - task: flake8 + vars: { CHECK: "true" } + - task: ruff validate:static: desc: Perform static validation @@ -250,20 +250,20 @@ tasks: - task: install:system-deps - task: install:tox vars: - 
WITH_GITHUB_ACTIONS: true + WITH_GITHUB_ACTIONS: "true" - cmd: "{{.PYTHON}} -m pip install coveralls" - task: tox vars: COVERAGE_FILE: ".coverage" - cmd: coveralls - gha:flake8: - desc: GitHub Actions flake8 workflow + gha:lint: + desc: GitHub Actions lint workflow cmds: - task: poetry:configure vars: - CLI_ARGS: --no-root --only=flake8 - - task: flake8 + CLI_ARGS: --no-root --only=lint + - task: ruff cmd:rdfpipe: desc: Run rdfpipe diff --git a/devtools/diffrtpy.py b/devtools/diffrtpy.py index 6da8bc37b..1d4b09722 100755 --- a/devtools/diffrtpy.py +++ b/devtools/diffrtpy.py @@ -17,6 +17,7 @@ Then attach ``/var/tmp/compact.diff`` to the PR. """ +from __future__ import annotations import argparse import logging diff --git a/devtools/requirements-poetry.in b/devtools/requirements-poetry.in index d179dac98..51526d194 100644 --- a/devtools/requirements-poetry.in +++ b/devtools/requirements-poetry.in @@ -1,3 +1,3 @@ # Fixing this here as readthedocs can't use the compiled requirements-poetry.txt # due to conflicts. -poetry==1.5.1 +poetry==1.8.3 diff --git a/docker/latest/Dockerfile b/docker/latest/Dockerfile index 29e44d715..2a3b701bb 100644 --- a/docker/latest/Dockerfile +++ b/docker/latest/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.4-slim@sha256:53a67c012da3b807905559fa59fac48a3a68600d73c5da10c2f0d8adc96dbd01 +FROM docker.io/library/python:3.12.4-slim@sha256:52f92c54e879539342692d20a4bea99516d4a2eb3cd16dfbb4e4b964aa8becaa COPY docker/latest/requirements.txt /var/tmp/build/ diff --git a/docker/latest/requirements.in b/docker/latest/requirements.in index 93e26ac09..99c4ce22d 100644 --- a/docker/latest/requirements.in +++ b/docker/latest/requirements.in @@ -1,4 +1,4 @@ # This file is used for building a docker image of hte latest rdflib release. It # will be updated by dependabot when new releases are made. 
-rdflib==6.3.2 +rdflib==7.0.0 html5lib diff --git a/docker/latest/requirements.txt b/docker/latest/requirements.txt index 5b3bae5d3..aeaa35cea 100644 --- a/docker/latest/requirements.txt +++ b/docker/latest/requirements.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile with Python 3.11 # by the following command: # -# pip-compile --resolver=backtracking docker/latest/requirements.in +# pip-compile --config=pyproject.toml docker/latest/requirements.in # html5lib==1.1 # via -r docker/latest/requirements.in @@ -10,7 +10,7 @@ isodate==0.6.1 # via rdflib pyparsing==3.0.9 # via rdflib -rdflib==6.3.2 +rdflib==7.0.0 # via -r docker/latest/requirements.in six==1.16.0 # via diff --git a/docker/unstable/Dockerfile b/docker/unstable/Dockerfile index d76f15cd6..57ca7512c 100644 --- a/docker/unstable/Dockerfile +++ b/docker/unstable/Dockerfile @@ -1,4 +1,4 @@ -FROM docker.io/library/python:3.11.4-slim@sha256:53a67c012da3b807905559fa59fac48a3a68600d73c5da10c2f0d8adc96dbd01 +FROM docker.io/library/python:3.12.4-slim@sha256:52f92c54e879539342692d20a4bea99516d4a2eb3cd16dfbb4e4b964aa8becaa # This file is generated from docker:unstable in Taskfile.yml COPY var/requirements.txt /var/tmp/build/ diff --git a/docs/CONTRIBUTING.md b/docs/CONTRIBUTING.md index dfbb00e1d..6efab1a0b 100644 --- a/docs/CONTRIBUTING.md +++ b/docs/CONTRIBUTING.md @@ -40,7 +40,8 @@ Some ways in which you can contribute to RDFLib are: [![Gitter](https://badges.gitter.im/RDFLib/rdflib.svg)](https://gitter.im/RDFLib/rdflib?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) [![Matrix](https://img.shields.io/matrix/rdflib:matrix.org?label=matrix.org%20chat)](https://matrix.to/#/#RDFLib_rdflib:gitter.im) - Participate in GitHub discussions: [![GitHub Discussions](https://img.shields.io/github/discussions/RDFLib/rdflib)](https://github.com/RDFLib/rdflib/discussions) -- Fix flake8 failures. +- Fix linting failures (see ruff settings in `pyproject.toml` and `# + noqa:` directives in the codebase). 
## Pull Requests diff --git a/docs/changelog.md b/docs/changelog.md new file mode 100644 index 000000000..63ae71beb --- /dev/null +++ b/docs/changelog.md @@ -0,0 +1,4 @@ +# Changelog + +```{include} ../CHANGELOG.md +``` diff --git a/docs/conf.py b/docs/conf.py index 93d78d8a0..2cac915ed 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- -# # rdflib documentation build configuration file, created by # sphinx-quickstart on Fri May 15 15:03:54 2009. # @@ -11,6 +9,7 @@ # All configuration values have a default; values that are commented out # serve to show the default. # https://www.sphinx-doc.org/en/master/usage/configuration.html +from __future__ import annotations import logging import os @@ -275,62 +274,17 @@ def find_version(filename): nitpicky = True -if sphinx_version < (5,): - # Being nitpicky on Sphinx 4.x causes lots of problems. - logging.warning( - "disabling nitpicky because sphinx is too old: %s", sphinx.__version__ - ) - nitpicky = False - nitpick_ignore = [ ("py:class", "urllib.response.addinfourl"), - ("py:data", "typing.Literal"), - ("py:class", "typing.IO[bytes]"), - ("py:class", "http.client.HTTPMessage"), ("py:class", "importlib.metadata.EntryPoint"), ("py:class", "xml.dom.minidom.Document"), ("py:class", "xml.dom.minidom.DocumentFragment"), ("py:class", "isodate.duration.Duration"), - # sphinx-autodoc-typehints has some issues with TypeVars. - # https://github.com/tox-dev/sphinx-autodoc-typehints/issues/39 - ("py:class", "rdflib.plugin.PluginT"), - # sphinx-autodoc-typehints does not like generic parmaeters in inheritance it seems - ("py:class", "Identifier"), - # These are related to pyparsing. 
- ("py:class", "Diagnostics"), - ("py:class", "ParseAction"), - ("py:class", "ParseFailAction"), ("py:class", "pyparsing.core.TokenConverter"), ("py:class", "pyparsing.results.ParseResults"), - # These are related to BerkeleyDB - ("py:class", "db.DBEnv"), + ("py:class", "pyparsing.core.ParserElement"), ] -if sys.version_info < (3, 9): - nitpick_ignore.extend( - [ - ("py:class", "_ContextIdentifierType"), - ("py:class", "_ContextType"), - ("py:class", "_GraphT"), - ("py:class", "_NamespaceSetString"), - ("py:class", "_ObjectType"), - ("py:class", "_PredicateType"), - ("py:class", "_QuadSelectorType"), - ("py:class", "_SubjectType"), - ("py:class", "_TripleOrPathTripleType"), - ("py:class", "_TripleOrQuadPathPatternType"), - ("py:class", "_TripleOrQuadPatternType"), - ("py:class", "_TriplePathPatternType"), - ("py:class", "_TriplePathType"), - ("py:class", "_TriplePatternType"), - ("py:class", "_TripleSelectorType"), - ("py:class", "_TripleType"), - ("py:class", "_TripleOrTriplePathType"), - ("py:class", "TextIO"), - ("py:class", "Message"), - ] - ) - def autodoc_skip_member_handler( app: sphinx.application.Sphinx, diff --git a/docs/developers.rst b/docs/developers.rst index d6cc67e2e..7e00dc950 100644 --- a/docs/developers.rst +++ b/docs/developers.rst @@ -160,9 +160,9 @@ Any new functionality being added to RDFLib *must* have unit tests and should have doc tests supplied. Typically, you should add your functionality and new tests to a branch of -RDFlib and and run all tests locally and see them pass. There are currently -close to 4,000 tests with a few extra expected failures and skipped tests. -We won't allow Pull Requests that break any of the existing tests. +RDFlib and run all tests locally and see them pass. There are currently +close to 4,000 tests, with some expected failures and skipped tests. +We won't merge pull requests unless the test suite completes successfully. 
Tests that you add should show how your new feature or bug fix is doing what you say it is doing: if you remove your enhancement, your new tests should fail! @@ -313,13 +313,13 @@ makes it easier to run validation on all supported python versions. tox -a # Run a specific environment. - tox -e py37 # default environment with py37 + tox -e py38 # default environment with py38 tox -e py39-extra # extra tests with py39 # Override the test command. # the below command will run `pytest test/test_translate_algebra.py` # instead of the default pytest command. - tox -e py37,py39 -- pytest test/test_translate_algebra.py + tox -e py38,py39 -- pytest test/test_translate_algebra.py ``go-task`` and ``Taskfile.yml`` @@ -345,7 +345,7 @@ Some useful commands for working with the task in the taskfile is given below: task validate # Build docs - task docs:build + task docs # Run live-preview on the docs task docs:live-server @@ -448,6 +448,7 @@ Releasing Create a release-preparation pull request with the following changes: +* Updated version and date in ``CITATION.cff``. * Updated copyright year in the ``LICENSE`` file. * Updated copyright year in the ``docs/conf.py`` file. * Updated main branch version and current version in the ``README.md`` file. 
The @@ -468,8 +469,22 @@ Once the PR is merged, switch to the main branch, build the release and upload i # Build artifacts poetry build - # Check that the built wheel works correctly: - pipx run --spec "$(readlink -f dist/rdflib*.whl)" rdfpipe --version + # Verify package metadata + bsdtar -xvf dist/rdflib-*.whl -O '*/METADATA' | view - + bsdtar -xvf dist/rdflib-*.tar.gz -O '*/PKG-INFO' | view - + + # Check that the built wheel and sdist works correctly: + pipx run --no-cache --spec "$(readlink -f dist/rdflib*.whl)" rdfpipe --version + pipx run --no-cache --spec "$(readlink -f dist/rdflib*.whl)" rdfpipe https://github.com/RDFLib/rdflib/raw/main/test/data/defined_namespaces/rdfs.ttl + pipx run --no-cache --spec "$(readlink -f dist/rdflib*.tar.gz)" rdfpipe --version + pipx run --no-cache --spec "$(readlink -f dist/rdflib*.tar.gz)" rdfpipe https://github.com/RDFLib/rdflib/raw/main/test/data/defined_namespaces/rdfs.ttl + + # Dry run publishing + poetry publish --repository=testpypi --dry-run + poetry publish --dry-run + + # Publish to TestPyPI + poetry publish --repository=testpypi # Publish to PyPI poetry publish @@ -484,7 +499,7 @@ uploaded to the release as release artifacts. The resulting release will be available at https://github.com/RDFLib/rdflib/releases/tag/6.3.1 -Once this is done announce the release at the following locations: +Once this is done, announce the release at the following locations: * Twitter: Just make a tweet from your own account linking to the latest release. * RDFLib mailing list. 
diff --git a/docs/gettingstarted.rst b/docs/gettingstarted.rst index 44307ae8a..b3ee9572f 100644 --- a/docs/gettingstarted.rst +++ b/docs/gettingstarted.rst @@ -149,7 +149,7 @@ A SPARQL query example from rdflib import Graph - # Create a Graph, pare in Internet data + # Create a Graph, parse in Internet data g = Graph().parse("http://www.w3.org/People/Berners-Lee/card") # Query the data in g using SPARQL diff --git a/docs/index.rst b/docs/index.rst index 9d130501e..ad6e7c00d 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -66,6 +66,7 @@ If you are familiar with RDF and are looking for details on how RDFLib handles i namespaces_and_bindings persistence merging + changelog upgrade6to7 upgrade5to6 upgrade4to5 diff --git a/docs/intro_to_parsing.rst b/docs/intro_to_parsing.rst index 4593aa7d9..8b011c53f 100644 --- a/docs/intro_to_parsing.rst +++ b/docs/intro_to_parsing.rst @@ -116,7 +116,7 @@ Working with multi-graphs ------------------------- To read and query multi-graphs, that is RDF data that is context-aware, you need to use rdflib's -:class:`rdflib.ConjunctiveGraph` or :class:`rdflib.Dataset` class. These are extensions to :class:`rdflib.Graph` that +:class:`rdflib.Dataset` class. This is an extension to :class:`rdflib.Graph` that know all about quads (triples + graph IDs). If you had this multi-graph data file (in the ``trig`` format, using new-style ``PREFIX`` statement (not the older diff --git a/docs/merging.rst b/docs/merging.rst index d0e5f5e51..1721d9206 100644 --- a/docs/merging.rst +++ b/docs/merging.rst @@ -25,7 +25,7 @@ In RDFLib, blank nodes are given unique IDs when parsing, so graph merging can b ``graph`` now contains the merged graph of ``input1`` and ``input2``. -.. 
note:: However, the set-theoretic graph operations in RDFLib are assumed to be performed in sub-graphs of some larger data-base (for instance, in the context of a :class:`~rdflib.graph.ConjunctiveGraph`) and assume shared blank node IDs, and therefore do NOT do *correct* merging, i.e.:: +.. note:: However, the set-theoretic graph operations in RDFLib are assumed to be performed in sub-graphs of some larger data-base (for instance, in the context of a :class:`~rdflib.graph.Dataset`) and assume shared blank node IDs, and therefore do NOT do *correct* merging, i.e.:: from rdflib import Graph diff --git a/docs/persistence.rst b/docs/persistence.rst index 43c561768..ca7449ed5 100644 --- a/docs/persistence.rst +++ b/docs/persistence.rst @@ -70,9 +70,9 @@ Additional store plugins More store implementations are available in RDFLib extension projects: - * `rdflib-sqlalchemy `_, which supports stored on a wide-variety of RDBMs backends, - * `rdflib-leveldb `_ - a store on to of Google's `LevelDB `_ key-value store. - * `rdflib-kyotocabinet `_ - a store on to of the `Kyoto Cabinet `_ key-value store. + * `rdflib-sqlalchemy `_ – a store which supports a wide-variety of RDBMS backends, + * `rdflib-leveldb `_ – a store on top of Google's `LevelDB `_ key-value store. + * `rdflib-kyotocabinet `_ – a store on top of the `Kyoto Cabinet `_ key-value store. Example ^^^^^^^ diff --git a/docs/plugin_parsers.rst b/docs/plugin_parsers.rst index f199b9226..7b3c2a568 100644 --- a/docs/plugin_parsers.rst +++ b/docs/plugin_parsers.rst @@ -32,9 +32,8 @@ xml :class:`~rdflib.plugins.parsers.rdfxml.RDFXMLParser` Multi-graph IDs --------------- -Note that for correct parsing of multi-graph data, e.g. 
Trig, HexT, etc., into a ``ConjunctiveGraph`` or a ``Dataset``, -as opposed to a context-unaware ``Graph``, you will need to set the ``publicID`` of the ``ConjunctiveGraph`` a -``Dataset`` to the identifier of the ``default_context`` (default graph), for example:: +Note that for correct parsing of multi-graph data, e.g. Trig, HexT, etc., into a ``Dataset``, +as opposed to a context-unaware ``Graph``, you will need to set the ``publicID`` of the ``Dataset`` to the identifier of the ``default_context`` (default graph), for example:: d = Dataset() d.parse( diff --git a/docs/plugin_serializers.rst b/docs/plugin_serializers.rst index ac836ee73..39d00df7f 100644 --- a/docs/plugin_serializers.rst +++ b/docs/plugin_serializers.rst @@ -32,7 +32,7 @@ xml :class:`~rdflib.plugins.serializers.rdfxml.XMLSerializer` JSON-LD ------- -JSON-LD - 'json-ld' - has been incorprated in rdflib since v6.0.0. +JSON-LD - 'json-ld' - has been incorporated into RDFLib since v6.0.0. HexTuples --------- @@ -43,3 +43,12 @@ Hextuple) will be an empty string. For context-aware (multi-graph) serialization, the 'graph' field of the default graph will be an empty string and the values for other graphs will be Blank Node IDs or IRIs. + +Longturtle +---------- +Longturtle is just the turtle format with newlines preferred over compactness - multiple nodes on the same line +to enhance the format's text file version control (think Git) friendliness - and more modern forms of prefix markers - +PREFIX instead of @prefix - to make it as similar to SPARQL as possible. + +Longturtle is Turtle 1.1 compliant and will work wherever ordinary turtle works, however some very old parsers don't +understand PREFIX, only @prefix... diff --git a/docs/security_considerations.rst b/docs/security_considerations.rst index c6edb5ddc..77925a0f5 100644 --- a/docs/security_considerations.rst +++ b/docs/security_considerations.rst @@ -15,7 +15,7 @@ processed. 
RDFLib also supports SPARQL, which has federated query capabilities that allow queries to query arbitrary remote endpoints. -If you are using RDFLib to process untrusted documents or queries you should +If you are using RDFLib to process untrusted documents or queries, you should take measures to restrict file and network access. Some measures that can be taken to restrict file and network access are: @@ -26,7 +26,7 @@ Some measures that can be taken to restrict file and network access are: Of these, operating system security measures are recommended. The other measures work, but they are not as effective as operating system security -measures, and even if they are used they should be used in conjunction with +measures, and even if they are used, they should be used in conjunction with operating system security measures. Operating System Security Measures @@ -40,11 +40,12 @@ Some examples of these include: * `Open Container Initiative (OCI) Containers `_ (aka Docker containers). - Most OCI runtimes provide mechanisms to restrict network and file access of - containers. For example, using Docker, you can limit your container to only - being access files explicitly mapped into the container and only access the - network through a firewall. For more information refer to the - documentation of the tool you use to manage your OCI containers: + Most OCI runtimes provide mechanisms to restrict network and file + access of containers. For example, using Docker, you can limit your + container to only being able to access files explicitly mapped into + the container and only access the network through a firewall. For more + information, refer to the documentation of the tool you use to manage + your OCI containers: * `Kubernetes `_ * `Docker `_ @@ -60,10 +61,10 @@ Some examples of these include: allowed. Applications that process untrusted input could be run as a user with these restrictions in place. 
-Many other measures are available, however, listing them outside the scope -of this document. +Many other measures are available, however, listing them is outside +the scope of this document. -Of the listed measures OCI containers are recommended. In most cases, OCI +Of the listed measures, OCI containers are recommended. In most cases, OCI containers are constrained by default and can't access the loopback interface and can only access files that are explicitly mapped into the container. @@ -87,7 +88,7 @@ and this function raises a ``urllib.Request`` audit event. For file access, RDFLib uses `open`, which raises an ``open`` audit event. Users of RDFLib can install audit hooks that react to these audit events and -raises an exception when an attempt is made to access files or network resources +raise an exception when an attempt is made to access files or network resources that are not explicitly allowed. RDFLib's test suite includes tests which verify that audit hooks can block @@ -103,7 +104,7 @@ RDFLib uses the `urllib.request.urlopen` for HTTP, HTTPS and other network access. This function will use a `urllib.request.OpenerDirector` installed with `urllib.request.install_opener` to open the URLs. -Users of RDFLib can install a custom URL opener that raise an exception when an +Users of RDFLib can install a custom URL opener that raises an exception when an attempt is made to access network resources that are not explicitly allowed. RDFLib's test suite includes tests which verify that custom URL openers can be diff --git a/docs/upgrade6to7.rst b/docs/upgrade6to7.rst index d58d25735..d687634d5 100644 --- a/docs/upgrade6to7.rst +++ b/docs/upgrade6to7.rst @@ -1,7 +1,7 @@ -.. _upgrade4to5: Upgrading from RDFLib version 6 to 7 +.. 
_upgrade6to7: Upgrading from RDFLib version 6 to 7 ============================================ -Upgrading 6 to 7 +Upgrading from version 6 to 7 ============================================ Python version @@ -13,8 +13,8 @@ New behaviour for ``publicID`` in ``parse`` methods. ---------------------------------------------------- Before version 7, the ``publicID`` argument to the -:meth:`~rdflib.graph.ConjunctiveGraph.parse` and -:meth:`~rdflib.graph.Dataset.parse` methods was used as the name for the default +:meth:`rdflib.graph.ConjunctiveGraph.parse` and +:meth:`rdflib.graph.Dataset.parse` methods was used as the name for the default graph, and triples from the default graph in a source were loaded into the graph named ``publicID``. diff --git a/examples/berkeleydb_example.py b/examples/berkeleydb_example.py index 6430315a6..085e3db4a 100644 --- a/examples/berkeleydb_example.py +++ b/examples/berkeleydb_example.py @@ -15,6 +15,7 @@ * loads multiple graphs downloaded from GitHub into a BerkeleyDB-baked graph stored in the folder gsq_vocabs. 
* does not delete the DB at the end so you can see it on disk """ + import os import tempfile @@ -46,7 +47,7 @@ def example_1(): print("(will always be 0 when using temp file for DB)") # Now we'll add some triples to the graph & commit the changes - EG = Namespace("http://example.net/test/") + EG = Namespace("http://example.net/test/") # noqa: N806 graph.bind("eg", EG) graph.add((EG["pic:1"], EG.name, Literal("Jane & Bob"))) diff --git a/examples/custom_datatype.py b/examples/custom_datatype.py index 4d402793f..46f2a5f23 100644 --- a/examples/custom_datatype.py +++ b/examples/custom_datatype.py @@ -8,7 +8,6 @@ mappings between literal datatypes and Python objects """ - from rdflib import XSD, Graph, Literal, Namespace, term if __name__ == "__main__": @@ -20,7 +19,7 @@ # Create a complex number RDFlib Literal EG = Namespace("http://example.com/") c = complex(2, 3) - l = Literal(c) + l = Literal(c) # noqa: E741 # Add it to a graph g = Graph() diff --git a/examples/custom_eval.py b/examples/custom_eval.py index f8dfd3902..32c268606 100644 --- a/examples/custom_eval.py +++ b/examples/custom_eval.py @@ -16,7 +16,6 @@ } """ - from pathlib import Path import rdflib @@ -31,7 +30,7 @@ ) # any number of rdfs.subClassOf -def customEval(ctx, part): +def customEval(ctx, part): # noqa: N802 """ Rewrite triple patterns to get super-classes """ diff --git a/examples/datasets.py b/examples/datasets.py index 8bf3c9d3c..d550775a1 100644 --- a/examples/datasets.py +++ b/examples/datasets.py @@ -1,9 +1,9 @@ """ An RDFLib Dataset is a slight extension to ConjunctiveGraph: it uses simpler terminology -and has a few additional convenience method extensions, for example add() can be used to +and has a few additional convenience methods, for example add() can be used to add quads directly to a specific Graph within the Dataset. 
-This example file shows how to decalre a Dataset, add content to it, serialise it, query it +This example file shows how to declare a Dataset, add content to it, serialise it, query it and remove things from it. """ @@ -12,11 +12,10 @@ # Note regarding `mypy: ignore_errors=true`: # # This example is using URIRef values as context identifiers. This is contrary -# to the type hints, but it does work. Most likely the type hints are wrong. +# to the type hints, but it does work. Most likely, the type hints are wrong. # Ideally we should just use `# type: ignore` comments for the lines that are -# causing problems, but for some reason the error occurs on different lines with -# different python versions, so the only option is to ignore errors for the -# whole file. +# causing problems, but the error occurs on different lines with different +# Python versions, so the only option is to ignore errors for the whole file. # mypy: ignore_errors=true @@ -102,7 +101,7 @@ print() print() -# Query one graph in the Dataset for all it's triples +# Query one graph in the Dataset for all its triples # we should get """ (rdflib.term.URIRef('http://example.com/subject-z'), rdflib.term.URIRef('http://example.com/predicate-z'), rdflib.term.Literal('Triple Z')) @@ -117,7 +116,7 @@ print() # Query the union of all graphs in the dataset for all triples -# we should get Nothing: +# we should get nothing: """ """ # A Dataset's default union graph does not exist by default (default_union property is False) diff --git a/examples/infixowl_ontology_creation.py b/examples/infixowl_ontology_creation.py index 8efeb69ca..962fc1ae1 100644 --- a/examples/infixowl_ontology_creation.py +++ b/examples/infixowl_ontology_creation.py @@ -249,22 +249,22 @@ def infixowl_example(): disjointWith=[CPR["physical-therapy"], CPR["psychological-therapy"]], ) therapy += medicalTherapy - medicalTherapy += Class(CPR["substance-administration"]) + medicalTherapy += Class(CPR["substance-administration"]) # noqa: N806 
diagnosticAct = Class(CPR["diagnostic-act"], subClassOf=[clinicalAct]) # noqa: N806 diagnosticAct.disjointWith = [CPR["therapeutic-act"]] screeningAct = Class(CPR["screening-act"]) # noqa: N806 - screeningAct += Class(CPR["laboratory-test"]) + screeningAct += Class(CPR["laboratory-test"]) # noqa: N806 - diagnosticAct += screeningAct + diagnosticAct += screeningAct # noqa: N806 - screeningAct += Class( + screeningAct += Class( # noqa: N806 CPR["medical-history-screening-act"], disjointWith=[CPR["clinical-examination"], CPR["laboratory-test"]], ) - screeningAct += Class( + screeningAct += Class( # noqa: N806 CPR["clinical-examination"], disjointWith=[CPR["laboratory-test"], CPR["medical-history-screening-act"]], ) diff --git a/examples/jsonld_serialization.py b/examples/jsonld_serialization.py new file mode 100644 index 000000000..5bee1a614 --- /dev/null +++ b/examples/jsonld_serialization.py @@ -0,0 +1,211 @@ +""" +JSON-LD is "A JSON-based Serialization for Linked Data" (https://www.w3.org/TR/json-ld/) that RDFLib implements for RDF serialization. + +This file demonstrates some of the JSON-LD things you can do with RDFLib. Parsing & serializing so far. More to be added later. + + +Parsing +------- +There are a number of "flavours" of JSON-LD - compact and verbose etc. RDFLib can parse all of these in a normal RDFLib way. + + +Serialization +------------- +JSON-LD has a number of options for serialization - more than other RDF formats. 
For example, IRIs within JSON-LD can be compacted down to CURIES when a "context" statment is added to the JSON-LD data that maps identifiers - short codes - to IRIs and namespace IRIs like this: + +# here the short code "dcterms" is mapped to the IRI http://purl.org/dc/terms/ and "schema" to https://schema.org/, as per RDFLib's in-build namespace prefixes + +"@context": { + "dct": "http://purl.org/dc/terms/", + "schema": "https://schema.org/" +} +""" + +# import RDFLib and other things +try: + from rdflib import Graph +except ModuleNotFoundError: + import sys + from pathlib import Path + + sys.path.append(str(Path(__file__).parent.parent)) + from rdflib import Graph + +# 1. JSON-LD Parsing + +# RDFLib can read all forms of JSON-LD. Here is an example: + +json_ld_data_string = """ +{ + "@context": { + "dct": "http://purl.org/dc/terms/", + "sdo": "https://schema.org/" + }, + "@graph": [ + { + "@id": "https://kurrawong.ai", + "@type": [ + "dct:Agent", + "sdo:Organization" + ], + "sdo:name": "KurrawongAI" + }, + { + "@id": "http://example.com/person/nick", + "@type": "dct:Agent", + "sdo:memberOf": { + "@id": "https://kurrawong.ai" + }, + "sdo:name": "Nicholas Car" + } + ] +} +""" + +# Parse the data in the 'normal' RDFLib way, setting the format parameter to "json-ld" + +g = Graph() +g.parse(data=json_ld_data_string, format="json-ld") + +# print out a count of triples to show successful parsing + +print(len(g)) + +# should be 6 + +# tidy up... +del g + + +# 2. JSON-LD Serialization + +# Load an RDF graph with some data - parsing Turtle input + +g = Graph().parse( + data=""" + PREFIX dcterms: + PREFIX rdfs: + + + a dcterms:Agent ; + "Nicholas Car" ; + ; + . + + + a dcterms:Agent , ; + "KurrawongAI" ; + . 
+ """ +) + +# 2.1 Basic JSON-LD serialization + +# Serialize with only the format option indicated +# Notice: +# - all IRIs are in long form - no CURIES / prefixes used +print(g.serialize(format="json-ld")) + +""" +[ + { + "@id": "https://kurrawong.ai", + "@type": [ + "http://purl.org/dc/terms/Agent", + "https://schema.org/Organization" + ], + "https://schema.org/name": [ + { + "@value": "KurrawongAI" + } + ] + }, + { + "@id": "http://example.com/person/nick", + "@type": [ + "http://purl.org/dc/terms/Agent" + ], + "https://schema.org/memberOf": [ + { + "@id": "https://kurrawong.ai" + } + ], + "https://schema.org/name": [ + { + "@value": "Nicholas Car" + } + ] + } +] +""" + +# 2.2 Compact the JSON-LD by using RDFLib's in-built namespace prefixes +# Notice: +# - the "@context" JSON element with prefix / namespace mappings +# - no prefix is known for schema.org since we are using only RDFLib's core namespace prefixes + +print(g.serialize(format="json-ld", auto_compact=True)) + +""" +{ + "@context": { + "dcterms": "http://purl.org/dc/terms/", + "owl": "http://www.w3.org/2002/07/owl#", + "rdf": "http://www.w3.org/1999/02/22-rdf-syntax-ns#", + "rdfs": "http://www.w3.org/2000/01/rdf-schema#", + "xsd": "http://www.w3.org/2001/XMLSchema#" + }, + "@graph": [ + { + "@id": "http://example.com/person/nick", + "@type": "dcterms:Agent", + "https://schema.org/memberOf": { + "@id": "https://kurrawong.ai" + }, + "https://schema.org/name": "Nicholas Car" + }, + { + "@id": "https://kurrawong.ai", + "@type": [ + "dcterms:Agent", + "https://schema.org/Organization" + ], + "https://schema.org/name": "KurrawongAI" + } + ] +} +""" + +# 2.3 Compact the JSON-LD by supplying own context +# We now override RDFLib's namespace prefixes by supplying our own context information +context = {"sdo": "https://schema.org/", "dct": "http://purl.org/dc/terms/"} + +# Now when we serialise the RDF data, this context can be used to overwrite the default RDFLib one. 
auto_compact need not be specified +print(g.serialize(format="json-ld", context=context)) + +""" +{ + "@context": { + "dct": "http://purl.org/dc/terms/", + "sdo": "https://schema.org/" + }, + "@graph": [ + { + "@id": "https://kurrawong.ai", + "@type": [ + "dct:Agent", + "sdo:Organization" + ], + "sdo:name": "KurrawongAI" + }, + { + "@id": "http://example.com/person/nick", + "@type": "dct:Agent", + "sdo:memberOf": { + "@id": "https://kurrawong.ai" + }, + "sdo:name": "Nicholas Car" + } + ] +} +""" diff --git a/examples/secure_with_audit.py b/examples/secure_with_audit.py index f49ccd164..2bd4e28fb 100644 --- a/examples/secure_with_audit.py +++ b/examples/secure_with_audit.py @@ -10,6 +10,8 @@ URLs and files as expected. """ +from __future__ import annotations + import logging import os import sys diff --git a/examples/secure_with_urlopen.py b/examples/secure_with_urlopen.py index fd6576b1e..005504796 100644 --- a/examples/secure_with_urlopen.py +++ b/examples/secure_with_urlopen.py @@ -1,6 +1,9 @@ """ This example demonstrates how to use a custom global URL opener installed with `urllib.request.install_opener` to block access to URLs. """ + +from __future__ import annotations + import http.client import logging import os diff --git a/examples/smushing.py b/examples/smushing.py index 7c367a25f..88d68a520 100644 --- a/examples/smushing.py +++ b/examples/smushing.py @@ -34,7 +34,7 @@ g = Graph() g.parse(f"{EXAMPLES_DIR / 'smushingdemo.n3'}", format="n3") - newURI = {} # old subject : stable uri + newURI = {} # old subject : stable uri # noqa: N816 for s, p, o in g.triples((None, FOAF["mbox_sha1sum"], None)): # For this graph, all objects are Identifiers, which is a subclass of # string. `n3` does allow for objects which are not Identifiers, like diff --git a/examples/swap_primer.py b/examples/swap_primer.py index fbcc52c3c..d1fa806aa 100644 --- a/examples/swap_primer.py +++ b/examples/swap_primer.py @@ -14,7 +14,7 @@ # Think of it as a blank piece of graph paper! 
primer = ConjunctiveGraph() - myNS = Namespace("https://example.com/") + myNS = Namespace("https://example.com/") # noqa: N816 primer.add((myNS.pat, myNS.knows, myNS.jo)) # or: @@ -96,7 +96,7 @@ :Woman = foo:FemaleAdult . :Title a rdf:Property; = dc:title . - """ # --- End of primer code + """ # --- End of primer code # noqa: N816 # To make this go easier to spit back out... # technically, we already created a namespace diff --git a/examples/transitive.py b/examples/transitive.py index 5def7c058..800cbc80c 100644 --- a/examples/transitive.py +++ b/examples/transitive.py @@ -49,10 +49,10 @@ person = URIRef("ex:person") dad = URIRef("ex:d") mom = URIRef("ex:m") - momOfDad = URIRef("ex:gm0") - momOfMom = URIRef("ex:gm1") - dadOfDad = URIRef("ex:gf0") - dadOfMom = URIRef("ex:gf1") + momOfDad = URIRef("ex:gm0") # noqa: N816 + momOfMom = URIRef("ex:gm1") # noqa: N816 + dadOfDad = URIRef("ex:gf0") # noqa: N816 + dadOfMom = URIRef("ex:gf1") # noqa: N816 parent = URIRef("ex:parent") diff --git a/poetry.lock b/poetry.lock index be0da07d8..27c05a77e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "alabaster" @@ -27,46 +27,43 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [[package]] name = "berkeleydb" -version = "18.1.6" +version = "18.1.10" description = "Python bindings for Oracle Berkeley DB" optional = true python-versions = "*" files = [ - {file = "berkeleydb-18.1.6.tar.gz", hash = "sha256:6d412dd1a5b702aeeda3cbfa10d3399b16a804d016de087234f8579fca613ec9"}, + {file = "berkeleydb-18.1.10.tar.gz", hash = "sha256:426341a16007a9002d987a6f4d97226f8eafffcb1a0488488053d38a3127c81a"}, ] [[package]] name = "black" -version = "23.3.0" +version = "24.4.2" description = "The uncompromising code formatter." 
optional = false -python-versions = ">=3.7" -files = [ - {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, - {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, - {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, - {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, - {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, - {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, - {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, - {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, - {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, - {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, - {file = 
"black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, - {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, - {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, - {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, - {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, - {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, - {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, - {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, - {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, +python-versions = ">=3.8" +files = [ + {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, + {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, + {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, + {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, + {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, + {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, + {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, + {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, + {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, + {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, + {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, + {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, + {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, + {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, + {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, + {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, + {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, + {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, + {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, + {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, + {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, + {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, ] [package.dependencies] @@ -76,118 +73,118 @@ packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} +typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)"] +d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "certifi" -version = "2022.12.7" +version = "2023.7.22" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, - {file = "certifi-2022.12.7.tar.gz", hash = "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3"}, + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, ] [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.2.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = 
"charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, - {file = 
"charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, - {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, - {file = 
"charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, - {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = 
"charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash 
= "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + 
{file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, ] [[package]] name = "click" -version = "8.1.3" 
+version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -206,71 +203,63 @@ files = [ [[package]] name = "coverage" -version = "7.2.7" +version = "7.6.0" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" -files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", 
hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = 
"sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, +python-versions = ">=3.8" +files = [ + {file = "coverage-7.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dff044f661f59dace805eedb4a7404c573b6ff0cdba4a524141bc63d7be5c7fd"}, + {file = "coverage-7.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8659fd33ee9e6ca03950cfdcdf271d645cf681609153f218826dd9805ab585c"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7792f0ab20df8071d669d929c75c97fecfa6bcab82c10ee4adb91c7a54055463"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b3cd1ca7cd73d229487fa5caca9e4bc1f0bca96526b922d61053ea751fe791"}, + {file = "coverage-7.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7e128f85c0b419907d1f38e616c4f1e9f1d1b37a7949f44df9a73d5da5cd53c"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a94925102c89247530ae1dab7dc02c690942566f22e189cbd53579b0693c0783"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dcd070b5b585b50e6617e8972f3fbbee786afca71b1936ac06257f7e178f00f6"}, + {file = "coverage-7.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d50a252b23b9b4dfeefc1f663c568a221092cbaded20a05a11665d0dbec9b8fb"}, + {file = "coverage-7.6.0-cp310-cp310-win32.whl", hash = "sha256:0e7b27d04131c46e6894f23a4ae186a6a2207209a05df5b6ad4caee6d54a222c"}, + {file = 
"coverage-7.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:54dece71673b3187c86226c3ca793c5f891f9fc3d8aa183f2e3653da18566169"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7b525ab52ce18c57ae232ba6f7010297a87ced82a2383b1afd238849c1ff933"}, + {file = "coverage-7.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bea27c4269234e06f621f3fac3925f56ff34bc14521484b8f66a580aacc2e7d"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed8d1d1821ba5fc88d4a4f45387b65de52382fa3ef1f0115a4f7a20cdfab0e94"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01c322ef2bbe15057bc4bf132b525b7e3f7206f071799eb8aa6ad1940bcf5fb1"}, + {file = "coverage-7.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03cafe82c1b32b770a29fd6de923625ccac3185a54a5e66606da26d105f37dac"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0d1b923fc4a40c5832be4f35a5dab0e5ff89cddf83bb4174499e02ea089daf57"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4b03741e70fb811d1a9a1d75355cf391f274ed85847f4b78e35459899f57af4d"}, + {file = "coverage-7.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a73d18625f6a8a1cbb11eadc1d03929f9510f4131879288e3f7922097a429f63"}, + {file = "coverage-7.6.0-cp311-cp311-win32.whl", hash = "sha256:65fa405b837060db569a61ec368b74688f429b32fa47a8929a7a2f9b47183713"}, + {file = "coverage-7.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:6379688fb4cfa921ae349c76eb1a9ab26b65f32b03d46bb0eed841fd4cb6afb1"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f7db0b6ae1f96ae41afe626095149ecd1b212b424626175a6633c2999eaad45b"}, + {file = "coverage-7.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:bbdf9a72403110a3bdae77948b8011f644571311c2fb35ee15f0f10a8fc082e8"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc44bf0315268e253bf563f3560e6c004efe38f76db03a1558274a6e04bf5d5"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da8549d17489cd52f85a9829d0e1d91059359b3c54a26f28bec2c5d369524807"}, + {file = "coverage-7.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0086cd4fc71b7d485ac93ca4239c8f75732c2ae3ba83f6be1c9be59d9e2c6382"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1fad32ee9b27350687035cb5fdf9145bc9cf0a094a9577d43e909948ebcfa27b"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:044a0985a4f25b335882b0966625270a8d9db3d3409ddc49a4eb00b0ef5e8cee"}, + {file = "coverage-7.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:76d5f82213aa78098b9b964ea89de4617e70e0d43e97900c2778a50856dac605"}, + {file = "coverage-7.6.0-cp312-cp312-win32.whl", hash = "sha256:3c59105f8d58ce500f348c5b56163a4113a440dad6daa2294b5052a10db866da"}, + {file = "coverage-7.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:ca5d79cfdae420a1d52bf177de4bc2289c321d6c961ae321503b2ca59c17ae67"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d39bd10f0ae453554798b125d2f39884290c480f56e8a02ba7a6ed552005243b"}, + {file = "coverage-7.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb08e8508e53a568811016e59f3234d29c2583f6b6e28572f0954a6b4f7e03d"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2e16f4cd2bc4d88ba30ca2d3bbf2f21f00f382cf4e1ce3b1ddc96c634bc48ca"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6616d1c9bf1e3faea78711ee42a8b972367d82ceae233ec0ac61cc7fec09fa6b"}, + {file = "coverage-7.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad4567d6c334c46046d1c4c20024de2a1c3abc626817ae21ae3da600f5779b44"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d17c6a415d68cfe1091d3296ba5749d3d8696e42c37fca5d4860c5bf7b729f03"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9146579352d7b5f6412735d0f203bbd8d00113a680b66565e205bc605ef81bc6"}, + {file = "coverage-7.6.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cdab02a0a941af190df8782aafc591ef3ad08824f97850b015c8c6a8b3877b0b"}, + {file = "coverage-7.6.0-cp38-cp38-win32.whl", hash = "sha256:df423f351b162a702c053d5dddc0fc0ef9a9e27ea3f449781ace5f906b664428"}, + {file = "coverage-7.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f2501d60d7497fd55e391f423f965bbe9e650e9ffc3c627d5f0ac516026000b8"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7221f9ac9dad9492cecab6f676b3eaf9185141539d5c9689d13fd6b0d7de840c"}, + {file = "coverage-7.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddaaa91bfc4477d2871442bbf30a125e8fe6b05da8a0015507bfbf4718228ab2"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cbe651f3904e28f3a55d6f371203049034b4ddbce65a54527a3f189ca3b390"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:831b476d79408ab6ccfadaaf199906c833f02fdb32c9ab907b1d4aa0713cfa3b"}, + {file = "coverage-7.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46c3d091059ad0b9c59d1034de74a7f36dcfa7f6d3bde782c49deb42438f2450"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4d5fae0a22dc86259dee66f2cc6c1d3e490c4a1214d7daa2a93d07491c5c04b6"}, + 
{file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:07ed352205574aad067482e53dd606926afebcb5590653121063fbf4e2175166"}, + {file = "coverage-7.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:49c76cdfa13015c4560702574bad67f0e15ca5a2872c6a125f6327ead2b731dd"}, + {file = "coverage-7.6.0-cp39-cp39-win32.whl", hash = "sha256:482855914928c8175735a2a59c8dc5806cf7d8f032e4820d52e845d1f731dca2"}, + {file = "coverage-7.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:543ef9179bc55edfd895154a51792b01c017c87af0ebaae092720152e19e42ca"}, + {file = "coverage-7.6.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:6fe885135c8a479d3e37a7aae61cbd3a0fb2deccb4dda3c25f92a49189f766d6"}, + {file = "coverage-7.6.0.tar.gz", hash = "sha256:289cc803fa1dc901f84701ac10c9ee873619320f2f9aff38794db4a4a0268d51"}, ] [package.dependencies] @@ -281,78 +270,29 @@ toml = ["tomli"] [[package]] name = "docutils" -version = "0.19" +version = "0.20.1" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.7" files = [ - {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, - {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, -] - -[[package]] -name = "entrypoints" -version = "0.4" -description = "Discover and load entry points from installed packages." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "entrypoints-0.4-py3-none-any.whl", hash = "sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f"}, - {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, + {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, + {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] [[package]] name = "exceptiongroup" -version = "1.1.1" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] test = ["pytest (>=6)"] -[[package]] -name = "flake8" -version = "4.0.1" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.6" -files = [ - {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, - {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, -] - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.8.0,<2.9.0" -pyflakes = ">=2.4.0,<2.5.0" - -[[package]] -name = "flakeheaven" -version = "3.3.0" -description = "FlakeHeaven is a [Flake8](https://gitlab.com/pycqa/flake8) wrapper to make it cool." 
-optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "flakeheaven-3.3.0-py3-none-any.whl", hash = "sha256:ae246197a178845b30b63fc03023f7ba925cc84cc96314ec19807dafcd6b39a3"}, - {file = "flakeheaven-3.3.0.tar.gz", hash = "sha256:eb07860e028ff8dd56cce742c4766624a37a4ce397fd34300254ab623d13047b"}, -] - -[package.dependencies] -colorama = "*" -entrypoints = "*" -flake8 = ">=4.0.1,<5.0.0" -pygments = "*" -toml = "*" -urllib3 = "*" - -[package.extras] -docs = ["alabaster", "myst-parser (>=0.18.0,<0.19.0)", "pygments-github-lexers", "sphinx"] - [[package]] name = "html5lib" version = "1.1" @@ -398,13 +338,13 @@ files = [ [[package]] name = "importlib-metadata" -version = "6.6.0" +version = "6.8.0" description = "Read metadata from Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "importlib_metadata-6.6.0-py3-none-any.whl", hash = "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed"}, - {file = "importlib_metadata-6.6.0.tar.gz", hash = "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705"}, + {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, + {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, ] [package.dependencies] @@ -413,7 +353,7 @@ zipp = ">=0.5" [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black 
(>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] [[package]] name = "iniconfig" @@ -440,23 +380,6 @@ files = [ [package.dependencies] six = "*" -[[package]] -name = "isort" -version = "5.12.0" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, - {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, -] - -[package.extras] -colors = ["colorama (>=0.4.3)"] -pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] -plugins = ["setuptools"] -requirements-deprecated-finder = ["pip-api", "pipreqs"] - [[package]] name = "jinja2" version = "3.1.2" @@ -476,119 +399,185 @@ i18n = ["Babel (>=2.7)"] [[package]] name = "lxml" -version = "4.9.2" +version = "5.2.2" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +python-versions = ">=3.6" files = [ - {file = "lxml-4.9.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:76cf573e5a365e790396a5cc2b909812633409306c6531a6877c59061e42c4f2"}, - {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b1f42b6921d0e81b1bcb5e395bc091a70f41c4d4e55ba99c6da2b31626c44892"}, - {file = "lxml-4.9.2-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:9f102706d0ca011de571de32c3247c6476b55bb6bc65a20f682f000b07a4852a"}, - {file = "lxml-4.9.2-cp27-cp27m-win32.whl", hash = "sha256:8d0b4612b66ff5d62d03bcaa043bb018f74dfea51184e53f067e6fdcba4bd8de"}, - {file = "lxml-4.9.2-cp27-cp27m-win_amd64.whl", hash = "sha256:4c8f293f14abc8fd3e8e01c5bd86e6ed0b6ef71936ded5bf10fe7a5efefbaca3"}, - {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2899456259589aa38bfb018c364d6ae7b53c5c22d8e27d0ec7609c2a1ff78b50"}, - {file = "lxml-4.9.2-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6749649eecd6a9871cae297bffa4ee76f90b4504a2a2ab528d9ebe912b101975"}, - {file = "lxml-4.9.2-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:a08cff61517ee26cb56f1e949cca38caabe9ea9fbb4b1e10a805dc39844b7d5c"}, - {file = "lxml-4.9.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:85cabf64adec449132e55616e7ca3e1000ab449d1d0f9d7f83146ed5bdcb6d8a"}, - {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8340225bd5e7a701c0fa98284c849c9b9fc9238abf53a0ebd90900f25d39a4e4"}, - {file = "lxml-4.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:1ab8f1f932e8f82355e75dda5413a57612c6ea448069d4fb2e217e9a4bed13d4"}, - {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:699a9af7dffaf67deeae27b2112aa06b41c370d5e7633e0ee0aea2e0b6c211f7"}, - {file = "lxml-4.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9cc34af337a97d470040f99ba4282f6e6bac88407d021688a5d585e44a23184"}, - {file = "lxml-4.9.2-cp310-cp310-win32.whl", hash = "sha256:d02a5399126a53492415d4906ab0ad0375a5456cc05c3fc0fc4ca11771745cda"}, - {file = "lxml-4.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:a38486985ca49cfa574a507e7a2215c0c780fd1778bb6290c21193b7211702ab"}, - {file = "lxml-4.9.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c83203addf554215463b59f6399835201999b5e48019dc17f182ed5ad87205c9"}, - {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2a87fa548561d2f4643c99cd13131acb607ddabb70682dcf1dff5f71f781a4bf"}, - {file = "lxml-4.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:d6b430a9938a5a5d85fc107d852262ddcd48602c120e3dbb02137c83d212b380"}, - {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3efea981d956a6f7173b4659849f55081867cf897e719f57383698af6f618a92"}, - {file = "lxml-4.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:df0623dcf9668ad0445e0558a21211d4e9a149ea8f5666917c8eeec515f0a6d1"}, - {file = "lxml-4.9.2-cp311-cp311-win32.whl", hash = "sha256:da248f93f0418a9e9d94b0080d7ebc407a9a5e6d0b57bb30db9b5cc28de1ad33"}, - {file = "lxml-4.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:3818b8e2c4b5148567e1b09ce739006acfaa44ce3156f8cbbc11062994b8e8dd"}, - {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, - {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, - {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = 
"sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, - {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, - {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f49e52d174375a7def9915c9f06ec4e569d235ad428f70751765f48d5926678c"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:36c3c175d34652a35475a73762b545f4527aec044910a651d2bf50de9c3352b1"}, - {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a35f8b7fa99f90dd2f5dc5a9fa12332642f087a7641289ca6c40d6e1a2637d8e"}, - {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, - {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, - {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, - {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, - {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = 
"sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:c9ec3eaf616d67db0764b3bb983962b4f385a1f08304fd30c7283954e6a7869b"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a29ba94d065945944016b6b74e538bdb1751a1db6ffb80c9d3c2e40d6fa9894"}, - {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a82d05da00a58b8e4c0008edbc8a4b6ec5a4bc1e2ee0fb6ed157cf634ed7fa45"}, - {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:223f4232855ade399bd409331e6ca70fb5578efef22cf4069a6090acc0f53c0e"}, - {file = "lxml-4.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d17bc7c2ccf49c478c5bdd447594e82692c74222698cfc9b5daae7ae7e90743b"}, - {file = "lxml-4.9.2-cp37-cp37m-win32.whl", hash = "sha256:b64d891da92e232c36976c80ed7ebb383e3f148489796d8d31a5b6a677825efe"}, - {file = "lxml-4.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a0a336d6d3e8b234a3aae3c674873d8f0e720b76bc1d9416866c41cd9500ffb9"}, - {file = "lxml-4.9.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:da4dd7c9c50c059aba52b3524f84d7de956f7fef88f0bafcf4ad7dde94a064e8"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:821b7f59b99551c69c85a6039c65b75f5683bdc63270fec660f75da67469ca24"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e5168986b90a8d1f2f9dc1b841467c74221bd752537b99761a93d2d981e04889"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:8e20cb5a47247e383cf4ff523205060991021233ebd6f924bca927fcf25cf86f"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13598ecfbd2e86ea7ae45ec28a2a54fb87ee9b9fdb0f6d343297d8e548392c03"}, - {file = "lxml-4.9.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:880bbbcbe2fca64e2f4d8e04db47bcdf504936fa2b33933efd945e1b429bea8c"}, - {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7d2278d59425777cfcb19735018d897ca8303abe67cc735f9f97177ceff8027f"}, - {file = "lxml-4.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5344a43228767f53a9df6e5b253f8cdca7dfc7b7aeae52551958192f56d98457"}, - {file = "lxml-4.9.2-cp38-cp38-win32.whl", hash = "sha256:925073b2fe14ab9b87e73f9a5fde6ce6392da430f3004d8b72cc86f746f5163b"}, - {file = "lxml-4.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:9b22c5c66f67ae00c0199f6055705bc3eb3fcb08d03d2ec4059a2b1b25ed48d7"}, - {file = "lxml-4.9.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:5f50a1c177e2fa3ee0667a5ab79fdc6b23086bc8b589d90b93b4bd17eb0e64d1"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:090c6543d3696cbe15b4ac6e175e576bcc3f1ccfbba970061b7300b0c15a2140"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:63da2ccc0857c311d764e7d3d90f429c252e83b52d1f8f1d1fe55be26827d1f4"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:5b4545b8a40478183ac06c073e81a5ce4cf01bf1734962577cf2bb569a5b3bbf"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2e430cd2824f05f2d4f687701144556646bae8f249fd60aa1e4c768ba7018947"}, - {file = "lxml-4.9.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6804daeb7ef69e7b36f76caddb85cccd63d0c56dedb47555d2fc969e2af6a1a5"}, - {file = 
"lxml-4.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a6e441a86553c310258aca15d1c05903aaf4965b23f3bc2d55f200804e005ee5"}, - {file = "lxml-4.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca34efc80a29351897e18888c71c6aca4a359247c87e0b1c7ada14f0ab0c0fb2"}, - {file = "lxml-4.9.2-cp39-cp39-win32.whl", hash = "sha256:6b418afe5df18233fc6b6093deb82a32895b6bb0b1155c2cdb05203f583053f1"}, - {file = "lxml-4.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:f1496ea22ca2c830cbcbd473de8f114a320da308438ae65abad6bab7867fe38f"}, - {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b264171e3143d842ded311b7dccd46ff9ef34247129ff5bf5066123c55c2431c"}, - {file = "lxml-4.9.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0dc313ef231edf866912e9d8f5a042ddab56c752619e92dfd3a2c277e6a7299a"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:16efd54337136e8cd72fb9485c368d91d77a47ee2d42b057564aae201257d419"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:0f2b1e0d79180f344ff9f321327b005ca043a50ece8713de61d1cb383fb8ac05"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:7b770ed79542ed52c519119473898198761d78beb24b107acf3ad65deae61f1f"}, - {file = "lxml-4.9.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efa29c2fe6b4fdd32e8ef81c1528506895eca86e1d8c4657fda04c9b3786ddf9"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7e91ee82f4199af8c43d8158024cbdff3d931df350252288f0d4ce656df7f3b5"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:b23e19989c355ca854276178a0463951a653309fb8e57ce674497f2d9f208746"}, - {file = 
"lxml-4.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:01d36c05f4afb8f7c20fd9ed5badca32a2029b93b1750f571ccc0b142531caf7"}, - {file = "lxml-4.9.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7b515674acfdcadb0eb5d00d8a709868173acece5cb0be3dd165950cbfdf5409"}, - {file = "lxml-4.9.2.tar.gz", hash = "sha256:2455cfaeb7ac70338b3257f41e21f0724f4b5b0c0e7702da67ee6c3640835b67"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:364d03207f3e603922d0d3932ef363d55bbf48e3647395765f9bfcbdf6d23632"}, + {file = "lxml-5.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:50127c186f191b8917ea2fb8b206fbebe87fd414a6084d15568c27d0a21d60db"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74e4f025ef3db1c6da4460dd27c118d8cd136d0391da4e387a15e48e5c975147"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:981a06a3076997adf7c743dcd0d7a0415582661e2517c7d961493572e909aa1d"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aef5474d913d3b05e613906ba4090433c515e13ea49c837aca18bde190853dff"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1e275ea572389e41e8b039ac076a46cb87ee6b8542df3fff26f5baab43713bca"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5b65529bb2f21ac7861a0e94fdbf5dc0daab41497d18223b46ee8515e5ad297"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bcc98f911f10278d1daf14b87d65325851a1d29153caaf146877ec37031d5f36"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:b47633251727c8fe279f34025844b3b3a3e40cd1b198356d003aa146258d13a2"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_s390x.whl", hash = 
"sha256:fbc9d316552f9ef7bba39f4edfad4a734d3d6f93341232a9dddadec4f15d425f"}, + {file = "lxml-5.2.2-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:13e69be35391ce72712184f69000cda04fc89689429179bc4c0ae5f0b7a8c21b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3b6a30a9ab040b3f545b697cb3adbf3696c05a3a68aad172e3fd7ca73ab3c835"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a233bb68625a85126ac9f1fc66d24337d6e8a0f9207b688eec2e7c880f012ec0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:dfa7c241073d8f2b8e8dbc7803c434f57dbb83ae2a3d7892dd068d99e96efe2c"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1a7aca7964ac4bb07680d5c9d63b9d7028cace3e2d43175cb50bba8c5ad33316"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ae4073a60ab98529ab8a72ebf429f2a8cc612619a8c04e08bed27450d52103c0"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ffb2be176fed4457e445fe540617f0252a72a8bc56208fd65a690fdb1f57660b"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e290d79a4107d7d794634ce3e985b9ae4f920380a813717adf61804904dc4393"}, + {file = "lxml-5.2.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:96e85aa09274955bb6bd483eaf5b12abadade01010478154b0ec70284c1b1526"}, + {file = "lxml-5.2.2-cp310-cp310-win32.whl", hash = "sha256:f956196ef61369f1685d14dad80611488d8dc1ef00be57c0c5a03064005b0f30"}, + {file = "lxml-5.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:875a3f90d7eb5c5d77e529080d95140eacb3c6d13ad5b616ee8095447b1d22e7"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:45f9494613160d0405682f9eee781c7e6d1bf45f819654eb249f8f46a2c22545"}, + {file = "lxml-5.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0b3f2df149efb242cee2ffdeb6674b7f30d23c9a7af26595099afaf46ef4e88"}, + {file = 
"lxml-5.2.2-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d28cb356f119a437cc58a13f8135ab8a4c8ece18159eb9194b0d269ec4e28083"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:657a972f46bbefdbba2d4f14413c0d079f9ae243bd68193cb5061b9732fa54c1"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b74b9ea10063efb77a965a8d5f4182806fbf59ed068b3c3fd6f30d2ac7bee734"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07542787f86112d46d07d4f3c4e7c760282011b354d012dc4141cc12a68cef5f"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:303f540ad2dddd35b92415b74b900c749ec2010e703ab3bfd6660979d01fd4ed"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:2eb2227ce1ff998faf0cd7fe85bbf086aa41dfc5af3b1d80867ecfe75fb68df3"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:1d8a701774dfc42a2f0b8ccdfe7dbc140500d1049e0632a611985d943fcf12df"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:56793b7a1a091a7c286b5f4aa1fe4ae5d1446fe742d00cdf2ffb1077865db10d"}, + {file = "lxml-5.2.2-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eb00b549b13bd6d884c863554566095bf6fa9c3cecb2e7b399c4bc7904cb33b5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a2569a1f15ae6c8c64108a2cd2b4a858fc1e13d25846be0666fc144715e32ab"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:8cf85a6e40ff1f37fe0f25719aadf443686b1ac7652593dc53c7ef9b8492b115"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:d237ba6664b8e60fd90b8549a149a74fcc675272e0e95539a00522e4ca688b04"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:0b3f5016e00ae7630a4b83d0868fca1e3d494c78a75b1c7252606a3a1c5fc2ad"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23441e2b5339bc54dc949e9e675fa35efe858108404ef9aa92f0456929ef6fe8"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2fb0ba3e8566548d6c8e7dd82a8229ff47bd8fb8c2da237607ac8e5a1b8312e5"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:79d1fb9252e7e2cfe4de6e9a6610c7cbb99b9708e2c3e29057f487de5a9eaefa"}, + {file = "lxml-5.2.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6dcc3d17eac1df7859ae01202e9bb11ffa8c98949dcbeb1069c8b9a75917e01b"}, + {file = "lxml-5.2.2-cp311-cp311-win32.whl", hash = "sha256:4c30a2f83677876465f44c018830f608fa3c6a8a466eb223535035fbc16f3438"}, + {file = "lxml-5.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:49095a38eb333aaf44c06052fd2ec3b8f23e19747ca7ec6f6c954ffea6dbf7be"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7429e7faa1a60cad26ae4227f4dd0459efde239e494c7312624ce228e04f6391"}, + {file = "lxml-5.2.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:50ccb5d355961c0f12f6cf24b7187dbabd5433f29e15147a67995474f27d1776"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc911208b18842a3a57266d8e51fc3cfaccee90a5351b92079beed912a7914c2"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ce9e786753743159799fdf8e92a5da351158c4bfb6f2db0bf31e7892a1feb5"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ec87c44f619380878bd49ca109669c9f221d9ae6883a5bcb3616785fa8f94c97"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08ea0f606808354eb8f2dfaac095963cb25d9d28e27edcc375d7b30ab01abbf6"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:75a9632f1d4f698b2e6e2e1ada40e71f369b15d69baddb8968dcc8e683839b18"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:74da9f97daec6928567b48c90ea2c82a106b2d500f397eeb8941e47d30b1ca85"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:0969e92af09c5687d769731e3f39ed62427cc72176cebb54b7a9d52cc4fa3b73"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:9164361769b6ca7769079f4d426a41df6164879f7f3568be9086e15baca61466"}, + {file = "lxml-5.2.2-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d26a618ae1766279f2660aca0081b2220aca6bd1aa06b2cf73f07383faf48927"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab67ed772c584b7ef2379797bf14b82df9aa5f7438c5b9a09624dd834c1c1aaf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:3d1e35572a56941b32c239774d7e9ad724074d37f90c7a7d499ab98761bd80cf"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:8268cbcd48c5375f46e000adb1390572c98879eb4f77910c6053d25cc3ac2c67"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e282aedd63c639c07c3857097fc0e236f984ceb4089a8b284da1c526491e3f3d"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfdc2bfe69e9adf0df4915949c22a25b39d175d599bf98e7ddf620a13678585"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4aefd911793b5d2d7a921233a54c90329bf3d4a6817dc465f12ffdfe4fc7b8fe"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8b8df03a9e995b6211dafa63b32f9d405881518ff1ddd775db4e7b98fb545e1c"}, + {file = "lxml-5.2.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f11ae142f3a322d44513de1018b50f474f8f736bc3cd91d969f464b5bfef8836"}, + {file = "lxml-5.2.2-cp312-cp312-win32.whl", hash = "sha256:16a8326e51fcdffc886294c1e70b11ddccec836516a343f9ed0f82aac043c24a"}, + {file = "lxml-5.2.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:bbc4b80af581e18568ff07f6395c02114d05f4865c2812a1f02f2eaecf0bfd48"}, + {file = "lxml-5.2.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3d9d13603410b72787579769469af730c38f2f25505573a5888a94b62b920f8"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38b67afb0a06b8575948641c1d6d68e41b83a3abeae2ca9eed2ac59892b36706"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c689d0d5381f56de7bd6966a4541bff6e08bf8d3871bbd89a0c6ab18aa699573"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:cf2a978c795b54c539f47964ec05e35c05bd045db5ca1e8366988c7f2fe6b3ce"}, + {file = "lxml-5.2.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:739e36ef7412b2bd940f75b278749106e6d025e40027c0b94a17ef7968d55d56"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d8bbcd21769594dbba9c37d3c819e2d5847656ca99c747ddb31ac1701d0c0ed9"}, + {file = "lxml-5.2.2-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:2304d3c93f2258ccf2cf7a6ba8c761d76ef84948d87bf9664e14d203da2cd264"}, + {file = "lxml-5.2.2-cp36-cp36m-win32.whl", hash = "sha256:02437fb7308386867c8b7b0e5bc4cd4b04548b1c5d089ffb8e7b31009b961dc3"}, + {file = "lxml-5.2.2-cp36-cp36m-win_amd64.whl", hash = "sha256:edcfa83e03370032a489430215c1e7783128808fd3e2e0a3225deee278585196"}, + {file = "lxml-5.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:28bf95177400066596cdbcfc933312493799382879da504633d16cf60bba735b"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a745cc98d504d5bd2c19b10c79c61c7c3df9222629f1b6210c0368177589fb8"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b590b39ef90c6b22ec0be925b211298e810b4856909c8ca60d27ffbca6c12e6"}, + {file = 
"lxml-5.2.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b336b0416828022bfd5a2e3083e7f5ba54b96242159f83c7e3eebaec752f1716"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:c2faf60c583af0d135e853c86ac2735ce178f0e338a3c7f9ae8f622fd2eb788c"}, + {file = "lxml-5.2.2-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:4bc6cb140a7a0ad1f7bc37e018d0ed690b7b6520ade518285dc3171f7a117905"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7ff762670cada8e05b32bf1e4dc50b140790909caa8303cfddc4d702b71ea184"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:57f0a0bbc9868e10ebe874e9f129d2917750adf008fe7b9c1598c0fbbfdde6a6"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:a6d2092797b388342c1bc932077ad232f914351932353e2e8706851c870bca1f"}, + {file = "lxml-5.2.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:60499fe961b21264e17a471ec296dcbf4365fbea611bf9e303ab69db7159ce61"}, + {file = "lxml-5.2.2-cp37-cp37m-win32.whl", hash = "sha256:d9b342c76003c6b9336a80efcc766748a333573abf9350f4094ee46b006ec18f"}, + {file = "lxml-5.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:b16db2770517b8799c79aa80f4053cd6f8b716f21f8aca962725a9565ce3ee40"}, + {file = "lxml-5.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7ed07b3062b055d7a7f9d6557a251cc655eed0b3152b76de619516621c56f5d3"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f60fdd125d85bf9c279ffb8e94c78c51b3b6a37711464e1f5f31078b45002421"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7e24cb69ee5f32e003f50e016d5fde438010c1022c96738b04fc2423e61706"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23cfafd56887eaed93d07bc4547abd5e09d837a002b791e9767765492a75883f"}, + {file = 
"lxml-5.2.2-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19b4e485cd07b7d83e3fe3b72132e7df70bfac22b14fe4bf7a23822c3a35bff5"}, + {file = "lxml-5.2.2-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7ce7ad8abebe737ad6143d9d3bf94b88b93365ea30a5b81f6877ec9c0dee0a48"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e49b052b768bb74f58c7dda4e0bdf7b79d43a9204ca584ffe1fb48a6f3c84c66"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d14a0d029a4e176795cef99c056d58067c06195e0c7e2dbb293bf95c08f772a3"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:be49ad33819d7dcc28a309b86d4ed98e1a65f3075c6acd3cd4fe32103235222b"}, + {file = "lxml-5.2.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a6d17e0370d2516d5bb9062c7b4cb731cff921fc875644c3d751ad857ba9c5b1"}, + {file = "lxml-5.2.2-cp38-cp38-win32.whl", hash = "sha256:5b8c041b6265e08eac8a724b74b655404070b636a8dd6d7a13c3adc07882ef30"}, + {file = "lxml-5.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:f61efaf4bed1cc0860e567d2ecb2363974d414f7f1f124b1df368bbf183453a6"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fb91819461b1b56d06fa4bcf86617fac795f6a99d12239fb0c68dbeba41a0a30"}, + {file = "lxml-5.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d4ed0c7cbecde7194cd3228c044e86bf73e30a23505af852857c09c24e77ec5d"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54401c77a63cc7d6dc4b4e173bb484f28a5607f3df71484709fe037c92d4f0ed"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:625e3ef310e7fa3a761d48ca7ea1f9d8718a32b1542e727d584d82f4453d5eeb"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:519895c99c815a1a24a926d5b60627ce5ea48e9f639a5cd328bda0515ea0f10c"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:c7079d5eb1c1315a858bbf180000757db8ad904a89476653232db835c3114001"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:343ab62e9ca78094f2306aefed67dcfad61c4683f87eee48ff2fd74902447726"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:cd9e78285da6c9ba2d5c769628f43ef66d96ac3085e59b10ad4f3707980710d3"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:546cf886f6242dff9ec206331209db9c8e1643ae642dea5fdbecae2453cb50fd"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:02f6a8eb6512fdc2fd4ca10a49c341c4e109aa6e9448cc4859af5b949622715a"}, + {file = "lxml-5.2.2-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:339ee4a4704bc724757cd5dd9dc8cf4d00980f5d3e6e06d5847c1b594ace68ab"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0a028b61a2e357ace98b1615fc03f76eb517cc028993964fe08ad514b1e8892d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:f90e552ecbad426eab352e7b2933091f2be77115bb16f09f78404861c8322981"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d83e2d94b69bf31ead2fa45f0acdef0757fa0458a129734f59f67f3d2eb7ef32"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a02d3c48f9bb1e10c7788d92c0c7db6f2002d024ab6e74d6f45ae33e3d0288a3"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6d68ce8e7b2075390e8ac1e1d3a99e8b6372c694bbe612632606d1d546794207"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:453d037e09a5176d92ec0fd282e934ed26d806331a8b70ab431a81e2fbabf56d"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:3b019d4ee84b683342af793b56bb35034bd749e4cbdd3d33f7d1107790f8c472"}, + {file = "lxml-5.2.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb3942960f0beb9f46e2a71a3aca220d1ca32feb5a398656be934320804c0df9"}, + {file = 
"lxml-5.2.2-cp39-cp39-win32.whl", hash = "sha256:ac6540c9fff6e3813d29d0403ee7a81897f1d8ecc09a8ff84d2eea70ede1cdbf"}, + {file = "lxml-5.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:610b5c77428a50269f38a534057444c249976433f40f53e3b47e68349cca1425"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b537bd04d7ccd7c6350cdaaaad911f6312cbd61e6e6045542f781c7f8b2e99d2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4820c02195d6dfb7b8508ff276752f6b2ff8b64ae5d13ebe02e7667e035000b9"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2a09f6184f17a80897172863a655467da2b11151ec98ba8d7af89f17bf63dae"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76acba4c66c47d27c8365e7c10b3d8016a7da83d3191d053a58382311a8bf4e1"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b128092c927eaf485928cec0c28f6b8bead277e28acf56800e972aa2c2abd7a2"}, + {file = "lxml-5.2.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ae791f6bd43305aade8c0e22f816b34f3b72b6c820477aab4d18473a37e8090b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a2f6a1bc2460e643785a2cde17293bd7a8f990884b822f7bca47bee0a82fc66b"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e8d351ff44c1638cb6e980623d517abd9f580d2e53bfcd18d8941c052a5a009"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bec4bd9133420c5c52d562469c754f27c5c9e36ee06abc169612c959bd7dbb07"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:55ce6b6d803890bd3cc89975fca9de1dff39729b43b73cb15ddd933b8bc20484"}, + {file = "lxml-5.2.2-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ab6a358d1286498d80fe67bd3d69fcbc7d1359b45b41e74c4a26964ca99c3f8"}, + {file = 
"lxml-5.2.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:06668e39e1f3c065349c51ac27ae430719d7806c026fec462e5693b08b95696b"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9cd5323344d8ebb9fb5e96da5de5ad4ebab993bbf51674259dbe9d7a18049525"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89feb82ca055af0fe797a2323ec9043b26bc371365847dbe83c7fd2e2f181c34"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e481bba1e11ba585fb06db666bfc23dbe181dbafc7b25776156120bf12e0d5a6"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d6c6ea6a11ca0ff9cd0390b885984ed31157c168565702959c25e2191674a14"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3d98de734abee23e61f6b8c2e08a88453ada7d6486dc7cdc82922a03968928db"}, + {file = "lxml-5.2.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:69ab77a1373f1e7563e0fb5a29a8440367dec051da6c7405333699d07444f511"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34e17913c431f5ae01d8658dbf792fdc457073dcdfbb31dc0cc6ab256e664a8d"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f8757b03208c3f50097761be2dea0aba02e94f0dc7023ed73a7bb14ff11eb0"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a520b4f9974b0a0a6ed73c2154de57cdfd0c8800f4f15ab2b73238ffed0b36e"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5e097646944b66207023bc3c634827de858aebc226d5d4d6d16f0b77566ea182"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b5e4ef22ff25bfd4ede5f8fb30f7b24446345f3e79d9b7455aef2836437bc38a"}, + {file = "lxml-5.2.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ff69a9a0b4b17d78170c73abe2ab12084bdf1691550c5629ad1fe7849433f324"}, + {file = 
"lxml-5.2.2.tar.gz", hash = "sha256:bb2dc4898180bea79863d5487e5f9c7c34297414bad54bcd0f0852aee9cfdb87"}, ] [package.extras] cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] html5 = ["html5lib"] htmlsoup = ["BeautifulSoup4"] -source = ["Cython (>=0.29.7)"] +source = ["Cython (>=3.0.10)"] [[package]] name = "lxml-stubs" -version = "0.4.0" +version = "0.5.1" description = "Type annotations for the lxml package" optional = false python-versions = "*" files = [ - {file = "lxml-stubs-0.4.0.tar.gz", hash = "sha256:184877b42127256abc2b932ba8bd0ab5ea80bd0b0fee618d16daa40e0b71abee"}, - {file = "lxml_stubs-0.4.0-py3-none-any.whl", hash = "sha256:3b381e9e82397c64ea3cc4d6f79d1255d015f7b114806d4826218805c10ec003"}, + {file = "lxml-stubs-0.5.1.tar.gz", hash = "sha256:e0ec2aa1ce92d91278b719091ce4515c12adc1d564359dfaf81efa7d4feab79d"}, + {file = "lxml_stubs-0.5.1-py3-none-any.whl", hash = "sha256:1f689e5dbc4b9247cb09ae820c7d34daeb1fdbd1db06123814b856dae7787272"}, ] [package.extras] -test = ["coverage[toml] (==5.2)", "pytest (>=6.0.0)", "pytest-mypy-plugins (==1.9.3)"] +test = ["coverage[toml] (>=7.2.5)", "mypy (>=1.2.0)", "pytest (>=7.3.0)", "pytest-mypy-plugins (>=1.10.1)"] [[package]] name = "markdown-it-py" -version = "2.2.0" +version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"}, - {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"}, + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, ] [package.dependencies] @@ -601,96 +590,95 @@ compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0 linkify = ["linkify-it-py (>=1,<3)"] plugins = ["mdit-py-plugins"] profiling = ["gprof2dot"] -rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.3" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = 
"MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, - {file = 
"MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, - {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, - {file = 
"MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, - {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, -] - -[[package]] -name = "mccabe" -version = "0.6.1" -description 
= "McCabe checker, plugin for flake8" -optional = false -python-versions = "*" -files = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = 
"MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash 
= "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, ] [[package]] name = "mdit-py-plugins" -version = "0.3.5" +version = "0.4.0" description = "Collection of plugins for markdown-it-py" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = 
"mdit-py-plugins-0.3.5.tar.gz", hash = "sha256:eee0adc7195e5827e17e02d2a258a2ba159944a0748f59c5099a4a27f78fcf6a"}, - {file = "mdit_py_plugins-0.3.5-py3-none-any.whl", hash = "sha256:ca9a0714ea59a24b2b044a1831f48d817dd0c817e84339f20e7889f392d77c4e"}, + {file = "mdit_py_plugins-0.4.0-py3-none-any.whl", hash = "sha256:b51b3bb70691f57f974e257e367107857a93b36f322a9e6d44ca5bf28ec2def9"}, + {file = "mdit_py_plugins-0.4.0.tar.gz", hash = "sha256:d8ab27e9aed6c38aa716819fedfde15ca275715955f8a185a8e1cf90fb1d2c1b"}, ] [package.dependencies] -markdown-it-py = ">=1.0.0,<3.0.0" +markdown-it-py = ">=1.0.0,<4.0.0" [package.extras] code-style = ["pre-commit"] -rtd = ["attrs", "myst-parser (>=0.16.1,<0.17.0)", "sphinx-book-theme (>=0.1.0,<0.2.0)"] +rtd = ["myst-parser", "sphinx-book-theme"] testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] @@ -706,48 +694,49 @@ files = [ [[package]] name = "mypy" -version = "1.3.0" +version = "1.11.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" -files = [ - {file = "mypy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c1eb485cea53f4f5284e5baf92902cd0088b24984f4209e25981cc359d64448d"}, - {file = "mypy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c99c3ecf223cf2952638da9cd82793d8f3c0c5fa8b6ae2b2d9ed1e1ff51ba85"}, - {file = "mypy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:550a8b3a19bb6589679a7c3c31f64312e7ff482a816c96e0cecec9ad3a7564dd"}, - {file = "mypy-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cbc07246253b9e3d7d74c9ff948cd0fd7a71afcc2b77c7f0a59c26e9395cb152"}, - {file = "mypy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:a22435632710a4fcf8acf86cbd0d69f68ac389a3892cb23fbad176d1cddaf228"}, - {file = "mypy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6e33bb8b2613614a33dff70565f4c803f889ebd2f859466e42b46e1df76018dd"}, - {file = "mypy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:7d23370d2a6b7a71dc65d1266f9a34e4cde9e8e21511322415db4b26f46f6b8c"}, - {file = "mypy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:658fe7b674769a0770d4b26cb4d6f005e88a442fe82446f020be8e5f5efb2fae"}, - {file = "mypy-1.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e42d29e324cdda61daaec2336c42512e59c7c375340bd202efa1fe0f7b8f8ca"}, - {file = "mypy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:d0b6c62206e04061e27009481cb0ec966f7d6172b5b936f3ead3d74f29fe3dcf"}, - {file = "mypy-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:76ec771e2342f1b558c36d49900dfe81d140361dd0d2df6cd71b3db1be155409"}, - {file = "mypy-1.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc95f8386314272bbc817026f8ce8f4f0d2ef7ae44f947c4664efac9adec929"}, - {file = "mypy-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:faff86aa10c1aa4a10e1a301de160f3d8fc8703b88c7e98de46b531ff1276a9a"}, - {file = "mypy-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8c5979d0deb27e0f4479bee18ea0f83732a893e81b78e62e2dda3e7e518c92ee"}, - {file = "mypy-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c5d2cc54175bab47011b09688b418db71403aefad07cbcd62d44010543fc143f"}, - {file = "mypy-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:87df44954c31d86df96c8bd6e80dfcd773473e877ac6176a8e29898bfb3501cb"}, - {file = "mypy-1.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:473117e310febe632ddf10e745a355714e771ffe534f06db40702775056614c4"}, - {file = "mypy-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:74bc9b6e0e79808bf8678d7678b2ae3736ea72d56eede3820bd3849823e7f305"}, - {file = "mypy-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:44797d031a41516fcf5cbfa652265bb994e53e51994c1bd649ffcd0c3a7eccbf"}, - {file = "mypy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ddae0f39ca146972ff6bb4399f3b2943884a774b8771ea0a8f50e971f5ea5ba8"}, - {file = 
"mypy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c4c42c60a8103ead4c1c060ac3cdd3ff01e18fddce6f1016e08939647a0e703"}, - {file = "mypy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e86c2c6852f62f8f2b24cb7a613ebe8e0c7dc1402c61d36a609174f63e0ff017"}, - {file = "mypy-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f9dca1e257d4cc129517779226753dbefb4f2266c4eaad610fc15c6a7e14283e"}, - {file = "mypy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:95d8d31a7713510685b05fbb18d6ac287a56c8f6554d88c19e73f724a445448a"}, - {file = "mypy-1.3.0-py3-none-any.whl", hash = "sha256:a8763e72d5d9574d45ce5881962bc8e9046bf7b375b0abf031f3e6811732a897"}, - {file = "mypy-1.3.0.tar.gz", hash = "sha256:e1f4d16e296f5135624b34e8fb741eb0eadedca90862405b1f1fde2040b9bd11"}, +python-versions = ">=3.8" +files = [ + {file = "mypy-1.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3824187c99b893f90c845bab405a585d1ced4ff55421fdf5c84cb7710995229"}, + {file = "mypy-1.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:96f8dbc2c85046c81bcddc246232d500ad729cb720da4e20fce3b542cab91287"}, + {file = "mypy-1.11.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a5d8d8dd8613a3e2be3eae829ee891b6b2de6302f24766ff06cb2875f5be9c6"}, + {file = "mypy-1.11.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72596a79bbfb195fd41405cffa18210af3811beb91ff946dbcb7368240eed6be"}, + {file = "mypy-1.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:35ce88b8ed3a759634cb4eb646d002c4cef0a38f20565ee82b5023558eb90c00"}, + {file = "mypy-1.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:98790025861cb2c3db8c2f5ad10fc8c336ed2a55f4daf1b8b3f877826b6ff2eb"}, + {file = "mypy-1.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25bcfa75b9b5a5f8d67147a54ea97ed63a653995a82798221cca2a315c0238c1"}, + {file = "mypy-1.11.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0bea2a0e71c2a375c9fa0ede3d98324214d67b3cbbfcbd55ac8f750f85a414e3"}, + {file = "mypy-1.11.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2b3d36baac48e40e3064d2901f2fbd2a2d6880ec6ce6358825c85031d7c0d4d"}, + {file = "mypy-1.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:d8e2e43977f0e09f149ea69fd0556623919f816764e26d74da0c8a7b48f3e18a"}, + {file = "mypy-1.11.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1d44c1e44a8be986b54b09f15f2c1a66368eb43861b4e82573026e04c48a9e20"}, + {file = "mypy-1.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cea3d0fb69637944dd321f41bc896e11d0fb0b0aa531d887a6da70f6e7473aba"}, + {file = "mypy-1.11.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a83ec98ae12d51c252be61521aa5731f5512231d0b738b4cb2498344f0b840cd"}, + {file = "mypy-1.11.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7b73a856522417beb78e0fb6d33ef89474e7a622db2653bc1285af36e2e3e3d"}, + {file = "mypy-1.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:f2268d9fcd9686b61ab64f077be7ffbc6fbcdfb4103e5dd0cc5eaab53a8886c2"}, + {file = "mypy-1.11.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:940bfff7283c267ae6522ef926a7887305945f716a7704d3344d6d07f02df850"}, + {file = "mypy-1.11.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:14f9294528b5f5cf96c721f231c9f5b2733164e02c1c018ed1a0eff8a18005ac"}, + {file = "mypy-1.11.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d7b54c27783991399046837df5c7c9d325d921394757d09dbcbf96aee4649fe9"}, + {file = "mypy-1.11.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:65f190a6349dec29c8d1a1cd4aa71284177aee5949e0502e6379b42873eddbe7"}, + {file = "mypy-1.11.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbe286303241fea8c2ea5466f6e0e6a046a135a7e7609167b07fd4e7baf151bf"}, + {file = "mypy-1.11.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:104e9c1620c2675420abd1f6c44bab7dd33cc85aea751c985006e83dcd001095"}, + {file 
= "mypy-1.11.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f006e955718ecd8d159cee9932b64fba8f86ee6f7728ca3ac66c3a54b0062abe"}, + {file = "mypy-1.11.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:becc9111ca572b04e7e77131bc708480cc88a911adf3d0239f974c034b78085c"}, + {file = "mypy-1.11.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6801319fe76c3f3a3833f2b5af7bd2c17bb93c00026a2a1b924e6762f5b19e13"}, + {file = "mypy-1.11.0-cp39-cp39-win_amd64.whl", hash = "sha256:c1a184c64521dc549324ec6ef7cbaa6b351912be9cb5edb803c2808a0d7e85ac"}, + {file = "mypy-1.11.0-py3-none-any.whl", hash = "sha256:56913ec8c7638b0091ef4da6fcc9136896914a9d60d54670a75880c3e5b99ace"}, + {file = "mypy-1.11.0.tar.gz", hash = "sha256:93743608c7348772fdc717af4aeee1997293a1ad04bc0ea6efa15bf65385c538"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=3.10" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -763,68 +752,68 @@ files = [ [[package]] name = "myst-parser" -version = "1.0.0" +version = "3.0.1" description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "myst-parser-1.0.0.tar.gz", hash = "sha256:502845659313099542bd38a2ae62f01360e7dd4b1310f025dd014dfc0439cdae"}, - {file = "myst_parser-1.0.0-py3-none-any.whl", hash = "sha256:69fb40a586c6fa68995e6521ac0a525793935db7e724ca9bac1d33be51be9a4c"}, + {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, + {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, ] [package.dependencies] -docutils = 
">=0.15,<0.20" +docutils = ">=0.18,<0.22" jinja2 = "*" -markdown-it-py = ">=1.0.0,<3.0.0" -mdit-py-plugins = ">=0.3.4,<0.4.0" +markdown-it-py = ">=3.0,<4.0" +mdit-py-plugins = ">=0.4,<1.0" pyyaml = "*" -sphinx = ">=5,<7" +sphinx = ">=6,<8" [package.extras] code-style = ["pre-commit (>=3.0,<4.0)"] -linkify = ["linkify-it-py (>=1.0,<2.0)"] -rtd = ["ipython", "pydata-sphinx-theme (==v0.13.0rc4)", "sphinx-autodoc2 (>=0.4.2,<0.5.0)", "sphinx-book-theme (==1.0.0rc2)", "sphinx-copybutton", "sphinx-design2", "sphinx-pyscript", "sphinx-tippy (>=0.3.1)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.7.5,<0.8.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] -testing = ["beautifulsoup4", "coverage[toml]", "pytest (>=7,<8)", "pytest-cov", "pytest-param-files (>=0.3.4,<0.4.0)", "pytest-regressions", "sphinx-pytest"] -testing-docutils = ["pygments", "pytest (>=7,<8)", "pytest-param-files (>=0.3.4,<0.4.0)"] +linkify = ["linkify-it-py (>=2.0,<3.0)"] +rtd = ["ipython", "sphinx (>=7)", "sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme (>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] +testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] +testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] [[package]] name = "networkx" -version = "2.6.3" +version = "3.1" description = "Python package for creating and manipulating graphs and networks" optional = true -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "networkx-2.6.3-py3-none-any.whl", hash = "sha256:80b6b89c77d1dfb64a4c7854981b60aeea6360ac02c6d4e4913319e0a313abef"}, - {file = "networkx-2.6.3.tar.gz", hash = "sha256:c0946ed31d71f1b732b5aaa6da5a0388a345019af232ce2f49c766e2d6795c51"}, + {file = 
"networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, + {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, ] [package.extras] -default = ["matplotlib (>=3.3)", "numpy (>=1.19)", "pandas (>=1.1)", "scipy (>=1.5,!=1.6.1)"] -developer = ["black (==21.5b1)", "pre-commit (>=2.12)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.1)", "pillow (>=8.2)", "pydata-sphinx-theme (>=0.6,<1.0)", "sphinx (>=4.0,<5.0)", "sphinx-gallery (>=0.9,<1.0)", "texext (>=0.6.6)"] -extra = ["lxml (>=4.5)", "pydot (>=1.4.1)", "pygraphviz (>=1.7)"] -test = ["codecov (>=2.1)", "pytest (>=6.2)", "pytest-cov (>=2.12)"] +default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] +developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] +doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] +extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] +test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "packaging" -version = "23.0" +version = "23.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, - {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] [[package]] name = "pathspec" -version = "0.11.1" +version = "0.11.2" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] @@ -838,81 +827,45 @@ files = [ {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] -[[package]] -name = "pep8-naming" -version = "0.13.2" -description = "Check PEP-8 naming conventions, plugin for flake8" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, - {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, -] - -[package.dependencies] -flake8 = ">=3.9.1" - [[package]] name = "platformdirs" -version = "3.1.1" +version = "3.10.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"}, - {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"}, + {file = "platformdirs-3.10.0-py3-none-any.whl", hash = "sha256:d7c24979f292f916dc9cbf8648319032f551ea8c49a4c9bf2fb556a02070ec1d"}, + {file = "platformdirs-3.10.0.tar.gz", hash = "sha256:b45696dab2d7cc691a3226759c0d3b00c47c8b6e293d96f6436f733303f77f6d"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] [[package]] name = "pluggy" -version = "1.0.0" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] -[[package]] -name = "pycodestyle" -version = "2.8.0" -description = "Python style guide checker" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, 
!=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, - {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, -] - -[[package]] -name = "pyflakes" -version = "2.4.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, - {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, -] - [[package]] name = "pygments" -version = "2.14.0" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, - {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -920,13 +873,13 @@ plugins = ["importlib-metadata"] [[package]] name = "pyparsing" -version = "3.1.0" +version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false python-versions = ">=3.6.8" files = [ - {file = "pyparsing-3.1.0-py3-none-any.whl", hash = "sha256:d554a96d1a7d3ddaf7183104485bc19fd80543ad6ac5bdb6426719d766fb06c1"}, - {file = "pyparsing-3.1.0.tar.gz", hash = 
"sha256:edb662d6fe322d6e990b1594b5feaeadf806803359e3d4d42f11e295e588f0ea"}, + {file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"}, + {file = "pyparsing-3.1.2.tar.gz", hash = "sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad"}, ] [package.extras] @@ -934,13 +887,13 @@ diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "7.3.2" +version = "8.3.1" description = "pytest: simple powerful testing with Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-7.3.2-py3-none-any.whl", hash = "sha256:cdcbd012c9312258922f8cd3f1b62a6580fdced17db6014896053d47cddf9295"}, - {file = "pytest-7.3.2.tar.gz", hash = "sha256:ee990a3cc55ba808b80795a79944756f315c67c12b56abd3ac993a7b8c17030b"}, + {file = "pytest-8.3.1-py3-none-any.whl", hash = "sha256:e9600ccf4f563976e2c99fa02c7624ab938296551f280835ee6516df8bc4ae8c"}, + {file = "pytest-8.3.1.tar.gz", hash = "sha256:7e8e5c5abd6e93cb1cc151f23e57adc31fcf8cfd2a3ff2da63e23f732de35db6"}, ] [package.dependencies] @@ -948,21 +901,21 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +pluggy = ">=1.5,<2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} [package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" -version = "4.1.0" +version = "5.0.0" description = "Pytest plugin for measuring coverage." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, - {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, + {file = "pytest-cov-5.0.0.tar.gz", hash = "sha256:5837b58e9f6ebd335b0f8060eecce69b662415b16dc503883a02f45dfeb14857"}, + {file = "pytest_cov-5.0.0-py3-none-any.whl", hash = "sha256:4f0764a1219df53214206bf1feea4633c3b558a2925c8b59f144f682861ce652"}, ] [package.dependencies] @@ -970,104 +923,142 @@ coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytz" -version = "2022.7.1" +version = "2023.3" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, - {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] [[package]] name = "pyyaml" -version = "6.0" +version = "6.0.1" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.6" files = [ - {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, - {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, - {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, - {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, - {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, - {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, - {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, - {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, - {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, - {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, - {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, - {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, - {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, - {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, - {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, - {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, - {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, - {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, - {file = 
"PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, - {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, - {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, - {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, - {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, - {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, - {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, - {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, - {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, + {file = 
"PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = 
"PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, ] [[package]] name = "requests" -version = "2.28.2" +version = "2.31.0" description = "Python HTTP for Humans." 
optional = false -python-versions = ">=3.7, <4" +python-versions = ">=3.7" files = [ - {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, - {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<1.27" +urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "ruff" +version = "0.5.4" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.5.4-py3-none-linux_armv6l.whl", hash = "sha256:82acef724fc639699b4d3177ed5cc14c2a5aacd92edd578a9e846d5b5ec18ddf"}, + {file = "ruff-0.5.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:da62e87637c8838b325e65beee485f71eb36202ce8e3cdbc24b9fcb8b99a37be"}, + {file = "ruff-0.5.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e98ad088edfe2f3b85a925ee96da652028f093d6b9b56b76fc242d8abb8e2059"}, + {file = "ruff-0.5.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c55efbecc3152d614cfe6c2247a3054cfe358cefbf794f8c79c8575456efe19"}, + {file = "ruff-0.5.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f9b85eaa1f653abd0a70603b8b7008d9e00c9fa1bbd0bf40dad3f0c0bdd06793"}, + {file = "ruff-0.5.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0cf497a47751be8c883059c4613ba2f50dd06ec672692de2811f039432875278"}, + {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", 
hash = "sha256:09c14ed6a72af9ccc8d2e313d7acf7037f0faff43cde4b507e66f14e812e37f7"}, + {file = "ruff-0.5.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:628f6b8f97b8bad2490240aa84f3e68f390e13fabc9af5c0d3b96b485921cd60"}, + {file = "ruff-0.5.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3520a00c0563d7a7a7c324ad7e2cde2355733dafa9592c671fb2e9e3cd8194c1"}, + {file = "ruff-0.5.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93789f14ca2244fb91ed481456f6d0bb8af1f75a330e133b67d08f06ad85b516"}, + {file = "ruff-0.5.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:029454e2824eafa25b9df46882f7f7844d36fd8ce51c1b7f6d97e2615a57bbcc"}, + {file = "ruff-0.5.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9492320eed573a13a0bc09a2957f17aa733fff9ce5bf00e66e6d4a88ec33813f"}, + {file = "ruff-0.5.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a6e1f62a92c645e2919b65c02e79d1f61e78a58eddaebca6c23659e7c7cb4ac7"}, + {file = "ruff-0.5.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:768fa9208df2bec4b2ce61dbc7c2ddd6b1be9fb48f1f8d3b78b3332c7d71c1ff"}, + {file = "ruff-0.5.4-py3-none-win32.whl", hash = "sha256:e1e7393e9c56128e870b233c82ceb42164966f25b30f68acbb24ed69ce9c3a4e"}, + {file = "ruff-0.5.4-py3-none-win_amd64.whl", hash = "sha256:58b54459221fd3f661a7329f177f091eb35cf7a603f01d9eb3eb11cc348d38c4"}, + {file = "ruff-0.5.4-py3-none-win_arm64.whl", hash = "sha256:bd53da65f1085fb5b307c38fd3c0829e76acf7b2a912d8d79cadcdb4875c1eb7"}, + {file = "ruff-0.5.4.tar.gz", hash = "sha256:2795726d5f71c4f4e70653273d1c23a8182f07dd8e48c12de5d867bfb7557eed"}, +] + [[package]] name = "setuptools" -version = "67.8.0" +version = "71.1.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-67.8.0-py3-none-any.whl", hash = 
"sha256:5df61bf30bb10c6f756eb19e7c9f3b473051f48db77fddbe06ff2ca307df9a6f"}, - {file = "setuptools-67.8.0.tar.gz", hash = "sha256:62642358adc77ffa87233bc4d2354c4b2682d214048f500964dbe760ccedf102"}, + {file = "setuptools-71.1.0-py3-none-any.whl", hash = "sha256:33874fdc59b3188304b2e7c80d9029097ea31627180896fb549c578ceb8a0855"}, + {file = "setuptools-71.1.0.tar.gz", hash = "sha256:032d42ee9fb536e33087fb66cac5f840eb9391ed05637b3f2a76a7c8fb477936"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.text (>=3.7)", "more-itertools (>=8.8)", "ordered-set (>=3.1.1)", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +test = ["build[virtualenv] (>=1.0.3)", 
"filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "mypy (==1.11.*)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (<0.4)", "pytest-ruff (>=0.2.1)", "pytest-ruff (>=0.3.2)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] [[package]] name = "six" @@ -1093,26 +1084,26 @@ files = [ [[package]] name = "sphinx" -version = "5.3.0" +version = "7.1.2" description = "Python documentation generator" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, - {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, + {file = "sphinx-7.1.2-py3-none-any.whl", hash = "sha256:d170a81825b2fcacb6dfd5a0d7f578a053e45d3f2b153fecc948c37344eb4cbe"}, + {file = "sphinx-7.1.2.tar.gz", hash = "sha256:780f4d32f1d7d1126576e0e5ecc19dc32ab76cd24e950228dcf7b1f6d3d9e22f"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=2.9" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" +docutils = ">=0.18.1,<0.21" imagesize = ">=1.3" importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" +Pygments = ">=2.13" +requests = ">=2.25.0" snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" @@ -1123,52 +1114,52 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", 
"flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-autodoc-typehints" -version = "1.23.0" +version = "2.0.1" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "sphinx_autodoc_typehints-1.23.0-py3-none-any.whl", hash = "sha256:ac099057e66b09e51b698058ba7dd76e57e1fe696cd91b54e121d3dad188f91d"}, - {file = "sphinx_autodoc_typehints-1.23.0.tar.gz", hash = "sha256:5d44e2996633cdada499b6d27a496ddf9dbc95dd1f0f09f7b37940249e61f6e9"}, + {file = "sphinx_autodoc_typehints-2.0.1-py3-none-any.whl", hash = "sha256:f73ae89b43a799e587e39266672c1075b2ef783aeb382d3ebed77c38a3fc0149"}, + {file = "sphinx_autodoc_typehints-2.0.1.tar.gz", hash = "sha256:60ed1e3b2c970acc0aa6e877be42d48029a9faec7378a17838716cacd8c10b12"}, ] [package.dependencies] -sphinx = ">=5.3" +sphinx = ">=7.1.2" [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23.4)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "nptyping (>=2.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.5)"] -type-comment = ["typed-ast (>=1.5.4)"] +docs = ["furo (>=2024.1.29)"] +numpy = ["nptyping (>=2.5)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.4.2)", "diff-cover (>=8.0.3)", "pytest (>=8.0.1)", "pytest-cov (>=4.1)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.9)"] [[package]] name = "sphinxcontrib-apidoc" -version = "0.3.0" +version = "0.5.0" description = "A Sphinx extension for running 'sphinx-apidoc' on each build" 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "sphinxcontrib-apidoc-0.3.0.tar.gz", hash = "sha256:729bf592cf7b7dd57c4c05794f732dc026127275d785c2a5494521fdde773fb9"}, - {file = "sphinxcontrib_apidoc-0.3.0-py2.py3-none-any.whl", hash = "sha256:6671a46b2c6c5b0dca3d8a147849d159065e50443df79614f921b42fbd15cb09"}, + {file = "sphinxcontrib-apidoc-0.5.0.tar.gz", hash = "sha256:65efcd92212a5f823715fb95ee098b458a6bb09a5ee617d9ed3dead97177cd55"}, + {file = "sphinxcontrib_apidoc-0.5.0-py3-none-any.whl", hash = "sha256:c671d644d6dc468be91b813dcddf74d87893bff74fe8f1b8b01b69408f0fb776"}, ] [package.dependencies] pbr = "*" -Sphinx = ">=1.6.0" +Sphinx = ">=5.0.0" [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, ] [package.extras] @@ -1192,13 +1183,13 @@ test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.0" +version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = 
"sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, ] [package.extras] @@ -1249,17 +1240,6 @@ files = [ lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - [[package]] name = "tomli" version = "2.0.1" @@ -1273,41 +1253,42 @@ files = [ [[package]] name = "types-setuptools" -version = "67.8.0.0" +version = "71.1.0.20240723" description = "Typing stubs for setuptools" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-setuptools-67.8.0.0.tar.gz", hash = "sha256:95c9ed61871d6c0e258433373a4e1753c0a7c3627a46f4d4058c7b5a08ab844f"}, - {file = "types_setuptools-67.8.0.0-py3-none-any.whl", hash = "sha256:6df73340d96b238a4188b7b7668814b37e8018168aef1eef94a3b1872e3f60ff"}, + {file = "types-setuptools-71.1.0.20240723.tar.gz", hash = "sha256:8a9349038c7e22d88e6c5d9c6705b347b22930424114a452c1712899e85131ff"}, + {file = "types_setuptools-71.1.0.20240723-py3-none-any.whl", hash = "sha256:ac9fc263f59d1e02bca49cb7270a12c47ab80b3b911fb4d92f1fecf978bfe88a"}, ] [[package]] name = "typing-extensions" -version = "4.6.3" -description = "Backported and 
Experimental Type Hints for Python 3.7+" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.6.3-py3-none-any.whl", hash = "sha256:88a4153d8505aabbb4e13aacb7c486c2b4a33ca3b3f807914a9b4c844c471c26"}, - {file = "typing_extensions-4.6.3.tar.gz", hash = "sha256:d91d5919357fe7f681a9f2b5b4cb2a5f1ef0a1e9f59c4d8ff0d3491e05c0ffd5"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" -version = "1.26.15" +version = "2.0.4" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +python-versions = ">=3.7" files = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, + {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, + {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"] +socks = ["pysocks 
(>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "webencodings" @@ -1320,20 +1301,34 @@ files = [ {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] +[[package]] +name = "wheel" +version = "0.43.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "wheel-0.43.0-py3-none-any.whl", hash = "sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81"}, + {file = "wheel-0.43.0.tar.gz", hash = "sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + [[package]] name = "zipp" -version = "3.15.0" +version = "3.16.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, - {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, + {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, + {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "jaraco.functools", 
"jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] [extras] berkeleydb = ["berkeleydb"] @@ -1344,4 +1339,4 @@ networkx = ["networkx"] [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "1c56b77bc9381ed73f90bdc11243e8fae40be3fe06aec26c74eef94937698017" +content-hash = "7514432973368065fa5482d533d74c83b54642cd6f4e8598c0ec7af28bf2ced9" diff --git a/pyproject.toml b/pyproject.toml index 0d7d4c8c6..4ca58a019 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "rdflib" -version = "7.0.0a0" +version = "7.1.0a0" description = """RDFLib is a Python library for working with RDF, \ a simple yet powerful language for representing information.""" authors = ["Daniel 'eikeon' Krech "] @@ -42,34 +42,32 @@ python = "^3.8.1" isodate = "^0.6.0" pyparsing = ">=2.1.0,<4" berkeleydb = {version = "^18.1.0", optional = true} -networkx = {version = "^2.0.0", optional = true} +networkx = {version = ">=2,<4", optional = true} html5lib = {version = "^1.0", optional = true} -lxml = {version = "^4.3.0", optional = true} +lxml = {version = ">=4.3,<6.0", optional = true} [tool.poetry.group.dev.dependencies] -black = "23.3.0" -isort = "^5.10.0" +black = "24.4.2" mypy = "^1.1.0" -lxml-stubs = "^0.4.0" +lxml-stubs = ">=0.4,<0.6" [tool.poetry.group.tests.dependencies] -pytest = "^7.1.3" -pytest-cov = "^4.0.0" +pytest = ">=7.1.3,<9.0.0" +pytest-cov = ">=4,<6" coverage = {version = "^7.0.1", extras = ["toml"]} -types-setuptools = ">=65.6.0.3,<68.0.0.0" -setuptools = ">=65.6.3,<68.0.0" +types-setuptools = ">=68.0.0.3,<72.0.0.0" +setuptools = ">=68,<72" +wheel = ">=0.42,<0.44" [tool.poetry.group.docs.dependencies] -sphinx = "^5.3.0" -myst-parser = "^1.0.0" -sphinxcontrib-apidoc = "^0.3.0" -sphinx-autodoc-typehints = "^1.17.1" +sphinx = ">=7.1.2,<8" +myst-parser = ">=2,<4" 
+sphinxcontrib-apidoc = ">=0.3,<0.6" +sphinx-autodoc-typehints = ">=1.25.3,<=2.0.1" typing-extensions = "^4.5.0" -[tool.poetry.group.flake8.dependencies] -flake8 = {version = ">=4.0.1"} # flakeheaven is incompatible with flake8 >=5.0 (https://github.com/flakeheaven/flakeheaven/issues/132) -flakeheaven = {version = "^3.2.1"} -pep8-naming = {version = "^0.13.2"} +[tool.poetry.group.lint.dependencies] +ruff = ">=0.0.286,<0.5.5" [tool.poetry.extras] berkeleydb = ["berkeleydb"] @@ -81,48 +79,91 @@ lxml = ["lxml"] requires = ["poetry-core>=1.4.0"] build-backend = "poetry.core.masonry.api" -[tool.flakeheaven] -format = "grouped" -baseline = ".flakeheaven.baseline" - -[tool.flakeheaven.plugins] -pycodestyle = [ - "+*", - # mirrored from setup.cfg - "-E501", - "-E203", - "-W503", - "-E231", -] -pyflakes = [ - "+*", -] -pep8-naming = ["+*"] - -[tool.flakeheaven.exceptions."rdflib/plugins/sparql/*"] -pep8-naming = ["-N802", "-N803", "-N806", "-N812", "-N816", "-N801"] -[tool.flakeheaven.exceptions."rdflib/namespace/_*"] -pep8-naming = ["-N815"] -[tool.flakeheaven.exceptions."rdflib/plugins/parsers/notation3.py"] -pep8-naming = ["-N802", "-N803", "-N806", "-N816"] -[tool.flakeheaven.exceptions."rdflib/plugins/serializers/turtle.py"] -pep8-naming = ["-N802", "-N806", "-N815"] -[tool.flakeheaven.exceptions."rdflib/__init__.py"] -pycodestyle = ["-E402"] -[tool.flakeheaven.exceptions."test/utils/namespace/_*"] -pep8-naming = ["-N815"] -[tool.flakeheaven.exceptions."rdflib/plugins/parsers/rdfxml.py"] -pep8-naming = ["-N802"] -[tool.flakeheaven.exceptions."rdflib/plugins/parsers/trix.py"] -pep8-naming = ["-N802"] -[tool.flakeheaven.exceptions."docs/*.rst"] -pyflakes = ["-F821"] +[tool.ruff] +# https://beta.ruff.rs/docs/configuration/ +target-version = "py38" +# Same as Black. 
+line-length = 88 +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # Pyflakes + "I", # isort + "N", # pep8-naming + "RUF100", # Unused noqa directive + "UP001", # Remove unused imports + "UP003", # Use {} instead of type(...) + "UP004", # Class inherits from object + "UP005", # is deprecated, use + "UP009", # UTF-8 encoding declaration is unnecessary + "UP010", # Unnecessary __future__ import for target Python version + "UP011", # Unnecessary parentheses to functools.lru_cache + "UP012", # Unnecessary call to encode as UTF-8 + "UP017", # Use datetime.UTC alias + "UP018", # Unnecessary {literal_type} call (rewrite as a literal) + "UP019", # typing.Text is deprecated, use str + "UP020", # Use builtin open + "UP021", # universal_newlines is deprecated, use text + "UP022", # Sending stdout and stderr to PIPE is deprecated, use capture_output + "UP023", # cElementTree is deprecated, use ElementTree + "UP024", # Replace aliased errors with OSError + "UP025", # Remove unicode literals from strings + "UP026", # mock is deprecated, use unittest.mock + "UP029", # Unnecessary builtin import + "UP034", # Avoid extraneous parentheses + "UP035", # Import from {target} instead: + "UP036", # Version block is outdated for minimum Python version + "UP037", # Remove quotes from type annotation + "UP039", # Unnecessary parentheses after class definition + "FA", # flake8-future-annotations +] + +ignore = [ + "E501", # line too long: + # Disabled based on black recommendations + # https://black.readthedocs.io/en/stable/faq.html#why-are-flake8-s-e203-and-w503-violated + "E203", # whitespace before ':' + "E231", # missing whitespace after ',' +] + +[tool.ruff.lint.per-file-ignores] +"rdflib/plugins/sparql/*" = [ + "N801", # Class name should be UpperCamelCase + "N802", # Function name should be lowercase + "N803", # Argument name should be lowercase + "N806", # Variable in function should be lowercase + "N812", # Lowercase imported as non 
lowercase + "N816", # Variable in class scope should be mixedCase +] +"rdflib/namespace/_*" = [ + "N815", # Variable in class scope should not be mixedCase + "N999", # Invalid module name +] +"rdflib/plugins/parsers/{trix,rdfxml,notation3}.py" = [ + "N802", # Function name should be lowercase + "N803", # Argument name should be lowercase + "N806", # Variable in function should be lowercase + "N816", # Variable in class scope should be mixedCase +] +"rdflib/plugins/serializers/{turtle,longturtle,trig}.py" = [ + "N802", # Function name should be lowercase + "N806", # Variable in function should be lowercase + "N815", # Variable in class scope should not be mixedCase +] +"test/utils/namespace/_*" = [ + "N815", # Variable in class scope should not be mixedCase + "N999", # Invalid module name +] +"{test/conftest.py,rdflib/namespace/__init__.py,rdflib/__init__.py,rdflib/plugins/sparql/__init__.py}" = [ + "E402", # Module level import not at top of file +] [tool.black] -required-version = "23.3.0" line-length = "88" -target-version = ['py37'] +target-version = ['py38'] +required-version = "24.4.2" include = '\.pyi?$' exclude = ''' ( @@ -159,7 +200,6 @@ addopts = [ "--doctest-glob=docs/*.rst", "--strict-markers", ] -doctest_optionflags = "ALLOW_UNICODE" filterwarnings = [ # The below warning is a consequence of how pytest doctest detects mocks and how DefinedNamespace behaves when an undefined attribute is being accessed. "ignore:Code. pytest_mock_example_attribute_that_shouldnt_exist is not defined in namespace .*:UserWarning", @@ -214,12 +254,11 @@ warn_unused_ignores = true no_implicit_optional = false implicit_reexport = false + [[tool.mypy.overrides]] -module = "pyparsing.*" -# This is here because of an upstream issue with pyparsing: -# https://github.com/pyparsing/pyparsing/issues/385 -# Once the issue is fixed this should be removed. 
-follow_imports = "skip" +module = "rdflib.*" +check_untyped_defs = true + [tool.coverage.run] branch = true diff --git a/rdflib/__init__.py b/rdflib/__init__.py index 4677e0a95..3381e191a 100644 --- a/rdflib/__init__.py +++ b/rdflib/__init__.py @@ -42,6 +42,7 @@ True """ + import logging import sys from importlib import metadata @@ -51,7 +52,7 @@ __docformat__ = "restructuredtext en" __version__: str = _DISTRIBUTION_METADATA["Version"] -__date__ = "2023-03-26" +__date__ = "2023-08-02" __all__ = [ "URIRef", @@ -92,6 +93,7 @@ "util", "plugin", "query", + "NORMALIZE_LITERALS", ] logger = logging.getLogger(__name__) @@ -196,4 +198,4 @@ from rdflib.term import BNode, IdentifiedNode, Literal, URIRef, Variable from rdflib import plugin, query, util # isort:skip -from rdflib.container import * # isort:skip # noqa:F401,F403 +from rdflib.container import * # isort:skip # noqa: F403 diff --git a/rdflib/_type_checking.py b/rdflib/_type_checking.py index c9e0202ea..1bbeda134 100644 --- a/rdflib/_type_checking.py +++ b/rdflib/_type_checking.py @@ -23,4 +23,4 @@ from typing import Literal as PyLiteral _NamespaceSetString = PyLiteral["core", "rdflib", "none"] -_MulPathMod = PyLiteral["*", "+", "?"] # noqa: F722 +_MulPathMod = PyLiteral["*", "+", "?"] diff --git a/rdflib/collection.py b/rdflib/collection.py index fd64ab20b..2afc7f279 100644 --- a/rdflib/collection.py +++ b/rdflib/collection.py @@ -12,7 +12,7 @@ class Collection: - __doc__ = """ + """ See "Emulating container types": https://docs.python.org/reference/datamodel.html#emulating-container-types @@ -37,9 +37,9 @@ class Collection: )> >>> c = Collection(g,listname) >>> pprint([term.n3() for term in c]) - [u'"1"^^', - u'"2"^^', - u'"3"^^'] + ['"1"^^', + '"2"^^', + '"3"^^'] >>> Literal(1) in c True @@ -82,8 +82,7 @@ def n3(self) -> str: "2"^^ "3"^^ ) """ - # type error: "Node" has no attribute "n3" - return "( %s )" % (" ".join([i.n3() for i in self])) # type: ignore[attr-defined] + return "( %s )" % (" ".join([i.n3() for 
i in self])) def _get_container(self, index: int) -> Optional[Node]: """Gets the first, rest holding node at index.""" diff --git a/rdflib/compare.py b/rdflib/compare.py index 30f52d973..afc2c40b5 100644 --- a/rdflib/compare.py +++ b/rdflib/compare.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ A collection of utilities for canonicalizing and inspecting graphs. @@ -73,7 +72,8 @@ _:cb558f30e21ddfc05ca53108348338ade8 "B" . """ -from __future__ import absolute_import, division, print_function + +from __future__ import annotations # TODO: # - Doesn't handle quads. @@ -252,7 +252,7 @@ def stringify(x): self._hash_cache[color] = val return val - def distinguish(self, W: "Color", graph: Graph): + def distinguish(self, W: Color, graph: Graph): # noqa: N803 colors: Dict[str, Color] = {} for n in self.nodes: new_color: Tuple[ColorItem, ...] = list(self.color) # type: ignore[assignment] @@ -352,7 +352,7 @@ def _refine(self, coloring: List[Color], sequence: List[Color]) -> List[Color]: sequence = sorted(sequence, key=lambda x: x.key(), reverse=True) coloring = coloring[:] while len(sequence) > 0 and not self._discrete(coloring): - W = sequence.pop() + W = sequence.pop() # noqa: N806 for c in coloring[:]: if len(c.nodes) > 1 or isinstance(c.nodes[0], BNode): colors = sorted( @@ -522,7 +522,7 @@ def canonical_triples(self, stats: Optional[Stats] = None): def _canonicalize_bnodes( self, - triple: "_TripleType", + triple: _TripleType, labels: Dict[Node, str], ): for term in triple: diff --git a/rdflib/compat.py b/rdflib/compat.py index 1cc4adacd..ddb55eb0b 100644 --- a/rdflib/compat.py +++ b/rdflib/compat.py @@ -3,6 +3,8 @@ and different versions of support libraries. 
""" +from __future__ import annotations + import codecs import re import warnings @@ -20,7 +22,8 @@ def ascii(stream): def bopen(*args, **kwargs): - return open(*args, mode="rb", **kwargs) + # type error: No overload variant of "open" matches argument types "Tuple[Any, ...]", "str", "Dict[str, Any]" + return open(*args, mode="rb", **kwargs) # type: ignore[call-overload] long_type = int @@ -34,14 +37,14 @@ def sign(n): return 0 -r_unicodeEscape = re.compile(r"(\\u[0-9A-Fa-f]{4}|\\U[0-9A-Fa-f]{8})") +r_unicodeEscape = re.compile(r"(\\u[0-9A-Fa-f]{4}|\\U[0-9A-Fa-f]{8})") # noqa: N816 -def _unicodeExpand(s): +def _unicodeExpand(s): # noqa: N802 return r_unicodeEscape.sub(lambda m: chr(int(m.group(0)[2:], 16)), s) -def decodeStringEscape(s): +def decodeStringEscape(s): # noqa: N802 warnings.warn( DeprecationWarning( "rdflib.compat.decodeStringEscape() is deprecated, " @@ -92,7 +95,7 @@ def _turtle_escape_subber(match: Match[str]) -> str: ) -def decodeUnicodeEscape(escaped: str) -> str: +def decodeUnicodeEscape(escaped: str) -> str: # noqa: N802 if "\\" not in escaped: # Most of times, there are no backslashes in strings. return escaped diff --git a/rdflib/container.py b/rdflib/container.py index 56554df04..6ee92848b 100644 --- a/rdflib/container.py +++ b/rdflib/container.py @@ -1,3 +1,4 @@ +import warnings from random import randint from rdflib.namespace import RDF @@ -82,6 +83,15 @@ def __len__(self): return self._len def type_of_conatiner(self): + warnings.warn( + "rdflib.container.Container.type_of_conatiner is deprecated. 
" + "Use type_of_container method instead.", + DeprecationWarning, + stacklevel=2, + ) + return self._rtype + + def type_of_container(self): return self._rtype def index(self, item): @@ -260,7 +270,7 @@ def add_at_position(self, pos, item): return self -class NoElementException(Exception): +class NoElementException(Exception): # noqa: N818 def __init__(self, message="rdf:Alt Container is empty"): self.message = message diff --git a/rdflib/events.py b/rdflib/events.py index d0290d5cd..61f3454b6 100644 --- a/rdflib/events.py +++ b/rdflib/events.py @@ -1,4 +1,4 @@ -__doc__ = """ +""" Dirt Simple Events A Dispatcher (or a subclass of Dispatcher) stores event handlers that @@ -23,6 +23,10 @@ """ +from __future__ import annotations + +from typing import Any, Dict, Optional + __all__ = ["Event", "Dispatcher"] @@ -53,9 +57,9 @@ class Dispatcher: subscribers. """ - _dispatch_map = None + _dispatch_map: Optional[Dict[Any, Any]] = None - def set_map(self, amap): + def set_map(self, amap: Dict[Any, Any]): self._dispatch_map = amap return self @@ -68,12 +72,14 @@ def subscribe(self, event_type, handler): """ if self._dispatch_map is None: self.set_map({}) - lst = self._dispatch_map.get(event_type, None) + # type error: error: Item "None" of "Optional[Dict[Any, Any]]" has no attribute "get" + lst = self._dispatch_map.get(event_type, None) # type: ignore[union-attr] if lst is None: lst = [handler] else: lst.append(handler) - self._dispatch_map[event_type] = lst + # type error: Unsupported target for indexed assignment ("Optional[Dict[Any, Any]]") + self._dispatch_map[event_type] = lst # type: ignore[index] return self def dispatch(self, event): diff --git a/rdflib/exceptions.py b/rdflib/exceptions.py index 708756ef6..cbe68fb98 100644 --- a/rdflib/exceptions.py +++ b/rdflib/exceptions.py @@ -2,6 +2,8 @@ TODO: """ +from __future__ import annotations + __all__ = [ "Error", "ParserError", diff --git a/rdflib/extras/__init__.py b/rdflib/extras/__init__.py index 40a96afc6..e69de29bb 
100644 --- a/rdflib/extras/__init__.py +++ b/rdflib/extras/__init__.py @@ -1 +0,0 @@ -# -*- coding: utf-8 -*- diff --git a/rdflib/extras/cmdlineutils.py b/rdflib/extras/cmdlineutils.py index 1d82d836f..85fd85fbc 100644 --- a/rdflib/extras/cmdlineutils.py +++ b/rdflib/extras/cmdlineutils.py @@ -1,7 +1,10 @@ +from __future__ import annotations + import codecs import getopt import sys import time +from typing import TextIO, Union import rdflib from rdflib.util import guess_format @@ -40,6 +43,7 @@ def main(target, _help=_help, options="", stdin=True): else: f = None + out: Union[TextIO, codecs.StreamReaderWriter] if "-o" in dargs: sys.stderr.write("Output to %s\n" % dargs["-o"]) out = codecs.open(dargs["-o"], "w", "utf-8") diff --git a/rdflib/extras/describer.py b/rdflib/extras/describer.py index 023970555..f0df70675 100644 --- a/rdflib/extras/describer.py +++ b/rdflib/extras/describer.py @@ -1,7 +1,4 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -__doc__ = """ +""" A Describer is a stateful utility for creating RDF statements in a semi-declarative manner. It has methods for creating literal values, rel and rev resource relations (somewhat resembling RDFa). @@ -22,15 +19,15 @@ >>> >>> class Person: ... def __init__(self): - ... self.first_name = u"Some" - ... self.last_name = u"Body" + ... self.first_name = "Some" + ... self.last_name = "Body" ... self.username = "some1" - ... self.presentation = u"Just a Python & RDF hacker." + ... self.presentation = "Just a Python & RDF hacker." ... self.image = "/images/persons/" + self.username + ".jpg" ... self.site = "http://example.net/" ... self.start_date = datetime.date(2009, 9, 4) ... def get_full_name(self): - ... return u" ".join([self.first_name, self.last_name]) + ... return " ".join([self.first_name, self.last_name]) ... def get_absolute_url(self): ... return "/persons/" + self.username ... def get_thumbnail_url(self): @@ -133,7 +130,7 @@ def about(self, subject, **kws): rdflib.term.BNode(...) 
>>> d.about("http://example.org/") >>> d._current() - rdflib.term.URIRef(u'http://example.org/') + rdflib.term.URIRef('http://example.org/') """ kws.setdefault("base", self.base) @@ -155,7 +152,7 @@ def value(self, p, v, **kws): >>> d = Describer(about="http://example.org/") >>> d.value(RDFS.label, "Example") >>> d.graph.value(URIRef('http://example.org/'), RDFS.label) - rdflib.term.Literal(u'Example') + rdflib.term.Literal('Example') """ v = cast_value(v, **kws) @@ -176,7 +173,7 @@ def rel(self, p, o=None, **kws): >>> d = Describer(about="/", base="http://example.org/") >>> _ctxt = d.rel(RDFS.seeAlso, "/about") >>> d.graph.value(URIRef('http://example.org/'), RDFS.seeAlso) - rdflib.term.URIRef(u'http://example.org/about') + rdflib.term.URIRef('http://example.org/about') >>> with d.rel(RDFS.seeAlso, "/more"): ... d.value(RDFS.label, "More") @@ -184,7 +181,7 @@ def rel(self, p, o=None, **kws): ... URIRef('http://example.org/more')) in d.graph True >>> d.graph.value(URIRef('http://example.org/more'), RDFS.label) - rdflib.term.Literal(u'More') + rdflib.term.Literal('More') """ @@ -211,7 +208,7 @@ def rev(self, p, s=None, **kws): ... URIRef('http://example.org/')) in d.graph True >>> d.graph.value(URIRef('http://example.net/'), RDFS.label) - rdflib.term.Literal(u'Net') + rdflib.term.Literal('Net') """ kws.setdefault("base", self.base) diff --git a/rdflib/extras/external_graph_libs.py b/rdflib/extras/external_graph_libs.py index f50994b5b..7a8050f46 100644 --- a/rdflib/extras/external_graph_libs.py +++ b/rdflib/extras/external_graph_libs.py @@ -1,7 +1,3 @@ -#!/usr/bin/env python -# encoding: utf-8 -from __future__ import annotations - """Convert (to and) from rdflib graphs to other well known graph libraries. 
Currently the following libraries are supported: @@ -13,12 +9,15 @@ see ../../test/test_extras_external_graph_libs.py for conditional tests """ +from __future__ import annotations + import logging from typing import TYPE_CHECKING, Any, Dict, List if TYPE_CHECKING: from rdflib.graph import Graph + logger = logging.getLogger(__name__) @@ -256,11 +255,11 @@ def rdflib_to_networkx_graph( def rdflib_to_graphtool( graph: Graph, - v_prop_names: List[str] = [str("term")], - e_prop_names: List[str] = [str("term")], - transform_s=lambda s, p, o: {str("term"): s}, - transform_p=lambda s, p, o: {str("term"): p}, - transform_o=lambda s, p, o: {str("term"): o}, + v_prop_names: List[str] = ["term"], + e_prop_names: List[str] = ["term"], + transform_s=lambda s, p, o: {"term": s}, + transform_p=lambda s, p, o: {"term": p}, + transform_o=lambda s, p, o: {"term": o}, ): """Converts the given graph into a graph_tool.Graph(). diff --git a/rdflib/extras/infixowl.py b/rdflib/extras/infixowl.py index dadc6324e..b80fb0c16 100644 --- a/rdflib/extras/infixowl.py +++ b/rdflib/extras/infixowl.py @@ -1,6 +1,4 @@ -# -*- coding: utf-8 -*- - -__doc__ = """RDFLib Python binding for OWL Abstract Syntax +"""RDFLib Python binding for OWL Abstract Syntax OWL Constructor DL Syntax Manchester OWL Syntax Example ==================================================================================== @@ -115,11 +113,14 @@ """ +from __future__ import annotations + import itertools import logging +from typing import Iterable, Union from rdflib.collection import Collection -from rdflib.graph import Graph +from rdflib.graph import Graph, _ObjectType from rdflib.namespace import OWL, RDF, RDFS, XSD, Namespace, NamespaceManager from rdflib.term import BNode, Identifier, Literal, URIRef, Variable from rdflib.util import first @@ -434,11 +435,11 @@ def replace(self, other): self.graph.add((s, p, classOrIdentifier(other))) self.delete() - def _get_type(self): + def _get_type(self) -> Iterable[_ObjectType]: for _t in 
self.graph.objects(subject=self.identifier, predicate=RDF.type): yield _t - def _set_type(self, kind): + def _set_type(self, kind: Union[Individual, Identifier, Iterable[_ObjectType]]): if not kind: return if isinstance(kind, (Individual, Identifier)): @@ -464,10 +465,10 @@ def _delete_type(self): type = property(_get_type, _set_type, _delete_type) - def _get_identifier(self): + def _get_identifier(self) -> Identifier: return self.__identifier - def _set_identifier(self, i): + def _set_identifier(self, i: Identifier): assert i if i != self.__identifier: oldstatements_out = [ @@ -494,11 +495,13 @@ def _set_identifier(self, i): identifier = property(_get_identifier, _set_identifier) - def _get_sameAs(self): # noqa: N802 + def _get_sameAs(self) -> Iterable[_ObjectType]: # noqa: N802 for _t in self.graph.objects(subject=self.identifier, predicate=OWL.sameAs): yield _t - def _set_sameAs(self, term): # noqa: N802 + def _set_sameAs( # noqa: N802 + self, term: Union[Individual, Identifier, Iterable[_ObjectType]] + ): # if not kind: # return if isinstance(term, (Individual, Identifier)): @@ -928,7 +931,7 @@ def deepClearIfBNode(_class): # noqa: N802 ) -class MalformedClass(ValueError): +class MalformedClass(ValueError): # noqa: N818 """ .. deprecated:: TODO-NEXT-VERSION This class will be removed in version ``7.0.0``. 
@@ -1044,15 +1047,15 @@ def __init__( self, identifier=None, subClassOf=None, # noqa: N803 - equivalentClass=None, - disjointWith=None, - complementOf=None, + equivalentClass=None, # noqa: N803 + disjointWith=None, # noqa: N803 + complementOf=None, # noqa: N803 graph=None, - skipOWLClassMembership=False, + skipOWLClassMembership=False, # noqa: N803 comment=None, - nounAnnotations=None, - nameAnnotation=None, - nameIsLabel=False, + nounAnnotations=None, # noqa: N803 + nameAnnotation=None, # noqa: N803 + nameIsLabel=False, # noqa: N803 ): super(Class, self).__init__(identifier, graph, nameAnnotation, nameIsLabel) @@ -1307,7 +1310,8 @@ def isPrimitive(self): # noqa: N802 # sc = list(self.subClassOf) ec = list(self.equivalentClass) for _boolclass, p, rdf_list in self.graph.triples_choices( - (self.identifier, [OWL.intersectionOf, OWL.unionOf], None) + # type error: Argument 1 to "triples_choices" of "Graph" has incompatible type "Tuple[Any, List[URIRef], None]"; expected "Union[Tuple[List[Node], Node, Node], Tuple[Node, List[Node], Node], Tuple[Node, Node, List[Node]]]" + (self.identifier, [OWL.intersectionOf, OWL.unionOf], None) # type: ignore[arg-type] ): ec.append(manchesterSyntax(rdf_list, self.graph, boolean=p)) for _e in ec: @@ -1325,7 +1329,10 @@ def subSumpteeIds(self): # noqa: N802 # predicate=RDFS.subClassOf,object=self.identifier): # yield Class(s,skipOWLClassMembership=True) - def __repr__(self, full=False, normalization=True): + def __repr__(self): + return self.manchesterClass(full=False, normalization=True) + + def manchesterClass(self, full=False, normalization=True): # noqa: N802 """ Returns the Manchester Syntax equivalent for this class """ @@ -1333,7 +1340,8 @@ def __repr__(self, full=False, normalization=True): sc = list(self.subClassOf) ec = list(self.equivalentClass) for _boolclass, p, rdf_list in self.graph.triples_choices( - (self.identifier, [OWL.intersectionOf, OWL.unionOf], None) + # type error: Argument 1 to "triples_choices" of "Graph" has 
incompatible type "Tuple[Any, List[URIRef], None]"; expected "Union[Tuple[List[Node], Node, Node], Tuple[Node, List[Node], Node], Tuple[Node, Node, List[Node]]]" + (self.identifier, [OWL.intersectionOf, OWL.unionOf], None) # type: ignore[arg-type] ): ec.append(manchesterSyntax(rdf_list, self.graph, boolean=p)) dc = list(self.disjointWith) @@ -1342,7 +1350,9 @@ def __repr__(self, full=False, normalization=True): dc.append(c) klasskind = "" label = list(self.graph.objects(self.identifier, RDFS.label)) - label = label and "(" + label[0] + ")" or "" + # type error: Incompatible types in assignment (expression has type "str", variable has type "List[Node]") + # type error: Unsupported operand types for + ("str" and "Node") + label = label and "(" + label[0] + ")" or "" # type: ignore[assignment, operator] if sc: if full: scjoin = "\n " @@ -1352,7 +1362,7 @@ def __repr__(self, full=False, normalization=True): isinstance(s, Class) and isinstance(self.identifier, BNode) and repr(CastClass(s, self.graph)) - or # noqa: W504 + or # repr(BooleanClass(classOrIdentifier(s), # operator=None, # graph=self.graph)) or @@ -1423,7 +1433,9 @@ def __init__(self, rdf_list, members=None, graph=None): self._rdfList = Collection( self.graph, BNode(), [classOrIdentifier(m) for m in members] ) - self.graph.add((self.identifier, self._operator, self._rdfList.uri)) + # type error: "OWLRDFListProxy" has no attribute "identifier" + # type error: "OWLRDFListProxy" has no attribute "_operator" + self.graph.add((self.identifier, self._operator, self._rdfList.uri)) # type: ignore[attr-defined] def __eq__(self, other): """ @@ -1441,7 +1453,8 @@ def __eq__(self, other): return False return True else: - return self.identifier == other.identifier + # type error: "OWLRDFListProxy" has no attribute "identifier" + return self.identifier == other.identifier # type: ignore[attr-defined] # Redirect python list accessors to the underlying Collection instance def __len__(self): @@ -1736,12 +1749,12 @@ def 
__init__( self, onProperty, # noqa: N803 graph=None, - allValuesFrom=None, - someValuesFrom=None, + allValuesFrom=None, # noqa: N803 + someValuesFrom=None, # noqa: N803 value=None, cardinality=None, - maxCardinality=None, - minCardinality=None, + maxCardinality=None, # noqa: N803 + minCardinality=None, # noqa: N803 identifier=None, ): graph = Graph() if graph is None else graph @@ -1779,8 +1792,10 @@ def __init__( elif isinstance(restriction_range, Class): self.restrictionRange = classOrIdentifier(restriction_range) else: - self.restrictionRange = first( - self.graph.objects(self.identifier, restriction_type) + # error: Incompatible types in assignment (expression has type "Optional[Identifier]", variable has type "Identifier") + self.restrictionRange = first( # type: ignore[assignment] + # type error: Argument 1 to "first" has incompatible type "Generator[Node, None, None]"; expected "Iterable[Identifier]" + self.graph.objects(self.identifier, restriction_type) # type: ignore[arg-type] ) if ( self.identifier, @@ -1836,7 +1851,8 @@ def __eq__(self, other): if isinstance(other, Restriction): return ( other.onProperty == self.onProperty - and other.restriction_range == self.restrictionRange + # type error: "Restriction" has no attribute "restriction_range"; maybe "restrictionRange"? 
+ and other.restriction_range == self.restrictionRange # type: ignore[attr-defined] ) else: return False @@ -2001,9 +2017,11 @@ def _del_mincardinality(self): def restrictionKind(self): # noqa: N802 for s, p, o in self.graph.triples_choices( - (self.identifier, self.restrictionKinds, None) + # type error: Argument 1 to "triples_choices" of "Graph" has incompatible type "Tuple[Any, List[URIRef], None]"; expected "Union[Tuple[List[Node], Node, Node], Tuple[Node, List[Node], Node], Tuple[Node, Node, List[Node]]]" + (self.identifier, self.restrictionKinds, None) # type: ignore[arg-type] ): - return p.split(str(OWL))[-1] + # type error: "Node" has no attribute "split" + return p.split(str(OWL))[-1] # type: ignore[attr-defined] return None def __repr__(self): @@ -2098,16 +2116,16 @@ def __init__( identifier=None, graph=None, baseType=OWL.ObjectProperty, # noqa: N803 - subPropertyOf=None, + subPropertyOf=None, # noqa: N803 domain=None, range=None, - inverseOf=None, - otherType=None, - equivalentProperty=None, + inverseOf=None, # noqa: N803 + otherType=None, # noqa: N803 + equivalentProperty=None, # noqa: N803 comment=None, - verbAnnotations=None, - nameAnnotation=None, - nameIsLabel=False, + verbAnnotations=None, # noqa: N803 + nameAnnotation=None, # noqa: N803 + nameIsLabel=False, # noqa: N803 ): super(Property, self).__init__(identifier, graph, nameAnnotation, nameIsLabel) if verbAnnotations: @@ -2157,9 +2175,11 @@ def __repr__(self): % (self.qname, first(self.comment) and first(self.comment) or "") ) if first(self.inverseOf): - two_link_inverse = first(first(self.inverseOf).inverseOf) + # type error: Item "None" of "Optional[Any]" has no attribute "inverseOf" + two_link_inverse = first(first(self.inverseOf).inverseOf) # type: ignore[union-attr] if two_link_inverse and two_link_inverse.identifier == self.identifier: - inverserepr = first(self.inverseOf).qname + # type error: Item "None" of "Optional[Any]" has no attribute "qname" + inverserepr = 
first(self.inverseOf).qname # type: ignore[union-attr] else: inverserepr = repr(first(self.inverseOf)) rt.append( @@ -2170,7 +2190,8 @@ def __repr__(self): ) ) for _s, _p, roletype in self.graph.triples_choices( - ( + # type error: Argument 1 to "triples_choices" of "Graph" has incompatible type "Tuple[Any, URIRef, List[URIRef]]"; expected "Union[Tuple[List[Node], Node, Node], Tuple[Node, List[Node], Node], Tuple[Node, Node, List[Node]]]" + ( # type: ignore[arg-type] self.identifier, RDF.type, [ @@ -2180,7 +2201,8 @@ def __repr__(self): ], ) ): - rt.append(str(roletype.split(str(OWL))[-1])) + # type error: "Node" has no attribute "split" + rt.append(str(roletype.split(str(OWL))[-1])) # type: ignore[attr-defined] else: rt.append( "DatatypeProperty( %s %s" @@ -2230,7 +2252,8 @@ def canonicalName(term, g): # noqa: N802 ] ) ) - rt = "\n".join([expr for expr in rt if expr]) + # type error: Incompatible types in assignment (expression has type "str", variable has type "List[str]") + rt = "\n".join([expr for expr in rt if expr]) # type: ignore[assignment] rt += "\n)" return rt diff --git a/rdflib/extras/shacl.py b/rdflib/extras/shacl.py new file mode 100644 index 000000000..30fdab07b --- /dev/null +++ b/rdflib/extras/shacl.py @@ -0,0 +1,93 @@ +""" +Utilities for interacting with SHACL Shapes Graphs more easily. +""" + +from __future__ import annotations + +from typing import Optional, Union + +from rdflib import Graph, Literal, URIRef, paths +from rdflib.namespace import RDF, SH +from rdflib.paths import Path +from rdflib.term import Node + + +class SHACLPathError(Exception): + pass + + +# This implementation is roughly based on +# pyshacl.helper.sparql_query_helper::SPARQLQueryHelper._shacl_path_to_sparql_path +def parse_shacl_path( + shapes_graph: Graph, + path_identifier: Node, +) -> Union[URIRef, Path]: + """ + Parse a valid SHACL path (e.g. 
the object of a triple with predicate sh:path) + from a :class:`~rdflib.graph.Graph` as a :class:`~rdflib.term.URIRef` if the path + is simply a predicate or a :class:`~rdflib.paths.Path` otherwise. + + :param shapes_graph: A :class:`~rdflib.graph.Graph` containing the path to be parsed + :param path_identifier: A :class:`~rdflib.term.Node` of the path + :return: A :class:`~rdflib.term.URIRef` or a :class:`~rdflib.paths.Path` + """ + path: Optional[Union[URIRef, Path]] = None + + # Literals are not allowed. + if isinstance(path_identifier, Literal): + raise TypeError("Literals are not a valid SHACL path.") + + # If a path is a URI, that's the whole path. + elif isinstance(path_identifier, URIRef): + if path_identifier == RDF.nil: + raise SHACLPathError( + "A list of SHACL Paths must contain at least two path items." + ) + path = path_identifier + + # Handle Sequence Paths + elif shapes_graph.value(path_identifier, RDF.first) is not None: + sequence = list(shapes_graph.items(path_identifier)) + if len(sequence) < 2: + raise SHACLPathError( + "A list of SHACL Sequence Paths must contain at least two path items." + ) + path = paths.SequencePath( + *(parse_shacl_path(shapes_graph, path) for path in sequence) + ) + + # Handle sh:inversePath + elif inverse_path := shapes_graph.value(path_identifier, SH.inversePath): + path = paths.InvPath(parse_shacl_path(shapes_graph, inverse_path)) + + # Handle sh:alternativePath + elif alternative_path := shapes_graph.value(path_identifier, SH.alternativePath): + alternatives = list(shapes_graph.items(alternative_path)) + if len(alternatives) < 2: + raise SHACLPathError( + "List of SHACL alternate paths must have at least two path items." 
+ ) + path = paths.AlternativePath( + *( + parse_shacl_path(shapes_graph, alternative) + for alternative in alternatives + ) + ) + + # Handle sh:zeroOrMorePath + elif zero_or_more_path := shapes_graph.value(path_identifier, SH.zeroOrMorePath): + path = paths.MulPath(parse_shacl_path(shapes_graph, zero_or_more_path), "*") + + # Handle sh:oneOrMorePath + elif one_or_more_path := shapes_graph.value(path_identifier, SH.oneOrMorePath): + path = paths.MulPath(parse_shacl_path(shapes_graph, one_or_more_path), "+") + + # Handle sh:zeroOrOnePath + elif zero_or_one_path := shapes_graph.value(path_identifier, SH.zeroOrOnePath): + path = paths.MulPath(parse_shacl_path(shapes_graph, zero_or_one_path), "?") + + # Raise error if none of the above options were found + elif path is None: + raise SHACLPathError(f"Cannot parse {repr(path_identifier)} as a SHACL Path.") + + return path diff --git a/rdflib/graph.py b/rdflib/graph.py index 3b3792f96..41383ace8 100644 --- a/rdflib/graph.py +++ b/rdflib/graph.py @@ -1,125 +1,4 @@ -from __future__ import annotations - -import logging -import pathlib -import random -from io import BytesIO -from typing import ( - IO, - TYPE_CHECKING, - Any, - BinaryIO, - Callable, - Dict, - Generator, - Iterable, - List, - Mapping, - NoReturn, - Optional, - Set, - TextIO, - Tuple, - Type, - TypeVar, - Union, - cast, - overload, -) -from urllib.parse import urlparse -from urllib.request import url2pathname - -import rdflib.exceptions as exceptions -import rdflib.namespace as namespace # noqa: F401 # This is here because it is used in a docstring. 
-import rdflib.plugin as plugin -import rdflib.query as query -import rdflib.util # avoid circular dependency -from rdflib.collection import Collection -from rdflib.exceptions import ParserError -from rdflib.namespace import RDF, Namespace, NamespaceManager -from rdflib.parser import InputSource, Parser, create_input_source -from rdflib.paths import Path -from rdflib.resource import Resource -from rdflib.serializer import Serializer -from rdflib.store import Store -from rdflib.term import ( - BNode, - IdentifiedNode, - Identifier, - Literal, - Node, - URIRef, - _Deskolemizer, -) - -if TYPE_CHECKING: - import typing_extensions as te - - import rdflib.query - from rdflib.plugins.sparql.sparql import Query, Update - -_SubjectType = Node -_PredicateType = Node -_ObjectType = Node -_ContextIdentifierType = IdentifiedNode - -_TripleType = Tuple["_SubjectType", "_PredicateType", "_ObjectType"] -_QuadType = Tuple["_SubjectType", "_PredicateType", "_ObjectType", "_ContextType"] -_OptionalQuadType = Tuple[ - "_SubjectType", "_PredicateType", "_ObjectType", Optional["_ContextType"] -] -_TripleOrOptionalQuadType = Union["_TripleType", "_OptionalQuadType"] -_OptionalIdentifiedQuadType = Tuple[ - "_SubjectType", "_PredicateType", "_ObjectType", Optional["_ContextIdentifierType"] -] -_TriplePatternType = Tuple[ - Optional["_SubjectType"], Optional["_PredicateType"], Optional["_ObjectType"] -] -_TriplePathPatternType = Tuple[Optional["_SubjectType"], Path, Optional["_ObjectType"]] -_QuadPatternType = Tuple[ - Optional["_SubjectType"], - Optional["_PredicateType"], - Optional["_ObjectType"], - Optional["_ContextType"], -] -_QuadPathPatternType = Tuple[ - Optional["_SubjectType"], - Path, - Optional["_ObjectType"], - Optional["_ContextType"], -] -_TripleOrQuadPatternType = Union["_TriplePatternType", "_QuadPatternType"] -_TripleOrQuadPathPatternType = Union["_TriplePathPatternType", "_QuadPathPatternType"] -_TripleSelectorType = Tuple[ - Optional["_SubjectType"], - 
Optional[Union["Path", "_PredicateType"]], - Optional["_ObjectType"], -] -_QuadSelectorType = Tuple[ - Optional["_SubjectType"], - Optional[Union["Path", "_PredicateType"]], - Optional["_ObjectType"], - Optional["_ContextType"], -] -_TripleOrQuadSelectorType = Union["_TripleSelectorType", "_QuadSelectorType"] -_TriplePathType = Tuple["_SubjectType", Path, "_ObjectType"] -_TripleOrTriplePathType = Union["_TripleType", "_TriplePathType"] - -_GraphT = TypeVar("_GraphT", bound="Graph") -_ConjunctiveGraphT = TypeVar("_ConjunctiveGraphT", bound="ConjunctiveGraph") -_DatasetT = TypeVar("_DatasetT", bound="Dataset") - -# type error: Function "Type[Literal]" could always be true in boolean contex -assert Literal # type: ignore[truthy-function] # avoid warning -# type error: Function "Type[Namespace]" could always be true in boolean context -assert Namespace # type: ignore[truthy-function] # avoid warning - -if TYPE_CHECKING: - from rdflib._type_checking import _NamespaceSetString - -logger = logging.getLogger(__name__) - -__doc__ = """\ +"""\ RDFLib defines the following kinds of Graphs: @@ -140,6 +19,9 @@ Conjunctive Graph ----------------- +.. warning:: + ConjunctiveGraph is deprecated, use :class:`~rdflib.graph.Dataset` instead. + A Conjunctive Graph is the most relevant collection of graphs that are considered to be the boundary for closed world assumptions. 
This boundary is equivalent to that of the store instance (which is itself @@ -365,6 +247,129 @@ """ +from __future__ import annotations + +import logging +import pathlib +import random +import warnings +from io import BytesIO +from typing import ( + IO, + TYPE_CHECKING, + Any, + BinaryIO, + Callable, + Dict, + Generator, + Iterable, + List, + Mapping, + NoReturn, + Optional, + Set, + TextIO, + Tuple, + Type, + TypeVar, + Union, + cast, + overload, +) +from urllib.parse import urlparse +from urllib.request import url2pathname + +import rdflib.exceptions as exceptions +import rdflib.namespace as namespace # noqa: F401 # This is here because it is used in a docstring. +import rdflib.plugin as plugin +import rdflib.query as query +import rdflib.util # avoid circular dependency +from rdflib.collection import Collection +from rdflib.exceptions import ParserError +from rdflib.namespace import RDF, Namespace, NamespaceManager +from rdflib.parser import InputSource, Parser, create_input_source +from rdflib.paths import Path +from rdflib.resource import Resource +from rdflib.serializer import Serializer +from rdflib.store import Store +from rdflib.term import ( + BNode, + Genid, + IdentifiedNode, + Identifier, + Literal, + Node, + RDFLibGenid, + URIRef, +) + +if TYPE_CHECKING: + import typing_extensions as te + + import rdflib.query + from rdflib.plugins.sparql.sparql import Query, Update + +_SubjectType = Node +_PredicateType = Node +_ObjectType = Node +_ContextIdentifierType = IdentifiedNode + +_TripleType = Tuple["_SubjectType", "_PredicateType", "_ObjectType"] +_QuadType = Tuple["_SubjectType", "_PredicateType", "_ObjectType", "_ContextType"] +_OptionalQuadType = Tuple[ + "_SubjectType", "_PredicateType", "_ObjectType", Optional["_ContextType"] +] +_TripleOrOptionalQuadType = Union["_TripleType", "_OptionalQuadType"] +_OptionalIdentifiedQuadType = Tuple[ + "_SubjectType", "_PredicateType", "_ObjectType", Optional["_ContextIdentifierType"] +] +_TriplePatternType = Tuple[ 
+ Optional["_SubjectType"], Optional["_PredicateType"], Optional["_ObjectType"] +] +_TriplePathPatternType = Tuple[Optional["_SubjectType"], Path, Optional["_ObjectType"]] +_QuadPatternType = Tuple[ + Optional["_SubjectType"], + Optional["_PredicateType"], + Optional["_ObjectType"], + Optional["_ContextType"], +] +_QuadPathPatternType = Tuple[ + Optional["_SubjectType"], + Path, + Optional["_ObjectType"], + Optional["_ContextType"], +] +_TripleOrQuadPatternType = Union["_TriplePatternType", "_QuadPatternType"] +_TripleOrQuadPathPatternType = Union["_TriplePathPatternType", "_QuadPathPatternType"] +_TripleSelectorType = Tuple[ + Optional["_SubjectType"], + Optional[Union["Path", "_PredicateType"]], + Optional["_ObjectType"], +] +_QuadSelectorType = Tuple[ + Optional["_SubjectType"], + Optional[Union["Path", "_PredicateType"]], + Optional["_ObjectType"], + Optional["_ContextType"], +] +_TripleOrQuadSelectorType = Union["_TripleSelectorType", "_QuadSelectorType"] +_TriplePathType = Tuple["_SubjectType", Path, "_ObjectType"] +_TripleOrTriplePathType = Union["_TripleType", "_TriplePathType"] + +_GraphT = TypeVar("_GraphT", bound="Graph") +_ConjunctiveGraphT = TypeVar("_ConjunctiveGraphT", bound="ConjunctiveGraph") +_DatasetT = TypeVar("_DatasetT", bound="Dataset") + +# type error: Function "Type[Literal]" could always be true in boolean contex +assert Literal # type: ignore[truthy-function] # avoid warning +# type error: Function "Type[Namespace]" could always be true in boolean context +assert Namespace # type: ignore[truthy-function] # avoid warning + +if TYPE_CHECKING: + from rdflib._type_checking import _NamespaceSetString + +logger = logging.getLogger(__name__) + __all__ = [ "Graph", @@ -428,13 +433,18 @@ class Graph(Node): For more on named graphs, see: http://www.w3.org/2004/03/trix/ """ + context_aware: bool + formula_aware: bool + default_union: bool + base: Optional[str] + def __init__( self, store: Union[Store, str] = "default", identifier: 
Optional[Union[_ContextIdentifierType, str]] = None, namespace_manager: Optional[NamespaceManager] = None, base: Optional[str] = None, - bind_namespaces: "_NamespaceSetString" = "rdflib", + bind_namespaces: _NamespaceSetString = "rdflib", ): super(Graph, self).__init__() self.base = base @@ -460,7 +470,7 @@ def store(self) -> Store: return self.__store @property - def identifier(self) -> "_ContextIdentifierType": + def identifier(self) -> _ContextIdentifierType: return self.__identifier @property @@ -524,7 +534,7 @@ def close(self, commit_pending_transaction: bool = False) -> None: """ return self.__store.close(commit_pending_transaction=commit_pending_transaction) - def add(self: _GraphT, triple: "_TripleType") -> _GraphT: + def add(self: _GraphT, triple: _TripleType) -> _GraphT: """Add a triple with self as context""" s, p, o = triple assert isinstance(s, Node), "Subject %s must be an rdflib term" % (s,) @@ -533,7 +543,7 @@ def add(self: _GraphT, triple: "_TripleType") -> _GraphT: self.__store.add((s, p, o), self, quoted=False) return self - def addN(self: _GraphT, quads: Iterable["_QuadType"]) -> _GraphT: # noqa: N802 + def addN(self: _GraphT, quads: Iterable[_QuadType]) -> _GraphT: # noqa: N802 """Add a sequence of triple with context""" self.__store.addN( @@ -545,7 +555,7 @@ def addN(self: _GraphT, quads: Iterable["_QuadType"]) -> _GraphT: # noqa: N802 ) return self - def remove(self: _GraphT, triple: "_TriplePatternType") -> _GraphT: + def remove(self: _GraphT, triple: _TriplePatternType) -> _GraphT: """Remove a triple from the graph If the triple does not provide a context attribute, removes the triple @@ -557,28 +567,25 @@ def remove(self: _GraphT, triple: "_TriplePatternType") -> _GraphT: @overload def triples( self, - triple: "_TriplePatternType", - ) -> Generator["_TripleType", None, None]: - ... + triple: _TriplePatternType, + ) -> Generator[_TripleType, None, None]: ... 
@overload def triples( self, - triple: "_TriplePathPatternType", - ) -> Generator["_TriplePathType", None, None]: - ... + triple: _TriplePathPatternType, + ) -> Generator[_TriplePathType, None, None]: ... @overload def triples( self, - triple: "_TripleSelectorType", - ) -> Generator["_TripleOrTriplePathType", None, None]: - ... + triple: _TripleSelectorType, + ) -> Generator[_TripleOrTriplePathType, None, None]: ... def triples( self, - triple: "_TripleSelectorType", - ) -> Generator["_TripleOrTriplePathType", None, None]: + triple: _TripleSelectorType, + ) -> Generator[_TripleOrTriplePathType, None, None]: """Generator over the triple store Returns triples that match the given triple pattern. If triple pattern @@ -655,7 +662,8 @@ def __getitem__(self, item): return (s, p, o) in self elif isinstance(item, (Path, Node)): - return self.predicate_objects(item) + # type error: Argument 1 to "predicate_objects" of "Graph" has incompatible type "Union[Path, Node]"; expected "Optional[Node]" + return self.predicate_objects(item) # type: ignore[arg-type] else: raise TypeError( @@ -671,7 +679,7 @@ def __len__(self) -> int: # type error: Unexpected keyword argument "context" for "__len__" of "Store" return self.__store.__len__(context=self) # type: ignore[call-arg] - def __iter__(self) -> Generator["_TripleType", None, None]: + def __iter__(self) -> Generator[_TripleType, None, None]: """Iterates over all triples in the store""" return self.triples((None, None, None)) @@ -716,20 +724,20 @@ def __gt__(self, other) -> bool: def __ge__(self, other: Graph) -> bool: return self > other or self == other - def __iadd__(self: "_GraphT", other: Iterable["_TripleType"]) -> "_GraphT": + def __iadd__(self: _GraphT, other: Iterable[_TripleType]) -> _GraphT: """Add all triples in Graph other to Graph. 
BNode IDs are not changed.""" self.addN((s, p, o, self) for s, p, o in other) return self - def __isub__(self: "_GraphT", other: Iterable["_TripleType"]) -> "_GraphT": + def __isub__(self: _GraphT, other: Iterable[_TripleType]) -> _GraphT: """Subtract all triples in Graph other from Graph. BNode IDs are not changed.""" for triple in other: self.remove(triple) return self - def __add__(self, other: "Graph") -> "Graph": + def __add__(self, other: Graph) -> Graph: """Set-theoretic union BNode IDs are not changed.""" try: @@ -744,7 +752,7 @@ def __add__(self, other: "Graph") -> "Graph": retval.add(y) return retval - def __mul__(self, other: "Graph") -> "Graph": + def __mul__(self, other: Graph) -> Graph: """Set-theoretic intersection. BNode IDs are not changed.""" try: @@ -756,7 +764,7 @@ def __mul__(self, other: "Graph") -> "Graph": retval.add(x) return retval - def __sub__(self, other: "Graph") -> "Graph": + def __sub__(self, other: Graph) -> Graph: """Set-theoretic difference. BNode IDs are not changed.""" try: @@ -768,7 +776,7 @@ def __sub__(self, other: "Graph") -> "Graph": retval.add(x) return retval - def __xor__(self, other: "Graph") -> "Graph": + def __xor__(self, other: Graph) -> Graph: """Set-theoretic XOR. 
BNode IDs are not changed.""" return (self - other) + (other - self) @@ -799,10 +807,10 @@ def set( def subjects( self, - predicate: Union[None, Path, "_PredicateType"] = None, - object: Optional["_ObjectType"] = None, + predicate: Union[None, Path, _PredicateType] = None, + object: Optional[_ObjectType] = None, unique: bool = False, - ) -> Generator["_SubjectType", None, None]: + ) -> Generator[_SubjectType, None, None]: """A generator of (optionally unique) subjects with the given predicate and object""" if not unique: @@ -823,10 +831,10 @@ def subjects( def predicates( self, - subject: Optional["_SubjectType"] = None, - object: Optional["_ObjectType"] = None, + subject: Optional[_SubjectType] = None, + object: Optional[_ObjectType] = None, unique: bool = False, - ) -> Generator["_PredicateType", None, None]: + ) -> Generator[_PredicateType, None, None]: """A generator of (optionally unique) predicates with the given subject and object""" if not unique: @@ -847,10 +855,10 @@ def predicates( def objects( self, - subject: Optional["_SubjectType"] = None, - predicate: Union[None, Path, "_PredicateType"] = None, + subject: Optional[_SubjectType] = None, + predicate: Union[None, Path, _PredicateType] = None, unique: bool = False, - ) -> Generator["_ObjectType", None, None]: + ) -> Generator[_ObjectType, None, None]: """A generator of (optionally unique) objects with the given subject and predicate""" if not unique: @@ -870,8 +878,8 @@ def objects( raise def subject_predicates( - self, object: Optional["_ObjectType"] = None, unique: bool = False - ) -> Generator[Tuple["_SubjectType", "_PredicateType"], None, None]: + self, object: Optional[_ObjectType] = None, unique: bool = False + ) -> Generator[Tuple[_SubjectType, _PredicateType], None, None]: """A generator of (optionally unique) (subject, predicate) tuples for the given object""" if not unique: @@ -892,9 +900,9 @@ def subject_predicates( def subject_objects( self, - predicate: Union[None, Path, "_PredicateType"] = 
None, + predicate: Union[None, Path, _PredicateType] = None, unique: bool = False, - ) -> Generator[Tuple["_SubjectType", "_ObjectType"], None, None]: + ) -> Generator[Tuple[_SubjectType, _ObjectType], None, None]: """A generator of (optionally unique) (subject, object) tuples for the given predicate""" if not unique: @@ -914,8 +922,8 @@ def subject_objects( raise def predicate_objects( - self, subject: Optional["_SubjectType"] = None, unique: bool = False - ) -> Generator[Tuple["_PredicateType", "_ObjectType"], None, None]: + self, subject: Optional[_SubjectType] = None, unique: bool = False + ) -> Generator[Tuple[_PredicateType, _ObjectType], None, None]: """A generator of (optionally unique) (predicate, object) tuples for the given subject""" if not unique: @@ -937,11 +945,11 @@ def predicate_objects( def triples_choices( self, triple: Union[ - Tuple[List["_SubjectType"], "_PredicateType", "_ObjectType"], - Tuple["_SubjectType", List["_PredicateType"], "_ObjectType"], - Tuple["_SubjectType", "_PredicateType", List["_ObjectType"]], + Tuple[List[_SubjectType], _PredicateType, _ObjectType], + Tuple[_SubjectType, List[_PredicateType], _ObjectType], + Tuple[_SubjectType, _PredicateType, List[_ObjectType]], ], - context: Optional["_ContextType"] = None, + context: Optional[_ContextType] = None, ) -> Generator[_TripleType, None, None]: subject, predicate, object_ = triple # type error: Argument 1 to "triples_choices" of "Store" has incompatible type "Tuple[Union[List[Node], Node], Union[Node, List[Node]], Union[Node, List[Node]]]"; expected "Union[Tuple[List[Node], Node, Node], Tuple[Node, List[Node], Node], Tuple[Node, Node, List[Node]]]" @@ -959,8 +967,7 @@ def value( object: Optional[_ObjectType] = ..., default: Optional[Node] = ..., any: bool = ..., - ) -> None: - ... + ) -> None: ... @overload def value( @@ -970,8 +977,7 @@ def value( object: None = ..., default: Optional[Node] = ..., any: bool = ..., - ) -> None: - ... + ) -> None: ... 
@overload def value( @@ -981,8 +987,7 @@ def value( object: None = ..., default: Optional[Node] = ..., any: bool = ..., - ) -> None: - ... + ) -> None: ... @overload def value( @@ -992,8 +997,7 @@ def value( object: Optional[_ObjectType] = ..., default: Optional[Node] = ..., any: bool = ..., - ) -> Optional[Node]: - ... + ) -> Optional[Node]: ... def value( self, @@ -1077,10 +1081,10 @@ def items(self, list: Node) -> Generator[Node, None, None]: def transitiveClosure( # noqa: N802 self, - func: Callable[[_TCArgT, "Graph"], Iterable[_TCArgT]], + func: Callable[[_TCArgT, Graph], Iterable[_TCArgT]], arg: _TCArgT, seen: Optional[Dict[_TCArgT, int]] = None, - ): # noqa: N802 + ): """ Generates transitive closure of a user-defined function against the graph @@ -1231,8 +1235,7 @@ def serialize( base: Optional[str], encoding: str, **args: Any, - ) -> bytes: - ... + ) -> bytes: ... # no destination and non-None keyword encoding @overload @@ -1244,8 +1247,7 @@ def serialize( *, encoding: str, **args: Any, - ) -> bytes: - ... + ) -> bytes: ... # no destination and None encoding @overload @@ -1256,8 +1258,7 @@ def serialize( base: Optional[str] = ..., encoding: None = ..., **args: Any, - ) -> str: - ... + ) -> str: ... # non-None destination @overload @@ -1268,8 +1269,7 @@ def serialize( base: Optional[str] = ..., encoding: Optional[str] = ..., **args: Any, - ) -> "Graph": - ... + ) -> Graph: ... # fallback @overload @@ -1280,8 +1280,7 @@ def serialize( base: Optional[str] = ..., encoding: Optional[str] = ..., **args: Any, - ) -> Union[bytes, str, "Graph"]: - ... + ) -> Union[bytes, str, Graph]: ... def serialize( self: _GraphT, @@ -1297,7 +1296,7 @@ def serialize( :param destination: The destination to serialize the graph to. This can be a path as a :class:`str` or :class:`~pathlib.PurePath` object, or it can be a - :class:`~typing.IO[bytes]` like object. If this parameter is not + :class:`~typing.IO` ``[bytes]`` like object. 
If this parameter is not supplied the serialized graph will be returned. :param format: The format that the output should be written in. This value @@ -1381,7 +1380,7 @@ def parse( file: Optional[Union[BinaryIO, TextIO]] = None, data: Optional[Union[str, bytes]] = None, **args: Any, - ) -> "Graph": + ) -> Graph: """ Parse an RDF source adding the resulting triples to the Graph. @@ -1400,9 +1399,9 @@ def parse( :doc:`Security Considerations ` documentation. - :param source: An `InputSource`, file-like object, `Path` like object, - or string. In the case of a string the string is the location of the - source. + :param source: An `xml.sax.xmlreader.InputSource`, file-like object, + `pathlib.Path` like object, or string. In the case of a string the string + is the location of the source. :param location: A string indicating the relative or absolute URL of the source. `Graph`'s absolutize method is used if a relative location is specified. @@ -1486,7 +1485,18 @@ def parse( if format is None: format = "turtle" could_not_guess_format = True - parser = plugin.get(format, Parser)() + try: + parser = plugin.get(format, Parser)() + except plugin.PluginException: + # Handle the case when a URLInputSource returns RDF but with the headers + # as a format that does not exist in the plugin system. + # Use guess_format to guess the format based on the input's file suffix. + format = rdflib.util.guess_format( + source if not isinstance(source, InputSource) else str(source) + ) + if format is None: + raise + parser = plugin.get(format, Parser)() try: # TODO FIXME: Parser.parse should have **kwargs argument. 
parser.parse(source, self, **args) @@ -1510,7 +1520,7 @@ def query( processor: Union[str, query.Processor] = "sparql", result: Union[str, Type[query.Result]] = "sparql", initNs: Optional[Mapping[str, Any]] = None, # noqa: N803 - initBindings: Optional[Mapping[str, Identifier]] = None, + initBindings: Optional[Mapping[str, Identifier]] = None, # noqa: N803 use_store_provided: bool = True, **kwargs: Any, ) -> query.Result: @@ -1544,13 +1554,19 @@ def query( initBindings = initBindings or {} # noqa: N806 initNs = initNs or dict(self.namespaces()) # noqa: N806 + if self.default_union: + query_graph = "__UNION__" + elif isinstance(self, ConjunctiveGraph): + query_graph = self.default_context.identifier + else: + query_graph = self.identifier if hasattr(self.store, "query") and use_store_provided: try: return self.store.query( query_object, initNs, initBindings, - self.default_union and "__UNION__" or self.identifier, + query_graph, **kwargs, ) except NotImplementedError: @@ -1569,7 +1585,7 @@ def update( update_object: Union[Update, str], processor: Union[str, rdflib.query.UpdateProcessor] = "sparql", initNs: Optional[Mapping[str, Any]] = None, # noqa: N803 - initBindings: Optional[Mapping[str, Identifier]] = None, + initBindings: Optional[Mapping[str, Identifier]] = None, # noqa: N803 use_store_provided: bool = True, **kwargs: Any, ) -> None: @@ -1592,13 +1608,20 @@ def update( initBindings = initBindings or {} # noqa: N806 initNs = initNs or dict(self.namespaces()) # noqa: N806 + if self.default_union: + query_graph = "__UNION__" + elif isinstance(self, ConjunctiveGraph): + query_graph = self.default_context.identifier + else: + query_graph = self.identifier + if hasattr(self.store, "update") and use_store_provided: try: return self.store.update( update_object, initNs, initBindings, - self.default_union and "__UNION__" or self.identifier, + query_graph, **kwargs, ) except NotImplementedError: @@ -1609,10 +1632,9 @@ def update( return processor.update(update_object, 
initBindings, initNs, **kwargs) - def n3(self) -> str: + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: """Return an n3 identifier for the Graph""" - # type error: "IdentifiedNode" has no attribute "n3" - return "[%s]" % self.identifier.n3() # type: ignore[attr-defined] + return "[%s]" % self.identifier.n3(namespace_manager=namespace_manager) def __reduce__(self) -> Tuple[Type[Graph], Tuple[Store, _ContextIdentifierType]]: return ( @@ -1635,11 +1657,11 @@ def isomorphic(self, other: Graph) -> bool: return False for s, p, o in self: if not isinstance(s, BNode) and not isinstance(o, BNode): - if not (s, p, o) in other: + if not (s, p, o) in other: # noqa: E713 return False for s, p, o in other: if not isinstance(s, BNode) and not isinstance(o, BNode): - if not (s, p, o) in self: + if not (s, p, o) in self: # noqa: E713 return False # TODO: very well could be a false positive at this point yet. return True @@ -1766,7 +1788,7 @@ def do_skolemize2(t: _TripleType) -> _TripleType: self._process_skolem_tuples(retval, do_skolemize2) elif isinstance(bnode, BNode): # type error: Argument 1 to "do_skolemize" has incompatible type "Optional[BNode]"; expected "BNode" - self._process_skolem_tuples(retval, lambda t: do_skolemize(bnode, t)) # type: ignore[arg-type] + self._process_skolem_tuples(retval, lambda t: do_skolemize(bnode, t)) # type: ignore[arg-type, unused-ignore] return retval @@ -1806,7 +1828,7 @@ def do_de_skolemize2(t: _TripleType) -> _TripleType: self._process_skolem_tuples(retval, do_de_skolemize2) else: # type error: Argument 1 to "do_de_skolemize" has incompatible type "Optional[URIRef]"; expected "URIRef" - self._process_skolem_tuples(retval, lambda t: do_de_skolemize(uriref, t)) # type: ignore[arg-type] + self._process_skolem_tuples(retval, lambda t: do_de_skolemize(uriref, t)) # type: ignore[arg-type, unused-ignore] return retval @@ -1851,7 +1873,7 @@ def add_to_cbd(uri: _SubjectType) -> None: for s, p, o in self.triples((uri, None, 
None)): subgraph.add((s, p, o)) # recurse 'down' through ll Blank Nodes - if type(o) == BNode and not (o, None, None) in subgraph: + if type(o) is BNode and (o, None, None) not in subgraph: add_to_cbd(o) # for Rule 3 (reification) @@ -1877,6 +1899,9 @@ class ConjunctiveGraph(Graph): """A ConjunctiveGraph is an (unnamed) aggregation of all the named graphs in a store. + .. warning:: + ConjunctiveGraph is deprecated, use :class:`~rdflib.graph.Dataset` instead. + It has a ``default`` graph, whose name is associated with the graph throughout its life. :meth:`__init__` can take an identifier to use as the name of this default graph or it will assign a @@ -1887,6 +1912,8 @@ class ConjunctiveGraph(Graph): All queries are carried out against the union of all graphs. """ + default_context: _ContextType + def __init__( self, store: Union[Store, str] = "default", @@ -1894,6 +1921,14 @@ def __init__( default_graph_base: Optional[str] = None, ): super(ConjunctiveGraph, self).__init__(store, identifier=identifier) + + if type(self) is ConjunctiveGraph: + warnings.warn( + "ConjunctiveGraph is deprecated, use Dataset instead.", + DeprecationWarning, + stacklevel=2, + ) + assert self.store.context_aware, ( "ConjunctiveGraph must be backed by" " a context aware store." ) @@ -1913,50 +1948,44 @@ def __str__(self) -> str: @overload def _spoc( self, - triple_or_quad: "_QuadType", + triple_or_quad: _QuadType, default: bool = False, - ) -> "_QuadType": - ... + ) -> _QuadType: ... @overload def _spoc( self, - triple_or_quad: Union["_TripleType", "_OptionalQuadType"], + triple_or_quad: Union[_TripleType, _OptionalQuadType], default: bool = False, - ) -> "_OptionalQuadType": - ... + ) -> _OptionalQuadType: ... @overload def _spoc( self, triple_or_quad: None, default: bool = False, - ) -> Tuple[None, None, None, Optional[Graph]]: - ... + ) -> Tuple[None, None, None, Optional[Graph]]: ... 
@overload def _spoc( self, triple_or_quad: Optional[_TripleOrQuadPatternType], default: bool = False, - ) -> "_QuadPatternType": - ... + ) -> _QuadPatternType: ... @overload def _spoc( self, triple_or_quad: _TripleOrQuadSelectorType, default: bool = False, - ) -> _QuadSelectorType: - ... + ) -> _QuadSelectorType: ... @overload def _spoc( self, triple_or_quad: Optional[_TripleOrQuadSelectorType], default: bool = False, - ) -> _QuadSelectorType: - ... + ) -> _QuadSelectorType: ... def _spoc( self, @@ -1972,10 +2001,10 @@ def _spoc( if len(triple_or_quad) == 3: c = self.default_context if default else None # type error: Too many values to unpack (3 expected, 4 provided) - (s, p, o) = triple_or_quad # type: ignore[misc] + (s, p, o) = triple_or_quad # type: ignore[misc, unused-ignore] elif len(triple_or_quad) == 4: # type error: Need more than 3 values to unpack (4 expected) - (s, p, o, c) = triple_or_quad # type: ignore[misc] + (s, p, o, c) = triple_or_quad # type: ignore[misc, unused-ignore] c = self._graph(c) return s, p, o, c @@ -2005,12 +2034,10 @@ def add( return self @overload - def _graph(self, c: Union[Graph, _ContextIdentifierType, str]) -> Graph: - ... + def _graph(self, c: Union[Graph, _ContextIdentifierType, str]) -> Graph: ... @overload - def _graph(self, c: None) -> None: - ... + def _graph(self, c: None) -> None: ... def _graph( self, c: Optional[Union[Graph, _ContextIdentifierType, str]] @@ -2023,7 +2050,7 @@ def _graph( return c def addN( # noqa: N802 - self: _ConjunctiveGraphT, quads: Iterable["_QuadType"] + self: _ConjunctiveGraphT, quads: Iterable[_QuadType] ) -> _ConjunctiveGraphT: """Add a sequence of triples with context""" @@ -2050,32 +2077,29 @@ def remove(self: _ConjunctiveGraphT, triple_or_quad: _TripleOrOptionalQuadType) @overload def triples( self, - triple_or_quad: "_TripleOrQuadPatternType", + triple_or_quad: _TripleOrQuadPatternType, context: Optional[_ContextType] = ..., - ) -> Generator["_TripleType", None, None]: - ... 
+ ) -> Generator[_TripleType, None, None]: ... @overload def triples( self, - triple_or_quad: "_TripleOrQuadPathPatternType", + triple_or_quad: _TripleOrQuadPathPatternType, context: Optional[_ContextType] = ..., - ) -> Generator["_TriplePathType", None, None]: - ... + ) -> Generator[_TriplePathType, None, None]: ... @overload def triples( self, triple_or_quad: _TripleOrQuadSelectorType, context: Optional[_ContextType] = ..., - ) -> Generator["_TripleOrTriplePathType", None, None]: - ... + ) -> Generator[_TripleOrTriplePathType, None, None]: ... def triples( self, triple_or_quad: _TripleOrQuadSelectorType, context: Optional[_ContextType] = None, - ) -> Generator["_TripleOrTriplePathType", None, None]: + ) -> Generator[_TripleOrTriplePathType, None, None]: """ Iterate over all the triples in the entire conjunctive graph @@ -2118,11 +2142,11 @@ def quads( def triples_choices( self, triple: Union[ - Tuple[List["_SubjectType"], "_PredicateType", "_ObjectType"], - Tuple["_SubjectType", List["_PredicateType"], "_ObjectType"], - Tuple["_SubjectType", "_PredicateType", List["_ObjectType"]], + Tuple[List[_SubjectType], _PredicateType, _ObjectType], + Tuple[_SubjectType, List[_PredicateType], _ObjectType], + Tuple[_SubjectType, _PredicateType, List[_ObjectType]], ], - context: Optional["_ContextType"] = None, + context: Optional[_ContextType] = None, ) -> Generator[_TripleType, None, None]: """Iterate over all the triples in the entire conjunctive graph""" s, p, o = triple @@ -2141,8 +2165,8 @@ def __len__(self) -> int: return self.store.__len__() def contexts( - self, triple: Optional["_TripleType"] = None - ) -> Generator["_ContextType", None, None]: + self, triple: Optional[_TripleType] = None + ) -> Generator[_ContextType, None, None]: """Iterate over all contexts in the graph If triple is specified, iterate over all contexts the triple is in. 
@@ -2157,13 +2181,13 @@ def contexts( # type error: Statement is unreachable yield self.get_context(context) # type: ignore[unreachable] - def get_graph(self, identifier: "_ContextIdentifierType") -> Union[Graph, None]: + def get_graph(self, identifier: _ContextIdentifierType) -> Union[Graph, None]: """Returns the graph identified by given identifier""" return [x for x in self.contexts() if x.identifier == identifier][0] def get_context( self, - identifier: Optional[Union["_ContextIdentifierType", str]], + identifier: Optional[Union[_ContextIdentifierType, str]], quoted: bool = False, base: Optional[str] = None, ) -> Graph: @@ -2178,7 +2202,7 @@ def get_context( base=base, ) - def remove_context(self, context: "_ContextType") -> None: + def remove_context(self, context: _ContextType) -> None: """Removes the given context from the graph""" self.store.remove((None, None, None), context) @@ -2200,15 +2224,16 @@ def parse( file: Optional[Union[BinaryIO, TextIO]] = None, data: Optional[Union[str, bytes]] = None, **args: Any, - ) -> "Graph": + ) -> Graph: """ Parse source adding the resulting triples to its own context (sub graph of this graph). See :meth:`rdflib.graph.Graph.parse` for documentation on arguments. - If the source is in a format that does not support named graphs it's triples - will be added to the default graph (i.e. `Dataset.default_context`). + If the source is in a format that does not support named graphs its triples + will be added to the default graph + (i.e. :attr:`ConjunctiveGraph.default_context`). :Returns: @@ -2234,7 +2259,7 @@ def parse( the ``publicID`` parameter will also not be used as the name for the graph that the data is loaded into, and instead the triples from sources that do not support named graphs will be loaded into the default graph - (i.e. `ConjunctionGraph.default_context`). + (i.e. :attr:`ConjunctiveGraph.default_context`). 
""" source = create_input_source( @@ -2267,7 +2292,7 @@ def __reduce__(self) -> Tuple[Type[Graph], Tuple[Store, _ContextIdentifierType]] class Dataset(ConjunctiveGraph): - __doc__ = """ + """ RDF 1.1 Dataset. Small extension to the Conjunctive Graph: - the primary term is graphs in the datasets and not contexts with quads, so there is a separate method to set/retrieve a graph in a dataset and @@ -2460,7 +2485,7 @@ def parse( file: Optional[Union[BinaryIO, TextIO]] = None, data: Optional[Union[str, bytes]] = None, **args: Any, - ) -> "Graph": + ) -> Graph: """ Parse an RDF source adding the resulting triples to the Graph. @@ -2468,8 +2493,9 @@ def parse( The source is specified using one of source, location, file or data. - If the source is in a format that does not support named graphs it's triples - will be added to the default graph (i.e. `Dataset.default_context`). + If the source is in a format that does not support named graphs its triples + will be added to the default graph + (i.e. :attr:`.Dataset.default_context`). .. caution:: @@ -2490,7 +2516,7 @@ def parse( the ``publicID`` parameter will also not be used as the name for the graph that the data is loaded into, and instead the triples from sources that do not support named graphs will be loaded into the default graph - (i.e. `ConjunctionGraph.default_context`). + (i.e. :attr:`.Dataset.default_context`). 
""" c = ConjunctiveGraph.parse( @@ -2519,8 +2545,8 @@ def remove_graph( return self def contexts( - self, triple: Optional["_TripleType"] = None - ) -> Generator["_ContextType", None, None]: + self, triple: Optional[_TripleType] = None + ) -> Generator[_ContextType, None, None]: default = False for c in super(Dataset, self).contexts(triple): default |= c.identifier == DATASET_DEFAULT_GRAPH_ID @@ -2532,7 +2558,7 @@ def contexts( # type error: Return type "Generator[Tuple[Node, Node, Node, Optional[Node]], None, None]" of "quads" incompatible with return type "Generator[Tuple[Node, Node, Node, Optional[Graph]], None, None]" in supertype "ConjunctiveGraph" def quads( # type: ignore[override] - self, quad: Optional["_TripleOrQuadPatternType"] = None + self, quad: Optional[_TripleOrQuadPatternType] = None ) -> Generator[_OptionalIdentifiedQuadType, None, None]: for s, p, o, c in super(Dataset, self).quads(quad): # type error: Item "None" of "Optional[Graph]" has no attribute "identifier" @@ -2565,7 +2591,7 @@ def __init__( ): super(QuotedGraph, self).__init__(store, identifier) - def add(self: _GraphT, triple: "_TripleType") -> _GraphT: + def add(self: _GraphT, triple: _TripleType) -> _GraphT: """Add a triple with self as context""" s, p, o = triple assert isinstance(s, Node), "Subject %s must be an rdflib term" % (s,) @@ -2575,7 +2601,7 @@ def add(self: _GraphT, triple: "_TripleType") -> _GraphT: self.store.add((s, p, o), self, quoted=True) return self - def addN(self: _GraphT, quads: Iterable["_QuadType"]) -> _GraphT: # noqa: N802 + def addN(self: _GraphT, quads: Iterable[_QuadType]) -> _GraphT: # noqa: N802 """Add a sequence of triple with context""" self.store.addN( @@ -2587,14 +2613,12 @@ def addN(self: _GraphT, quads: Iterable["_QuadType"]) -> _GraphT: # noqa: N802 ) return self - def n3(self) -> str: + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: """Return an n3 identifier for the Graph""" - # type error: "IdentifiedNode" has no 
attribute "n3" - return "{%s}" % self.identifier.n3() # type: ignore[attr-defined] + return "{%s}" % self.identifier.n3(namespace_manager=namespace_manager) def __str__(self) -> str: - # type error: "IdentifiedNode" has no attribute "n3" - identifier = self.identifier.n3() # type: ignore[attr-defined] + identifier = self.identifier.n3() label = self.store.__class__.__name__ pattern = ( "{this rdflib.identifier %s;rdflib:storage " @@ -2648,7 +2672,7 @@ def __init__(self, graph: Graph, subject: _SubjectType): # by sorting the keys (by integer) we have what we want! _list.sort() - def toPython(self) -> "Seq": # noqa: N802 + def toPython(self) -> Seq: # noqa: N802 return self def __iter__(self) -> Generator[_ObjectType, None, None]: @@ -2666,7 +2690,7 @@ def __getitem__(self, index) -> _ObjectType: return item -class ModificationException(Exception): +class ModificationException(Exception): # noqa: N818 def __init__(self) -> None: pass @@ -2677,7 +2701,7 @@ def __str__(self) -> str: ) -class UnSupportedAggregateOperation(Exception): +class UnSupportedAggregateOperation(Exception): # noqa: N818 def __init__(self) -> None: pass @@ -2734,7 +2758,7 @@ def close(self) -> None: # type: ignore[override] def add(self, triple: _TripleOrOptionalQuadType) -> NoReturn: raise ModificationException() - def addN(self, quads: Iterable["_QuadType"]) -> NoReturn: # noqa: N802 + def addN(self, quads: Iterable[_QuadType]) -> NoReturn: # noqa: N802 raise ModificationException() # type error: Argument 1 of "remove" is incompatible with supertype "Graph"; supertype defines the argument type as "Tuple[Optional[Node], Optional[Node], Optional[Node]]" @@ -2745,28 +2769,25 @@ def remove(self, triple: _TripleOrOptionalQuadType) -> NoReturn: # type: ignore @overload # type: ignore[override] def triples( self, - triple: "_TriplePatternType", - ) -> Generator["_TripleType", None, None]: - ... + triple: _TriplePatternType, + ) -> Generator[_TripleType, None, None]: ... 
@overload def triples( self, - triple: "_TriplePathPatternType", - ) -> Generator["_TriplePathType", None, None]: - ... + triple: _TriplePathPatternType, + ) -> Generator[_TriplePathType, None, None]: ... @overload def triples( self, - triple: "_TripleSelectorType", - ) -> Generator["_TripleOrTriplePathType", None, None]: - ... + triple: _TripleSelectorType, + ) -> Generator[_TripleOrTriplePathType, None, None]: ... def triples( self, - triple: "_TripleSelectorType", - ) -> Generator["_TripleOrTriplePathType", None, None]: + triple: _TripleSelectorType, + ) -> Generator[_TripleOrTriplePathType, None, None]: s, p, o = triple for graph in self.graphs: if isinstance(p, Path): @@ -2780,7 +2801,7 @@ def __contains__(self, triple_or_quad: _TripleOrQuadSelectorType) -> bool: context = None if len(triple_or_quad) == 4: # type error: Tuple index out of range - context = triple_or_quad[3] # type: ignore [misc] + context = triple_or_quad[3] # type: ignore [misc, unused-ignore] for graph in self.graphs: if context is None or graph.identifier == context.identifier: if triple_or_quad[:3] in graph: @@ -2791,9 +2812,7 @@ def __contains__(self, triple_or_quad: _TripleOrQuadSelectorType) -> bool: def quads( # type: ignore[override] self, triple_or_quad: _TripleOrQuadSelectorType ) -> Generator[ - Tuple[ - "_SubjectType", Union[Path, "_PredicateType"], "_ObjectType", "_ContextType" - ], + Tuple[_SubjectType, Union[Path, _PredicateType], _ObjectType, _ContextType], None, None, ]: @@ -2801,10 +2820,10 @@ def quads( # type: ignore[override] c = None if len(triple_or_quad) == 4: # type error: Need more than 3 values to unpack (4 expected) - s, p, o, c = triple_or_quad # type: ignore[misc] + s, p, o, c = triple_or_quad # type: ignore[misc, unused-ignore] else: # type error: Too many values to unpack (3 expected, 4 provided) - s, p, o = triple_or_quad # type: ignore[misc] + s, p, o = triple_or_quad # type: ignore[misc, unused-ignore] if c is not None: for graph in [g for g in self.graphs 
if g == c]: @@ -2831,10 +2850,10 @@ def __cmp__(self, other) -> int: else: return -1 - def __iadd__(self: "_GraphT", other: Iterable["_TripleType"]) -> NoReturn: + def __iadd__(self: _GraphT, other: Iterable[_TripleType]) -> NoReturn: raise ModificationException() - def __isub__(self: "_GraphT", other: Iterable["_TripleType"]) -> NoReturn: + def __isub__(self: _GraphT, other: Iterable[_TripleType]) -> NoReturn: raise ModificationException() # Conv. methods @@ -2842,11 +2861,11 @@ def __isub__(self: "_GraphT", other: Iterable["_TripleType"]) -> NoReturn: def triples_choices( self, triple: Union[ - Tuple[List["_SubjectType"], "_PredicateType", "_ObjectType"], - Tuple["_SubjectType", List["_PredicateType"], "_ObjectType"], - Tuple["_SubjectType", "_PredicateType", List["_ObjectType"]], + Tuple[List[_SubjectType], _PredicateType, _ObjectType], + Tuple[_SubjectType, List[_PredicateType], _ObjectType], + Tuple[_SubjectType, _PredicateType, List[_ObjectType]], ], - context: Optional["_ContextType"] = None, + context: Optional[_ContextType] = None, ) -> Generator[_TripleType, None, None]: subject, predicate, object_ = triple for graph in self.graphs: @@ -2874,7 +2893,7 @@ def bind( # type: ignore[override] def namespaces(self) -> Generator[Tuple[str, URIRef], None, None]: if hasattr(self, "namespace_manager"): - for prefix, namespace in self.namespace_manager.namespaces(): # noqa: F402 + for prefix, namespace in self.namespace_manager.namespaces(): yield prefix, namespace else: for graph in self.graphs: @@ -2890,13 +2909,13 @@ def parse( # type: ignore[override] source: Optional[ Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath] ], - publicID: Optional[str] = None, + publicID: Optional[str] = None, # noqa: N803 format: Optional[str] = None, **args: Any, - ) -> NoReturn: # noqa: N803 + ) -> NoReturn: raise ModificationException() - def n3(self) -> NoReturn: + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> NoReturn: raise 
UnSupportedAggregateOperation() def __reduce__(self) -> NoReturn: @@ -2904,13 +2923,11 @@ def __reduce__(self) -> NoReturn: @overload -def _assertnode(*terms: Node) -> "te.Literal[True]": - ... +def _assertnode(*terms: Node) -> te.Literal[True]: ... @overload -def _assertnode(*terms: Any) -> bool: - ... +def _assertnode(*terms: Any) -> bool: ... def _assertnode(*terms: Any) -> bool: @@ -2958,10 +2975,10 @@ def reset(self) -> BatchAddGraph: def add( self, triple_or_quad: Union[ - "_TripleType", - "_QuadType", + _TripleType, + _QuadType, ], - ) -> "BatchAddGraph": + ) -> BatchAddGraph: """ Add a triple to the buffer @@ -2973,13 +2990,13 @@ def add( self.count += 1 if len(triple_or_quad) == 3: # type error: Argument 1 to "append" of "list" has incompatible type "Tuple[Node, ...]"; expected "Tuple[Node, Node, Node, Graph]" - self.batch.append(triple_or_quad + self.__graph_tuple) # type: ignore[arg-type] + self.batch.append(triple_or_quad + self.__graph_tuple) # type: ignore[arg-type, unused-ignore] else: # type error: Argument 1 to "append" of "list" has incompatible type "Union[Tuple[Node, Node, Node], Tuple[Node, Node, Node, Graph]]"; expected "Tuple[Node, Node, Node, Graph]" - self.batch.append(triple_or_quad) # type: ignore[arg-type] + self.batch.append(triple_or_quad) # type: ignore[arg-type, unused-ignore] return self - def addN(self, quads: Iterable["_QuadType"]) -> BatchAddGraph: # noqa: N802 + def addN(self, quads: Iterable[_QuadType]) -> BatchAddGraph: # noqa: N802 if self.__batch_addn: for q in quads: self.add(q) diff --git a/rdflib/namespace/_BRICK.py b/rdflib/namespace/_BRICK.py index 5bccfe5d5..20c60fc1c 100644 --- a/rdflib/namespace/_BRICK.py +++ b/rdflib/namespace/_BRICK.py @@ -24,7 +24,9 @@ class BRICK(DefinedNamespace): Adjust_Sensor: URIRef # Measures user-provided adjustment of some value Air: URIRef # the invisible gaseous substance surrounding the earth, a mixture mainly of oxygen and nitrogen. 
Air_Alarm: URIRef - Air_Differential_Pressure_Sensor: URIRef # Measures the difference in pressure between two regions of air + Air_Differential_Pressure_Sensor: ( + URIRef # Measures the difference in pressure between two regions of air + ) Air_Differential_Pressure_Setpoint: URIRef # Sets the target air differential pressure between an upstream and downstream point in a air duct or conduit Air_Diffuser: URIRef # A device that is a component of the air distribution system that controls the delivery of conditioned and/or ventilating air into a room Air_Enthalpy_Sensor: URIRef # Measures the total heat content of air @@ -58,10 +60,14 @@ class BRICK(DefinedNamespace): Automatic_Mode_Command: URIRef # Controls whether or not a device or controller is operating in "Automatic" mode Availability_Status: URIRef # Indicates if a piece of equipment, system, or functionality is available for operation Average_Cooling_Demand_Sensor: URIRef # Measures the average power consumed by a cooling process as the amount of power consumed over some interval - Average_Discharge_Air_Flow_Sensor: URIRef # The computed average flow of discharge air over some interval + Average_Discharge_Air_Flow_Sensor: ( + URIRef # The computed average flow of discharge air over some interval + ) Average_Exhaust_Air_Static_Pressure_Sensor: URIRef # The computed average static pressure of air in exhaust regions of an HVAC system over some period of time Average_Heating_Demand_Sensor: URIRef # Measures the average power consumed by a heating process as the amount of power consumed over some interval - Average_Supply_Air_Flow_Sensor: URIRef # The computed average flow of supply air over some interval + Average_Supply_Air_Flow_Sensor: ( + URIRef # The computed average flow of supply air over some interval + ) Average_Zone_Air_Temperature_Sensor: URIRef # The computed average temperature of air in a zone, over some period of time Baseboard_Radiator: URIRef # Steam, hydronic, or electric heating device located 
at or near the floor. Basement: URIRef # The floor of a building which is partly or entirely below ground level. @@ -77,12 +83,18 @@ class BRICK(DefinedNamespace): Break_Room: URIRef # A space for people to relax while not working Breaker_Panel: URIRef # Breaker Panel distributes power into various end-uses. Breakroom: URIRef # A space for people to relax while not working - Broadcast_Room: URIRef # A space to organize and manage a broadcast. Separate from studio + Broadcast_Room: ( + URIRef # A space to organize and manage a broadcast. Separate from studio + ) Building: URIRef # An independent unit of the built environment with a characteristic spatial structure, intended to serve at least one function or user activity [ISO 12006-2:2013] Building_Air: URIRef # air contained within a building Building_Air_Humidity_Setpoint: URIRef # Setpoint for humidity in a building - Building_Air_Static_Pressure_Sensor: URIRef # The static pressure of air within a building - Building_Air_Static_Pressure_Setpoint: URIRef # Sets static pressure of the entire building + Building_Air_Static_Pressure_Sensor: ( + URIRef # The static pressure of air within a building + ) + Building_Air_Static_Pressure_Setpoint: ( + URIRef # Sets static pressure of the entire building + ) Building_Chilled_Water_Meter: URIRef # A meter that measures the usage or consumption of chilled water of a whole building Building_Electrical_Meter: URIRef # A meter that measures the usage or consumption of electricity of a whole building Building_Gas_Meter: URIRef # A meter that measures the usage or consumption of gas of a whole building @@ -102,7 +114,9 @@ class BRICK(DefinedNamespace): CO: URIRef # Carbon Monoxide in the vapor phase CO2: URIRef # Carbon Dioxide in the vapor phase CO2_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with the presence of carbon dioxide. 
- CO2_Differential_Sensor: URIRef # Measures the difference between CO2 levels of inside and outside air + CO2_Differential_Sensor: ( + URIRef # Measures the difference between CO2 levels of inside and outside air + ) CO2_Level_Sensor: URIRef # Measures the concentration of CO2 in air CO2_Sensor: URIRef # Measures properties of CO2 in air CO2_Setpoint: URIRef # Sets some property of CO2 @@ -119,7 +133,9 @@ class BRICK(DefinedNamespace): Chilled_Beam: URIRef # A device with an integrated coil that performs sensible heating of a space via circulation of room air. Chilled Beams are not designed to perform latent cooling; see Induction Units. Despite their name, Chilled Beams may perform heating or cooling of a space depending on their configuration. Chilled_Water: URIRef # water used as a cooling medium (particularly in air-conditioning systems or in processes) at below ambient temperature. Chilled_Water_Coil: URIRef # A cooling element made of pipe or tube that removes heat from equipment, machines or airflows that is filled with chilled water. 
- Chilled_Water_Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure of chilled water + Chilled_Water_Differential_Pressure_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of differential pressure of chilled water + ) Chilled_Water_Differential_Pressure_Integral_Time_Parameter: URIRef Chilled_Water_Differential_Pressure_Load_Shed_Reset_Status: URIRef Chilled_Water_Differential_Pressure_Load_Shed_Setpoint: URIRef @@ -129,23 +145,43 @@ class BRICK(DefinedNamespace): Chilled_Water_Differential_Pressure_Setpoint: URIRef # Sets the target water differential pressure between an upstream and downstream point in a water pipe or conduit used to carry chilled water Chilled_Water_Differential_Pressure_Step_Parameter: URIRef Chilled_Water_Differential_Temperature_Sensor: URIRef # Measures the difference in temperature between the entering water to the chiller or other water cooling device and leaving water from the same chiller or other water cooling device - Chilled_Water_Discharge_Flow_Sensor: URIRef # Measures the rate of flow of chilled discharge water - Chilled_Water_Discharge_Flow_Setpoint: URIRef # Sets the target flow rate of chilled discharge water - Chilled_Water_Flow_Sensor: URIRef # Measures the rate of flow in a chilled water circuit + Chilled_Water_Discharge_Flow_Sensor: ( + URIRef # Measures the rate of flow of chilled discharge water + ) + Chilled_Water_Discharge_Flow_Setpoint: ( + URIRef # Sets the target flow rate of chilled discharge water + ) + Chilled_Water_Flow_Sensor: ( + URIRef # Measures the rate of flow in a chilled water circuit + ) Chilled_Water_Flow_Setpoint: URIRef # Sets the target flow rate of chilled water Chilled_Water_Loop: URIRef # A collection of equipment that transport and regulate chilled water among each other - Chilled_Water_Meter: URIRef # A meter that measures the usage or consumption of chilled water + Chilled_Water_Meter: ( + URIRef # A meter that measures the usage or 
consumption of chilled water + ) Chilled_Water_Pump: URIRef # A pump that performs work on chilled water; typically part of a chilled water system Chilled_Water_Pump_Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure of chilled water in a chilled water pump - Chilled_Water_Return_Flow_Sensor: URIRef # Measures the rate of flow of chilled return water + Chilled_Water_Return_Flow_Sensor: ( + URIRef # Measures the rate of flow of chilled return water + ) Chilled_Water_Return_Temperature_Sensor: URIRef # Measures the temperature of chilled water that is returned to a cooling tower - Chilled_Water_Static_Pressure_Setpoint: URIRef # Sets static pressure of chilled water - Chilled_Water_Supply_Flow_Sensor: URIRef # Measures the rate of flow of chilled supply water - Chilled_Water_Supply_Flow_Setpoint: URIRef # Sets the target flow rate of chilled supply water + Chilled_Water_Static_Pressure_Setpoint: ( + URIRef # Sets static pressure of chilled water + ) + Chilled_Water_Supply_Flow_Sensor: ( + URIRef # Measures the rate of flow of chilled supply water + ) + Chilled_Water_Supply_Flow_Setpoint: ( + URIRef # Sets the target flow rate of chilled supply water + ) Chilled_Water_Supply_Temperature_Sensor: URIRef # Measures the temperature of chilled water that is supplied from a chiller Chilled_Water_System: URIRef # The equipment, devices and conduits that handle the production and distribution of chilled water in a building - Chilled_Water_System_Enable_Command: URIRef # Enables operation of the chilled water system - Chilled_Water_Temperature_Sensor: URIRef # Measures the temperature of chilled water + Chilled_Water_System_Enable_Command: ( + URIRef # Enables operation of the chilled water system + ) + Chilled_Water_Temperature_Sensor: ( + URIRef # Measures the temperature of chilled water + ) Chilled_Water_Temperature_Setpoint: URIRef # Sets the temperature of chilled water Chilled_Water_Valve: URIRef # A valve that 
modulates the flow of chilled water Chiller: URIRef # Refrigerating machine used to transfer heat between fluids. Chillers are either direct expansion with a compressor or absorption type. @@ -161,20 +197,30 @@ class BRICK(DefinedNamespace): Collection_Basin_Water_Level_Sensor: URIRef # Measures the level of the water in the collection basin, e.g. within a Cooling_Tower Collection_Basin_Water_Temperature_Sensor: URIRef # Measures the temperature of the water in the collection basin, e.g. within a Cooling_Tower Command: URIRef # A Command is an output point that directly determines the behavior of equipment and/or affects relevant operational points. - Common_Space: URIRef # A class of spaces that are used by multiple people at the same time + Common_Space: ( + URIRef # A class of spaces that are used by multiple people at the same time + ) Communication_Loss_Alarm: URIRef # An alarm that indicates a loss of communication e.g. with a device or controller Compressor: URIRef # (1) device for mechanically increasing the pressure of a gas. (2) often described as being either open, hermetic, or semihermetic to describe how the compressor and motor drive is situated in relation to the gas or vapor being compressed. Types include centrifugal, axial flow, reciprocating, rotary screw, rotary vane, scroll, or diaphragm. 1. device for mechanically increasing the pressure of a gas. 2. specific machine, with or without accessories, for compressing refrigerant vapor. Computer_Room_Air_Conditioning: URIRef # A device that monitors and maintains the temperature, air distribution and humidity in a network room or data center. Concession: URIRef # A space to sell food and beverages. 
Usually embedded in a larger space and does not include a space where people consume their purchases - Condensate_Leak_Alarm: URIRef # An alarm that indicates a leak of condensate from a cooling system + Condensate_Leak_Alarm: ( + URIRef # An alarm that indicates a leak of condensate from a cooling system + ) Condenser: URIRef # A heat exchanger in which the primary heat transfer vapor changes its state to a liquid phase. Condenser_Heat_Exchanger: URIRef # A heat exchanger in which the primary heat transfer vapor changes its state to a liquid phase. Condenser_Water: URIRef # Water used used to remove heat through condensation - Condenser_Water_Bypass_Valve: URIRef # A valve installed in a bypass line of a condenser water loop - Condenser_Water_Isolation_Valve: URIRef # An isolation valve installed in the condenser water loop + Condenser_Water_Bypass_Valve: ( + URIRef # A valve installed in a bypass line of a condenser water loop + ) + Condenser_Water_Isolation_Valve: ( + URIRef # An isolation valve installed in the condenser water loop + ) Condenser_Water_Pump: URIRef # A pump that is part of a condenser system; the pump circulates condenser water from the chiller back to the cooling tower Condenser_Water_System: URIRef # A heat rejection system consisting of (typically) cooling towers, condenser water pumps, chillers and the piping connecting the components - Condenser_Water_Temperature_Sensor: URIRef # Measures the temperature of condenser water + Condenser_Water_Temperature_Sensor: ( + URIRef # Measures the temperature of condenser water + ) Condenser_Water_Valve: URIRef # A valve that modulates the flow of condenser water Condensing_Natural_Gas_Boiler: URIRef # A closed, pressure vessel that uses natural gas and heat exchanger that capture and reuse any latent heat for heating water or other fluids to supply steam or hot water for heating, humidification, or other applications. 
Conductivity_Sensor: URIRef # Measures electrical conductance @@ -187,12 +233,16 @@ class BRICK(DefinedNamespace): Cooling_Demand_Sensor: URIRef # Measures the amount of power consumed by a cooling process; typically found by multiplying the tonnage of a unit (e.g. RTU) by the efficiency rating in kW/ton Cooling_Demand_Setpoint: URIRef # Sets the rate required for cooling Cooling_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for cooling - Cooling_Discharge_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of cooling discharge air + Cooling_Discharge_Air_Temperature_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of temperature of cooling discharge air + ) Cooling_Discharge_Air_Temperature_Integral_Time_Parameter: URIRef Cooling_Discharge_Air_Temperature_Proportional_Band_Parameter: URIRef Cooling_Start_Stop_Status: URIRef Cooling_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate for cooling - Cooling_Supply_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of supply air for cooling + Cooling_Supply_Air_Temperature_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of temperature of supply air for cooling + ) Cooling_Supply_Air_Temperature_Integral_Time_Parameter: URIRef Cooling_Supply_Air_Temperature_Proportional_Band_Parameter: URIRef Cooling_Temperature_Setpoint: URIRef # Sets temperature for cooling @@ -206,22 +256,30 @@ class BRICK(DefinedNamespace): Current_Imbalance_Sensor: URIRef # A sensor which measures the current difference (imbalance) between phases of an electrical system Current_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Current_Setpoint. 
Current_Output_Sensor: URIRef # Senses the amperes of electrical current produced as output by a device - Current_Sensor: URIRef # Senses the amperes of electrical current passing through the sensor + Current_Sensor: ( + URIRef # Senses the amperes of electrical current passing through the sensor + ) Curtailment_Override_Command: URIRef Cycle_Alarm: URIRef # An alarm that indicates off-normal conditions associated with HVAC cycles DC_Bus_Voltage_Sensor: URIRef # Measures the voltage across a DC bus DOAS: URIRef # See Dedicated_Outdoor_Air_System_Unit Damper: URIRef # Element inserted into an air-distribution system or element of an air-distribution system permitting modification of the air resistance of the system and consequently changing the airflow rate or shutting off the airflow. Damper_Command: URIRef # Controls properties of dampers - Damper_Position_Command: URIRef # Controls the position (the degree of openness) of a damper + Damper_Position_Command: ( + URIRef # Controls the position (the degree of openness) of a damper + ) Damper_Position_Sensor: URIRef # Measures the current position of a damper in terms of the percent of fully open Damper_Position_Setpoint: URIRef # Sets the position of damper Deadband_Setpoint: URIRef # Sets the size of a deadband Deceleration_Time_Setpoint: URIRef Dedicated_Outdoor_Air_System_Unit: URIRef # A device that conditions and delivers 100% outdoor air to its assigned spaces. It decouples air-conditioning of the outdoor air, usually used to provide minimum outdoor air ventilation, from conditioning of the internal loads. 
Dehumidification_Start_Stop_Status: URIRef - Deionised_Water_Conductivity_Sensor: URIRef # Measures the electrical conductance of deionised water - Deionised_Water_Level_Sensor: URIRef # Measures the height/level of deionised water in some container + Deionised_Water_Conductivity_Sensor: ( + URIRef # Measures the electrical conductance of deionised water + ) + Deionised_Water_Level_Sensor: ( + URIRef # Measures the height/level of deionised water in some container + ) Deionized_Water: URIRef # Water which has been purified by removing its ions (constituting the majority of non-particulate contaminants) Deionized_Water_Alarm: URIRef # An alarm that indicates deionized water leaks. Delay_Parameter: URIRef # A parameter determining how long to delay a subsequent action to take place after a received signal @@ -229,16 +287,24 @@ class BRICK(DefinedNamespace): Demand_Setpoint: URIRef # Sets the rate required for a process Derivative_Gain_Parameter: URIRef Derivative_Time_Parameter: URIRef - Detention_Room: URIRef # A space for the temporary involuntary confinement of people + Detention_Room: ( + URIRef # A space for the temporary involuntary confinement of people + ) Dew_Point_Setpoint: URIRef # Sets dew point Dewpoint_Sensor: URIRef # Senses the dewpoint temperature . Dew point is the temperature to which air must be cooled to become saturated with water vapor - Differential_Air_Temperature_Setpoint: URIRef # Sets temperature of differential air + Differential_Air_Temperature_Setpoint: ( + URIRef # Sets temperature of differential air + ) Differential_Pressure_Bypass_Valve: URIRef # A 2-way, self contained proportional valve with an integral differential pressure adjustment setting. 
- Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure + Differential_Pressure_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of differential pressure + ) Differential_Pressure_Integral_Time_Parameter: URIRef Differential_Pressure_Load_Shed_Status: URIRef Differential_Pressure_Proportional_Band: URIRef - Differential_Pressure_Sensor: URIRef # Measures the difference between two applied pressures + Differential_Pressure_Sensor: ( + URIRef # Measures the difference between two applied pressures + ) Differential_Pressure_Setpoint: URIRef # Sets differential pressure Differential_Pressure_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Differential_Pressure_Setpoint. Differential_Pressure_Step_Parameter: URIRef @@ -249,42 +315,70 @@ class BRICK(DefinedNamespace): Direct_Expansion_Cooling_Coil: URIRef Direct_Expansion_Heating_Coil: URIRef Direction_Command: URIRef # Commands that affect the direction of some phenomenon - Direction_Sensor: URIRef # Measures the direction in degrees in which a phenomenon is occurring + Direction_Sensor: ( + URIRef # Measures the direction in degrees in which a phenomenon is occurring + ) Direction_Status: URIRef # Indicates which direction a device is operating in Disable_Command: URIRef # Commands that disable functionality - Disable_Differential_Enthalpy_Command: URIRef # Disables the use of differential enthalpy control - Disable_Differential_Temperature_Command: URIRef # Disables the use of differential temperature control + Disable_Differential_Enthalpy_Command: ( + URIRef # Disables the use of differential enthalpy control + ) + Disable_Differential_Temperature_Command: ( + URIRef # Disables the use of differential temperature control + ) Disable_Fixed_Enthalpy_Command: URIRef # Disables the use of fixed enthalpy control - Disable_Fixed_Temperature_Command: URIRef # Disables the use of fixed temperature 
temperature + Disable_Fixed_Temperature_Command: ( + URIRef # Disables the use of fixed temperature temperature + ) Disable_Hot_Water_System_Outside_Air_Temperature_Setpoint: URIRef # Disables hot water system when outside air temperature reaches the indicated value Disable_Status: URIRef # Indicates if functionality has been disabled Discharge_Air: URIRef # the air exiting the registers (vents). Discharge_Air_Dewpoint_Sensor: URIRef # Measures dewpoint of discharge air - Discharge_Air_Duct_Pressure_Status: URIRef # Indicates if air pressure in discharge duct is within expected bounds - Discharge_Air_Flow_Demand_Setpoint: URIRef # Sets the rate of discharge air flow required for a process + Discharge_Air_Duct_Pressure_Status: ( + URIRef # Indicates if air pressure in discharge duct is within expected bounds + ) + Discharge_Air_Flow_Demand_Setpoint: ( + URIRef # Sets the rate of discharge air flow required for a process + ) Discharge_Air_Flow_High_Reset_Setpoint: URIRef Discharge_Air_Flow_Low_Reset_Setpoint: URIRef Discharge_Air_Flow_Reset_Setpoint: URIRef # Setpoints used in Reset strategies Discharge_Air_Flow_Sensor: URIRef # Measures the rate of flow of discharge air Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow - Discharge_Air_Humidity_Sensor: URIRef # Measures the relative humidity of discharge air + Discharge_Air_Humidity_Sensor: ( + URIRef # Measures the relative humidity of discharge air + ) Discharge_Air_Humidity_Setpoint: URIRef # Humidity setpoint for discharge air Discharge_Air_Smoke_Detection_Alarm: URIRef - Discharge_Air_Static_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of static pressure of discharge air + Discharge_Air_Static_Pressure_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of static pressure of discharge air + ) Discharge_Air_Static_Pressure_Integral_Time_Parameter: URIRef Discharge_Air_Static_Pressure_Proportional_Band_Parameter: URIRef - Discharge_Air_Static_Pressure_Sensor: URIRef # The 
static pressure of air within discharge regions of an HVAC system - Discharge_Air_Static_Pressure_Setpoint: URIRef # Sets static pressure of discharge air + Discharge_Air_Static_Pressure_Sensor: ( + URIRef # The static pressure of air within discharge regions of an HVAC system + ) + Discharge_Air_Static_Pressure_Setpoint: ( + URIRef # Sets static pressure of discharge air + ) Discharge_Air_Static_Pressure_Step_Parameter: URIRef Discharge_Air_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with the temperature of discharge air. - Discharge_Air_Temperature_Cooling_Setpoint: URIRef # Sets temperature of discharge air for cooling - Discharge_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of discharge air - Discharge_Air_Temperature_Heating_Setpoint: URIRef # Sets temperature of discharge air for heating + Discharge_Air_Temperature_Cooling_Setpoint: ( + URIRef # Sets temperature of discharge air for cooling + ) + Discharge_Air_Temperature_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of temperature of discharge air + ) + Discharge_Air_Temperature_Heating_Setpoint: ( + URIRef # Sets temperature of discharge air for heating + ) Discharge_Air_Temperature_High_Reset_Setpoint: URIRef Discharge_Air_Temperature_Low_Reset_Setpoint: URIRef Discharge_Air_Temperature_Proportional_Band_Parameter: URIRef Discharge_Air_Temperature_Reset_Differential_Setpoint: URIRef - Discharge_Air_Temperature_Sensor: URIRef # Measures the temperature of discharge air + Discharge_Air_Temperature_Sensor: ( + URIRef # Measures the temperature of discharge air + ) Discharge_Air_Temperature_Setpoint: URIRef # Sets temperature of discharge air Discharge_Air_Temperature_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Discharge_Air_Temperature_Setpoint. 
Discharge_Air_Temperature_Step_Parameter: URIRef @@ -297,21 +391,35 @@ class BRICK(DefinedNamespace): Discharge_Water_Differential_Pressure_Integral_Time_Parameter: URIRef Discharge_Water_Differential_Pressure_Proportional_Band_Parameter: URIRef Discharge_Water_Flow_Sensor: URIRef # Measures the rate of flow of discharge water - Discharge_Water_Flow_Setpoint: URIRef # Sets the target flow rate of discharge water + Discharge_Water_Flow_Setpoint: ( + URIRef # Sets the target flow rate of discharge water + ) Discharge_Water_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with temperature of the discharge water. Discharge_Water_Temperature_Proportional_Band_Parameter: URIRef - Discharge_Water_Temperature_Sensor: URIRef # Measures the temperature of discharge water + Discharge_Water_Temperature_Sensor: ( + URIRef # Measures the temperature of discharge water + ) Discharge_Water_Temperature_Setpoint: URIRef # Sets temperature of discharge water Disconnect_Switch: URIRef # Building power is most commonly provided by utility company through a master disconnect switch (sometimes called a service disconnect) in the main electrical room of a building. The Utility Company provided master disconnect switch often owns or restricts access to this switch. There can also be other cases where a disconnect is placed into an electrical system to allow service cut-off to a portion of the building. Displacement_Flow_Air_Diffuser: URIRef # An air diffuser that is designed for low discharge air speeds to minimize turbulence and induction of room air. This diffuser is used with displacement ventilation systems. Distribution_Frame: URIRef # A class of spaces where the cables carrying signals meet and connect, e.g. 
a wiring closet or a broadcast downlink room Domestic_Hot_Water_Supply_Temperature_Sensor: URIRef # Measures the temperature of domestic water supplied by a hot water system - Domestic_Hot_Water_Supply_Temperature_Setpoint: URIRef # Sets temperature of supplying part of domestic hot water + Domestic_Hot_Water_Supply_Temperature_Setpoint: ( + URIRef # Sets temperature of supplying part of domestic hot water + ) Domestic_Hot_Water_System: URIRef # The equipment, devices and conduits that handle the production and distribution of domestic hot water in a building - Domestic_Hot_Water_System_Enable_Command: URIRef # Enables operation of the domestic hot water system - Domestic_Hot_Water_Temperature_Setpoint: URIRef # Sets temperature of domestic hot water - Domestic_Hot_Water_Valve: URIRef # A valve regulating the flow of domestic hot water - Domestic_Water: URIRef # Tap water for drinking, washing, cooking, and flushing of toliets + Domestic_Hot_Water_System_Enable_Command: ( + URIRef # Enables operation of the domestic hot water system + ) + Domestic_Hot_Water_Temperature_Setpoint: ( + URIRef # Sets temperature of domestic hot water + ) + Domestic_Hot_Water_Valve: ( + URIRef # A valve regulating the flow of domestic hot water + ) + Domestic_Water: ( + URIRef # Tap water for drinking, washing, cooking, and flushing of toliets + ) Domestic_Water_Loop: URIRef Drench_Hose: URIRef Drive_Ready_Status: URIRef # Indicates if a hard drive or other storage device is ready to be used, e.g. 
in the context of RAID @@ -328,17 +436,27 @@ class BRICK(DefinedNamespace): Effective_Room_Air_Temperature_Setpoint: URIRef Effective_Supply_Air_Temperature_Setpoint: URIRef Effective_Zone_Air_Temperature_Setpoint: URIRef - Electric_Baseboard_Radiator: URIRef # Electric heating device located at or near the floor + Electric_Baseboard_Radiator: ( + URIRef # Electric heating device located at or near the floor + ) Electric_Boiler: URIRef # A closed, pressure vessel that uses electricity for heating water or other fluids to supply steam or hot water for heating, humidification, or other applications. Electric_Radiator: URIRef # Electric heating device Electrical_Equipment: URIRef - Electrical_Meter: URIRef # A meter that measures the usage or consumption of electricity - Electrical_Power_Sensor: URIRef # Measures the amount of instantaneous electric power consumed + Electrical_Meter: ( + URIRef # A meter that measures the usage or consumption of electricity + ) + Electrical_Power_Sensor: ( + URIRef # Measures the amount of instantaneous electric power consumed + ) Electrical_Room: URIRef # A class of service rooms that house electrical equipment for a building Electrical_System: URIRef # Devices that serve or are part of the electrical subsystem in the building Elevator: URIRef # A device that provides vertical transportation between floors, levels or decks of a building, vessel or other structure - Elevator_Shaft: URIRef # The vertical space in which an elevator ascends and descends - Elevator_Space: URIRef # The vertical space in which an elevator ascends and descends + Elevator_Shaft: ( + URIRef # The vertical space in which an elevator ascends and descends + ) + Elevator_Space: ( + URIRef # The vertical space in which an elevator ascends and descends + ) Embedded_Surface_System_Panel: URIRef # Radiant panel heating and cooling system where the energy heat source or sink is embedded in a radiant layer which is thermally insulated from the building structure. 
Embedded_Temperature_Sensor: URIRef # Measures the internal temperature of the radiant layer of the radiant heating and cooling HVAC system. Embedded_Temperature_Setpoint: URIRef # Sets temperature for the internal material, e.g. concrete slab, of the radiant panel. @@ -352,23 +470,39 @@ class BRICK(DefinedNamespace): Emergency_Power_Off_System_Activated_By_High_Temperature_Status: URIRef Emergency_Power_Off_System_Activated_By_Leak_Detection_System_Status: URIRef Emergency_Power_Off_System_Status: URIRef - Emergency_Push_Button_Status: URIRef # Indicates if an emergency button has been pushed + Emergency_Push_Button_Status: ( + URIRef # Indicates if an emergency button has been pushed + ) Emergency_Wash_Station: URIRef Employee_Entrance_Lobby: URIRef # An open space near an entrance that is typically only used for employees Enable_Command: URIRef # Commands that enable functionality - Enable_Differential_Enthalpy_Command: URIRef # Enables the use of differential enthalpy control - Enable_Differential_Temperature_Command: URIRef # Enables the use of differential temperature control + Enable_Differential_Enthalpy_Command: ( + URIRef # Enables the use of differential enthalpy control + ) + Enable_Differential_Temperature_Command: ( + URIRef # Enables the use of differential temperature control + ) Enable_Fixed_Enthalpy_Command: URIRef # Enables the use of fixed enthalpy control - Enable_Fixed_Temperature_Command: URIRef # Enables the use of fixed temperature control + Enable_Fixed_Temperature_Command: ( + URIRef # Enables the use of fixed temperature control + ) Enable_Hot_Water_System_Outside_Air_Temperature_Setpoint: URIRef # Enables hot water system when outside air temperature reaches the indicated value - Enable_Status: URIRef # Indicates if a system or piece of functionality has been enabled + Enable_Status: ( + URIRef # Indicates if a system or piece of functionality has been enabled + ) Enclosed_Office: URIRef # A space for individuals to work with walls and 
a door - Energy_Generation_System: URIRef # A collection of devices that generates electricity + Energy_Generation_System: ( + URIRef # A collection of devices that generates electricity + ) Energy_Sensor: URIRef # Measures energy consumption - Energy_Storage: URIRef # Devices or equipment that store energy in its various forms + Energy_Storage: ( + URIRef # Devices or equipment that store energy in its various forms + ) Energy_Storage_System: URIRef # A collection of devices that stores electricity Energy_System: URIRef # A collection of devices that generates, stores or transports electricity - Energy_Usage_Sensor: URIRef # Measures the total amount of energy used over some period of time + Energy_Usage_Sensor: ( + URIRef # Measures the total amount of energy used over some period of time + ) Energy_Zone: URIRef # A space or group of spaces that are managed or monitored as one unit for energy purposes Entering_Water: URIRef # Water that is entering a piece of equipment or system Entering_Water_Flow_Sensor: URIRef # Measures the rate of flow of water entering a piece of equipment or system @@ -394,13 +528,19 @@ class BRICK(DefinedNamespace): Exhaust_Air_Flow_Setpoint: URIRef # Sets exhaust air flow rate Exhaust_Air_Humidity_Sensor: URIRef # Measures the relative humidity of exhaust air Exhaust_Air_Humidity_Setpoint: URIRef # Humidity setpoint for exhaust air - Exhaust_Air_Stack_Flow_Deadband_Setpoint: URIRef # Sets the size of a deadband of exhaust air stack flow + Exhaust_Air_Stack_Flow_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of exhaust air stack flow + ) Exhaust_Air_Stack_Flow_Integral_Time_Parameter: URIRef Exhaust_Air_Stack_Flow_Proportional_Band_Parameter: URIRef - Exhaust_Air_Stack_Flow_Sensor: URIRef # Measures the rate of flow of air in the exhaust air stack + Exhaust_Air_Stack_Flow_Sensor: ( + URIRef # Measures the rate of flow of air in the exhaust air stack + ) Exhaust_Air_Stack_Flow_Setpoint: URIRef # Sets exhaust air stack flow 
rate Exhaust_Air_Static_Pressure_Proportional_Band_Parameter: URIRef - Exhaust_Air_Static_Pressure_Sensor: URIRef # The static pressure of air within exhaust regions of an HVAC system + Exhaust_Air_Static_Pressure_Sensor: ( + URIRef # The static pressure of air within exhaust regions of an HVAC system + ) Exhaust_Air_Static_Pressure_Setpoint: URIRef # Sets static pressure of exhaust air Exhaust_Air_Temperature_Sensor: URIRef # Measures the temperature of exhaust air Exhaust_Air_Velocity_Pressure_Sensor: URIRef @@ -417,10 +557,14 @@ class BRICK(DefinedNamespace): Fan_Status: URIRef # Indicates properties of fans Fan_VFD: URIRef # Variable-frequency drive for fans Fault_Reset_Command: URIRef # Clears a fault status - Fault_Status: URIRef # Indicates the presence of a fault in a device, system or control loop + Fault_Status: ( + URIRef # Indicates the presence of a fault in a device, system or control loop + ) Field_Of_Play: URIRef # The area of a stadium where athletic events occur, e.g. the soccer pitch Filter: URIRef # Device to remove gases from a mixture of gases or to remove solid material from a fluid - Filter_Differential_Pressure_Sensor: URIRef # Measures the difference in pressure on either side of a filter + Filter_Differential_Pressure_Sensor: ( + URIRef # Measures the difference in pressure on either side of a filter + ) Filter_Reset_Command: URIRef Filter_Status: URIRef # Indicates if a filter needs to be replaced Final_Filter: URIRef # The last, high-efficiency filter installed in a sequence to remove the finest particulates from the substance being filtered @@ -436,15 +580,21 @@ class BRICK(DefinedNamespace): Flow_Setpoint: URIRef # Sets flow Fluid: URIRef # substance, as a liquid or gas, that is capable of flowing and that changes shape when acted on by a force. 
Food_Service_Room: URIRef # A space used in the production, storage, serving, or cleanup of food and beverages - Formaldehyde_Level_Sensor: URIRef # Measures the concentration of formaldehyde in air - Freeze_Status: URIRef # Indicates if a substance contained within a vessel has frozen + Formaldehyde_Level_Sensor: ( + URIRef # Measures the concentration of formaldehyde in air + ) + Freeze_Status: ( + URIRef # Indicates if a substance contained within a vessel has frozen + ) Freezer: URIRef # cold chamber usually kept at a temperature of 22°F to 31°F (–5°C to –1°C), with high-volume air circulation. Frequency_Command: URIRef # Controls the frequency of a device's operation (e.g. rotational frequency) Frequency_Sensor: URIRef # Measures the frequency of a phenomenon or aspect of a phenomenon, e.g. the frequency of a fan turning Fresh_Air_Fan: URIRef # Fan moving fresh air -- air that is supplied into the building from the outdoors Fresh_Air_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Fresh_Air_Setpoint. Frost: URIRef # frost formed on the cold surface (tubes, plates) of a cooling coil. - Frost_Sensor: URIRef # Senses the presence of frost or conditions that may cause frost + Frost_Sensor: ( + URIRef # Senses the presence of frost or conditions that may cause frost + ) Fuel_Oil: URIRef # Petroleum based oil burned for energy Fume_Hood: URIRef # A fume-collection device mounted over a work space, table, or shelf and serving to conduct unwanted gases away from the area enclosed. Fume_Hood_Air_Flow_Sensor: URIRef # Measures the rate of flow of air in a fume hood @@ -458,19 +608,27 @@ class BRICK(DefinedNamespace): Gas_Valve: URIRef Gasoline: URIRef # Petroleum derived liquid used as a fuel source Gatehouse: URIRef # The standalone building used to manage the entrance to a campus or building grounds - Generator_Room: URIRef # A room for electrical equipment, specifically electrical generators. 
+ Generator_Room: ( + URIRef # A room for electrical equipment, specifically electrical generators. + ) Glycol: URIRef HVAC_Equipment: URIRef # See Heating_Ventilation_Air_Conditioning_System HVAC_System: URIRef # See Heating_Ventilation_Air_Conditioning_System HVAC_Zone: URIRef # a space or group of spaces, within a building with heating, cooling, and ventilating requirements, that are sufficiently similar so that desired conditions (e.g., temperature) can be maintained throughout using a single sensor (e.g., thermostat or temperature sensor). HX: URIRef # See Heat_Exchanger - Hail: URIRef # pellets of frozen rain which fall in showers from cumulonimbus clouds. + Hail: ( + URIRef # pellets of frozen rain which fall in showers from cumulonimbus clouds. + ) Hail_Sensor: URIRef # Measures hail in terms of its size and damage potential Hallway: URIRef # A common space, used to connect other parts of a building Hazardous_Materials_Storage: URIRef # A storage space set aside (usually with restricted access) for the storage of materials that can be hazardous to living beings or the environment Heat_Exchanger: URIRef # A heat exchanger is a piece of equipment built for efficient heat transfer from one medium to another. 
The media may be separated by a solid wall to prevent mixing or they may be in direct contact (BEDES) - Heat_Exchanger_Supply_Water_Temperature_Sensor: URIRef # Measures the temperature of water supplied by a heat exchanger - Heat_Exchanger_System_Enable_Status: URIRef # Indicates if the heat exchanger system has been enabled + Heat_Exchanger_Supply_Water_Temperature_Sensor: ( + URIRef # Measures the temperature of water supplied by a heat exchanger + ) + Heat_Exchanger_System_Enable_Status: ( + URIRef # Indicates if the heat exchanger system has been enabled + ) Heat_Recovery_Hot_Water_System: URIRef Heat_Sensor: URIRef # Measures heat Heat_Wheel: URIRef # A rotary heat exchanger positioned within the supply and exhaust air streams of an air handling system in order to recover heat energy @@ -480,25 +638,39 @@ class BRICK(DefinedNamespace): Heating_Demand_Sensor: URIRef # Measures the amount of power consumed by a heating process; typically found by multiplying the tonnage of a unit (e.g. 
RTU) by the efficiency rating in kW/ton Heating_Demand_Setpoint: URIRef # Sets the rate required for heating Heating_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for heating - Heating_Discharge_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of heating discharge air + Heating_Discharge_Air_Temperature_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of temperature of heating discharge air + ) Heating_Discharge_Air_Temperature_Integral_Time_Parameter: URIRef Heating_Discharge_Air_Temperature_Proportional_Band_Parameter: URIRef Heating_Start_Stop_Status: URIRef Heating_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate for heating - Heating_Supply_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of supply air for heating + Heating_Supply_Air_Temperature_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of temperature of supply air for heating + ) Heating_Supply_Air_Temperature_Integral_Time_Parameter: URIRef Heating_Supply_Air_Temperature_Proportional_Band_Parameter: URIRef Heating_Temperature_Setpoint: URIRef # Sets temperature for heating Heating_Thermal_Power_Sensor: URIRef Heating_Valve: URIRef # A valve that controls air temperature by modulating the amount of hot water flowing through a heating coil Heating_Ventilation_Air_Conditioning_System: URIRef # The equipment, distribution systems and terminals that provide, either collectively or individually, the processes of heating, ventilating or air conditioning to a building or portion of a building - High_CO2_Alarm: URIRef # A device that indicates high concentration of carbon dioxide. - High_Discharge_Air_Temperature_Alarm: URIRef # An alarm that indicates that discharge air temperature is too high + High_CO2_Alarm: ( + URIRef # A device that indicates high concentration of carbon dioxide. 
+ ) + High_Discharge_Air_Temperature_Alarm: ( + URIRef # An alarm that indicates that discharge air temperature is too high + ) High_Head_Pressure_Alarm: URIRef # An alarm that indicates a high pressure generated on the output side of a gas compressor in a refrigeration or air conditioning system. - High_Humidity_Alarm: URIRef # An alarm that indicates high concentration of water vapor in the air. + High_Humidity_Alarm: ( + URIRef # An alarm that indicates high concentration of water vapor in the air. + ) High_Humidity_Alarm_Parameter: URIRef # A parameter determining the humidity level at which to trigger a high humidity alarm - High_Outside_Air_Lockout_Temperature_Differential_Parameter: URIRef # The upper bound of the outside air temperature lockout range - High_Return_Air_Temperature_Alarm: URIRef # An alarm that indicates that return air temperature is too high + High_Outside_Air_Lockout_Temperature_Differential_Parameter: ( + URIRef # The upper bound of the outside air temperature lockout range + ) + High_Return_Air_Temperature_Alarm: ( + URIRef # An alarm that indicates that return air temperature is too high + ) High_Static_Pressure_Cutout_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a High_Static_Pressure_Cutout_Setpoint. High_Temperature_Alarm: URIRef # An alarm that indicates high temperature. High_Temperature_Alarm_Parameter: URIRef # A parameter determining the temperature level at which to trigger a high temperature alarm @@ -508,9 +680,13 @@ class BRICK(DefinedNamespace): Hospitality_Box: URIRef # A room at a stadium, usually overlooking the field of play, that is physical separate from the other seating at the venue Hot_Box: URIRef # hot air chamber forming part of an air handler. 
Hot_Water: URIRef # Hot water used for HVAC heating or supply to hot taps - Hot_Water_Baseboard_Radiator: URIRef # Hydronic heating device located at or near the floor + Hot_Water_Baseboard_Radiator: ( + URIRef # Hydronic heating device located at or near the floor + ) Hot_Water_Coil: URIRef # A heating element typically made of pipe, tube or wire that emits heat that is filled with hot water. - Hot_Water_Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure of hot water + Hot_Water_Differential_Pressure_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of differential pressure of hot water + ) Hot_Water_Differential_Pressure_Integral_Time_Parameter: URIRef Hot_Water_Differential_Pressure_Load_Shed_Reset_Status: URIRef Hot_Water_Differential_Pressure_Load_Shed_Status: URIRef @@ -518,24 +694,40 @@ class BRICK(DefinedNamespace): Hot_Water_Differential_Pressure_Sensor: URIRef # Measures the difference in water pressure on either side of a hot water valve Hot_Water_Differential_Pressure_Setpoint: URIRef # Sets the target water differential pressure between an upstream and downstream point in a water pipe or conduit used to carry hot water Hot_Water_Differential_Temperature_Sensor: URIRef # Measures the difference in temperature between the entering water to the boiler or other water heating device and leaving water from the same boiler or other water heating device - Hot_Water_Discharge_Flow_Sensor: URIRef # Measures the rate of flow of hot discharge water - Hot_Water_Discharge_Flow_Setpoint: URIRef # Sets the target flow rate of hot discharge water + Hot_Water_Discharge_Flow_Sensor: ( + URIRef # Measures the rate of flow of hot discharge water + ) + Hot_Water_Discharge_Flow_Setpoint: ( + URIRef # Sets the target flow rate of hot discharge water + ) Hot_Water_Discharge_Temperature_Load_Shed_Status: URIRef Hot_Water_Flow_Sensor: URIRef # Measures the rate of flow in a hot water circuit Hot_Water_Flow_Setpoint: 
URIRef # Sets the target flow rate of hot water Hot_Water_Loop: URIRef # A collection of equipment that transport and regulate hot water among each other - Hot_Water_Meter: URIRef # A meter that measures the usage or consumption of hot water + Hot_Water_Meter: ( + URIRef # A meter that measures the usage or consumption of hot water + ) Hot_Water_Pump: URIRef # A pump that performs work on hot water; typically part of a hot water system Hot_Water_Radiator: URIRef # Radiator that uses hot water - Hot_Water_Return_Flow_Sensor: URIRef # Measures the rate of flow of hot return water - Hot_Water_Return_Temperature_Sensor: URIRef # Measures the temperature of water returned to a hot water system + Hot_Water_Return_Flow_Sensor: ( + URIRef # Measures the rate of flow of hot return water + ) + Hot_Water_Return_Temperature_Sensor: ( + URIRef # Measures the temperature of water returned to a hot water system + ) Hot_Water_Static_Pressure_Setpoint: URIRef # Sets static pressure of hot air - Hot_Water_Supply_Flow_Sensor: URIRef # Measures the rate of flow of hot supply water - Hot_Water_Supply_Flow_Setpoint: URIRef # Sets the target flow rate of hot supply water + Hot_Water_Supply_Flow_Sensor: ( + URIRef # Measures the rate of flow of hot supply water + ) + Hot_Water_Supply_Flow_Setpoint: ( + URIRef # Sets the target flow rate of hot supply water + ) Hot_Water_Supply_Temperature_High_Reset_Setpoint: URIRef Hot_Water_Supply_Temperature_Load_Shed_Status: URIRef Hot_Water_Supply_Temperature_Low_Reset_Setpoint: URIRef - Hot_Water_Supply_Temperature_Sensor: URIRef # Measures the temperature of water supplied by a hot water system + Hot_Water_Supply_Temperature_Sensor: ( + URIRef # Measures the temperature of water supplied by a hot water system + ) Hot_Water_System: URIRef # The equipment, devices and conduits that handle the production and distribution of hot water in a building Hot_Water_System_Enable_Command: URIRef # Enables operation of the hot water system 
Hot_Water_Temperature_Setpoint: URIRef # Sets the temperature of hot water @@ -546,29 +738,41 @@ class BRICK(DefinedNamespace): Humidifier_Fault_Status: URIRef # Indicates the presence of a fault in a humidifier Humidify_Command: URIRef Humidity_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with the concentration of water vapor in the air. - Humidity_Parameter: URIRef # Parameters relevant to humidity-related systems and points + Humidity_Parameter: ( + URIRef # Parameters relevant to humidity-related systems and points + ) Humidity_Sensor: URIRef # Measures the concentration of water vapor in air Humidity_Setpoint: URIRef # Sets humidity Humidity_Tolerance_Parameter: URIRef # A parameter determining the difference between upper and lower limits of humidity. IDF: URIRef # An room for an intermediate distribution frame, where cables carrying signals from the main distribution frame terminate and then feed out to endpoints Ice: URIRef # Water in its solid form - Ice_Tank_Leaving_Water_Temperature_Sensor: URIRef # Measures the temperature of water leaving an ice tank - Illuminance_Sensor: URIRef # Measures the total luminous flux incident on a surface, per unit area + Ice_Tank_Leaving_Water_Temperature_Sensor: ( + URIRef # Measures the temperature of water leaving an ice tank + ) + Illuminance_Sensor: ( + URIRef # Measures the total luminous flux incident on a surface, per unit area + ) Imbalance_Sensor: URIRef # A sensor which measures difference (imbalance) between phases of an electrical system Induction_Unit: URIRef # A device with an primary air connection and integrated coil and condensate pan that performs sensible and latent cooling of a space. Essentially an Active Chilled Beam with a built in condensate pan. 
Information_Area: URIRef # An information booth or kiosk where visitors would look for information Inside_Face_Surface_Temperature_Sensor: URIRef # Measures the inside surface (relative to the space) of the radiant panel of the radiant heating and cooling HVAC system. Inside_Face_Surface_Temperature_Setpoint: URIRef # Sets temperature for the inside face surface temperature of the radiant panel. Intake_Air_Filter: URIRef # Filters air intake - Intake_Air_Temperature_Sensor: URIRef # Measures air at the interface between the building and the outside + Intake_Air_Temperature_Sensor: ( + URIRef # Measures air at the interface between the building and the outside + ) Integral_Gain_Parameter: URIRef Integral_Time_Parameter: URIRef Intercom_Equipment: URIRef - Interface: URIRef # A device that provides an occupant control over a lighting system + Interface: ( + URIRef # A device that provides an occupant control over a lighting system + ) Intrusion_Detection_Equipment: URIRef Inverter: URIRef # A device that changes direct current into alternating current Isolation_Valve: URIRef # A valve that stops the flow of a fluid, usually for maintenance or safety purposes - Janitor_Room: URIRef # A room set aside for the storage of cleaning equipment and supplies + Janitor_Room: ( + URIRef # A room set aside for the storage of cleaning equipment and supplies + ) Jet_Nozzle_Air_Diffuser: URIRef # An air diffuser that is designed to produce high velocity discharge air stream to throw the air over a large distance or target the air stream to a localize area Laboratory: URIRef # facility acceptable to the local, national, or international recognized authority having jurisdiction and which provides uniform testing and examination procedures and standards for meeting design, manufacturing, and factory testing requirements. 
Laminar_Flow_Air_Diffuser: URIRef # An air diffuser that is designed for low discharge air speeds to provide uniform and unidirectional air pattern which minimizes room air entrainment @@ -576,7 +780,9 @@ class BRICK(DefinedNamespace): Lead_Lag_Command: URIRef # Enables lead/lag operation Lead_Lag_Status: URIRef # Indicates if lead/lag operation is enabled Lead_On_Off_Command: URIRef # Controls the active/inactive status of the "lead" part of a lead/lag system - Leak_Alarm: URIRef # An alarm that indicates leaks occurred in systems containing fluids + Leak_Alarm: ( + URIRef # An alarm that indicates leaks occurred in systems containing fluids + ) Leaving_Water: URIRef # Water that is leaving a piece of equipment or system Leaving_Water_Flow_Sensor: URIRef # Measures the rate of flow of water that is leaving a piece of equipment or system Leaving_Water_Flow_Setpoint: URIRef # Sets the target flow rate of leaving water @@ -594,7 +800,9 @@ class BRICK(DefinedNamespace): Load_Current_Sensor: URIRef # Measures the current consumed by a load Load_Parameter: URIRef Load_Setpoint: URIRef - Load_Shed_Command: URIRef # Controls load shedding behavior provided by a control system + Load_Shed_Command: ( + URIRef # Controls load shedding behavior provided by a control system + ) Load_Shed_Differential_Pressure_Setpoint: URIRef Load_Shed_Setpoint: URIRef Load_Shed_Status: URIRef # Indicates if a load shedding policy is in effect @@ -608,12 +816,18 @@ class BRICK(DefinedNamespace): Lounge: URIRef # A room for lesiure activities or relaxing Louver: URIRef # Device consisting of an assembly of parallel sloping vanes, intended to permit the passage of air while providing a measure of protection against environmental influences Low_Freeze_Protect_Temperature_Parameter: URIRef - Low_Humidity_Alarm: URIRef # An alarm that indicates low concentration of water vapor in the air. + Low_Humidity_Alarm: ( + URIRef # An alarm that indicates low concentration of water vapor in the air. 
+ ) Low_Humidity_Alarm_Parameter: URIRef # A parameter determining the humidity level at which to trigger a low humidity alarm - Low_Outside_Air_Lockout_Temperature_Differential_Parameter: URIRef # The lower bound of the outside air temperature lockout range + Low_Outside_Air_Lockout_Temperature_Differential_Parameter: ( + URIRef # The lower bound of the outside air temperature lockout range + ) Low_Outside_Air_Temperature_Enable_Differential_Sensor: URIRef Low_Outside_Air_Temperature_Enable_Setpoint: URIRef - Low_Return_Air_Temperature_Alarm: URIRef # An alarm that indicates that return air temperature is too low + Low_Return_Air_Temperature_Alarm: ( + URIRef # An alarm that indicates that return air temperature is too low + ) Low_Suction_Pressure_Alarm: URIRef # An alarm that indicates a low suction pressure in the compressor in a refrigeration or air conditioning system. Low_Temperature_Alarm: URIRef # An alarm that indicates low temperature. Low_Temperature_Alarm_Parameter: URIRef # A parameter determining the temperature level at which to trigger a low temperature alarm @@ -621,7 +835,9 @@ class BRICK(DefinedNamespace): Luminaire: URIRef # A complete lighting unit consisting of a lamp or lamps and ballast(s) (when applicable) together with the parts designed to distribute the light, to position and protect the lamps, and to connect the lamps to the power supply. 
Luminaire_Driver: URIRef # A power source for a luminaire Luminance_Alarm: URIRef - Luminance_Command: URIRef # Controls the amount of luminance delivered by a lighting system + Luminance_Command: ( + URIRef # Controls the amount of luminance delivered by a lighting system + ) Luminance_Sensor: URIRef # Measures the luminous intensity per unit area of light travelling in a given direction Luminance_Setpoint: URIRef # Sets luminance MAU: URIRef # See Makeup_Air_Unit @@ -633,7 +849,9 @@ class BRICK(DefinedNamespace): Makeup_Air_Unit: URIRef # A device designed to condition ventilation air introduced into a space or to replace air exhausted from a process or general area exhaust. The device may be used to prevent negative pressure within buildings or to reduce airborne contaminants in a space. Makeup_Water: URIRef # Water used used to makeup water loss through leaks, evaporation, or blowdown Makeup_Water_Valve: URIRef # A valve regulating the flow of makeup water into a water holding tank, e.g. a cooling tower, hot water tank - Manual_Auto_Status: URIRef # Indicates if a system is under manual or automatic operation + Manual_Auto_Status: ( + URIRef # Indicates if a system is under manual or automatic operation + ) Massage_Room: URIRef # Usually adjunct to an athletic facility, a private/semi-private space where massages are performed Max_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Air_Flow_Setpoint. Max_Air_Temperature_Setpoint: URIRef # Setpoint for maximum air temperature @@ -661,10 +879,14 @@ class BRICK(DefinedNamespace): Max_Unoccupied_Cooling_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Unoccupied_Cooling_Supply_Air_Flow_Setpoint. Max_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Unoccupied_Heating_Discharge_Air_Flow_Setpoint. 
Max_Unoccupied_Heating_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places an upper bound on the range of permitted values of a Unoccupied_Heating_Supply_Air_Flow_Setpoint. - Max_Water_Level_Alarm: URIRef # Alarm indicating that the maximum water level was reached + Max_Water_Level_Alarm: ( + URIRef # Alarm indicating that the maximum water level was reached + ) Max_Water_Temperature_Setpoint: URIRef # Setpoint for max water temperature Measurable: URIRef - Mechanical_Room: URIRef # A class of service rooms where mechanical equipment (HVAC) operates + Mechanical_Room: ( + URIRef # A class of service rooms where mechanical equipment (HVAC) operates + ) Media_Hot_Desk: URIRef # A non-enclosed space used by members of the media temporarily to cover an event while they are present at a venue Media_Production_Room: URIRef # A enclosed space used by media professionals for the production of media Media_Room: URIRef # A class of spaces related to the creation of media @@ -710,7 +932,9 @@ class BRICK(DefinedNamespace): Min_Unoccupied_Cooling_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Unoccupied_Cooling_Supply_Air_Flow_Setpoint. Min_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Unoccupied_Heating_Discharge_Air_Flow_Setpoint. Min_Unoccupied_Heating_Supply_Air_Flow_Setpoint_Limit: URIRef # A parameter that places a lower bound on the range of permitted values of a Unoccupied_Heating_Supply_Air_Flow_Setpoint. - Min_Water_Level_Alarm: URIRef # Alarm indicating that the minimum water level was reached + Min_Water_Level_Alarm: ( + URIRef # Alarm indicating that the minimum water level was reached + ) Min_Water_Temperature_Setpoint: URIRef # Setpoint for min water temperature Mixed_Air: URIRef # (1) air that contains two or more streams of air. (2) combined outdoor air and recirculated air. 
Mixed_Air_Filter: URIRef # A filter that is applied to the mixture of recirculated and outside air @@ -721,7 +945,9 @@ class BRICK(DefinedNamespace): Mixed_Air_Temperature_Setpoint: URIRef # Sets temperature of mixed air Mixed_Damper: URIRef # A damper that modulates the flow of the mixed outside and return air streams Mode_Command: URIRef # Controls the operating mode of a device or controller - Mode_Status: URIRef # Indicates which mode a system, device or control loop is currently in + Mode_Status: ( + URIRef # Indicates which mode a system, device or control loop is currently in + ) Motion_Sensor: URIRef # Detects the presence of motion in some area Motor: URIRef # A machine in which power is applied to do work by the conversion of various forms of energy into mechanical force and motion. Motor_Control_Center: URIRef # The Motor Control Center is a specialized type of switchgear which provides electrical power to major mechanical systems in the building such as HVAC components. @@ -735,21 +961,39 @@ class BRICK(DefinedNamespace): Natural_Gas: URIRef # Fossil fuel energy source consisting largely of methane and other hydrocarbons Natural_Gas_Boiler: URIRef # A closed, pressure vessel that uses natural gas for heating water or other fluids to supply steam or hot water for heating, humidification, or other applications. Network_Video_Recorder: URIRef - No_Water_Alarm: URIRef # Alarm indicating that there is no water in the equipment or system + No_Water_Alarm: ( + URIRef # Alarm indicating that there is no water in the equipment or system + ) Noncondensing_Natural_Gas_Boiler: URIRef # A closed, pressure vessel that uses natural gas with no system to capture latent heat for heating water or other fluids to supply steam or hot water for heating, humidification, or other applications. 
Occupancy_Command: URIRef # Controls whether or not a device or controller is operating in "Occupied" mode Occupancy_Sensor: URIRef # Detects occupancy of some space or area Occupancy_Status: URIRef # Indicates if a room or space is occupied Occupied_Air_Temperature_Setpoint: URIRef - Occupied_Cooling_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for cooling when occupied - Occupied_Cooling_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate for cooling when occupied - Occupied_Cooling_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature for cooling when occupied - Occupied_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow when occupied + Occupied_Cooling_Discharge_Air_Flow_Setpoint: ( + URIRef # Sets discharge air flow for cooling when occupied + ) + Occupied_Cooling_Supply_Air_Flow_Setpoint: ( + URIRef # Sets supply air flow rate for cooling when occupied + ) + Occupied_Cooling_Temperature_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of temperature for cooling when occupied + ) + Occupied_Discharge_Air_Flow_Setpoint: ( + URIRef # Sets discharge air flow when occupied + ) Occupied_Discharge_Air_Temperature_Setpoint: URIRef - Occupied_Heating_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for heating when occupied - Occupied_Heating_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate for heating when occupied - Occupied_Heating_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature for heating when occupied - Occupied_Mode_Status: URIRef # Indicates if a system, device or control loop is in "Occupied" mode + Occupied_Heating_Discharge_Air_Flow_Setpoint: ( + URIRef # Sets discharge air flow for heating when occupied + ) + Occupied_Heating_Supply_Air_Flow_Setpoint: ( + URIRef # Sets supply air flow rate for heating when occupied + ) + Occupied_Heating_Temperature_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of 
temperature for heating when occupied + ) + Occupied_Mode_Status: ( + URIRef # Indicates if a system, device or control loop is in "Occupied" mode + ) Occupied_Return_Air_Temperature_Setpoint: URIRef Occupied_Room_Air_Temperature_Setpoint: URIRef Occupied_Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate when occupied @@ -762,23 +1006,35 @@ class BRICK(DefinedNamespace): Oil: URIRef # a viscous liquid derived from petroleum, especially for use as a fuel or lubricant. On_Command: URIRef # An On Command controls or reports the binary 'on' status of a control loop, relay or equipment activity. It can only be used to start/activate an associated equipment or process, or determine that the related entity is 'on' On_Off_Command: URIRef # An On/Off Command controls or reports the binary status of a control loop, relay or equipment activity - On_Off_Status: URIRef # Indicates the on/off status of a control loop, relay or equipment + On_Off_Status: ( + URIRef # Indicates the on/off status of a control loop, relay or equipment + ) On_Status: URIRef # Indicates if a control loop, relay or equipment is on On_Timer_Sensor: URIRef # Measures the duration for which a device was in an active or "on" state - Open_Close_Status: URIRef # Indicates the open/close status of a device such as a damper or valve + Open_Close_Status: ( + URIRef # Indicates the open/close status of a device such as a damper or valve + ) Open_Heating_Valve_Outside_Air_Temperature_Setpoint: URIRef Open_Office: URIRef # An open space used for work or study by multiple people. 
Usuaully subdivided into cubicles or desks Operating_Mode_Status: URIRef # Indicates the current operating mode of a system, device or control loop Outdoor_Area: URIRef # A class of spaces that exist outside of a building Output_Frequency_Sensor: URIRef - Output_Voltage_Sensor: URIRef # Measures the voltage output by some process or device + Output_Voltage_Sensor: ( + URIRef # Measures the voltage output by some process or device + ) Outside: URIRef Outside_Air: URIRef # air external to a defined zone (e.g., corridors). Outside_Air_CO2_Sensor: URIRef # Measures the concentration of CO2 in outside air Outside_Air_CO_Sensor: URIRef # Measures the concentration of CO in outside air - Outside_Air_Dewpoint_Sensor: URIRef # Senses the dewpoint temperature of outside air - Outside_Air_Enthalpy_Sensor: URIRef # Measures the total heat content of outside air - Outside_Air_Flow_Sensor: URIRef # Measures the rate of flow of outside air into the system + Outside_Air_Dewpoint_Sensor: ( + URIRef # Senses the dewpoint temperature of outside air + ) + Outside_Air_Enthalpy_Sensor: ( + URIRef # Measures the total heat content of outside air + ) + Outside_Air_Flow_Sensor: ( + URIRef # Measures the rate of flow of outside air into the system + ) Outside_Air_Flow_Setpoint: URIRef # Sets outside air flow rate Outside_Air_Grains_Sensor: URIRef # Measures the mass of water vapor in outside air Outside_Air_Humidity_Sensor: URIRef # Measures the relative humidity of outside air @@ -790,12 +1046,18 @@ class BRICK(DefinedNamespace): Outside_Air_Temperature_Low_Reset_Setpoint: URIRef Outside_Air_Temperature_Sensor: URIRef # Measures the temperature of outside air Outside_Air_Temperature_Setpoint: URIRef # Sets temperature of outside air - Outside_Air_Wet_Bulb_Temperature_Sensor: URIRef # A sensor measuring the wet-bulb temperature of outside air + Outside_Air_Wet_Bulb_Temperature_Sensor: ( + URIRef # A sensor measuring the wet-bulb temperature of outside air + ) Outside_Damper: URIRef # A 
damper that modulates the flow of outside air Outside_Face_Surface_Temperature_Sensor: URIRef # Measures the outside surface (relative to the space) of the radiant panel of a radiant heating and cooling HVAC system. Outside_Face_Surface_Temperature_Setpoint: URIRef # Sets temperature for the outside face surface temperature of the radiant panel. - Outside_Illuminance_Sensor: URIRef # Measures the total luminous flux incident on an outside, per unit area - Overload_Alarm: URIRef # An alarm that can indicate when a full-load current is exceeded. + Outside_Illuminance_Sensor: ( + URIRef # Measures the total luminous flux incident on an outside, per unit area + ) + Overload_Alarm: ( + URIRef # An alarm that can indicate when a full-load current is exceeded. + ) Overridden_Off_Status: URIRef # Indicates if a control loop, relay or equipment has been turned off when it would otherwise be scheduled to be on Overridden_On_Status: URIRef # Indicates if a control loop, relay or equipment has been turned on when it would otherwise be scheduled to be off Overridden_Status: URIRef # Indicates if the expected operating status of an equipment or control loop has been overridden @@ -811,15 +1073,21 @@ class BRICK(DefinedNamespace): PVT_Panel: URIRef # A type of solar panels that convert solar radiation into usable thermal and electrical energy PV_Array: URIRef PV_Current_Output_Sensor: URIRef # See Photovoltaic_Current_Output_Sensor - PV_Generation_System: URIRef # A collection of photovoltaic devices that generates energy + PV_Generation_System: ( + URIRef # A collection of photovoltaic devices that generates energy + ) PV_Panel: URIRef # An integrated assembly of interconnected photovoltaic cells designed to deliver a selected level of working voltage and current at its output terminals packaged for protection against environment degradation and suited for incorporation in photovoltaic power systems. 
Parameter: URIRef # Parameter points are configuration settings used to guide the operation of equipment and control systems; for example they may provide bounds on valid setpoint values Parking_Level: URIRef # A floor of a parking structure Parking_Space: URIRef # An area large enough to park an individual vehicle - Parking_Structure: URIRef # A building or part of a building devoted to vehicle parking + Parking_Structure: ( + URIRef # A building or part of a building devoted to vehicle parking + ) Particulate_Matter_Sensor: URIRef # Detects pollutants in the ambient air Passive_Chilled_Beam: URIRef # A chilled beam that does not have an integral air supply and instead relies on natural convection to draw air through the device. - Peak_Power_Demand_Sensor: URIRef # The peak power consumed by a process over some period of time + Peak_Power_Demand_Sensor: ( + URIRef # The peak power consumed by a process over some period of time + ) Photovoltaic_Array: URIRef # A collection of photovoltaic panels Photovoltaic_Current_Output_Sensor: URIRef # Senses the amperes of electrical current produced as output by a photovoltaic device Piezoelectric_Sensor: URIRef # Senses changes pressure, acceleration, temperature, force or strain via the piezoelectric effect @@ -838,15 +1106,21 @@ class BRICK(DefinedNamespace): Pre_Filter: URIRef # A filter installed in front of a more efficient filter to extend the life of the more expensive higher efficiency filter Pre_Filter_Status: URIRef # Indicates if a prefilter needs to be replaced Preheat_Demand_Setpoint: URIRef # Sets the rate required for preheat - Preheat_Discharge_Air_Temperature_Sensor: URIRef # Measures the temperature of discharge air before heating is applied + Preheat_Discharge_Air_Temperature_Sensor: ( + URIRef # Measures the temperature of discharge air before heating is applied + ) Preheat_Hot_Water_System: URIRef Preheat_Hot_Water_Valve: URIRef - Preheat_Supply_Air_Temperature_Sensor: URIRef # Measures the temperature 
of supply air before it is heated + Preheat_Supply_Air_Temperature_Sensor: ( + URIRef # Measures the temperature of supply air before it is heated + ) Pressure_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with pressure. Pressure_Sensor: URIRef # Measure the amount of force acting on a unit area Pressure_Setpoint: URIRef # Sets pressure Pressure_Status: URIRef # Indicates if pressure is within expected bounds - Private_Office: URIRef # An office devoted to a single individual, with walls and door + Private_Office: ( + URIRef # An office devoted to a single individual, with walls and door + ) Proportional_Band_Parameter: URIRef Proportional_Gain_Parameter: URIRef Pump: URIRef # Machine for imparting energy to a fluid, causing it to do work, drawing a fluid into itself through an entrance port, and forcing the fluid out through an exhaust port. @@ -864,9 +1138,15 @@ class BRICK(DefinedNamespace): Radiant_Panel_Temperature_Setpoint: URIRef # Sets temperature of radiant panel. 
Radiation_Hot_Water_System: URIRef Radiator: URIRef # Heat exchangers designed to transfer thermal energy from one medium to another - Radioactivity_Concentration_Sensor: URIRef # Measures the concentration of radioactivity - Radon_Concentration_Sensor: URIRef # Measures the concentration of radioactivity due to radon - Rain_Duration_Sensor: URIRef # Measures the duration of precipitation within some time frame + Radioactivity_Concentration_Sensor: ( + URIRef # Measures the concentration of radioactivity + ) + Radon_Concentration_Sensor: ( + URIRef # Measures the concentration of radioactivity due to radon + ) + Rain_Duration_Sensor: ( + URIRef # Measures the duration of precipitation within some time frame + ) Rain_Sensor: URIRef # Measures the amount of precipitation fallen Rated_Speed_Setpoint: URIRef # Sets rated speed Reactive_Power_Sensor: URIRef # Measures the portion of power that, averaged over a complete cycle of the AC waveform, is due to stored energy which returns to the source in each cycle @@ -878,7 +1158,9 @@ class BRICK(DefinedNamespace): Relief_Damper: URIRef # A damper that is a component of a Relief Air System, ensuring building doesn't become over-pressurised Relief_Fan: URIRef # A fan that is a component of a Relief Air System, ensuring building doesn't become over-pressurised Remotely_On_Off_Status: URIRef - Reset_Command: URIRef # Commands that reset a flag, property or value to its default + Reset_Command: ( + URIRef # Commands that reset a flag, property or value to its default + ) Reset_Setpoint: URIRef # Setpoints used in reset strategies Rest_Room: URIRef # A room that provides toilets and washbowls. Alternate spelling of Restroom Restroom: URIRef # A room that provides toilets and washbowls. 
@@ -888,7 +1170,9 @@ class BRICK(DefinedNamespace): Return_Air_CO2_Setpoint: URIRef # Sets some property of CO2 in Return Air Return_Air_CO_Sensor: URIRef # Measures the concentration of CO in return air Return_Air_Dewpoint_Sensor: URIRef # Senses the dewpoint temperature of return air - Return_Air_Differential_Pressure_Sensor: URIRef # Measures the difference in pressure between the return and supply side + Return_Air_Differential_Pressure_Sensor: ( + URIRef # Measures the difference in pressure between the return and supply side + ) Return_Air_Differential_Pressure_Setpoint: URIRef # Sets the target air differential pressure between an upstream and downstream point in a return air duct or conduit Return_Air_Enthalpy_Sensor: URIRef # Measures the total heat content of return air Return_Air_Filter: URIRef # Filters return air @@ -904,15 +1188,25 @@ class BRICK(DefinedNamespace): Return_Air_Temperature_Setpoint: URIRef # The target temperature for return air, often used as an approximation of zone air temperature Return_Chilled_Water_Temperature_Setpoint: URIRef # Sets the temperature of return (downstream of the chilled water load) chilled water Return_Condenser_Water: URIRef # In a condenser water loop, this is water being brought away from the condenser side of a heat-rejection device (e.g. chiller). It is the 'warm' side. 
- Return_Condenser_Water_Flow_Sensor: URIRef # Measures the flow of the return condenser water - Return_Condenser_Water_Temperature_Sensor: URIRef # Measures the temperature of the return condenser water - Return_Condenser_Water_Temperature_Setpoint: URIRef # The temperature setpoint for the return condenser water + Return_Condenser_Water_Flow_Sensor: ( + URIRef # Measures the flow of the return condenser water + ) + Return_Condenser_Water_Temperature_Sensor: ( + URIRef # Measures the temperature of the return condenser water + ) + Return_Condenser_Water_Temperature_Setpoint: ( + URIRef # The temperature setpoint for the return condenser water + ) Return_Damper: URIRef # A damper that modulates the flow of return air Return_Fan: URIRef # Fan moving return air -- air that is circulated from the building back into the HVAC system - Return_Heating_Valve: URIRef # A valve installed on the return side of a heat exchanger + Return_Heating_Valve: ( + URIRef # A valve installed on the return side of a heat exchanger + ) Return_Hot_Water: URIRef Return_Hot_Water_Temperature_Setpoint: URIRef # Sets the temperature of return (downstream of the hot water load) hot water - Return_Water: URIRef # The water is a system after it is used in a heat transfer cycle + Return_Water: ( + URIRef # The water is a system after it is used in a heat transfer cycle + ) Return_Water_Flow_Sensor: URIRef Return_Water_Temperature_Sensor: URIRef # Measures the temperature of return water Return_Water_Temperature_Setpoint: URIRef # Sets the temperature of return water @@ -922,20 +1216,28 @@ class BRICK(DefinedNamespace): Room: URIRef # Base class for all more specific room types. 
Room_Air_Temperature_Setpoint: URIRef # Sets temperature of room air Run_Enable_Command: URIRef - Run_Request_Status: URIRef # Indicates if a request has been filed to start a device or equipment + Run_Request_Status: ( + URIRef # Indicates if a request has been filed to start a device or equipment + ) Run_Status: URIRef Run_Time_Sensor: URIRef # Measures the duration for which a device was in an active or "on" state Safety_Equipment: URIRef Safety_Shower: URIRef Safety_System: URIRef Sash_Position_Sensor: URIRef # Measures the current position of a sash in terms of the percent of fully open - Schedule_Temperature_Setpoint: URIRef # The current setpoint as indicated by the schedule + Schedule_Temperature_Setpoint: ( + URIRef # The current setpoint as indicated by the schedule + ) Security_Equipment: URIRef - Security_Service_Room: URIRef # A class of spaces used by the security staff of a facility + Security_Service_Room: ( + URIRef # A class of spaces used by the security staff of a facility + ) Sensor: URIRef # A Sensor is an input point that represents the value of a device or instrument designed to detect and measure a variable (ASHRAE Dictionary). Server_Room: URIRef Service_Room: URIRef # A class of spaces related to the operations of building subsystems, e.g. HVAC, electrical, IT, plumbing, etc - Setpoint: URIRef # A Setpoint is an input value at which the desired property is set + Setpoint: ( + URIRef # A Setpoint is an input value at which the desired property is set + ) Shading_System: URIRef # Devices that can control daylighting through various means Shared_Office: URIRef # An office used by multiple people Short_Cycle_Alarm: URIRef # An alarm that indicates a short cycle occurred. 
A short cycle occurs when a cooling cycle is prevented from completing its full cycle @@ -954,7 +1256,9 @@ class BRICK(DefinedNamespace): Speed_Sensor: URIRef # Measures the magnitude of velocity of some form of movement Speed_Setpoint: URIRef # Sets speed Speed_Setpoint_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Speed_Setpoint. - Speed_Status: URIRef # Indicates the operating speed of a device or equipment, e.g. fan + Speed_Status: ( + URIRef # Indicates the operating speed of a device or equipment, e.g. fan + ) Sports_Service_Room: URIRef # A class of spaces used in the support of sports Stage_Enable_Command: URIRef # A point representing a discrete stage which the equipment should be operating at. The desired stage number should be identified by an entity property Stage_Riser: URIRef # A low platform in a space or on a stage @@ -962,12 +1266,16 @@ class BRICK(DefinedNamespace): Staircase: URIRef # A vertical space containing stairs Standby_CRAC: URIRef # A CRAC that is activated as part of a lead/lag operation or when an alarm occurs in a primary unit Standby_Fan: URIRef # Fan that is activated as part of a lead/lag operation or when a primary fan raises an alarm - Standby_Glycool_Unit_On_Off_Status: URIRef # Indicates the on/off status of a standby glycool unit + Standby_Glycool_Unit_On_Off_Status: ( + URIRef # Indicates the on/off status of a standby glycool unit + ) Standby_Load_Shed_Command: URIRef Standby_Unit_On_Off_Status: URIRef # Indicates the on/off status of a standby unit Start_Stop_Command: URIRef # A Start/Stop Command controls or reports the active/inactive status of a control sequence Start_Stop_Status: URIRef # Indicates the active/inactive status of a control loop (but not equipment activities or relays -- use On/Off for this purpose) - Static_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of static pressure + Static_Pressure_Deadband_Setpoint: ( + URIRef # Sets the 
size of a deadband of static pressure + ) Static_Pressure_Integral_Time_Parameter: URIRef Static_Pressure_Proportional_Band_Parameter: URIRef Static_Pressure_Sensor: URIRef # Measures resistance to airflow in a heating and cooling system's components and duct work @@ -976,7 +1284,9 @@ class BRICK(DefinedNamespace): Static_Pressure_Step_Parameter: URIRef Status: URIRef # A Status is input point that reports the current operating mode, state, position, or condition of an item. Statuses are observations and should be considered 'read-only' Steam: URIRef # water in the vapor phase. - Steam_Baseboard_Radiator: URIRef # Steam heating device located at or near the floor + Steam_Baseboard_Radiator: ( + URIRef # Steam heating device located at or near the floor + ) Steam_Distribution: URIRef # Utilize a steam distribution source to represent how steam is distributed across multiple destinations Steam_On_Off_Command: URIRef Steam_Radiator: URIRef # Radiator that uses steam @@ -991,8 +1301,12 @@ class BRICK(DefinedNamespace): Supply_Air: URIRef # (1) air delivered by mechanical or natural ventilation to a space, composed of any combination of outdoor air, recirculated air, or transfer air. (2) air entering a space from an air-conditioning, heating, or ventilating apparatus for the purpose of comfort conditioning. Supply air is generally filtered, fan forced, and either heated, cooled, humidified, or dehumidified as necessary to maintain specified conditions. Only the quantity of outdoor air within the supply airflow may be used as replacement air. 
Supply_Air_Differential_Pressure_Sensor: URIRef # Measures the difference in pressure between an upstream and downstream of an air duct or other air conduit used to supply air into the building Supply_Air_Differential_Pressure_Setpoint: URIRef # Sets the target air differential pressure between an upstream and downstream point in a supply air duct or conduit - Supply_Air_Duct_Pressure_Status: URIRef # Indicates if air pressure in supply duct is within expected bounds - Supply_Air_Flow_Demand_Setpoint: URIRef # Sets the rate of supply air flow required for a process + Supply_Air_Duct_Pressure_Status: ( + URIRef # Indicates if air pressure in supply duct is within expected bounds + ) + Supply_Air_Flow_Demand_Setpoint: ( + URIRef # Sets the rate of supply air flow required for a process + ) Supply_Air_Flow_Sensor: URIRef # Measures the rate of flow of supply air Supply_Air_Flow_Setpoint: URIRef # Sets supply air flow rate Supply_Air_Humidity_Sensor: URIRef # Measures the relative humidity of supply air @@ -1000,13 +1314,19 @@ class BRICK(DefinedNamespace): Supply_Air_Integral_Gain_Parameter: URIRef Supply_Air_Plenum: URIRef # A component of the HVAC the receives air from the air handling unit to distribute to the building Supply_Air_Proportional_Gain_Parameter: URIRef - Supply_Air_Static_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of static pressure of supply air + Supply_Air_Static_Pressure_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of static pressure of supply air + ) Supply_Air_Static_Pressure_Integral_Time_Parameter: URIRef Supply_Air_Static_Pressure_Proportional_Band_Parameter: URIRef - Supply_Air_Static_Pressure_Sensor: URIRef # The static pressure of air within supply regions of an HVAC system + Supply_Air_Static_Pressure_Sensor: ( + URIRef # The static pressure of air within supply regions of an HVAC system + ) Supply_Air_Static_Pressure_Setpoint: URIRef # Sets static pressure of supply air Supply_Air_Temperature_Alarm: 
URIRef # An alarm that indicates the off-normal conditions associated with the temperature of supply air. - Supply_Air_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of supply air + Supply_Air_Temperature_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of temperature of supply air + ) Supply_Air_Temperature_High_Reset_Setpoint: URIRef Supply_Air_Temperature_Low_Reset_Setpoint: URIRef Supply_Air_Temperature_Proportional_Band_Parameter: URIRef @@ -1016,22 +1336,36 @@ class BRICK(DefinedNamespace): Supply_Air_Temperature_Step_Parameter: URIRef Supply_Air_Velocity_Pressure_Sensor: URIRef Supply_Chilled_Water: URIRef - Supply_Chilled_Water_Temperature_Setpoint: URIRef # Temperature setpoint for supply chilled water + Supply_Chilled_Water_Temperature_Setpoint: ( + URIRef # Temperature setpoint for supply chilled water + ) Supply_Condenser_Water: URIRef # In a condenser water loop, this is water being brought to the condenser side of a heat-rejection device (e.g. chiller). It is the 'cold' side. 
- Supply_Condenser_Water_Flow_Sensor: URIRef # Measures the flow of the supply condenser water - Supply_Condenser_Water_Temperature_Sensor: URIRef # Measures the temperature of the supply condenser water - Supply_Condenser_Water_Temperature_Setpoint: URIRef # The temperature setpoint for the supply condenser water + Supply_Condenser_Water_Flow_Sensor: ( + URIRef # Measures the flow of the supply condenser water + ) + Supply_Condenser_Water_Temperature_Sensor: ( + URIRef # Measures the temperature of the supply condenser water + ) + Supply_Condenser_Water_Temperature_Setpoint: ( + URIRef # The temperature setpoint for the supply condenser water + ) Supply_Fan: URIRef # Fan moving supply air -- air that is supplied from the HVAC system into the building Supply_Hot_Water: URIRef - Supply_Hot_Water_Temperature_Setpoint: URIRef # Temperature setpoint for supply hot water + Supply_Hot_Water_Temperature_Setpoint: ( + URIRef # Temperature setpoint for supply hot water + ) Supply_Water: URIRef - Supply_Water_Differential_Pressure_Deadband_Setpoint: URIRef # Sets the size of a deadband of differential pressure of supply water + Supply_Water_Differential_Pressure_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of differential pressure of supply water + ) Supply_Water_Differential_Pressure_Integral_Time_Parameter: URIRef Supply_Water_Differential_Pressure_Proportional_Band_Parameter: URIRef Supply_Water_Flow_Sensor: URIRef # Measures the rate of flow of hot supply water Supply_Water_Flow_Setpoint: URIRef # Sets the flow rate of hot supply water Supply_Water_Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with temperature of the supply water. 
- Supply_Water_Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature of supply water + Supply_Water_Temperature_Deadband_Setpoint: ( + URIRef # Sets the size of a deadband of temperature of supply water + ) Supply_Water_Temperature_Integral_Time_Parameter: URIRef Supply_Water_Temperature_Proportional_Band_Parameter: URIRef Supply_Water_Temperature_Setpoint: URIRef # Sets temperature of supply water @@ -1048,13 +1382,17 @@ class BRICK(DefinedNamespace): TVOC_Level_Sensor: URIRef # A sensor measuring the level of all VOCs in air TVOC_Sensor: URIRef Team_Room: URIRef # An office used by multiple team members for specific work tasks. Distinct from Conference Room - Telecom_Room: URIRef # A class of spaces used to support telecommuncations and IT equipment + Telecom_Room: ( + URIRef # A class of spaces used to support telecommuncations and IT equipment + ) Temperature_Alarm: URIRef # An alarm that indicates the off-normal conditions associated with temperature. Temperature_Deadband_Setpoint: URIRef # Sets the size of a deadband of temperature Temperature_Differential_Reset_Setpoint: URIRef Temperature_High_Reset_Setpoint: URIRef Temperature_Low_Reset_Setpoint: URIRef - Temperature_Parameter: URIRef # Parameters relevant to temperature-related systems and points + Temperature_Parameter: ( + URIRef # Parameters relevant to temperature-related systems and points + ) Temperature_Sensor: URIRef # Measures temperature: the physical property of matter that quantitatively expresses the common notions of hot and cold Temperature_Setpoint: URIRef # Sets temperature Temperature_Step_Parameter: URIRef @@ -1077,13 +1415,27 @@ class BRICK(DefinedNamespace): Tunnel: URIRef # An enclosed space that connects buildings. 
Often underground Underfloor_Air_Plenum: URIRef # An open space between a structural concrete slab and the underside of a raised access floor system that connects to an air handling unit to receive conditioned and/or ventilating air before delivery to the room(s) Underfloor_Air_Plenum_Static_Pressure_Sensor: URIRef # Measures the outward push of air against the plenum surfaces and used to measure the resistance when air moves through the plenum - Underfloor_Air_Plenum_Static_Pressure_Setpoint: URIRef # Sets the underfloor air plenum static pressure - Underfloor_Air_Temperature_Sensor: URIRef # Measures the temperature of underfloor air - Unit_Failure_Alarm: URIRef # An alarm that indicates the failure of an equipment or device - Unoccupied_Air_Temperature_Cooling_Setpoint: URIRef # Sets temperature of air when unoccupied for cooling - Unoccupied_Air_Temperature_Heating_Setpoint: URIRef # Sets temperature of air when unoccupied for heating - Unoccupied_Air_Temperature_Setpoint: URIRef # Sets temperature of air when unoccupied - Unoccupied_Cooling_Discharge_Air_Flow_Setpoint: URIRef # Sets discharge air flow for cooling when unoccupied + Underfloor_Air_Plenum_Static_Pressure_Setpoint: ( + URIRef # Sets the underfloor air plenum static pressure + ) + Underfloor_Air_Temperature_Sensor: ( + URIRef # Measures the temperature of underfloor air + ) + Unit_Failure_Alarm: ( + URIRef # An alarm that indicates the failure of an equipment or device + ) + Unoccupied_Air_Temperature_Cooling_Setpoint: ( + URIRef # Sets temperature of air when unoccupied for cooling + ) + Unoccupied_Air_Temperature_Heating_Setpoint: ( + URIRef # Sets temperature of air when unoccupied for heating + ) + Unoccupied_Air_Temperature_Setpoint: ( + URIRef # Sets temperature of air when unoccupied + ) + Unoccupied_Cooling_Discharge_Air_Flow_Setpoint: ( + URIRef # Sets discharge air flow for cooling when unoccupied + ) Unoccupied_Discharge_Air_Temperature_Setpoint: URIRef Unoccupied_Load_Shed_Command: 
URIRef Unoccupied_Return_Air_Temperature_Setpoint: URIRef @@ -1100,12 +1452,16 @@ class BRICK(DefinedNamespace): Variable_Air_Volume_Box: URIRef # A device that regulates the volume and temperature of air delivered to a zone by opening or closing a damper Variable_Air_Volume_Box_With_Reheat: URIRef # A VAV box with a reheat coil mounted on the discharge end of the unit that can heat the air delivered to a zone Variable_Frequency_Drive: URIRef # Electronic device that varies its output frequency to vary the rotating speed of a motor, given a fixed input frequency. Used with fans or pumps to vary the flow in the system as a function of a maintained pressure. - Velocity_Pressure_Sensor: URIRef # Measures the difference between total pressure and static pressure + Velocity_Pressure_Sensor: ( + URIRef # Measures the difference between total pressure and static pressure + ) Velocity_Pressure_Setpoint: URIRef # Sets static veloicty pressure Vent_Operating_Mode_Status: URIRef # Indicates the current operating mode of a vent Ventilation_Air_Flow_Ratio_Limit: URIRef # A parameter that places a lower or upper bound on the range of permitted values of a Ventilation_Air_Flow_Ratio_Setpoint. Ventilation_Air_System: URIRef # The equipment, devices, and conduits that handle the introduction and distribution of ventilation air in the building - Vertical_Space: URIRef # A class of spaces used to connect multiple floors or levels.. + Vertical_Space: ( + URIRef # A class of spaces used to connect multiple floors or levels.. + ) Video_Intercom: URIRef Video_Surveillance_Equipment: URIRef Visitor_Lobby: URIRef # A lobby for visitors to the building. 
Sometimes used to distinguish from an employee entrance looby @@ -1124,10 +1480,14 @@ class BRICK(DefinedNamespace): Water_Flow_Sensor: URIRef # Measures the rate of flow of water Water_Flow_Setpoint: URIRef # Sets the target flow rate of water Water_Heater: URIRef # An apparatus for heating and usually storing hot water - Water_Level_Alarm: URIRef # An alarm that indicates a high or low water level e.g. in a basin + Water_Level_Alarm: ( + URIRef # An alarm that indicates a high or low water level e.g. in a basin + ) Water_Level_Sensor: URIRef # Measures the height/level of water in some container Water_Loop: URIRef # A collection of equipment that transport and regulate water among each other - Water_Loss_Alarm: URIRef # An alarm that indicates a loss of water e.g. during transport + Water_Loss_Alarm: ( + URIRef # An alarm that indicates a loss of water e.g. during transport + ) Water_Meter: URIRef # A meter that measures the usage or consumption of water Water_Pump: URIRef # A pump that performs work on water Water_System: URIRef # The equipment, devices and conduits that handle the production and distribution of water in a building @@ -1138,15 +1498,23 @@ class BRICK(DefinedNamespace): Water_Usage_Sensor: URIRef # Measures the amount of water that is consumed, over some period of time Water_Valve: URIRef # A valve that modulates the flow of water Weather_Station: URIRef # A dedicated weather measurement station - Wind_Direction_Sensor: URIRef # Measures the direction of wind in degrees relative to North - Wind_Speed_Sensor: URIRef # Measured speed of wind, caused by air moving from high to low pressure + Wind_Direction_Sensor: ( + URIRef # Measures the direction of wind in degrees relative to North + ) + Wind_Speed_Sensor: ( + URIRef # Measured speed of wind, caused by air moving from high to low pressure + ) Wing: URIRef # A wing is part of a building – or any feature of a building – that is subordinate to the main, central structure. 
Workshop: URIRef # A space used to house equipment that can be used to repair or fabricate things Zone: URIRef # (1) a separately controlled heated or cooled space. (2) one occupied space or several occupied spaces with similar occupancy category, occupant density, zone air distribution effectiveness, and zone primary airflow per unit area. (3) space or group of spaces within a building for which the heating, cooling, or lighting requirements are sufficiently similar that desired conditions can be maintained throughout by a single controlling device. Zone_Air: URIRef # air inside a defined zone (e.g., corridors). - Zone_Air_Cooling_Temperature_Setpoint: URIRef # The upper (cooling) setpoint for zone air temperature + Zone_Air_Cooling_Temperature_Setpoint: ( + URIRef # The upper (cooling) setpoint for zone air temperature + ) Zone_Air_Dewpoint_Sensor: URIRef # Measures dewpoint of zone air - Zone_Air_Heating_Temperature_Setpoint: URIRef # The lower (heating) setpoint for zone air temperature + Zone_Air_Heating_Temperature_Setpoint: ( + URIRef # The lower (heating) setpoint for zone air temperature + ) Zone_Air_Humidity_Sensor: URIRef # Measures the relative humidity of zone air Zone_Air_Humidity_Setpoint: URIRef # Humidity setpoint for zone air Zone_Air_Temperature_Sensor: URIRef # Measures the temperature of air in a zone @@ -1160,7 +1528,9 @@ class BRICK(DefinedNamespace): hasAddress: URIRef # To specify the address of a building. 
hasAssociatedTag: URIRef # The class is associated with the given tag hasInputSubstance: URIRef # The subject receives the given substance as an input to its internal process - hasLocation: URIRef # Subject is physically located in the location given by the object + hasLocation: ( + URIRef # Subject is physically located in the location given by the object + ) hasOutputSubstance: URIRef # The subject produces or exports the given substance from its internal process hasPart: URIRef # The subject is composed in part of the entity given by the object hasPoint: URIRef # The subject has a source of telemetry identified by the object. In some systems the source of telemetry may be represented as a digital/analog input/output point @@ -1180,7 +1550,9 @@ class BRICK(DefinedNamespace): longitude: URIRef measures: URIRef # The subject measures a quantity or substance given by the object regulates: URIRef # The subject contributes to or performs the regulation of the substance given by the object - storedAt: URIRef # A reference to where the data for this TimeseriesReference is stored + storedAt: ( + URIRef # A reference to where the data for this TimeseriesReference is stored + ) timeseries: URIRef # Relates a Brick point to the TimeseriesReference that indicates where and how the data for this point is stored value: URIRef # The basic value of an entity property @@ -1201,7 +1573,9 @@ class BRICK(DefinedNamespace): measuredPowerOutput: URIRef # The nominal measured power output of the entity netArea: URIRef # Entity has net 2-dimensional area operationalStage: URIRef # The associated operational stage - operationalStageCount: URIRef # The number of operational stages supported by this eqiupment + operationalStageCount: ( + URIRef # The number of operational stages supported by this eqiupment + ) panelArea: URIRef # Surface area of a panel, such as a PV panel powerComplexity: URIRef # Entity has this power complexity powerFlow: URIRef # Entity has this power flow relative to 
the building' diff --git a/rdflib/namespace/_CSVW.py b/rdflib/namespace/_CSVW.py index 9d4f46d35..73eb661b8 100644 --- a/rdflib/namespace/_CSVW.py +++ b/rdflib/namespace/_CSVW.py @@ -82,7 +82,9 @@ class CSVW(DefinedNamespace): # http://www.w3.org/2000/01/rdf-schema#Class Cell: URIRef # A Cell represents a cell at the intersection of a Row and a Column within a Table. - Column: URIRef # A Column represents a vertical arrangement of Cells within a Table. + Column: ( + URIRef # A Column represents a vertical arrangement of Cells within a Table. + ) Datatype: URIRef # Describes facets of a datatype. Dialect: URIRef # A Dialect Description provides hints to parsers about how to parse a linked file. Direction: URIRef # The class of table/text directions. @@ -106,7 +108,11 @@ class CSVW(DefinedNamespace): rtl: URIRef # Indicates whether the tables in the group should be displayed with the first column on the left. # http://www.w3.org/ns/prov#Role - csvEncodedTabularData: URIRef # Describes the role of a CSV file in the tabular data mapping. - tabularMetadata: URIRef # Describes the role of a Metadata file in the tabular data mapping. + csvEncodedTabularData: ( + URIRef # Describes the role of a CSV file in the tabular data mapping. + ) + tabularMetadata: ( + URIRef # Describes the role of a Metadata file in the tabular data mapping. + ) _NS = Namespace("http://www.w3.org/ns/csvw#") diff --git a/rdflib/namespace/_DC.py b/rdflib/namespace/_DC.py index 7afa6430c..fa175c22d 100644 --- a/rdflib/namespace/_DC.py +++ b/rdflib/namespace/_DC.py @@ -14,13 +14,17 @@ class DC(DefinedNamespace): _fail = True # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property - contributor: URIRef # An entity responsible for making contributions to the resource. + contributor: ( + URIRef # An entity responsible for making contributions to the resource. 
+ ) coverage: URIRef # The spatial or temporal topic of the resource, spatial applicability of the resource, or jurisdiction under which the resource is relevant. creator: URIRef # An entity primarily responsible for making the resource. date: URIRef # A point or period of time associated with an event in the lifecycle of the resource. description: URIRef # An account of the resource. format: URIRef # The file format, physical medium, or dimensions of the resource. - identifier: URIRef # An unambiguous reference to the resource within a given context. + identifier: ( + URIRef # An unambiguous reference to the resource within a given context. + ) language: URIRef # A language of the resource. publisher: URIRef # An entity responsible for making the resource available. relation: URIRef # A related resource. diff --git a/rdflib/namespace/_DCAT.py b/rdflib/namespace/_DCAT.py index 246b4720c..8abcf1f38 100644 --- a/rdflib/namespace/_DCAT.py +++ b/rdflib/namespace/_DCAT.py @@ -35,7 +35,9 @@ class DCAT(DefinedNamespace): packageFormat: URIRef # The package format of the distribution in which one or more data files are grouped together, e.g. to enable a set of related files to be downloaded together. record: URIRef # A record describing the registration of a single dataset or data service that is part of the catalog. startDate: URIRef # The start of the period - theme: URIRef # A main category of the resource. A resource can have multiple themes. + theme: ( + URIRef # A main category of the resource. A resource can have multiple themes. + ) themeTaxonomy: URIRef # The knowledge organization system (KOS) used to classify catalog's datasets. # http://www.w3.org/2000/01/rdf-schema#Class @@ -60,7 +62,9 @@ class DCAT(DefinedNamespace): endpointDescription: URIRef # A description of the service end-point, including its operations, parameters etc. endpointURL: URIRef # The root location or primary endpoint of the service (a web-resolvable IRI). 
hadRole: URIRef # The function of an entity or agent with respect to another entity or resource. - qualifiedRelation: URIRef # Link to a description of a relationship with another resource. + qualifiedRelation: ( + URIRef # Link to a description of a relationship with another resource. + ) servesDataset: URIRef # A collection of data that this DataService can distribute. service: URIRef # A site or endpoint that is listed in the catalog. diff --git a/rdflib/namespace/_DCTERMS.py b/rdflib/namespace/_DCTERMS.py index 9b00f317b..d3548f4ba 100644 --- a/rdflib/namespace/_DCTERMS.py +++ b/rdflib/namespace/_DCTERMS.py @@ -19,7 +19,9 @@ class DCTERMS(DefinedNamespace): IMT: URIRef # The set of media types specified by the Internet Assigned Numbers Authority. LCC: URIRef # The set of conceptual resources specified by the Library of Congress Classification. LCSH: URIRef # The set of labeled concepts specified by the Library of Congress Subject Headings. - MESH: URIRef # The set of labeled concepts specified by the Medical Subject Headings. + MESH: ( + URIRef # The set of labeled concepts specified by the Medical Subject Headings. + ) NLM: URIRef # The set of conceptual resources specified by the National Library of Medicine Classification. TGN: URIRef # The set of places specified by the Getty Thesaurus of Geographic Names. UDC: URIRef # The set of conceptual resources specified by the Universal Decimal Classification. @@ -28,14 +30,20 @@ class DCTERMS(DefinedNamespace): abstract: URIRef # A summary of the resource. accessRights: URIRef # Information about who access the resource or an indication of its security status. accrualMethod: URIRef # The method by which items are added to a collection. - accrualPeriodicity: URIRef # The frequency with which items are added to a collection. + accrualPeriodicity: ( + URIRef # The frequency with which items are added to a collection. + ) accrualPolicy: URIRef # The policy governing the addition of items to a collection. 
alternative: URIRef # An alternative name for the resource. audience: URIRef # A class of agents for whom the resource is intended or useful. available: URIRef # Date that the resource became or will become available. bibliographicCitation: URIRef # A bibliographic reference for the resource. - conformsTo: URIRef # An established standard to which the described resource conforms. - contributor: URIRef # An entity responsible for making contributions to the resource. + conformsTo: ( + URIRef # An established standard to which the described resource conforms. + ) + contributor: ( + URIRef # An entity responsible for making contributions to the resource. + ) coverage: URIRef # The spatial or temporal topic of the resource, spatial applicability of the resource, or jurisdiction under which the resource is relevant. created: URIRef # Date of creation of the resource. creator: URIRef # An entity responsible for making the resource. @@ -50,7 +58,9 @@ class DCTERMS(DefinedNamespace): hasFormat: URIRef # A related resource that is substantially the same as the pre-existing described resource, but in another format. hasPart: URIRef # A related resource that is included either physically or logically in the described resource. hasVersion: URIRef # A related resource that is a version, edition, or adaptation of the described resource. - identifier: URIRef # An unambiguous reference to the resource within a given context. + identifier: ( + URIRef # An unambiguous reference to the resource within a given context. + ) instructionalMethod: URIRef # A process, used to engender knowledge, attitudes and skills, that the described resource is designed to support. isFormatOf: URIRef # A pre-existing related resource that is substantially the same as the described resource, but in another format. isPartOf: URIRef # A related resource in which the described resource is physically or logically included. 
@@ -71,7 +81,9 @@ class DCTERMS(DefinedNamespace): replaces: URIRef # A related resource that is supplanted, displaced, or superseded by the described resource. requires: URIRef # A related resource that is required by the described resource to support its function, delivery, or coherence. rights: URIRef # Information about rights held in and over the resource. - rightsHolder: URIRef # A person or organization owning or managing rights over the resource. + rightsHolder: ( + URIRef # A person or organization owning or managing rights over the resource. + ) source: URIRef # A related resource from which the described resource is derived. spatial: URIRef # Spatial characteristics of the resource. subject: URIRef # A topic of the resource. @@ -87,7 +99,9 @@ class DCTERMS(DefinedNamespace): BibliographicResource: URIRef # A book, article, or other documentary resource. FileFormat: URIRef # A digital resource format. Frequency: URIRef # A rate at which something recurs. - Jurisdiction: URIRef # The extent or range of judicial, law enforcement, or other authority. + Jurisdiction: ( + URIRef # The extent or range of judicial, law enforcement, or other authority. + ) LicenseDocument: URIRef # A legal document giving official permission to do something with a resource. LinguisticSystem: URIRef # A system of signs, symbols, sounds, gestures, or rules used in communication. Location: URIRef # A spatial region or named place. @@ -95,7 +109,9 @@ class DCTERMS(DefinedNamespace): MediaType: URIRef # A file format or physical medium. MediaTypeOrExtent: URIRef # A media type or extent. MethodOfAccrual: URIRef # A method by which resources are added to a collection. - MethodOfInstruction: URIRef # A process that is used to engender knowledge, attitudes, and skills. + MethodOfInstruction: ( + URIRef # A process that is used to engender knowledge, attitudes, and skills. + ) PeriodOfTime: URIRef # An interval of time that is named or defined by its start and end dates. 
PhysicalMedium: URIRef # A physical material or carrier. PhysicalResource: URIRef # A material thing. diff --git a/rdflib/namespace/_DOAP.py b/rdflib/namespace/_DOAP.py index 87ec20faf..30cdf23c3 100644 --- a/rdflib/namespace/_DOAP.py +++ b/rdflib/namespace/_DOAP.py @@ -20,8 +20,12 @@ class DOAP(DefinedNamespace): blog: URIRef # URI of a blog related to a project browse: URIRef # Web browser interface to repository. category: URIRef # A category of project. - created: URIRef # Date when something was created, in YYYY-MM-DD form. e.g. 2004-04-05 - description: URIRef # Plain text description of a project, of 2-4 sentences in length. + created: ( + URIRef # Date when something was created, in YYYY-MM-DD form. e.g. 2004-04-05 + ) + description: ( + URIRef # Plain text description of a project, of 2-4 sentences in length. + ) developer: URIRef # Developer of software for the project. documenter: URIRef # Contributor of documentation to the project. helper: URIRef # Project contributor. @@ -57,11 +61,15 @@ class DOAP(DefinedNamespace): Project: URIRef # A project. Repository: URIRef # Source code repository. SVNRepository: URIRef # Subversion source code repository. - Specification: URIRef # A specification of a system's aspects, technical or otherwise. + Specification: ( + URIRef # A specification of a system's aspects, technical or otherwise. + ) Version: URIRef # Version information of a project release. # http://www.w3.org/2002/07/owl#InverseFunctionalProperty - homepage: URIRef # URL of a project's homepage, associated with exactly one project. + homepage: ( + URIRef # URL of a project's homepage, associated with exactly one project. 
+ ) # Valid non-python identifiers _extras = [ diff --git a/rdflib/namespace/_FOAF.py b/rdflib/namespace/_FOAF.py index 7441c6e33..18aa4af72 100644 --- a/rdflib/namespace/_FOAF.py +++ b/rdflib/namespace/_FOAF.py @@ -17,8 +17,12 @@ class FOAF(DefinedNamespace): # http://www.w3.org/1999/02/22-rdf-syntax-ns#Property account: URIRef # Indicates an account held by this agent. - accountName: URIRef # Indicates the name (identifier) associated with this online account. - accountServiceHomepage: URIRef # Indicates a homepage of the service provide for this online account. + accountName: ( + URIRef # Indicates the name (identifier) associated with this online account. + ) + accountServiceHomepage: ( + URIRef # Indicates a homepage of the service provide for this online account. + ) age: URIRef # The age in years of some agent. based_near: URIRef # A location that something is based near, for some broadly human notion of near. birthday: URIRef # The birthday of this Agent, represented in mm-dd string form, eg. '12-31'. @@ -43,7 +47,9 @@ class FOAF(DefinedNamespace): made: URIRef # Something that was made by this agent. maker: URIRef # An agent that made this thing. member: URIRef # Indicates a member of a Group - membershipClass: URIRef # Indicates the class of individuals that are a member of a Group + membershipClass: ( + URIRef # Indicates the class of individuals that are a member of a Group + ) myersBriggs: URIRef # A Myers Briggs (MBTI) personality classification. name: URIRef # A name for some thing. nick: URIRef # A short informal nickname characterising an agent (includes login identifiers, IRC and other chat nicknames). diff --git a/rdflib/namespace/_GEO.py b/rdflib/namespace/_GEO.py index c890973ca..d7168d64c 100644 --- a/rdflib/namespace/_GEO.py +++ b/rdflib/namespace/_GEO.py @@ -26,20 +26,42 @@ class GEO(DefinedNamespace): """ # http://www.w3.org/2000/01/rdf-schema#Datatype + dggsLiteral: URIRef # A DGGS serialization of a geometry object. 
+ geoJSONLiteral: URIRef # A GeoJSON serialization of a geometry object. gmlLiteral: URIRef # A GML serialization of a geometry object. + kmlLiteral: URIRef # A KML serialization of a geometry object. wktLiteral: URIRef # A Well-known Text serialization of a geometry object. # http://www.w3.org/2002/07/owl#Class Feature: URIRef # This class represents the top-level feature type. This class is equivalent to GFI_Feature defined in ISO 19156:2011, and it is superclass of all feature types. + FeatureCollection: URIRef # A collection of individual Features. Geometry: URIRef # The class represents the top-level geometry type. This class is equivalent to the UML class GM_Object defined in ISO 19107, and it is superclass of all geometry types. + GeometryCollection: URIRef # A collection of individual Geometries. SpatialObject: URIRef # The class spatial-object represents everything that can have a spatial representation. It is superclass of feature and geometry. + SpatialObjectCollection: URIRef # A collection of individual Spatial Objects. This is the superclass of Feature Collection and Geometry Collection. # http://www.w3.org/2002/07/owl#DatatypeProperty asGML: URIRef # The GML serialization of a geometry asWKT: URIRef # The WKT serialization of a geometry + asGeoJSON: URIRef # The GeoJSON serialization of a geometry + asKML: URIRef # The KML serialization of a geometry + asDGGS: URIRef # The DGGS serialization of a geometry coordinateDimension: URIRef # The number of measurements or axes needed to describe the position of this geometry in a coordinate system. dimension: URIRef # The topological dimension of this geometric object, which must be less than or equal to the coordinate dimension. In non-homogeneous collections, this will return the largest topological dimension of the contained objects. - hasSerialization: URIRef # Connects a geometry object with its text-based serialization. + hasMetricArea: URIRef # The area of a Spatial Object in square meters. 
+ hasMetricLength: URIRef # The length of a Spatial Object in meters. + hasMetricPerimeterLength: ( + URIRef # The length of the perimeter of a Spatial Object in meters. + ) + hasMetricSpatialAccuracy: URIRef # The spatial resolution of a Geometry in meters. + hasMetricSpatialResolution: ( + URIRef # The spatial resolution of a Geometry in meters. + ) + hasMetricSize: URIRef # Subproperties of this property are used to indicate the size of a Spatial Object as a measurement or estimate of one or more dimensions of the Spatial Object's spatial presence. Units are always metric (meter, square meter or cubic meter) + hasMetricVolume: URIRef # The volume of a Spatial Object in cubic meters. + hasSerialization: ( + URIRef # Connects a geometry object with its text-based serialization. + ) isEmpty: URIRef # (true) if this geometric object is the empty Geometry. If true, then this geometric object represents the empty point set for the coordinate space. isSimple: URIRef # (true) if this geometric object has no anomalous geometric points, such as self intersection or self tangency. spatialDimension: URIRef # The number of measurements or axes needed to describe the spatial position of this geometry in a coordinate system. @@ -54,7 +76,21 @@ class GEO(DefinedNamespace): ehInside: URIRef # Exists if the subject SpatialObject is spatially inside the object SpatialObject. DE-9IM: TFF*FFT** ehMeet: URIRef # Exists if the subject SpatialObject spatially meets the object SpatialObject. DE-9IM: FT******* ^ F**T***** ^ F***T**** ehOverlap: URIRef # Exists if the subject SpatialObject spatially overlaps the object SpatialObject. DE-9IM: T*T***T** + hasArea: URIRef # The area of a Spatial Object. + hasBoundingBox: ( + URIRef # The minimum or smallest bounding or enclosing box of a given Feature. + ) + hasCentroid: URIRef # The arithmetic mean position of all the geometry points of a given Feature. 
+ hasDefaultGeometry: URIRef # The default geometry to be used in spatial calculations, usually the most detailed geometry. hasGeometry: URIRef # A spatial representation for a given feature. + hasLength: URIRef # The length of a Spatial Object. + hasPerimeterLength: URIRef # The length of the perimeter of a Spatial Object. + hasSize: URIRef # Subproperties of this property are used to indicate the size of a Spatial Object as a measurement or estimate of one or more dimensions of the Spatial Object's spatial presence. + hasSpatialAccuracy: ( + URIRef # The positional accuracy of the coordinates of a Geometry. + ) + hasSpatialResolution: URIRef # The spatial resolution of a Geometry. + hasVolume: URIRef # he volume of a three-dimensional Spatial Object. rcc8dc: URIRef # Exists if the subject SpatialObject is spatially disjoint from the object SpatialObject. DE-9IM: FFTFFTTTT rcc8ec: URIRef # Exists if the subject SpatialObject spatially meets the object SpatialObject. DE-9IM: FFTFTTTTT rcc8eq: URIRef # Exists if the subject SpatialObject spatially equals the object SpatialObject. DE-9IM: TFFFTFFFT diff --git a/rdflib/namespace/_ODRL2.py b/rdflib/namespace/_ODRL2.py index a9c8778b1..acb0b5b56 100644 --- a/rdflib/namespace/_ODRL2.py +++ b/rdflib/namespace/_ODRL2.py @@ -71,7 +71,9 @@ class ODRL2(DefinedNamespace): trackedParty: URIRef # The Party whose usage is being tracked. trackingParty: URIRef # The Party who is tracking usage. uid: URIRef # An unambiguous identifier - undefined: URIRef # Relates the strategy used for handling undefined actions to a Policy. + undefined: ( + URIRef # Relates the strategy used for handling undefined actions to a Policy. + ) unit: URIRef # The unit of measurement of the value of the rightOperand or rightOperandReference of a Constraint. 
xone: URIRef # The relation is satisfied when only one, and not more, of the Constraints is satisfied @@ -89,7 +91,9 @@ class ODRL2(DefinedNamespace): count: URIRef # Numeric count of executions of the action of the Rule. dateTime: URIRef # The date (and optional time and timezone) of exercising the action of the Rule. Right operand value MUST be an xsd:date or xsd:dateTime as defined by [[xmlschema11-2]]. delayPeriod: URIRef # A time delay period prior to exercising the action of the Rule. The point in time triggering this period MAY be defined by another temporal Constraint combined by a Logical Constraint (utilising the odrl:andSequence operand). Right operand value MUST be an xsd:duration as defined by [[xmlschema11-2]]. - deliveryChannel: URIRef # The delivery channel used for exercising the action of the Rule. + deliveryChannel: ( + URIRef # The delivery channel used for exercising the action of the Rule. + ) device: URIRef # An identified device used for exercising the action of the Rule. elapsedTime: URIRef # A continuous elapsed time period which may be used for exercising of the action of the Rule. Right operand value MUST be an xsd:duration as defined by [[xmlschema11-2]]. eq: URIRef # Indicating that a given value equals the right operand of the Constraint. @@ -115,7 +119,9 @@ class ODRL2(DefinedNamespace): payAmount: URIRef # The amount of a financial payment. Right operand value MUST be an xsd:decimal. percentage: URIRef # A percentage amount of the target Asset relevant for exercising the action of the Rule. Right operand value MUST be an xsd:decimal from 0 to 100. perm: URIRef # Permissions take preference over prohibitions. - policyUsage: URIRef # Indicates the actual datetime the action of the Rule was exercised. + policyUsage: ( + URIRef # Indicates the actual datetime the action of the Rule was exercised. + ) product: URIRef # Category of product or service setting a context for exercising the action of the Rule. 
prohibit: URIRef # Prohibitions take preference over permissions. purpose: URIRef # A defined purpose for exercising the action of the Rule. @@ -151,7 +157,9 @@ class ODRL2(DefinedNamespace): LogicalConstraint: URIRef # A logical expression that refines the semantics of an Action and Party/Asset Collection or declare the conditions applicable to a Rule. Offer: URIRef # A Policy that proposes a Rule over an Asset from an assigner. Operator: URIRef # Operator for constraint expression. - Party: URIRef # An entity or a collection of entities that undertake Roles in a Rule. + Party: ( + URIRef # An entity or a collection of entities that undertake Roles in a Rule. + ) PartyCollection: URIRef # A Party that is a group of individual entities PartyScope: URIRef # Scopes for Party Scope expressions. Permission: URIRef # The ability to perform an Action over an Asset. @@ -162,25 +170,33 @@ class ODRL2(DefinedNamespace): RightOperand: URIRef # Right operand for constraint expression. Rule: URIRef # An abstract concept that represents the common characteristics of Permissions, Prohibitions, and Duties. Set: URIRef # A Policy that expresses a Rule over an Asset. - Ticket: URIRef # A Policy that grants the holder a Rule over an Asset from an assigner. + Ticket: ( + URIRef # A Policy that grants the holder a Rule over an Asset from an assigner. + ) UndefinedTerm: URIRef # Is used to indicate how to support Actions that are not part of any vocabulary or profile in the policy expression system. acceptTracking: URIRef # To accept that the use of the Asset may be tracked. adHocShare: URIRef # The act of sharing the asset to parties in close proximity to the owner. - aggregate: URIRef # To use the Asset or parts of it as part of a composite collection. + aggregate: ( + URIRef # To use the Asset or parts of it as part of a composite collection. + ) annotate: URIRef # To add explanatory notations/commentaries to the Asset without modifying the Asset in any other way. 
anonymize: URIRef # To anonymize all or parts of the Asset. append: URIRef # The act of adding to the end of an asset. appendTo: URIRef # The act of appending data to the Asset without modifying the Asset in any other way. archive: URIRef # To store the Asset (in a non-transient form). attachPolicy: URIRef # The act of keeping the policy notice with the asset. - attachSource: URIRef # The act of attaching the source of the asset and its derivatives. + attachSource: ( + URIRef # The act of attaching the source of the asset and its derivatives. + ) attribute: URIRef # To attribute the use of the Asset. commercialize: URIRef # The act of using the asset in a business environment. compensate: URIRef # To compensate by transfer of some amount of value, if defined, for using or selling the Asset. concurrentUse: URIRef # To create multiple copies of the Asset that are being concurrently used. copy: URIRef # The act of making an exact reproduction of the asset. core: URIRef # Identifier for the ODRL Core Profile - delete: URIRef # To permanently remove all copies of the Asset after it has been used. + delete: ( + URIRef # To permanently remove all copies of the Asset after it has been used. + ) derive: URIRef # To create a new derivative Asset from this Asset and to edit or modify the derivative. digitize: URIRef # To produce a digital copy of (or otherwise digitize) the Asset from its analogue form. display: URIRef # To create a static and transient rendition of an Asset. @@ -190,8 +206,12 @@ class ODRL2(DefinedNamespace): export: URIRef # The act of transforming the asset into a new form. extract: URIRef # To extract parts of the Asset and to use it as a new Asset. extractChar: URIRef # The act of extracting (replicating) unchanged characters from the asset. - extractPage: URIRef # The act of extracting (replicating) unchanged pages from the asset. - extractWord: URIRef # The act of extracting (replicating) unchanged words from the asset. 
+ extractPage: ( + URIRef # The act of extracting (replicating) unchanged pages from the asset. + ) + extractWord: ( + URIRef # The act of extracting (replicating) unchanged words from the asset. + ) give: URIRef # To transfer the ownership of the Asset to a third party without compensation and while deleting the original asset. grantUse: URIRef # To grant the use of the Asset to third parties. include: URIRef # To include other related assets in the Asset. diff --git a/rdflib/namespace/_OWL.py b/rdflib/namespace/_OWL.py index 98083a019..47ad4e5fd 100644 --- a/rdflib/namespace/_OWL.py +++ b/rdflib/namespace/_OWL.py @@ -33,17 +33,25 @@ class OWL(DefinedNamespace): cardinality: URIRef # The property that determines the cardinality of an exact cardinality restriction. complementOf: URIRef # The property that determines that a given class is the complement of another class. datatypeComplementOf: URIRef # The property that determines that a given data range is the complement of another data range with respect to the data domain. - differentFrom: URIRef # The property that determines that two given individuals are different. + differentFrom: ( + URIRef # The property that determines that two given individuals are different. + ) disjointUnionOf: URIRef # The property that determines that a given class is equivalent to the disjoint union of a collection of other classes. - disjointWith: URIRef # The property that determines that two given classes are disjoint. + disjointWith: ( + URIRef # The property that determines that two given classes are disjoint. + ) distinctMembers: URIRef # The property that determines the collection of pairwise different individuals in a owl:AllDifferent axiom. equivalentClass: URIRef # The property that determines that two given classes are equivalent, and that is used to specify datatype definitions. - equivalentProperty: URIRef # The property that determines that two given properties are equivalent. 
+ equivalentProperty: ( + URIRef # The property that determines that two given properties are equivalent. + ) hasKey: URIRef # The property that determines the collection of properties that jointly build a key. hasSelf: URIRef # The property that determines the property that a self restriction refers to. hasValue: URIRef # The property that determines the individual that a has-value restriction refers to. intersectionOf: URIRef # The property that determines the collection of classes or data ranges that build an intersection. - inverseOf: URIRef # The property that determines that two given properties are inverse. + inverseOf: ( + URIRef # The property that determines that two given properties are inverse. + ) maxCardinality: URIRef # The property that determines the cardinality of a maximum cardinality restriction. maxQualifiedCardinality: URIRef # The property that determines the cardinality of a maximum qualified cardinality restriction. members: URIRef # The property that determines the collection of members in either a owl:AllDifferent, owl:AllDisjointClasses or owl:AllDisjointProperties axiom. @@ -56,7 +64,9 @@ class OWL(DefinedNamespace): onProperty: URIRef # The property that determines the property that a property restriction refers to. oneOf: URIRef # The property that determines the collection of individuals or data values that build an enumeration. propertyChainAxiom: URIRef # The property that determines the n-tuple of properties that build a sub property chain of a given property. - propertyDisjointWith: URIRef # The property that determines that two given properties are disjoint. + propertyDisjointWith: ( + URIRef # The property that determines that two given properties are disjoint. + ) qualifiedCardinality: URIRef # The property that determines the cardinality of an exact qualified cardinality restriction. sameAs: URIRef # The property that determines that two given individuals are equal. 
someValuesFrom: URIRef # The property that determines the class that an existential property restriction refers to. @@ -69,7 +79,9 @@ class OWL(DefinedNamespace): # http://www.w3.org/2000/01/rdf-schema#Class AllDifferent: URIRef # The class of collections of pairwise different individuals. AllDisjointClasses: URIRef # The class of collections of pairwise disjoint classes. - AllDisjointProperties: URIRef # The class of collections of pairwise disjoint properties. + AllDisjointProperties: ( + URIRef # The class of collections of pairwise disjoint properties. + ) Annotation: URIRef # The class of annotated annotations for which the RDF serialization consists of an annotated subject, predicate and object. AnnotationProperty: URIRef # The class of annotation properties. AsymmetricProperty: URIRef # The class of asymmetric properties. @@ -105,10 +117,14 @@ class OWL(DefinedNamespace): # http://www.w3.org/2002/07/owl#DatatypeProperty bottomDataProperty: URIRef # The data property that does not relate any individual to any data value. - topDataProperty: URIRef # The data property that relates every individual to every data value. + topDataProperty: ( + URIRef # The data property that relates every individual to every data value. + ) # http://www.w3.org/2002/07/owl#ObjectProperty - bottomObjectProperty: URIRef # The object property that does not relate any two individuals. + bottomObjectProperty: ( + URIRef # The object property that does not relate any two individuals. + ) topObjectProperty: URIRef # The object property that relates every two individuals. # http://www.w3.org/2002/07/owl#OntologyProperty diff --git a/rdflib/namespace/_PROV.py b/rdflib/namespace/_PROV.py index 67fd4398e..6a7970d5f 100644 --- a/rdflib/namespace/_PROV.py +++ b/rdflib/namespace/_PROV.py @@ -164,12 +164,18 @@ class PROV(DefinedNamespace): # http://www.w3.org/2002/07/owl#DatatypeProperty atTime: URIRef # The time at which an InstantaneousEvent occurred, in the form of xsd:dateTime. 
- endedAtTime: URIRef # The time at which an activity ended. See also prov:startedAtTime. + endedAtTime: ( + URIRef # The time at which an activity ended. See also prov:startedAtTime. + ) generatedAtTime: URIRef # The time at which an entity was completely created and is available for use. - invalidatedAtTime: URIRef # The time at which an entity was invalidated (i.e., no longer usable). + invalidatedAtTime: ( + URIRef # The time at which an entity was invalidated (i.e., no longer usable). + ) provenanceUriTemplate: URIRef # Relates a provenance service to a URI template string for constructing provenance-URIs. removedKey: URIRef # removedKey - startedAtTime: URIRef # The time at which an activity started. See also prov:endedAtTime. + startedAtTime: ( + URIRef # The time at which an activity started. See also prov:endedAtTime. + ) value: URIRef # value # http://www.w3.org/2002/07/owl#FunctionalProperty @@ -194,13 +200,17 @@ class PROV(DefinedNamespace): generated: URIRef # generated hadActivity: URIRef # The _optional_ Activity of an Influence, which used, generated, invalidated, or was the responsibility of some Entity. This property is _not_ used by ActivityInfluence (use prov:activity instead). hadDictionaryMember: URIRef # hadDictionaryMember - hadGeneration: URIRef # The _optional_ Generation involved in an Entity's Derivation. + hadGeneration: ( + URIRef # The _optional_ Generation involved in an Entity's Derivation. + ) hadMember: URIRef # hadMember hadPlan: URIRef # The _optional_ Plan adopted by an Agent in Association with some Activity. Plan specifications are out of the scope of this specification. hadPrimarySource: URIRef # hadPrimarySource hadRole: URIRef # This property has multiple RDFS domains to suit multiple OWL Profiles. See PROV-O OWL Profile. hadUsage: URIRef # The _optional_ Usage involved in an Entity's Derivation. - has_anchor: URIRef # Indicates anchor URI for a potentially dynamic resource instance. 
+ has_anchor: ( + URIRef # Indicates anchor URI for a potentially dynamic resource instance. + ) has_provenance: URIRef # Indicates a provenance-URI for a resource; the resource identified by this property presents a provenance record about its subject or anchor resource. has_query_service: URIRef # Indicates a provenance query service that can access provenance related to its subject or anchor resource. influenced: URIRef # influenced diff --git a/rdflib/namespace/_QB.py b/rdflib/namespace/_QB.py index b3cfab7d7..6494fd53a 100644 --- a/rdflib/namespace/_QB.py +++ b/rdflib/namespace/_QB.py @@ -30,7 +30,9 @@ class QB(DefinedNamespace): measure: URIRef # An alternative to qb:componentProperty which makes explicit that the component is a measure measureDimension: URIRef # An alternative to qb:componentProperty which makes explicit that the component is a measure dimension measureType: URIRef # Generic measure dimension, the value of this dimension indicates which measure (from the set of measures in the DSD) is being given by the obsValue (or other primary measure) - observation: URIRef # indicates a observation contained within this slice of the data set + observation: ( + URIRef # indicates a observation contained within this slice of the data set + ) observationGroup: URIRef # Indicates a group of observations. The domain of this property is left open so that a group may be attached to different resources and need not be restricted to a single DataSet order: URIRef # indicates a priority order for the components of sets with this structure, used to guide presentations - lower order numbers come before higher numbers, un-numbered components come last parentChildProperty: URIRef # Specifies a property which relates a parent concept in the hierarchy to a child concept. @@ -48,7 +50,9 @@ class QB(DefinedNamespace): ComponentSpecification: URIRef # Used to define properties of a component (attribute, dimension etc) which are specific to its usage in a DSD. 
DataSet: URIRef # Represents a collection of observations, possibly organized into various slices, conforming to some common dimensional structure. DataStructureDefinition: URIRef # Defines the structure of a DataSet or slice - DimensionProperty: URIRef # The class of components which represent the dimensions of the cube + DimensionProperty: ( + URIRef # The class of components which represent the dimensions of the cube + ) HierarchicalCodeList: URIRef # Represents a generalized hierarchy of concepts which can be used for coding. The hierarchy is defined by one or more roots together with a property which relates concepts in the hierarchy to their child concept . The same concepts may be members of multiple hierarchies provided that different qb:parentChildProperty values are used for each hierarchy. MeasureProperty: URIRef # The class of components which represent the measured value of the phenomenon being observed Observation: URIRef # A single observation in the cube, may have one or more associated measured values diff --git a/rdflib/namespace/_SDO.py b/rdflib/namespace/_SDO.py index b3c01626f..634fe4322 100644 --- a/rdflib/namespace/_SDO.py +++ b/rdflib/namespace/_SDO.py @@ -17,7 +17,9 @@ class SDO(DefinedNamespace): # 3DModel: URIRef # A 3D model represents some kind of 3D content, which may have [[encoding]]s in one or more [[MediaObject]]s. Many 3D formats are available (e.g. see [Wikipedia](https://en.wikipedia.org/wiki/Category:3D_graphics_file_formats)); specific encoding formats can be represented using the [[encodingFormat]] property applied to the relevant [[MediaObject]]. For the case of a single file published after Zip compression, the convention of appending '+zip' to the [[encodingFormat]] can be used. Geospatial, AR/VR, artistic/animation, gaming, engineering and scientific content can all be represented using [[3DModel]]. AMRadioChannel: URIRef # A radio channel that uses AM. 
- APIReference: URIRef # Reference documentation for application programming interfaces (APIs). + APIReference: ( + URIRef # Reference documentation for application programming interfaces (APIs). + ) Abdomen: URIRef # Abdomen clinical examination. AboutPage: URIRef # Web page type: About page. AcceptAction: URIRef # The act of committing to/adopting an object.\n\nRelated actions:\n\n* [[RejectAction]]: The antonym of AcceptAction. @@ -44,7 +46,9 @@ class SDO(DefinedNamespace): AlbumRelease: URIRef # AlbumRelease. AlignmentObject: URIRef # An intangible item that describes an alignment between a learning resource and a node in an educational framework. Should not be used where the nature of the alignment can be described using a simple property, for example to express that a resource [[teaches]] or [[assesses]] a competency. AllWheelDriveConfiguration: URIRef # All-wheel Drive is a transmission layout where the engine drives all four wheels. - AllergiesHealthAspect: URIRef # Content about the allergy-related aspects of a health topic. + AllergiesHealthAspect: ( + URIRef # Content about the allergy-related aspects of a health topic. + ) AllocateAction: URIRef # The act of organizing tasks/objects/events by associating resources to it. AmpStory: URIRef # A creative work with a visual storytelling format intended to be viewed online, particularly on mobile devices. AmusementPark: URIRef # An amusement park. @@ -117,11 +121,17 @@ class SDO(DefinedNamespace): BoatTerminal: URIRef # A terminal for boats, ships, and other water vessels. BoatTrip: URIRef # A trip on a commercial ferry line. BodyMeasurementArm: URIRef # Arm length (measured between arms/shoulder line intersection and the prominent wrist bone). Used, for example, to fit shirts. - BodyMeasurementBust: URIRef # Maximum girth of bust. Used, for example, to fit women's suits. - BodyMeasurementChest: URIRef # Maximum girth of chest. Used, for example, to fit men's suits. 
+ BodyMeasurementBust: ( + URIRef # Maximum girth of bust. Used, for example, to fit women's suits. + ) + BodyMeasurementChest: ( + URIRef # Maximum girth of chest. Used, for example, to fit men's suits. + ) BodyMeasurementFoot: URIRef # Foot length (measured between end of the most prominent toe and the most prominent part of the heel). Used, for example, to measure socks. BodyMeasurementHand: URIRef # Maximum hand girth (measured over the knuckles of the open right hand excluding thumb, fingers together). Used, for example, to fit gloves. - BodyMeasurementHead: URIRef # Maximum girth of head above the ears. Used, for example, to fit hats. + BodyMeasurementHead: ( + URIRef # Maximum girth of head above the ears. Used, for example, to fit hats. + ) BodyMeasurementHeight: URIRef # Body height (measured between crown of head and soles of feet). Used, for example, to fit jackets. BodyMeasurementHips: URIRef # Girth of hips (measured around the buttocks). Used, for example, to fit skirts. BodyMeasurementInsideLeg: URIRef # Inside leg (measured between crotch and soles of feet). Used, for example, to fit pants. @@ -129,7 +139,9 @@ class SDO(DefinedNamespace): BodyMeasurementTypeEnumeration: URIRef # Enumerates types (or dimensions) of a person's body measurements, for example for fitting of clothes. BodyMeasurementUnderbust: URIRef # Girth of body just below the bust. Used, for example, to fit women's swimwear. BodyMeasurementWaist: URIRef # Girth of natural waistline (between hip bones and lower ribs). Used, for example, to fit pants. - BodyMeasurementWeight: URIRef # Body weight. Used, for example, to measure pantyhose. + BodyMeasurementWeight: ( + URIRef # Body weight. Used, for example, to measure pantyhose. + ) BodyOfWater: URIRef # A body of water, such as a sea, ocean, or lake. Bone: URIRef # Rigid connective tissue that comprises up the skeletal structure of the human body. Book: URIRef # A book. 
@@ -161,7 +173,9 @@ class SDO(DefinedNamespace): BusinessEntityType: URIRef # A business entity type is a conceptual entity representing the legal form, the size, the main line of business, the position in the value chain, or any combination thereof, of an organization or business person.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#Business\n* http://purl.org/goodrelations/v1#Enduser\n* http://purl.org/goodrelations/v1#PublicInstitution\n* http://purl.org/goodrelations/v1#Reseller BusinessEvent: URIRef # Event type: Business event. BusinessFunction: URIRef # The business function specifies the type of activity or access (i.e., the bundle of rights) offered by the organization or business person through the offer. Typical are sell, rental or lease, maintenance or repair, manufacture / produce, recycle / dispose, engineering / construction, or installation. Proprietary specifications of access rights are also instances of this class.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#ConstructionInstallation\n* http://purl.org/goodrelations/v1#Dispose\n* http://purl.org/goodrelations/v1#LeaseOut\n* http://purl.org/goodrelations/v1#Maintain\n* http://purl.org/goodrelations/v1#ProvideService\n* http://purl.org/goodrelations/v1#Repair\n* http://purl.org/goodrelations/v1#Sell\n* http://purl.org/goodrelations/v1#Buy - BusinessSupport: URIRef # BusinessSupport: this is a benefit for supporting businesses. + BusinessSupport: ( + URIRef # BusinessSupport: this is a benefit for supporting businesses. + ) BuyAction: URIRef # The act of giving money to a seller in exchange for goods or services rendered. An agent buys an object, product, or service from a seller for a price. Reciprocal of SellAction. CDCPMDRecord: URIRef # A CDCPMDRecord is a data structure representing a record in a CDC tabular data format used for hospital data reporting. 
See [documentation](/docs/cdc-covid.html) for details, and the linked CDC materials for authoritative definitions used as the source here. CDFormat: URIRef # CDFormat. @@ -175,7 +189,9 @@ class SDO(DefinedNamespace): Car: URIRef # A car is a wheeled, self-powered motor vehicle used for transportation. CarUsageType: URIRef # A value indicating a special usage of a car, e.g. commercial rental, driving school, or as a taxi. Cardiovascular: URIRef # A specific branch of medical science that pertains to diagnosis and treatment of disorders of heart and vasculature. - CardiovascularExam: URIRef # Cardiovascular system assessment withclinical examination. + CardiovascularExam: ( + URIRef # Cardiovascular system assessment withclinical examination. + ) CaseSeries: URIRef # A case series (also known as a clinical series) is a medical research study that tracks patients with a known exposure given similar treatment or examines their medical records for exposure and outcome. A case series can be retrospective or prospective and usually involves a smaller number of patients than the more powerful case-control studies or randomized controlled trials. Case series may be consecutive or non-consecutive, depending on whether all cases presenting to the reporting authors over a period of time were included, or only a selection. Casino: URIRef # A casino. CassetteFormat: URIRef # CassetteFormat. @@ -211,7 +227,9 @@ class SDO(DefinedNamespace): CohortStudy: URIRef # Also known as a panel study. A cohort study is a form of longitudinal study used in medicine and social science. It is one type of study design and should be compared with a cross-sectional study. A cohort is a group of people who share a common characteristic or experience within a defined period (e.g., are born, leave school, lose their job, are exposed to a drug or a vaccine, etc.). 
The comparison group may be the general population from which the cohort is drawn, or it may be another cohort of persons thought to have had little or no exposure to the substance under investigation, but otherwise similar. Alternatively, subgroups within the cohort may be compared with each other. Collection: URIRef # A collection of items e.g. creative works or products. CollectionPage: URIRef # Web page type: Collection page. - CollegeOrUniversity: URIRef # A college, university, or other third-level educational institution. + CollegeOrUniversity: ( + URIRef # A college, university, or other third-level educational institution. + ) ComedyClub: URIRef # A comedy club. ComedyEvent: URIRef # Event type: Comedy event. ComicCoverArt: URIRef # The artwork on the cover of a comic. @@ -234,7 +252,9 @@ class SDO(DefinedNamespace): Consortium: URIRef # A Consortium is a membership [[Organization]] whose members are typically Organizations. ConsumeAction: URIRef # The act of ingesting information/resources/food. ContactPage: URIRef # Web page type: Contact page. - ContactPoint: URIRef # A contact point—for example, a Customer Complaints department. + ContactPoint: ( + URIRef # A contact point—for example, a Customer Complaints department. + ) ContactPointOption: URIRef # Enumerated options related to a ContactPoint. ContagiousnessHealthAspect: URIRef # Content about contagion mechanisms and contagiousness information over the topic. Continent: URIRef # One of the continents (for example, Europe or Africa). @@ -259,7 +279,9 @@ class SDO(DefinedNamespace): CriticReview: URIRef # A [[CriticReview]] is a more specialized form of Review written or published by a source that is recognized for its reviewing activities. These can include online columns, travel and food guides, TV and radio shows, blogs and other independent Web sites. [[CriticReview]]s are typically more in-depth and professionally written. 
For simpler, casually written user/visitor/viewer/customer reviews, it is more appropriate to use the [[UserReview]] type. Review aggregator sites such as Metacritic already separate out the site's user reviews from selected critic reviews that originate from third-party sources. CrossSectional: URIRef # Studies carried out on pre-existing data (usually from 'snapshot' surveys), such as that collected by the Census Bureau. Sometimes called Prevalence Studies. CssSelectorType: URIRef # Text representing a CSS selector. - CurrencyConversionService: URIRef # A service to convert funds from one currency to another currency. + CurrencyConversionService: ( + URIRef # A service to convert funds from one currency to another currency. + ) DDxElement: URIRef # An alternative, closely-related condition typically considered later in the differential diagnosis process along with the signs that are used to distinguish it. DJMixAlbum: URIRef # DJMixAlbum. DVDFormat: URIRef # DVDFormat. @@ -271,7 +293,9 @@ class SDO(DefinedNamespace): DataFeed: URIRef # A single feed providing structured information about one or more entities or topics. DataFeedItem: URIRef # A single item within a larger data feed. DataType: URIRef # The basic data types such as Integers, Strings, etc. - Dataset: URIRef # A body of structured information describing some topic(s) of interest. + Dataset: ( + URIRef # A body of structured information describing some topic(s) of interest. + ) Date: URIRef # A date value in [ISO 8601 date format](http://en.wikipedia.org/wiki/ISO_8601). DateTime: URIRef # A combination of date and time of day in the form [-]CCYY-MM-DDThh:mm:ss[Z|(+|-)hh:mm] (see Chapter 5.4 of ISO 8601). DatedMoneySpecification: URIRef # A DatedMoneySpecification represents monetary values with optional start and end dates. For example, this could represent an employee's salary over a specific period of time. 
__Note:__ This type has been superseded by [[MonetaryAmount]] use of that type is recommended @@ -279,12 +303,16 @@ class SDO(DefinedNamespace): DaySpa: URIRef # A day spa. DeactivateAction: URIRef # The act of stopping or deactivating a device or application (e.g. stopping a timer or turning off a flashlight). DecontextualizedContent: URIRef # Content coded 'missing context' in a [[MediaReview]], considered in the context of how it was published or shared. For a [[VideoObject]] to be 'missing context': Presenting unaltered video in an inaccurate manner that misrepresents the footage. For example, using incorrect dates or locations, altering the transcript or sharing brief clips from a longer video to mislead viewers. (A video rated 'original' can also be missing context.) For an [[ImageObject]] to be 'missing context': Presenting unaltered images in an inaccurate manner to misrepresent the image and mislead the viewer. For example, a common tactic is using an unaltered image but saying it came from a different time or place. (An image rated 'original' can also be missing context.) For an [[ImageObject]] with embedded text to be 'missing context': An unaltered image presented in an inaccurate manner to misrepresent the image and mislead the viewer. For example, a common tactic is using an unaltered image but saying it came from a different time or place. (An 'original' image with inaccurate text would generally fall in this category.) For an [[AudioObject]] to be 'missing context': Unaltered audio presented in an inaccurate manner that misrepresents it. For example, using incorrect dates or locations, or sharing brief clips from a longer recording to mislead viewers. (Audio rated “original” can also be missing context.) - DefenceEstablishment: URIRef # A defence establishment, such as an army or navy base. + DefenceEstablishment: ( + URIRef # A defence establishment, such as an army or navy base. 
+ ) DefinedRegion: URIRef # A DefinedRegion is a geographic area defined by potentially arbitrary (rather than political, administrative or natural geographical) criteria. Properties are provided for defining a region by reference to sets of postal codes. Examples: a delivery destination when shopping. Region where regional pricing is configured. Requirement 1: Country: US States: "NY", "CA" Requirement 2: Country: US PostalCode Set: { [94000-94585], [97000, 97999], [13000, 13599]} { [12345, 12345], [78945, 78945], } Region = state, canton, prefecture, autonomous community... DefinedTerm: URIRef # A word, name, acronym, phrase, etc. with a formal definition. Often used in the context of category or subject classification, glossaries or dictionaries, product or creative work types, etc. Use the name property for the term being defined, use termCode if the term has an alpha-numeric code allocated, use description to provide the definition of the term. DefinedTermSet: URIRef # A set of defined terms for example a set of categories or a classification scheme, a glossary, dictionary or enumeration. DefinitiveLegalValue: URIRef # Indicates a document for which the text is conclusively what the law says and is legally binding. (e.g. The digitally signed version of an Official Journal.) Something "Definitive" is considered to be also [[AuthoritativeLegalValue]]. - DeleteAction: URIRef # The act of editing a recipient by removing one of its objects. + DeleteAction: ( + URIRef # The act of editing a recipient by removing one of its objects. + ) DeliveryChargeSpecification: URIRef # The price for the delivery of an offer using a particular delivery method. DeliveryEvent: URIRef # An event involving the delivery of an item. DeliveryMethod: URIRef # A delivery method is a standardized procedure for transferring the product or service to the destination of fulfillment chosen by the customer. 
Delivery methods are characterized by the means of transportation used, and by the organization or group that is the contracting party for the sending organization or person.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#DeliveryModeDirectDownload\n* http://purl.org/goodrelations/v1#DeliveryModeFreight\n* http://purl.org/goodrelations/v1#DeliveryModeMail\n* http://purl.org/goodrelations/v1#DeliveryModeOwnFleet\n* http://purl.org/goodrelations/v1#DeliveryModePickUp\n* http://purl.org/goodrelations/v1#DHL\n* http://purl.org/goodrelations/v1#FederalExpress\n* http://purl.org/goodrelations/v1#UPS @@ -310,7 +338,9 @@ class SDO(DefinedNamespace): DigitalDocumentPermission: URIRef # A permission for a particular person or group to access a particular file. DigitalDocumentPermissionType: URIRef # A type of permission which can be granted for accessing a digital document. DigitalFormat: URIRef # DigitalFormat. - DisabilitySupport: URIRef # DisabilitySupport: this is a benefit for disability support. + DisabilitySupport: ( + URIRef # DisabilitySupport: this is a benefit for disability support. + ) DisagreeAction: URIRef # The act of expressing a difference of opinion with the object. An agent disagrees to/about an object (a proposition, topic or theme) with participants. Discontinued: URIRef # Indicates that the item has been discontinued. DiscoverAction: URIRef # The act of discovering/finding an object. @@ -327,8 +357,12 @@ class SDO(DefinedNamespace): DrawAction: URIRef # The act of producing a visual/graphical representation of an object, typically with a pen/pencil and paper as instruments. Drawing: URIRef # A picture or diagram made with a pencil, pen, or crayon rather than paint. DrinkAction: URIRef # The act of swallowing liquids. - DriveWheelConfigurationValue: URIRef # A value indicating which roadwheels will receive torque. - DrivingSchoolVehicleUsage: URIRef # Indicates the usage of the vehicle for driving school. 
+ DriveWheelConfigurationValue: ( + URIRef # A value indicating which roadwheels will receive torque. + ) + DrivingSchoolVehicleUsage: ( + URIRef # Indicates the usage of the vehicle for driving school. + ) Drug: URIRef # A chemical or biologic substance, used as a medical therapy, that has a physiological effect on an organism. Here the term drug is used interchangeably with the term medicine although clinical knowledge make a clear difference between them. DrugClass: URIRef # A class of medical drugs, e.g., statins. Classes can represent general pharmacological class, common mechanisms of action, common physiological effects, etc. DrugCost: URIRef # The cost per unit of a medical drug. Note that this type is not meant to represent the price in an offer of a drug for sale; see the Offer type for that. This type will typically be used to tag wholesale or average retail cost of a drug, or maximum reimbursable cost. Costs of medical drugs vary widely depending on how and where they are paid for, so while this type captures some of the variables, costs should be used with caution by consumers of this schema's markup. @@ -360,7 +394,9 @@ class SDO(DefinedNamespace): EducationalOccupationalCredential: URIRef # An educational or occupational credential. A diploma, academic degree, certification, qualification, badge, etc., that may be awarded to a person or other entity that meets the requirements defined by the credentialer. EducationalOccupationalProgram: URIRef # A program offered by an institution which determines the learning progress to achieve an outcome, usually a credential like a degree or certificate. This would define a discrete set of opportunities (e.g., job, courses) that together constitute a program with a clear start, end, set of requirements, and transition to a new occupational opportunity (e.g., a job), or sometimes a higher educational opportunity (e.g., an advanced degree). EducationalOrganization: URIRef # An educational organization. 
- EffectivenessHealthAspect: URIRef # Content about the effectiveness-related aspects of a health topic. + EffectivenessHealthAspect: ( + URIRef # Content about the effectiveness-related aspects of a health topic. + ) Electrician: URIRef # An electrician. ElectronicsStore: URIRef # An electronics store. ElementarySchool: URIRef # An elementary school. @@ -373,13 +409,17 @@ class SDO(DefinedNamespace): EmployerReview: URIRef # An [[EmployerReview]] is a review of an [[Organization]] regarding its role as an employer, written by a current or former employee of that organization. EmploymentAgency: URIRef # An employment agency. Endocrine: URIRef # A specific branch of medical science that pertains to diagnosis and treatment of disorders of endocrine glands and their secretions. - EndorseAction: URIRef # An agent approves/certifies/likes/supports/sanction an object. + EndorseAction: ( + URIRef # An agent approves/certifies/likes/supports/sanction an object. + ) EndorsementRating: URIRef # An EndorsementRating is a rating that expresses some level of endorsement, for example inclusion in a "critic's pick" blog, a "Like" or "+1" on a social network. It can be considered the [[result]] of an [[EndorseAction]] in which the [[object]] of the action is rated positively by some [[agent]]. As is common elsewhere in schema.org, it is sometimes more useful to describe the results of such an action without explicitly describing the [[Action]]. An [[EndorsementRating]] may be part of a numeric scale or organized system, but this is not required: having an explicit type for indicating a positive, endorsement rating is particularly useful in the absence of numeric scales as it helps consumers understand that the rating is broadly positive. Energy: URIRef # Properties that take Energy as values are of the form '<Number> <Energy unit of measure>'. 
EnergyConsumptionDetails: URIRef # EnergyConsumptionDetails represents information related to the energy efficiency of a product that consumes energy. The information that can be provided is based on international regulations such as for example [EU directive 2017/1369](https://eur-lex.europa.eu/eli/reg/2017/1369/oj) for energy labeling and the [Energy labeling rule](https://www.ftc.gov/enforcement/rules/rulemaking-regulatory-reform-proceedings/energy-water-use-labeling-consumer) under the Energy Policy and Conservation Act (EPCA) in the US. EnergyEfficiencyEnumeration: URIRef # Enumerates energy efficiency levels (also known as "classes" or "ratings") and certifications that are part of several international energy efficiency standards. EnergyStarCertified: URIRef # Represents EnergyStar certification. - EnergyStarEnergyEfficiencyEnumeration: URIRef # Used to indicate whether a product is EnergyStar certified. + EnergyStarEnergyEfficiencyEnumeration: ( + URIRef # Used to indicate whether a product is EnergyStar certified. + ) EngineSpecification: URIRef # Information about the engine of the vehicle. A vehicle can have multiple engines represented by multiple engine specification entities. EnrollingByInvitation: URIRef # Enrolling participants by invitation only. EntertainmentBusiness: URIRef # A business providing entertainment. @@ -398,8 +438,12 @@ class SDO(DefinedNamespace): EventStatusType: URIRef # EventStatusType is an enumeration type whose instances represent several states that an Event may be in. EventVenue: URIRef # An event venue. EvidenceLevelA: URIRef # Data derived from multiple randomized clinical trials or meta-analyses. - EvidenceLevelB: URIRef # Data derived from a single randomized trial, or nonrandomized studies. - EvidenceLevelC: URIRef # Only consensus opinion of experts, case studies, or standard-of-care. + EvidenceLevelB: ( + URIRef # Data derived from a single randomized trial, or nonrandomized studies. 
+ ) + EvidenceLevelC: ( + URIRef # Only consensus opinion of experts, case studies, or standard-of-care. + ) ExchangeRateSpecification: URIRef # A structured value representing exchange rate. ExchangeRefund: URIRef # Specifies that a refund can be done as an exchange for the same product. ExerciseAction: URIRef # The act of participating in exertive activity for the purposes of improving health and fitness. @@ -437,7 +481,9 @@ class SDO(DefinedNamespace): FoodEvent: URIRef # Event type: Food event. FoodService: URIRef # A food service, like breakfast, lunch, or dinner. FourWheelDriveConfiguration: URIRef # Four-wheel drive is a transmission layout where the engine primarily drives two wheels with a part-time four-wheel drive capability. - FreeReturn: URIRef # Specifies that product returns are free of charge for the customer. + FreeReturn: ( + URIRef # Specifies that product returns are free of charge for the customer. + ) Friday: URIRef # The day of the week between Thursday and Saturday. FrontWheelDriveConfiguration: URIRef # Front-wheel drive is a transmission layout where the engine drives the front wheels. FullRefund: URIRef # Specifies that a refund can be done in the full amount the customer paid for the product @@ -446,7 +492,9 @@ class SDO(DefinedNamespace): Fungus: URIRef # Pathogenic fungus. FurnitureStore: URIRef # A furniture store. Game: URIRef # The Game type represents things which are games. These are typically rule-governed recreational activities, e.g. role-playing games in which players assume the role of characters in a fictional setting. - GamePlayMode: URIRef # Indicates whether this game is multi-player, co-op or single-player. + GamePlayMode: ( + URIRef # Indicates whether this game is multi-player, co-op or single-player. + ) GameServer: URIRef # Server that provides game interaction in a multiplayer game. GameServerStatus: URIRef # Status of a game server. GardenStore: URIRef # A garden store. 
@@ -457,7 +505,9 @@ class SDO(DefinedNamespace): Gene: URIRef # A discrete unit of inheritance which affects one or more biological traits (Source: [https://en.wikipedia.org/wiki/Gene](https://en.wikipedia.org/wiki/Gene)). Examples include FOXP2 (Forkhead box protein P2), SCARNA21 (small Cajal body-specific RNA 21), A- (agouti genotype). GeneralContractor: URIRef # A general contractor. Genetic: URIRef # A specific branch of medical science that pertains to hereditary transmission and the variation of inherited characteristics and disorders. - Genitourinary: URIRef # Genitourinary system function assessment with clinical examination. + Genitourinary: ( + URIRef # Genitourinary system function assessment with clinical examination. + ) GeoCircle: URIRef # A GeoCircle is a GeoShape representing a circular geographic area. As it is a GeoShape it provides the simple textual property 'circle', but also allows the combination of postalCode alongside geoRadius. The center of the circle can be indicated via the 'geoMidpoint' property, or more approximately using 'address', 'postalCode'. GeoCoordinates: URIRef # The geographic coordinates of a place or event. GeoShape: URIRef # The geographic shape of a place. A GeoShape can be described using several properties whose values are based on latitude/longitude pairs. Either whitespace or commas can be used to separate latitude and longitude; whitespace should be used when writing a list of several such points. @@ -469,14 +519,18 @@ class SDO(DefinedNamespace): GolfCourse: URIRef # A golf course. GovernmentBenefitsType: URIRef # GovernmentBenefitsType enumerates several kinds of government benefits to support the COVID-19 situation. Note that this structure may not capture all benefits offered. GovernmentBuilding: URIRef # A government building. - GovernmentOffice: URIRef # A government office—for example, an IRS or DMV office. + GovernmentOffice: ( + URIRef # A government office—for example, an IRS or DMV office. 
+ ) GovernmentOrganization: URIRef # A governmental organization or agency. GovernmentPermit: URIRef # A permit issued by a government agency. GovernmentService: URIRef # A service provided by a government organization, e.g. food stamps, veterans benefits, etc. Grant: URIRef # A grant, typically financial or otherwise quantifiable, of resources. Typically a [[funder]] sponsors some [[MonetaryAmount]] to an [[Organization]] or [[Person]], sometimes not necessarily via a dedicated or long-lived [[Project]], resulting in one or more outputs, or [[fundedItem]]s. For financial sponsorship, indicate the [[funder]] of a [[MonetaryGrant]]. For non-financial support, indicate [[sponsor]] of [[Grant]]s of resources (e.g. office space). Grants support activities directed towards some agreed collective goals, often but not always organized as [[Project]]s. Long-lived projects are sometimes sponsored by a variety of grants over time, but it is also common for a project to be associated with a single grant. The amount of a [[Grant]] is represented using [[amount]] as a [[MonetaryAmount]]. GraphicNovel: URIRef # Book format: GraphicNovel. May represent a bound collection of ComicIssue instances. GroceryStore: URIRef # A grocery store. - GroupBoardingPolicy: URIRef # The airline boards by groups based on check-in time, priority, etc. + GroupBoardingPolicy: ( + URIRef # The airline boards by groups based on check-in time, priority, etc. + ) Guide: URIRef # [[Guide]] is a page or article that recommend specific products or services, or aspects of a thing for a user to consider. A [[Guide]] may represent a Buying Guide and detail aspects of products or services for a user to consider. A [[Guide]] may represent a Product Guide and recommend specific products or services. A [[Guide]] may represent a Ranked List and recommend specific products or services with ranking. 
Gynecologic: URIRef # A specific branch of medical science that pertains to the health care of women, particularly in the diagnosis and treatment of disorders affecting the female reproductive system. HVACBusiness: URIRef # A business that provide Heating, Ventilation and Air Conditioning services. @@ -490,17 +544,23 @@ class SDO(DefinedNamespace): HealthAspectEnumeration: URIRef # HealthAspectEnumeration enumerates several aspects of health content online, each of which might be described using [[hasHealthAspect]] and [[HealthTopicContent]]. HealthCare: URIRef # HealthCare: this is a benefit for health care. HealthClub: URIRef # A health club. - HealthInsurancePlan: URIRef # A US-style health insurance plan, including PPOs, EPOs, and HMOs. + HealthInsurancePlan: ( + URIRef # A US-style health insurance plan, including PPOs, EPOs, and HMOs. + ) HealthPlanCostSharingSpecification: URIRef # A description of costs to the patient under a given network or formulary. HealthPlanFormulary: URIRef # For a given health insurance plan, the specification for costs and coverage of prescription drugs. HealthPlanNetwork: URIRef # A US-style health insurance plan network. HealthTopicContent: URIRef # [[HealthTopicContent]] is [[WebContent]] that is about some aspect of a health topic, e.g. a condition, its symptoms or treatments. Such content may be comprised of several parts or sections and use different types of media. Multiple instances of [[WebContent]] (and hence [[HealthTopicContent]]) can be related using [[hasPart]] / [[isPartOf]] where there is some kind of content hierarchy, and their content described with [[about]] and [[mentions]] e.g. building upon the existing [[MedicalCondition]] vocabulary. - HearingImpairedSupported: URIRef # Uses devices to support users with hearing impairments. + HearingImpairedSupported: ( + URIRef # Uses devices to support users with hearing impairments. 
+ ) Hematologic: URIRef # A specific branch of medical science that pertains to diagnosis and treatment of disorders of blood and blood producing organs. HighSchool: URIRef # A high school. HinduDiet: URIRef # A diet conforming to Hindu dietary practices, in particular, beef-free. HinduTemple: URIRef # A Hindu temple. - HobbyShop: URIRef # A store that sells materials useful or necessary for various hobbies. + HobbyShop: ( + URIRef # A store that sells materials useful or necessary for various hobbies. + ) HomeAndConstructionBusiness: URIRef # A construction business.\n\nA HomeAndConstructionBusiness is a [[LocalBusiness]] that provides services around homes and buildings.\n\nAs a [[LocalBusiness]] it can be described as a [[provider]] of one or more [[Service]]\(s). HomeGoodsStore: URIRef # A home goods store. Homeopathic: URIRef # A system of medicine based on the principle that a disease can be cured by a substance that produces similar symptoms in healthy people. @@ -527,23 +587,33 @@ class SDO(DefinedNamespace): ImageGallery: URIRef # Web page type: Image gallery page. ImageObject: URIRef # An image file. ImageObjectSnapshot: URIRef # A specific and exact (byte-for-byte) version of an [[ImageObject]]. Two byte-for-byte identical files, for the purposes of this type, considered identical. If they have different embedded metadata (e.g. XMP, EXIF) the files will differ. Different external facts about the files, e.g. creator or dateCreated that aren't represented in their actual content, do not affect this notion of identity. - ImagingTest: URIRef # Any medical imaging modality typically used for diagnostic purposes. + ImagingTest: ( + URIRef # Any medical imaging modality typically used for diagnostic purposes. + ) InForce: URIRef # Indicates that a legislation is in force. InStock: URIRef # Indicates that the item is in stock. - InStoreOnly: URIRef # Indicates that the item is available only at physical locations. 
+ InStoreOnly: ( + URIRef # Indicates that the item is available only at physical locations. + ) IndividualProduct: URIRef # A single, identifiable product instance (e.g. a laptop with a particular serial number). Infectious: URIRef # Something in medical science that pertains to infectious diseases i.e caused by bacterial, viral, fungal or parasitic infections. InfectiousAgentClass: URIRef # Classes of agents or pathogens that transmit infectious diseases. Enumerated type. InfectiousDisease: URIRef # An infectious disease is a clinically evident human disease resulting from the presence of pathogenic microbial agents, like pathogenic viruses, pathogenic bacteria, fungi, protozoa, multicellular parasites, and prions. To be considered an infectious disease, such pathogens are known to be able to cause this disease. InformAction: URIRef # The act of notifying someone of information pertinent to them, with no expectation of a response. - IngredientsHealthAspect: URIRef # Content discussing ingredients-related aspects of a health topic. - InsertAction: URIRef # The act of adding at a specific location in an ordered collection. + IngredientsHealthAspect: ( + URIRef # Content discussing ingredients-related aspects of a health topic. + ) + InsertAction: ( + URIRef # The act of adding at a specific location in an ordered collection. + ) InstallAction: URIRef # The act of installing an application. Installment: URIRef # Represents the installment pricing component of the total price for an offered product. InsuranceAgency: URIRef # An Insurance agency. Intangible: URIRef # A utility class that serves as the umbrella for a number of 'intangible' things such as quantities, structured values, etc. Integer: URIRef # Data type: Integer. - InteractAction: URIRef # The act of interacting with another person or organization. + InteractAction: ( + URIRef # The act of interacting with another person or organization. 
+ ) InteractionCounter: URIRef # A summary of how users have interacted with this CreativeWork. In most cases, authors will use a subtype to specify the specific type of interaction. InternationalTrial: URIRef # An international trial. InternetCafe: URIRef # An internet cafe. @@ -554,13 +624,19 @@ class SDO(DefinedNamespace): InvoicePrice: URIRef # Represents the invoice price of an offered product. ItemAvailability: URIRef # A list of possible product availability options. ItemList: URIRef # A list of items of any sort—for example, Top 10 Movies About Weathermen, or Top 100 Party Songs. Not to be confused with HTML lists, which are often used only for formatting. - ItemListOrderAscending: URIRef # An ItemList ordered with lower values listed first. - ItemListOrderDescending: URIRef # An ItemList ordered with higher values listed first. + ItemListOrderAscending: ( + URIRef # An ItemList ordered with lower values listed first. + ) + ItemListOrderDescending: ( + URIRef # An ItemList ordered with higher values listed first. + ) ItemListOrderType: URIRef # Enumerated for values for itemListOrder for indicating how an ordered ItemList is organized. ItemListUnordered: URIRef # An ItemList ordered with no explicit order. ItemPage: URIRef # A page devoted to a single item, such as a particular product or hotel. JewelryStore: URIRef # A jewelry store. - JobPosting: URIRef # A listing that describes a job opening in a certain organization. + JobPosting: ( + URIRef # A listing that describes a job opening in a certain organization. + ) JoinAction: URIRef # An agent joins an event/group with participants/friends at a location.\n\nRelated actions:\n\n* [[RegisterAction]]: Unlike RegisterAction, JoinAction refers to joining a group/team of people.\n* [[SubscribeAction]]: Unlike SubscribeAction, JoinAction does not imply that you'll be receiving updates.\n* [[FollowAction]]: Unlike FollowAction, JoinAction does not imply that you'll be polling for updates. 
Joint: URIRef # The anatomical location at which two or more bones make contact. KosherDiet: URIRef # A diet conforming to Jewish dietary practices. @@ -573,12 +649,18 @@ class SDO(DefinedNamespace): LearningResource: URIRef # The LearningResource type can be used to indicate [[CreativeWork]]s (whether physical or digital) that have a particular and explicit orientation towards learning, education, skill acquisition, and other educational purposes. [[LearningResource]] is expected to be used as an addition to a primary type such as [[Book]], [[VideoObject]], [[Product]] etc. [[EducationEvent]] serves a similar purpose for event-like things (e.g. a [[Trip]]). A [[LearningResource]] may be created as a result of an [[EducationEvent]], for example by recording one. LeaveAction: URIRef # An agent leaves an event / group with participants/friends at a location.\n\nRelated actions:\n\n* [[JoinAction]]: The antonym of LeaveAction.\n* [[UnRegisterAction]]: Unlike UnRegisterAction, LeaveAction implies leaving a group/team of people rather than a service. LeftHandDriving: URIRef # The steering position is on the left side of the vehicle (viewed from the main direction of driving). - LegalForceStatus: URIRef # A list of possible statuses for the legal force of a legislation. + LegalForceStatus: ( + URIRef # A list of possible statuses for the legal force of a legislation. + ) LegalService: URIRef # A LegalService is a business that provides legally-oriented services, advice and representation, e.g. law firms.\n\nAs a [[LocalBusiness]] it can be described as a [[provider]] of one or more [[Service]]\(s). - LegalValueLevel: URIRef # A list of possible levels for the legal validity of a legislation. + LegalValueLevel: ( + URIRef # A list of possible levels for the legal validity of a legislation. + ) Legislation: URIRef # A legal document such as an act, decree, bill, etc. (enforceable or not) or a component of a legal act (like an article). 
LegislationObject: URIRef # A specific object or file containing a Legislation. Note that the same Legislation can be published in multiple files. For example, a digitally signed PDF, a plain PDF and an HTML version. - LegislativeBuilding: URIRef # A legislative building—for example, the state capitol. + LegislativeBuilding: ( + URIRef # A legislative building—for example, the state capitol. + ) LeisureTimeActivity: URIRef # Any physical activity engaged in for recreational purposes. Examples may include ballroom dancing, roller skating, canoeing, fishing, etc. LendAction: URIRef # The act of providing an object under an agreement that it will be returned at a later date. Reciprocal of BorrowAction.\n\nRelated actions:\n\n* [[BorrowAction]]: Reciprocal of LendAction. Library: URIRef # A library. @@ -596,11 +678,15 @@ class SDO(DefinedNamespace): LiteraryEvent: URIRef # Event type: Literary event. LiveAlbum: URIRef # LiveAlbum. LiveBlogPosting: URIRef # A [[LiveBlogPosting]] is a [[BlogPosting]] intended to provide a rolling textual coverage of an ongoing event through continuous updates. - LivingWithHealthAspect: URIRef # Information about coping or life related to the topic. + LivingWithHealthAspect: ( + URIRef # Information about coping or life related to the topic. + ) LoanOrCredit: URIRef # A financial product for the loaning of an amount of money, or line of credit, under agreed terms and charges. LocalBusiness: URIRef # A particular physical business or branch of an organization. Examples of LocalBusiness include a restaurant, a particular branch of a restaurant chain, a branch of a bank, a medical practice, a club, a bowling alley, etc. LocationFeatureSpecification: URIRef # Specifies a location feature by providing a structured value representing a feature of an accommodation as a property-value pair of varying degrees of formality. - LockerDelivery: URIRef # A DeliveryMethod in which an item is made available via locker. 
+ LockerDelivery: ( + URIRef # A DeliveryMethod in which an item is made available via locker. + ) Locksmith: URIRef # A locksmith. LodgingBusiness: URIRef # A lodging business, such as a motel, hotel, or inn. LodgingReservation: URIRef # A reservation for lodging at a hotel, motel, inn, etc.\n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. @@ -631,19 +717,25 @@ class SDO(DefinedNamespace): MediaReviewItem: URIRef # Represents an item or group of closely related items treated as a unit for the sake of evaluation in a [[MediaReview]]. Authorship etc. apply to the items rather than to the curation/grouping or reviewing party. MediaSubscription: URIRef # A subscription which allows a user to access media including audio, video, books, etc. MedicalAudience: URIRef # Target audiences for medical web pages. - MedicalAudienceType: URIRef # Target audiences types for medical web pages. Enumerated type. + MedicalAudienceType: ( + URIRef # Target audiences types for medical web pages. Enumerated type. + ) MedicalBusiness: URIRef # A particular physical or virtual business of an organization for medical purposes. Examples of MedicalBusiness include differents business run by health professionals. MedicalCause: URIRef # The causative agent(s) that are responsible for the pathophysiologic process that eventually results in a medical condition, symptom or sign. In this schema, unless otherwise specified this is meant to be the proximate cause of the medical condition, symptom or sign. The proximate cause is defined as the causative agent that most directly results in the medical condition, symptom or sign. For example, the HIV virus could be considered a cause of AIDS. 
Or in a diagnostic context, if a patient fell and sustained a hip fracture and two days later sustained a pulmonary embolism which eventuated in a cardiac arrest, the cause of the cardiac arrest (the proximate cause) would be the pulmonary embolism and not the fall. Medical causes can include cardiovascular, chemical, dermatologic, endocrine, environmental, gastroenterologic, genetic, hematologic, gynecologic, iatrogenic, infectious, musculoskeletal, neurologic, nutritional, obstetric, oncologic, otolaryngologic, pharmacologic, psychiatric, pulmonary, renal, rheumatologic, toxic, traumatic, or urologic causes; medical conditions can be causes as well. MedicalClinic: URIRef # A facility, often associated with a hospital or medical school, that is devoted to the specific diagnosis and/or healthcare. Previously limited to outpatients but with evolution it may be open to inpatients as well. MedicalCode: URIRef # A code for a medical entity. MedicalCondition: URIRef # Any condition of the human body that affects the normal functioning of a person, whether physically or mentally. Includes diseases, injuries, disabilities, disorders, syndromes, etc. - MedicalConditionStage: URIRef # A stage of a medical condition, such as 'Stage IIIa'. + MedicalConditionStage: ( + URIRef # A stage of a medical condition, such as 'Stage IIIa'. + ) MedicalContraindication: URIRef # A condition or factor that serves as a reason to withhold a certain medical therapy. Contraindications can be absolute (there are no reasonable circumstances for undertaking a course of action) or relative (the patient is at higher risk of complications, but that these risks may be outweighed by other considerations or mitigated by other measures). MedicalDevice: URIRef # Any object used in a medical capacity, such as to diagnose or treat a patient. MedicalDevicePurpose: URIRef # Categories of medical devices, organized by the purpose or intended use of the device. 
MedicalEntity: URIRef # The most generic type of entity related to health and the practice of medicine. MedicalEnumeration: URIRef # Enumerations related to health and the practice of medicine: A concept that is used to attribute a quality to another concept, as a qualifier, a collection of items or a listing of all of the elements of a set in medicine practice. - MedicalEvidenceLevel: URIRef # Level of evidence for a medical guideline. Enumerated type. + MedicalEvidenceLevel: ( + URIRef # Level of evidence for a medical guideline. Enumerated type. + ) MedicalGuideline: URIRef # Any recommendation made by a standard society (e.g. ACC/AHA) or consensus statement that denotes how to diagnose and treat a particular condition. Note: this type should be used to tag the actual guideline recommendation; if the guideline recommendation occurs in a larger scholarly article, use MedicalScholarlyArticle to tag the overall article, not this type. Note also: the organization making the recommendation should be captured in the recognizingAuthority base property of MedicalEntity. MedicalGuidelineContraindication: URIRef # A guideline contraindication that designates a process as harmful and where quality of the data supporting the contraindication is sound. MedicalGuidelineRecommendation: URIRef # A guideline recommendation that is regarded as efficacious and where quality of the data supporting the recommendation is sound. @@ -651,10 +743,14 @@ class SDO(DefinedNamespace): MedicalIndication: URIRef # A condition or factor that indicates use of a medical therapy, including signs, symptoms, risk factors, anatomical states, etc. MedicalIntangible: URIRef # A utility class that serves as the umbrella for a number of 'intangible' things in the medical space. MedicalObservationalStudy: URIRef # An observational study is a type of medical study that attempts to infer the possible effect of a treatment through observation of a cohort of subjects over a period of time. 
In an observational study, the assignment of subjects into treatment groups versus control groups is outside the control of the investigator. This is in contrast with controlled studies, such as the randomized controlled trials represented by MedicalTrial, where each subject is randomly assigned to a treatment group or a control group before the start of the treatment. - MedicalObservationalStudyDesign: URIRef # Design models for observational medical studies. Enumerated type. + MedicalObservationalStudyDesign: ( + URIRef # Design models for observational medical studies. Enumerated type. + ) MedicalOrganization: URIRef # A medical organization (physical or not), such as hospital, institution or clinic. MedicalProcedure: URIRef # A process of care used in either a diagnostic, therapeutic, preventive or palliative capacity that relies on invasive (surgical), non-invasive, or other techniques. - MedicalProcedureType: URIRef # An enumeration that describes different types of medical procedures. + MedicalProcedureType: ( + URIRef # An enumeration that describes different types of medical procedures. + ) MedicalResearcher: URIRef # Medical researchers. MedicalRiskCalculator: URIRef # A complex mathematical calculation requiring an online calculator, used to assess prognosis. Note: use the url property of Thing to record any URLs for online calculators. MedicalRiskEstimator: URIRef # Any rule set or interactive tool for estimating the risk of developing a complication or condition. @@ -667,7 +763,9 @@ class SDO(DefinedNamespace): MedicalStudy: URIRef # A medical study is an umbrella type covering all kinds of research studies relating to human medicine or health, including observational studies and interventional trials and registries, randomized, controlled or not. When the specific type of study is known, use one of the extensions of this type, such as MedicalTrial or MedicalObservationalStudy. 
Also, note that this type should be used to mark up data that describes the study itself; to tag an article that publishes the results of a study, use MedicalScholarlyArticle. Note: use the code property of MedicalEntity to store study IDs, e.g. clinicaltrials.gov ID. MedicalStudyStatus: URIRef # The status of a medical study. Enumerated type. MedicalSymptom: URIRef # Any complaint sensed and expressed by the patient (therefore defined as subjective) like stomachache, lower-back pain, or fatigue. - MedicalTest: URIRef # Any medical test, typically performed for diagnostic purposes. + MedicalTest: ( + URIRef # Any medical test, typically performed for diagnostic purposes. + ) MedicalTestPanel: URIRef # Any collection of tests commonly ordered together. MedicalTherapy: URIRef # Any medical intervention designed to prevent, treat, and cure human diseases and medical conditions, including both curative and palliative therapies. Medical therapies are typically processes of care relying upon pharmacotherapy, behavioral therapy, supportive therapy (with fluid or nutrition for example), or detoxification (e.g. hemodialysis) aimed at improving or preventing a health condition. MedicalTrial: URIRef # A medical trial is a type of medical study that uses scientific process used to compare the safety and efficacy of medical therapies or medical procedures. In general, medical trials are controlled and subjects are allocated at random to the different treatment and/or control groups. @@ -679,14 +777,28 @@ class SDO(DefinedNamespace): Menu: URIRef # A structured representation of food or drink items available from a FoodEstablishment. MenuItem: URIRef # A food or drink item listed in a menu or menu section. MenuSection: URIRef # A sub-grouping of food or drink items in a menu. E.g. courses (such as 'Dinner', 'Breakfast', etc.), specific type of dishes (such as 'Meat', 'Vegan', 'Drinks', etc.), or some other classification made by the menu provider. 
- MerchantReturnEnumeration: URIRef # Enumerates several kinds of product return policies. - MerchantReturnFiniteReturnWindow: URIRef # Specifies that there is a finite window for product returns. - MerchantReturnNotPermitted: URIRef # Specifies that product returns are not permitted. + MerchantReturnEnumeration: ( + URIRef # Enumerates several kinds of product return policies. + ) + MerchantReturnFiniteReturnWindow: ( + URIRef # Specifies that there is a finite window for product returns. + ) + MerchantReturnNotPermitted: ( + URIRef # Specifies that product returns are not permitted. + ) MerchantReturnPolicy: URIRef # A MerchantReturnPolicy provides information about product return policies associated with an [[Organization]], [[Product]], or [[Offer]]. - MerchantReturnPolicySeasonalOverride: URIRef # A seasonal override of a return policy, for example used for holidays. - MerchantReturnUnlimitedWindow: URIRef # Specifies that there is an unlimited window for product returns. - MerchantReturnUnspecified: URIRef # Specifies that a product return policy is not provided. - Message: URIRef # A single message from a sender to one or more organizations or people. + MerchantReturnPolicySeasonalOverride: ( + URIRef # A seasonal override of a return policy, for example used for holidays. + ) + MerchantReturnUnlimitedWindow: ( + URIRef # Specifies that there is an unlimited window for product returns. + ) + MerchantReturnUnspecified: ( + URIRef # Specifies that a product return policy is not provided. + ) + Message: ( + URIRef # A single message from a sender to one or more organizations or people. + ) MiddleSchool: URIRef # A middle school (typically for children aged around 11-14, although this varies somewhat). Midwifery: URIRef # A nurse-like health profession that deals with pregnancy, childbirth, and the postpartum period (including care of the newborn), besides sexual and reproductive health of women throughout their lives. 
MinimumAdvertisedPrice: URIRef # Represents the minimum advertised price ("MAP") (as dictated by the manufacturer) of an offered product. @@ -694,11 +806,15 @@ class SDO(DefinedNamespace): MixedEventAttendanceMode: URIRef # MixedEventAttendanceMode - an event that is conducted as a combination of both offline and online modes. MixtapeAlbum: URIRef # MixtapeAlbum. MobileApplication: URIRef # A software application designed specifically to work well on a mobile device such as a telephone. - MobilePhoneStore: URIRef # A store that sells mobile phones and related accessories. + MobilePhoneStore: ( + URIRef # A store that sells mobile phones and related accessories. + ) MolecularEntity: URIRef # Any constitutionally or isotopically distinct atom, molecule, ion, ion pair, radical, radical ion, complex, conformer etc., identifiable as a separately distinguishable entity. Monday: URIRef # The day of the week between Sunday and Tuesday. MonetaryAmount: URIRef # A monetary value or range. This type can be used to describe an amount of money such as $50 USD, or a range as in describing a bank account being suitable for a balance between £1,000 and £1,000,000 GBP, or the value of a salary, etc. It is recommended to use [[PriceSpecification]] Types to describe the price of an Offer, Invoice, etc. - MonetaryAmountDistribution: URIRef # A statistical distribution of monetary amounts. + MonetaryAmountDistribution: ( + URIRef # A statistical distribution of monetary amounts. + ) MonetaryGrant: URIRef # A monetary grant. MoneyTransfer: URIRef # The act of transferring money from one place to another place. This may occur electronically or physically. MortgageLoan: URIRef # A loan in which property or real estate is used as collateral. (A loan securitized against some real estate). @@ -725,7 +841,9 @@ class SDO(DefinedNamespace): Museum: URIRef # A museum. MusicAlbum: URIRef # A collection of music tracks. 
MusicAlbumProductionType: URIRef # Classification of the album by it's type of content: soundtrack, live album, studio album, etc. - MusicAlbumReleaseType: URIRef # The kind of release which this album is: single, EP or album. + MusicAlbumReleaseType: ( + URIRef # The kind of release which this album is: single, EP or album. + ) MusicComposition: URIRef # A musical composition. MusicEvent: URIRef # Event type: Music event. MusicGroup: URIRef # A musical group, such as a band, an orchestra, or a choir. Can also be a solo musician. @@ -745,16 +863,22 @@ class SDO(DefinedNamespace): Neurologic: URIRef # A specific branch of medical science that studies the nerves and nervous system and its respective disease states. NewCondition: URIRef # Indicates that the item is new. NewsArticle: URIRef # A NewsArticle is an article whose content reports news, or provides background context and supporting materials for understanding the news. A more detailed overview of [schema.org News markup](/docs/news.html) is also available. - NewsMediaOrganization: URIRef # A News/Media organization such as a newspaper or TV station. + NewsMediaOrganization: ( + URIRef # A News/Media organization such as a newspaper or TV station. + ) Newspaper: URIRef # A publication containing information about varied topics that are pertinent to general information, a geographic area, or a specific subject matter (i.e. business, culture, education). Often published daily. NightClub: URIRef # A nightclub or discotheque. - NoninvasiveProcedure: URIRef # A type of medical procedure that involves noninvasive techniques. + NoninvasiveProcedure: ( + URIRef # A type of medical procedure that involves noninvasive techniques. + ) Nonprofit501a: URIRef # Nonprofit501a: Non-profit type referring to Farmers’ Cooperative Associations. Nonprofit501c1: URIRef # Nonprofit501c1: Non-profit type referring to Corporations Organized Under Act of Congress, including Federal Credit Unions and National Farm Loan Associations. 
Nonprofit501c10: URIRef # Nonprofit501c10: Non-profit type referring to Domestic Fraternal Societies and Associations. Nonprofit501c11: URIRef # Nonprofit501c11: Non-profit type referring to Teachers' Retirement Fund Associations. Nonprofit501c12: URIRef # Nonprofit501c12: Non-profit type referring to Benevolent Life Insurance Associations, Mutual Ditch or Irrigation Companies, Mutual or Cooperative Telephone Companies. - Nonprofit501c13: URIRef # Nonprofit501c13: Non-profit type referring to Cemetery Companies. + Nonprofit501c13: ( + URIRef # Nonprofit501c13: Non-profit type referring to Cemetery Companies. + ) Nonprofit501c14: URIRef # Nonprofit501c14: Non-profit type referring to State-Chartered Credit Unions, Mutual Reserve Funds. Nonprofit501c15: URIRef # Nonprofit501c15: Non-profit type referring to Mutual Insurance Companies or Associations. Nonprofit501c16: URIRef # Nonprofit501c16: Non-profit type referring to Cooperative Organizations to Finance Crop Operations. @@ -765,7 +889,9 @@ class SDO(DefinedNamespace): Nonprofit501c20: URIRef # Nonprofit501c20: Non-profit type referring to Group Legal Services Plan Organizations. Nonprofit501c21: URIRef # Nonprofit501c21: Non-profit type referring to Black Lung Benefit Trusts. Nonprofit501c22: URIRef # Nonprofit501c22: Non-profit type referring to Withdrawal Liability Payment Funds. - Nonprofit501c23: URIRef # Nonprofit501c23: Non-profit type referring to Veterans Organizations. + Nonprofit501c23: ( + URIRef # Nonprofit501c23: Non-profit type referring to Veterans Organizations. + ) Nonprofit501c24: URIRef # Nonprofit501c24: Non-profit type referring to Section 4049 ERISA Trusts. Nonprofit501c25: URIRef # Nonprofit501c25: Non-profit type referring to Real Property Title-Holding Corporations or Trusts with Multiple Parents. Nonprofit501c26: URIRef # Nonprofit501c26: Non-profit type referring to State-Sponsored Organizations Providing Health Coverage for High-Risk Individuals. 
@@ -781,10 +907,16 @@ class SDO(DefinedNamespace): Nonprofit501d: URIRef # Nonprofit501d: Non-profit type referring to Religious and Apostolic Associations. Nonprofit501e: URIRef # Nonprofit501e: Non-profit type referring to Cooperative Hospital Service Organizations. Nonprofit501f: URIRef # Nonprofit501f: Non-profit type referring to Cooperative Service Organizations. - Nonprofit501k: URIRef # Nonprofit501k: Non-profit type referring to Child Care Organizations. - Nonprofit501n: URIRef # Nonprofit501n: Non-profit type referring to Charitable Risk Pools. + Nonprofit501k: ( + URIRef # Nonprofit501k: Non-profit type referring to Child Care Organizations. + ) + Nonprofit501n: ( + URIRef # Nonprofit501n: Non-profit type referring to Charitable Risk Pools. + ) Nonprofit501q: URIRef # Nonprofit501q: Non-profit type referring to Credit Counseling Organizations. - Nonprofit527: URIRef # Nonprofit527: Non-profit type referring to Political organizations. + Nonprofit527: ( + URIRef # Nonprofit527: Non-profit type referring to Political organizations. + ) NonprofitANBI: URIRef # NonprofitANBI: Non-profit type referring to a Public Benefit Organization (NL). NonprofitSBBI: URIRef # NonprofitSBBI: Non-profit type referring to a Social Interest Promoting Institution (NL). NonprofitType: URIRef # NonprofitType enumerates several kinds of official non-profit types of which a non-profit organization can be. @@ -830,19 +962,31 @@ class SDO(DefinedNamespace): Optician: URIRef # A store that sells reading glasses and similar devices for improving vision. Optometric: URIRef # The science or practice of testing visual acuity and prescribing corrective lenses. Order: URIRef # An order is a confirmation of a transaction (a receipt), which can contain multiple line items, each represented by an Offer that has been accepted by the customer. - OrderAction: URIRef # An agent orders an object/product/service to be delivered/sent. 
+ OrderAction: ( + URIRef # An agent orders an object/product/service to be delivered/sent. + ) OrderCancelled: URIRef # OrderStatus representing cancellation of an order. OrderDelivered: URIRef # OrderStatus representing successful delivery of an order. OrderInTransit: URIRef # OrderStatus representing that an order is in transit. OrderItem: URIRef # An order item is a line of an order. It includes the quantity and shipping details of a bought offer. OrderPaymentDue: URIRef # OrderStatus representing that payment is due on an order. - OrderPickupAvailable: URIRef # OrderStatus representing availability of an order for pickup. - OrderProblem: URIRef # OrderStatus representing that there is a problem with the order. - OrderProcessing: URIRef # OrderStatus representing that an order is being processed. + OrderPickupAvailable: ( + URIRef # OrderStatus representing availability of an order for pickup. + ) + OrderProblem: ( + URIRef # OrderStatus representing that there is a problem with the order. + ) + OrderProcessing: ( + URIRef # OrderStatus representing that an order is being processed. + ) OrderReturned: URIRef # OrderStatus representing that an order has been returned. OrderStatus: URIRef # Enumerated status values for Order. - Organization: URIRef # An organization such as a school, NGO, corporation, club, etc. - OrganizationRole: URIRef # A subclass of Role used to describe roles within organizations. + Organization: ( + URIRef # An organization such as a school, NGO, corporation, club, etc. + ) + OrganizationRole: ( + URIRef # A subclass of Role used to describe roles within organizations. + ) OrganizeAction: URIRef # The act of manipulating/administering/supervising/controlling one or more objects. OriginalMediaContent: URIRef # Content coded 'as original media content' in a [[MediaReview]], considered in the context of how it was published or shared. 
For a [[VideoObject]] to be 'original': No evidence the footage has been misleadingly altered or manipulated, though it may contain false or misleading claims. For an [[ImageObject]] to be 'original': No evidence the image has been misleadingly altered or manipulated, though it may still contain false or misleading claims. For an [[ImageObject]] with embedded text to be 'original': No evidence the image has been misleadingly altered or manipulated, though it may still contain false or misleading claims. For an [[AudioObject]] to be 'original': No evidence the audio has been misleadingly altered or manipulated, though it may contain false or misleading claims. OriginalShippingFees: URIRef # Specifies that the customer must pay the original shipping costs when returning a product. @@ -872,11 +1016,17 @@ class SDO(DefinedNamespace): PatientExperienceHealthAspect: URIRef # Content about the real life experience of patients or people that have lived a similar experience about the topic. May be forums, topics, Q-and-A and related material. PawnShop: URIRef # A shop that will buy, or lend money against the security of, personal possessions. PayAction: URIRef # An agent pays a price to a participant. - PaymentAutomaticallyApplied: URIRef # An automatic payment system is in place and will be used. + PaymentAutomaticallyApplied: ( + URIRef # An automatic payment system is in place and will be used. + ) PaymentCard: URIRef # A payment method using a credit, debit, store or other card to associate the payment with an account. - PaymentChargeSpecification: URIRef # The costs of settling the payment using a particular payment method. + PaymentChargeSpecification: ( + URIRef # The costs of settling the payment using a particular payment method. + ) PaymentComplete: URIRef # The payment has been received and processed. - PaymentDeclined: URIRef # The payee received the payment, but it was declined for some reason. 
+ PaymentDeclined: ( + URIRef # The payee received the payment, but it was declined for some reason. + ) PaymentDue: URIRef # The payment is due, but still within an acceptable time to be received. PaymentMethod: URIRef # A payment method is a standardized procedure for transferring the monetary amount for a purchase. Payment methods are characterized by the legal and technical structures used, and by the organization or group carrying out the transaction.\n\nCommonly used values:\n\n* http://purl.org/goodrelations/v1#ByBankTransferInAdvance\n* http://purl.org/goodrelations/v1#ByInvoice\n* http://purl.org/goodrelations/v1#Cash\n* http://purl.org/goodrelations/v1#CheckInAdvance\n* http://purl.org/goodrelations/v1#COD\n* http://purl.org/goodrelations/v1#DirectDebit\n* http://purl.org/goodrelations/v1#GoogleCheckout\n* http://purl.org/goodrelations/v1#PayPal\n* http://purl.org/goodrelations/v1#PaySwarm PaymentPastDue: URIRef # The payment is due and considered late. @@ -888,7 +1038,9 @@ class SDO(DefinedNamespace): PerformAction: URIRef # The act of participating in performance arts. PerformanceRole: URIRef # A PerformanceRole is a Role that some entity places with regard to a theatrical performance, e.g. in a Movie, TVSeries etc. PerformingArtsTheater: URIRef # A theater or other performing art center. - PerformingGroup: URIRef # A performance group, such as a band, an orchestra, or a circus. + PerformingGroup: ( + URIRef # A performance group, such as a band, an orchestra, or a circus. + ) Periodical: URIRef # A publication in any medium issued in successive parts bearing numerical or chronological designations and intended, such as a magazine, scholarly journal, or newspaper to continue indefinitely.\n\nSee also [blog post](http://blog.schema.org/2014/09/schemaorg-support-for-bibliographic_2.html). Permit: URIRef # A permit issued by an organization, e.g. a parking pass. Person: URIRef # A person (alive, dead, undead, or fictional). 
@@ -896,10 +1048,14 @@ class SDO(DefinedNamespace): Pharmacy: URIRef # A pharmacy or drugstore. PharmacySpecialty: URIRef # The practice or art and science of preparing and dispensing drugs and medicines. Photograph: URIRef # A photograph. - PhotographAction: URIRef # The act of capturing still images of objects using a camera. + PhotographAction: ( + URIRef # The act of capturing still images of objects using a camera. + ) PhysicalActivity: URIRef # Any bodily activity that enhances or maintains physical fitness and overall health and wellness. Includes activity that is part of daily living and routine, structured exercise, and exercise prescribed as part of a medical treatment or recovery plan. PhysicalActivityCategory: URIRef # Categories of physical activity, organized by physiologic classification. - PhysicalExam: URIRef # A type of physical examination of a patient performed by a physician. + PhysicalExam: ( + URIRef # A type of physical examination of a patient performed by a physician. + ) PhysicalTherapy: URIRef # A process of progressive physical care and rehabilitation aimed at improving a health condition. Physician: URIRef # A doctor's office. Physiotherapy: URIRef # The practice of treatment of disease, injury, or deformity by physical methods such as massage, heat treatment, and exercise rather than by drugs or surgery.. @@ -926,13 +1082,21 @@ class SDO(DefinedNamespace): PreOrder: URIRef # Indicates that the item is available for pre-order. PreOrderAction: URIRef # An agent orders a (not yet released) object/product/service to be delivered/sent. PreSale: URIRef # Indicates that the item is available for ordering and delivery before general availability. - PregnancyHealthAspect: URIRef # Content discussing pregnancy-related aspects of a health topic. - PrependAction: URIRef # The act of inserting at the beginning if an ordered collection. + PregnancyHealthAspect: ( + URIRef # Content discussing pregnancy-related aspects of a health topic. 
+ ) + PrependAction: ( + URIRef # The act of inserting at the beginning if an ordered collection. + ) Preschool: URIRef # A preschool. PrescriptionOnly: URIRef # Available by prescription only. - PresentationDigitalDocument: URIRef # A file containing slides or used for a presentation. + PresentationDigitalDocument: ( + URIRef # A file containing slides or used for a presentation. + ) PreventionHealthAspect: URIRef # Information about actions or measures that can be taken to avoid getting the topic or reaching a critical situation related to the topic. - PreventionIndication: URIRef # An indication for preventing an underlying condition, symptom, etc. + PreventionIndication: ( + URIRef # An indication for preventing an underlying condition, symptom, etc. + ) PriceComponentTypeEnumeration: URIRef # Enumerates different price components that together make up the total price for an offered product. PriceSpecification: URIRef # A structured value representing a price or price range. Typically, only the subclasses of this type are used for markup. It is recommended to use [[MonetaryAmount]] to describe independent amounts of money such as a salary, credit card limits, etc. PriceTypeEnumeration: URIRef # Enumerates different price types, for example list price, invoice price, and sale price. @@ -944,7 +1108,9 @@ class SDO(DefinedNamespace): ProductModel: URIRef # A datasheet or vendor specification of a product (in the sense of a prototypical description). ProfessionalService: URIRef # Original definition: "provider of professional services."\n\nThe general [[ProfessionalService]] type for local businesses was deprecated due to confusion with [[Service]]. For reference, the types that it included were: [[Dentist]], [[AccountingService]], [[Attorney]], [[Notary]], as well as types for several kinds of [[HomeAndConstructionBusiness]]: [[Electrician]], [[GeneralContractor]], [[HousePainter]], [[Locksmith]], [[Plumber]], [[RoofingContractor]]. 
[[LegalService]] was introduced as a more inclusive supertype of [[Attorney]]. ProfilePage: URIRef # Web page type: Profile page. - PrognosisHealthAspect: URIRef # Typical progression and happenings of life course of the topic. + PrognosisHealthAspect: ( + URIRef # Typical progression and happenings of life course of the topic. + ) ProgramMembership: URIRef # Used to describe membership in a loyalty programs (e.g. "StarAliance"), traveler clubs (e.g. "AAA"), purchase clubs ("Safeway Club"), etc. Project: URIRef # An enterprise (potentially individual but typically collaborative), planned to achieve a particular aim. Use properties from [[Organization]], [[subOrganization]]/[[parentOrganization]] to indicate project sub-structures. PronounceableText: URIRef # Data type: PronounceableText. @@ -978,12 +1144,16 @@ class SDO(DefinedNamespace): RadioChannel: URIRef # A unique instance of a radio BroadcastService on a CableOrSatelliteService lineup. RadioClip: URIRef # A short radio program or a segment/part of a radio program. RadioEpisode: URIRef # A radio episode which can be part of a series or season. - RadioSeason: URIRef # Season dedicated to radio broadcast and associated online delivery. + RadioSeason: ( + URIRef # Season dedicated to radio broadcast and associated online delivery. + ) RadioSeries: URIRef # CreativeWorkSeries dedicated to radio broadcast and associated online delivery. RadioStation: URIRef # A radio station. Radiography: URIRef # Radiography is an imaging technique that uses electromagnetic radiation other than visible light, especially X-rays, to view the internal structure of a non-uniformly composed and opaque object such as the human body. RandomizedTrial: URIRef # A randomized trial design. - Rating: URIRef # A rating is an evaluation on a numeric scale, such as 1 to 5 stars. + Rating: ( + URIRef # A rating is an evaluation on a numeric scale, such as 1 to 5 stars. 
+ ) ReactAction: URIRef # The act of responding instinctively and emotionally to an object, expressing a sentiment. ReadAction: URIRef # The act of consuming written content. ReadPermission: URIRef # Permission to read or view the document. @@ -996,13 +1166,17 @@ class SDO(DefinedNamespace): RecommendedDoseSchedule: URIRef # A recommended dosing schedule for a drug or supplement as prescribed or recommended by an authority or by the drug/supplement's manufacturer. Capture the recommending authority in the recognizingAuthority property of MedicalEntity. Recruiting: URIRef # Recruiting participants. RecyclingCenter: URIRef # A recycling center. - RefundTypeEnumeration: URIRef # Enumerates several kinds of product return refund types. + RefundTypeEnumeration: ( + URIRef # Enumerates several kinds of product return refund types. + ) RefurbishedCondition: URIRef # Indicates that the item is refurbished. RegisterAction: URIRef # The act of registering to be a user of a service, product or web page.\n\nRelated actions:\n\n* [[JoinAction]]: Unlike JoinAction, RegisterAction implies you are registering to be a user of a service, *not* a group/team of people.\n* [FollowAction]]: Unlike FollowAction, RegisterAction doesn't imply that the agent is expecting to poll for updates from the object.\n* [[SubscribeAction]]: Unlike SubscribeAction, RegisterAction doesn't imply that the agent is expecting updates from the object. Registry: URIRef # A registry-based study design. ReimbursementCap: URIRef # The drug's cost represents the maximum reimbursement paid by an insurer for the drug. RejectAction: URIRef # The act of rejecting to/adopting an object.\n\nRelated actions:\n\n* [[AcceptAction]]: The antonym of RejectAction. - RelatedTopicsHealthAspect: URIRef # Other prominent or relevant topics tied to the main topic. + RelatedTopicsHealthAspect: ( + URIRef # Other prominent or relevant topics tied to the main topic. + ) RemixAlbum: URIRef # RemixAlbum. 
Renal: URIRef # A specific branch of medical science that pertains to the study of the kidneys and its respective disease states. RentAction: URIRef # The act of giving money in return for temporary use, but not ownership, of an object such as a vehicle or property. For example, an agent rents a property from a landlord in exchange for a periodic payment. @@ -1011,10 +1185,14 @@ class SDO(DefinedNamespace): RepaymentSpecification: URIRef # A structured value representing repayment. ReplaceAction: URIRef # The act of editing a recipient by replacing an old object with a new object. ReplyAction: URIRef # The act of responding to a question/message asked/sent by the object. Related to [[AskAction]]\n\nRelated actions:\n\n* [[AskAction]]: Appears generally as an origin of a ReplyAction. - Report: URIRef # A Report generated by governmental or non-governmental organization. + Report: ( + URIRef # A Report generated by governmental or non-governmental organization. + ) ReportageNewsArticle: URIRef # The [[ReportageNewsArticle]] type is a subtype of [[NewsArticle]] representing news articles which are the result of journalistic news reporting conventions. In practice many news publishers produce a wide variety of article types, many of which might be considered a [[NewsArticle]] but not a [[ReportageNewsArticle]]. For example, opinion pieces, reviews, analysis, sponsored or satirical articles, or articles that combine several of these elements. The [[ReportageNewsArticle]] type is based on a stricter ideal for "news" as a work of journalism, with articles based on factual information either observed or verified by the author, or reported and verified from knowledgeable sources. This often includes perspectives from multiple viewpoints on a particular issue (distinguishing news reports from public relations or propaganda). 
News reports in the [[ReportageNewsArticle]] sense de-emphasize the opinion of the author, with commentary and value judgements typically expressed elsewhere. A [[ReportageNewsArticle]] which goes deeper into analysis can also be marked with an additional type of [[AnalysisNewsArticle]]. ReportedDoseSchedule: URIRef # A patient-reported or observed dosing schedule for a drug or supplement. - ResearchOrganization: URIRef # A Research Organization (e.g. scientific institute, research company). + ResearchOrganization: ( + URIRef # A Research Organization (e.g. scientific institute, research company). + ) ResearchProject: URIRef # A Research project. Researcher: URIRef # Researchers. Reservation: URIRef # Describes a reservation for travel, dining or an event. Some reservations require tickets. \n\nNote: This type is for information about actual reservations, e.g. in confirmation emails or HTML pages with individual confirmations of reservations. For offers of tickets, restaurant reservations, flights, or rental cars, use [[Offer]]. @@ -1040,15 +1218,23 @@ class SDO(DefinedNamespace): ReturnAtKiosk: URIRef # Specifies that product returns must be made at a kiosk. ReturnByMail: URIRef # Specifies that product returns must to be done by mail. ReturnFeesCustomerResponsibility: URIRef # Specifies that product returns must be paid for, and are the responsibility of, the customer. - ReturnFeesEnumeration: URIRef # Enumerates several kinds of policies for product return fees. + ReturnFeesEnumeration: ( + URIRef # Enumerates several kinds of policies for product return fees. + ) ReturnInStore: URIRef # Specifies that product returns must be made in a store. ReturnLabelCustomerResponsibility: URIRef # Indicated that creating a return label is the responsibility of the customer. ReturnLabelDownloadAndPrint: URIRef # Indicated that a return label must be downloaded and printed by the customer. 
ReturnLabelInBox: URIRef # Specifies that a return label will be provided by the seller in the shipping box. - ReturnLabelSourceEnumeration: URIRef # Enumerates several types of return labels for product returns. - ReturnMethodEnumeration: URIRef # Enumerates several types of product return methods. + ReturnLabelSourceEnumeration: ( + URIRef # Enumerates several types of return labels for product returns. + ) + ReturnMethodEnumeration: ( + URIRef # Enumerates several types of product return methods. + ) ReturnShippingFees: URIRef # Specifies that the customer must pay the return shipping costs when returning a product - Review: URIRef # A review of an item - for example, of a restaurant, movie, or store. + Review: ( + URIRef # A review of an item - for example, of a restaurant, movie, or store. + ) ReviewAction: URIRef # The act of producing a balanced opinion about the object for an audience. An agent reviews an object with participants resulting in a review. ReviewNewsArticle: URIRef # A [[NewsArticle]] and [[CriticReview]] providing a professional critic's assessment of a service, product, performance, or artistic or literary work. Rheumatologic: URIRef # A specific branch of medical science that deals with the study and treatment of rheumatic, autoimmune or joint diseases. @@ -1064,7 +1250,9 @@ class SDO(DefinedNamespace): RsvpResponseType: URIRef # RsvpResponseType is an enumeration type whose instances represent responding to an RSVP request. RsvpResponseYes: URIRef # The invitee will attend. SRP: URIRef # Represents the suggested retail price ("SRP") of an offered product. - SafetyHealthAspect: URIRef # Content about the safety-related aspects of a health topic. + SafetyHealthAspect: ( + URIRef # Content about the safety-related aspects of a health topic. + ) SaleEvent: URIRef # Event type: Sales event. SalePrice: URIRef # Represents a sale price (usually active for a limited period) of an offered product. 
SatireOrParodyContent: URIRef # Content coded 'satire or parody content' in a [[MediaReview]], considered in the context of how it was published or shared. For a [[VideoObject]] to be 'satire or parody content': A video that was created as political or humorous commentary and is presented in that context. (Reshares of satire/parody content that do not include relevant context are more likely to fall under the “missing context” rating.) For an [[ImageObject]] to be 'satire or parody content': An image that was created as political or humorous commentary and is presented in that context. (Reshares of satire/parody content that do not include relevant context are more likely to fall under the “missing context” rating.) For an [[ImageObject]] with embedded text to be 'satire or parody content': An image that was created as political or humorous commentary and is presented in that context. (Reshares of satire/parody content that do not include relevant context are more likely to fall under the “missing context” rating.) For an [[AudioObject]] to be 'satire or parody content': Audio that was created as political or humorous commentary and is presented in that context. (Reshares of satire/parody content that do not include relevant context are more likely to fall under the “missing context” rating.) @@ -1076,7 +1264,9 @@ class SDO(DefinedNamespace): School: URIRef # A school. SchoolDistrict: URIRef # A School District is an administrative area for the administration of schools. ScreeningEvent: URIRef # A screening of a movie or other video. - ScreeningHealthAspect: URIRef # Content about how to screen or further filter a topic. + ScreeningHealthAspect: ( + URIRef # Content about how to screen or further filter a topic. + ) Sculpture: URIRef # A piece of sculpture. SeaBodyOfWater: URIRef # A sea (for example, the Caspian sea). 
SearchAction: URIRef # The act of searching for an object.\n\nRelated actions:\n\n* [[FindAction]]: SearchAction generally leads to a FindAction, but not necessarily. @@ -1100,14 +1290,18 @@ class SDO(DefinedNamespace): ShoeStore: URIRef # A shoe store. ShoppingCenter: URIRef # A shopping center or mall. ShortStory: URIRef # Short story or tale. A brief work of literature, usually written in narrative prose. - SideEffectsHealthAspect: URIRef # Side effects that can be observed from the usage of the topic. + SideEffectsHealthAspect: ( + URIRef # Side effects that can be observed from the usage of the topic. + ) SingleBlindedTrial: URIRef # A trial design in which the researcher knows which treatment the patient was randomly assigned to but the patient does not. SingleCenterTrial: URIRef # A trial that takes place at a single center. SingleFamilyResidence: URIRef # Residence type: Single-family home. SinglePlayer: URIRef # Play mode: SinglePlayer. Which is played by a lone player. SingleRelease: URIRef # SingleRelease. SiteNavigationElement: URIRef # A navigation element of the page. - SizeGroupEnumeration: URIRef # Enumerates common size groups for various product categories. + SizeGroupEnumeration: ( + URIRef # Enumerates common size groups for various product categories. + ) SizeSpecification: URIRef # Size related properties of a product, typically a size code ([[name]]) and optionally a [[sizeSystem]], [[sizeGroup]], and product measurements ([[hasMeasurement]]). In addition, the intended audience can be defined through [[suggestedAge]], [[suggestedGender]], and suggested body measurements ([[suggestedMeasurement]]). SizeSystemEnumeration: URIRef # Enumerates common size systems for different categories of products, for example "EN-13402" or "UK" for wearables or "Imperial" for screws. SizeSystemImperial: URIRef # Imperial size system. @@ -1120,7 +1314,9 @@ class SDO(DefinedNamespace): SoftwareSourceCode: URIRef # Computer programming source code. 
Example: Full (compile ready) solutions, code snippet samples, scripts, templates. SoldOut: URIRef # Indicates that the item has sold out. SolveMathAction: URIRef # The action that takes in a math expression and directs users to a page potentially capable of solving/simplifying that expression. - SomeProducts: URIRef # A placeholder for multiple similar products of the same kind. + SomeProducts: ( + URIRef # A placeholder for multiple similar products of the same kind. + ) SoundtrackAlbum: URIRef # SoundtrackAlbum. SpeakableSpecification: URIRef # A SpeakableSpecification indicates (typically via [[xpath]] or [[cssSelector]]) sections of a document that are highlighted as particularly [[speakable]]. Instances of this type are expected to be used primarily as values of the [[speakable]] property. SpecialAnnouncement: URIRef # A SpecialAnnouncement combines a simple date-stamped textual information update with contextualized Web links and other structured data. It represents an information update made by a locally-oriented organization, for example schools, pharmacies, healthcare providers, community groups, police, local government. For work in progress guidelines on Coronavirus-related markup see [this doc](https://docs.google.com/document/d/14ikaGCKxo50rRM7nvKSlbUpjyIk2WMQd3IkB1lItlrM/edit#). The motivating scenario for SpecialAnnouncement is the [Coronavirus pandemic](https://en.wikipedia.org/wiki/2019%E2%80%9320_coronavirus_pandemic), and the initial vocabulary is oriented to this urgent situation. Schema.org expect to improve the markup iteratively as it is deployed and as feedback emerges from use. In addition to our usual [Github entry](https://github.com/schemaorg/schemaorg/issues/2490), feedback comments can also be provided in [this document](https://docs.google.com/document/d/1fpdFFxk8s87CWwACs53SGkYv3aafSxz_DTtOQxMrBJQ/edit#). 
While this schema is designed to communicate urgent crisis-related information, it is not the same as an emergency warning technology like [CAP](https://en.wikipedia.org/wiki/Common_Alerting_Protocol), although there may be overlaps. The intent is to cover the kinds of everyday practical information being posted to existing websites during an emergency situation. Several kinds of information can be provided: We encourage the provision of "name", "text", "datePosted", "expires" (if appropriate), "category" and "url" as a simple baseline. It is important to provide a value for "category" where possible, most ideally as a well known URL from Wikipedia or Wikidata. In the case of the 2019-2020 Coronavirus pandemic, this should be "https://en.wikipedia.org/w/index.php?title=2019-20\_coronavirus\_pandemic" or "https://www.wikidata.org/wiki/Q81068910". For many of the possible properties, values can either be simple links or an inline description, depending on whether a summary is available. For a link, provide just the URL of the appropriate page as the property's value. For an inline description, use a [[WebContent]] type, and provide the url as a property of that, alongside at least a simple "[[text]]" summary of the page. It is unlikely that a single SpecialAnnouncement will need all of the possible properties simultaneously. We expect that in many cases the page referenced might contain more specialized structured data, e.g. contact info, [[openingHours]], [[Event]], [[FAQPage]] etc. By linking to those pages from a [[SpecialAnnouncement]] you can help make it clearer that the events are related to the situation (e.g. Coronavirus) indicated by the [[category]] property of the [[SpecialAnnouncement]]. Many [[SpecialAnnouncement]]s will relate to particular regions and to identifiable local organizations. Use [[spatialCoverage]] for the region, and [[announcementLocation]] to indicate specific [[LocalBusiness]]es and [[CivicStructure]]s. 
If the announcement affects both a particular region and a specific location (for example, a library closure that serves an entire region), use both [[spatialCoverage]] and [[announcementLocation]]. The [[about]] property can be used to indicate entities that are the focus of the announcement. We now recommend using [[about]] only for representing non-location entities (e.g. a [[Course]] or a [[RadioStation]]). For places, use [[announcementLocation]] and [[spatialCoverage]]. Consumers of this markup should be aware that the initial design encouraged the use of /about for locations too. The basic content of [[SpecialAnnouncement]] is similar to that of an [RSS](https://en.wikipedia.org/wiki/RSS) or [Atom](https://en.wikipedia.org/wiki/Atom_(Web_standard)) feed. For publishers without such feeds, basic feed-like information can be shared by posting [[SpecialAnnouncement]] updates in a page, e.g. using JSON-LD. For sites with Atom/RSS functionality, you can point to a feed with the [[webFeed]] property. This can be a simple URL, or an inline [[DataFeed]] object, with [[encodingFormat]] providing media type information e.g. "application/rss+xml" or "application/atom+xml". @@ -1211,7 +1407,9 @@ class SDO(DefinedNamespace): TransitMap: URIRef # A transit map. TravelAction: URIRef # The act of traveling from an fromLocation to a destination by a specified mode of transport, optionally with participants. TravelAgency: URIRef # A travel agency. - TreatmentIndication: URIRef # An indication for treating an underlying condition, symptom, etc. + TreatmentIndication: ( + URIRef # An indication for treating an underlying condition, symptom, etc. + ) TreatmentsHealthAspect: URIRef # Treatments or related therapies for a Topic. Trip: URIRef # A trip or journey. An itinerary of visits to one or more places. 
TripleBlindedTrial: URIRef # A trial design in which neither the researcher, the person administering the therapy nor the patient knows the details of the treatment the patient was randomly assigned to. @@ -1225,13 +1423,19 @@ class SDO(DefinedNamespace): USNonprofitType: URIRef # USNonprofitType: Non-profit organization type originating from the United States. Ultrasound: URIRef # Ultrasound imaging. UnRegisterAction: URIRef # The act of un-registering from a service.\n\nRelated actions:\n\n* [[RegisterAction]]: antonym of UnRegisterAction.\n* [[LeaveAction]]: Unlike LeaveAction, UnRegisterAction implies that you are unregistering from a service you werer previously registered, rather than leaving a team/group of people. - UnemploymentSupport: URIRef # UnemploymentSupport: this is a benefit for unemployment support. + UnemploymentSupport: ( + URIRef # UnemploymentSupport: this is a benefit for unemployment support. + ) UnincorporatedAssociationCharity: URIRef # UnincorporatedAssociationCharity: Non-profit type referring to a charitable company that is not incorporated (UK). UnitPriceSpecification: URIRef # The price asked for a given offer by the respective organization or person. UnofficialLegalValue: URIRef # Indicates that a document has no particular or special standing (e.g. a republication of a law by a private publisher). - UpdateAction: URIRef # The act of managing by changing/editing the state of the object. + UpdateAction: ( + URIRef # The act of managing by changing/editing the state of the object. + ) Urologic: URIRef # A specific branch of medical science that is concerned with the diagnosis and treatment of diseases pertaining to the urinary tract and the urogenital system. - UsageOrScheduleHealthAspect: URIRef # Content about how, when, frequency and dosage of a topic. + UsageOrScheduleHealthAspect: ( + URIRef # Content about how, when, frequency and dosage of a topic. + ) UseAction: URIRef # The act of applying an object to its intended purpose. 
UsedCondition: URIRef # Indicates that the item is used. UserBlocks: URIRef # UserInteraction and its subtypes is an old way of talking about users interacting with pages. It is generally better to use [[Action]]-based vocabulary, alongside types such as [[Comment]]. @@ -1277,18 +1481,36 @@ class SDO(DefinedNamespace): WatchAction: URIRef # The act of consuming dynamic/moving visual content. Waterfall: URIRef # A waterfall, like Niagara. WearAction: URIRef # The act of dressing oneself in clothing. - WearableMeasurementBack: URIRef # Measurement of the back section, for example of a jacket - WearableMeasurementChestOrBust: URIRef # Measurement of the chest/bust section, for example of a suit - WearableMeasurementCollar: URIRef # Measurement of the collar, for example of a shirt + WearableMeasurementBack: ( + URIRef # Measurement of the back section, for example of a jacket + ) + WearableMeasurementChestOrBust: ( + URIRef # Measurement of the chest/bust section, for example of a suit + ) + WearableMeasurementCollar: ( + URIRef # Measurement of the collar, for example of a shirt + ) WearableMeasurementCup: URIRef # Measurement of the cup, for example of a bra - WearableMeasurementHeight: URIRef # Measurement of the height, for example the heel height of a shoe - WearableMeasurementHips: URIRef # Measurement of the hip section, for example of a skirt + WearableMeasurementHeight: ( + URIRef # Measurement of the height, for example the heel height of a shoe + ) + WearableMeasurementHips: ( + URIRef # Measurement of the hip section, for example of a skirt + ) WearableMeasurementInseam: URIRef # Measurement of the inseam, for example of pants WearableMeasurementLength: URIRef # Represents the length, for example of a dress - WearableMeasurementOutsideLeg: URIRef # Measurement of the outside leg, for example of pants - WearableMeasurementSleeve: URIRef # Measurement of the sleeve length, for example of a shirt - WearableMeasurementTypeEnumeration: URIRef # Enumerates 
common types of measurement for wearables products. - WearableMeasurementWaist: URIRef # Measurement of the waist section, for example of pants + WearableMeasurementOutsideLeg: ( + URIRef # Measurement of the outside leg, for example of pants + ) + WearableMeasurementSleeve: ( + URIRef # Measurement of the sleeve length, for example of a shirt + ) + WearableMeasurementTypeEnumeration: ( + URIRef # Enumerates common types of measurement for wearables products. + ) + WearableMeasurementWaist: ( + URIRef # Measurement of the waist section, for example of pants + ) WearableMeasurementWidth: URIRef # Measurement of the width, for example of shoes WearableSizeGroupBig: URIRef # Size group "Big" for wearables. WearableSizeGroupBoys: URIRef # Size group "Boys" for wearables. @@ -1301,7 +1523,9 @@ class SDO(DefinedNamespace): WearableSizeGroupJuniors: URIRef # Size group "Juniors" for wearables. WearableSizeGroupMaternity: URIRef # Size group "Maternity" for wearables. WearableSizeGroupMens: URIRef # Size group "Mens" for wearables. - WearableSizeGroupMisses: URIRef # Size group "Misses" (also known as "Missy") for wearables. + WearableSizeGroupMisses: ( + URIRef # Size group "Misses" (also known as "Missy") for wearables. + ) WearableSizeGroupPetite: URIRef # Size group "Petite" for wearables. WearableSizeGroupPlus: URIRef # Size group "Plus" for wearables. WearableSizeGroupRegular: URIRef # Size group "Regular" for wearables. @@ -1313,8 +1537,12 @@ class SDO(DefinedNamespace): WearableSizeSystemCN: URIRef # Chinese size system for wearables. WearableSizeSystemContinental: URIRef # Continental size system for wearables. WearableSizeSystemDE: URIRef # German size system for wearables. - WearableSizeSystemEN13402: URIRef # EN 13402 (joint European standard for size labelling of clothes). 
- WearableSizeSystemEnumeration: URIRef # Enumerates common size systems specific for wearable products + WearableSizeSystemEN13402: ( + URIRef # EN 13402 (joint European standard for size labelling of clothes). + ) + WearableSizeSystemEnumeration: ( + URIRef # Enumerates common size systems specific for wearable products + ) WearableSizeSystemEurope: URIRef # European size system for wearables. WearableSizeSystemFR: URIRef # French size system for wearables. WearableSizeSystemGS1: URIRef # GS1 (formerly NRF) size system for wearables. @@ -1331,7 +1559,9 @@ class SDO(DefinedNamespace): WebSite: URIRef # A WebSite is a set of related web pages and other items typically served from a single web domain and accessible via URLs. Wednesday: URIRef # The day of the week between Tuesday and Thursday. WesternConventional: URIRef # The conventional Western system of medicine, that aims to apply the best available evidence gained from the scientific method to clinical decision making. Also known as conventional or Western medicine. - Wholesale: URIRef # The drug's cost represents the wholesale acquisition cost of the drug. + Wholesale: ( + URIRef # The drug's cost represents the wholesale acquisition cost of the drug. + ) WholesaleStore: URIRef # A wholesale store. WinAction: URIRef # The act of achieving victory in a competitive activity. Winery: URIRef # A winery. @@ -1346,11 +1576,15 @@ class SDO(DefinedNamespace): Zoo: URIRef # A zoo. about: URIRef # The subject matter of the content. abridged: URIRef # Indicates whether the book is an abridged edition. - abstract: URIRef # An abstract is a short description that summarizes a [[CreativeWork]]. + abstract: ( + URIRef # An abstract is a short description that summarizes a [[CreativeWork]]. 
+ ) accelerationTime: URIRef # The time needed to accelerate the vehicle from a given start velocity to a given target velocity.\n\nTypical unit code(s): SEC for seconds\n\n* Note: There are unfortunately no standard unit codes for seconds/0..100 km/h or seconds/0..60 mph. Simply use "SEC" for seconds and indicate the velocities in the [[name]] of the [[QuantitativeValue]], or use [[valueReference]] with a [[QuantitativeValue]] of 0..60 mph or 0..100 km/h to specify the reference speeds. acceptedAnswer: URIRef # The answer(s) that has been accepted as best, typically on a Question/Answer site. Sites vary in their selection mechanisms, e.g. drawing on community opinion and/or the view of the Question author. acceptedOffer: URIRef # The offer(s) -- e.g., product, quantity and price combinations -- included in the order. - acceptedPaymentMethod: URIRef # The payment method(s) accepted by seller for this offer. + acceptedPaymentMethod: ( + URIRef # The payment method(s) accepted by seller for this offer. + ) acceptsReservations: URIRef # Indicates whether a FoodEstablishment accepts reservations. Values can be Boolean, an URL at which reservations can be made or (for backwards compatibility) the strings ```Yes``` or ```No```. accessCode: URIRef # Password, PIN, or access code needed for delivery (e.g. from a locker). accessMode: URIRef # The human sensory perceptual system or cognitive faculty through which a person may process or perceive information. Expected values include: auditory, tactile, textual, visual, colorDependent, chartOnVisual, chemOnVisual, diagramOnVisual, mathOnVisual, musicOnVisual, textOnVisual. @@ -1365,13 +1599,19 @@ class SDO(DefinedNamespace): accountId: URIRef # The identifier for the account the payment will be applied to. accountMinimumInflow: URIRef # A minimum amount that has to be paid in every month. accountOverdraftLimit: URIRef # An overdraft is an extension of credit from a lending institution when an account reaches zero. 
An overdraft allows the individual to continue withdrawing money even if the account has no funds in it. Basically the bank allows people to borrow a set amount of money. - accountablePerson: URIRef # Specifies the Person that is legally accountable for the CreativeWork. + accountablePerson: ( + URIRef # Specifies the Person that is legally accountable for the CreativeWork. + ) acquireLicensePage: URIRef # Indicates a page documenting how licenses can be purchased or otherwise acquired, for the current item. - acquiredFrom: URIRef # The organization or person from which the product was acquired. + acquiredFrom: ( + URIRef # The organization or person from which the product was acquired. + ) acrissCode: URIRef # The ACRISS Car Classification Code is a code used by many car rental companies, for classifying vehicles. ACRISS stands for Association of Car Rental Industry Systems and Standards. actionAccessibilityRequirement: URIRef # A set of requirements that a must be fulfilled in order to perform an Action. If more than one value is specied, fulfilling one set of requirements will allow the Action to be performed. actionApplication: URIRef # An application that can complete the request. - actionOption: URIRef # A sub property of object. The options subject to this action. + actionOption: ( + URIRef # A sub property of object. The options subject to this action. + ) actionPlatform: URIRef # The high level platform(s) where the Action can be performed for the given URL. To specify a specific application or operating system instance, use actionApplication. actionStatus: URIRef # Indicates the current disposition of the Action. actionableFeedbackPolicy: URIRef # For a [[NewsMediaOrganization]] or other news-related [[Organization]], a statement about public engagement activities (for news media, the newsroom’s), including involving the public - digitally or otherwise -- in coverage decisions, reporting and activities after publication. 
@@ -1381,7 +1621,9 @@ class SDO(DefinedNamespace): actor: URIRef # An actor, e.g. in tv, radio, movie, video games etc., or in an event. Actors can be associated with individual items or with a series, episode, clip. actors: URIRef # An actor, e.g. in tv, radio, movie, video games etc. Actors can be associated with individual items or with a series, episode, clip. addOn: URIRef # An additional offer that can only be obtained in combination with the first base offer (e.g. supplements and extensions that are available for a surcharge). - additionalName: URIRef # An additional name for a Person, can be used for a middle name. + additionalName: ( + URIRef # An additional name for a Person, can be used for a middle name. + ) additionalNumberOfGuests: URIRef # If responding yes, the number of guests who will attend in addition to the invitee. additionalProperty: URIRef # A property-value pair representing an additional characteristics of the entitity, e.g. a product feature or another characteristic for which there is no matching property in schema.org.\n\nNote: Publishers should be aware that applications designed to use specific schema.org properties (e.g. https://schema.org/width, https://schema.org/color, https://schema.org/gtin13, ...) will typically expect such data to be provided using those properties, rather than using the generic property/value mechanism. additionalType: URIRef # An additional type for the item, typically used for adding more specific types from external vocabularies in microdata syntax. This is a relationship between something and a class that the thing is in. In RDFa syntax, it is better to use the native RDFa syntax - the 'typeof' attribute - for multiple types. Schema.org tools may have only weaker understanding of extra types, in particular those defined externally. @@ -1390,7 +1632,9 @@ class SDO(DefinedNamespace): addressCountry: URIRef # The country. For example, USA. 
You can also provide the two-letter [ISO 3166-1 alpha-2 country code](http://en.wikipedia.org/wiki/ISO_3166-1). addressLocality: URIRef # The locality in which the street address is, and which is in the region. For example, Mountain View. addressRegion: URIRef # The region in which the locality is, and which is in the country. For example, California or another appropriate first-level [Administrative division](https://en.wikipedia.org/wiki/List_of_administrative_divisions_by_country) - administrationRoute: URIRef # A route by which this drug may be administered, e.g. 'oral'. + administrationRoute: ( + URIRef # A route by which this drug may be administered, e.g. 'oral'. + ) advanceBookingRequirement: URIRef # The amount of time that is required between accepting the offer and the actual usage of the resource or service. adverseOutcome: URIRef # A possible complication and/or side effect of this therapy. If it is known that an adverse outcome is serious (resulting in death, disability, or permanent damage; requiring hospitalization; or is otherwise life-threatening or requires immediate medical attention), tag it as a seriouseAdverseOutcome instead. affectedBy: URIRef # Drugs that affect the test's results. @@ -1402,7 +1646,9 @@ class SDO(DefinedNamespace): album: URIRef # A music album. albumProductionType: URIRef # Classification of the album by it's type of content: soundtrack, live album, studio album, etc. albumRelease: URIRef # A release of this album. - albumReleaseType: URIRef # The kind of release which this album is: single, EP or album. + albumReleaseType: ( + URIRef # The kind of release which this album is: single, EP or album. + ) albums: URIRef # A collection of music albums. alcoholWarning: URIRef # Any precaution, guidance, contraindication, etc. related to consumption of alcohol while taking this drug. algorithm: URIRef # The algorithm or rules to follow to compute the score. 
@@ -1420,22 +1666,34 @@ class SDO(DefinedNamespace): answerCount: URIRef # The number of answers this question has received. answerExplanation: URIRef # A step-by-step or full explanation about Answer. Can outline how this Answer was achieved or contain more broad clarification or statement about it. antagonist: URIRef # The muscle whose action counteracts the specified muscle. - appearance: URIRef # Indicates an occurence of a [[Claim]] in some [[CreativeWork]]. + appearance: ( + URIRef # Indicates an occurence of a [[Claim]] in some [[CreativeWork]]. + ) applicableLocation: URIRef # The location in which the status applies. applicantLocationRequirements: URIRef # The location(s) applicants can apply from. This is usually used for telecommuting jobs where the applicant does not need to be in a physical office. Note: This should not be used for citizenship or work visa requirements. application: URIRef # An application that can complete the request. - applicationCategory: URIRef # Type of software application, e.g. 'Game, Multimedia'. - applicationContact: URIRef # Contact details for further information relevant to this job posting. + applicationCategory: ( + URIRef # Type of software application, e.g. 'Game, Multimedia'. + ) + applicationContact: ( + URIRef # Contact details for further information relevant to this job posting. + ) applicationDeadline: URIRef # The date at which the program stops collecting applications for the next enrollment cycle. applicationStartDate: URIRef # The date at which the program begins collecting applications for the next enrollment cycle. - applicationSubCategory: URIRef # Subcategory of the application, e.g. 'Arcade Game'. + applicationSubCategory: ( + URIRef # Subcategory of the application, e.g. 'Arcade Game'. + ) applicationSuite: URIRef # The name of the application suite to which the application belongs (e.g. Excel belongs to Office). 
appliesToDeliveryMethod: URIRef # The delivery method(s) to which the delivery charge or payment charge specification applies. appliesToPaymentMethod: URIRef # The payment method(s) to which the payment charge specification applies. archiveHeld: URIRef # Collection, [fonds](https://en.wikipedia.org/wiki/Fonds), or item held, kept or maintained by an [[ArchiveOrganization]]. archivedAt: URIRef # Indicates a page or other link involved in archival of a [[CreativeWork]]. In the case of [[MediaReview]], the items in a [[MediaReviewItem]] may often become inaccessible, but be archived by archival, journalistic, activist, or law enforcement organizations. In such cases, the referenced page may not directly publish the content. - area: URIRef # The area within which users can expect to reach the broadcast service. - areaServed: URIRef # The geographic area where a service or offered item is provided. + area: ( + URIRef # The area within which users can expect to reach the broadcast service. + ) + areaServed: ( + URIRef # The geographic area where a service or offered item is provided. + ) arrivalAirport: URIRef # The airport where the flight terminates. arrivalBoatTerminal: URIRef # The terminal or port from which the boat arrives. arrivalBusStop: URIRef # The stop or station from which the bus arrives. @@ -1454,7 +1712,9 @@ class SDO(DefinedNamespace): artworkSurface: URIRef # The supporting materials for the artwork, e.g. Canvas, Paper, Wood, Board, etc. aspect: URIRef # An aspect of medical practice that is considered on the page, such as 'diagnosis', 'treatment', 'causes', 'prognosis', 'etiology', 'epidemiology', etc. assembly: URIRef # Library file name e.g., mscorlib.dll, system.web.dll. - assemblyVersion: URIRef # Associated product/technology version. e.g., .NET Framework 4.5. + assemblyVersion: ( + URIRef # Associated product/technology version. e.g., .NET Framework 4.5. 
+ ) assesses: URIRef # The item being described is intended to assess the competency or learning outcome defined by the referenced term. associatedAnatomy: URIRef # The anatomy of the underlying organ system or structures associated with this entity. associatedArticle: URIRef # A NewsArticle associated with the Media Object. @@ -1467,7 +1727,9 @@ class SDO(DefinedNamespace): athlete: URIRef # A person that acts as performing member of a sports team; a player as opposed to a coach. attendee: URIRef # A person or organization attending the event. attendees: URIRef # A person attending the event. - audience: URIRef # An intended audience, i.e. a group for whom something was created. + audience: ( + URIRef # An intended audience, i.e. a group for whom something was created. + ) audienceType: URIRef # The target group associated with a given audience (e.g. veterans, car owners, musicians, etc.). audio: URIRef # An embedded audio object. authenticator: URIRef # The Organization responsible for authenticating the user's subscription. For example, many media apps require a cable/satellite provider to authenticate your subscription before playing media. @@ -1478,21 +1740,29 @@ class SDO(DefinedNamespace): availableAtOrFrom: URIRef # The place(s) from which the offer can be obtained (e.g. store locations). availableChannel: URIRef # A means of accessing the service (e.g. a phone bank, a web site, a location, etc.). availableDeliveryMethod: URIRef # The delivery method(s) available for this offer. - availableFrom: URIRef # When the item is available for pickup from the store, locker, etc. + availableFrom: ( + URIRef # When the item is available for pickup from the store, locker, etc. + ) availableIn: URIRef # The location in which the strength is available. availableLanguage: URIRef # A language someone may use with or at the item, service or place. Please use one of the language codes from the [IETF BCP 47 standard](http://tools.ietf.org/html/bcp47). 
See also [[inLanguage]] availableOnDevice: URIRef # Device required to run the application. Used in cases where a specific make/model is required to run the application. availableService: URIRef # A medical service available from this provider. availableStrength: URIRef # An available dosage strength for the drug. availableTest: URIRef # A diagnostic test or procedure offered by this lab. - availableThrough: URIRef # After this date, the item will no longer be available for pickup. + availableThrough: ( + URIRef # After this date, the item will no longer be available for pickup. + ) award: URIRef # An award won by or for this item. awards: URIRef # Awards won by or for this item. awayTeam: URIRef # The away team in a sports event. backstory: URIRef # For an [[Article]], typically a [[NewsArticle]], the backstory property provides a textual summary giving a brief explanation of why and how an article was created. In a journalistic setting this could include information about reporting process, methods, interviews, data sources, etc. bankAccountType: URIRef # The type of a bank account. - baseSalary: URIRef # The base salary of the job or of an employee in an EmployeeRole. - bccRecipient: URIRef # A sub property of recipient. The recipient blind copied on a message. + baseSalary: ( + URIRef # The base salary of the job or of an employee in an EmployeeRole. + ) + bccRecipient: ( + URIRef # A sub property of recipient. The recipient blind copied on a message. + ) bed: URIRef # The type of bed or beds included in the accommodation. For the single case of just one bed of a certain type, you use bed directly with a text. If you want to indicate the quantity of a certain kind of bed, use an instance of BedDetails. For more detailed information, use the amenityFeature property. beforeMedia: URIRef # A media object representing the circumstances before performing this direction. 
beneficiaryBank: URIRef # A bank or bank’s branch, financial institution or international financial institution operating the beneficiary’s bank account or releasing funds for the beneficiary. @@ -1504,17 +1774,25 @@ class SDO(DefinedNamespace): billingIncrement: URIRef # This property specifies the minimal quantity and rounding increment that will be the basis for the billing. The unit of measurement is specified by the unitCode property. billingPeriod: URIRef # The time interval used to compute the invoice. billingStart: URIRef # Specifies after how much time this price (or price component) becomes valid and billing starts. Can be used, for example, to model a price increase after the first year of a subscription. The unit of measurement is specified by the unitCode property. - bioChemInteraction: URIRef # A BioChemEntity that is known to interact with this item. + bioChemInteraction: ( + URIRef # A BioChemEntity that is known to interact with this item. + ) bioChemSimilarity: URIRef # A similar BioChemEntity, e.g., obtained by fingerprint similarity algorithms. - biologicalRole: URIRef # A role played by the BioChemEntity within a biological context. + biologicalRole: ( + URIRef # A role played by the BioChemEntity within a biological context. + ) biomechnicalClass: URIRef # The biomechanical properties of the bone. birthDate: URIRef # Date of birth. birthPlace: URIRef # The place where the person was born. bitrate: URIRef # The bitrate of the media object. blogPost: URIRef # A posting that is part of this blog. blogPosts: URIRef # Indicates a post that is part of a [[Blog]]. Note that historically, what we term a "Blog" was once known as a "weblog", and that what we term a "BlogPosting" is now often colloquially referred to as a "blog". - bloodSupply: URIRef # The blood vessel that carries blood from the heart to the muscle. - boardingGroup: URIRef # The airline-specific indicator of boarding order / preference. 
+ bloodSupply: ( + URIRef # The blood vessel that carries blood from the heart to the muscle. + ) + boardingGroup: ( + URIRef # The airline-specific indicator of boarding order / preference. + ) boardingPolicy: URIRef # The type of boarding policy used by the airline (e.g. zone-based or group-based). bodyLocation: URIRef # Location in the body of the anatomical structure. bodyType: URIRef # Indicates the design and body style of the vehicle (e.g. station wagon, hatchback, etc.). @@ -1530,12 +1808,16 @@ class SDO(DefinedNamespace): brand: URIRef # The brand(s) associated with a product or service, or the brand(s) maintained by an organization or business person. breadcrumb: URIRef # A set of links that can help a user understand and navigate a website hierarchy. breastfeedingWarning: URIRef # Any precaution, guidance, contraindication, etc. related to this drug's use by breastfeeding mothers. - broadcastAffiliateOf: URIRef # The media network(s) whose content is broadcast on this station. + broadcastAffiliateOf: ( + URIRef # The media network(s) whose content is broadcast on this station. + ) broadcastChannelId: URIRef # The unique address by which the BroadcastService can be identified in a provider lineup. In US, this is typically a number. broadcastDisplayName: URIRef # The name displayed in the channel guide. For many US affiliates, it is the network name. broadcastFrequency: URIRef # The frequency used for over-the-air broadcasts. Numeric values or simple ranges e.g. 87-99. In addition a shortcut idiom is supported for frequences of AM and FM radio channels, e.g. "87 FM". broadcastFrequencyValue: URIRef # The frequency in MHz for a particular broadcast. - broadcastOfEvent: URIRef # The event being broadcast such as a sporting event or awards ceremony. + broadcastOfEvent: ( + URIRef # The event being broadcast such as a sporting event or awards ceremony. + ) broadcastServiceTier: URIRef # The type of service required to have access to the channel (e.g. 
Standard or Premium). broadcastSignalModulation: URIRef # The modulation (e.g. FM, AM, etc) used by a particular broadcast service. broadcastSubChannel: URIRef # The subchannel used for the broadcast. @@ -1566,7 +1848,9 @@ class SDO(DefinedNamespace): catalogNumber: URIRef # The catalog number for the release. category: URIRef # A category for the item. Greater signs or slashes can be used to informally indicate a category hierarchy. causeOf: URIRef # The condition, complication, symptom, sign, etc. caused. - ccRecipient: URIRef # A sub property of recipient. The recipient copied on a message. + ccRecipient: ( + URIRef # A sub property of recipient. The recipient copied on a message. + ) character: URIRef # Fictional person connected with a creative work. characterAttribute: URIRef # A piece of data that represents a particular aspect of a fictional character (skill, power, character points, advantage, disadvantage). characterName: URIRef # The name of a character played in some acting or performing role, i.e. in a PerformanceRole. @@ -1574,7 +1858,9 @@ class SDO(DefinedNamespace): checkinTime: URIRef # The earliest someone may check into a lodging establishment. checkoutTime: URIRef # The latest someone may check out of a lodging establishment. chemicalComposition: URIRef # The chemical composition describes the identity and relative ratio of the chemical elements that make up the substance. - chemicalRole: URIRef # A role played by the BioChemEntity within a chemical context. + chemicalRole: ( + URIRef # A role played by the BioChemEntity within a chemical context. + ) childMaxAge: URIRef # Maximal age of the child. childMinAge: URIRef # Minimal age of the child. childTaxon: URIRef # Closest child taxa of the taxon in question. @@ -1583,7 +1869,9 @@ class SDO(DefinedNamespace): circle: URIRef # A circle is the circular region of a specified radius centered at a specified latitude and longitude. A circle is expressed as a pair followed by a radius in meters. 
citation: URIRef # A citation or reference to another creative work, such as another publication, web page, scholarly article, etc. claimInterpreter: URIRef # For a [[Claim]] interpreted from [[MediaObject]] content sed to indicate a claim contained, implied or refined from the content of a [[MediaObject]]. - claimReviewed: URIRef # A short summary of the specific claims reviewed in a ClaimReview. + claimReviewed: ( + URIRef # A short summary of the specific claims reviewed in a ClaimReview. + ) clincalPharmacology: URIRef # Description of the absorption and elimination of drugs, including their concentration (pharmacokinetics, pK) and biological effects (pharmacodynamics, pD). clinicalPharmacology: URIRef # Description of the absorption and elimination of drugs, including their concentration (pharmacokinetics, pK) and biological effects (pharmacodynamics, pD). clipNumber: URIRef # Position of the clip within an ordered group of clips. @@ -1609,8 +1897,12 @@ class SDO(DefinedNamespace): composer: URIRef # The person or organization who wrote a composition, or who is the composer of a work performed at some event. comprisedOf: URIRef # Specifying something physically contained by something else. Typically used here for the underlying anatomical structures, such as organs, that comprise the anatomical system. conditionsOfAccess: URIRef # Conditions that affect the availability of, or method(s) of access to, an item. Typically used for real world items such as an [[ArchiveComponent]] held by an [[ArchiveOrganization]]. This property is not suitable for use as a general Web access control mechanism. It is expressed only in natural language.\n\nFor example "Available by appointment from the Reading Room" or "Accessible only from logged-in accounts ". - confirmationNumber: URIRef # A number that confirms the given order or payment has been received. - connectedTo: URIRef # Other anatomical structures to which this structure is connected. 
+ confirmationNumber: ( + URIRef # A number that confirms the given order or payment has been received. + ) + connectedTo: ( + URIRef # Other anatomical structures to which this structure is connected. + ) constrainingProperty: URIRef # Indicates a property used as a constraint to define a [[StatisticalPopulation]] with respect to the set of entities corresponding to an indicated type (via [[populationType]]). contactOption: URIRef # An option available on this contact point (e.g. a toll-free number or support for hearing-impaired callers). contactPoint: URIRef # A contact point for a person or organization. @@ -1622,7 +1914,9 @@ class SDO(DefinedNamespace): containsPlace: URIRef # The basic containment relation between a place and another that it contains. containsSeason: URIRef # A season that is part of the media series. contentLocation: URIRef # The location depicted or described in the content. For example, the location in a photograph or painting. - contentRating: URIRef # Official rating of a piece of content—for example,'MPAA PG-13'. + contentRating: ( + URIRef # Official rating of a piece of content—for example,'MPAA PG-13'. + ) contentReferenceTime: URIRef # The specific time described by a creative work, for works (e.g. articles, video objects etc.) that emphasise a particular moment within an Event. contentSize: URIRef # File size in (mega/kilo) bytes. contentType: URIRef # The supported content type(s) for an EntryPoint response. @@ -1631,7 +1925,9 @@ class SDO(DefinedNamespace): contributor: URIRef # A secondary contributor to the CreativeWork or Event. cookTime: URIRef # The time it takes to actually cook the dish, in [ISO 8601 duration format](http://en.wikipedia.org/wiki/ISO_8601). cookingMethod: URIRef # The method of cooking, such as Frying, Steaming, ... - copyrightHolder: URIRef # The party holding the legal copyright to the CreativeWork. + copyrightHolder: ( + URIRef # The party holding the legal copyright to the CreativeWork. 
+ ) copyrightNotice: URIRef # Text of a notice appropriate for describing the copyright aspects of this Creative Work, ideally indicating the owner of the copyright for the Work. copyrightYear: URIRef # The year during which the claimed copyright for the CreativeWork was first asserted. correction: URIRef # Indicates a correction to a [[CreativeWork]], either via a [[CorrectionComment]], textually or in another document. @@ -1645,7 +1941,9 @@ class SDO(DefinedNamespace): countryOfAssembly: URIRef # The place where the product was assembled. countryOfLastProcessing: URIRef # The place where the item (typically [[Product]]) was last processed and tested before importation. countryOfOrigin: URIRef # The country of origin of something, including products as well as creative works such as movie and TV content. In the case of TV and movie, this would be the country of the principle offices of the production company or individual responsible for the movie. For other kinds of [[CreativeWork]] it is difficult to provide fully general guidance, and properties such as [[contentLocation]] and [[locationCreated]] may be more applicable. In the case of products, the country of origin of the product. The exact interpretation of this may vary by context and product type, and cannot be fully enumerated here. - course: URIRef # A sub property of location. The course where this action was taken. + course: ( + URIRef # A sub property of location. The course where this action was taken. + ) courseCode: URIRef # The identifier for the [[Course]] used by the course [[provider]] (e.g. CS101 or 6.001). courseMode: URIRef # The medium or means of delivery of the course instance or the mode of study, either as a text label (e.g. "online", "onsite" or "blended"; "synchronous" or "asynchronous"; "full-time" or "part-time") or as a URL reference to a term from a controlled vocabulary (e.g. https://ceds.ed.gov/element/001311#Asynchronous ). 
coursePrerequisites: URIRef # Requirements for taking the Course. May be completion of another [[Course]] or a textual description like "permission of instructor". Requirements may be a pre-requisite competency, referenced using [[AlignmentObject]]. @@ -1666,7 +1964,9 @@ class SDO(DefinedNamespace): customerRemorseReturnLabelSource: URIRef # The method (from an enumeration) by which the customer obtains a return shipping label for a product returned due to customer remorse. customerRemorseReturnShippingFeesAmount: URIRef # The amount of shipping costs if a product is returned due to customer remorse. Applicable when property [[customerRemorseReturnFees]] equals [[ReturnShippingFees]]. cutoffTime: URIRef # Order cutoff time allows merchants to describe the time after which they will no longer process orders received on that day. For orders processed after cutoff time, one day gets added to the delivery time estimate. This property is expected to be most typically used via the [[ShippingRateSettings]] publication pattern. The time is indicated using the ISO-8601 Time format, e.g. "23:30:00-05:00" would represent 6:30 pm Eastern Standard Time (EST) which is 5 hours behind Coordinated Universal Time (UTC). - cvdCollectionDate: URIRef # collectiondate - Date for which patient counts are reported. + cvdCollectionDate: ( + URIRef # collectiondate - Date for which patient counts are reported. + ) cvdFacilityCounty: URIRef # Name of the County of the NHSN facility that this data record applies to. Use [[cvdFacilityId]] to identify the facility. To provide other details, [[healthcareReportingData]] can be used on a [[Hospital]] entry. cvdFacilityId: URIRef # Identifier of the NHSN facility that this data record applies to. Use [[cvdFacilityCounty]] to indicate the county. To provide other details, [[healthcareReportingData]] can be used on a [[Hospital]] entry. 
cvdNumBeds: URIRef # numbeds - HOSPITAL INPATIENT BEDS: Inpatient beds, including all staffed, licensed, and overflow (surge) beds used for inpatients. @@ -1682,7 +1982,9 @@ class SDO(DefinedNamespace): cvdNumTotBeds: URIRef # numtotbeds - ALL HOSPITAL BEDS: Total number of all Inpatient and outpatient beds, including all staffed,ICU, licensed, and overflow (surge) beds used for inpatients or outpatients. cvdNumVent: URIRef # numvent - MECHANICAL VENTILATORS: Total number of ventilators available. cvdNumVentUse: URIRef # numventuse - MECHANICAL VENTILATORS IN USE: Total number of ventilators in use. - dataFeedElement: URIRef # An item within in a data feed. Data feeds may have many elements. + dataFeedElement: ( + URIRef # An item within in a data feed. Data feeds may have many elements. + ) dataset: URIRef # A dataset contained in this catalog. datasetTimeInterval: URIRef # The range of temporal applicability of a dataset, e.g. for a 2011 census dataset, the year 2011 (in ISO 8601 time interval format). dateCreated: URIRef # The date on which the CreativeWork was created or the item was added to a DataFeed. @@ -1692,7 +1994,9 @@ class SDO(DefinedNamespace): datePosted: URIRef # Publication date of an online listing. datePublished: URIRef # Date of first broadcast/publication. dateRead: URIRef # The date/time at which the message has been read by the recipient if a single recipient exists. - dateReceived: URIRef # The date/time the message was received if a single recipient exists. + dateReceived: ( + URIRef # The date/time the message was received if a single recipient exists. + ) dateSent: URIRef # The date/time at which the message was sent. dateVehicleFirstRegistered: URIRef # The date of the first registration of the vehicle with the respective public authorities. 
dateline: URIRef # A [dateline](https://en.wikipedia.org/wiki/Dateline) is a brief piece of text included in news articles that describes where and when the story was written or filed though the date is often omitted. Sometimes only a placename is provided. Structured representations of dateline-related information can also be expressed more explicitly using [[locationCreated]] (which represents where a work was created e.g. where a news report was written). For location depicted or described in the content, use [[contentLocation]]. Dateline summaries are oriented more towards human readers than towards automated processing, and can vary substantially. Some examples: "BEIRUT, Lebanon, June 2.", "Paris, France", "December 19, 2017 11:43AM Reporting from Washington", "Beijing/Moscow", "QUEZON CITY, Philippines". @@ -1731,7 +2035,9 @@ class SDO(DefinedNamespace): discountCode: URIRef # Code used to redeem a discount. discountCurrency: URIRef # The currency of the discount.\n\nUse standard formats: [ISO 4217 currency format](http://en.wikipedia.org/wiki/ISO_4217) e.g. "USD"; [Ticker symbol](https://en.wikipedia.org/wiki/List_of_cryptocurrencies) for cryptocurrencies e.g. "BTC"; well known names for [Local Exchange Tradings Systems](https://en.wikipedia.org/wiki/Local_exchange_trading_system) (LETS) and other currency types e.g. "Ithaca HOUR". discusses: URIRef # Specifies the CreativeWork associated with the UserComment. - discussionUrl: URIRef # A link to the page containing the comments of the CreativeWork. + discussionUrl: ( + URIRef # A link to the page containing the comments of the CreativeWork. + ) diseasePreventionInfo: URIRef # Information about disease prevention. diseaseSpreadStatistics: URIRef # Statistical information about the spread of a disease, either as [[WebContent]], or described directly as a [[Dataset]], or the specific [[Observation]]s in the dataset. When a [[WebContent]] URL is provided, the page indicated might also contain more such markup. 
dissolutionDate: URIRef # The date that this organization was dissolved. @@ -1740,7 +2046,9 @@ class SDO(DefinedNamespace): distribution: URIRef # A downloadable form of this dataset, at a specific location, in a specific format. diversityPolicy: URIRef # Statement on diversity policy by an [[Organization]] e.g. a [[NewsMediaOrganization]]. For a [[NewsMediaOrganization]], a statement describing the newsroom’s diversity policy on both staffing and sources, typically providing staffing data. diversityStaffingReport: URIRef # For an [[Organization]] (often but not necessarily a [[NewsMediaOrganization]]), a report on staffing diversity issues. In a news context this might be for example ASNE or RTDNA (US) reports, or self-reported. - documentation: URIRef # Further documentation describing the Web API in more detail. + documentation: ( + URIRef # Further documentation describing the Web API in more detail. + ) doesNotShip: URIRef # Indicates when shipping to a particular [[shippingDestination]] is not available. domainIncludes: URIRef # Relates a property to a class that is (one of) the type(s) the property is expected to be used on. domiciledMortgage: URIRef # Whether borrower is a resident of the jurisdiction where the property is located. @@ -1768,17 +2076,23 @@ class SDO(DefinedNamespace): editEIDR: URIRef # An [EIDR](https://eidr.org/) (Entertainment Identifier Registry) [[identifier]] representing a specific edit / edition for a work of film or television. For example, the motion picture known as "Ghostbusters" whose [[titleEIDR]] is "10.5240/7EC7-228A-510A-053E-CBB8-J", has several edits e.g. "10.5240/1F2A-E1C5-680A-14C6-E76B-I" and "10.5240/8A35-3BEE-6497-5D12-9E4F-3". Since schema.org types like [[Movie]] and [[TVEpisode]] can be used for both works and their multiple expressions, it is possible to use [[titleEIDR]] alone (for a general description), or alongside [[editEIDR]] for a more edit-specific description. 
editor: URIRef # Specifies the Person who edited the CreativeWork. eduQuestionType: URIRef # For questions that are part of learning resources (e.g. Quiz), eduQuestionType indicates the format of question being given. Example: "Multiple choice", "Open ended", "Flashcard". - educationRequirements: URIRef # Educational background needed for the position or Occupation. + educationRequirements: ( + URIRef # Educational background needed for the position or Occupation. + ) educationalAlignment: URIRef # An alignment to an established educational framework. This property should not be used where the nature of the alignment can be described using a simple property, for example to express that a resource [[teaches]] or [[assesses]] a competency. educationalCredentialAwarded: URIRef # A description of the qualification, award, certificate, diploma or other educational credential awarded as a consequence of successful completion of this course or program. - educationalFramework: URIRef # The framework to which the resource being described is aligned. + educationalFramework: ( + URIRef # The framework to which the resource being described is aligned. + ) educationalLevel: URIRef # The level in terms of progression through an educational or training context. Examples of educational levels include 'beginner', 'intermediate' or 'advanced', and formal sets of level indicators. educationalProgramMode: URIRef # Similar to courseMode, The medium or means of delivery of the program as a whole. The value may either be a text label (e.g. "online", "onsite" or "blended"; "synchronous" or "asynchronous"; "full-time" or "part-time") or a URL reference to a term from a controlled vocabulary (e.g. https://ceds.ed.gov/element/001311#Asynchronous ). educationalRole: URIRef # An educationalRole of an EducationalAudience. educationalUse: URIRef # The purpose of a work in the context of education; for example, 'assignment', 'group work'. 
elevation: URIRef # The elevation of a location ([WGS 84](https://en.wikipedia.org/wiki/World_Geodetic_System)). Values may be of the form 'NUMBER UNIT_OF_MEASUREMENT' (e.g., '1,000 m', '3,200 ft') while numbers alone should be assumed to be a value in meters. eligibilityToWorkRequirement: URIRef # The legal requirements such as citizenship, visa and other documentation required for an applicant to this job. - eligibleCustomerType: URIRef # The type(s) of customers for which the given offer is valid. + eligibleCustomerType: ( + URIRef # The type(s) of customers for which the given offer is valid. + ) eligibleDuration: URIRef # The duration for which the given offer is valid. eligibleQuantity: URIRef # The interval and unit of measurement of ordering quantities for which the offer or price specification is valid. This allows e.g. specifying that a certain freight charge is valid only for a certain quantity. eligibleRegion: URIRef # The ISO 3166-1 (ISO 3166-1 alpha-2) or ISO 3166-2 code, the place, or the GeoShape for the geo-political region(s) for which the offer or delivery charge specification is valid.\n\nSee also [[ineligibleRegion]]. @@ -1810,30 +2124,46 @@ class SDO(DefinedNamespace): engineType: URIRef # The type of engine or engines powering the vehicle. entertainmentBusiness: URIRef # A sub property of location. The entertainment business where the action occurred. epidemiology: URIRef # The characteristics of associated patients, such as age, gender, race etc. - episode: URIRef # An episode of a tv, radio or game media within a series or season. - episodeNumber: URIRef # Position of the episode within an ordered group of episodes. + episode: ( + URIRef # An episode of a tv, radio or game media within a series or season. + ) + episodeNumber: ( + URIRef # Position of the episode within an ordered group of episodes. + ) episodes: URIRef # An episode of a TV/radio series or season. 
equal: URIRef # This ordering relation for qualitative values indicates that the subject is equal to the object. error: URIRef # For failed actions, more information on the cause of the failure. estimatedCost: URIRef # The estimated cost of the supply or supplies consumed when performing instructions. estimatedFlightDuration: URIRef # The estimated time the flight will take. estimatedSalary: URIRef # An estimated salary for a job posting or occupation, based on a variety of variables including, but not limited to industry, job title, and location. Estimated salaries are often computed by outside organizations rather than the hiring organization, who may not have committed to the estimated value. - estimatesRiskOf: URIRef # The condition, complication, or symptom whose risk is being estimated. + estimatesRiskOf: ( + URIRef # The condition, complication, or symptom whose risk is being estimated. + ) ethicsPolicy: URIRef # Statement about ethics policy, e.g. of a [[NewsMediaOrganization]] regarding journalistic and publishing practices, or of a [[Restaurant]], a page describing food source policies. In the case of a [[NewsMediaOrganization]], an ethicsPolicy is typically a statement describing the personal, organizational, and corporate standards of behavior expected by the organization. event: URIRef # Upcoming or past event associated with this place, organization, or action. eventAttendanceMode: URIRef # The eventAttendanceMode of an event indicates whether it occurs online, offline, or a mix. eventSchedule: URIRef # Associates an [[Event]] with a [[Schedule]]. There are circumstances where it is preferable to share a schedule for a series of repeating events rather than data on the individual events themselves. For example, a website or application might prefer to publish a schedule for a weekly gym class rather than provide data on every event. A schedule could be processed by applications to add forthcoming events to a calendar. 
An [[Event]] that is associated with a [[Schedule]] using this property should not have [[startDate]] or [[endDate]] properties. These are instead defined within the associated [[Schedule]], this avoids any ambiguity for clients using the data. The property might have repeated values to specify different schedules, e.g. for different months or seasons. eventStatus: URIRef # An eventStatus of an event represents its status; particularly useful when an event is cancelled or rescheduled. - events: URIRef # Upcoming or past events associated with this place or organization. + events: ( + URIRef # Upcoming or past events associated with this place or organization. + ) evidenceLevel: URIRef # Strength of evidence of the data used to formulate the guideline (enumerated). evidenceOrigin: URIRef # Source of the data used to formulate the guidance, e.g. RCT, consensus opinion, etc. exampleOfWork: URIRef # A creative work that this work is an example/instance/realization/derivation of. exceptDate: URIRef # Defines a [[Date]] or [[DateTime]] during which a scheduled [[Event]] will not take place. The property allows exceptions to a [[Schedule]] to be specified. If an exception is specified as a [[DateTime]] then only the event that would have started at that specific date and time should be excluded from the schedule. If an exception is specified as a [[Date]] then any event that is scheduled for that 24 hour period should be excluded from the schedule. This allows a whole day to be excluded from the schedule without having to itemise every scheduled event. exchangeRateSpread: URIRef # The difference between the price at which a broker or other intermediary buys and sells foreign currency. - executableLibraryName: URIRef # Library file name e.g., mscorlib.dll, system.web.dll. - exerciseCourse: URIRef # A sub property of location. The course where this action was taken. - exercisePlan: URIRef # A sub property of instrument. The exercise plan used on this action. 
- exerciseRelatedDiet: URIRef # A sub property of instrument. The diet used in this action. + executableLibraryName: ( + URIRef # Library file name e.g., mscorlib.dll, system.web.dll. + ) + exerciseCourse: ( + URIRef # A sub property of location. The course where this action was taken. + ) + exercisePlan: ( + URIRef # A sub property of instrument. The exercise plan used on this action. + ) + exerciseRelatedDiet: ( + URIRef # A sub property of instrument. The diet used in this action. + ) exerciseType: URIRef # Type(s) of exercise or activity, such as strength training, flexibility training, aerobics, cardiac rehabilitation, etc. exifData: URIRef # exif data for this object. expectedArrivalFrom: URIRef # The earliest date the package may arrive. @@ -1861,9 +2191,13 @@ class SDO(DefinedNamespace): floorLevel: URIRef # The floor level for an [[Accommodation]] in a multi-storey building. Since counting systems [vary internationally](https://en.wikipedia.org/wiki/Storey#Consecutive_number_floor_designations), the local system should be used where possible. floorLimit: URIRef # A floor limit is the amount of money above which credit card transactions must be authorized. floorSize: URIRef # The size of the accommodation, e.g. in square meter or squarefoot. Typical unit code(s): MTK for square meter, FTK for square foot, or YDK for square yard - followee: URIRef # A sub property of object. The person or organization being followed. + followee: ( + URIRef # A sub property of object. The person or organization being followed. + ) follows: URIRef # The most generic uni-directional social relation. - followup: URIRef # Typical or recommended followup care after the procedure is performed. + followup: ( + URIRef # Typical or recommended followup care after the procedure is performed. + ) foodEstablishment: URIRef # A sub property of location. The specific food establishment where the action occurred. foodEvent: URIRef # A sub property of location. 
The specific food event where the action occurred. foodWarning: URIRef # Any precaution, guidance, contraindication, etc. related to consumption of specific foods while taking this drug. @@ -1940,7 +2274,9 @@ class SDO(DefinedNamespace): hasMenu: URIRef # Either the actual menu as a structured representation, as text, or a URL of the menu. hasMenuItem: URIRef # A food or drink item contained in a menu or menu section. hasMenuSection: URIRef # A subgrouping of the menu (by dishes, course, serving time period, etc.). - hasMerchantReturnPolicy: URIRef # Specifies a MerchantReturnPolicy that may be applicable. + hasMerchantReturnPolicy: ( + URIRef # Specifies a MerchantReturnPolicy that may be applicable. + ) hasMolecularFunction: URIRef # Molecular function performed by this BioChemEntity; please use PropertyValue if you want to include any evidence. hasOccupation: URIRef # The Person's occupation. For past professions, use Role for expressing dates. hasOfferCatalog: URIRef # Indicates an OfferCatalog listing for this Organization, Person, or Service. @@ -1956,12 +2292,16 @@ class SDO(DefinedNamespace): healthPlanCopayOption: URIRef # Whether the copay is before or after deductible, etc. TODO: Is this a closed set? healthPlanCostSharing: URIRef # Whether The costs to the patient for services under this network or formulary. healthPlanDrugOption: URIRef # TODO. - healthPlanDrugTier: URIRef # The tier(s) of drugs offered by this formulary or insurance plan. + healthPlanDrugTier: ( + URIRef # The tier(s) of drugs offered by this formulary or insurance plan. + ) healthPlanId: URIRef # The 14-character, HIOS-generated Plan ID number. (Plan IDs must be unique, even across different markets.) healthPlanMarketingUrl: URIRef # The URL that goes directly to the plan brochure for the specific standard plan or plan variation. healthPlanNetworkId: URIRef # Name or unique ID of network. (Networks are often reused across different insurance plans). 
healthPlanNetworkTier: URIRef # The tier(s) for this network. - healthPlanPharmacyCategory: URIRef # The category or type of pharmacy associated with this cost sharing. + healthPlanPharmacyCategory: ( + URIRef # The category or type of pharmacy associated with this cost sharing. + ) healthcareReportingData: URIRef # Indicates data describing a hospital, e.g. a CDC [[CDCPMDRecord]] or as some kind of [[Dataset]]. height: URIRef # The height of the item. highPrice: URIRef # The highest price of all offers available.\n\nUsage guidelines:\n\n* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similiar Unicode symbols.\n* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. Avoid using these symbols as a readability separator. @@ -1969,11 +2309,19 @@ class SDO(DefinedNamespace): holdingArchive: URIRef # [[ArchiveOrganization]] that holds, keeps or maintains the [[ArchiveComponent]]. homeLocation: URIRef # A contact location for a person's residence. homeTeam: URIRef # The home team in a sports event. - honorificPrefix: URIRef # An honorific prefix preceding a Person's name such as Dr/Mrs/Mr. - honorificSuffix: URIRef # An honorific suffix following a Person's name such as M.D. /PhD/MSCSW. - hospitalAffiliation: URIRef # A hospital with which the physician or office is affiliated. + honorificPrefix: ( + URIRef # An honorific prefix preceding a Person's name such as Dr/Mrs/Mr. + ) + honorificSuffix: ( + URIRef # An honorific suffix following a Person's name such as M.D. /PhD/MSCSW. + ) + hospitalAffiliation: ( + URIRef # A hospital with which the physician or office is affiliated. + ) hostingOrganization: URIRef # The organization (airline, travelers' club, etc.) the membership is made with. - hoursAvailable: URIRef # The hours during which this service or contact is available. + hoursAvailable: ( + URIRef # The hours during which this service or contact is available. 
+ ) howPerformed: URIRef # How the procedure is performed. httpMethod: URIRef # An HTTP method that specifies the appropriate HTTP method for a request to an HTTP EntryPoint. Values are capitalized strings as used in HTTP. iataCode: URIRef # IATA identifier for an airline or airport. @@ -1994,9 +2342,15 @@ class SDO(DefinedNamespace): inPlaylist: URIRef # The playlist to which this recording belongs. inProductGroupWithID: URIRef # Indicates the [[productGroupID]] for a [[ProductGroup]] that this product [[isVariantOf]]. inStoreReturnsOffered: URIRef # Are in-store returns offered? (for more advanced return methods use the [[returnMethod]] property) - inSupportOf: URIRef # Qualification, candidature, degree, application that Thesis supports. - incentiveCompensation: URIRef # Description of bonus and commission compensation aspects of the job. - incentives: URIRef # Description of bonus and commission compensation aspects of the job. + inSupportOf: ( + URIRef # Qualification, candidature, degree, application that Thesis supports. + ) + incentiveCompensation: ( + URIRef # Description of bonus and commission compensation aspects of the job. + ) + incentives: ( + URIRef # Description of bonus and commission compensation aspects of the job. + ) includedComposition: URIRef # Smaller compositions included in this work (e.g. a movement in a symphony). includedDataCatalog: URIRef # A data catalog which contains this dataset (this property was previously 'catalog', preferred name is now 'includedInDataCatalog'). includedInDataCatalog: URIRef # A data catalog which contains this dataset. @@ -2006,12 +2360,18 @@ class SDO(DefinedNamespace): includesHealthPlanFormulary: URIRef # Formularies covered by this plan. includesHealthPlanNetwork: URIRef # Networks covered by this plan. includesObject: URIRef # This links to a node or nodes indicating the exact quantity of the products included in an [[Offer]] or [[ProductCollection]]. 
- increasesRiskOf: URIRef # The condition, complication, etc. influenced by this factor. + increasesRiskOf: ( + URIRef # The condition, complication, etc. influenced by this factor. + ) industry: URIRef # The industry associated with the job position. ineligibleRegion: URIRef # The ISO 3166-1 (ISO 3166-1 alpha-2) or ISO 3166-2 code, the place, or the GeoShape for the geo-political region(s) for which the offer or delivery charge specification is not valid, e.g. a region where the transaction is not allowed.\n\nSee also [[eligibleRegion]]. - infectiousAgent: URIRef # The actual infectious agent, such as a specific bacterium. + infectiousAgent: ( + URIRef # The actual infectious agent, such as a specific bacterium. + ) infectiousAgentClass: URIRef # The class of infectious agent (bacteria, prion, etc.) that causes the disease. - ingredients: URIRef # A single ingredient used in the recipe, e.g. sugar, flour or garlic. + ingredients: ( + URIRef # A single ingredient used in the recipe, e.g. sugar, flour or garlic. + ) inker: URIRef # The individual who traces over the pencil drawings in ink after pencils are complete. insertion: URIRef # The place of attachment of a muscle, or what the muscle moves. installUrl: URIRef # URL at which the app may be installed, if different from the URL of the item. @@ -2020,18 +2380,24 @@ class SDO(DefinedNamespace): intensity: URIRef # Quantitative measure gauging the degree of force involved in the exercise, for example, heartbeats per minute. May include the velocity of the movement. interactingDrug: URIRef # Another drug that is known to interact with this drug in a way that impacts the effect of this drug or causes a risk to the patient. Note: disease interactions are typically captured as contraindications. interactionCount: URIRef # This property is deprecated, alongside the UserInteraction types on which it depended. - interactionService: URIRef # The WebSite or SoftwareApplication where the interactions took place. 
+ interactionService: ( + URIRef # The WebSite or SoftwareApplication where the interactions took place. + ) interactionStatistic: URIRef # The number of interactions for the CreativeWork using the WebSite or SoftwareApplication. The most specific child type of InteractionCounter should be used. interactionType: URIRef # The Action representing the type of interaction. For up votes, +1s, etc. use [[LikeAction]]. For down votes use [[DislikeAction]]. Otherwise, use the most specific Action. interactivityType: URIRef # The predominant mode of learning supported by the learning resource. Acceptable values are 'active', 'expositive', or 'mixed'. interestRate: URIRef # The interest rate, charged or paid, applicable to the financial product. Note: This is different from the calculated annualPercentageRate. interpretedAsClaim: URIRef # Used to indicate a specific claim contained, implied, translated or refined from the content of a [[MediaObject]] or other [[CreativeWork]]. The interpreting party can be indicated using [[claimInterpreter]]. - inventoryLevel: URIRef # The current approximate inventory level for the item or items. + inventoryLevel: ( + URIRef # The current approximate inventory level for the item or items. + ) inverseOf: URIRef # Relates a property to a property that is its inverse. Inverse properties relate the same pairs of items to each other, but in reversed direction. For example, the 'alumni' and 'alumniOf' properties are inverseOf each other. Some properties don't have explicit inverses; in these situations RDFa and JSON-LD syntax for reverse properties can be used. isAcceptingNewPatients: URIRef # Whether the provider is accepting new patients. isAccessibleForFree: URIRef # A flag to signal that the item, event, or place is accessible for free. isAccessoryOrSparePartFor: URIRef # A pointer to another product (or multiple products) for which this product is an accessory or spare part. 
- isAvailableGenerically: URIRef # True if the drug is available in a generic form (regardless of name). + isAvailableGenerically: ( + URIRef # True if the drug is available in a generic form (regardless of name). + ) isBasedOn: URIRef # A resource from which this work is derived or from which it is a modification or adaption. isBasedOnUrl: URIRef # A resource that was used in the creation of this resource. This term can be repeated for multiple sources. For example, http://example.com/great-multiplication-intro.html. isConsumableFor: URIRef # A pointer to another product (or multiple products) for which this product is a consumable. @@ -2043,9 +2409,13 @@ class SDO(DefinedNamespace): isLocatedInSubcellularLocation: URIRef # Subcellular location where this BioChemEntity is located; please use PropertyValue if you want to include any evidence. isPartOf: URIRef # Indicates an item or CreativeWork that this item, or CreativeWork (in some sense), is part of. isPartOfBioChemEntity: URIRef # Indicates a BioChemEntity that is (in some sense) a part of this BioChemEntity. - isPlanForApartment: URIRef # Indicates some accommodation that this floor plan describes. + isPlanForApartment: ( + URIRef # Indicates some accommodation that this floor plan describes. + ) isProprietary: URIRef # True if this item's name is a proprietary/brand name (vs. generic name). - isRelatedTo: URIRef # A pointer to another, somehow related product (or multiple products). + isRelatedTo: ( + URIRef # A pointer to another, somehow related product (or multiple products). + ) isResizable: URIRef # Whether the 3DModel allows resizing. For example, room layout applications often do not allow 3DModel elements to be resized to reflect reality. isSimilarTo: URIRef # A pointer to another, functionally similar product (or multiple products). 
isUnlabelledFallback: URIRef # This can be marked 'true' to indicate that some published [[DeliveryTimeSettings]] or [[ShippingRateSettings]] are intended to apply to all [[OfferShippingDetails]] published by the same merchant, when referenced by a [[shippingSettingsLink]] in those settings. It is not meaningful to use a 'true' value for this property alongside a transitTimeLabel (for [[DeliveryTimeSettings]]) or shippingLabel (for [[ShippingRateSettings]]), since this property is for use with unlabelled settings. @@ -2054,13 +2424,19 @@ class SDO(DefinedNamespace): isicV4: URIRef # The International Standard of Industrial Classification of All Economic Activities (ISIC), Revision 4 code for a particular organization, business person, or place. isrcCode: URIRef # The International Standard Recording Code for the recording. issn: URIRef # The International Standard Serial Number (ISSN) that identifies this serial publication. You can repeat this property to identify different formats of, or the linking ISSN (ISSN-L) for, this serial publication. - issueNumber: URIRef # Identifies the issue of publication; for example, "iii" or "2". + issueNumber: ( + URIRef # Identifies the issue of publication; for example, "iii" or "2". + ) issuedBy: URIRef # The organization issuing the ticket or permit. issuedThrough: URIRef # The service through with the permit was granted. - iswcCode: URIRef # The International Standard Musical Work Code for the composition. + iswcCode: ( + URIRef # The International Standard Musical Work Code for the composition. + ) item: URIRef # An entity represented by an entry in a list or data feed (e.g. an 'artist' in a list of 'artists')’. itemCondition: URIRef # A predefined value from OfferItemCondition specifying the condition of the product or service, or the products or services included in the offer. Also used for product return policies to specify the condition of products accepted for returns. 
- itemDefectReturnFees: URIRef # The type of return fees for returns of defect products. + itemDefectReturnFees: ( + URIRef # The type of return fees for returns of defect products. + ) itemDefectReturnLabelSource: URIRef # The method (from an enumeration) by which the customer obtains a return shipping label for a defect product. itemDefectReturnShippingFeesAmount: URIRef # Amount of shipping costs for defect product returns. Applicable when property [[itemDefectReturnFees]] equals [[ReturnShippingFees]]. itemListElement: URIRef # For itemListElement values, you can use simple strings (e.g. "Peter", "Paul", "Mary"), existing entities, or use ListItem.\n\nText values are best if the elements in the list are plain strings. Existing entities are best for a simple, unordered list of existing things in your data. ListItem is used with ordered lists when you want to provide additional context about the element in that list or when the same item might be in different places in different lists.\n\nNote: The order of elements in your mark-up is not sufficient for indicating the order or elements. Use ListItem with a 'position' property in such cases. @@ -2079,12 +2455,16 @@ class SDO(DefinedNamespace): jobTitle: URIRef # The job title of the person (for example, Financial Manager). jurisdiction: URIRef # Indicates a legal jurisdiction, e.g. of some legislation, or where some government service is based. keywords: URIRef # Keywords or tags used to describe this content. Multiple entries in a keywords list are typically delimited by commas. - knownVehicleDamages: URIRef # A textual description of known damages, both repaired and unrepaired. + knownVehicleDamages: ( + URIRef # A textual description of known damages, both repaired and unrepaired. + ) knows: URIRef # The most generic bi-directional social/work relation. 
knowsAbout: URIRef # Of a [[Person]], and less typically of an [[Organization]], to indicate a topic that is known about - suggesting possible expertise but not implying it. We do not distinguish skill levels here, or relate this to educational content, events, objectives or [[JobPosting]] descriptions. knowsLanguage: URIRef # Of a [[Person]], and less typically of an [[Organization]], to indicate a known language. We do not distinguish skill levels or reading/writing/speaking/signing here. Use language codes from the [IETF BCP 47 standard](http://tools.ietf.org/html/bcp47). labelDetails: URIRef # Link to the drug's label details. - landlord: URIRef # A sub property of participant. The owner of the real estate property. + landlord: ( + URIRef # A sub property of participant. The owner of the real estate property. + ) language: URIRef # A sub property of instrument. The language used on this action. lastReviewed: URIRef # Date on which the content on this web page was last reviewed for accuracy and/or completeness. latitude: URIRef # The latitude of a location. For example ```37.42242``` ([WGS 84](https://en.wikipedia.org/wiki/World_Geodetic_System)). @@ -2099,7 +2479,9 @@ class SDO(DefinedNamespace): legislationDate: URIRef # The date of adoption or signature of the legislation. This is the date at which the text is officially aknowledged to be a legislation, even though it might not even be published or in force. legislationDateVersion: URIRef # The point-in-time at which the provided description of the legislation is valid (e.g. : when looking at the law on the 2016-04-07 (= dateVersion), I get the consolidation of 2015-04-12 of the "National Insurance Contributions Act 2015") legislationIdentifier: URIRef # An identifier for the legislation. This can be either a string-based identifier, like the CELEX at EU level or the NOR in France, or a web-based, URL/URI identifier, like an ELI (European Legislation Identifier) or an URN-Lex. 
- legislationJurisdiction: URIRef # The jurisdiction from which the legislation originates. + legislationJurisdiction: ( + URIRef # The jurisdiction from which the legislation originates. + ) legislationLegalForce: URIRef # Whether the legislation is currently in force, not in force, or partially in force. legislationLegalValue: URIRef # The legal value of this legislation file. The same legislation can be written in multiple files with different legal values. Typically a digitally signed PDF have a "stronger" legal value than the HTML file of the same act. legislationPassedBy: URIRef # The person or organization that originally passed or made the law : typically parliament (for primary legislation) or government (for secondary legislation). This indicates the "legal author" of the law, as opposed to its physical author. @@ -2131,7 +2513,9 @@ class SDO(DefinedNamespace): lowPrice: URIRef # The lowest price of all offers available.\n\nUsage guidelines:\n\n* Use values from 0123456789 (Unicode 'DIGIT ZERO' (U+0030) to 'DIGIT NINE' (U+0039)) rather than superficially similiar Unicode symbols.\n* Use '.' (Unicode 'FULL STOP' (U+002E)) rather than ',' to indicate a decimal point. Avoid using these symbols as a readability separator. lyricist: URIRef # The person who wrote the words. lyrics: URIRef # The words in the song. - mainContentOfPage: URIRef # Indicates if this web page element is the main subject of the page. + mainContentOfPage: ( + URIRef # Indicates if this web page element is the main subject of the page. + ) mainEntity: URIRef # Indicates the primary entity described in some page or other CreativeWork. mainEntityOfPage: URIRef # Indicates a page (or other CreativeWork) for which this thing is the main entity being described. See [background notes](/docs/datamodel.html#mainEntityBackground) for details. maintainer: URIRef # A maintainer of a [[Dataset]], software package ([[SoftwareApplication]]), or other [[Project]]. 
A maintainer is a [[Person]] or [[Organization]] that manages contributions to, and/or publication of, some (typically complex) artifact. It is common for distributions of software and data to be based on "upstream" sources. When [[maintainer]] is applied to a specific version of something e.g. a particular version or packaging of a [[Dataset]], it is always possible that the upstream source has a different maintainer. The [[isBasedOn]] property can be used to indicate such relationships between datasets to make the different maintenance roles clear. Similarly in the case of software, a package may have dedicated maintainers working on integration into software distributions such as Ubuntu, as well as upstream maintainers of the underlying work. @@ -2147,8 +2531,12 @@ class SDO(DefinedNamespace): mathExpression: URIRef # A mathematical expression (e.g. 'x^2-3x=0') that may be solved for a specific variable, simplified, or transformed. This can take many formats, e.g. LaTeX, Ascii-Math, or math as you would write with a keyboard. maxPrice: URIRef # The highest price if the price is a range. maxValue: URIRef # The upper value of some characteristic or property. - maximumAttendeeCapacity: URIRef # The total number of individuals that may attend an event or venue. - maximumEnrollment: URIRef # The maximum number of students who may be enrolled in the program. + maximumAttendeeCapacity: ( + URIRef # The total number of individuals that may attend an event or venue. + ) + maximumEnrollment: ( + URIRef # The maximum number of students who may be enrolled in the program. + ) maximumIntake: URIRef # Recommended intake of this supplement for a given population as defined by a specific recommending authority. maximumPhysicalAttendeeCapacity: URIRef # The maximum physical attendee capacity of an [[Event]] whose [[eventAttendanceMode]] is [[OfflineEventAttendanceMode]] (or the offline aspects, in the case of a [[MixedEventAttendanceMode]]). 
maximumVirtualAttendeeCapacity: URIRef # The maximum physical attendee capacity of an [[Event]] whose [[eventAttendanceMode]] is [[OnlineEventAttendanceMode]] (or the online aspects, in the case of a [[MixedEventAttendanceMode]]). @@ -2163,7 +2551,9 @@ class SDO(DefinedNamespace): medicalAudience: URIRef # Medical audience for page. medicalSpecialty: URIRef # A medical specialty of the provider. medicineSystem: URIRef # The system of medicine that includes this MedicalEntity, for example 'evidence-based', 'homeopathic', 'chiropractic', etc. - meetsEmissionStandard: URIRef # Indicates that the vehicle meets the respective emission standard. + meetsEmissionStandard: ( + URIRef # Indicates that the vehicle meets the respective emission standard. + ) member: URIRef # A member of an Organization or a ProgramMembership. Organizations can be members of organizations; ProgramMembership is typically for individuals. memberOf: URIRef # An Organization (or ProgramMembership) to which this Person or Organization belongs. members: URIRef # A member of this organization. @@ -2175,7 +2565,9 @@ class SDO(DefinedNamespace): menuAddOn: URIRef # Additional menu item(s) such as a side dish of salad or side order of fries that can be added to this menu item. Additionally it can be a menu section containing allowed add-on menu items for this menu item. merchant: URIRef # 'merchant' is an out-dated term for 'seller'. merchantReturnDays: URIRef # Specifies either a fixed return date or the number of days (from the delivery date) that a product can be returned. Used when the [[returnPolicyCategory]] property is specified as [[MerchantReturnFiniteReturnWindow]]. - merchantReturnLink: URIRef # Specifies a Web page or service by URL, for product returns. + merchantReturnLink: ( + URIRef # Specifies a Web page or service by URL, for product returns. + ) messageAttachment: URIRef # A CreativeWork attached to the message. 
mileageFromOdometer: URIRef # The total distance travelled by the particular vehicle since its initial production, as read from its odometer.\n\nTypical unit code(s): KMT for kilometers, SMI for statute miles minPrice: URIRef # The lowest price if the price is a range. @@ -2195,7 +2587,9 @@ class SDO(DefinedNamespace): muscleAction: URIRef # The movement the muscle generates. musicArrangement: URIRef # An arrangement derived from the composition. musicBy: URIRef # The composer of the soundtrack. - musicCompositionForm: URIRef # The type of composition (e.g. overture, sonata, symphony, etc.). + musicCompositionForm: ( + URIRef # The type of composition (e.g. overture, sonata, symphony, etc.). + ) musicGroupMember: URIRef # A member of a music group—for example, John, Paul, George, or Ringo. musicReleaseFormat: URIRef # Format of this release (the type of recording media used, ie. compact disc, digital media, LP, etc.). musicalKey: URIRef # The key, mode, or scale this composition uses. @@ -2206,7 +2600,9 @@ class SDO(DefinedNamespace): naturalProgression: URIRef # The expected progression of the condition if it is not treated and allowed to progress naturally. negativeNotes: URIRef # Indicates, in the context of a [[Review]] (e.g. framed as 'pro' vs 'con' considerations), negative considerations - either as unstructured text, or a list. nerve: URIRef # The underlying innervation associated with the muscle. - nerveMotor: URIRef # The neurological pathway extension that involves muscle control. + nerveMotor: ( + URIRef # The neurological pathway extension that involves muscle control. + ) netWorth: URIRef # The total financial value of the person as calculated by subtracting assets from liabilities. newsUpdatesAndGuidelines: URIRef # Indicates a page with news updates and guidelines. This could often be (but is not required to be) the main page containing [[SpecialAnnouncement]] markup on a site. nextItem: URIRef # A link to the ListItem that follows the current one. 
@@ -2214,7 +2610,9 @@ class SDO(DefinedNamespace): nonEqual: URIRef # This ordering relation for qualitative values indicates that the subject is not equal to the object. nonProprietaryName: URIRef # The generic name of this drug or supplement. nonprofitStatus: URIRef # nonprofit Status indicates the legal status of a non-profit organization in its primary place of business. - normalRange: URIRef # Range of acceptable values for a typical patient, when applicable. + normalRange: ( + URIRef # Range of acceptable values for a typical patient, when applicable. + ) nsn: URIRef # Indicates the [NATO stock number](https://en.wikipedia.org/wiki/NATO_Stock_Number) (nsn) of a [[Product]]. numAdults: URIRef # The number of adults staying in the unit. numChildren: URIRef # The number of children staying in the unit. @@ -2229,7 +2627,9 @@ class SDO(DefinedNamespace): numberOfBeds: URIRef # The quantity of the given bed type available in the HotelRoom, Suite, House, or Apartment. numberOfCredits: URIRef # The number of credits or units awarded by a Course or required to complete an EducationalOccupationalProgram. numberOfDoors: URIRef # The number of doors.\n\nTypical unit code(s): C62 - numberOfEmployees: URIRef # The number of employees in an organization e.g. business. + numberOfEmployees: ( + URIRef # The number of employees in an organization e.g. business. + ) numberOfEpisodes: URIRef # The number of episodes in this season or series. numberOfForwardGears: URIRef # The total number of forward gears available for the transmission system of the vehicle.\n\nTypical unit code(s): C62 numberOfFullBathrooms: URIRef # Number of full bathrooms - The total number of full and ¾ bathrooms in an [[Accommodation]]. This corresponds to the [BathroomsFull field in RESO](https://ddwiki.reso.org/display/DDW17/BathroomsFull+Field). @@ -2257,11 +2657,15 @@ class SDO(DefinedNamespace): openingHours: URIRef # The general opening hours for a business. 
Opening hours can be specified as a weekly time range, starting with days, then times per day. Multiple days can be listed with commas ',' separating each day. Day or time ranges are specified using a hyphen '-'.\n\n* Days are specified using the following two-letter combinations: ```Mo```, ```Tu```, ```We```, ```Th```, ```Fr```, ```Sa```, ```Su```.\n* Times are specified using 24:00 format. For example, 3pm is specified as ```15:00```, 10am as ```10:00```. \n* Here is an example: <time itemprop="openingHours" datetime="Tu,Th 16:00-20:00">Tuesdays and Thursdays 4-8pm</time>.\n* If a business is open 7 days a week, then it can be specified as <time itemprop="openingHours" datetime="Mo-Su">Monday through Sunday, all day</time>. openingHoursSpecification: URIRef # The opening hours of a certain place. opens: URIRef # The opening hour of the place or service on the given day(s) of the week. - operatingSystem: URIRef # Operating systems supported (Windows 7, OSX 10.6, Android 1.6). + operatingSystem: ( + URIRef # Operating systems supported (Windows 7, OSX 10.6, Android 1.6). + ) opponent: URIRef # A sub property of participant. The opponent on this action. option: URIRef # A sub property of object. The options subject to this action. orderDate: URIRef # Date order was placed. - orderDelivery: URIRef # The delivery of the parcel related to this order or order item. + orderDelivery: ( + URIRef # The delivery of the parcel related to this order or order item. + ) orderItemNumber: URIRef # The identifier of the order item. orderItemStatus: URIRef # The current status of the order item. orderNumber: URIRef # The identifier of the transaction. @@ -2289,25 +2693,35 @@ class SDO(DefinedNamespace): parents: URIRef # A parents of the person. partOfEpisode: URIRef # The episode to which this clip belongs. partOfInvoice: URIRef # The order is being paid as part of the referenced Invoice. - partOfOrder: URIRef # The overall order the items in this delivery were included in. 
+ partOfOrder: ( + URIRef # The overall order the items in this delivery were included in. + ) partOfSeason: URIRef # The season to which this episode belongs. partOfSeries: URIRef # The series to which this episode or season belongs. - partOfSystem: URIRef # The anatomical or organ system that this structure is part of. + partOfSystem: ( + URIRef # The anatomical or organ system that this structure is part of. + ) partOfTVSeries: URIRef # The TV series to which this episode or season belongs. partOfTrip: URIRef # Identifies that this [[Trip]] is a subTrip of another Trip. For example Day 1, Day 2, etc. of a multi-day trip. participant: URIRef # Other co-agents that participated in the action indirectly. e.g. John wrote a book with *Steve*. partySize: URIRef # Number of people the reservation should accommodate. passengerPriorityStatus: URIRef # The priority status assigned to a passenger for security or boarding (e.g. FastTrack or Priority). - passengerSequenceNumber: URIRef # The passenger's sequence number as assigned by the airline. + passengerSequenceNumber: ( + URIRef # The passenger's sequence number as assigned by the airline. + ) pathophysiology: URIRef # Changes in the normal mechanical, physical, and biochemical functions that are associated with this activity or condition. pattern: URIRef # A pattern that something has, for example 'polka dot', 'striped', 'Canadian flag'. Values are typically expressed as text, although links to controlled value schemes are also supported. 
payload: URIRef # The permitted weight of passengers and cargo, EXCLUDING the weight of the empty vehicle.\n\nTypical unit code(s): KGM for kilogram, LBR for pound\n\n* Note 1: Many databases specify the permitted TOTAL weight instead, which is the sum of [[weight]] and [[payload]]\n* Note 2: You can indicate additional information in the [[name]] of the [[QuantitativeValue]] node.\n* Note 3: You may also link to a [[QualitativeValue]] node that provides additional information using [[valueReference]].\n* Note 4: Note that you can use [[minValue]] and [[maxValue]] to indicate ranges. paymentAccepted: URIRef # Cash, Credit Card, Cryptocurrency, Local Exchange Tradings System, etc. paymentDue: URIRef # The date that payment is due. paymentDueDate: URIRef # The date that payment is due. - paymentMethod: URIRef # The name of the credit card or other method of payment for the order. + paymentMethod: ( + URIRef # The name of the credit card or other method of payment for the order. + ) paymentMethodId: URIRef # An identifier for the method of payment used (e.g. the last 4 digits of the credit card). - paymentStatus: URIRef # The status of payment; whether the invoice has been paid or not. + paymentStatus: ( + URIRef # The status of payment; whether the invoice has been paid or not. + ) paymentUrl: URIRef # The URL for sending a payment. penciler: URIRef # The individual who draws the primary narrative artwork. percentile10: URIRef # The 10th percentile value. @@ -2318,18 +2732,26 @@ class SDO(DefinedNamespace): performer: URIRef # A performer at the event—for example, a presenter, musician, musical group or actor. performerIn: URIRef # Event that this person is a performer or participant in. performers: URIRef # The main performer or performers of the event—for example, a presenter, musician, or actor. - permissionType: URIRef # The type of permission granted the person, organization, or audience. 
+ permissionType: ( + URIRef # The type of permission granted the person, organization, or audience. + ) permissions: URIRef # Permission(s) required to run the app (for example, a mobile app may require full internet access or may run only on wifi). permitAudience: URIRef # The target audience for this permit. - permittedUsage: URIRef # Indications regarding the permitted usage of the accommodation. + permittedUsage: ( + URIRef # Indications regarding the permitted usage of the accommodation. + ) petsAllowed: URIRef # Indicates whether pets are allowed to enter the accommodation or lodging business. More detailed information can be put in a text value. phoneticText: URIRef # Representation of a text [[textValue]] using the specified [[speechToTextMarkup]]. For example the city name of Houston in IPA: /ˈhjuːstən/. photo: URIRef # A photograph of this place. photos: URIRef # Photographs of this place. physicalRequirement: URIRef # A description of the types of physical activity associated with the job. Defined terms such as those in O*net may be used, but note that there is no way to specify the level of ability as well as its nature when using a defined term. - physiologicalBenefits: URIRef # Specific physiologic benefits associated to the plan. + physiologicalBenefits: ( + URIRef # Specific physiologic benefits associated to the plan. + ) pickupLocation: URIRef # Where a taxi will pick up a passenger or a rental car can be picked up. - pickupTime: URIRef # When a taxi will pickup a passenger or a rental car can be picked up. + pickupTime: ( + URIRef # When a taxi will pickup a passenger or a rental car can be picked up. + ) playMode: URIRef # Indicates whether this game is multi-player, co-op or single-player. The game can be marked as multi-player, co-op and single-player at the same time. playerType: URIRef # Player type required—for example, Flash or Silverlight. playersOnline: URIRef # Number of players on the server. 
@@ -2338,7 +2760,9 @@ class SDO(DefinedNamespace): position: URIRef # The position of an item in a series or sequence of items. positiveNotes: URIRef # Indicates, in the context of a [[Review]] (e.g. framed as 'pro' vs 'con' considerations), positive considerations - either as unstructured text, or a list. possibleComplication: URIRef # A possible unexpected and unfavorable evolution of a medical condition. Complications may include worsening of the signs or symptoms of the disease, extension of the condition to other organ systems, etc. - possibleTreatment: URIRef # A possible treatment to address this condition, sign or symptom. + possibleTreatment: ( + URIRef # A possible treatment to address this condition, sign or symptom. + ) postOfficeBoxNumber: URIRef # The post office box number for PO box addresses. postOp: URIRef # A description of the postoperative procedures, care, and/or followups for this device. postalCode: URIRef # The postal code. For example, 94043. @@ -2369,13 +2793,19 @@ class SDO(DefinedNamespace): primaryImageOfPage: URIRef # Indicates the main image on the page. primaryPrevention: URIRef # A preventative therapy used to prevent an initial occurrence of the medical condition, such as vaccination. printColumn: URIRef # The number of the column in which the NewsArticle appears in the print edition. - printEdition: URIRef # The edition of the print product in which the NewsArticle appears. + printEdition: ( + URIRef # The edition of the print product in which the NewsArticle appears. + ) printPage: URIRef # If this NewsArticle appears in print, this field indicates the name of the page on which the article is found. Please note that this field is intended for the exact page name (e.g. A5, B18). printSection: URIRef # If this NewsArticle appears in print, this field indicates the print section in which the article appeared. procedure: URIRef # A description of the procedure involved in setting up, using, and/or installing the device. 
procedureType: URIRef # The type of procedure, for example Surgical, Noninvasive, or Percutaneous. - processingTime: URIRef # Estimated processing time for the service using this channel. - processorRequirements: URIRef # Processor architecture required to run the application (e.g. IA64). + processingTime: ( + URIRef # Estimated processing time for the service using this channel. + ) + processorRequirements: ( + URIRef # Processor architecture required to run the application (e.g. IA64). + ) producer: URIRef # The person or organization who produced the work (e.g. music album, movie, tv/radio series etc.). produces: URIRef # The tangible thing generated by the service, e.g. a passport, permit, etc. productGroupID: URIRef # Indicates a textual identifier for a ProductGroup. @@ -2406,9 +2836,15 @@ class SDO(DefinedNamespace): publisher: URIRef # The publisher of the creative work. publisherImprint: URIRef # The publishing division which published the comic. publishingPrinciples: URIRef # The publishingPrinciples property indicates (typically via [[URL]]) a document describing the editorial principles of an [[Organization]] (or individual e.g. a [[Person]] writing a blog) that relate to their activities as a publisher, e.g. ethics or diversity policies. When applied to a [[CreativeWork]] (e.g. [[NewsArticle]]) the principles are those of the party primarily responsible for the creation of the [[CreativeWork]]. While such policies are most typically expressed in natural language, sometimes related information (e.g. indicating a [[funder]]) can be expressed using schema.org terminology. - purchaseDate: URIRef # The date the item e.g. vehicle was purchased by the current owner. - qualifications: URIRef # Specific qualifications required for this role or Occupation. - quarantineGuidelines: URIRef # Guidelines about quarantine rules, e.g. in the context of a pandemic. + purchaseDate: ( + URIRef # The date the item e.g. vehicle was purchased by the current owner. 
+ ) + qualifications: ( + URIRef # Specific qualifications required for this role or Occupation. + ) + quarantineGuidelines: ( + URIRef # Guidelines about quarantine rules, e.g. in the context of a pandemic. + ) query: URIRef # A sub property of instrument. The query used on this action. quest: URIRef # The task that a player-controlled character, or group of characters may complete in order to gain a reward. question: URIRef # A sub property of object. A question. @@ -2420,15 +2856,23 @@ class SDO(DefinedNamespace): readonlyValue: URIRef # Whether or not a property is mutable. Default is false. Specifying this for a property that also has a value makes it act similar to a "hidden" input in an HTML form. realEstateAgent: URIRef # A sub property of participant. The real estate agent involved in the action. recipe: URIRef # A sub property of instrument. The recipe/instructions used to perform the action. - recipeCategory: URIRef # The category of the recipe—for example, appetizer, entree, etc. - recipeCuisine: URIRef # The cuisine of the recipe (for example, French or Ethiopian). - recipeIngredient: URIRef # A single ingredient used in the recipe, e.g. sugar, flour or garlic. + recipeCategory: ( + URIRef # The category of the recipe—for example, appetizer, entree, etc. + ) + recipeCuisine: ( + URIRef # The cuisine of the recipe (for example, French or Ethiopian). + ) + recipeIngredient: ( + URIRef # A single ingredient used in the recipe, e.g. sugar, flour or garlic. + ) recipeInstructions: URIRef # A step in making the recipe, in the form of a single item (document, video, etc.) or an ordered list with HowToStep and/or HowToSection items. recipeYield: URIRef # The quantity produced by the recipe (for example, number of people served, number of servings, etc). recipient: URIRef # A sub property of participant. The participant who is at the receiving end of the action. 
recognizedBy: URIRef # An organization that acknowledges the validity, value or utility of a credential. Note: recognition may include a process of quality assurance or accreditation. recognizingAuthority: URIRef # If applicable, the organization that officially recognizes this entity as part of its endorsed system of medicine. - recommendationStrength: URIRef # Strength of the guideline's recommendation (e.g. 'class I'). + recommendationStrength: ( + URIRef # Strength of the guideline's recommendation (e.g. 'class I'). + ) recommendedIntake: URIRef # Recommended intake of this supplement for a given population as defined by a specific recommending authority. recordLabel: URIRef # The label that issued the release. recordedAs: URIRef # An audio recording of the work. @@ -2453,15 +2897,23 @@ class SDO(DefinedNamespace): releaseOf: URIRef # The album this is a release of. releasedEvent: URIRef # The place and time the release was issued, expressed as a PublicationEvent. relevantOccupation: URIRef # The Occupation for the JobPosting. - relevantSpecialty: URIRef # If applicable, a medical specialty in which this entity is relevant. - remainingAttendeeCapacity: URIRef # The number of attendee places for an event that remain unallocated. + relevantSpecialty: ( + URIRef # If applicable, a medical specialty in which this entity is relevant. + ) + remainingAttendeeCapacity: ( + URIRef # The number of attendee places for an event that remain unallocated. + ) renegotiableLoan: URIRef # Whether the terms for payment of interest can be renegotiated during the life of the loan. - repeatCount: URIRef # Defines the number of times a recurring [[Event]] will take place + repeatCount: ( + URIRef # Defines the number of times a recurring [[Event]] will take place + ) repeatFrequency: URIRef # Defines the frequency at which [[Event]]s will occur according to a schedule [[Schedule]]. The intervals between events should be defined as a [[Duration]] of time. 
repetitions: URIRef # Number of times one should repeat the activity. replacee: URIRef # A sub property of object. The object that is being replaced. replacer: URIRef # A sub property of object. The object that replaces. - replyToUrl: URIRef # The URL at which a reply may be posted to the specified UserComment. + replyToUrl: ( + URIRef # The URL at which a reply may be posted to the specified UserComment. + ) reportNumber: URIRef # The number or other unique designator assigned to a Report by the publishing organization. representativeOfPage: URIRef # Indicates whether this image is representative of the content of the page. requiredCollateral: URIRef # Assets required to secure loan or credit repayments. It may take form of third party pledge, goods, financial instruments (cash, securities, etc.) @@ -2471,20 +2923,30 @@ class SDO(DefinedNamespace): requiredQuantity: URIRef # The required quantity of the item(s). requirements: URIRef # Component dependency requirements for application. This includes runtime environments and shared libraries that are not included in the application distribution package, but required to run the application (Examples: DirectX, Java or .NET runtime). requiresSubscription: URIRef # Indicates if use of the media require a subscription (either paid or free). Allowed values are ```true``` or ```false``` (note that an earlier version had 'yes', 'no'). - reservationFor: URIRef # The thing -- flight, event, restaurant,etc. being reserved. + reservationFor: ( + URIRef # The thing -- flight, event, restaurant,etc. being reserved. + ) reservationId: URIRef # A unique identifier for the reservation. reservationStatus: URIRef # The current status of the reservation. reservedTicket: URIRef # A ticket associated with the reservation. - responsibilities: URIRef # Responsibilities associated with this role or Occupation. + responsibilities: ( + URIRef # Responsibilities associated with this role or Occupation. 
+ ) restPeriods: URIRef # How often one should break from the activity. restockingFee: URIRef # Use [[MonetaryAmount]] to specify a fixed restocking fee for product returns, or use [[Number]] to specify a percentage of the product price paid by the customer. result: URIRef # The result produced in the action. e.g. John wrote *a book*. resultComment: URIRef # A sub property of result. The Comment created or sent as a result of this action. resultReview: URIRef # A sub property of result. The review that resulted in the performing of the action. - returnFees: URIRef # The type of return fees for purchased products (for any return reason) + returnFees: ( + URIRef # The type of return fees for purchased products (for any return reason) + ) returnLabelSource: URIRef # The method (from an enumeration) by which the customer obtains a return shipping label for a product returned for any reason. - returnMethod: URIRef # The type of return method offered, specified from an enumeration. - returnPolicyCategory: URIRef # Specifies an applicable return policy (from an enumeration). + returnMethod: ( + URIRef # The type of return method offered, specified from an enumeration. + ) + returnPolicyCategory: ( + URIRef # Specifies an applicable return policy (from an enumeration). + ) returnPolicyCountry: URIRef # The country where the product has to be sent to for returns, for example "Ireland" using the [[name]] property of [[Country]]. You can also provide the two-letter [ISO 3166-1 alpha-2 country code](http://en.wikipedia.org/wiki/ISO_3166-1). Note that this can be different from the country where the product was originally shipped from or sent too. returnPolicySeasonalOverride: URIRef # Seasonal override of a return policy. returnShippingFeesAmount: URIRef # Amount of shipping costs for product returns (for any reason). Applicable when property [[returnFees]] equals [[ReturnShippingFees]]. 
@@ -2529,8 +2991,12 @@ class SDO(DefinedNamespace): seatingCapacity: URIRef # The number of persons that can be seated (e.g. in a vehicle), both in terms of the physical space available, and in terms of limitations set by law.\n\nTypical unit code(s): C62 for persons seatingType: URIRef # The type/class of the seat. secondaryPrevention: URIRef # A preventative therapy used to prevent reoccurrence of the medical condition after an initial episode of the condition. - securityClearanceRequirement: URIRef # A description of any security clearance requirements of the job. - securityScreening: URIRef # The type of security screening the passenger is subject to. + securityClearanceRequirement: ( + URIRef # A description of any security clearance requirements of the job. + ) + securityScreening: ( + URIRef # The type of security screening the passenger is subject to. + ) seeks: URIRef # A pointer to products or services sought by the organization or person (demand). seller: URIRef # An entity which offers (sells / leases / lends / loans) the services / goods. A seller may also be a provider. sender: URIRef # A sub property of participant. The participant who is at the sending end of the action. @@ -2578,7 +3044,9 @@ class SDO(DefinedNamespace): softwareHelp: URIRef # Software application help. softwareRequirements: URIRef # Component dependency requirements for application. This includes runtime environments and shared libraries that are not included in the application distribution package, but required to run the application (Examples: DirectX, Java or .NET runtime). softwareVersion: URIRef # Version of the software instance. - sourceOrganization: URIRef # The Organization on whose behalf the creator was working. + sourceOrganization: ( + URIRef # The Organization on whose behalf the creator was working. + ) sourcedFrom: URIRef # The neurological pathway that originates the neurons. 
spatial: URIRef # The "spatial" property can be used in cases when more specific properties (e.g. [[locationCreated]], [[spatialCoverage]], [[contentLocation]]) are not known to be appropriate. spatialCoverage: URIRef # The spatialCoverage of a CreativeWork indicates the place(s) which are the focus of the content. It is a subproperty of contentLocation intended primarily for more technical and detailed materials. For example with a Dataset, it indicates areas that the dataset describes: a dataset of New York weather would have spatialCoverage which was the place: the state of New York. @@ -2610,7 +3078,9 @@ class SDO(DefinedNamespace): streetAddress: URIRef # The street address. For example, 1600 Amphitheatre Pkwy. strengthUnit: URIRef # The units of an active ingredient's strength, e.g. mg. strengthValue: URIRef # The value of an active ingredient's strength, e.g. 325. - structuralClass: URIRef # The name given to how bone physically connects to each other. + structuralClass: ( + URIRef # The name given to how bone physically connects to each other. + ) study: URIRef # A medical study or trial related to this entity. studyDesign: URIRef # Specifics about the observational study design (enumerated). studyLocation: URIRef # The location in which the study is taking/took place. @@ -2620,7 +3090,9 @@ class SDO(DefinedNamespace): subOrganization: URIRef # A relationship between two organizations where the first includes the second, e.g., as a subsidiary. See also: the more specific 'department' property. subReservation: URIRef # The individual reservations included in the package. Typically a repeated property. subStageSuffix: URIRef # The substage, e.g. 'a' for Stage IIIa. - subStructure: URIRef # Component (sub-)structure(s) that comprise this anatomical structure. + subStructure: ( + URIRef # Component (sub-)structure(s) that comprise this anatomical structure. + ) subTest: URIRef # A component test of the panel. 
subTrip: URIRef # Identifies a [[Trip]] that is a subTrip of this Trip. For example Day 1, Day 2, etc. of a multi-day trip. subjectOf: URIRef # A CreativeWork or Event about this Thing. @@ -2630,9 +3102,13 @@ class SDO(DefinedNamespace): suggestedAge: URIRef # The age or age range for the intended audience or person, for example 3-12 months for infants, 1-5 years for toddlers. suggestedAnswer: URIRef # An answer (possibly one of several, possibly incorrect) to a Question, e.g. on a Question/Answer site. suggestedGender: URIRef # The suggested gender of the intended person or audience, for example "male", "female", or "unisex". - suggestedMaxAge: URIRef # Maximum recommended age in years for the audience or user. + suggestedMaxAge: ( + URIRef # Maximum recommended age in years for the audience or user. + ) suggestedMeasurement: URIRef # A suggested range of body measurements for the intended audience or person, for example inseam between 32 and 34 inches or height between 170 and 190 cm. Typically found on a size chart for wearable products. - suggestedMinAge: URIRef # Minimum recommended age in years for the audience or user. + suggestedMinAge: ( + URIRef # Minimum recommended age in years for the audience or user. + ) suitableForDiet: URIRef # Indicates a dietary restriction or guideline for which this recipe or menu item is suitable, e.g. diabetic, halal etc. superEvent: URIRef # An event that this event is a part of. For example, a collection of individual music performances might each have a music festival as their superEvent. supersededBy: URIRef # Relates a term (i.e. a property, class or enumeration) to one that supersedes it. @@ -2641,10 +3117,16 @@ class SDO(DefinedNamespace): supportingData: URIRef # Supporting data for a SoftwareApplication. surface: URIRef # A material used as a surface in some artwork, e.g. Canvas, Paper, Wood, Board, etc. target: URIRef # Indicates a target EntryPoint for an Action. 
- targetCollection: URIRef # A sub property of object. The collection target of the action. - targetDescription: URIRef # The description of a node in an established educational framework. + targetCollection: ( + URIRef # A sub property of object. The collection target of the action. + ) + targetDescription: ( + URIRef # The description of a node in an established educational framework. + ) targetName: URIRef # The name of a node in an established educational framework. - targetPlatform: URIRef # Type of app development: phone, Metro style, desktop, XBox, etc. + targetPlatform: ( + URIRef # Type of app development: phone, Metro style, desktop, XBox, etc. + ) targetPopulation: URIRef # Characteristics of the population for which this is intended, or which typically uses it, e.g. 'adults'. targetProduct: URIRef # Target Operating System / Product to which the code applies. If applies to several versions, just the product name can be used. targetUrl: URIRef # The URL of a node in an established educational framework. @@ -2667,7 +3149,9 @@ class SDO(DefinedNamespace): ticketNumber: URIRef # The unique identifier for the ticket. ticketToken: URIRef # Reference to an asset (e.g., Barcode, QR code image or PDF) usable for entrance. ticketedSeat: URIRef # The seat associated with the ticket. - timeOfDay: URIRef # The time of day the program normally runs. For example, "evenings". + timeOfDay: ( + URIRef # The time of day the program normally runs. For example, "evenings". + ) timeRequired: URIRef # Approximate or typical time it takes to work with or through this learning resource for the typical intended target audience, e.g. 'PT30M', 'PT1H25M'. timeToComplete: URIRef # The expected length of time to complete the program if attending full-time. tissueSample: URIRef # The type of tissue sample required for the test. @@ -2702,7 +3186,9 @@ class SDO(DefinedNamespace): translationOfWork: URIRef # The work that this work has been translated from. e.g. 
物种起源 is a translationOf “On the Origin of Species” translator: URIRef # Organization or person who adapts a creative work to different languages, regional differences and technical requirements of a target market, or that translates during some event. transmissionMethod: URIRef # How the disease spreads, either as a route or vector, for example 'direct contact', 'Aedes aegypti', etc. - travelBans: URIRef # Information about travel bans, e.g. in the context of a pandemic. + travelBans: ( + URIRef # Information about travel bans, e.g. in the context of a pandemic. + ) trialDesign: URIRef # Specifics about the trial design (enumerated). tributary: URIRef # The anatomical or organ system that the vein flows into; a larger structure that the vein connects to. typeOfBed: URIRef # The type of bed to which the BedDetail refers, i.e. the type of bed available in the quantity indicated by quantity. @@ -2745,7 +3231,9 @@ class SDO(DefinedNamespace): vehicleConfiguration: URIRef # A short text indicating the configuration of the vehicle, e.g. '5dr hatchback ST 2.5 MT 225 hp' or 'limited edition'. vehicleEngine: URIRef # Information about the engine or engines of the vehicle. vehicleIdentificationNumber: URIRef # The Vehicle Identification Number (VIN) is a unique serial number used by the automotive industry to identify individual motor vehicles. - vehicleInteriorColor: URIRef # The color or color combination of the interior of the vehicle. + vehicleInteriorColor: ( + URIRef # The color or color combination of the interior of the vehicle. + ) vehicleInteriorType: URIRef # The type or material of the interior of the vehicle (e.g. synthetic fabric, leather, wood, etc.). While most interior types are characterized by the material used, an interior type can also be based on vehicle usage or target audience. vehicleModelDate: URIRef # The release date of a vehicle model (often used to differentiate versions of the same make and model). 
vehicleSeatingCapacity: URIRef # The number of passengers that can be seated in the vehicle, both in terms of the physical space available, and in terms of limitations set by law.\n\nTypical unit code(s): C62 for persons. @@ -2763,7 +3251,9 @@ class SDO(DefinedNamespace): warranty: URIRef # The warranty promise(s) included in the offer. warrantyPromise: URIRef # The warranty promise(s) included in the offer. warrantyScope: URIRef # The scope of the warranty promise. - webCheckinTime: URIRef # The time when a passenger can check into the flight online. + webCheckinTime: ( + URIRef # The time when a passenger can check into the flight online. + ) webFeed: URIRef # The URL for a feed, e.g. associated with a podcast series, blog, or series of date-stamped updates. This is usually RSS or Atom. weight: URIRef # The weight of the product or person. weightTotal: URIRef # The permitted total weight of the loaded vehicle, including passengers and cargo and the weight of the empty vehicle.\n\nTypical unit code(s): KGM for kilogram, LBR for pound\n\n* Note 1: You can indicate additional information in the [[name]] of the [[QuantitativeValue]] node.\n* Note 2: You may also link to a [[QualitativeValue]] node that provides additional information using [[valueReference]].\n* Note 3: Note that you can use [[minValue]] and [[maxValue]] to indicate ranges. diff --git a/rdflib/namespace/_SH.py b/rdflib/namespace/_SH.py index 4df0fc55a..5bcbbbbf1 100644 --- a/rdflib/namespace/_SH.py +++ b/rdflib/namespace/_SH.py @@ -35,8 +35,12 @@ class SH(DefinedNamespace): entailment: URIRef # An entailment regime that indicates what kind of inferencing is required by a shapes graph. equals: URIRef # Specifies a property that must have the same values as the value nodes. expression: URIRef # The node expression that must return true for the value nodes. - filterShape: URIRef # The shape that all input nodes of the expression need to conform to. 
- flags: URIRef # An optional flag to be used with regular expression pattern matching. + filterShape: ( + URIRef # The shape that all input nodes of the expression need to conform to. + ) + flags: ( + URIRef # An optional flag to be used with regular expression pattern matching. + ) focusNode: URIRef # The focus node that was validated when the result was produced. group: URIRef # Can be used to link to a property group to indicate that a property shape belongs to a group of related property shapes. hasValue: URIRef # Specifies a value that must be among the value nodes. @@ -48,15 +52,21 @@ class SH(DefinedNamespace): jsLibrary: URIRef # Declares which JavaScript libraries are needed to execute this. jsLibraryURL: URIRef # Declares the URLs of a JavaScript library. This should be the absolute URL of a JavaScript file. Implementations may redirect those to local files. labelTemplate: URIRef # Outlines how human-readable labels of instances of the associated Parameterizable shall be produced. The values can contain {?paramName} as placeholders for the actual values of the given parameter. - languageIn: URIRef # Specifies a list of language tags that all value nodes must have. + languageIn: ( + URIRef # Specifies a list of language tags that all value nodes must have. + ) lessThan: URIRef # Specifies a property that must have smaller values than the value nodes. lessThanOrEquals: URIRef # Specifies a property that must have smaller or equal values than the value nodes. - maxCount: URIRef # Specifies the maximum number of values in the set of value nodes. + maxCount: ( + URIRef # Specifies the maximum number of values in the set of value nodes. + ) maxExclusive: URIRef # Specifies the maximum exclusive value of each value node. maxInclusive: URIRef # Specifies the maximum inclusive value of each value node. maxLength: URIRef # Specifies the maximum string length of each value node. 
message: URIRef # A human-readable message (possibly with placeholders for variables) explaining the cause of the result. - minCount: URIRef # Specifies the minimum number of values in the set of value nodes. + minCount: ( + URIRef # Specifies the minimum number of values in the set of value nodes. + ) minExclusive: URIRef # Specifies the minimum exclusive value of each value node. minInclusive: URIRef # Specifies the minimum inclusive value of each value node. minLength: URIRef # Specifies the minimum string length of each value node. @@ -66,7 +76,9 @@ class SH(DefinedNamespace): nodeKind: URIRef # Specifies the node kind (e.g. IRI or literal) each value node. nodeValidator: URIRef # The validator(s) used to evaluate a constraint in the context of a node shape. nodes: URIRef # The node expression producing the input nodes of a filter shape expression. - object: URIRef # An expression producing the nodes that shall be inferred as objects. + object: ( + URIRef # An expression producing the nodes that shall be inferred as objects. + ) oneOrMorePath: URIRef # The (single) value of this property represents a path that is matched one or more times. optional: URIRef # Indicates whether a parameter is optional. order: URIRef # Specifies the relative order of this compared to its siblings. For example use 0 for the first, 1 for the second. @@ -78,23 +90,37 @@ class SH(DefinedNamespace): prefixes: URIRef # The prefixes that shall be applied before parsing the associated SPARQL query. property: URIRef # Links a shape to its property shapes. propertyValidator: URIRef # The validator(s) used to evaluate a constraint in the context of a property shape. - qualifiedMaxCount: URIRef # The maximum number of value nodes that can conform to the shape. - qualifiedMinCount: URIRef # The minimum number of value nodes that must conform to the shape. - qualifiedValueShape: URIRef # The shape that a specified number of values must conform to. 
+ qualifiedMaxCount: ( + URIRef # The maximum number of value nodes that can conform to the shape. + ) + qualifiedMinCount: ( + URIRef # The minimum number of value nodes that must conform to the shape. + ) + qualifiedValueShape: ( + URIRef # The shape that a specified number of values must conform to. + ) qualifiedValueShapesDisjoint: URIRef # Can be used to mark the qualified value shape to be disjoint with its sibling shapes. result: URIRef # The validation results contained in a validation report. resultAnnotation: URIRef # Links a SPARQL validator with zero or more sh:ResultAnnotation instances, defining how to derive additional result properties based on the variables of the SELECT query. resultMessage: URIRef # Human-readable messages explaining the cause of the result. resultPath: URIRef # The path of a validation result, based on the path of the validated property shape. resultSeverity: URIRef # The severity of the result, e.g. warning. - returnType: URIRef # The expected type of values returned by the associated function. + returnType: ( + URIRef # The expected type of values returned by the associated function. + ) rule: URIRef # The rules linked to a shape. select: URIRef # The SPARQL SELECT query to execute. severity: URIRef # Defines the severity that validation results produced by a shape must have. Defaults to sh:Violation. - shapesGraph: URIRef # Shapes graphs that should be used when validating this data graph. + shapesGraph: ( + URIRef # Shapes graphs that should be used when validating this data graph. + ) shapesGraphWellFormed: URIRef # If true then the validation engine was certain that the shapes graph has passed all SHACL syntax requirements during the validation process. - sourceConstraint: URIRef # The constraint that was validated when the result was produced. - sourceConstraintComponent: URIRef # The constraint component that is the source of the result. 
+ sourceConstraint: ( + URIRef # The constraint that was validated when the result was produced. + ) + sourceConstraintComponent: ( + URIRef # The constraint component that is the source of the result. + ) sourceShape: URIRef # The shape that is was validated when the result was produced. sparql: URIRef # Links a shape with SPARQL constraints. subject: URIRef # An expression producing the resources that shall be inferred as subjects. @@ -134,18 +160,28 @@ class SH(DefinedNamespace): PropertyShape: URIRef # A property shape is a shape that specifies constraints on the values of a focus node for a given property or path. ResultAnnotation: URIRef # A class of result annotations, which define the rules to derive the values of a given annotation property as extra values for a validation result. Rule: URIRef # The class of SHACL rules. Never instantiated directly. - SPARQLAskExecutable: URIRef # The class of SPARQL executables that are based on an ASK query. + SPARQLAskExecutable: ( + URIRef # The class of SPARQL executables that are based on an ASK query. + ) SPARQLAskValidator: URIRef # The class of validators based on SPARQL ASK queries. The queries are evaluated for each value node and are supposed to return true if the given node conforms. SPARQLConstraint: URIRef # The class of constraints based on SPARQL SELECT queries. - SPARQLConstructExecutable: URIRef # The class of SPARQL executables that are based on a CONSTRUCT query. + SPARQLConstructExecutable: ( + URIRef # The class of SPARQL executables that are based on a CONSTRUCT query. + ) SPARQLExecutable: URIRef # The class of resources that encapsulate a SPARQL query. - SPARQLFunction: URIRef # A function backed by a SPARQL query - either ASK or SELECT. + SPARQLFunction: ( + URIRef # A function backed by a SPARQL query - either ASK or SELECT. + ) SPARQLRule: URIRef # The class of SHACL rules based on SPARQL CONSTRUCT queries. 
- SPARQLSelectExecutable: URIRef # The class of SPARQL executables based on a SELECT query. + SPARQLSelectExecutable: ( + URIRef # The class of SPARQL executables based on a SELECT query. + ) SPARQLSelectValidator: URIRef # The class of validators based on SPARQL SELECT queries. The queries are evaluated for each focus node and are supposed to produce bindings for all focus nodes that do not conform. SPARQLTarget: URIRef # The class of targets that are based on SPARQL queries. SPARQLTargetType: URIRef # The (meta) class for parameterizable targets that are based on SPARQL queries. - SPARQLUpdateExecutable: URIRef # The class of SPARQL executables based on a SPARQL UPDATE. + SPARQLUpdateExecutable: ( + URIRef # The class of SPARQL executables based on a SPARQL UPDATE. + ) Severity: URIRef # The class of validation result severity levels, including violation and warning levels. Shape: URIRef # A shape is a collection of constraints that may be targeted for certain nodes. Target: URIRef # The base class of targets such as those based on SPARQL queries. diff --git a/rdflib/namespace/_SKOS.py b/rdflib/namespace/_SKOS.py index 120019e10..e87df6f09 100644 --- a/rdflib/namespace/_SKOS.py +++ b/rdflib/namespace/_SKOS.py @@ -21,11 +21,15 @@ class SKOS(DefinedNamespace): altLabel: URIRef # An alternative lexical label for a resource. broadMatch: URIRef # skos:broadMatch is used to state a hierarchical mapping link between two conceptual resources in different concept schemes. broader: URIRef # Relates a concept to a concept that is more general in meaning. - broaderTransitive: URIRef # skos:broaderTransitive is a transitive superproperty of skos:broader. + broaderTransitive: ( + URIRef # skos:broaderTransitive is a transitive superproperty of skos:broader. + ) changeNote: URIRef # A note about a modification to a concept. 
closeMatch: URIRef # skos:closeMatch is used to link two concepts that are sufficiently similar that they can be used interchangeably in some information retrieval applications. In order to avoid the possibility of "compound errors" when combining mappings across more than two concept schemes, skos:closeMatch is not declared to be a transitive property. definition: URIRef # A statement or formal explanation of the meaning of a concept. - editorialNote: URIRef # A note for an editor, translator or maintainer of the vocabulary. + editorialNote: ( + URIRef # A note for an editor, translator or maintainer of the vocabulary. + ) exactMatch: URIRef # skos:exactMatch is used to link two concepts, indicating a high degree of confidence that the concepts can be used interchangeably across a wide range of information retrieval applications. skos:exactMatch is a transitive property, and is a sub-property of skos:closeMatch. example: URIRef # An example of the use of a concept. hasTopConcept: URIRef # Relates, by convention, a concept scheme to a concept which is topmost in the broader/narrower concept hierarchies for that scheme, providing an entry point to these hierarchies. @@ -34,16 +38,22 @@ class SKOS(DefinedNamespace): inScheme: URIRef # Relates a resource (for example a concept) to a concept scheme in which it is included. mappingRelation: URIRef # Relates two concepts coming, by convention, from different schemes, and that have comparable meanings member: URIRef # Relates a collection to one of its members. - memberList: URIRef # Relates an ordered collection to the RDF list containing its members. + memberList: ( + URIRef # Relates an ordered collection to the RDF list containing its members. + ) narrowMatch: URIRef # skos:narrowMatch is used to state a hierarchical mapping link between two conceptual resources in different concept schemes. narrower: URIRef # Relates a concept to a concept that is more specific in meaning. 
narrowerTransitive: URIRef # skos:narrowerTransitive is a transitive superproperty of skos:narrower. notation: URIRef # A notation, also known as classification code, is a string of characters such as "T58.5" or "303.4833" used to uniquely identify a concept within the scope of a given concept scheme. note: URIRef # A general note, for any purpose. - prefLabel: URIRef # The preferred lexical label for a resource, in a given language. + prefLabel: ( + URIRef # The preferred lexical label for a resource, in a given language. + ) related: URIRef # Relates a concept to a concept with which there is an associative semantic relationship. relatedMatch: URIRef # skos:relatedMatch is used to state an associative mapping link between two conceptual resources in different concept schemes. - scopeNote: URIRef # A note that helps to clarify the meaning and/or the use of a concept. + scopeNote: ( + URIRef # A note that helps to clarify the meaning and/or the use of a concept. + ) semanticRelation: URIRef # Links a concept to a concept related by meaning. topConceptOf: URIRef # Relates a concept to the concept scheme that it is a top level concept of. diff --git a/rdflib/namespace/_SOSA.py b/rdflib/namespace/_SOSA.py index c2702c526..9f281d56d 100644 --- a/rdflib/namespace/_SOSA.py +++ b/rdflib/namespace/_SOSA.py @@ -21,6 +21,7 @@ class SOSA(DefinedNamespace): FeatureOfInterest: URIRef # The thing whose property is being estimated or calculated in the course of an Observation to arrive at a Result or whose property is being manipulated by an Actuator, or which is being sampled or transformed in an act of Sampling. ObservableProperty: URIRef # An observable quality (property, characteristic) of a FeatureOfInterest. Observation: URIRef # Act of carrying out an (Observation) Procedure to estimate or calculate a value of a property of a FeatureOfInterest. 
Links to a Sensor to describe what made the Observation and how; links to an ObservableProperty to describe what the result is an estimate of, and to a FeatureOfInterest to detail what that property was associated with. + ObservationCollection: URIRef # Collection of one or more observations, whose members share a common value for one or more property Platform: URIRef # A Platform is an entity that hosts other entities, particularly Sensors, Actuators, Samplers, and other Platforms. Procedure: URIRef # A workflow, protocol, plan, algorithm, or computational method specifying how to make an Observation, create a Sample, or make a change to the state of the world (via an Actuator). A Procedure is re-usable, and might be involved in many Observations, Samplings, or Actuations. It explains the steps to be carried out to arrive at reproducible results. Result: URIRef # The Result of an Observation, Actuation, or act of Sampling. To store an observation's simple result value one can use the hasSimpleResult property. @@ -30,14 +31,20 @@ class SOSA(DefinedNamespace): Sensor: URIRef # Device, agent (including humans), or software (simulation) involved in, or implementing, a Procedure. Sensors respond to a stimulus, e.g., a change in the environment, or input data composed from the results of prior Observations, and generate a Result. Sensors can be hosted by Platforms. # http://www.w3.org/2002/07/owl#DatatypeProperty - hasSimpleResult: URIRef # The simple value of an Observation or Actuation or act of Sampling. + hasSimpleResult: ( + URIRef # The simple value of an Observation or Actuation or act of Sampling. + ) resultTime: URIRef # The result time is the instant of time when the Observation, Actuation or Sampling activity was completed. # http://www.w3.org/2002/07/owl#ObjectProperty actsOnProperty: URIRef # Relation between an Actuation and the property of a FeatureOfInterest it is acting upon. 
hasFeatureOfInterest: URIRef # A relation between an Observation and the entity whose quality was observed, or between an Actuation and the entity whose property was modified, or between an act of Sampling and the entity that was sampled. + hasMember: URIRef # Link to a member of a collection of observations that share the same value for one or more of the characteristic properties + hasOriginalSample: URIRef # link to the original sample that is related to the context sample through a chain of isSampleOf relations hasResult: URIRef # Relation linking an Observation or Actuation or act of Sampling and a Result or Sample. hasSample: URIRef # Relation between a FeatureOfInterest and the Sample used to represent it. + hasSampledFeature: URIRef # link to the ultimate feature of interest of the context sample - i.e. the end of a chain of isSampleOf relations + hasUltimateFeatureOfInterest: URIRef # link to the ultimate feature of interest of an observation or act of sampling. This is useful when the proximate feature of interest is a sample of the ultimate feature of interest, directly or trasntitively. hosts: URIRef # Relation between a Platform and a Sensor, Actuator, Sampler, or Platform, hosted or mounted on it. isActedOnBy: URIRef # Relation between an ActuatableProperty of a FeatureOfInterest and an Actuation changing its state. isFeatureOfInterestOf: URIRef # A relation between a FeatureOfInterest and an Observation about it, an Actuation acting on it, or an act of Sampling that sampled it. @@ -49,7 +56,9 @@ class SOSA(DefinedNamespace): madeByActuator: URIRef # Relation linking an Actuation to the Actuator that made that Actuation. madeBySampler: URIRef # Relation linking an act of Sampling to the Sampler (sampling device or entity) that made it. madeBySensor: URIRef # Relation between an Observation and the Sensor which made the Observation. - madeObservation: URIRef # Relation between a Sensor and an Observation made by the Sensor. 
+ madeObservation: ( + URIRef # Relation between a Sensor and an Observation made by the Sensor. + ) madeSampling: URIRef # Relation between a Sampler (sampling device or entity) and the Sampling act it performed. observedProperty: URIRef # Relation linking an Observation to the property that was observed. The ObservableProperty should be a property of the FeatureOfInterest (linked by hasFeatureOfInterest) of this Observation. observes: URIRef # Relation between a Sensor and an ObservableProperty that it is capable of sensing. diff --git a/rdflib/namespace/_SSN.py b/rdflib/namespace/_SSN.py index df5f3a916..5f1851179 100644 --- a/rdflib/namespace/_SSN.py +++ b/rdflib/namespace/_SSN.py @@ -23,7 +23,9 @@ class SSN(DefinedNamespace): System: URIRef # System is a unit of abstraction for pieces of infrastructure that implement Procedures. A System may have components, its subsystems, which are other systems. # http://www.w3.org/2002/07/owl#FunctionalProperty - wasOriginatedBy: URIRef # Relation between an Observation and the Stimulus that originated it. + wasOriginatedBy: ( + URIRef # Relation between an Observation and the Stimulus that originated it. + ) # http://www.w3.org/2002/07/owl#ObjectProperty deployedOnPlatform: URIRef # Relation between a Deployment and the Platform on which the Systems are deployed. diff --git a/rdflib/namespace/_TIME.py b/rdflib/namespace/_TIME.py index aa7bbecdf..0370180ec 100644 --- a/rdflib/namespace/_TIME.py +++ b/rdflib/namespace/_TIME.py @@ -44,7 +44,9 @@ class TIME(DefinedNamespace): hour: URIRef # Hour position in a calendar-clock system. 
hours: URIRef # length of, or element of the length of, a temporal extent expressed in hours inXSDDate: URIRef # Position of an instant, expressed using xsd:date - inXSDDateTimeStamp: URIRef # Position of an instant, expressed using xsd:dateTimeStamp + inXSDDateTimeStamp: ( + URIRef # Position of an instant, expressed using xsd:dateTimeStamp + ) inXSDgYear: URIRef # Position of an instant, expressed using xsd:gYear inXSDgYearMonth: URIRef # Position of an instant, expressed using xsd:gYearMonth minute: URIRef # Minute position in a calendar-clock system. @@ -74,7 +76,9 @@ class TIME(DefinedNamespace): # http://www.w3.org/2002/07/owl#ObjectProperty after: URIRef # Gives directionality to time. If a temporal entity T1 is after another temporal entity T2, then the beginning of T1 is after the end of T2. - dayOfWeek: URIRef # The day of week, whose value is a member of the class time:DayOfWeek + dayOfWeek: ( + URIRef # The day of week, whose value is a member of the class time:DayOfWeek + ) hasBeginning: URIRef # Beginning of a temporal entity. hasDateTimeDescription: URIRef # Value of DateTimeInterval expressed as a structured value. The beginning and end of the interval coincide with the limits of the shortest element in the description. hasDuration: URIRef # Duration of a temporal entity, event or activity, or thing, expressed as a scaled value @@ -82,7 +86,9 @@ class TIME(DefinedNamespace): hasEnd: URIRef # End of a temporal entity. hasTemporalDuration: URIRef # Duration of a temporal entity. hasTime: URIRef # Supports the association of a temporal entity (instant or interval) to any thing - inDateTime: URIRef # Position of an instant, expressed using a structured description + inDateTime: ( + URIRef # Position of an instant, expressed using a structured description + ) inTemporalPosition: URIRef # Position of a time instant inTimePosition: URIRef # Position of a time instant expressed as a TimePosition inside: URIRef # An instant that falls inside the interval. 
It is not intended to include beginnings and ends of intervals. diff --git a/rdflib/namespace/_VOID.py b/rdflib/namespace/_VOID.py index db81e72e6..5905e789d 100644 --- a/rdflib/namespace/_VOID.py +++ b/rdflib/namespace/_VOID.py @@ -26,7 +26,9 @@ class VOID(DefinedNamespace): distinctObjects: URIRef # The total number of distinct objects in a void:Dataset. In other words, the number of distinct resources that occur in the object position of triples in the dataset. Literals are included in this count. distinctSubjects: URIRef # The total number of distinct subjects in a void:Dataset. In other words, the number of distinct resources that occur in the subject position of triples in the dataset. documents: URIRef # The total number of documents, for datasets that are published as a set of individual documents, such as RDF/XML documents or RDFa-annotated web pages. Non-RDF documents, such as web pages in HTML or images, are usually not included in this count. This property is intended for datasets where the total number of triples or entities is hard to determine. void:triples or void:entities should be preferred where practical. - entities: URIRef # The total number of entities that are described in a void:Dataset. + entities: ( + URIRef # The total number of entities that are described in a void:Dataset. + ) exampleResource: URIRef # example resource of dataset feature: URIRef # feature inDataset: URIRef # Points to the void:Dataset that a document is a part of. @@ -42,8 +44,12 @@ class VOID(DefinedNamespace): subset: URIRef # has subset target: URIRef # One of the two datasets linked by the Linkset. triples: URIRef # The total number of triples contained in a void:Dataset. - uriLookupEndpoint: URIRef # Defines a simple URI look-up protocol for accessing a dataset. - uriRegexPattern: URIRef # Defines a regular expression pattern matching URIs in the dataset. + uriLookupEndpoint: ( + URIRef # Defines a simple URI look-up protocol for accessing a dataset. 
+ ) + uriRegexPattern: ( + URIRef # Defines a regular expression pattern matching URIs in the dataset. + ) uriSpace: URIRef # A URI that is a common string prefix of all the entity URIs in a void:Dataset. vocabulary: URIRef # A vocabulary that is used in the dataset. diff --git a/rdflib/namespace/_WGS.py b/rdflib/namespace/_WGS.py index b73222cf3..1416154cd 100644 --- a/rdflib/namespace/_WGS.py +++ b/rdflib/namespace/_WGS.py @@ -7,7 +7,7 @@ class WGS(DefinedNamespace): Basic Geo (WGS84 lat/long) Vocabulary The HTML Specification for the vocabulary can be found - `here `. + here . """ _NS = Namespace("https://www.w3.org/2003/01/geo/wgs84_pos#") diff --git a/rdflib/namespace/_XSD.py b/rdflib/namespace/_XSD.py index af9cad75d..f12524ff2 100644 --- a/rdflib/namespace/_XSD.py +++ b/rdflib/namespace/_XSD.py @@ -47,9 +47,15 @@ class XSD(DefinedNamespace): language: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#language long: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#long negativeInteger: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#negativeInteger - nonNegativeInteger: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#nonNegativeInteger - nonPositiveInteger: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#nonPositiveInteger - normalizedString: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#normalizedString + nonNegativeInteger: ( + URIRef # see: http://www.w3.org/TR/xmlschema11-2/#nonNegativeInteger + ) + nonPositiveInteger: ( + URIRef # see: http://www.w3.org/TR/xmlschema11-2/#nonPositiveInteger + ) + normalizedString: ( + URIRef # see: http://www.w3.org/TR/xmlschema11-2/#normalizedString + ) positiveInteger: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#positiveInteger short: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#short string: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#string @@ -59,7 +65,9 @@ class XSD(DefinedNamespace): unsignedInt: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#unsignedInt unsignedLong: URIRef # see: 
http://www.w3.org/TR/xmlschema11-2/#unsignedLong unsignedShort: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#unsignedShort - yearMonthDuration: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#yearMonthDuration + yearMonthDuration: ( + URIRef # see: http://www.w3.org/TR/xmlschema11-2/#yearMonthDuration + ) # fundamental facets - https://www.w3.org/TR/xmlschema11-2/#rf-fund-facets ordered: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-ordered @@ -81,7 +89,9 @@ class XSD(DefinedNamespace): totalDigits: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-totalDigits fractionDigits: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-fractionDigits Assertions: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#rf-assertions - explicitTimezone: URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-explicitTimezone + explicitTimezone: ( + URIRef # see: http://www.w3.org/TR/xmlschema11-2/#rf-explicitTimezone + ) # The Seven-property Model - https://www.w3.org/TR/xmlschema11-2/#theSevenPropertyModel year: URIRef # see: https://www.w3.org/TR/xmlschema11-2/#vp-dt-http://www.w3.org/TR/xmlschema11-2/#rf-whiteSpace diff --git a/rdflib/namespace/__init__.py b/rdflib/namespace/__init__.py index 3e591fcf7..4077b0be3 100644 --- a/rdflib/namespace/__init__.py +++ b/rdflib/namespace/__init__.py @@ -1,18 +1,4 @@ -import logging -import warnings -from functools import lru_cache -from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Tuple, Union -from unicodedata import category -from urllib.parse import urldefrag, urljoin - -from rdflib.term import URIRef, Variable, _is_valid_uri - -if TYPE_CHECKING: - from rdflib.graph import Graph - from rdflib.store import Store - -__doc__ = """ +""" =================== Namespace Utilities =================== @@ -84,6 +70,23 @@ rdflib.term.URIRef('http://www.w3.org/2000/01/rdf-schema#seeAlso') """ +from __future__ import annotations + +import logging +import warnings +from functools 
import lru_cache +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Set, Tuple, Union +from unicodedata import category +from urllib.parse import urldefrag, urljoin + +from rdflib.term import URIRef, Variable, _is_valid_uri + +if TYPE_CHECKING: + from rdflib.graph import Graph + from rdflib.store import Store + + __all__ = [ "is_ncname", "split_uri", @@ -141,7 +144,7 @@ class Namespace(str): False """ - def __new__(cls, value: Union[str, bytes]) -> "Namespace": + def __new__(cls, value: Union[str, bytes]) -> Namespace: try: rt = str.__new__(cls, value) except UnicodeDecodeError: @@ -199,7 +202,7 @@ class URIPattern(str): """ - def __new__(cls, value: Union[str, bytes]) -> "URIPattern": + def __new__(cls, value: Union[str, bytes]) -> URIPattern: try: rt = str.__new__(cls, value) except UnicodeDecodeError: @@ -230,6 +233,13 @@ def __repr__(self) -> str: "_underscore_num", } +# Some libraries probe classes for certain attributes or items. +# This is a list of those attributes and items that should be ignored. +_IGNORED_ATTR_LOOKUP: Set[str] = { + "_pytestfixturefunction", # pytest tries to look this up on Defined namespaces + "_partialmethod", # sphinx tries to look this up during autodoc generation +} + class DefinedNamespaceMeta(type): """Utility metaclass for generating URIRefs with a common prefix.""" @@ -243,10 +253,13 @@ class DefinedNamespaceMeta(type): @lru_cache(maxsize=None) def __getitem__(cls, name: str, default=None) -> URIRef: name = str(name) + if name in _DFNS_RESERVED_ATTRS: raise AttributeError( f"DefinedNamespace like object has no attribute {name!r}" ) + elif name in _IGNORED_ATTR_LOOKUP: + raise KeyError() if str(name).startswith("__"): # NOTE on type ignore: This seems to be a real bug, super() does not # implement this method, it will fail if it is ever reached. 
@@ -262,6 +275,8 @@ def __getitem__(cls, name: str, default=None) -> URIRef: return cls._NS[name] def __getattr__(cls, name: str): + if name in _IGNORED_ATTR_LOOKUP: + raise AttributeError() return cls.__getitem__(name) def __repr__(cls) -> str: @@ -297,7 +312,7 @@ def __dir__(cls) -> Iterable[str]: values = {cls[str(x)] for x in attrs} return values - def as_jsonld_context(self, pfx: str) -> dict: + def as_jsonld_context(self, pfx: str) -> dict: # noqa: N804 """Returns this DefinedNamespace as a a JSON-LD 'context' object""" terms = {pfx: str(self._NS)} for key, term in self.__annotations__.items(): @@ -426,9 +441,7 @@ class NamespaceManager: >>> """ - def __init__( - self, graph: "Graph", bind_namespaces: "_NamespaceSetString" = "rdflib" - ): + def __init__(self, graph: Graph, bind_namespaces: _NamespaceSetString = "rdflib"): self.graph = graph self.__cache: Dict[str, Tuple[str, URIRef, str]] = {} self.__cache_strict: Dict[str, Tuple[str, URIRef, str]] = {} @@ -480,7 +493,7 @@ def reset(self) -> None: insert_trie(self.__trie, str(n)) @property - def store(self) -> "Store": + def store(self) -> Store: return self.graph.store def qname(self, uri: str) -> str: @@ -526,7 +539,7 @@ def qname_strict(self, uri: str) -> str: else: return ":".join((prefix, name)) - def normalizeUri(self, rdfTerm: str) -> str: + def normalizeUri(self, rdfTerm: str) -> str: # noqa: N802, N803 """ Takes an RDF Term and 'normalizes' it into a QName (using the registered prefix) or (unlike compute_qname) the Notation 3 @@ -548,7 +561,7 @@ def normalizeUri(self, rdfTerm: str) -> str: elif prefix is None: return "<%s>" % rdfTerm else: - qNameParts = self.compute_qname(rdfTerm) + qNameParts = self.compute_qname(rdfTerm) # noqa: N806 return ":".join([qNameParts[0], qNameParts[-1]]) def compute_qname(self, uri: str, generate: bool = True) -> Tuple[str, URIRef, str]: @@ -669,7 +682,7 @@ def expand_curie(self, curie: str) -> URIRef: Raises exception if a namespace is not bound to the prefix. 
""" - if not type(curie) is str: + if not type(curie) is str: # noqa: E714 raise TypeError(f"Argument must be a string, not {type(curie).__name__}.") parts = curie.split(":", 1) if len(parts) != 2: @@ -836,7 +849,7 @@ def is_ncname(name: str) -> int: if first == "_" or category(first) in NAME_START_CATEGORIES: for i in range(1, len(name)): c = name[i] - if not category(c) in NAME_CATEGORIES: + if not category(c) in NAME_CATEGORIES: # noqa: E713 if c in ALLOWED_NAME_CHARS: continue return 0 @@ -857,7 +870,7 @@ def split_uri( length = len(uri) for i in range(0, length): c = uri[-i - 1] - if not category(c) in NAME_CATEGORIES: + if not category(c) in NAME_CATEGORIES: # noqa: E713 if c in ALLOWED_NAME_CHARS: continue for j in range(-1 - i, length): diff --git a/rdflib/parser.py b/rdflib/parser.py index a35c1d825..1c652ca21 100644 --- a/rdflib/parser.py +++ b/rdflib/parser.py @@ -9,6 +9,7 @@ want to do so through the Graph class parse method. """ + from __future__ import annotations import codecs @@ -26,6 +27,7 @@ TextIO, Tuple, Union, + cast, ) from urllib.parse import urljoin from urllib.request import Request, url2pathname @@ -39,10 +41,14 @@ if TYPE_CHECKING: from email.message import Message + from io import BufferedReader from urllib.response import addinfourl + from typing_extensions import Buffer + from rdflib.graph import Graph + __all__ = [ "Parser", "InputSource", @@ -59,39 +65,357 @@ class Parser: def __init__(self): pass - def parse(self, source: "InputSource", sink: "Graph") -> None: + def parse(self, source: InputSource, sink: Graph) -> None: pass class BytesIOWrapper(BufferedIOBase): - __slots__ = ("wrapped", "encoded", "encoding") + __slots__ = ( + "wrapped", + "enc_str", + "text_str", + "encoding", + "encoder", + "has_read1", + "has_seek", + "_name", + "_fileno", + "_isatty", + "_leftover", + "_bytes_per_char", + "_text_bytes_offset", + ) - def __init__(self, wrapped: str, encoding="utf-8"): + def __init__(self, wrapped: Union[str, StringIO, 
TextIOBase], encoding="utf-8"): super(BytesIOWrapper, self).__init__() self.wrapped = wrapped self.encoding = encoding - self.encoded = None + self.encoder = codecs.getencoder(self.encoding) + self.enc_str: Optional[Union[BytesIO, BufferedIOBase]] = None + self.text_str: Optional[Union[StringIO, TextIOBase]] = None + self.has_read1: Optional[bool] = None + self.has_seek: Optional[bool] = None + self._name: Optional[str] = None + self._fileno: Optional[Union[int, BaseException]] = None + self._isatty: Optional[Union[bool, BaseException]] = None + self._leftover: bytes = b"" + self._text_bytes_offset: int = 0 + norm_encoding = encoding.lower().replace("_", "-") + if norm_encoding in ("utf-8", "utf8", "u8", "cp65001"): + # utf-8 has a variable number of bytes per character, 1-4 + self._bytes_per_char: int = 1 # assume average of 1 byte per character + elif norm_encoding in ( + "latin1", + "latin-1", + "iso-8859-1", + "iso8859-1", + "ascii", + "us-ascii", + ): + # these are all 1-byte-per-character encodings + self._bytes_per_char = 1 + elif norm_encoding.startswith("utf-16") or norm_encoding.startswith("utf16"): + # utf-16 has a variable number of bytes per character, 2-3 + self._bytes_per_char = 2 # assume average of 2 bytes per character + elif norm_encoding.startswith("utf-32") or norm_encoding.startswith("utf32"): + # utf-32 is fixed length with 4 bytes per character + self._bytes_per_char = 4 + else: + # not sure, just assume it is 2 bytes per character + self._bytes_per_char = 2 + + def _init(self): + name: Optional[str] = None + if isinstance(self.wrapped, str): + b, blen = self.encoder(self.wrapped) + self.enc_str = BytesIO(b) + name = "string" + elif isinstance(self.wrapped, TextIOWrapper): + inner = self.wrapped.buffer + # type error: TextIOWrapper.buffer cannot be a BytesIOWrapper + if isinstance(inner, BytesIOWrapper): # type: ignore[unreachable] + raise Exception( + "BytesIOWrapper cannot be wrapped in TextIOWrapper, " + "then wrapped in another 
BytesIOWrapper" + ) + else: + self.enc_str = cast(BufferedIOBase, inner) + elif isinstance(self.wrapped, (TextIOBase, StringIO)): + self.text_str = self.wrapped + use_stream: Union[BytesIO, StringIO, BufferedIOBase, TextIOBase] + if self.enc_str is not None: + use_stream = self.enc_str + elif self.text_str is not None: + use_stream = self.text_str + else: + raise Exception("No stream to read from") + if name is None: + try: + name = use_stream.name # type: ignore[union-attr] + except AttributeError: + name = "stream" + self.has_read1 = hasattr(use_stream, "read1") + try: + self.has_seek = use_stream.seekable() + except AttributeError: + self.has_seek = hasattr(use_stream, "seek") + + self._name = name + + def _check_fileno(self): + use_stream: Union[BytesIO, StringIO, BufferedIOBase, TextIOBase] + if self.enc_str is None and self.text_str is None: + self._init() + if self.enc_str is not None: + use_stream = self.enc_str + elif self.text_str is not None: + use_stream = self.text_str + try: + self._fileno = use_stream.fileno() + except OSError as e: + self._fileno = e + except AttributeError: + self._fileno = -1 + + def _check_isatty(self): + use_stream: Union[BytesIO, StringIO, BufferedIOBase, TextIOBase] + if self.enc_str is None and self.text_str is None: + self._init() + if self.enc_str is not None: + use_stream = self.enc_str + elif self.text_str is not None: + use_stream = self.text_str + try: + self._isatty = use_stream.isatty() + except OSError as e: + self._isatty = e + except AttributeError: + self._isatty = False + + @property + def name(self) -> Any: + if self._name is None: + self._init() + return self._name + + @property + def closed(self) -> bool: + if self.enc_str is None and self.text_str is None: + return False + closed: Optional[bool] = None + if self.enc_str is not None: + try: + closed = self.enc_str.closed + except AttributeError: + closed = None + elif self.text_str is not None: + try: + closed = self.text_str.closed + except AttributeError: + 
closed = None + return False if closed is None else closed - def read(self, *args, **kwargs): - if self.encoded is None: - b, blen = codecs.getencoder(self.encoding)(self.wrapped) - self.encoded = BytesIO(b) - return self.encoded.read(*args, **kwargs) + def readable(self) -> bool: + return True - def read1(self, *args, **kwargs): - if self.encoded is None: - b = codecs.getencoder(self.encoding)(self.wrapped) - self.encoded = BytesIO(b) - return self.encoded.read1(*args, **kwargs) + def writable(self) -> bool: + return False - def readinto(self, *args, **kwargs): - raise NotImplementedError() + def truncate(self, size: Optional[int] = None) -> int: + raise NotImplementedError("Cannot truncate on BytesIOWrapper") - def readinto1(self, *args, **kwargs): - raise NotImplementedError() + def isatty(self) -> bool: + if self._isatty is None: + self._check_isatty() + if isinstance(self._isatty, BaseException): + raise self._isatty + else: + return bool(self._isatty) + + def fileno(self) -> int: + if self._fileno is None: + self._check_fileno() + if isinstance(self._fileno, BaseException): + raise self._fileno + else: + return -1 if self._fileno is None else self._fileno - def write(self, *args, **kwargs): - raise NotImplementedError() + def close(self): + if self.enc_str is None and self.text_str is None: + return + if self.enc_str is not None: + try: + self.enc_str.close() + except AttributeError: + pass + elif self.text_str is not None: + try: + self.text_str.close() + except AttributeError: + pass + + def flush(self): + return # Does nothing on read-only streams + + def _read_bytes_from_text_stream(self, size: Optional[int] = -1, /) -> bytes: + if TYPE_CHECKING: + assert self.text_str is not None + if size is None or size < 0: + try: + ret_str: str = self.text_str.read() + except EOFError: + ret_str = "" + ret_encoded, enc_len = self.encoder(ret_str) + if self._leftover: + ret_bytes = self._leftover + ret_encoded + self._leftover = b"" + else: + ret_bytes = ret_encoded + 
elif size == len(self._leftover): + ret_bytes = self._leftover + self._leftover = b"" + elif size < len(self._leftover): + ret_bytes = self._leftover[:size] + self._leftover = self._leftover[size:] + else: + d, m = divmod(size, self._bytes_per_char) + get_per_loop = int(d) + (1 if m > 0 else 0) + got_bytes: bytes = self._leftover + while len(got_bytes) < size: + try: + got_str: str = self.text_str.read(get_per_loop) + except EOFError: + got_str = "" + if len(got_str) < 1: + break + ret_encoded, enc_len = self.encoder(got_str) + got_bytes += ret_encoded + if len(got_bytes) == size: + self._leftover = b"" + ret_bytes = got_bytes + else: + ret_bytes = got_bytes[:size] + self._leftover = got_bytes[size:] + del got_bytes + self._text_bytes_offset += len(ret_bytes) + return ret_bytes + + def read(self, size: Optional[int] = -1, /) -> bytes: + """ + Read at most size bytes, returned as a bytes object. + + If the size argument is negative or omitted read until EOF is reached. + Return an empty bytes object if already at EOF. + """ + if size is not None and size == 0: + return b"" + if self.enc_str is None and self.text_str is None: + self._init() + if self.enc_str is not None: + ret_bytes = self.enc_str.read(size) + else: + ret_bytes = self._read_bytes_from_text_stream(size) + return ret_bytes + + def read1(self, size: Optional[int] = -1, /) -> bytes: + """ + Read at most size bytes, with at most one call to the underlying raw stream’s + read() or readinto() method. Returned as a bytes object. + + If the size argument is negative or omitted, read until EOF is reached. + Return an empty bytes object at EOF. 
+ """ + if (self.enc_str is None and self.text_str is None) or self.has_read1 is None: + self._init() + if not self.has_read1: + raise NotImplementedError() + if self.enc_str is not None: + if size is None or size < 0: + return self.enc_str.read1() + return self.enc_str.read1(size) + raise NotImplementedError("read1() not supported for TextIO in BytesIOWrapper") + + def readinto(self, b: Buffer, /) -> int: + """ + Read len(b) bytes into buffer b. + + Returns number of bytes read (0 for EOF), or error if the object + is set not to block and has no data to read. + """ + if TYPE_CHECKING: + assert isinstance(b, (memoryview, bytearray)) + if len(b) == 0: + return 0 + if self.enc_str is None and self.text_str is None: + self._init() + if self.enc_str is not None: + return self.enc_str.readinto(b) + else: + size = len(b) + read_data: bytes = self._read_bytes_from_text_stream(size) + read_len = len(read_data) + if read_len == 0: + return 0 + b[:read_len] = read_data + return read_len + + def readinto1(self, b: Buffer, /) -> int: + """ + Read len(b) bytes into buffer b, with at most one call to the underlying raw + stream's read() or readinto() method. + + Returns number of bytes read (0 for EOF), or error if the object + is set not to block and has no data to read. 
+ """ + if TYPE_CHECKING: + assert isinstance(b, (memoryview, bytearray)) + if (self.enc_str is None and self.text_str is None) or self.has_read1 is None: + self._init() + if not self.has_read1: + raise NotImplementedError() + if self.enc_str is not None: + return self.enc_str.readinto1(b) + raise NotImplementedError( + "readinto1() not supported for TextIO in BytesIOWrapper" + ) + + def seek(self, offset: int, whence: int = 0, /) -> int: + if self.has_seek is not None and not self.has_seek: + raise NotImplementedError() + if (self.enc_str is None and self.text_str is None) or self.has_seek is None: + self._init() + + if not whence == 0: + raise NotImplementedError("Only SEEK_SET is supported on BytesIOWrapper") + if offset != 0: + raise NotImplementedError( + "Only seeking to zero is supported on BytesIOWrapper" + ) + if self.enc_str is not None: + self.enc_str.seek(offset, whence) + elif self.text_str is not None: + self.text_str.seek(offset, whence) + self._text_bytes_offset = 0 + self._leftover = b"" + return 0 + + def seekable(self): + if (self.enc_str is None and self.text_str is None) or self.has_seek is None: + self._init() + return self.has_seek + + def tell(self) -> int: + if self.has_seek is not None and not self.has_seek: + raise NotImplementedError("Cannot tell() pos because file is not seekable.") + if self.enc_str is not None: + try: + self._text_bytes_offset = self.enc_str.tell() + except AttributeError: + pass + return self._text_bytes_offset + + def write(self, b, /): + raise NotImplementedError("Cannot write to a BytesIOWrapper") class InputSource(xmlreader.InputSource): @@ -199,7 +523,7 @@ class URLInputSource(InputSource): links: List[str] @classmethod - def getallmatchingheaders(cls, message: "Message", name) -> List[str]: + def getallmatchingheaders(cls, message: Message, name) -> List[str]: # This is reimplemented here, because the method # getallmatchingheaders from HTTPMessage is broken since Python 3.0 name = name.lower() @@ -249,9 +573,9 
@@ def __init__(self, system_id: Optional[str] = None, format: Optional[str] = None elif format == "trix": myheaders["Accept"] = "application/trix, */*;q=0.1" elif format == "json-ld": - myheaders[ - "Accept" - ] = "application/ld+json, application/json;q=0.9, */*;q=0.1" + myheaders["Accept"] = ( + "application/ld+json, application/json;q=0.9, */*;q=0.1" + ) else: # if format not given, create an Accept header from all registered # parser Media Types @@ -295,7 +619,10 @@ def __repr__(self) -> str: class FileInputSource(InputSource): def __init__( - self, file: Union[BinaryIO, TextIO, TextIOBase, RawIOBase, BufferedIOBase] + self, + file: Union[BinaryIO, TextIO, TextIOBase, RawIOBase, BufferedIOBase], + /, + encoding: Optional[str] = None, ): base = pathlib.Path.cwd().as_uri() system_id = URIRef(pathlib.Path(file.name).absolute().as_uri(), base=base) # type: ignore[union-attr] @@ -308,11 +635,18 @@ def __init__( b = file.buffer # type: ignore[attr-defined] self.setByteStream(b) except (AttributeError, LookupError): - self.setByteStream(file) + self.setByteStream(BytesIOWrapper(file, encoding=file.encoding)) else: + if TYPE_CHECKING: + assert isinstance(file, BufferedReader) self.setByteStream(file) - # We cannot set characterStream here because - # we do not know the Raw Bytes File encoding. + if encoding is not None: + self.setEncoding(encoding) + self.setCharacterStream(TextIOWrapper(file, encoding=encoding)) + else: + # We cannot set characterStream here because + # we do not know the Raw Bytes File encoding. 
+ pass def __repr__(self) -> str: return repr(self.file) diff --git a/rdflib/paths.py b/rdflib/paths.py index df7136178..3692bad45 100644 --- a/rdflib/paths.py +++ b/rdflib/paths.py @@ -1,6 +1,4 @@ -from __future__ import annotations - -__doc__ = r""" +r""" This module implements the SPARQL 1.1 Property path operators, as defined in: @@ -182,8 +180,10 @@ """ +from __future__ import annotations import warnings +from abc import ABC, abstractmethod from functools import total_ordering from typing import ( TYPE_CHECKING, @@ -213,21 +213,32 @@ ZeroOrOne = "?" -@total_ordering -class Path: - __or__: Callable[["Path", Union["URIRef", "Path"]], "AlternativePath"] - __invert__: Callable[["Path"], "InvPath"] - __neg__: Callable[["Path"], "NegatedPath"] - __truediv__: Callable[["Path", Union["URIRef", "Path"]], "SequencePath"] - __mul__: Callable[["Path", str], "MulPath"] +def _n3( + arg: Union[URIRef, Path], namespace_manager: Optional[NamespaceManager] = None +) -> str: + if isinstance(arg, (SequencePath, AlternativePath)) and len(arg.args) > 1: + return "(%s)" % arg.n3(namespace_manager) + return arg.n3(namespace_manager) + +@total_ordering +class Path(ABC): + __or__: Callable[[Path, Union[URIRef, Path]], AlternativePath] + __invert__: Callable[[Path], InvPath] + __neg__: Callable[[Path], NegatedPath] + __truediv__: Callable[[Path, Union[URIRef, Path]], SequencePath] + __mul__: Callable[[Path, str], MulPath] + + @abstractmethod def eval( self, - graph: "Graph", - subj: Optional["_SubjectType"] = None, - obj: Optional["_ObjectType"] = None, - ) -> Iterator[Tuple["_SubjectType", "_ObjectType"]]: - raise NotImplementedError() + graph: Graph, + subj: Optional[_SubjectType] = None, + obj: Optional[_ObjectType] = None, + ) -> Iterator[Tuple[_SubjectType, _ObjectType]]: ... + + @abstractmethod + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: ... 
def __hash__(self): return hash(repr(self)) @@ -249,9 +260,9 @@ def __init__(self, arg: Union[Path, URIRef]): def eval( self, - graph: "Graph", - subj: Optional["_SubjectType"] = None, - obj: Optional["_ObjectType"] = None, + graph: Graph, + subj: Optional[_SubjectType] = None, + obj: Optional[_ObjectType] = None, ) -> Generator[Tuple[_ObjectType, _SubjectType], None, None]: for s, o in eval_path(graph, (obj, self.arg, subj)): yield o, s @@ -259,9 +270,8 @@ def eval( def __repr__(self) -> str: return "Path(~%s)" % (self.arg,) - def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: - # type error: Item "Path" of "Union[Path, URIRef]" has no attribute "n3" [union-attr] - return "^%s" % self.arg.n3(namespace_manager) # type: ignore[union-attr] + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + return "^%s" % _n3(self.arg, namespace_manager) class SequencePath(Path): @@ -275,9 +285,9 @@ def __init__(self, *args: Union[Path, URIRef]): def eval( self, - graph: "Graph", - subj: Optional["_SubjectType"] = None, - obj: Optional["_ObjectType"] = None, + graph: Graph, + subj: Optional[_SubjectType] = None, + obj: Optional[_ObjectType] = None, ) -> Generator[Tuple[_SubjectType, _ObjectType], None, None]: def _eval_seq( paths: List[Union[Path, URIRef]], @@ -317,9 +327,8 @@ def _eval_seq_bw( def __repr__(self) -> str: return "Path(%s)" % " / ".join(str(x) for x in self.args) - def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: - # type error: Item "Path" of "Union[Path, URIRef]" has no attribute "n3" [union-attr] - return "/".join(a.n3(namespace_manager) for a in self.args) # type: ignore[union-attr] + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + return "/".join(_n3(a, namespace_manager) for a in self.args) class AlternativePath(Path): @@ -333,9 +342,9 @@ def __init__(self, *args: Union[Path, URIRef]): def eval( self, - graph: "Graph", - subj: Optional["_SubjectType"] = 
None, - obj: Optional["_ObjectType"] = None, + graph: Graph, + subj: Optional[_SubjectType] = None, + obj: Optional[_ObjectType] = None, ) -> Generator[Tuple[_SubjectType, _ObjectType], None, None]: for x in self.args: for y in eval_path(graph, (subj, x, obj)): @@ -344,9 +353,8 @@ def eval( def __repr__(self) -> str: return "Path(%s)" % " | ".join(str(x) for x in self.args) - def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: - # type error: Item "Path" of "Union[Path, URIRef]" has no attribute "n3" [union-attr] - return "|".join(a.n3(namespace_manager) for a in self.args) # type: ignore[union-attr] + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + return "|".join(_n3(a, namespace_manager) for a in self.args) class MulPath(Path): @@ -368,9 +376,9 @@ def __init__(self, path: Union[Path, URIRef], mod: _MulPathMod): def eval( self, - graph: "Graph", - subj: Optional["_SubjectType"] = None, - obj: Optional["_ObjectType"] = None, + graph: Graph, + subj: Optional[_SubjectType] = None, + obj: Optional[_ObjectType] = None, first: bool = True, ) -> Generator[Tuple[_SubjectType, _ObjectType], None, None]: if self.zero and first: @@ -469,9 +477,8 @@ def _all_fwd_paths() -> Generator[Tuple[_SubjectType, _ObjectType], None, None]: def __repr__(self) -> str: return "Path(%s%s)" % (self.path, self.mod) - def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: - # type error: Item "Path" of "Union[Path, URIRef]" has no attribute "n3" [union-attr] - return "%s%s" % (self.path.n3(namespace_manager), self.mod) # type: ignore[union-attr] + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + return "%s%s" % (_n3(self.path, namespace_manager), self.mod) class NegatedPath(Path): @@ -504,9 +511,8 @@ def eval(self, graph, subj=None, obj=None): def __repr__(self) -> str: return "Path(! 
%s)" % ",".join(str(x) for x in self.args) - def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: - # type error: Item "Path" of "Union[Path, URIRef]" has no attribute "n3" [union-attr] - return "!(%s)" % ("|".join(arg.n3(namespace_manager) for arg in self.args)) # type: ignore[union-attr] + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: + return "!(%s)" % ("|".join(_n3(arg, namespace_manager) for arg in self.args)) class PathList(list): @@ -534,9 +540,9 @@ def path_sequence(self: Union[URIRef, Path], other: Union[URIRef, Path]): def evalPath( # noqa: N802 graph: Graph, t: Tuple[ - Optional["_SubjectType"], + Optional[_SubjectType], Union[None, Path, _PredicateType], - Optional["_ObjectType"], + Optional[_ObjectType], ], ) -> Iterator[Tuple[_SubjectType, _ObjectType]]: warnings.warn( @@ -552,9 +558,9 @@ def evalPath( # noqa: N802 def eval_path( graph: Graph, t: Tuple[ - Optional["_SubjectType"], + Optional[_SubjectType], Union[None, Path, _PredicateType], - Optional["_ObjectType"], + Optional[_ObjectType], ], ) -> Iterator[Tuple[_SubjectType, _ObjectType]]: return ((s, o) for s, p, o in graph.triples(t)) diff --git a/rdflib/plugin.py b/rdflib/plugin.py index 676ffbaa8..921f218a7 100644 --- a/rdflib/plugin.py +++ b/rdflib/plugin.py @@ -25,6 +25,8 @@ """ +from __future__ import annotations + from importlib.metadata import EntryPoint, entry_points from typing import ( TYPE_CHECKING, @@ -73,10 +75,10 @@ "rdf.plugins.updateprocessor": UpdateProcessor, } -_plugins: Dict[Tuple[str, Type[Any]], "Plugin"] = {} +_plugins: Dict[Tuple[str, Type[Any]], Plugin] = {} -class PluginException(Error): +class PluginException(Error): # noqa: N818 pass @@ -102,7 +104,7 @@ def getClass(self) -> Type[PluginT]: # noqa: N802 class PKGPlugin(Plugin[PluginT]): - def __init__(self, name: str, kind: Type[PluginT], ep: "EntryPoint"): + def __init__(self, name: str, kind: Type[PluginT], ep: EntryPoint): self.name = name self.kind = kind self.ep 
= ep @@ -152,13 +154,11 @@ def get(name: str, kind: Type[PluginT]) -> Type[PluginT]: @overload def plugins( name: Optional[str] = ..., kind: Type[PluginT] = ... -) -> Iterator[Plugin[PluginT]]: - ... +) -> Iterator[Plugin[PluginT]]: ... @overload -def plugins(name: Optional[str] = ..., kind: None = ...) -> Iterator[Plugin]: - ... +def plugins(name: Optional[str] = ..., kind: None = ...) -> Iterator[Plugin]: ... def plugins( diff --git a/rdflib/plugins/parsers/RDFVOC.py b/rdflib/plugins/parsers/RDFVOC.py index 40f508c4e..33f9c87d5 100644 --- a/rdflib/plugins/parsers/RDFVOC.py +++ b/rdflib/plugins/parsers/RDFVOC.py @@ -1,4 +1,4 @@ -from rdflib.namespace import RDF +from rdflib.namespace import RDF # noqa: N999 from rdflib.term import URIRef @@ -12,8 +12,8 @@ class RDFVOC(RDF): Description: URIRef ID: URIRef about: URIRef - parseType: URIRef + parseType: URIRef # noqa: N815 resource: URIRef li: URIRef - nodeID: URIRef + nodeID: URIRef # noqa: N815 datatype: URIRef diff --git a/rdflib/plugins/parsers/hext.py b/rdflib/plugins/parsers/hext.py index 47d436f29..b8fd28ae2 100644 --- a/rdflib/plugins/parsers/hext.py +++ b/rdflib/plugins/parsers/hext.py @@ -3,6 +3,7 @@ (ndjson) files, into Conjunctive. The store that backs the graph *must* be able to handle contexts, i.e. multiple graphs. 
""" + from __future__ import annotations import json @@ -24,7 +25,7 @@ class HextuplesParser(Parser): """ def __init__(self): - pass + self.skolemize = False def _load_json_line(self, line: str) -> List[Optional[Any]]: # this complex handing is because the 'value' component is @@ -51,6 +52,8 @@ def _parse_hextuple( s: Union[URIRef, BNode] if tup[0].startswith("_"): s = BNode(value=tup[0].replace("_:", "")) + if self.skolemize: + s = s.skolemize() else: s = URIRef(tup[0]) @@ -63,6 +66,8 @@ def _parse_hextuple( o = URIRef(tup[2]) elif tup[3] == "localId": o = BNode(value=tup[2].replace("_:", "")) + if self.skolemize: + o = o.skolemize() else: # literal if tup[4] is None: o = Literal(tup[2], datatype=URIRef(tup[3])) @@ -71,14 +76,21 @@ def _parse_hextuple( # 6 - context if tup[5] is not None: - c = URIRef(tup[5]) + c = ( + BNode(tup[5].replace("_:", "")) + if tup[5].startswith("_:") + else URIRef(tup[5]) + ) + if isinstance(c, BNode) and self.skolemize: + c = c.skolemize() + # type error: Argument 1 to "add" of "ConjunctiveGraph" has incompatible type "Tuple[Union[URIRef, BNode], URIRef, Union[URIRef, BNode, Literal], URIRef]"; expected "Union[Tuple[Node, Node, Node], Tuple[Node, Node, Node, Optional[Graph]]]" cg.add((s, p, o, c)) # type: ignore[arg-type] else: cg.add((s, p, o)) # type error: Signature of "parse" incompatible with supertype "Parser" - def parse(self, source: InputSource, graph: Graph, **kwargs: Any) -> None: # type: ignore[override] + def parse(self, source: InputSource, graph: Graph, skolemize: bool = False, **kwargs: Any) -> None: # type: ignore[override] if kwargs.get("encoding") not in [None, "utf-8"]: warnings.warn( f"Hextuples files are always utf-8 encoded, " @@ -90,6 +102,7 @@ def parse(self, source: InputSource, graph: Graph, **kwargs: Any) -> None: # ty graph.store.context_aware ), "Hextuples Parser needs a context-aware store!" 
+ self.skolemize = skolemize cg = ConjunctiveGraph(store=graph.store, identifier=graph.identifier) cg.default_context = graph diff --git a/rdflib/plugins/parsers/jsonld.py b/rdflib/plugins/parsers/jsonld.py index 4eb05fcee..6c9dc9d2b 100644 --- a/rdflib/plugins/parsers/jsonld.py +++ b/rdflib/plugins/parsers/jsonld.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ This parser will interpret a JSON-LD document as an RDF Graph. See: @@ -28,6 +27,7 @@ True """ + # From: https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/parser.py # NOTE: This code reads the entire JSON object into memory before parsing, but @@ -35,7 +35,7 @@ from __future__ import annotations import warnings -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union +from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Union import rdflib.parser from rdflib.graph import ConjunctiveGraph, Graph @@ -79,33 +79,83 @@ class JsonLDParser(rdflib.parser.Parser): def __init__(self): super(JsonLDParser, self).__init__() - def parse(self, source: InputSource, sink: Graph, **kwargs: Any) -> None: - # TODO: docstring w. args and return value - encoding = kwargs.get("encoding") or "utf-8" + def parse( + self, + source: InputSource, + sink: Graph, + version: float = 1.1, + skolemize: bool = False, + encoding: Optional[str] = "utf-8", + base: Optional[str] = None, + context: Optional[ + Union[ + List[Union[Dict[str, Any], str, None]], + Dict[str, Any], + str, + ] + ] = None, + generalized_rdf: Optional[bool] = False, + extract_all_scripts: Optional[bool] = False, + **kwargs: Any, + ) -> None: + """Parse JSON-LD from a source document. + + The source document can be JSON or HTML with embedded JSON script + elements (type attribute = "application/ld+json"). To process as HTML + ``source.content_type`` must be set to "text/html" or + "application/xhtml+xml". 
+ + :param source: InputSource with JSON-formatted data (JSON or HTML) + + :param sink: Graph to receive the parsed triples + + :param version: parse as JSON-LD version, defaults to 1.1 + + :param encoding: character encoding of the JSON (should be "utf-8" + or "utf-16"), defaults to "utf-8" + + :param base: JSON-LD `Base IRI `_, defaults to None + + :param context: JSON-LD `Context `_, defaults to None + + :param generalized_rdf: parse as `Generalized RDF `_, defaults to False + + :param extract_all_scripts: if source is an HTML document then extract + all script elements, defaults to False (extract only the first + script element). This is ignored if ``source.system_id`` contains + a fragment identifier, in which case only the script element with + matching id attribute is extracted. + + """ if encoding not in ("utf-8", "utf-16"): warnings.warn( "JSON should be encoded as unicode. " "Given encoding was: %s" % encoding ) - base = kwargs.get("base") or sink.absolutize( - source.getPublicId() or source.getSystemId() or "" - ) + if not base: + base = sink.absolutize(source.getPublicId() or source.getSystemId() or "") - context_data = kwargs.get("context") + context_data = context if not context_data and hasattr(source, "url") and hasattr(source, "links"): if TYPE_CHECKING: assert isinstance(source, URLInputSource) context_data = context_from_urlinputsource(source) try: - version = float(kwargs.get("version", "1.0")) + version = float(version) except ValueError: - version = None + version = 1.1 - generalized_rdf = kwargs.get("generalized_rdf", False) + # Get the optional fragment identifier + try: + fragment_id = URIRef(source.getSystemId()).fragment + except Exception: + fragment_id = None - data = source_to_json(source) + data, html_base = source_to_json(source, fragment_id, extract_all_scripts) + if html_base is not None: + base = URIRef(html_base, base=base) # NOTE: A ConjunctiveGraph parses into a Graph sink, so no sink will be # context_aware. 
Keeping this check in case RDFLib is changed, or @@ -116,32 +166,53 @@ def parse(self, source: InputSource, sink: Graph, **kwargs: Any) -> None: else: conj_sink = sink - to_rdf(data, conj_sink, base, context_data, version, generalized_rdf) + to_rdf( + data, + conj_sink, + base, + context_data, + version, + bool(generalized_rdf), + skolemize=skolemize, + ) def to_rdf( data: Any, dataset: Graph, base: Optional[str] = None, - context_data: Optional[bool] = None, + context_data: Optional[ + Union[ + List[Union[Dict[str, Any], str, None]], + Dict[str, Any], + str, + ] + ] = None, version: Optional[float] = None, generalized_rdf: bool = False, allow_lists_of_lists: Optional[bool] = None, + skolemize: bool = False, ): # TODO: docstring w. args and return value context = Context(base=base, version=version) if context_data: context.load(context_data) parser = Parser( - generalized_rdf=generalized_rdf, allow_lists_of_lists=allow_lists_of_lists + generalized_rdf=generalized_rdf, + allow_lists_of_lists=allow_lists_of_lists, + skolemize=skolemize, ) return parser.parse(data, context, dataset) class Parser: def __init__( - self, generalized_rdf: bool = False, allow_lists_of_lists: Optional[bool] = None + self, + generalized_rdf: bool = False, + allow_lists_of_lists: Optional[bool] = None, + skolemize: bool = False, ): + self.skolemize = skolemize self.generalized_rdf = generalized_rdf self.allow_lists_of_lists = ( allow_lists_of_lists @@ -211,6 +282,8 @@ def _add_to_graph( subj = self._to_rdf_id(context, id_val) else: subj = BNode() + if self.skolemize: + subj = subj.skolemize() if subj is None: return None @@ -281,7 +354,7 @@ def _key_to_graph( if term.type == JSON: obj_nodes = [self._to_typed_json_value(obj)] elif LIST in term.container: - obj_nodes = [{LIST: obj_nodes}] + obj_nodes = [self._expand_nested_list(obj_nodes)] elif isinstance(obj, dict): obj_nodes = self._parse_container(context, term, obj) else: @@ -333,17 +406,21 @@ def _key_to_graph( context = 
context.get_context_for_term(term) - flattened = [] - for obj in obj_nodes: - if isinstance(obj, dict): - objs = context.get_set(obj) - if objs is not None: - obj = objs - if isinstance(obj, list): - flattened += obj - continue - flattened.append(obj) - obj_nodes = flattened + # Flatten deep nested lists + def flatten(n: Iterable[Any]) -> List[Any]: + flattened = [] + for obj in n: + if isinstance(obj, dict): + objs = context.get_set(obj) + if objs is not None: + obj = objs + if isinstance(obj, list): + flattened += flatten(obj) + continue + flattened.append(obj) + return flattened + + obj_nodes = flatten(obj_nodes) if not pred_uri: return @@ -357,6 +434,8 @@ def _key_to_graph( if not self.generalized_rdf: return pred = BNode(bid) + if self.skolemize: + pred = pred.skolemize() else: pred = URIRef(pred_uri) @@ -388,11 +467,11 @@ def _parse_container( if v11 and GRAPH in term.container and ID in term.container: return [ - dict({GRAPH: o}) - if k in context.get_keys(NONE) - else dict({ID: k, GRAPH: o}) - if isinstance(o, dict) - else o + ( + dict({GRAPH: o}) + if k in context.get_keys(NONE) + else dict({ID: k, GRAPH: o}) if isinstance(o, dict) else o + ) for k, o in obj.items() ] @@ -404,23 +483,29 @@ def _parse_container( elif v11 and ID in term.container: return [ - dict({ID: k}, **o) - if isinstance(o, dict) and k not in context.get_keys(NONE) - else o + ( + dict({ID: k}, **o) + if isinstance(o, dict) and k not in context.get_keys(NONE) + else o + ) for k, o in obj.items() ] elif v11 and TYPE in term.container: return [ - self._add_type( - context, - {ID: context.expand(o) if term.type == VOCAB else o} - if isinstance(o, str) - else o, - k, + ( + self._add_type( + context, + ( + {ID: context.expand(o) if term.type == VOCAB else o} + if isinstance(o, str) + else o + ), + k, + ) + if isinstance(o, (dict, str)) and k not in context.get_keys(NONE) + else o ) - if isinstance(o, (dict, str)) and k not in context.get_keys(NONE) - else o for k, o in obj.items() ] @@ -534,7 
+619,10 @@ def _to_object( def _to_rdf_id(self, context: Context, id_val: str) -> Optional[IdentifiedNode]: bid = self._get_bnodeid(id_val) if bid: - return BNode(bid) + b = BNode(bid) + if self.skolemize: + return b.skolemize() + return b else: uri = context.resolve(id_val) if not self.generalized_rdf and ":" not in uri: @@ -559,7 +647,11 @@ def _add_list( if not isinstance(node_list, list): node_list = [node_list] - first_subj = BNode() + first_subj: Union[URIRef, BNode] = BNode() + if self.skolemize and isinstance(first_subj, BNode): + first_subj = first_subj.skolemize() + + rest: Union[URIRef, BNode, None] subj, rest = first_subj, None for node in node_list: @@ -578,6 +670,8 @@ def _add_list( graph.add((subj, RDF.first, obj)) rest = BNode() + if self.skolemize and isinstance(rest, BNode): + rest = rest.skolemize() if rest: graph.add((subj, RDF.rest, RDF.nil)) @@ -593,3 +687,10 @@ def _to_typed_json_value(value: Any) -> Dict[str, str]: value, separators=(",", ":"), sort_keys=True, ensure_ascii=False ), } + + @classmethod + def _expand_nested_list(cls, obj_nodes: List[Any]) -> Dict[str, List[Any]]: + result = [ + cls._expand_nested_list(o) if isinstance(o, list) else o for o in obj_nodes + ] + return {LIST: result} diff --git a/rdflib/plugins/parsers/notation3.py b/rdflib/plugins/parsers/notation3.py index 2a64be24f..da71405e0 100755 --- a/rdflib/plugins/parsers/notation3.py +++ b/rdflib/plugins/parsers/notation3.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ notation3.py - Standalone Notation3 Parser Derived from CWM, the Closed World Machine @@ -27,6 +26,7 @@ Copyright 2010, Gunnar A. 
Grimnes """ + from __future__ import annotations import codecs @@ -146,11 +146,11 @@ def join(here: str, there: str) -> str: We grok IRIs - >>> len(u'Andr\\xe9') + >>> len('Andr\\xe9') 5 - >>> join('http://example.org/', u'#Andr\\xe9') - u'http://example.org/#Andr\\xe9' + >>> join('http://example.org/', '#Andr\\xe9') + 'http://example.org/#Andr\\xe9' """ # assert(here.find("#") < 0), \ @@ -276,7 +276,7 @@ def _fixslash(s: str) -> str: N3_Empty = (SYMBOL, List_NS + "Empty") -runNamespaceValue = None +runNamespaceValue: Optional[str] = None def runNamespace() -> str: @@ -383,8 +383,8 @@ def unicodeExpand(m: Match) -> str: class SinkParser: def __init__( self, - store: "RDFSink", - openFormula: Optional["Formula"] = None, + store: RDFSink, + openFormula: Optional[Formula] = None, thisDoc: str = "", baseURI: Optional[str] = None, genPrefix: str = "", @@ -475,7 +475,7 @@ def here(self, i: int) -> str: def formula(self) -> Optional[Formula]: return self._formula - def loadStream(self, stream: Union[IO[str], IO[bytes]]) -> Optional["Formula"]: + def loadStream(self, stream: Union[IO[str], IO[bytes]]) -> Optional[Formula]: return self.loadBuf(stream.read()) # Not ideal def loadBuf(self, buf: Union[str, bytes]) -> Optional[Formula]: @@ -759,7 +759,7 @@ def startDoc(self) -> None: # was: self._store.startDoc() self._store.startDoc(self._formula) - def endDoc(self) -> Optional["Formula"]: + def endDoc(self) -> Optional[Formula]: """Signal end of document and stop parsing. 
returns formula""" self._store.endDoc(self._formula) # don't canonicalize yet return self._formula @@ -1734,7 +1734,7 @@ def BadSyntax(self, argstr: str, i: int, msg: str) -> NoReturn: # [ is operator:plus of ( \1 \2 ) ] -class BadSyntax(SyntaxError): +class BadSyntax(SyntaxError): # noqa: N818 def __init__(self, uri: str, lines: int, argstr: str, i: int, why: str): self._str = argstr.encode("utf-8") # Better go back to strings for errors self._i = i @@ -1988,7 +1988,6 @@ def hexify(ustr: str) -> bytes: class TurtleParser(Parser): - """ An RDFLib parser for Turtle @@ -2000,7 +1999,7 @@ def __init__(self): def parse( self, - source: "InputSource", + source: InputSource, graph: Graph, encoding: Optional[str] = "utf-8", turtle: bool = True, @@ -2025,7 +2024,6 @@ def parse( class N3Parser(TurtleParser): - """ An RDFLib parser for Notation3 diff --git a/rdflib/plugins/parsers/nquads.py b/rdflib/plugins/parsers/nquads.py index eb24aa64b..071ef0592 100644 --- a/rdflib/plugins/parsers/nquads.py +++ b/rdflib/plugins/parsers/nquads.py @@ -22,6 +22,7 @@ >>> FOAF = Namespace("http://xmlns.com/foaf/0.1/") >>> assert(g.value(s, FOAF.name).eq("Arco Publications")) """ + from __future__ import annotations from codecs import getreader @@ -47,6 +48,7 @@ def parse( # type: ignore[override] inputsource: InputSource, sink: ConjunctiveGraph, bnode_context: Optional[_BNodeContextType] = None, + skolemize: bool = False, **kwargs: Any, ) -> ConjunctiveGraph: """ @@ -67,6 +69,7 @@ def parse( # type: ignore[override] self.sink: ConjunctiveGraph = ConjunctiveGraph( # type: ignore[assignment] store=sink.store, identifier=sink.identifier ) + self.skolemize = skolemize source = inputsource.getCharacterStream() if not source: @@ -91,7 +94,7 @@ def parse( # type: ignore[override] def parseline(self, bnode_context: Optional[_BNodeContextType] = None) -> None: self.eat(r_wspace) - if (not self.line) or self.line.startswith(("#")): + if (not self.line) or self.line.startswith("#"): return # The line 
is empty or a comment subject = self.subject(bnode_context) diff --git a/rdflib/plugins/parsers/ntriples.py b/rdflib/plugins/parsers/ntriples.py index 09656faff..933e99f3f 100644 --- a/rdflib/plugins/parsers/ntriples.py +++ b/rdflib/plugins/parsers/ntriples.py @@ -1,12 +1,11 @@ -#!/usr/bin/env python3 -from __future__ import annotations - -__doc__ = """\ +"""\ N-Triples Parser License: GPL 2, W3C, BSD, or MIT Author: Sean B. Palmer, inamidst.com """ +from __future__ import annotations + import codecs import re from io import BytesIO, StringIO, TextIOBase @@ -26,9 +25,8 @@ from rdflib.exceptions import ParserError as ParseError from rdflib.parser import InputSource, Parser from rdflib.term import BNode as bNode -from rdflib.term import Literal -from rdflib.term import URIRef -from rdflib.term import URIRef as URI +from rdflib.term import Literal, URIRef +from rdflib.term import URIRef as URI # noqa: N814 if TYPE_CHECKING: import typing_extensions as te @@ -101,7 +99,7 @@ def unquote(s: str) -> str: m = r_uniquot.match(s) if m: s = s[m.end() :] - u, U = m.groups() + u, U = m.groups() # noqa: N806 codepoint = int(u or U, 16) if codepoint > 0x10FFFF: raise ParseError("Disallowed codepoint: %08X" % codepoint) @@ -140,19 +138,21 @@ class W3CNTriplesParser: `W3CNTriplesParser`. 
""" - __slots__ = ("_bnode_ids", "sink", "buffer", "file", "line") + __slots__ = ("_bnode_ids", "sink", "buffer", "file", "line", "skolemize") def __init__( self, - sink: Optional[Union[DummySink, "NTGraphSink"]] = None, + sink: Optional[Union[DummySink, NTGraphSink]] = None, bnode_context: Optional[_BNodeContextType] = None, ): + self.skolemize = False + if bnode_context is not None: self._bnode_ids = bnode_context else: self._bnode_ids = {} - self.sink: Union[DummySink, "NTGraphSink"] + self.sink: Union[DummySink, NTGraphSink] if sink is not None: self.sink = sink else: @@ -166,7 +166,8 @@ def parse( self, f: Union[TextIO, IO[bytes], codecs.StreamReader], bnode_context: Optional[_BNodeContextType] = None, - ) -> Union[DummySink, "NTGraphSink"]: + skolemize: bool = False, + ) -> Union[DummySink, NTGraphSink]: """ Parse f as an N-Triples file. @@ -186,6 +187,7 @@ def parse( # someone still using a bytestream here? f = codecs.getreader("utf-8")(f) + self.skolemize = skolemize self.file = f # type: ignore[assignment] self.buffer = "" while True: @@ -272,7 +274,7 @@ def subject(self, bnode_context=None) -> Union[bNode, URIRef]: raise ParseError("Subject must be uriref or nodeID") return subj - def predicate(self) -> URIRef: + def predicate(self) -> Union[bNode, URIRef]: pred = self.uriref() if not pred: raise ParseError("Predicate must be uriref") @@ -286,7 +288,7 @@ def object( raise ParseError("Unrecognised object type") return objt - def uriref(self) -> Union["te.Literal[False]", URI]: + def uriref(self) -> Union[te.Literal[False], URI]: if self.peek("<"): uri = self.eat(r_uriref).group(1) uri = unquote(uri) @@ -296,25 +298,30 @@ def uriref(self) -> Union["te.Literal[False]", URI]: def nodeid( self, bnode_context: Optional[_BNodeContextType] = None - ) -> Union["te.Literal[False]", bNode]: + ) -> Union[te.Literal[False], bNode, URI]: if self.peek("_"): - # Fix for https://github.com/RDFLib/rdflib/issues/204 - if bnode_context is None: - bnode_context = 
self._bnode_ids - bnode_id = self.eat(r_nodeid).group(1) - new_id = bnode_context.get(bnode_id, None) - if new_id is not None: - # Re-map to id specific to this doc - return bNode(new_id) + if self.skolemize: + bnode_id = self.eat(r_nodeid).group(1) + return bNode(bnode_id).skolemize() + else: - # Replace with freshly-generated document-specific BNode id - bnode = bNode() - # Store the mapping - bnode_context[bnode_id] = bnode - return bnode + # Fix for https://github.com/RDFLib/rdflib/issues/204 + if bnode_context is None: + bnode_context = self._bnode_ids + bnode_id = self.eat(r_nodeid).group(1) + new_id = bnode_context.get(bnode_id, None) + if new_id is not None: + # Re-map to id specific to this doc + return bNode(new_id) + else: + # Replace with freshly-generated document-specific BNode id + bnode = bNode() + # Store the mapping + bnode_context[bnode_id] = bnode + return bnode return False - def literal(self) -> Union["te.Literal[False]", Literal]: + def literal(self) -> Union[te.Literal[False], Literal]: if self.peek('"'): lit, lang, dtype = self.eat(r_literal).groups() if lang: @@ -337,10 +344,10 @@ def literal(self) -> Union["te.Literal[False]", Literal]: class NTGraphSink: __slots__ = ("g",) - def __init__(self, graph: "Graph"): + def __init__(self, graph: Graph): self.g = graph - def triple(self, s: "_SubjectType", p: "_PredicateType", o: "_ObjectType") -> None: + def triple(self, s: _SubjectType, p: _PredicateType, o: _ObjectType) -> None: self.g.add((s, p, o)) @@ -352,7 +359,7 @@ class NTParser(Parser): __slots__ = () @classmethod - def parse(cls, source: InputSource, sink: "Graph", **kwargs: Any) -> None: + def parse(cls, source: InputSource, sink: Graph, **kwargs: Any) -> None: """ Parse the NT format diff --git a/rdflib/plugins/parsers/rdfxml.py b/rdflib/plugins/parsers/rdfxml.py index 03650fc98..54fc69567 100644 --- a/rdflib/plugins/parsers/rdfxml.py +++ b/rdflib/plugins/parsers/rdfxml.py @@ -1,6 +1,7 @@ """ An RDF/XML parser for RDFLib """ + from 
__future__ import annotations from typing import TYPE_CHECKING, Any, Dict, List, NoReturn, Optional, Tuple @@ -87,12 +88,14 @@ class BagID(URIRef): __slots__ = ["li"] def __init__(self, val): - super(URIRef, self).__init__(val) + # type error: Too many arguments for "__init__" of "object" + super(URIRef, self).__init__(val) # type: ignore[call-arg] self.li = 0 def next_li(self): self.li += 1 - return RDFNS["_%s" % self.li] + # type error: Type expected within [...] + return RDFNS["_%s" % self.li] # type: ignore[misc] class ElementHandler: @@ -178,7 +181,8 @@ def startElementNS( stack.append(ElementHandler()) current = self.current parent = self.parent - base = attrs.get(BASE, None) + # type error: No overlaod for "get" of "AttributesImpl" mactches tuple (str, str) + base = attrs.get(BASE, None) # type: ignore[call-overload, unused-ignore] if base is not None: base, frag = urldefrag(base) if parent and parent.base: @@ -195,7 +199,8 @@ def startElementNS( if systemId: base, frag = urldefrag(systemId) current.base = base - language = attrs.get(LANG, None) + # type error: No overlaod for "get" of "AttributesImpl" mactches tuple (str, str) + language = attrs.get(LANG, None) # type: ignore[call-overload, unused-ignore] if language is None: if parent: language = parent.language @@ -273,8 +278,9 @@ def convert( name = URIRef("".join(name)) # type: ignore[assignment, arg-type] atts = {} for n, v in attrs.items(): + # mypy error: mypy thinks n[0]==None is unreachable if n[0] is None: - att = n[1] + att = n[1] # type: ignore[unreachable, unused-ignore] else: att = "".join(n) if att.startswith(XMLNS) or att[0:3].lower() == "xml": @@ -578,7 +584,7 @@ def literal_element_start( current.object = "<%s:%s" % (prefix, name[1]) else: current.object = "<%s" % name[1] - if not name[0] in current.declared: + if not name[0] in current.declared: # noqa: E713 current.declared[name[0]] = prefix if prefix: current.object += ' xmlns:%s="%s"' % (prefix, name[0]) @@ -586,10 +592,10 @@ def 
literal_element_start( current.object += ' xmlns="%s"' % name[0] else: current.object = "<%s" % name[1] - - for name, value in attrs.items(): + # type error: Incompatible types in assignment (expression has type "str", variable has type "Tuple[str, str]") + for name, value in attrs.items(): # type: ignore[assignment, unused-ignore] if name[0]: - if not name[0] in current.declared: + if not name[0] in current.declared: # noqa: E713 current.declared[name[0]] = self._current_context[name[0]] name = current.declared[name[0]] + ":" + name[1] else: @@ -640,7 +646,8 @@ def parse(self, source: InputSource, sink: Graph, **args: Any) -> None: content_handler = self._parser.getContentHandler() preserve_bnode_ids = args.get("preserve_bnode_ids", None) if preserve_bnode_ids is not None: - content_handler.preserve_bnode_ids = preserve_bnode_ids + # type error: ContentHandler has no attribute "preserve_bnode_ids" + content_handler.preserve_bnode_ids = preserve_bnode_ids # type: ignore[attr-defined, unused-ignore] # # We're only using it once now # content_handler.reset() # self._parser.reset() diff --git a/rdflib/plugins/parsers/trig.py b/rdflib/plugins/parsers/trig.py index d28198bce..9ed6e8bbc 100644 --- a/rdflib/plugins/parsers/trig.py +++ b/rdflib/plugins/parsers/trig.py @@ -8,7 +8,7 @@ from .notation3 import RDFSink, SinkParser -def becauseSubGraph(*args, **kwargs): +def becauseSubGraph(*args, **kwargs): # noqa: N802 pass @@ -69,16 +69,20 @@ def graph(self, argstr: str, i: int) -> int: raise Exception if it looks like a graph, but isn't. 
""" + need_graphid = False # import pdb; pdb.set_trace() j = self.sparqlTok("GRAPH", argstr, i) # optional GRAPH keyword if j >= 0: i = j + need_graphid = True r: MutableSequence[Any] = [] j = self.labelOrSubject(argstr, i, r) if j >= 0: graph = r[0] i = j + elif need_graphid: + self.BadSyntax(argstr, i, "GRAPH keyword must be followed by graph name") else: graph = self._store.graph.identifier # hack @@ -98,7 +102,10 @@ def graph(self, argstr: str, i: int) -> int: j = i + 1 - oldParentContext = self._parentContext + if self._context is not None: + self.BadSyntax(argstr, i, "Nested graphs are not allowed") + + oldParentContext = self._parentContext # noqa: N806 self._parentContext = self._context reason2 = self._reason2 self._reason2 = becauseSubGraph @@ -153,7 +160,7 @@ def parse(self, source: InputSource, graph: Graph, encoding: str = "utf-8") -> N sink = RDFSink(conj_graph) - baseURI = conj_graph.absolutize( + baseURI = conj_graph.absolutize( # noqa: N806 source.getPublicId() or source.getSystemId() or "" ) p = TrigSinkParser(sink, baseURI=baseURI, turtle=True) diff --git a/rdflib/plugins/parsers/trix.py b/rdflib/plugins/parsers/trix.py index 8baaf5ca4..833e18568 100644 --- a/rdflib/plugins/parsers/trix.py +++ b/rdflib/plugins/parsers/trix.py @@ -1,6 +1,7 @@ """ A TriX parser for RDFLib """ + from __future__ import annotations from typing import TYPE_CHECKING, Any, Dict, List, NoReturn, Optional, Tuple @@ -28,6 +29,9 @@ class TriXHandler(handler.ContentHandler): """An Sax Handler for TriX. 
See http://sw.nokia.com/trix/""" + lang: Optional[str] + datatype: Optional[str] + def __init__(self, store: Store): self.store = store self.preserve_bnode_ids = False @@ -104,12 +108,12 @@ def startElementNS( self.datatype = None try: - self.lang = attrs.getValue((str(XMLNS), "lang")) + self.lang = attrs.getValue((str(XMLNS), "lang")) # type: ignore[arg-type, unused-ignore] except Exception: # language not required - ignore pass try: - self.datatype = attrs.getValueByQName("datatype") + self.datatype = attrs.getValueByQName("datatype") # type: ignore[arg-type, unused-ignore] except KeyError: self.error("No required attribute 'datatype'") else: @@ -121,7 +125,8 @@ def startElementNS( self.lang = None self.datatype = None try: - self.lang = attrs.getValue((str(XMLNS), "lang")) + # type error: Argument 1 to "getValue" of "AttributesImpl" has incompatible type "Tuple[str, str]"; expected "str" + self.lang = attrs.getValue((str(XMLNS), "lang")) # type: ignore[arg-type, unused-ignore] except Exception: # language not required - ignore pass @@ -283,7 +288,8 @@ def parse(self, source: InputSource, sink: Graph, **args: Any) -> None: content_handler = self._parser.getContentHandler() preserve_bnode_ids = args.get("preserve_bnode_ids", None) if preserve_bnode_ids is not None: - content_handler.preserve_bnode_ids = preserve_bnode_ids + # type error: ContentHandler has no attribute "preserve_bnode_ids" + content_handler.preserve_bnode_ids = preserve_bnode_ids # type: ignore[attr-defined, unused-ignore] # We're only using it once now # content_handler.reset() # self._parser.reset() diff --git a/rdflib/plugins/serializers/hext.py b/rdflib/plugins/serializers/hext.py index 9452fd0e0..00a02c5ce 100644 --- a/rdflib/plugins/serializers/hext.py +++ b/rdflib/plugins/serializers/hext.py @@ -2,11 +2,14 @@ HextuplesSerializer RDF graph serializer for RDFLib. See for details about the format. 
""" + +from __future__ import annotations + import json import warnings from typing import IO, Optional, Type, Union -from rdflib.graph import ConjunctiveGraph, Graph +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, ConjunctiveGraph, Graph from rdflib.namespace import RDF, XSD from rdflib.serializer import Serializer from rdflib.term import BNode, Literal, Node, URIRef @@ -133,9 +136,14 @@ def _iri_or_bn(self, i_): def _context(self, context): if self.graph_type == Graph: return "" - if context.identifier == "urn:x-rdflib:default": + if context.identifier == DATASET_DEFAULT_GRAPH_ID: return "" elif context is not None and self.default_context is not None: - if context.identifier == self.default_context.identifier: + # type error: "Node" has no attribute "identifier" + if context.identifier == self.default_context.identifier: # type: ignore[attr-defined] return "" - return context.identifier + return ( + context.identifier + if isinstance(context.identifier, URIRef) + else context.identifier.n3() + ) diff --git a/rdflib/plugins/serializers/jsonld.py b/rdflib/plugins/serializers/jsonld.py index e5d9b0384..ee3fe17bd 100644 --- a/rdflib/plugins/serializers/jsonld.py +++ b/rdflib/plugins/serializers/jsonld.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- """ This serialiser will output an RDF Graph as a JSON-LD formatted document. See: @@ -29,19 +28,22 @@ ] """ + # From: https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/serializer.py # NOTE: This code writes the entire JSON object into memory before serialising, # but we should consider streaming the output to deal with arbitrarily large # graphs. 
+from __future__ import annotations + import warnings -from typing import IO, Optional +from typing import IO, Any, Dict, List, Optional -from rdflib.graph import Graph +from rdflib.graph import Graph, _ObjectType from rdflib.namespace import RDF, XSD from rdflib.serializer import Serializer -from rdflib.term import BNode, Literal, URIRef +from rdflib.term import BNode, IdentifiedNode, Identifier, Literal, URIRef from ..shared.jsonld.context import UNDEF, Context from ..shared.jsonld.keys import CONTEXT, GRAPH, ID, LANG, LIST, SET, VOCAB @@ -139,18 +141,19 @@ def from_rdf( class Converter: - def __init__(self, context, use_native_types, use_rdf_type): + def __init__(self, context: Context, use_native_types: bool, use_rdf_type: bool): self.context = context self.use_native_types = context.active or use_native_types self.use_rdf_type = use_rdf_type - def convert(self, graph): + def convert(self, graph: Graph): # TODO: bug in rdflib dataset parsing (nquads et al): # plain triples end up in separate unnamed graphs (rdflib issue #436) if graph.context_aware: default_graph = Graph() graphs = [default_graph] - for g in graph.contexts(): + # type error: "Graph" has no attribute "contexts" + for g in graph.contexts(): # type: ignore[attr-defined] if isinstance(g.identifier, URIRef): graphs.append(g) else: @@ -160,7 +163,7 @@ def convert(self, graph): context = self.context - objs = [] + objs: List[Any] = [] for g in graphs: obj = {} graphname = None @@ -193,8 +196,8 @@ def convert(self, graph): return objs - def from_graph(self, graph): - nodemap = {} + def from_graph(self, graph: Graph): + nodemap: Dict[Any, Any] = {} for s in set(graph.subjects()): ## only iri:s and unreferenced (rest will be promoted to top if needed) @@ -205,12 +208,13 @@ def from_graph(self, graph): return list(nodemap.values()) - def process_subject(self, graph, s, nodemap): + def process_subject(self, graph: Graph, s: IdentifiedNode, nodemap): if isinstance(s, URIRef): node_id = 
self.context.shrink_iri(s) elif isinstance(s, BNode): node_id = s.n3() else: + # This does not seem right, this probably should be an error. node_id = None # used_as_object = any(graph.subjects(None, s)) @@ -222,11 +226,21 @@ def process_subject(self, graph, s, nodemap): nodemap[node_id] = node for p, o in graph.predicate_objects(s): - self.add_to_node(graph, s, p, o, node, nodemap) + # type error: Argument 3 to "add_to_node" of "Converter" has incompatible type "Node"; expected "IdentifiedNode" + # type error: Argument 4 to "add_to_node" of "Converter" has incompatible type "Node"; expected "Identifier" + self.add_to_node(graph, s, p, o, node, nodemap) # type: ignore[arg-type] return node - def add_to_node(self, graph, s, p, o, s_node, nodemap): + def add_to_node( + self, + graph: Graph, + s: IdentifiedNode, + p: IdentifiedNode, + o: Identifier, + s_node: Dict[str, Any], + nodemap, + ): context = self.context if isinstance(o, Literal): @@ -237,7 +251,9 @@ def add_to_node(self, graph, s, p, o, s_node, nodemap): containers = [LIST, None] if graph.value(o, RDF.first) else [None] for container in containers: for coercion in (ID, VOCAB, UNDEF): - term = context.find_term(str(p), coercion, container) + # type error: Argument 2 to "find_term" of "Context" has incompatible type "object"; expected "Union[str, Defined, None]" + # type error: Argument 3 to "find_term" of "Context" has incompatible type "Optional[str]"; expected "Union[Defined, str]" + term = context.find_term(str(p), coercion, container) # type: ignore[arg-type] if term: break if term: @@ -251,10 +267,12 @@ def add_to_node(self, graph, s, p, o, s_node, nodemap): if term.type: node = self.type_coerce(o, term.type) - elif term.language and o.language == term.language: - node = str(o) - elif context.language and (term.language is None and o.language is None): + # type error: "Identifier" has no attribute "language" + elif term.language and o.language == term.language: # type: ignore[attr-defined] node = str(o) 
+ # type error: Right operand of "and" is never evaluated + elif context.language and (term.language is None and o.language is None): # type: ignore[unreachable] + node = str(o) # type: ignore[unreachable] if LIST in term.container: node = [ @@ -301,7 +319,7 @@ def add_to_node(self, graph, s, p, o, s_node, nodemap): value = node s_node[p_key] = value - def type_coerce(self, o, coerce_type): + def type_coerce(self, o: Identifier, coerce_type: str): if coerce_type == ID: if isinstance(o, URIRef): return self.context.shrink_iri(o) @@ -316,7 +334,9 @@ def type_coerce(self, o, coerce_type): else: return None - def to_raw_value(self, graph, s, o, nodemap): + def to_raw_value( + self, graph: Graph, s: IdentifiedNode, o: Identifier, nodemap: Dict[str, Any] + ): context = self.context coll = self.to_collection(graph, o) if coll is not None: @@ -358,15 +378,16 @@ def to_raw_value(self, graph, s, o, nodemap): } elif o.language and o.language != context.language: return {context.lang_key: o.language, context.value_key: v} - elif not context.active or context.language and not o.language: + # type error: Right operand of "and" is never evaluated + elif not context.active or context.language and not o.language: # type: ignore[unreachable] return {context.value_key: v} else: return v - def to_collection(self, graph, l_): + def to_collection(self, graph: Graph, l_: Identifier): if l_ != RDF.nil and not graph.value(l_, RDF.first): return None - list_nodes = [] + list_nodes: List[Optional[_ObjectType]] = [] chain = set([l_]) while l_: if l_ == RDF.nil: @@ -382,7 +403,8 @@ def to_collection(self, graph, l_): elif p != RDF.type or o != RDF.List: return None list_nodes.append(first) - l_ = rest + # type error: Incompatible types in assignment (expression has type "Optional[Node]", variable has type "Identifier") + l_ = rest # type: ignore[assignment] if l_ in chain: return None chain.add(l_) diff --git a/rdflib/plugins/serializers/longturtle.py b/rdflib/plugins/serializers/longturtle.py 
index ac2febdcf..e886574f3 100644 --- a/rdflib/plugins/serializers/longturtle.py +++ b/rdflib/plugins/serializers/longturtle.py @@ -13,7 +13,7 @@ on the start of the next line * uses default encoding (encode()) is used instead of "latin-1" -- Nicholas Car, 2021 +- Nicholas Car, 2023 """ from rdflib.exceptions import Error @@ -101,7 +101,6 @@ def serialize(self, stream, base=None, encoding=None, spacious=None, **args): self.write("\n") self.endDocument() - self.write("\n") self.base = None @@ -168,21 +167,20 @@ def s_default(self, subject): self.path(subject, SUBJECT) self.write("\n" + self.indent()) self.predicateList(subject) - self.write(" ;\n.") + self.write("\n.") return True def s_squared(self, subject): if (self._references[subject] > 0) or not isinstance(subject, BNode): return False self.write("\n" + self.indent() + "[]") - self.predicateList(subject) + self.predicateList(subject, newline=False) self.write(" ;\n.") return True def path(self, node, position, newline=False): if not ( - self.p_squared(node, position, newline) - or self.p_default(node, position, newline) + self.p_squared(node, position) or self.p_default(node, position, newline) ): raise Error("Cannot serialize node '%s'" % (node,)) @@ -207,7 +205,11 @@ def label(self, node, position): return self.getQName(node, position == VERB) or node.n3() - def p_squared(self, node, position, newline=False): + def p_squared( + self, + node, + position, + ): if ( not isinstance(node, BNode) or node in self._serialized @@ -216,23 +218,19 @@ def p_squared(self, node, position, newline=False): ): return False - if not newline: - self.write(" ") - if self.isValidList(node): # this is a list self.depth += 2 - self.write("(\n") - self.depth -= 1 + self.write(" (\n") + self.depth -= 2 self.doList(node) - self.depth -= 1 - self.write("\n" + self.indent(1) + ")") + self.write("\n" + self.indent() + ")") else: + # this is a Blank Node self.subjectDone(node) - self.depth += 2 - self.write("[\n") - self.depth -= 1 - 
self.predicateList(node, newline=False) + self.write("\n" + self.indent(1) + "[\n") + self.depth += 1 + self.predicateList(node) self.depth -= 1 self.write("\n" + self.indent(1) + "]") @@ -279,6 +277,7 @@ def predicateList(self, subject, newline=False): self.write(" ;\n" + self.indent(1)) self.verb(predicate, newline=True) self.objectList(properties[predicate]) + self.write(" ;") def verb(self, node, newline=False): self.path(node, VERB, newline) @@ -291,11 +290,13 @@ def objectList(self, objects): self.depth += depthmod first_nl = False if count > 1: - self.write("\n" + self.indent(1)) + if not isinstance(objects[0], BNode): + self.write("\n" + self.indent(1)) first_nl = True self.path(objects[0], OBJECT, newline=first_nl) for obj in objects[1:]: - self.write(" ,\n") - self.write(self.indent(1)) + self.write(" ,") + if not isinstance(obj, BNode): + self.write("\n" + self.indent(1)) self.path(obj, OBJECT, newline=True) self.depth -= depthmod diff --git a/rdflib/plugins/serializers/n3.py b/rdflib/plugins/serializers/n3.py index 6f074f68f..d8036bba0 100644 --- a/rdflib/plugins/serializers/n3.py +++ b/rdflib/plugins/serializers/n3.py @@ -1,6 +1,7 @@ """ Notation 3 (N3) RDF graph serializer for RDFLib. 
""" + from rdflib.graph import Graph from rdflib.namespace import OWL, Namespace from rdflib.plugins.serializers.turtle import OBJECT, SUBJECT, TurtleSerializer @@ -22,7 +23,7 @@ def reset(self): super(N3Serializer, self).reset() self._stores = {} - def endDocument(self): + def endDocument(self): # noqa: N802 if not self.parent: super(N3Serializer, self).endDocument() @@ -32,7 +33,7 @@ def indent(self, modifier=0): indent += self.parent.indent() # modifier) return indent - def preprocessTriple(self, triple): + def preprocessTriple(self, triple): # noqa: N802 super(N3Serializer, self).preprocessTriple(triple) if isinstance(triple[0], Graph): for t in triple[0]: @@ -44,7 +45,7 @@ def preprocessTriple(self, triple): for t in triple[2]: self.preprocessTriple(t) - def getQName(self, uri, gen_prefix=True): + def getQName(self, uri, gen_prefix=True): # noqa: N802 qname = None if self.parent is not None: qname = self.parent.getQName(uri, gen_prefix) @@ -81,7 +82,8 @@ def p_clause(self, node, position): self.write("{") self.depth += 1 serializer = N3Serializer(node, parent=self) - serializer.serialize(self.stream) + # type error: Argument 1 to "serialize" of "TurtleSerializer" has incompatible type "Optional[IO[bytes]]"; expected "IO[bytes]" + serializer.serialize(self.stream) # type: ignore[arg-type] self.depth -= 1 self.write(self.indent() + "}") return True diff --git a/rdflib/plugins/serializers/nquads.py b/rdflib/plugins/serializers/nquads.py index 8d7c80781..3c8d02ccc 100644 --- a/rdflib/plugins/serializers/nquads.py +++ b/rdflib/plugins/serializers/nquads.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import warnings from typing import IO, Optional diff --git a/rdflib/plugins/serializers/nt.py b/rdflib/plugins/serializers/nt.py index 5faf9efb2..e87f949e3 100644 --- a/rdflib/plugins/serializers/nt.py +++ b/rdflib/plugins/serializers/nt.py @@ -1,10 +1,5 @@ from __future__ import annotations -""" -N-Triples RDF graph serializer for RDFLib. 
-See for details about the -format. -""" import codecs import warnings from typing import IO, TYPE_CHECKING, Optional, Tuple, Union @@ -16,6 +11,12 @@ if TYPE_CHECKING: from rdflib.graph import _TripleType +""" +N-Triples RDF graph serializer for RDFLib. +See for details about the +format. +""" + __all__ = ["NTSerializer"] @@ -60,14 +61,12 @@ def __init__(self, store: Graph): def _nt_row(triple: _TripleType) -> str: if isinstance(triple[2], Literal): return "%s %s %s .\n" % ( - # type error: "Node" has no attribute "n3" - triple[0].n3(), # type: ignore[attr-defined] - triple[1].n3(), # type: ignore[attr-defined] + triple[0].n3(), + triple[1].n3(), _quoteLiteral(triple[2]), ) else: - # type error: "Node" has no attribute "n3" - return "%s %s %s .\n" % (triple[0].n3(), triple[1].n3(), triple[2].n3()) # type: ignore[attr-defined] + return "%s %s %s .\n" % (triple[0].n3(), triple[1].n3(), triple[2].n3()) def _quoteLiteral(l_: Literal) -> str: # noqa: N802 diff --git a/rdflib/plugins/serializers/rdfxml.py b/rdflib/plugins/serializers/rdfxml.py index c5acc74ad..d6a2f6abb 100644 --- a/rdflib/plugins/serializers/rdfxml.py +++ b/rdflib/plugins/serializers/rdfxml.py @@ -1,5 +1,7 @@ +from __future__ import annotations + import xml.dom.minidom -from typing import IO, Dict, Optional, Set +from typing import IO, Dict, Generator, Optional, Set, Tuple from xml.sax.saxutils import escape, quoteattr from rdflib.collection import Collection @@ -20,16 +22,17 @@ class XMLSerializer(Serializer): def __init__(self, store: Graph): super(XMLSerializer, self).__init__(store) - def __bindings(self): + def __bindings(self) -> Generator[Tuple[str, URIRef], None, None]: store = self.store nm = store.namespace_manager - bindings = {} + bindings: Dict[str, URIRef] = {} for predicate in set(store.predicates()): - prefix, namespace, name = nm.compute_qname_strict(predicate) + # type error: Argument 1 to "compute_qname_strict" of "NamespaceManager" has incompatible type "Node"; expected "str" + 
prefix, namespace, name = nm.compute_qname_strict(predicate) # type: ignore[arg-type] bindings[prefix] = URIRef(namespace) - RDFNS = URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#") + RDFNS = URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#") # noqa: N806 if "rdf" in bindings: assert bindings["rdf"] == RDFNS @@ -45,7 +48,7 @@ def serialize( base: Optional[str] = None, encoding: Optional[str] = None, **args, - ): + ) -> None: # if base is given here, use that, if not and a base is set for the graph use that if base is not None: self.base = base @@ -82,7 +85,8 @@ def serialize( # write out triples by subject for subject in self.store.subjects(): - self.subject(subject, 1) + # type error: Argument 1 to "subject" of "XMLSerializer" has incompatible type "Node"; expected "Identifier" + self.subject(subject, 1) # type: ignore[arg-type] # endRDF write("\n") @@ -91,7 +95,7 @@ def serialize( # self.__serialized = None del self.__serialized - def subject(self, subject, depth=1): + def subject(self, subject: Identifier, depth: int = 1) -> None: if subject not in self.__serialized: self.__serialized[subject] = 1 @@ -110,13 +114,17 @@ def subject(self, subject, depth=1): write(">\n") for predicate, object in self.store.predicate_objects(subject): - self.predicate(predicate, object, depth + 1) + # type error: Argument 1 to "predicate" of "XMLSerializer" has incompatible type "Node"; expected "Identifier" + # type error: Argument 2 to "predicate" of "XMLSerializer" has incompatible type "Node"; expected "Identifier" + self.predicate(predicate, object, depth + 1) # type: ignore[arg-type] write("%s\n" % (indent, element_name)) else: write("/>\n") - def predicate(self, predicate, object, depth=1): + def predicate( + self, predicate: Identifier, object: Identifier, depth: int = 1 + ) -> None: write = self.write indent = " " * depth qname = self.store.namespace_manager.qname_strict(predicate) @@ -150,7 +158,7 @@ def predicate(self, predicate, object, depth=1): # TODO: -def 
fix(val): +def fix(val: str) -> str: "strip off _: from nodeIDs... as they are not valid NCNames" if val.startswith("_:"): return val[2:] @@ -169,7 +177,7 @@ def serialize( base: Optional[str] = None, encoding: Optional[str] = None, **args, - ): + ) -> None: self.__serialized: Dict[Identifier, int] = {} store = self.store # if base is given here, use that, if not and a base is set for the graph use that @@ -236,7 +244,7 @@ def serialize( # Set to None so that the memory can get garbage collected. self.__serialized = None # type: ignore[assignment] - def subject(self, subject: IdentifiedNode, depth: int = 1): + def subject(self, subject: Identifier, depth: int = 1): store = self.store writer = self.writer @@ -257,7 +265,8 @@ def subject(self, subject: IdentifiedNode, depth: int = 1): type = None element = type or RDFVOC.Description - writer.push(element) + # type error: Argument 1 to "push" of "XMLWriter" has incompatible type "Node"; expected "str" + writer.push(element) # type: ignore[arg-type] if isinstance(subject, BNode): @@ -277,9 +286,12 @@ def subj_as_obj_more_than(ceil): if (subject, None, None) in store: for predicate, object in store.predicate_objects(subject): if not (predicate == RDF.type and object == type): - self.predicate(predicate, object, depth + 1) + # type error: Argument 1 to "predicate" of "PrettyXMLSerializer" has incompatible type "Node"; expected "Identifier" + # type error: Argument 2 to "predicate" of "PrettyXMLSerializer" has incompatible type "Node"; expected "Identifier" + self.predicate(predicate, object, depth + 1) # type: ignore[arg-type] - writer.pop(element) + # type error: Argument 1 to "pop" of "XMLWriter" has incompatible type "Node"; expected "Optional[str]" + writer.pop(element) # type: ignore[arg-type] elif subject in self.forceRDFAbout: # TODO FIXME?: this looks like a duplicate of first condition @@ -288,7 +300,9 @@ def subj_as_obj_more_than(ceil): writer.pop(RDFVOC.Description) self.forceRDFAbout.remove(subject) # type: 
ignore[arg-type] - def predicate(self, predicate, object, depth=1): + def predicate( + self, predicate: Identifier, object: Identifier, depth: int = 1 + ) -> None: writer = self.writer store = self.store writer.push(predicate) @@ -308,7 +322,10 @@ def predicate(self, predicate, object, depth=1): writer.attribute(RDFVOC.datatype, object.datatype) writer.text(object) - elif object in self.__serialized or not (object, None, None) in store: + elif ( + object in self.__serialized + or not (object, None, None) in store # noqa: E713 + ): if isinstance(object, BNode): if more_than(store.triples((None, None, object)), 0): writer.attribute(RDFVOC.nodeID, fix(object)) @@ -338,14 +355,17 @@ def predicate(self, predicate, object, depth=1): for item in col: if isinstance(item, URIRef): self.forceRDFAbout.add(item) - self.subject(item) + # type error: Argument 1 to "subject" of "PrettyXMLSerializer" has incompatible type "Node"; expected "Identifier" + self.subject(item) # type: ignore[arg-type] if not isinstance(item, URIRef): - self.__serialized[item] = 1 + # type error: Invalid index type "Node" for "Dict[Identifier, int]"; expected type "Identifier" + self.__serialized[item] = 1 # type: ignore[index] else: if first( store.triples_choices( - (object, RDF.type, [OWL_NS.Class, RDFS.Class]) + # type error: Argument 1 to "triples_choices" of "Graph" has incompatible type "Tuple[Identifier, URIRef, List[URIRef]]"; expected "Union[Tuple[List[Node], Node, Node], Tuple[Node, List[Node], Node], Tuple[Node, Node, List[Node]]]" + (object, RDF.type, [OWL_NS.Class, RDFS.Class]) # type: ignore[arg-type] ) ) and isinstance(object, URIRef): writer.attribute(RDFVOC.resource, self.relativize(object)) diff --git a/rdflib/plugins/serializers/trig.py b/rdflib/plugins/serializers/trig.py index 18bee3f21..984f80c5a 100644 --- a/rdflib/plugins/serializers/trig.py +++ b/rdflib/plugins/serializers/trig.py @@ -3,6 +3,8 @@ See for syntax specification. 
""" +from __future__ import annotations + from typing import IO, TYPE_CHECKING, Dict, List, Optional, Tuple, Union from rdflib.graph import ConjunctiveGraph, Graph @@ -40,7 +42,8 @@ def preprocess(self) -> None: if len(context) == 0: continue self.store = context - self.getQName(context.identifier) + # Don't generate a new prefix for a graph URI if one already exists + self.getQName(context.identifier, False) self._subjects = {} for triple in context: @@ -97,10 +100,10 @@ def serialize( if isinstance(store.identifier, BNode): iri = store.identifier.n3() else: - iri = self.getQName(store.identifier) + # Show the full graph URI if a prefix for it doesn't already exist + iri = self.getQName(store.identifier, False) if iri is None: - # type error: "IdentifiedNode" has no attribute "n3" - iri = store.identifier.n3() # type: ignore[attr-defined] + iri = store.identifier.n3() self.write(self.indent() + "\n%s {" % iri) self.depth += 1 diff --git a/rdflib/plugins/serializers/trix.py b/rdflib/plugins/serializers/trix.py index c29a8559d..008360e6b 100644 --- a/rdflib/plugins/serializers/trix.py +++ b/rdflib/plugins/serializers/trix.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import IO, Optional from rdflib.graph import ConjunctiveGraph, Graph @@ -51,7 +53,7 @@ def serialize( self.writer.pop() stream.write("\n".encode("latin-1")) - def _writeGraph(self, graph): + def _writeGraph(self, graph): # noqa: N802 self.writer.push(TRIXNS["graph"]) if graph.base: self.writer.attribute( @@ -64,7 +66,7 @@ def _writeGraph(self, graph): self._writeTriple(triple) self.writer.pop() - def _writeTriple(self, triple): + def _writeTriple(self, triple): # noqa: N802 self.writer.push(TRIXNS["triple"]) for component in triple: if isinstance(component, URIRef): diff --git a/rdflib/plugins/serializers/turtle.py b/rdflib/plugins/serializers/turtle.py index ad1182474..a26df04a6 100644 --- a/rdflib/plugins/serializers/turtle.py +++ b/rdflib/plugins/serializers/turtle.py @@ -3,37 
+3,32 @@ See for syntax specification. """ +from __future__ import annotations + from collections import defaultdict -from functools import cmp_to_key +from typing import ( + IO, + TYPE_CHECKING, + Any, + DefaultDict, + Dict, + List, + Mapping, + Optional, + Sequence, + Tuple, +) from rdflib.exceptions import Error +from rdflib.graph import Graph from rdflib.namespace import RDF, RDFS from rdflib.serializer import Serializer -from rdflib.term import BNode, Literal, URIRef - -__all__ = ["RecursiveSerializer", "TurtleSerializer"] - +from rdflib.term import BNode, Literal, Node, URIRef -def _object_comparator(a, b): - """ - for nice clean output we sort the objects of triples, - some of them are literals, - these are sorted according to the sort order of the underlying python objects - in py3 not all things are comparable. - This falls back on comparing string representations when not. - """ +if TYPE_CHECKING: + from rdflib.graph import _PredicateType, _SubjectType, _TripleType - try: - if a > b: - return 1 - if a < b: - return -1 - return 0 - - except TypeError: - a = str(a) - b = str(b) - return (a > b) - (a < b) +__all__ = ["RecursiveSerializer", "TurtleSerializer"] class RecursiveSerializer(Serializer): @@ -41,14 +36,14 @@ class RecursiveSerializer(Serializer): predicateOrder = [RDF.type, RDFS.label] maxDepth = 10 indentString = " " - roundtrip_prefixes = () + roundtrip_prefixes: Tuple[Any, ...] 
= () - def __init__(self, store): + def __init__(self, store: Graph): super(RecursiveSerializer, self).__init__(store) - self.stream = None + self.stream: Optional[IO[bytes]] = None self.reset() - def addNamespace(self, prefix, uri): + def addNamespace(self, prefix: str, uri: URIRef) -> None: if prefix in self.namespaces and self.namespaces[prefix] != uri: raise Exception( "Trying to override namespace prefix %s => %s, but it's already bound to %s" @@ -56,7 +51,7 @@ def addNamespace(self, prefix, uri): ) self.namespaces[prefix] = uri - def checkSubject(self, subject): + def checkSubject(self, subject: _SubjectType) -> bool: """Check to see if the subject should be serialized yet""" if ( (self.isDone(subject)) @@ -67,13 +62,13 @@ def checkSubject(self, subject): return False return True - def isDone(self, subject): + def isDone(self, subject: _SubjectType) -> bool: """Return true if subject is serialized""" return subject in self._serialized - def orderSubjects(self): - seen = {} - subjects = [] + def orderSubjects(self) -> List[_SubjectType]: + seen: Dict[_SubjectType, bool] = {} + subjects: List[_SubjectType] = [] for classURI in self.topClasses: members = list(self.store.subjects(RDF.type, classURI)) @@ -95,23 +90,24 @@ def orderSubjects(self): return subjects - def preprocess(self): + def preprocess(self) -> None: for triple in self.store.triples((None, None, None)): self.preprocessTriple(triple) - def preprocessTriple(self, spo): + def preprocessTriple(self, spo: _TripleType) -> None: s, p, o = spo self._references[o] += 1 self._subjects[s] = True - def reset(self): + def reset(self) -> None: self.depth = 0 - self.lists = {} - self.namespaces = {} - self._references = defaultdict(int) - self._serialized = {} - self._subjects = {} - self._topLevels = {} + # Typed none because nothing is using it ... 
+ self.lists: Dict[None, None] = {} + self.namespaces: Dict[str, URIRef] = {} + self._references: DefaultDict[Node, int] = defaultdict(int) + self._serialized: Dict[_SubjectType, bool] = {} + self._subjects: Dict[_SubjectType, bool] = {} + self._topLevels: Dict[_SubjectType, bool] = {} if self.roundtrip_prefixes: if hasattr(self.roundtrip_prefixes, "__iter__"): @@ -122,28 +118,32 @@ def reset(self): for prefix, ns in self.store.namespaces(): self.addNamespace(prefix, ns) - def buildPredicateHash(self, subject): + def buildPredicateHash( + self, subject: _SubjectType + ) -> Mapping[_PredicateType, List[Node]]: """ Build a hash key by predicate to a list of objects for the given subject """ - properties = {} + properties: Dict[_PredicateType, List[Node]] = {} for s, p, o in self.store.triples((subject, None, None)): oList = properties.get(p, []) oList.append(o) properties[p] = oList return properties - def sortProperties(self, properties): + def sortProperties( + self, properties: Mapping[_PredicateType, List[Node]] + ) -> List[_PredicateType]: """Take a hash from predicate uris to lists of values. Sort the lists of values. 
Return a sorted list of properties.""" # Sort object lists for prop, objects in properties.items(): - objects.sort(key=cmp_to_key(_object_comparator)) + objects.sort() # Make sorted list of properties - propList = [] - seen = {} + propList: List[_PredicateType] = [] + seen: Dict[_PredicateType, bool] = {} for prop in self.predicateOrder: if (prop in properties) and (prop not in seen): propList.append(prop) @@ -156,17 +156,18 @@ def sortProperties(self, properties): seen[prop] = True return propList - def subjectDone(self, subject): + def subjectDone(self, subject: _SubjectType) -> None: """Mark a subject as done.""" self._serialized[subject] = True - def indent(self, modifier=0): + def indent(self, modifier: int = 0) -> str: """Returns indent string multiplied by the depth""" return (self.depth + modifier) * self.indentString - def write(self, text): + def write(self, text: str) -> None: """Write text in given encoding.""" - self.stream.write(text.encode(self.encoding, "replace")) + # type error: Item "None" of "Optional[IO[bytes]]" has no attribute "write" + self.stream.write(text.encode(self.encoding, "replace")) # type: ignore[union-attr] SUBJECT = 0 @@ -181,15 +182,16 @@ class TurtleSerializer(RecursiveSerializer): short_name = "turtle" indentString = " " - def __init__(self, store): - self._ns_rewrite = {} + def __init__(self, store: Graph): + self._ns_rewrite: Dict[str, str] = {} super(TurtleSerializer, self).__init__(store) - self.keywords = {RDF.type: "a"} + self.keywords: Dict[Node, str] = {RDF.type: "a"} self.reset() self.stream = None self._spacious = _SPACIOUS_OUTPUT - def addNamespace(self, prefix, namespace): + # type error: Return type "str" of "addNamespace" incompatible with return type "None" in supertype "RecursiveSerializer" + def addNamespace(self, prefix: str, namespace: URIRef) -> str: # type: ignore[override] # Turtle does not support prefix that start with _ # if they occur in the graph, rewrite to p_blah # this is more complicated since we 
need to make sure p_blah @@ -213,13 +215,21 @@ def addNamespace(self, prefix, namespace): super(TurtleSerializer, self).addNamespace(prefix, namespace) return prefix - def reset(self): + def reset(self) -> None: super(TurtleSerializer, self).reset() - self._shortNames = {} + # typing as Dict[None, None] because nothing seems to be using it + self._shortNames: Dict[None, None] = {} self._started = False self._ns_rewrite = {} - def serialize(self, stream, base=None, encoding=None, spacious=None, **args): + def serialize( + self, + stream: IO[bytes], + base: Optional[str] = None, + encoding: Optional[str] = None, + spacious: Optional[bool] = None, + **args: Any, + ) -> None: self.reset() self.stream = stream # if base is given here, use that, if not and a base is set for the graph use that @@ -250,7 +260,7 @@ def serialize(self, stream, base=None, encoding=None, spacious=None, **args): self.base = None - def preprocessTriple(self, triple): + def preprocessTriple(self, triple: _TripleType) -> None: super(TurtleSerializer, self).preprocessTriple(triple) for i, node in enumerate(triple): if i == VERB and node in self.keywords: @@ -265,7 +275,7 @@ def preprocessTriple(self, triple): self._references[p] += 1 # TODO: Rename to get_pname - def getQName(self, uri, gen_prefix=True): + def getQName(self, uri: Node, gen_prefix: bool = True) -> Optional[str]: if not isinstance(uri, URIRef): return None @@ -295,7 +305,7 @@ def getQName(self, uri, gen_prefix=True): return "%s:%s" % (prefix, local) - def startDocument(self): + def startDocument(self) -> None: self._started = True ns_list = sorted(self.namespaces.items()) @@ -306,22 +316,22 @@ def startDocument(self): if ns_list and self._spacious: self.write("\n") - def endDocument(self): + def endDocument(self) -> None: if self._spacious: self.write("\n") - def statement(self, subject): + def statement(self, subject: _SubjectType) -> bool: self.subjectDone(subject) return self.s_squared(subject) or self.s_default(subject) - def 
s_default(self, subject): + def s_default(self, subject: _SubjectType) -> bool: self.write("\n" + self.indent()) self.path(subject, SUBJECT) self.predicateList(subject) self.write(" .") return True - def s_squared(self, subject): + def s_squared(self, subject: _SubjectType) -> bool: if (self._references[subject] > 0) or not isinstance(subject, BNode): return False self.write("\n" + self.indent() + "[]") @@ -329,20 +339,20 @@ def s_squared(self, subject): self.write(" .") return True - def path(self, node, position, newline=False): + def path(self, node: Node, position: int, newline: bool = False) -> None: if not ( self.p_squared(node, position, newline) or self.p_default(node, position, newline) ): raise Error("Cannot serialize node '%s'" % (node,)) - def p_default(self, node, position, newline=False): + def p_default(self, node: Node, position: int, newline: bool = False) -> bool: if position != SUBJECT and not newline: self.write(" ") self.write(self.label(node, position)) return True - def label(self, node, position): + def label(self, node: Node, position: int) -> str: if node == RDF.nil: return "()" if position is VERB and node in self.keywords: @@ -353,11 +363,11 @@ def label(self, node, position): qname_callback=lambda dt: self.getQName(dt, _GEN_QNAME_FOR_DT), ) else: - node = self.relativize(node) + node = self.relativize(node) # type: ignore[type-var] return self.getQName(node, position == VERB) or node.n3() - def p_squared(self, node, position, newline=False): + def p_squared(self, node: Node, position: int, newline: bool = False) -> bool: if ( not isinstance(node, BNode) or node in self._serialized @@ -390,7 +400,7 @@ def p_squared(self, node, position, newline=False): return True - def isValidList(self, l_): + def isValidList(self, l_: Node) -> bool: """ Checks if l is a valid RDF list, i.e. no nodes have other properties. 
""" @@ -402,18 +412,20 @@ def isValidList(self, l_): while l_: if l_ != RDF.nil and len(list(self.store.predicate_objects(l_))) != 2: return False - l_ = self.store.value(l_, RDF.rest) + # type error: Incompatible types in assignment (expression has type "Optional[Node]", variable has type "Node") + l_ = self.store.value(l_, RDF.rest) # type: ignore[assignment] return True - def doList(self, l_): + def doList(self, l_: Node) -> None: while l_: item = self.store.value(l_, RDF.first) if item is not None: self.path(item, OBJECT) self.subjectDone(l_) - l_ = self.store.value(l_, RDF.rest) + # type error: Incompatible types in assignment (expression has type "Optional[Node]", variable has type "Node") + l_ = self.store.value(l_, RDF.rest) # type: ignore[assignment] - def predicateList(self, subject, newline=False): + def predicateList(self, subject: Node, newline: bool = False) -> None: properties = self.buildPredicateHash(subject) propList = self.sortProperties(properties) if len(propList) == 0: @@ -425,10 +437,10 @@ def predicateList(self, subject, newline=False): self.verb(predicate, newline=True) self.objectList(properties[predicate]) - def verb(self, node, newline=False): + def verb(self, node: Node, newline: bool = False) -> None: self.path(node, VERB, newline) - def objectList(self, objects): + def objectList(self, objects: Sequence[Node]) -> None: count = len(objects) if count == 0: return diff --git a/rdflib/plugins/serializers/xmlwriter.py b/rdflib/plugins/serializers/xmlwriter.py index 88cebdeda..8c00521ad 100644 --- a/rdflib/plugins/serializers/xmlwriter.py +++ b/rdflib/plugins/serializers/xmlwriter.py @@ -1,34 +1,53 @@ +from __future__ import annotations + import codecs +from typing import IO, TYPE_CHECKING, Dict, Iterable, List, Optional, Tuple from xml.sax.saxutils import escape, quoteattr +from rdflib.term import URIRef + +if TYPE_CHECKING: + from rdflib.namespace import Namespace, NamespaceManager + + __all__ = ["XMLWriter"] ESCAPE_ENTITIES = {"\r": " "} 
class XMLWriter: - def __init__(self, stream, namespace_manager, encoding=None, decl=1, extra_ns=None): + def __init__( + self, + stream: IO[bytes], + namespace_manager: NamespaceManager, + encoding: Optional[str] = None, + decl: int = 1, + extra_ns: Optional[Dict[str, Namespace]] = None, + ): encoding = encoding or "utf-8" encoder, decoder, stream_reader, stream_writer = codecs.lookup(encoding) - self.stream = stream = stream_writer(stream) + # NOTE on type ignores: this is mainly because the variable is being re-used. + # type error: Incompatible types in assignment (expression has type "StreamWriter", variable has type "IO[bytes]") + self.stream = stream = stream_writer(stream) # type: ignore[assignment] if decl: - stream.write('' % encoding) - self.element_stack = [] + # type error: No overload variant of "write" of "IO" matches argument type "str" + stream.write('' % encoding) # type: ignore[call-overload] + self.element_stack: List[str] = [] self.nm = namespace_manager self.extra_ns = extra_ns or {} self.closed = True - def __get_indent(self): + def __get_indent(self) -> str: return " " * len(self.element_stack) indent = property(__get_indent) - def __close_start_tag(self): + def __close_start_tag(self) -> None: if not self.closed: # TODO: self.closed = True self.stream.write(">") - def push(self, uri): + def push(self, uri: str) -> None: self.__close_start_tag() write = self.stream.write write("\n") @@ -38,7 +57,7 @@ def push(self, uri): self.closed = False self.parent = False - def pop(self, uri=None): + def pop(self, uri: Optional[str] = None) -> None: top = self.element_stack.pop() if uri: assert uri == top @@ -53,7 +72,9 @@ def pop(self, uri=None): write("" % self.qname(top)) self.parent = True - def element(self, uri, content, attributes={}): + def element( + self, uri: str, content: str, attributes: Dict[URIRef, str] = {} + ) -> None: """Utility method for adding a complete simple element""" self.push(uri) for k, v in attributes.items(): @@ -61,7 +82,7 
@@ def element(self, uri, content, attributes={}): self.text(content) self.pop() - def namespaces(self, namespaces=None): + def namespaces(self, namespaces: Iterable[Tuple[str, str]] = None) -> None: if not namespaces: namespaces = self.nm.namespaces() @@ -80,11 +101,11 @@ def namespaces(self, namespaces=None): else: write(' xmlns="%s"\n' % namespace) - def attribute(self, uri, value): + def attribute(self, uri: str, value: str) -> None: write = self.stream.write write(" %s=%s" % (self.qname(uri), quoteattr(value))) - def text(self, text): + def text(self, text: str) -> None: self.__close_start_tag() if "<" in text and ">" in text and "]]>" not in text: self.stream.write(" str: """Compute qname for a uri using our extra namespaces, or the given namespace manager""" diff --git a/rdflib/plugins/shared/jsonld/context.py b/rdflib/plugins/shared/jsonld/context.py index 2f6cedbdd..f80cdf376 100644 --- a/rdflib/plugins/shared/jsonld/context.py +++ b/rdflib/plugins/shared/jsonld/context.py @@ -1,10 +1,10 @@ -# -*- coding: utf-8 -*- """ Implementation of the JSON-LD Context structure. 
See: http://json-ld.org/ """ + # https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/context.py from __future__ import annotations @@ -68,15 +68,19 @@ class Defined(int): # From URI_GEN_DELIMS = (":", "/", "?", "#", "[", "]", "@") +_ContextSourceType = Union[ + List[Union[Dict[str, Any], str, None]], Dict[str, Any], str, None +] + class Context: def __init__( self, - source: Optional[Any] = None, + source: _ContextSourceType = None, base: Optional[str] = None, - version: Optional[float] = None, + version: Optional[float] = 1.1, ): - self.version: float = version or 1.0 + self.version: float = version or 1.1 self.language = None self.vocab: Optional[str] = None self._base: Optional[str] @@ -111,13 +115,13 @@ def base(self, base: Optional[str]): ) self._basedomain = "%s://%s" % urlsplit(base)[0:2] if base else None - def subcontext(self, source: Any, propagate: bool = True) -> "Context": + def subcontext(self, source: Any, propagate: bool = True) -> Context: # IMPROVE: to optimize, implement SubContext with parent fallback support parent = self.parent if self.propagate is False else self # type error: Item "None" of "Optional[Context]" has no attribute "_subcontext" return parent._subcontext(source, propagate) # type: ignore[union-attr] - def _subcontext(self, source: Any, propagate: bool) -> "Context": + def _subcontext(self, source: Any, propagate: bool) -> Context: ctx = Context(version=self.version) ctx.propagate = propagate ctx.parent = self @@ -125,7 +129,7 @@ def _subcontext(self, source: Any, propagate: bool) -> "Context": ctx.vocab = self.vocab ctx.base = self.base ctx.doc_base = self.doc_base - ctx._alias = {k: l[:] for k, l in self._alias.items()} + ctx._alias = {k: l[:] for k, l in self._alias.items()} # noqa: E741 ctx.terms = self.terms.copy() ctx._lookup = self._lookup.copy() ctx._prefixes = self._prefixes.copy() @@ -143,23 +147,26 @@ def _clear(self) -> None: self.active = False self.propagate = True - def 
get_context_for_term(self, term: Optional["Term"]) -> "Context": + def get_context_for_term(self, term: Optional[Term]) -> Context: if term and term.context is not UNDEF: return self._subcontext(term.context, propagate=True) return self - def get_context_for_type(self, node: Any) -> Optional["Context"]: + def get_context_for_type(self, node: Any) -> Optional[Context]: if self.version >= 1.1: rtype = self.get_type(node) if isinstance(node, dict) else None if not isinstance(rtype, list): rtype = [rtype] if rtype else [] + typeterm = None for rt in rtype: - typeterm = self.terms.get(rt) - if typeterm: + try: + typeterm = self.terms.get(rt) + except TypeError: + # extra lenience, triggers if type is set to a literal + pass + if typeterm is not None: break - else: - typeterm = None if typeterm and typeterm.context: subcontext = self.subcontext(typeterm.context, propagate=False) @@ -390,12 +397,14 @@ def to_symbol(self, iri: str) -> Optional[str]: def load( self, - source: Optional[Union[List[Any], Any]], + source: _ContextSourceType, base: Optional[str] = None, referenced_contexts: Set[Any] = None, ): self.active = True - sources: List[Any] = [] + sources: List[Tuple[Optional[str], Union[Dict[str, Any], str, None]]] = [] + # "Union[List[Union[Dict[str, Any], str]], List[Dict[str, Any]], List[str]]" : expression + # "Union[List[Dict[str, Any]], Dict[str, Any], List[str], str]" : variable source = source if isinstance(source, list) else [source] referenced_contexts = referenced_contexts or set() self._prep_sources(base, source, sources, referenced_contexts) @@ -403,7 +412,8 @@ def load( if source is None: self._clear() else: - self._read_source(source, source_url, referenced_contexts) + # type error: Argument 1 to "_read_source" of "Context" has incompatible type "Union[Dict[str, Any], str]"; expected "Dict[str, Any]" + self._read_source(source, source_url, referenced_contexts) # type: ignore[arg-type] def _accept_term(self, key: str) -> bool: if self.version < 1.1: @@ 
-416,8 +426,8 @@ def _accept_term(self, key: str) -> bool: def _prep_sources( self, base: Optional[str], - inputs: List[Any], - sources: List[Any], + inputs: Union[List[Union[Dict[str, Any], str, None]], List[str]], + sources: List[Tuple[Optional[str], Union[Dict[str, Any], str, None]]], referenced_contexts: Set[str], in_source_url: Optional[str] = None, ): @@ -444,10 +454,12 @@ def _prep_sources( if isinstance(source, dict): if CONTEXT in source: source = source[CONTEXT] - source = source if isinstance(source, list) else [source] + # type ignore: Incompatible types in assignment (expression has type "List[Union[Dict[str, Any], str, None]]", variable has type "Union[Dict[str, Any], str, None]") + source = source if isinstance(source, list) else [source] # type: ignore[assignment] if isinstance(source, list): - self._prep_sources( + # type error: Statement is unreachable + self._prep_sources( # type: ignore[unreachable] new_base, source, sources, referenced_contexts, source_url ) else: @@ -469,7 +481,7 @@ def _fetch_context( return self._context_cache[source_url] # type error: Incompatible types in assignment (expression has type "Optional[Any]", variable has type "str") - source = source_to_json(source_url) # type: ignore[assignment] + source, _ = source_to_json(source_url) if source and CONTEXT not in source: raise INVALID_REMOTE_CONTEXT diff --git a/rdflib/plugins/shared/jsonld/errors.py b/rdflib/plugins/shared/jsonld/errors.py index f1973f14a..6ba6e4b9a 100644 --- a/rdflib/plugins/shared/jsonld/errors.py +++ b/rdflib/plugins/shared/jsonld/errors.py @@ -1,6 +1,5 @@ -# -*- coding: utf-8 -*- # https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/errors.py -class JSONLDException(ValueError): +class JSONLDException(ValueError): # noqa: N818 pass diff --git a/rdflib/plugins/shared/jsonld/keys.py b/rdflib/plugins/shared/jsonld/keys.py index 8e2ad4082..6b998ad41 100644 --- a/rdflib/plugins/shared/jsonld/keys.py +++ 
b/rdflib/plugins/shared/jsonld/keys.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- # https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/keys.py BASE = "@base" CONTAINER = "@container" diff --git a/rdflib/plugins/shared/jsonld/util.py b/rdflib/plugins/shared/jsonld/util.py index 486f8b077..524d5ece8 100644 --- a/rdflib/plugins/shared/jsonld/util.py +++ b/rdflib/plugins/shared/jsonld/util.py @@ -1,9 +1,8 @@ -# -*- coding: utf-8 -*- # https://github.com/RDFLib/rdflib-jsonld/blob/feature/json-ld-1.1/rdflib_jsonld/util.py from __future__ import annotations import pathlib -from typing import IO, TYPE_CHECKING, Any, Optional, TextIO, Tuple, Union +from typing import IO, TYPE_CHECKING, Any, List, Optional, TextIO, Tuple, Union if TYPE_CHECKING: import json @@ -15,6 +14,7 @@ except ImportError: import simplejson as json +from html.parser import HTMLParser from io import TextIOBase, TextIOWrapper from posixpath import normpath, sep from urllib.parse import urljoin, urlsplit, urlunsplit @@ -32,13 +32,29 @@ def source_to_json( source: Optional[ Union[IO[bytes], TextIO, InputSource, str, bytes, pathlib.PurePath] - ] -) -> Optional[Any]: + ], + fragment_id: Optional[str] = None, + extract_all_scripts: Optional[bool] = False, +) -> Tuple[Any, Any]: + """Extract JSON from a source document. + + The source document can be JSON or HTML with embedded JSON script elements (type attribute = "application/ld+json"). + To process as HTML ``source.content_type`` must be set to "text/html" or "application/xhtml+xml". 
+ + :param source: the input source document (JSON or HTML) + + :param fragment_id: if source is an HTML document then extract only the script element with matching id attribute, defaults to None + + :param extract_all_scripts: if source is an HTML document then extract all script elements (unless fragment_id is provided), defaults to False (extract only the first script element) + + :return: Tuple with the extracted JSON document and value of the HTML base element + """ + if isinstance(source, PythonInputSource): - return source.data + return (source.data, None) if isinstance(source, StringInputSource): - return json.load(source.getCharacterStream()) + return (json.load(source.getCharacterStream()), None) # TODO: conneg for JSON (fix support in rdflib's URLInputSource!) source = create_input_source(source, format="json-ld") @@ -51,7 +67,15 @@ def source_to_json( use_stream = stream else: use_stream = TextIOWrapper(stream, encoding="utf-8") - return json.load(use_stream) + + if source.content_type in ("text/html", "application/xhtml+xml"): + parser = HTMLJSONParser( + fragment_id=fragment_id, extract_all_scripts=extract_all_scripts + ) + parser.feed(use_stream.read()) + return (parser.get_json(), parser.get_base()) + else: + return (json.load(use_stream), None) finally: stream.close() @@ -127,3 +151,67 @@ def context_from_urlinputsource(source: URLInputSource) -> Optional[str]: # typ "norm_url", "context_from_urlinputsource", ] + + +class HTMLJSONParser(HTMLParser): + def __init__( + self, + fragment_id: Optional[str] = None, + extract_all_scripts: Optional[bool] = False, + ): + super().__init__() + self.fragment_id = fragment_id + self.json: List[Any] = [] + self.contains_json = False + self.fragment_id_does_not_match = False + self.base = None + self.extract_all_scripts = extract_all_scripts + self.script_count = 0 + + def handle_starttag(self, tag, attrs): + self.contains_json = False + self.fragment_id_does_not_match = False + + # Only set self. 
contains_json to True if the + # type is 'application/ld+json' + if tag == "script": + for attr, value in attrs: + if attr == "type" and value == "application/ld+json": + self.contains_json = True + elif attr == "id" and self.fragment_id and value != self.fragment_id: + self.fragment_id_does_not_match = True + + elif tag == "base": + for attr, value in attrs: + if attr == "href": + self.base = value + + def handle_data(self, data): + # Only do something when we know the context is a + # script element containing application/ld+json + + if self.contains_json is True and self.fragment_id_does_not_match is False: + + if not self.extract_all_scripts and self.script_count > 0: + return + + if data.strip() == "": + # skip empty data elements + return + + # Try to parse the json + parsed = json.loads(data) + + # Add to the result document + if isinstance(parsed, list): + self.json.extend(parsed) + else: + self.json.append(parsed) + + self.script_count += 1 + + def get_json(self): + return self.json + + def get_base(self): + return self.base diff --git a/rdflib/plugins/sparql/__init__.py b/rdflib/plugins/sparql/__init__.py index a11a6e004..0ab7f80bf 100644 --- a/rdflib/plugins/sparql/__init__.py +++ b/rdflib/plugins/sparql/__init__.py @@ -33,8 +33,8 @@ PLUGIN_ENTRY_POINT = "rdf.plugins.sparqleval" -from . import operators, parser, parserutils # noqa: E402 -from .processor import prepareQuery, prepareUpdate, processUpdate # noqa: F401, E402 +from . 
import operators, parser, parserutils +from .processor import prepareQuery, prepareUpdate, processUpdate assert parser assert operators diff --git a/rdflib/plugins/sparql/aggregates.py b/rdflib/plugins/sparql/aggregates.py index d4a7d6592..12972e795 100644 --- a/rdflib/plugins/sparql/aggregates.py +++ b/rdflib/plugins/sparql/aggregates.py @@ -1,3 +1,7 @@ +""" +Aggregation functions +""" + from __future__ import annotations from decimal import Decimal @@ -25,17 +29,13 @@ from rdflib.plugins.sparql.sparql import FrozenBindings, NotBoundError, SPARQLTypeError from rdflib.term import BNode, Identifier, Literal, URIRef, Variable -""" -Aggregation functions -""" - class Accumulator: """abstract base class for different aggregation functions""" def __init__(self, aggregation: CompValue): self.get_value: Callable[[], Optional[Literal]] - self.update: Callable[[FrozenBindings, "Aggregator"], None] + self.update: Callable[[FrozenBindings, Aggregator], None] self.var = aggregation.res self.expr = aggregation.vars if not aggregation.distinct: @@ -69,7 +69,7 @@ def __init__(self, aggregation: CompValue): # type error: Cannot assign to a method self.eval_row = self.eval_full_row # type: ignore[assignment] - def update(self, row: FrozenBindings, aggregator: "Aggregator") -> None: + def update(self, row: FrozenBindings, aggregator: Aggregator) -> None: try: val = self.eval_row(row) except NotBoundError: @@ -97,13 +97,13 @@ def use_row(self, row: FrozenBindings) -> bool: @overload -def type_safe_numbers(*args: int) -> Tuple[int]: - ... +def type_safe_numbers(*args: int) -> Tuple[int]: ... @overload -def type_safe_numbers(*args: Union[Decimal, float, int]) -> Tuple[Union[float, int]]: - ... +def type_safe_numbers( + *args: Union[Decimal, float, int] +) -> Tuple[Union[float, int]]: ... 
def type_safe_numbers(*args: Union[Decimal, float, int]) -> Iterable[Union[float, int]]: @@ -122,7 +122,7 @@ def __init__(self, aggregation: CompValue): self.value = 0 self.datatype: Optional[str] = None - def update(self, row: FrozenBindings, aggregator: "Aggregator") -> None: + def update(self, row: FrozenBindings, aggregator: Aggregator) -> None: try: value = _eval(self.expr, row) dt = self.datatype @@ -150,7 +150,7 @@ def __init__(self, aggregation: CompValue): self.sum = 0 self.datatype: Optional[str] = None - def update(self, row: FrozenBindings, aggregator: "Aggregator") -> None: + def update(self, row: FrozenBindings, aggregator: Aggregator) -> None: try: value = _eval(self.expr, row) dt = self.datatype @@ -195,7 +195,7 @@ def set_value(self, bindings: MutableMapping[Variable, Identifier]) -> None: # simply do not set if self.value is still None bindings[self.var] = Literal(self.value) - def update(self, row: FrozenBindings, aggregator: "Aggregator") -> None: + def update(self, row: FrozenBindings, aggregator: Aggregator) -> None: try: if self.value is None: self.value = _eval(self.expr, row) @@ -228,9 +228,10 @@ class Sample(Accumulator): def __init__(self, aggregation): super(Sample, self).__init__(aggregation) # DISTINCT would not change the value - self.use_row = self.dont_care + # type error: Cannot assign to a method + self.use_row = self.dont_care # type: ignore[method-assign] - def update(self, row: FrozenBindings, aggregator: "Aggregator") -> None: + def update(self, row: FrozenBindings, aggregator: Aggregator) -> None: try: # set the value now aggregator.bindings[self.var] = _eval(self.expr, row) @@ -245,13 +246,18 @@ def get_value(self) -> None: class GroupConcat(Accumulator): - def __init__(self, aggregation): + value: List[Literal] + + def __init__(self, aggregation: CompValue): super(GroupConcat, self).__init__(aggregation) # only GROUPCONCAT needs to have a list as accumulator self.value = [] - self.separator = aggregation.separator or " " + 
if aggregation.separator is None: + self.separator = " " + else: + self.separator = aggregation.separator - def update(self, row: FrozenBindings, aggregator: "Aggregator") -> None: + def update(self, row: FrozenBindings, aggregator: Aggregator) -> None: try: value = _eval(self.expr, row) # skip UNDEF diff --git a/rdflib/plugins/sparql/algebra.py b/rdflib/plugins/sparql/algebra.py index 52aa92a7f..5cb22d265 100644 --- a/rdflib/plugins/sparql/algebra.py +++ b/rdflib/plugins/sparql/algebra.py @@ -1,5 +1,3 @@ -from __future__ import annotations - """ Converting the 'parse-tree' output of pyparsing to a SPARQL Algebra expression @@ -7,6 +5,8 @@ """ +from __future__ import annotations + import collections import functools import operator @@ -174,14 +174,10 @@ def triples( List[List[Identifier]], List[Tuple[Identifier, Identifier, Identifier]] ] ) -> List[Tuple[Identifier, Identifier, Identifier]]: - # NOTE on type errors: errors are a result of the variable being reused for - # a different type. 
- # type error: Incompatible types in assignment (expression has type "Sequence[Identifier]", variable has type "Union[List[List[Identifier]], List[Tuple[Identifier, Identifier, Identifier]]]") - l = reduce(lambda x, y: x + y, l) # type: ignore[assignment] # noqa: E741 - if (len(l) % 3) != 0: + _l = reduce(lambda x, y: x + y, l) + if (len(_l) % 3) != 0: raise Exception("these aint triples") - # type error: Generator has incompatible item type "Tuple[Union[List[Identifier], Tuple[Identifier, Identifier, Identifier]], Union[List[Identifier], Tuple[Identifier, Identifier, Identifier]], Union[List[Identifier], Tuple[Identifier, Identifier, Identifier]]]"; expected "Tuple[Identifier, Identifier, Identifier]" - return reorderTriples((l[x], l[x + 1], l[x + 2]) for x in range(0, len(l), 3)) # type: ignore[misc] + return reorderTriples((_l[x], _l[x + 1], _l[x + 2]) for x in range(0, len(_l), 3)) # type error: Missing return statement @@ -206,17 +202,15 @@ def translatePName( # type: ignore[return] @overload -def translatePath(p: URIRef) -> None: - ... +def translatePath(p: URIRef) -> None: ... @overload -def translatePath(p: CompValue) -> "Path": - ... +def translatePath(p: CompValue) -> Path: ... # type error: Missing return statement -def translatePath(p: typing.Union[CompValue, URIRef]) -> Optional["Path"]: # type: ignore[return] +def translatePath(p: typing.Union[CompValue, URIRef]) -> Optional[Path]: # type: ignore[return] """ Translate PropertyPath expressions """ @@ -334,6 +328,11 @@ def translateGroupGraphPattern(graphPattern: CompValue) -> CompValue: http://www.w3.org/TR/sparql11-query/#convertGraphPattern """ + if graphPattern.translated: + # This occurs if it is attempted to translate a group graph pattern twice, + # which occurs with nested (NOT) EXISTS filters. Simply return the already + # translated pattern instead. 
+ return graphPattern if graphPattern.name == "SubSelect": # The first output from translate cannot be None for a subselect query # as it can only be None for certain DESCRIBE queries. @@ -390,6 +389,9 @@ def translateGroupGraphPattern(graphPattern: CompValue) -> CompValue: if filters: G = Filter(expr=filters, p=G) + # Mark this graph pattern as translated + G.translated = True + return G @@ -417,16 +419,14 @@ def _traverse( if isinstance(e, (list, ParseResults)): return [_traverse(x, visitPre, visitPost) for x in e] - # type error: Statement is unreachable - elif isinstance(e, tuple): # type: ignore[unreachable] + elif isinstance(e, tuple): return tuple([_traverse(x, visitPre, visitPost) for x in e]) elif isinstance(e, CompValue): for k, val in e.items(): e[k] = _traverse(val, visitPre, visitPost) - # type error: Statement is unreachable - _e = visitPost(e) # type: ignore[unreachable] + _e = visitPost(e) if _e is not None: return _e @@ -444,8 +444,7 @@ def _traverseAgg(e, visitor: Callable[[Any, Any], Any] = lambda n, v: None): if isinstance(e, (list, ParseResults, tuple)): res = [_traverseAgg(x, visitor) for x in e] - # type error: Statement is unreachable - elif isinstance(e, CompValue): # type: ignore[unreachable] + elif isinstance(e, CompValue): for k, val in e.items(): if val is not None: res.append(_traverseAgg(val, visitor)) @@ -850,9 +849,9 @@ def translateQuads( else: alltriples = [] - allquads: DefaultDict[ - str, List[Tuple[Identifier, Identifier, Identifier]] - ] = collections.defaultdict(list) + allquads: DefaultDict[str, List[Tuple[Identifier, Identifier, Identifier]]] = ( + collections.defaultdict(list) + ) if quads.quadsNotTriples: for q in quads.quadsNotTriples: @@ -975,9 +974,9 @@ class _AlgebraTranslator: def __init__(self, query_algebra: Query): self.query_algebra = query_algebra - self.aggr_vars: DefaultDict[ - Identifier, List[Identifier] - ] = collections.defaultdict(list) + self.aggr_vars: DefaultDict[Identifier, List[Identifier]] = ( + 
collections.defaultdict(list) + ) self._alg_translation: str = "" def _replace( @@ -1010,16 +1009,12 @@ def convert_node_arg( ) -> str: if isinstance(node_arg, Identifier): if node_arg in self.aggr_vars.keys(): - # type error: "Identifier" has no attribute "n3" - grp_var = self.aggr_vars[node_arg].pop(0).n3() # type: ignore[attr-defined] + grp_var = self.aggr_vars[node_arg].pop(0).n3() return grp_var else: - # type error: "Identifier" has no attribute "n3" - return node_arg.n3() # type: ignore[attr-defined] + return node_arg.n3() elif isinstance(node_arg, CompValue): return "{" + node_arg.name + "}" - elif isinstance(node_arg, Expr): - return "{" + node_arg.name + "}" elif isinstance(node_arg, str): return node_arg else: @@ -1175,7 +1170,6 @@ def sparql_query_text(self, node): " ".join([self.convert_node_arg(pattern) for pattern in node.part]), ) elif node.name == "TriplesBlock": - print("triplesblock") self._replace( "{TriplesBlock}", "".join( @@ -1285,7 +1279,7 @@ def sparql_query_text(self, node): elif node.name == "MultiplicativeExpression": left_side = self.convert_node_arg(node.expr) multiplication = left_side - for i, operator in enumerate(node.op): # noqa: F402 + for i, operator in enumerate(node.op): multiplication += ( operator + " " + self.convert_node_arg(node.other[i]) + " " ) @@ -1336,7 +1330,6 @@ def sparql_query_text(self, node): # According to https://www.w3.org/TR/2013/REC-sparql11-query-20130321/#rNotExistsFunc # NotExistsFunc can only have a GroupGraphPattern as parameter. 
However, when we print the query algebra # we get a GroupGraphPatternSub - print(node.graph.name) self._replace( "{Builtin_NOTEXISTS}", "NOT EXISTS " + "{{" + node.graph.name + "}}" ) diff --git a/rdflib/plugins/sparql/datatypes.py b/rdflib/plugins/sparql/datatypes.py index 2f60fe428..bc06525a0 100644 --- a/rdflib/plugins/sparql/datatypes.py +++ b/rdflib/plugins/sparql/datatypes.py @@ -1,16 +1,17 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Dict, List, Optional, Set - """ Utility functions for supporting the XML Schema Datatypes hierarchy """ +from __future__ import annotations + +from typing import TYPE_CHECKING, Dict, List, Optional, Set + from rdflib.namespace import XSD if TYPE_CHECKING: from rdflib.term import URIRef + XSD_DTs: Set[URIRef] = set( ( XSD.integer, diff --git a/rdflib/plugins/sparql/evaluate.py b/rdflib/plugins/sparql/evaluate.py index 08dd02d57..123749026 100644 --- a/rdflib/plugins/sparql/evaluate.py +++ b/rdflib/plugins/sparql/evaluate.py @@ -1,5 +1,3 @@ -from __future__ import annotations - """ These method recursively evaluate the SPARQL Algebra @@ -16,6 +14,8 @@ """ +from __future__ import annotations + import collections import itertools import json as j @@ -63,6 +63,7 @@ if TYPE_CHECKING: from rdflib.paths import Path + _Triple = Tuple[Identifier, Identifier, Identifier] @@ -157,7 +158,7 @@ def evalJoin(ctx: QueryContext, join: CompValue) -> Generator[FrozenDict, None, return _join(a, b) -def evalUnion(ctx: QueryContext, union: CompValue) -> Iterable[FrozenBindings]: +def evalUnion(ctx: QueryContext, union: CompValue) -> List[Any]: branch1_branch2 = [] for x in evalPart(ctx, union.p1): branch1_branch2.append(x) diff --git a/rdflib/plugins/sparql/evalutils.py b/rdflib/plugins/sparql/evalutils.py index 84c868c94..1f737e469 100644 --- a/rdflib/plugins/sparql/evalutils.py +++ b/rdflib/plugins/sparql/evalutils.py @@ -52,15 +52,13 @@ def _minus( @overload def _join( a: Iterable[FrozenBindings], b: 
Iterable[Mapping[Identifier, Identifier]] -) -> Generator[FrozenBindings, None, None]: - ... +) -> Generator[FrozenBindings, None, None]: ... @overload def _join( a: Iterable[FrozenDict], b: Iterable[Mapping[Identifier, Identifier]] -) -> Generator[FrozenDict, None, None]: - ... +) -> Generator[FrozenDict, None, None]: ... def _join( @@ -106,8 +104,7 @@ def _eval( expr: Union[Literal, URIRef], ctx: FrozenBindings, raise_not_bound_error: bool = ..., -) -> Union[Literal, URIRef]: - ... +) -> Union[Literal, URIRef]: ... @overload @@ -115,8 +112,7 @@ def _eval( expr: Union[Variable, Expr], ctx: FrozenBindings, raise_not_bound_error: bool = ..., -) -> Union[Any, SPARQLError]: - ... +) -> Union[Any, SPARQLError]: ... def _eval( diff --git a/rdflib/plugins/sparql/operators.py b/rdflib/plugins/sparql/operators.py index 908b1d5c5..93f748220 100644 --- a/rdflib/plugins/sparql/operators.py +++ b/rdflib/plugins/sparql/operators.py @@ -1,5 +1,3 @@ -from __future__ import annotations - """ This contains evaluation functions for expressions @@ -8,6 +6,8 @@ """ +from __future__ import annotations + import datetime as py_datetime # naming conflict with function within this module import hashlib import math @@ -754,7 +754,7 @@ def MultiplicativeExpression( for op, f in zip(e.op, other): f = numeric(f) - if type(f) == float: + if type(f) == float: # noqa: E721 res = float(res) if op == "*": @@ -988,8 +988,7 @@ def simplify(expr: Any) -> Any: if isinstance(expr, (list, ParseResults)): return list(map(simplify, expr)) - # type error: Statement is unreachable - if not isinstance(expr, CompValue): # type: ignore[unreachable] + if not isinstance(expr, CompValue): return expr if expr.name.endswith("Expression"): if expr.other is None: @@ -1164,18 +1163,15 @@ def calculateFinalDateTime( @overload -def EBV(rt: Literal) -> bool: - ... +def EBV(rt: Literal) -> bool: ... @overload -def EBV(rt: Union[Variable, IdentifiedNode, SPARQLError, Expr]) -> NoReturn: - ... 
+def EBV(rt: Union[Variable, IdentifiedNode, SPARQLError, Expr]) -> NoReturn: ... @overload -def EBV(rt: Union[Identifier, SPARQLError, Expr]) -> Union[bool, NoReturn]: - ... +def EBV(rt: Union[Identifier, SPARQLError, Expr]) -> Union[bool, NoReturn]: ... def EBV(rt: Union[Identifier, SPARQLError, Expr]) -> bool: diff --git a/rdflib/plugins/sparql/parser.py b/rdflib/plugins/sparql/parser.py index 455377ed1..d2e9f2dee 100644 --- a/rdflib/plugins/sparql/parser.py +++ b/rdflib/plugins/sparql/parser.py @@ -3,7 +3,8 @@ based on pyparsing """ -from __future__ import annotations + +from __future__ import annotations # noqa: I001 import re import sys @@ -98,7 +99,7 @@ def expandTriples(terms: ParseResults) -> List[Any]: # "Length of triple-list is not divisible by 3: %d!"%len(res) # return [tuple(res[i:i+3]) for i in range(len(res)/3)] - except: # noqa: E722 + except: if DEBUG: import traceback diff --git a/rdflib/plugins/sparql/parserutils.py b/rdflib/plugins/sparql/parserutils.py index b625f3646..7b85eb659 100644 --- a/rdflib/plugins/sparql/parserutils.py +++ b/rdflib/plugins/sparql/parserutils.py @@ -1,26 +1,3 @@ -from __future__ import annotations - -from collections import OrderedDict -from types import MethodType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - List, - Mapping, - Optional, - Tuple, - TypeVar, - Union, -) - -from pyparsing import ParseResults, TokenConverter, originalTextFor - -from rdflib.term import BNode, Identifier, Variable - -if TYPE_CHECKING: - from rdflib.plugins.sparql.sparql import FrozenBindings - """ NOTE: PyParsing setResultName/__call__ provides a very similar solution to this @@ -48,6 +25,29 @@ """ +from __future__ import annotations + +from collections import OrderedDict +from types import MethodType +from typing import ( + TYPE_CHECKING, + Any, + Callable, + List, + Mapping, + Optional, + Tuple, + TypeVar, + Union, +) + +from pyparsing import ParserElement, ParseResults, TokenConverter, originalTextFor + +from rdflib.term 
import BNode, Identifier, Variable + +if TYPE_CHECKING: + from rdflib.plugins.sparql.sparql import FrozenBindings + # This is an alternative @@ -55,7 +55,7 @@ def value( - ctx: "FrozenBindings", + ctx: FrozenBindings, val: Any, variables: bool = False, errors: bool = False, @@ -150,7 +150,6 @@ def __init__(self, name: str, expr): class CompValue(OrderedDict): - """ The result of parsing a Comp Any included Params are available as Dict keys @@ -200,8 +199,7 @@ def __getattr__(self, a: str) -> Any: if TYPE_CHECKING: # this is here because properties are dynamically set on CompValue - def __setattr__(self, __name: str, __value: Any) -> None: - ... + def __setattr__(self, __name: str, __value: Any) -> None: ... class Expr(CompValue): @@ -233,7 +231,6 @@ def eval(self, ctx: Any = {}) -> Union[SPARQLError, Any]: class Comp(TokenConverter): - """ A pyparsing token for grouping together things with a label Any sub-tokens that are not Params will be ignored. @@ -241,7 +238,7 @@ class Comp(TokenConverter): Returns CompValue / Expr objects - depending on whether evalFn is set. 
""" - def __init__(self, name: str, expr): + def __init__(self, name: str, expr: ParserElement): self.expr = expr TokenConverter.__init__(self, expr) self.setName(name) diff --git a/rdflib/plugins/sparql/processor.py b/rdflib/plugins/sparql/processor.py index f10f372bc..de97d80bd 100644 --- a/rdflib/plugins/sparql/processor.py +++ b/rdflib/plugins/sparql/processor.py @@ -4,6 +4,7 @@ These should be automatically registered with RDFLib """ + from __future__ import annotations from typing import Any, Mapping, Optional, Union diff --git a/rdflib/plugins/sparql/results/csvresults.py b/rdflib/plugins/sparql/results/csvresults.py index cc99ddf94..ef557c014 100644 --- a/rdflib/plugins/sparql/results/csvresults.py +++ b/rdflib/plugins/sparql/results/csvresults.py @@ -1,5 +1,3 @@ -from __future__ import annotations - """ This module implements a parser and serializer for the CSV SPARQL result @@ -9,6 +7,8 @@ """ +from __future__ import annotations + import codecs import csv from typing import IO, Dict, List, Optional, Union diff --git a/rdflib/plugins/sparql/results/jsonresults.py b/rdflib/plugins/sparql/results/jsonresults.py index ecdb01247..405a3860b 100644 --- a/rdflib/plugins/sparql/results/jsonresults.py +++ b/rdflib/plugins/sparql/results/jsonresults.py @@ -1,11 +1,3 @@ -from __future__ import annotations - -import json -from typing import IO, Any, Dict, Mapping, MutableSequence, Optional - -from rdflib.query import Result, ResultException, ResultParser, ResultSerializer -from rdflib.term import BNode, Identifier, Literal, URIRef, Variable - """A Serializer for SPARQL results in JSON: http://www.w3.org/TR/rdf-sparql-json-res/ @@ -17,6 +9,14 @@ """ +from __future__ import annotations + +import json +from typing import IO, Any, Dict, Mapping, MutableSequence, Optional + +from rdflib.query import Result, ResultException, ResultParser, ResultSerializer +from rdflib.term import BNode, Identifier, Literal, URIRef, Variable + class JSONResultParser(ResultParser): # type 
error: Signature of "parse" incompatible with supertype "ResultParser" diff --git a/rdflib/plugins/sparql/results/rdfresults.py b/rdflib/plugins/sparql/results/rdfresults.py index 903734f57..c59a40c14 100644 --- a/rdflib/plugins/sparql/results/rdfresults.py +++ b/rdflib/plugins/sparql/results/rdfresults.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import IO, Any, MutableMapping, Optional, Union from rdflib.graph import Graph diff --git a/rdflib/plugins/sparql/results/tsvresults.py b/rdflib/plugins/sparql/results/tsvresults.py index 02274f266..54b516d0d 100644 --- a/rdflib/plugins/sparql/results/tsvresults.py +++ b/rdflib/plugins/sparql/results/tsvresults.py @@ -4,6 +4,8 @@ It is implemented with pyparsing, reusing the elements from the SPARQL Parser """ +from __future__ import annotations + import codecs import typing from typing import IO, Union @@ -30,9 +32,8 @@ ) from rdflib.plugins.sparql.parserutils import Comp, CompValue, Param from rdflib.query import Result, ResultParser -from rdflib.term import BNode +from rdflib.term import BNode, URIRef from rdflib.term import Literal as RDFLiteral -from rdflib.term import URIRef ParserElement.setDefaultWhitespaceChars(" \n") diff --git a/rdflib/plugins/sparql/results/txtresults.py b/rdflib/plugins/sparql/results/txtresults.py index 3f2f1f511..999daa60c 100644 --- a/rdflib/plugins/sparql/results/txtresults.py +++ b/rdflib/plugins/sparql/results/txtresults.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import IO, List, Optional, Union from rdflib.namespace import NamespaceManager diff --git a/rdflib/plugins/sparql/results/xmlresults.py b/rdflib/plugins/sparql/results/xmlresults.py index 21ee3449d..3cc6b2c38 100644 --- a/rdflib/plugins/sparql/results/xmlresults.py +++ b/rdflib/plugins/sparql/results/xmlresults.py @@ -1,3 +1,15 @@ +"""A Parser for SPARQL results in XML: + +http://www.w3.org/TR/rdf-sparql-XMLres/ + +Bits and pieces borrowed from: 
+http://projects.bigasterisk.com/sparqlhttp/ + +Authors: Drew Perttula, Gunnar Aastrand Grimnes +""" + +from __future__ import annotations + import logging import xml.etree.ElementTree as xml_etree # noqa: N813 from io import BytesIO @@ -35,17 +47,6 @@ log = logging.getLogger(__name__) -"""A Parser for SPARQL results in XML: - -http://www.w3.org/TR/rdf-sparql-XMLres/ - -Bits and pieces borrowed from: -http://projects.bigasterisk.com/sparqlhttp/ - -Authors: Drew Perttula, Gunnar Aastrand Grimnes -""" - - class XMLResultParser(ResultParser): # TODO FIXME: content_type should be a keyword only arg. def parse(self, source: IO, content_type: Optional[str] = None) -> Result: # type: ignore[override] @@ -253,7 +254,7 @@ def write_binding(self, name: Variable, val: Identifier) -> None: "binding", # type error: Argument 1 to "AttributesNSImpl" has incompatible type "Dict[Tuple[None, str], str]"; expected "Mapping[Tuple[str, str], str]" # type error: Argument 2 to "AttributesNSImpl" has incompatible type "Dict[Tuple[None, str], str]"; expected "Mapping[Tuple[str, str], str]" - AttributesNSImpl(attr_vals, attr_qnames), # type: ignore[arg-type] + AttributesNSImpl(attr_vals, attr_qnames), # type: ignore[arg-type, unused-ignore] ) if isinstance(val, URIRef): @@ -283,7 +284,7 @@ def write_binding(self, name: Variable, val: Identifier) -> None: "literal", # type error: Argument 1 to "AttributesNSImpl" has incompatible type "Dict[Tuple[Optional[str], str], str]"; expected "Mapping[Tuple[str, str], str]" # type error: Argument 2 to "AttributesNSImpl" has incompatible type "Dict[Tuple[Optional[str], str], str]"; expected "Mapping[Tuple[str, str], str]" - AttributesNSImpl(attr_vals, attr_qnames), # type: ignore[arg-type] + AttributesNSImpl(attr_vals, attr_qnames), # type: ignore[arg-type, unused-ignore] ) self.writer.characters(val) self.writer.endElementNS((SPARQL_XML_NAMESPACE, "literal"), "literal") diff --git a/rdflib/plugins/sparql/sparql.py b/rdflib/plugins/sparql/sparql.py 
index 7bfe28284..2fdc6d479 100644 --- a/rdflib/plugins/sparql/sparql.py +++ b/rdflib/plugins/sparql/sparql.py @@ -57,7 +57,6 @@ def __init__(self, msg: Optional[str]): class Bindings(MutableMapping): - """ A single level of a stack of variable-value bindings. @@ -67,7 +66,7 @@ class Bindings(MutableMapping): In python 3.3 this could be a collections.ChainMap """ - def __init__(self, outer: Optional["Bindings"] = None, d=[]): + def __init__(self, outer: Optional[Bindings] = None, d=[]): self._d: Dict[str, str] = dict(d) self.outer = outer @@ -148,8 +147,8 @@ def __hash__(self) -> int: self._hash ^= hash(value) return self._hash - def project(self, vars: Container[Variable]) -> "FrozenDict": - return FrozenDict((x for x in self.items() if x[0] in vars)) + def project(self, vars: Container[Variable]) -> FrozenDict: + return FrozenDict(x for x in self.items() if x[0] in vars) def disjointDomain(self, other: t.Mapping[Identifier, Identifier]) -> bool: return not bool(set(self).intersection(other)) @@ -164,7 +163,7 @@ def compatible(self, other: t.Mapping[Identifier, Identifier]) -> bool: return True - def merge(self, other: t.Mapping[Identifier, Identifier]) -> "FrozenDict": + def merge(self, other: t.Mapping[Identifier, Identifier]) -> FrozenDict: res = FrozenDict(itertools.chain(self.items(), other.items())) return res @@ -177,7 +176,7 @@ def __repr__(self) -> str: class FrozenBindings(FrozenDict): - def __init__(self, ctx: "QueryContext", *args, **kwargs): + def __init__(self, ctx: QueryContext, *args, **kwargs): FrozenDict.__init__(self, *args, **kwargs) self.ctx = ctx @@ -195,10 +194,10 @@ def __getitem__(self, key: Union[Identifier, str]) -> Identifier: else: return self._d[key] - def project(self, vars: Container[Variable]) -> "FrozenBindings": + def project(self, vars: Container[Variable]) -> FrozenBindings: return FrozenBindings(self.ctx, (x for x in self.items() if x[0] in vars)) - def merge(self, other: t.Mapping[Identifier, Identifier]) -> "FrozenBindings": 
+ def merge(self, other: t.Mapping[Identifier, Identifier]) -> FrozenBindings: res = FrozenBindings(self.ctx, itertools.chain(self.items(), other.items())) return res @@ -211,11 +210,11 @@ def bnodes(self) -> t.Mapping[Identifier, BNode]: return self.ctx.bnodes @property - def prologue(self) -> Optional["Prologue"]: + def prologue(self) -> Optional[Prologue]: return self.ctx.prologue def forget( - self, before: "QueryContext", _except: Optional[Container[Variable]] = None + self, before: QueryContext, _except: Optional[Container[Variable]] = None ) -> FrozenBindings: """ return a frozen dict only of bindings made in self @@ -289,7 +288,7 @@ def now(self) -> datetime.datetime: def clone( self, bindings: Optional[Union[FrozenBindings, Bindings, List[Any]]] = None - ) -> "QueryContext": + ) -> QueryContext: r = QueryContext( self._dataset if self._dataset is not None else self.graph, bindings or self.bindings, @@ -311,14 +310,23 @@ def dataset(self) -> ConjunctiveGraph: ) return self._dataset - def load(self, source: URIRef, default: bool = False, **kwargs: Any) -> None: + def load( + self, + source: URIRef, + default: bool = False, + into: Optional[Identifier] = None, + **kwargs: Any, + ) -> None: """ Load data from the source into the query context's. :param source: The source to load from. - :param default: If `True`, triples from the source will be added to the - default graph, otherwise it will be loaded into a graph with - ``source`` URI as its name. + :param default: If `True`, triples from the source will be added + to the default graph, otherwise it will be loaded into a + graph with ``source`` URI as its name. + :param into: The name of the graph to load the data into. If + `None`, the source URI will be used as as the name of the + graph. :param kwargs: Keyword arguments to pass to :meth:`rdflib.graph.Graph.parse`. 
""" @@ -353,7 +361,9 @@ def _load(graph, source): if default: _load(self.graph, source) else: - _load(self.dataset.get_context(source), source) + if into is None: + into = source + _load(self.dataset.get_context(into), source) def __getitem__(self, key: Union[str, Path]) -> Optional[Union[str, Path]]: # in SPARQL BNodes are just labels @@ -387,19 +397,19 @@ def __setitem__(self, key: str, value: str) -> None: self.bindings[key] = value - def pushGraph(self, graph: Optional[Graph]) -> "QueryContext": + def pushGraph(self, graph: Optional[Graph]) -> QueryContext: r = self.clone() r.graph = graph return r - def push(self) -> "QueryContext": + def push(self) -> QueryContext: r = self.clone(Bindings(self.bindings)) return r - def clean(self) -> "QueryContext": + def clean(self) -> QueryContext: return self.clone([]) - def thaw(self, frozenbindings: FrozenBindings) -> "QueryContext": + def thaw(self, frozenbindings: FrozenBindings) -> QueryContext: """ Create a new read/write query context from the given solution """ diff --git a/rdflib/plugins/sparql/update.py b/rdflib/plugins/sparql/update.py index 5ce86f393..cd22a7520 100644 --- a/rdflib/plugins/sparql/update.py +++ b/rdflib/plugins/sparql/update.py @@ -3,6 +3,7 @@ Code for carrying out Update Operations """ + from __future__ import annotations from typing import TYPE_CHECKING, Iterator, Mapping, Optional, Sequence @@ -51,7 +52,7 @@ def evalLoad(ctx: QueryContext, u: CompValue) -> None: assert isinstance(u.iri, URIRef) if u.graphiri: - ctx.load(u.iri, default=False, publicID=u.graphiri) + ctx.load(u.iri, default=False, into=u.graphiri) else: ctx.load(u.iri, default=True) diff --git a/rdflib/plugins/stores/auditable.py b/rdflib/plugins/stores/auditable.py index 17fa0e548..7a9748c69 100644 --- a/rdflib/plugins/stores/auditable.py +++ b/rdflib/plugins/stores/auditable.py @@ -15,6 +15,8 @@ """ +from __future__ import annotations + import threading from typing import TYPE_CHECKING, Any, Generator, Iterator, List, Optional, 
Tuple @@ -42,7 +44,7 @@ class AuditableStore(Store): - def __init__(self, store: "Store"): + def __init__(self, store: Store): self.store = store self.context_aware = store.context_aware # NOTE: this store can't be formula_aware as it doesn't have enough @@ -51,10 +53,10 @@ def __init__(self, store: "Store"): self.transaction_aware = True # This is only half true self.reverseOps: List[ Tuple[ - Optional["_SubjectType"], - Optional["_PredicateType"], - Optional["_ObjectType"], - Optional["_ContextIdentifierType"], + Optional[_SubjectType], + Optional[_PredicateType], + Optional[_ObjectType], + Optional[_ContextIdentifierType], str, ] ] = [] @@ -69,11 +71,11 @@ def close(self, commit_pending_transaction: bool = False) -> None: def destroy(self, configuration: str) -> None: self.store.destroy(configuration) - def query(self, *args: Any, **kw: Any) -> "Result": + def query(self, *args: Any, **kw: Any) -> Result: return self.store.query(*args, **kw) def add( - self, triple: "_TripleType", context: "_ContextType", quoted: bool = False + self, triple: _TripleType, context: _ContextType, quoted: bool = False ) -> None: (s, p, o) = triple lock = destructiveOpLocks["add"] @@ -95,7 +97,7 @@ def add( self.store.add((s, p, o), context, quoted) def remove( - self, spo: "_TriplePatternType", context: Optional["_ContextType"] = None + self, spo: _TriplePatternType, context: Optional[_ContextType] = None ) -> None: subject, predicate, object_ = spo lock = destructiveOpLocks["remove"] @@ -139,8 +141,8 @@ def remove( self.store.remove((subject, predicate, object_), context) def triples( - self, triple: "_TriplePatternType", context: Optional["_ContextType"] = None - ) -> Iterator[Tuple["_TripleType", Iterator[Optional["_ContextType"]]]]: + self, triple: _TriplePatternType, context: Optional[_ContextType] = None + ) -> Iterator[Tuple[_TripleType, Iterator[Optional[_ContextType]]]]: (su, pr, ob) = triple context = ( context.__class__(self.store, context.identifier) @@ -150,7 +152,7 @@ 
def triples( for (s, p, o), cg in self.store.triples((su, pr, ob), context): yield (s, p, o), cg - def __len__(self, context: Optional["_ContextType"] = None): + def __len__(self, context: Optional[_ContextType] = None): context = ( context.__class__(self.store, context.identifier) if context is not None @@ -159,21 +161,21 @@ def __len__(self, context: Optional["_ContextType"] = None): return self.store.__len__(context) def contexts( - self, triple: Optional["_TripleType"] = None - ) -> Generator["_ContextType", None, None]: + self, triple: Optional[_TripleType] = None + ) -> Generator[_ContextType, None, None]: for ctx in self.store.contexts(triple): yield ctx - def bind(self, prefix: str, namespace: "URIRef", override: bool = True) -> None: + def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: self.store.bind(prefix, namespace, override=override) - def prefix(self, namespace: "URIRef") -> Optional[str]: + def prefix(self, namespace: URIRef) -> Optional[str]: return self.store.prefix(namespace) - def namespace(self, prefix: str) -> Optional["URIRef"]: + def namespace(self, prefix: str) -> Optional[URIRef]: return self.store.namespace(prefix) - def namespaces(self) -> Iterator[Tuple[str, "URIRef"]]: + def namespaces(self) -> Iterator[Tuple[str, URIRef]]: return self.store.namespaces() def commit(self) -> None: diff --git a/rdflib/plugins/stores/berkeleydb.py b/rdflib/plugins/stores/berkeleydb.py index 23968f77a..12009787c 100644 --- a/rdflib/plugins/stores/berkeleydb.py +++ b/rdflib/plugins/stores/berkeleydb.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging from os import mkdir from os.path import abspath, exists @@ -84,12 +86,12 @@ class BerkeleyDB(Store): formula_aware = True transaction_aware = False graph_aware = True - db_env: "db.DBEnv" = None + db_env: db.DBEnv = None def __init__( self, configuration: Optional[str] = None, - identifier: Optional["Identifier"] = None, + identifier: Optional[Identifier] = None, 
): if not has_bsddb: raise ImportError("Unable to import berkeleydb, store is unusable.") @@ -100,14 +102,14 @@ def __init__( self._dumps = self.node_pickler.dumps self.__indicies_info: List[Tuple[Any, _ToKeyFunc, _FromKeyFunc]] - def __get_identifier(self) -> Optional["Identifier"]: + def __get_identifier(self) -> Optional[Identifier]: return self.__identifier identifier = property(__get_identifier) def _init_db_environment( self, homeDir: str, create: bool = True # noqa: N803 - ) -> "db.DBEnv": # noqa: N803 + ) -> db.DBEnv: if not exists(homeDir): if create is True: mkdir(homeDir) @@ -155,7 +157,7 @@ def open(self, path: str, create: bool = True) -> Optional[int]: dbsetflags = 0 # create and open the DBs - self.__indicies: List["db.DB"] = [ + self.__indicies: List[db.DB] = [ None, ] * 3 # NOTE on type ingore: this is because type checker does not like this @@ -176,7 +178,7 @@ def open(self, path: str, create: bool = True) -> Optional[int]: self.__indicies_info[i] = (index, to_key_func(i), from_key_func(i)) lookup: Dict[ - int, Tuple["db.DB", _GetPrefixFunc, _FromKeyFunc, _ResultsFromKeyFunc] + int, Tuple[db.DB, _GetPrefixFunc, _FromKeyFunc, _ResultsFromKeyFunc] ] = {} for i in range(0, 8): results: List[Tuple[Tuple[int, int], int, int]] = [] @@ -298,8 +300,8 @@ def close(self, commit_pending_transaction: bool = False) -> None: def add( self, - triple: "_TripleType", - context: "_ContextType", + triple: _TripleType, + context: _ContextType, quoted: bool = False, txn: Optional[Any] = None, ) -> None: @@ -380,8 +382,8 @@ def __remove( # type error: Signature of "remove" incompatible with supertype "Store" def remove( # type: ignore[override] self, - spo: "_TriplePatternType", - context: Optional["_ContextType"], + spo: _TriplePatternType, + context: Optional[_ContextType], txn: Optional[Any] = None, ) -> None: subject, predicate, object = spo @@ -471,11 +473,11 @@ def remove( # type: ignore[override] def triples( self, - spo: "_TriplePatternType", - context: 
Optional["_ContextType"] = None, + spo: _TriplePatternType, + context: Optional[_ContextType] = None, txn: Optional[Any] = None, ) -> Generator[ - Tuple["_TripleType", Generator[Optional["_ContextType"], None, None]], + Tuple[_TripleType, Generator[Optional[_ContextType], None, None]], None, None, ]: @@ -518,7 +520,7 @@ def triples( else: break - def __len__(self, context: Optional["_ContextType"] = None) -> int: + def __len__(self, context: Optional[_ContextType] = None) -> int: assert self.__open, "The Store must be open." if context is not None: if context == self: @@ -544,7 +546,7 @@ def __len__(self, context: Optional["_ContextType"] = None) -> int: cursor.close() return count - def bind(self, prefix: str, namespace: "URIRef", override: bool = True) -> None: + def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: # NOTE on type error: this is because the variables are reused with # another type. # type error: Incompatible types in assignment (expression has type "bytes", variable has type "str") @@ -564,7 +566,7 @@ def bind(self, prefix: str, namespace: "URIRef", override: bool = True) -> None: self.__prefix[bound_namespace or namespace] = bound_prefix or prefix self.__namespace[bound_prefix or prefix] = bound_namespace or namespace - def namespace(self, prefix: str) -> Optional["URIRef"]: + def namespace(self, prefix: str) -> Optional[URIRef]: # NOTE on type error: this is because the variable is reused with # another type. # type error: Incompatible types in assignment (expression has type "bytes", variable has type "str") @@ -574,7 +576,7 @@ def namespace(self, prefix: str) -> Optional["URIRef"]: return URIRef(ns.decode("utf-8")) return None - def prefix(self, namespace: "URIRef") -> Optional[str]: + def prefix(self, namespace: URIRef) -> Optional[str]: # NOTE on type error: this is because the variable is reused with # another type. 
# type error: Incompatible types in assignment (expression has type "bytes", variable has type "URIRef") @@ -584,7 +586,7 @@ def prefix(self, namespace: "URIRef") -> Optional[str]: return prefix.decode("utf-8") return None - def namespaces(self) -> Generator[Tuple[str, "URIRef"], None, None]: + def namespaces(self) -> Generator[Tuple[str, URIRef], None, None]: cursor = self.__namespace.cursor() results = [] current = cursor.first() @@ -598,8 +600,8 @@ def namespaces(self) -> Generator[Tuple[str, "URIRef"], None, None]: yield prefix, URIRef(namespace) def contexts( - self, triple: Optional["_TripleType"] = None - ) -> Generator["_ContextType", None, None]: + self, triple: Optional[_TripleType] = None + ) -> Generator[_ContextType, None, None]: _from_string = self._from_string _to_string = self._to_string # NOTE on type errors: context is lost because of how data is loaded @@ -641,10 +643,10 @@ def contexts( current = None cursor.close() - def add_graph(self, graph: "Graph") -> None: + def add_graph(self, graph: Graph) -> None: self.__contexts.put(bb(self._to_string(graph)), b"") - def remove_graph(self, graph: "Graph"): + def remove_graph(self, graph: Graph): self.remove((None, None, None), graph) def _from_string(self, i: bytes) -> Node: @@ -669,10 +671,10 @@ def _to_string(self, term: Node, txn: Optional[Any] = None) -> str: def __lookup( self, - spo: "_TriplePatternType", - context: Optional["_ContextType"], + spo: _TriplePatternType, + context: Optional[_ContextType], txn: Optional[Any] = None, - ) -> Tuple["db.DB", bytes, _FromKeyFunc, _ResultsFromKeyFunc]: + ) -> Tuple[db.DB, bytes, _FromKeyFunc, _ResultsFromKeyFunc]: subject, predicate, object = spo _to_string = self._to_string # NOTE on type errors: this is because the same variable is used with different types. 
diff --git a/rdflib/plugins/stores/concurrent.py b/rdflib/plugins/stores/concurrent.py index c07867958..2d050954b 100644 --- a/rdflib/plugins/stores/concurrent.py +++ b/rdflib/plugins/stores/concurrent.py @@ -55,7 +55,7 @@ def triples(self, triple): pending_removes = self.__pending_removes self.__begin_read() for s, p, o in ResponsibleGenerator(g, self.__end_read): - if not (s, p, o) in pending_removes: + if not (s, p, o) in pending_removes: # noqa: E713 yield s, p, o for s, p, o in self.__pending_adds: diff --git a/rdflib/plugins/stores/memory.py b/rdflib/plugins/stores/memory.py index 13c15218a..7dc7c25ac 100644 --- a/rdflib/plugins/stores/memory.py +++ b/rdflib/plugins/stores/memory.py @@ -1,5 +1,7 @@ # # +from __future__ import annotations + from typing import ( TYPE_CHECKING, Any, @@ -34,7 +36,7 @@ __all__ = ["SimpleMemory", "Memory"] -ANY = None +ANY: None = None class SimpleMemory(Store): @@ -51,33 +53,33 @@ class SimpleMemory(Store): def __init__( self, configuration: Optional[str] = None, - identifier: Optional["Identifier"] = None, + identifier: Optional[Identifier] = None, ): super(SimpleMemory, self).__init__(configuration) self.identifier = identifier # indexed by [subject][predicate][object] - self.__spo: Dict[ - "_SubjectType", Dict["_PredicateType", Dict["_ObjectType", int]] - ] = {} + self.__spo: Dict[_SubjectType, Dict[_PredicateType, Dict[_ObjectType, int]]] = ( + {} + ) # indexed by [predicate][object][subject] - self.__pos: Dict[ - "_PredicateType", Dict["_ObjectType", Dict["_SubjectType", int]] - ] = {} + self.__pos: Dict[_PredicateType, Dict[_ObjectType, Dict[_SubjectType, int]]] = ( + {} + ) # indexed by [predicate][object][subject] - self.__osp: Dict[ - "_ObjectType", Dict["_SubjectType", Dict["_PredicateType", int]] - ] = {} + self.__osp: Dict[_ObjectType, Dict[_SubjectType, Dict[_PredicateType, int]]] = ( + {} + ) - self.__namespace: Dict[str, "URIRef"] = {} - self.__prefix: Dict["URIRef", str] = {} + self.__namespace: Dict[str, URIRef] = 
{} + self.__prefix: Dict[URIRef, str] = {} def add( self, - triple: "_TripleType", - context: "_ContextType", + triple: _TripleType, + context: _ContextType, quoted: bool = False, ) -> None: """\ @@ -122,8 +124,8 @@ def add( def remove( self, - triple_pattern: "_TriplePatternType", - context: Optional["_ContextType"] = None, + triple_pattern: _TriplePatternType, + context: Optional[_ContextType] = None, ) -> None: for (subject, predicate, object), c in list(self.triples(triple_pattern)): del self.__spo[subject][predicate][object] @@ -132,9 +134,9 @@ def remove( def triples( self, - triple_pattern: "_TriplePatternType", - context: Optional["_ContextType"] = None, - ) -> Iterator[Tuple["_TripleType", Iterator[Optional["_ContextType"]]]]: + triple_pattern: _TriplePatternType, + context: Optional[_ContextType] = None, + ) -> Iterator[Tuple[_TripleType, Iterator[Optional[_ContextType]]]]: """A generator over all the triples matching""" subject, predicate, object = triple_pattern if subject != ANY: # subject is given @@ -194,14 +196,14 @@ def triples( for o in subjectDictionary[p].keys(): yield (s, p, o), self.__contexts() - def __len__(self, context: Optional["_ContextType"] = None) -> int: + def __len__(self, context: Optional[_ContextType] = None) -> int: # @@ optimize i = 0 for triple in self.triples((None, None, None)): i += 1 return i - def bind(self, prefix: str, namespace: "URIRef", override: bool = True) -> None: + def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: # should be identical to `Memory.bind` bound_namespace = self.__namespace.get(prefix) bound_prefix = _coalesce( @@ -226,17 +228,17 @@ def bind(self, prefix: str, namespace: "URIRef", override: bool = True) -> None: bound_namespace, default=namespace ) - def namespace(self, prefix: str) -> Optional["URIRef"]: + def namespace(self, prefix: str) -> Optional[URIRef]: return self.__namespace.get(prefix, None) - def prefix(self, namespace: "URIRef") -> Optional[str]: + def 
prefix(self, namespace: URIRef) -> Optional[str]: return self.__prefix.get(namespace, None) - def namespaces(self) -> Iterator[Tuple[str, "URIRef"]]: + def namespaces(self) -> Iterator[Tuple[str, URIRef]]: for prefix, namespace in self.__namespace.items(): yield prefix, namespace - def __contexts(self) -> Generator["_ContextType", None, None]: + def __contexts(self) -> Generator[_ContextType, None, None]: # TODO: best way to return empty generator # type error: Need type annotation for "c" return (c for c in []) # type: ignore[var-annotated] @@ -244,22 +246,22 @@ def __contexts(self) -> Generator["_ContextType", None, None]: # type error: Missing return statement def query( # type: ignore[return] self, - query: Union["Query", str], + query: Union[Query, str], initNs: Mapping[str, Any], # noqa: N803 - initBindings: Mapping["str", "Identifier"], # noqa: N803 - queryGraph: "str", # noqa: N803 + initBindings: Mapping[str, Identifier], # noqa: N803 + queryGraph: str, # noqa: N803 **kwargs: Any, - ) -> "Result": + ) -> Result: super(SimpleMemory, self).query( query, initNs, initBindings, queryGraph, **kwargs ) def update( self, - update: Union["Update", str], + update: Union[Update, str], initNs: Mapping[str, Any], # noqa: N803 - initBindings: Mapping["str", "Identifier"], # noqa: N803 - queryGraph: "str", # noqa: N803 + initBindings: Mapping[str, Identifier], # noqa: N803 + queryGraph: str, # noqa: N803 **kwargs: Any, ) -> None: super(SimpleMemory, self).update( @@ -282,40 +284,40 @@ class Memory(Store): def __init__( self, configuration: Optional[str] = None, - identifier: Optional["Identifier"] = None, + identifier: Optional[Identifier] = None, ): super(Memory, self).__init__(configuration) self.identifier = identifier # indexed by [subject][predicate][object] - self.__spo: Dict[ - "_SubjectType", Dict["_PredicateType", Dict["_ObjectType", int]] - ] = {} + self.__spo: Dict[_SubjectType, Dict[_PredicateType, Dict[_ObjectType, int]]] = ( + {} + ) # indexed by 
[predicate][object][subject] - self.__pos: Dict[ - "_PredicateType", Dict["_ObjectType", Dict["_SubjectType", int]] - ] = {} + self.__pos: Dict[_PredicateType, Dict[_ObjectType, Dict[_SubjectType, int]]] = ( + {} + ) # indexed by [predicate][object][subject] - self.__osp: Dict[ - "_ObjectType", Dict["_SubjectType", Dict["_PredicateType", int]] - ] = {} - - self.__namespace: Dict[str, "URIRef"] = {} - self.__prefix: Dict["URIRef", str] = {} - self.__context_obj_map: Dict[str, "Graph"] = {} - self.__tripleContexts: Dict["_TripleType", Dict[Optional[str], bool]] = {} - self.__contextTriples: Dict[Optional[str], Set["_TripleType"]] = {None: set()} + self.__osp: Dict[_ObjectType, Dict[_SubjectType, Dict[_PredicateType, int]]] = ( + {} + ) + + self.__namespace: Dict[str, URIRef] = {} + self.__prefix: Dict[URIRef, str] = {} + self.__context_obj_map: Dict[str, Graph] = {} + self.__tripleContexts: Dict[_TripleType, Dict[Optional[str], bool]] = {} + self.__contextTriples: Dict[Optional[str], Set[_TripleType]] = {None: set()} # all contexts used in store (unencoded) - self.__all_contexts: Set["Graph"] = set() + self.__all_contexts: Set[Graph] = set() # default context information for triples self.__defaultContexts: Optional[Dict[Optional[str], bool]] = None def add( self, - triple: "_TripleType", - context: "_ContextType", + triple: _TripleType, + context: _ContextType, quoted: bool = False, ) -> None: """\ @@ -376,8 +378,8 @@ def add( def remove( self, - triple_pattern: "_TriplePatternType", - context: Optional["_ContextType"] = None, + triple_pattern: _TriplePatternType, + context: Optional[_ContextType] = None, ) -> None: req_ctx = self.__ctx_to_str(context) for triple, c in self.triples(triple_pattern, context=context): @@ -414,10 +416,10 @@ def remove( def triples( self, - triple_pattern: "_TriplePatternType", - context: Optional["_ContextType"] = None, + triple_pattern: _TriplePatternType, + context: Optional[_ContextType] = None, ) -> Generator[ - Tuple["_TripleType", 
Generator[Optional["_ContextType"], None, None]], + Tuple[_TripleType, Generator[Optional[_ContextType], None, None]], None, None, ]: @@ -520,7 +522,7 @@ def triples( if self.__triple_has_context(triple, req_ctx): yield triple, self.__contexts(triple) - def bind(self, prefix: str, namespace: "URIRef", override: bool = True) -> None: + def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: # should be identical to `SimpleMemory.bind` bound_namespace = self.__namespace.get(prefix) bound_prefix = _coalesce( @@ -546,19 +548,19 @@ def bind(self, prefix: str, namespace: "URIRef", override: bool = True) -> None: bound_namespace, default=namespace ) - def namespace(self, prefix: str) -> Optional["URIRef"]: + def namespace(self, prefix: str) -> Optional[URIRef]: return self.__namespace.get(prefix, None) - def prefix(self, namespace: "URIRef") -> Optional[str]: + def prefix(self, namespace: URIRef) -> Optional[str]: return self.__prefix.get(namespace, None) - def namespaces(self) -> Iterator[Tuple[str, "URIRef"]]: + def namespaces(self) -> Iterator[Tuple[str, URIRef]]: for prefix, namespace in self.__namespace.items(): yield prefix, namespace def contexts( - self, triple: Optional["_TripleType"] = None - ) -> Generator["_ContextType", None, None]: + self, triple: Optional[_TripleType] = None + ) -> Generator[_ContextType, None, None]: if triple is None or triple == (None, None, None): return (context for context in self.__all_contexts) @@ -569,19 +571,19 @@ def contexts( except KeyError: return (_ for _ in []) - def __len__(self, context: Optional["_ContextType"] = None) -> int: + def __len__(self, context: Optional[_ContextType] = None) -> int: ctx = self.__ctx_to_str(context) if ctx not in self.__contextTriples: return 0 return len(self.__contextTriples[ctx]) - def add_graph(self, graph: "Graph") -> None: + def add_graph(self, graph: Graph) -> None: if not self.graph_aware: Store.add_graph(self, graph) else: self.__all_contexts.add(graph) - def 
remove_graph(self, graph: "Graph") -> None: + def remove_graph(self, graph: Graph) -> None: if not self.graph_aware: Store.remove_graph(self, graph) else: @@ -594,9 +596,9 @@ def remove_graph(self, graph: "Graph") -> None: # internal utility methods below def __add_triple_context( self, - triple: "_TripleType", + triple: _TripleType, triple_exists: bool, - context: Optional["_ContextType"], + context: Optional[_ContextType], quoted: bool, ) -> None: """add the given context to the set of contexts for the triple""" @@ -610,9 +612,9 @@ def __add_triple_context( # triple exists with default ctx info # start with a copy of the default ctx info # type error: Item "None" of "Optional[Dict[Optional[str], bool]]" has no attribute "copy" - triple_context = self.__tripleContexts[ - triple - ] = self.__defaultContexts.copy() # type: ignore[union-attr] + triple_context = self.__tripleContexts[triple] = ( + self.__defaultContexts.copy() # type: ignore[union-attr] + ) triple_context[ctx] = quoted @@ -646,7 +648,7 @@ def __add_triple_context( del self.__tripleContexts[triple] def __get_context_for_triple( - self, triple: "_TripleType", skipQuoted: bool = False # noqa: N803 + self, triple: _TripleType, skipQuoted: bool = False # noqa: N803 ) -> Collection[Optional[str]]: """return a list of contexts (str) for the triple, skipping quoted contexts if skipQuoted==True""" @@ -660,12 +662,12 @@ def __get_context_for_triple( # type error: Item "None" of "Optional[Dict[Optional[str], bool]]" has no attribute "items" return [ctx for ctx, quoted in ctxs.items() if not quoted] # type: ignore[union-attr] - def __triple_has_context(self, triple: "_TripleType", ctx: Optional[str]) -> bool: + def __triple_has_context(self, triple: _TripleType, ctx: Optional[str]) -> bool: """return True if the triple exists in the given context""" # type error: Unsupported right operand type for in ("Optional[Dict[Optional[str], bool]]") return ctx in self.__tripleContexts.get(triple, self.__defaultContexts) # 
type: ignore[operator] - def __remove_triple_context(self, triple: "_TripleType", ctx): + def __remove_triple_context(self, triple: _TripleType, ctx): """remove the context from the triple""" # type error: Item "None" of "Optional[Dict[Optional[str], bool]]" has no attribute "copy" ctxs = self.__tripleContexts.get(triple, self.__defaultContexts).copy() # type: ignore[union-attr] @@ -677,14 +679,12 @@ def __remove_triple_context(self, triple: "_TripleType", ctx): self.__contextTriples[ctx].remove(triple) @overload - def __ctx_to_str(self, ctx: "_ContextType") -> str: - ... + def __ctx_to_str(self, ctx: _ContextType) -> str: ... @overload - def __ctx_to_str(self, ctx: None) -> None: - ... + def __ctx_to_str(self, ctx: None) -> None: ... - def __ctx_to_str(self, ctx: Optional["_ContextType"]) -> Optional[str]: + def __ctx_to_str(self, ctx: Optional[_ContextType]) -> Optional[str]: if ctx is None: return None try: @@ -705,9 +705,7 @@ def __ctx_to_str(self, ctx: Optional["_ContextType"]) -> Optional[str]: return ctx_str raise RuntimeError("Cannot use that type of object as a Graph context") - def __contexts( - self, triple: "_TripleType" - ) -> Generator["_ContextType", None, None]: + def __contexts(self, triple: _TripleType) -> Generator[_ContextType, None, None]: """return a generator for all the non-quoted contexts (dereferenced) the encoded triple appears in""" # type error: Argument 2 to "get" of "Mapping" has incompatible type "str"; expected "Optional[Graph]" @@ -720,20 +718,20 @@ def __contexts( # type error: Missing return statement def query( # type: ignore[return] self, - query: Union["Query", str], + query: Union[Query, str], initNs: Mapping[str, Any], # noqa: N803 - initBindings: Mapping["str", "Identifier"], # noqa: N803 - queryGraph: "str", + initBindings: Mapping[str, Identifier], # noqa: N803 + queryGraph: str, # noqa: N803 **kwargs, - ) -> "Result": + ) -> Result: super(Memory, self).query(query, initNs, initBindings, queryGraph, **kwargs) def update( 
self, - update: Union["Update", Any], + update: Union[Update, Any], initNs: Mapping[str, Any], # noqa: N803 - initBindings: Mapping["str", "Identifier"], # noqa: N803 - queryGraph: "str", + initBindings: Mapping[str, Identifier], # noqa: N803 + queryGraph: str, # noqa: N803 **kwargs, ) -> None: super(Memory, self).update(update, initNs, initBindings, queryGraph, **kwargs) diff --git a/rdflib/plugins/stores/regexmatching.py b/rdflib/plugins/stores/regexmatching.py index 0f8da8fff..9dc3da2fa 100644 --- a/rdflib/plugins/stores/regexmatching.py +++ b/rdflib/plugins/stores/regexmatching.py @@ -30,7 +30,7 @@ def __init__(self, expr): self.compiledExpr = re.compile(expr) def __reduce__(self): - return (REGEXTerm, (str(""),)) + return (REGEXTerm, ("",)) def regexCompareQuad(quad, regexQuad): # noqa: N802, N803 diff --git a/rdflib/plugins/stores/sparqlconnector.py b/rdflib/plugins/stores/sparqlconnector.py index cbf7bd92a..e2bb83909 100644 --- a/rdflib/plugins/stores/sparqlconnector.py +++ b/rdflib/plugins/stores/sparqlconnector.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import base64 import copy import logging @@ -40,14 +42,14 @@ def __init__( query_endpoint: Optional[str] = None, update_endpoint: Optional[str] = None, returnFormat: str = "xml", # noqa: N803 - method: "te.Literal['GET', 'POST', 'POST_FORM']" = "GET", + method: te.Literal["GET", "POST", "POST_FORM"] = "GET", auth: Optional[Tuple[str, str]] = None, **kwargs, ): """ auth, if present, must be a tuple of (username, password) used for Basic Authentication - Any additional keyword arguments will be passed to to the request, and can be used to setup timesouts etc. + Any additional keyword arguments will be passed to to the request, and can be used to setup timeouts etc. 
""" self._method: str self.returnFormat = returnFormat @@ -56,7 +58,7 @@ def __init__( self.kwargs = kwargs self.method = method if auth is not None: - if type(auth) != tuple: + if type(auth) is not tuple: raise SPARQLConnectorException("auth must be a tuple") if len(auth) != 2: raise SPARQLConnectorException("auth must be a tuple (user, password)") @@ -84,13 +86,13 @@ def query( query: str, default_graph: Optional[str] = None, named_graph: Optional[str] = None, - ) -> "Result": + ) -> Result: if not self.query_endpoint: raise SPARQLConnectorException("Query endpoint not set!") params = {} # this test ensures we don't have a useless (BNode) default graph URI, which calls to Graph().query() will add - if default_graph is not None and type(default_graph) != BNode: + if default_graph is not None and type(default_graph) is not BNode: params["default-graph-uri"] = default_graph headers = {"Accept": _response_mime_types[self.returnFormat]} diff --git a/rdflib/plugins/stores/sparqlstore.py b/rdflib/plugins/stores/sparqlstore.py index cfffbd768..f9827cf94 100644 --- a/rdflib/plugins/stores/sparqlstore.py +++ b/rdflib/plugins/stores/sparqlstore.py @@ -1,10 +1,11 @@ -# -*- coding: utf-8 -*- -# """ This is an RDFLib store around Ivan Herman et al.'s SPARQL service wrapper. This was first done in layer-cake, and then ported to RDFLib """ + +from __future__ import annotations + import collections import re from typing import ( @@ -29,7 +30,7 @@ from rdflib.term import BNode, Identifier, Node, URIRef, Variable if TYPE_CHECKING: - import typing_extensions as te + import typing_extensions as te # noqa: I001 from rdflib.graph import ( _TripleType, _ContextType, @@ -55,14 +56,13 @@ _NodeToSparql = Callable[["Node"], str] -def _node_to_sparql(node: "Node") -> str: +def _node_to_sparql(node: Node) -> str: if isinstance(node, BNode): raise Exception( "SPARQLStore does not support BNodes! 
" "See http://www.w3.org/TR/sparql11-query/#BGPsparqlBNodes" ) - # type error: "Node" has no attribute "n3" - return node.n3() # type: ignore[attr-defined] + return node.n3() class SPARQLStore(SPARQLConnector, Store): @@ -151,7 +151,7 @@ def open(self, configuration: str, create: bool = False) -> Optional[int]: # ty """This method is included so that calls to this Store via Graph, e.g. Graph("SPARQLStore"), can set the required parameters """ - if type(configuration) == str: + if type(configuration) == str: # noqa: E721 self.query_endpoint = configuration else: raise Exception( @@ -175,31 +175,31 @@ def rollback(self) -> None: raise TypeError("The SPARQL store is read only") def add( - self, _: "_TripleType", context: "_ContextType" = None, quoted: bool = False + self, _: _TripleType, context: _ContextType = None, quoted: bool = False ) -> None: raise TypeError("The SPARQL store is read only") - def addN(self, quads: Iterable["_QuadType"]) -> None: # noqa: N802 + def addN(self, quads: Iterable[_QuadType]) -> None: # noqa: N802 raise TypeError("The SPARQL store is read only") # type error: Signature of "remove" incompatible with supertype "Store" def remove( # type: ignore[override] - self, _: "_TriplePatternType", context: Optional["_ContextType"] + self, _: _TriplePatternType, context: Optional[_ContextType] ) -> None: raise TypeError("The SPARQL store is read only") # type error: Signature of "update" incompatible with supertype "SPARQLConnector" def update( # type: ignore[override] self, - query: Union["Update", str], + query: Union[Update, str], initNs: Dict[str, Any] = {}, # noqa: N803 - initBindings: Dict["str", "Identifier"] = {}, - queryGraph: "Identifier" = None, - DEBUG: bool = False, + initBindings: Dict[str, Identifier] = {}, # noqa: N803 + queryGraph: Identifier = None, # noqa: N803 + DEBUG: bool = False, # noqa: N803 ) -> None: raise TypeError("The SPARQL store is read only") - def _query(self, *args: Any, **kwargs: Any) -> "Result": + def 
_query(self, *args: Any, **kwargs: Any) -> Result: self._queries += 1 return super(SPARQLStore, self).query(*args, **kwargs) @@ -220,12 +220,12 @@ def _inject_prefixes(self, query: str, extra_bindings: Mapping[str, Any]) -> str # type error: Signature of "query" incompatible with supertype "Store" def query( # type: ignore[override] self, - query: Union["Query", str], + query: Union[Query, str], initNs: Optional[Mapping[str, Any]] = None, # noqa: N803 - initBindings: Optional[Mapping["str", "Identifier"]] = None, - queryGraph: Optional["str"] = None, - DEBUG: bool = False, - ) -> "Result": + initBindings: Optional[Mapping[str, Identifier]] = None, # noqa: N803 + queryGraph: Optional[str] = None, # noqa: N803 + DEBUG: bool = False, # noqa: N803 + ) -> Result: self.debug = DEBUG assert isinstance(query, str) @@ -249,8 +249,8 @@ def query( # type: ignore[override] # type error: Return type "Iterator[Tuple[Tuple[Node, Node, Node], None]]" of "triples" incompatible with return type "Iterator[Tuple[Tuple[Node, Node, Node], Iterator[Optional[Graph]]]]" def triples( # type: ignore[override] - self, spo: "_TriplePatternType", context: Optional["_ContextType"] = None - ) -> Iterator[Tuple["_TripleType", None]]: + self, spo: _TriplePatternType, context: Optional[_ContextType] = None + ) -> Iterator[Tuple[_TripleType, None]]: """ - tuple **(s, o, p)** the triple used as filter for the SPARQL select. @@ -345,7 +345,7 @@ def triples( # type: ignore[override] ) if vars: - if type(result) == tuple: + if type(result) is tuple: if result[0] == 401: raise ValueError( "It looks like you need to authenticate with this SPARQL Store. 
HTTP unauthorized" @@ -368,15 +368,15 @@ def triples( # type: ignore[override] def triples_choices( self, _: Tuple[ - Union["_SubjectType", List["_SubjectType"]], - Union["_PredicateType", List["_PredicateType"]], - Union["_ObjectType", List["_ObjectType"]], + Union[_SubjectType, List[_SubjectType]], + Union[_PredicateType, List[_PredicateType]], + Union[_ObjectType, List[_ObjectType]], ], - context: Optional["_ContextType"] = None, + context: Optional[_ContextType] = None, ) -> Generator[ Tuple[ - Tuple["_SubjectType", "_PredicateType", "_ObjectType"], - Iterator[Optional["_ContextType"]], + Tuple[_SubjectType, _PredicateType, _ObjectType], + Iterator[Optional[_ContextType]], ], None, None, @@ -390,7 +390,7 @@ def triples_choices( """ raise NotImplementedError("Triples choices currently not supported") - def __len__(self, context: Optional["_ContextType"] = None) -> int: + def __len__(self, context: Optional[_ContextType] = None) -> int: if not self.sparql11: raise NotImplementedError( "For performance reasons, this is not" @@ -402,17 +402,19 @@ def __len__(self, context: Optional["_ContextType"] = None) -> int: result = self._query( q, # type error: Item "None" of "Optional[Graph]" has no attribute "identifier" - default_graph=context.identifier # type: ignore[union-attr] - if self._is_contextual(context) - else None, + default_graph=( + context.identifier # type: ignore[union-attr] + if self._is_contextual(context) + else None + ), ) # type error: Item "Tuple[Node, ...]" of "Union[Tuple[Node, Node, Node], bool, ResultRow]" has no attribute "c" return int(next(iter(result)).c) # type: ignore[union-attr] # type error: Return type "Generator[Identifier, None, None]" of "contexts" incompatible with return type "Generator[Graph, None, None]" in supertype "Store" def contexts( # type: ignore[override] - self, triple: Optional["_TripleType"] = None - ) -> Generator["_ContextIdentifierType", None, None]: + self, triple: Optional[_TripleType] = None + ) -> 
Generator[_ContextIdentifierType, None, None]: """ Iterates over results to "SELECT ?NAME { GRAPH ?NAME { ?s ?p ?o } }" or "SELECT ?NAME { GRAPH ?NAME {} }" if triple is `None`. @@ -445,38 +447,36 @@ def contexts( # type: ignore[override] return (row.name for row in result) # type: ignore[union-attr,misc] # Namespace persistence interface implementation - def bind(self, prefix: str, namespace: "URIRef", override: bool = True) -> None: + def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: bound_prefix = self.prefix(namespace) if override and bound_prefix: del self.nsBindings[bound_prefix] self.nsBindings[prefix] = namespace - def prefix(self, namespace: "URIRef") -> Optional["str"]: + def prefix(self, namespace: URIRef) -> Optional[str]: """ """ return dict([(v, k) for k, v in self.nsBindings.items()]).get(namespace) - def namespace(self, prefix: str) -> Optional["URIRef"]: + def namespace(self, prefix: str) -> Optional[URIRef]: return self.nsBindings.get(prefix) - def namespaces(self) -> Iterator[Tuple[str, "URIRef"]]: + def namespaces(self) -> Iterator[Tuple[str, URIRef]]: for prefix, ns in self.nsBindings.items(): yield prefix, ns - def add_graph(self, graph: "Graph") -> None: + def add_graph(self, graph: Graph) -> None: raise TypeError("The SPARQL store is read only") - def remove_graph(self, graph: "Graph") -> None: + def remove_graph(self, graph: Graph) -> None: raise TypeError("The SPARQL store is read only") @overload - def _is_contextual(self, graph: None) -> "te.Literal[False]": - ... + def _is_contextual(self, graph: None) -> te.Literal[False]: ... @overload - def _is_contextual(self, graph: Optional[Union["Graph", "str"]]) -> bool: - ... + def _is_contextual(self, graph: Optional[Union[Graph, str]]) -> bool: ... 
- def _is_contextual(self, graph: Optional[Union["Graph", "str"]]) -> bool: + def _is_contextual(self, graph: Optional[Union[Graph, str]]) -> bool: """Returns `True` if the "GRAPH" keyword must appear in the final SPARQL query sent to the endpoint. """ @@ -489,48 +489,48 @@ def _is_contextual(self, graph: Optional[Union["Graph", "str"]]) -> bool: def subjects( self, - predicate: Optional["_PredicateType"] = None, - object: Optional["_ObjectType"] = None, - ) -> Generator["_SubjectType", None, None]: + predicate: Optional[_PredicateType] = None, + object: Optional[_ObjectType] = None, + ) -> Generator[_SubjectType, None, None]: """A generator of subjects with the given predicate and object""" for t, c in self.triples((None, predicate, object)): yield t[0] def predicates( self, - subject: Optional["_SubjectType"] = None, - object: Optional["_ObjectType"] = None, - ) -> Generator["_PredicateType", None, None]: + subject: Optional[_SubjectType] = None, + object: Optional[_ObjectType] = None, + ) -> Generator[_PredicateType, None, None]: """A generator of predicates with the given subject and object""" for t, c in self.triples((subject, None, object)): yield t[1] def objects( self, - subject: Optional["_SubjectType"] = None, - predicate: Optional["_PredicateType"] = None, - ) -> Generator["_ObjectType", None, None]: + subject: Optional[_SubjectType] = None, + predicate: Optional[_PredicateType] = None, + ) -> Generator[_ObjectType, None, None]: """A generator of objects with the given subject and predicate""" for t, c in self.triples((subject, predicate, None)): yield t[2] def subject_predicates( - self, object: Optional["_ObjectType"] = None - ) -> Generator[Tuple["_SubjectType", "_PredicateType"], None, None]: + self, object: Optional[_ObjectType] = None + ) -> Generator[Tuple[_SubjectType, _PredicateType], None, None]: """A generator of (subject, predicate) tuples for the given object""" for t, c in self.triples((None, None, object)): yield t[0], t[1] def 
subject_objects( - self, predicate: Optional["_PredicateType"] = None - ) -> Generator[Tuple["_SubjectType", "_ObjectType"], None, None]: + self, predicate: Optional[_PredicateType] = None + ) -> Generator[Tuple[_SubjectType, _ObjectType], None, None]: """A generator of (subject, object) tuples for the given predicate""" for t, c in self.triples((None, predicate, None)): yield t[0], t[2] def predicate_objects( - self, subject: Optional["_SubjectType"] = None - ) -> Generator[Tuple["_PredicateType", "_ObjectType"], None, None]: + self, subject: Optional[_SubjectType] = None + ) -> Generator[Tuple[_PredicateType, _ObjectType], None, None]: """A generator of (predicate, object) tuples for the given subject""" for t, c in self.triples((subject, None, None)): yield t[1], t[2] @@ -645,25 +645,7 @@ def __init__( self._edits: Optional[List[str]] = None self._updates = 0 - def open( - self, configuration: Union[str, Tuple[str, str]], create: bool = False - ) -> None: - """ - This method is included so that calls to this Store via Graph, e.g. 
- Graph("SPARQLStore"), can set the required parameters - """ - if type(configuration) == str: - self.query_endpoint = configuration - elif type(configuration) == tuple: - self.query_endpoint = configuration[0] - self.update_endpoint = configuration[1] - else: - raise Exception( - "configuration must be either a string (a single query endpoint URI) " - "or a tuple (a query/update endpoint URI pair)" - ) - - def query(self, *args: Any, **kwargs: Any) -> "Result": + def query(self, *args: Any, **kwargs: Any) -> Result: if not self.autocommit and not self.dirty_reads: self.commit() return SPARQLStore.query(self, *args, **kwargs) @@ -671,7 +653,7 @@ def query(self, *args: Any, **kwargs: Any) -> "Result": # type error: Signature of "triples" incompatible with supertype "Store" def triples( # type: ignore[override] self, *args: Any, **kwargs: Any - ) -> Iterator[Tuple["_TripleType", None]]: + ) -> Iterator[Tuple[_TripleType, None]]: if not self.autocommit and not self.dirty_reads: self.commit() return SPARQLStore.triples(self, *args, **kwargs) @@ -679,7 +661,7 @@ def triples( # type: ignore[override] # type error: Signature of "contexts" incompatible with supertype "Store" def contexts( # type: ignore[override] self, *args: Any, **kwargs: Any - ) -> Generator["_ContextIdentifierType", None, None]: + ) -> Generator[_ContextIdentifierType, None, None]: if not self.autocommit and not self.dirty_reads: self.commit() return SPARQLStore.contexts(self, *args, **kwargs) @@ -689,8 +671,9 @@ def __len__(self, *args: Any, **kwargs: Any) -> int: self.commit() return SPARQLStore.__len__(self, *args, **kwargs) - # TODO: FIXME: open is defined twice - def open(self, configuration: Union[str, Tuple[str, str]], create: bool = False) -> None: # type: ignore[no-redef] # noqa: F811 + def open( + self, configuration: Union[str, Tuple[str, str]], create: bool = False + ) -> None: """ sets the endpoint URLs for this SPARQLStore @@ -731,8 +714,8 @@ def rollback(self) -> None: def add( self, - 
spo: "_TripleType", - context: Optional["_ContextType"] = None, + spo: _TripleType, + context: Optional[_ContextType] = None, quoted: bool = False, ) -> None: """Add a triple to the store of triples.""" @@ -756,7 +739,7 @@ def add( if self.autocommit: self.commit() - def addN(self, quads: Iterable["_QuadType"]) -> None: # noqa: N802 + def addN(self, quads: Iterable[_QuadType]) -> None: # noqa: N802 """Add a list of quads to the store.""" if not self.update_endpoint: raise Exception("UpdateEndpoint is not set - call 'open'") @@ -781,7 +764,7 @@ def addN(self, quads: Iterable["_QuadType"]) -> None: # noqa: N802 # type error: Signature of "remove" incompatible with supertype "Store" def remove( # type: ignore[override] - self, spo: "_TriplePatternType", context: Optional["_ContextType"] + self, spo: _TriplePatternType, context: Optional[_ContextType] ) -> None: """Remove a triple from the store""" if not self.update_endpoint: @@ -824,11 +807,11 @@ def _update(self, update): # type error: Signature of "update" incompatible with supertype "Store" def update( # type: ignore[override] self, - query: Union["Update", str], + query: Union[Update, str], initNs: Dict[str, Any] = {}, # noqa: N803 - initBindings: Dict["str", "Identifier"] = {}, - queryGraph: Optional[str] = None, - DEBUG: bool = False, + initBindings: Dict[str, Identifier] = {}, # noqa: N803 + queryGraph: Optional[str] = None, # noqa: N803 + DEBUG: bool = False, # noqa: N803 ): """ Perform a SPARQL Update Query against the endpoint, @@ -950,13 +933,13 @@ def _insert_named_graph(self, query: str, query_graph: str) -> str: return "".join(modified_query) - def add_graph(self, graph: "Graph") -> None: + def add_graph(self, graph: Graph) -> None: if not self.graph_aware: Store.add_graph(self, graph) elif graph.identifier != DATASET_DEFAULT_GRAPH_ID: self.update("CREATE GRAPH %s" % self.node_to_sparql(graph.identifier)) - def remove_graph(self, graph: "Graph") -> None: + def remove_graph(self, graph: Graph) -> None: 
if not self.graph_aware: Store.remove_graph(self, graph) elif graph.identifier == DATASET_DEFAULT_GRAPH_ID: @@ -966,48 +949,48 @@ def remove_graph(self, graph: "Graph") -> None: def subjects( self, - predicate: Optional["_PredicateType"] = None, - object: Optional["_ObjectType"] = None, - ) -> Generator["_SubjectType", None, None]: + predicate: Optional[_PredicateType] = None, + object: Optional[_ObjectType] = None, + ) -> Generator[_SubjectType, None, None]: """A generator of subjects with the given predicate and object""" for t, c in self.triples((None, predicate, object)): yield t[0] def predicates( self, - subject: Optional["_SubjectType"] = None, - object: Optional["_ObjectType"] = None, - ) -> Generator["_PredicateType", None, None]: + subject: Optional[_SubjectType] = None, + object: Optional[_ObjectType] = None, + ) -> Generator[_PredicateType, None, None]: """A generator of predicates with the given subject and object""" for t, c in self.triples((subject, None, object)): yield t[1] def objects( self, - subject: Optional["_SubjectType"] = None, - predicate: Optional["_PredicateType"] = None, - ) -> Generator["_ObjectType", None, None]: + subject: Optional[_SubjectType] = None, + predicate: Optional[_PredicateType] = None, + ) -> Generator[_ObjectType, None, None]: """A generator of objects with the given subject and predicate""" for t, c in self.triples((subject, predicate, None)): yield t[2] def subject_predicates( - self, object: Optional["_ObjectType"] = None - ) -> Generator[Tuple["_SubjectType", "_PredicateType"], None, None]: + self, object: Optional[_ObjectType] = None + ) -> Generator[Tuple[_SubjectType, _PredicateType], None, None]: """A generator of (subject, predicate) tuples for the given object""" for t, c in self.triples((None, None, object)): yield t[0], t[1] def subject_objects( - self, predicate: Optional["_PredicateType"] = None - ) -> Generator[Tuple["_SubjectType", "_ObjectType"], None, None]: + self, predicate: Optional[_PredicateType] 
= None + ) -> Generator[Tuple[_SubjectType, _ObjectType], None, None]: """A generator of (subject, object) tuples for the given predicate""" for t, c in self.triples((None, predicate, None)): yield t[0], t[2] def predicate_objects( - self, subject: Optional["_SubjectType"] = None - ) -> Generator[Tuple["_PredicateType", "_ObjectType"], None, None]: + self, subject: Optional[_SubjectType] = None + ) -> Generator[Tuple[_PredicateType, _ObjectType], None, None]: """A generator of (predicate, object) tuples for the given subject""" for t, c in self.triples((subject, None, None)): yield t[1], t[2] diff --git a/rdflib/query.py b/rdflib/query.py index 261ffde9a..f42b37bb2 100644 --- a/rdflib/query.py +++ b/rdflib/query.py @@ -34,6 +34,8 @@ "EncodeOnlyUnicode", ] +import rdflib.term + if TYPE_CHECKING: from rdflib.graph import Graph, _TripleType from rdflib.plugins.sparql.sparql import Query, Update @@ -50,16 +52,16 @@ class Processor: """ - def __init__(self, graph: "Graph"): + def __init__(self, graph: Graph): pass # type error: Missing return statement def query( # type: ignore[empty-body] self, - strOrQuery: Union[str, "Query"], # noqa: N803 - initBindings: Mapping["str", "Identifier"] = {}, # noqa: N803 + strOrQuery: Union[str, Query], # noqa: N803 + initBindings: Mapping[str, Identifier] = {}, # noqa: N803 initNs: Mapping[str, Any] = {}, # noqa: N803 - DEBUG: bool = False, + DEBUG: bool = False, # noqa: N803 ) -> Mapping[str, Any]: pass @@ -77,19 +79,19 @@ class update method. 
""" - def __init__(self, graph: "Graph"): + def __init__(self, graph: Graph): pass def update( self, - strOrQuery: Union[str, "Update"], # noqa: N803 - initBindings: Mapping["str", "Identifier"] = {}, # noqa: N803 - initNs: Mapping[str, Any] = {}, + strOrQuery: Union[str, Update], # noqa: N803 + initBindings: Mapping[str, Identifier] = {}, # noqa: N803 + initNs: Mapping[str, Any] = {}, # noqa: N803 ) -> None: pass -class ResultException(Exception): +class ResultException(Exception): # noqa: N818 pass @@ -114,7 +116,7 @@ def __getattr__(self, name: str) -> Any: return getattr(self.__stream, name) -class ResultRow(Tuple["Identifier", ...]): +class ResultRow(Tuple[rdflib.term.Identifier, ...]): """ a single result row allows accessing bindings as attributes or with [] @@ -123,28 +125,28 @@ class ResultRow(Tuple["Identifier", ...]): >>> rr=ResultRow({ Variable('a'): URIRef('urn:cake') }, [Variable('a')]) >>> rr[0] - rdflib.term.URIRef(u'urn:cake') + rdflib.term.URIRef('urn:cake') >>> rr[1] Traceback (most recent call last): ... IndexError: tuple index out of range >>> rr.a - rdflib.term.URIRef(u'urn:cake') + rdflib.term.URIRef('urn:cake') >>> rr.b Traceback (most recent call last): ... AttributeError: b >>> rr['a'] - rdflib.term.URIRef(u'urn:cake') + rdflib.term.URIRef('urn:cake') >>> rr['b'] Traceback (most recent call last): ... KeyError: 'b' >>> rr[Variable('a')] - rdflib.term.URIRef(u'urn:cake') + rdflib.term.URIRef('urn:cake') .. 
versionadded:: 4.0 @@ -152,23 +154,21 @@ class ResultRow(Tuple["Identifier", ...]): labels: Mapping[str, int] - def __new__( - cls, values: Mapping["Variable", "Identifier"], labels: List["Variable"] - ): + def __new__(cls, values: Mapping[Variable, Identifier], labels: List[Variable]): # type error: Value of type variable "Self" of "__new__" of "tuple" cannot be "ResultRow" [type-var] # type error: Generator has incompatible item type "Optional[Identifier]"; expected "_T_co" [misc] - instance = super(ResultRow, cls).__new__(cls, (values.get(v) for v in labels)) # type: ignore[type-var, misc] + instance = super(ResultRow, cls).__new__(cls, (values.get(v) for v in labels)) # type: ignore[type-var, misc, unused-ignore] instance.labels = dict((str(x[1]), x[0]) for x in enumerate(labels)) return instance - def __getattr__(self, name: str) -> "Identifier": + def __getattr__(self, name: str) -> Identifier: if name not in self.labels: raise AttributeError(name) return tuple.__getitem__(self, self.labels[name]) # type error: Signature of "__getitem__" incompatible with supertype "tuple" # type error: Signature of "__getitem__" incompatible with supertype "Sequence" - def __getitem__(self, name: Union[str, int, Any]) -> "Identifier": # type: ignore[override] + def __getitem__(self, name: Union[str, int, Any]) -> Identifier: # type: ignore[override] try: # type error: Invalid index type "Union[str, int, Any]" for "tuple"; expected type "int" return tuple.__getitem__(self, name) # type: ignore[index] @@ -181,24 +181,22 @@ def __getitem__(self, name: Union[str, int, Any]) -> "Identifier": # type: igno raise KeyError(name) @overload - def get(self, name: str, default: "Identifier") -> "Identifier": - ... + def get(self, name: str, default: Identifier) -> Identifier: ... @overload def get( - self, name: str, default: Optional["Identifier"] = ... - ) -> Optional["Identifier"]: - ... + self, name: str, default: Optional[Identifier] = ... + ) -> Optional[Identifier]: ... 
def get( - self, name: str, default: Optional["Identifier"] = None - ) -> Optional["Identifier"]: + self, name: str, default: Optional[Identifier] = None + ) -> Optional[Identifier]: try: return self[name] except KeyError: return default - def asdict(self) -> Dict[str, "Identifier"]: + def asdict(self) -> Dict[str, Identifier]: return dict((v, self[v]) for v in self.labels if self[v] is not None) @@ -227,11 +225,11 @@ def __init__(self, type_: str): self.type = type_ #: variables contained in the result. - self.vars: Optional[List["Variable"]] = None - self._bindings: MutableSequence[Mapping["Variable", "Identifier"]] = None # type: ignore[assignment] - self._genbindings: Optional[Iterator[Mapping["Variable", "Identifier"]]] = None + self.vars: Optional[List[Variable]] = None + self._bindings: MutableSequence[Mapping[Variable, Identifier]] = None # type: ignore[assignment] + self._genbindings: Optional[Iterator[Mapping[Variable, Identifier]]] = None self.askAnswer: Optional[bool] = None - self.graph: Optional["Graph"] = None + self.graph: Optional[Graph] = None @property def bindings(self) -> MutableSequence[Mapping[Variable, Identifier]]: @@ -248,7 +246,7 @@ def bindings(self) -> MutableSequence[Mapping[Variable, Identifier]]: def bindings( self, b: Union[ - MutableSequence[Mapping["Variable", "Identifier"]], + MutableSequence[Mapping[Variable, Identifier]], Iterator[Mapping[Variable, Identifier]], ], ) -> None: @@ -265,7 +263,7 @@ def parse( format: Optional[str] = None, content_type: Optional[str] = None, **kwargs: Any, - ) -> "Result": + ) -> Result: from rdflib import plugin if format: @@ -358,7 +356,7 @@ def __bool__(self) -> bool: def __iter__( self, - ) -> Iterator[Union["_TripleType", bool, ResultRow]]: + ) -> Iterator[Union[_TripleType, bool, ResultRow]]: if self.type in ("CONSTRUCT", "DESCRIBE"): # type error: Item "None" of "Optional[Graph]" has no attribute "__iter__" (not iterable) for t in self.graph: # type: ignore[union-attr] diff --git 
a/rdflib/resource.py b/rdflib/resource.py index 0620c13d9..48c4710f6 100644 --- a/rdflib/resource.py +++ b/rdflib/resource.py @@ -63,13 +63,13 @@ Retrieve some basic facts:: >>> person.identifier - rdflib.term.URIRef(u'http://example.org/person/some1#self') + rdflib.term.URIRef('http://example.org/person/some1#self') >>> person.value(FOAF.name) - rdflib.term.Literal(u'Some Body') + rdflib.term.Literal('Some Body') >>> person.value(RDFS.comment) - rdflib.term.Literal(u'Just a Python & RDF hacker.', lang=u'en') + rdflib.term.Literal('Just a Python & RDF hacker.', lang='en') Resources can be sliced (like graphs, but the subject is fixed):: @@ -82,21 +82,21 @@ Resources as unicode are represented by their identifiers as unicode:: >>> %(unicode)s(person) #doctest: +SKIP - u'Resource(http://example.org/person/some1#self' + 'Resource(http://example.org/person/some1#self' Resource references are also Resources, so you can easily get e.g. a qname for the type of a resource, like:: >>> person.value(RDF.type).qname() - u'foaf:Person' + 'foaf:Person' Or for the predicates of a resource:: >>> sorted( ... p.qname() for p in person.predicates() ... 
) #doctest: +NORMALIZE_WHITESPACE +SKIP - [u'foaf:depiction', u'foaf:homepage', - u'foaf:name', u'rdf:type', u'rdfs:comment'] + ['foaf:depiction', 'foaf:homepage', + 'foaf:name', 'rdf:type', 'rdfs:comment'] Follow relations and get more data from their Resources as well:: @@ -172,18 +172,18 @@ >>> subclasses = list(artifact.transitive_subjects(RDFS.subClassOf)) >>> [c.qname() for c in subclasses] - [u'v:Artifact', u'v:Document', u'v:Paper'] + ['v:Artifact', 'v:Document', 'v:Paper'] and superclasses from the last subclass:: >>> [c.qname() for c in subclasses[-1].transitive_objects(RDFS.subClassOf)] - [u'v:Paper', u'v:Document', u'v:Artifact'] + ['v:Paper', 'v:Document', 'v:Artifact'] Get items from the Choice:: >>> choice = Resource(graph, URIRef("http://example.org/def/v#Choice")) >>> [it.qname() for it in choice.value(OWL.oneOf).items()] - [u'v:One', u'v:Other'] + ['v:One', 'v:Other'] On add, other resources are auto-unboxed: >>> paper = Resource(graph, URIRef("http://example.org/def/v#Paper")) diff --git a/rdflib/serializer.py b/rdflib/serializer.py index 95f910a2f..6f1230d59 100644 --- a/rdflib/serializer.py +++ b/rdflib/serializer.py @@ -1,5 +1,3 @@ -from __future__ import annotations - """ Serializer plugin interface. 
@@ -12,6 +10,8 @@ """ +from __future__ import annotations + from typing import IO, TYPE_CHECKING, Any, Optional, TypeVar, Union from rdflib.term import URIRef @@ -19,14 +19,15 @@ if TYPE_CHECKING: from rdflib.graph import Graph + __all__ = ["Serializer"] _StrT = TypeVar("_StrT", bound=str) class Serializer: - def __init__(self, store: "Graph"): - self.store: "Graph" = store + def __init__(self, store: Graph): + self.store: Graph = store self.encoding: str = "utf-8" self.base: Optional[str] = None diff --git a/rdflib/store.py b/rdflib/store.py index e3c9f7ab2..2ca03529a 100644 --- a/rdflib/store.py +++ b/rdflib/store.py @@ -1,3 +1,30 @@ +""" +============ +rdflib.store +============ + +Types of store +-------------- + +``Context-aware``: An RDF store capable of storing statements within contexts +is considered context-aware. Essentially, such a store is able to partition +the RDF model it represents into individual, named, and addressable +sub-graphs. + +Relevant Notation3 reference regarding formulae, quoted statements, and such: +http://www.w3.org/DesignIssues/Notation3.html + +``Formula-aware``: An RDF store capable of distinguishing between statements +that are asserted and statements that are quoted is considered formula-aware. + +``Transaction-capable``: capable of providing transactional integrity to the +RDF operations performed on it. + +``Graph-aware``: capable of keeping track of empty graphs. + +------ +""" + from __future__ import annotations import pickle @@ -33,39 +60,12 @@ from rdflib.query import Result from rdflib.term import Identifier, Node, URIRef -""" -============ -rdflib.store -============ - -Types of store --------------- - -``Context-aware``: An RDF store capable of storing statements within contexts -is considered context-aware. Essentially, such a store is able to partition -the RDF model it represents into individual, named, and addressable -sub-graphs. 
- -Relevant Notation3 reference regarding formulae, quoted statements, and such: -http://www.w3.org/DesignIssues/Notation3.html - -``Formula-aware``: An RDF store capable of distinguishing between statements -that are asserted and statements that are quoted is considered formula-aware. - -``Transaction-capable``: capable of providing transactional integrity to the -RDF operations performed on it. - -``Graph-aware``: capable of keeping track of empty graphs. - ------- -""" - # Constants representing the state of a Store (returned by the open method) VALID_STORE = 1 CORRUPTED_STORE = 0 NO_STORE = -1 -UNKNOWN = None +UNKNOWN: None = None Pickler = pickle.Pickler @@ -129,7 +129,7 @@ def register(self, object: Any, id: str) -> None: self._objects[id] = object self._ids[object] = id - def loads(self, s: bytes) -> "Node": + def loads(self, s: bytes) -> Node: up = Unpickler(BytesIO(s)) # NOTE on type error: https://github.com/python/mypy/issues/2427 # type error: Cannot assign to a method @@ -140,7 +140,7 @@ def loads(self, s: bytes) -> "Node": raise UnpicklingError("Could not find Node class for %s" % e) def dumps( - self, obj: "Node", protocol: Optional[Any] = None, bin: Optional[Any] = None + self, obj: Node, protocol: Optional[Any] = None, bin: Optional[Any] = None ): src = BytesIO() p = Pickler(src) @@ -175,7 +175,7 @@ class Store: def __init__( self, configuration: Optional[str] = None, - identifier: Optional["Identifier"] = None, + identifier: Optional[Identifier] = None, ): """ identifier: URIRef of the Store. 
Defaults to CWD @@ -242,8 +242,8 @@ def gc(self) -> None: # RDF APIs def add( self, - triple: "_TripleType", - context: "_ContextType", + triple: _TripleType, + context: _ContextType, quoted: bool = False, ) -> None: """ @@ -256,7 +256,7 @@ def add( """ self.dispatcher.dispatch(TripleAddedEvent(triple=triple, context=context)) - def addN(self, quads: Iterable["_QuadType"]) -> None: # noqa: N802 + def addN(self, quads: Iterable[_QuadType]) -> None: # noqa: N802 """ Adds each item in the list of statements to a specific context. The quoted argument is interpreted by formula-aware stores to indicate this @@ -273,8 +273,8 @@ def addN(self, quads: Iterable["_QuadType"]) -> None: # noqa: N802 def remove( self, - triple: "_TriplePatternType", - context: Optional["_ContextType"] = None, + triple: _TriplePatternType, + context: Optional[_ContextType] = None, ) -> None: """Remove the set of triples matching the pattern from the store""" self.dispatcher.dispatch(TripleRemovedEvent(triple=triple, context=context)) @@ -282,15 +282,15 @@ def remove( def triples_choices( self, triple: Union[ - Tuple[List["_SubjectType"], "_PredicateType", "_ObjectType"], - Tuple["_SubjectType", List["_PredicateType"], "_ObjectType"], - Tuple["_SubjectType", "_PredicateType", List["_ObjectType"]], + Tuple[List[_SubjectType], _PredicateType, _ObjectType], + Tuple[_SubjectType, List[_PredicateType], _ObjectType], + Tuple[_SubjectType, _PredicateType, List[_ObjectType]], ], - context: Optional["_ContextType"] = None, + context: Optional[_ContextType] = None, ) -> Generator[ Tuple[ _TripleType, - Iterator[Optional["_ContextType"]], + Iterator[Optional[_ContextType]], ], None, None, @@ -346,9 +346,9 @@ def triples_choices( # type error: Missing return statement def triples( # type: ignore[return] self, - triple_pattern: "_TriplePatternType", - context: Optional["_ContextType"] = None, - ) -> Iterator[Tuple["_TripleType", Iterator[Optional["_ContextType"]]]]: + triple_pattern: _TriplePatternType, + 
context: Optional[_ContextType] = None, + ) -> Iterator[Tuple[_TripleType, Iterator[Optional[_ContextType]]]]: """ A generator over all the triples matching the pattern. Pattern can include any objects for used for comparing against nodes in the store, @@ -364,7 +364,7 @@ def triples( # type: ignore[return] # variants of triples will be done if / when optimization is needed # type error: Missing return statement - def __len__(self, context: Optional["_ContextType"] = None) -> int: # type: ignore[empty-body] + def __len__(self, context: Optional[_ContextType] = None) -> int: # type: ignore[empty-body] """ Number of statements in the store. This should only account for non- quoted (asserted) statements if the context is not specified, @@ -376,8 +376,8 @@ def __len__(self, context: Optional["_ContextType"] = None) -> int: # type: ign # type error: Missing return statement def contexts( # type: ignore[empty-body] - self, triple: Optional["_TripleType"] = None - ) -> Generator["_ContextType", None, None]: + self, triple: Optional[_TripleType] = None + ) -> Generator[_ContextType, None, None]: """ Generator over all contexts in the graph. If triple is specified, a generator over all contexts the triple is in. @@ -390,12 +390,12 @@ def contexts( # type: ignore[empty-body] # TODO FIXME: the result of query is inconsistent. def query( self, - query: Union["Query", str], + query: Union[Query, str], initNs: Mapping[str, Any], # noqa: N803 - initBindings: Mapping["str", "Identifier"], # noqa: N803 + initBindings: Mapping[str, Identifier], # noqa: N803 queryGraph: str, # noqa: N803 **kwargs: Any, - ) -> "Result": + ) -> Result: """ If stores provide their own SPARQL implementation, override this. 
@@ -413,9 +413,9 @@ def query( def update( self, - update: Union["Update", str], + update: Union[Update, str], initNs: Mapping[str, Any], # noqa: N803 - initBindings: Mapping["str", "Identifier"], # noqa: N803 + initBindings: Mapping[str, Identifier], # noqa: N803 queryGraph: str, # noqa: N803 **kwargs: Any, ) -> None: @@ -437,18 +437,18 @@ def update( # Optional Namespace methods - def bind(self, prefix: str, namespace: "URIRef", override: bool = True) -> None: + def bind(self, prefix: str, namespace: URIRef, override: bool = True) -> None: """ :param override: rebind, even if the given namespace is already bound to another prefix. """ - def prefix(self, namespace: "URIRef") -> Optional["str"]: + def prefix(self, namespace: URIRef) -> Optional[str]: """""" - def namespace(self, prefix: str) -> Optional["URIRef"]: + def namespace(self, prefix: str) -> Optional[URIRef]: """ """ - def namespaces(self) -> Iterator[Tuple[str, "URIRef"]]: + def namespaces(self) -> Iterator[Tuple[str, URIRef]]: """ """ # This is here so that the function becomes an empty generator. # See https://stackoverflow.com/q/13243766 and @@ -466,7 +466,7 @@ def rollback(self) -> None: # Optional graph methods - def add_graph(self, graph: "Graph") -> None: + def add_graph(self, graph: Graph) -> None: """ Add a graph to the store, no effect if the graph already exists. 
@@ -474,7 +474,7 @@ def add_graph(self, graph: "Graph") -> None: """ raise Exception("Graph method called on non-graph_aware store") - def remove_graph(self, graph: "Graph") -> None: + def remove_graph(self, graph: Graph) -> None: """ Remove a graph from the store, this should also remove all triples in the graph diff --git a/rdflib/term.py b/rdflib/term.py index 89f67017a..3c88c31f0 100644 --- a/rdflib/term.py +++ b/rdflib/term.py @@ -20,8 +20,10 @@ * Numerical Ranges """ + from __future__ import annotations +import abc import re from fractions import Fraction @@ -78,6 +80,15 @@ from .namespace import NamespaceManager from .paths import AlternativePath, InvPath, NegatedPath, Path, SequencePath +_HAS_HTML5LIB = False + +try: + import html5lib + + _HAS_HTML5LIB = True +except ImportError: + html5lib = None + _SKOLEM_DEFAULT_AUTHORITY = "https://rdflib.github.io" logger = logging.getLogger(__name__) @@ -121,13 +132,16 @@ def _is_valid_unicode(value: Union[str, bytes]) -> bool: return True -class Node: +class Node(abc.ABC): """ A Node in the Graph. """ __slots__ = () + @abc.abstractmethod + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: ... 
+ class Identifier(Node, str): # allow Identifiers to be Nodes in the Graph """ @@ -137,7 +151,7 @@ class Identifier(Node, str): # allow Identifiers to be Nodes in the Graph __slots__ = () - def __new__(cls, value: str) -> "Identifier": + def __new__(cls, value: str) -> Identifier: return str.__new__(cls, value) def eq(self, other: Any) -> bool: @@ -173,7 +187,7 @@ def __eq__(self, other: Any) -> bool: False """ - if type(self) == type(other): + if type(self) is type(other): return str(self) == str(other) else: return False @@ -191,7 +205,7 @@ def __gt__(self, other: Any) -> bool: """ if other is None: return True # everything bigger than None - elif type(self) == type(other): + elif type(self) is type(other): return str(self) > str(other) elif isinstance(other, Node): return _ORDERING[type(self)] > _ORDERING[type(other)] @@ -201,7 +215,7 @@ def __gt__(self, other: Any) -> bool: def __lt__(self, other: Any) -> bool: if other is None: return False # Nothing is less than None - elif type(self) == type(other): + elif type(self) is type(other): return str(self) < str(other) elif isinstance(other, Node): return _ORDERING[type(self)] < _ORDERING[type(other)] @@ -262,12 +276,12 @@ class URIRef(IdentifiedNode): __slots__ = () - __or__: Callable[["URIRef", Union["URIRef", "Path"]], "AlternativePath"] - __invert__: Callable[["URIRef"], "InvPath"] - __neg__: Callable[["URIRef"], "NegatedPath"] - __truediv__: Callable[["URIRef", Union["URIRef", "Path"]], "SequencePath"] + __or__: Callable[[URIRef, Union[URIRef, Path]], AlternativePath] + __invert__: Callable[[URIRef], InvPath] + __neg__: Callable[[URIRef], NegatedPath] + __truediv__: Callable[[URIRef, Union[URIRef, Path]], SequencePath] - def __new__(cls, value: str, base: Optional[str] = None) -> "URIRef": + def __new__(cls, value: str, base: Optional[str] = None) -> URIRef: if base is not None: ends_in_hash = value.endswith("#") # type error: Argument "allow_fragments" to "urljoin" has incompatible type "int"; expected 
"bool" @@ -289,7 +303,7 @@ def __new__(cls, value: str, base: Optional[str] = None) -> "URIRef": rt = str.__new__(cls, value, "utf-8") # type: ignore[call-overload] return rt - def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: """ This will do a limited check for valid URIs, essentially just making sure that the string includes no illegal @@ -310,7 +324,7 @@ def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: else: return "<%s>" % self - def defrag(self) -> "URIRef": + def defrag(self) -> URIRef: if "#" in self: url, frag = urldefrag(self) return URIRef(url) @@ -329,7 +343,7 @@ def fragment(self) -> str: """ return urlparse(self).fragment - def __reduce__(self) -> Tuple[Type["URIRef"], Tuple[str]]: + def __reduce__(self) -> Tuple[Type[URIRef], Tuple[str]]: return (URIRef, (str(self),)) def __repr__(self) -> str: @@ -340,13 +354,13 @@ def __repr__(self) -> str: return """%s(%s)""" % (clsName, super(URIRef, self).__repr__()) - def __add__(self, other) -> "URIRef": + def __add__(self, other) -> URIRef: return self.__class__(str(self) + other) - def __radd__(self, other) -> "URIRef": + def __radd__(self, other) -> URIRef: return self.__class__(other + str(self)) - def __mod__(self, other) -> "URIRef": + def __mod__(self, other) -> URIRef: return self.__class__(str(self) % other) @@ -426,7 +440,7 @@ def __new__( value: Optional[str] = None, _sn_gen: Callable[[], str] = _serial_number_generator(), _prefix: str = _unique_id(), - ) -> "BNode": + ) -> BNode: """ # only store implementations should pass in a value """ @@ -446,10 +460,10 @@ def __new__( # type error: Incompatible return value type (got "Identifier", expected "BNode") return Identifier.__new__(cls, value) # type: ignore[return-value] - def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> 
str: return "_:%s" % self - def __reduce__(self) -> Tuple[Type["BNode"], Tuple[str]]: + def __reduce__(self) -> Tuple[Type[BNode], Tuple[str]]: return (BNode, (str(self),)) def __repr__(self) -> str: @@ -476,7 +490,7 @@ def skolemize( class Literal(Identifier): - __doc__ = """ + """ RDF 1.1's Literals Section: http://www.w3.org/TR/rdf-concepts/#section-Graph-Literal @@ -580,7 +594,7 @@ def __new__( lang: Optional[str] = None, datatype: Optional[str] = None, normalize: Optional[bool] = None, - ) -> "Literal": + ) -> Literal: if lang == "": lang = None # no empty lang-tags in RDF @@ -661,7 +675,7 @@ def __new__( return inst - def normalize(self) -> "Literal": + def normalize(self) -> Literal: """ Returns a new literal with a normalised lexical representation of this literal @@ -707,7 +721,7 @@ def datatype(self) -> Optional[URIRef]: def __reduce__( self, - ) -> Tuple[Type["Literal"], Tuple[str, Union[str, None], Union[str, None]]]: + ) -> Tuple[Type[Literal], Tuple[str, Union[str, None], Union[str, None]]]: return ( Literal, (str(self), self.language, self.datatype), @@ -721,7 +735,7 @@ def __setstate__(self, arg: Tuple[Any, Dict[str, Any]]) -> None: self._language = d["language"] self._datatype = d["datatype"] - def __add__(self, val: Any) -> "Literal": + def __add__(self, val: Any) -> Literal: """ >>> from rdflib.namespace import XSD >>> Literal(1) + 1 @@ -826,7 +840,7 @@ def __add__(self, val: Any) -> "Literal": return Literal(s, self.language, datatype=new_datatype) - def __sub__(self, val: Any) -> "Literal": + def __sub__(self, val: Any) -> Literal: """ >>> from rdflib.namespace import XSD >>> Literal(2) - 1 @@ -901,9 +915,11 @@ def __sub__(self, val: Any) -> "Literal": return Literal( self.toPython() - val.toPython(), self.language, - datatype=_XSD_DURATION - if self.datatype in (_XSD_DATETIME, _XSD_DATE, _XSD_TIME) - else self.datatype, + datatype=( + _XSD_DURATION + if self.datatype in (_XSD_DATETIME, _XSD_DATE, _XSD_TIME) + else self.datatype + ), ) # if the 
datatypes are not the same but are both numeric, subtract the Python values and strip off decimal junk @@ -938,7 +954,7 @@ def __bool__(self) -> bool: return bool(self.value) return len(self) != 0 - def __neg__(self) -> "Literal": + def __neg__(self) -> Literal: """ >>> (- Literal(1)) rdflib.term.Literal('-1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) @@ -960,7 +976,7 @@ def __neg__(self) -> "Literal": else: raise TypeError("Not a number; %s" % repr(self)) - def __pos__(self) -> "Literal": + def __pos__(self) -> Literal: """ >>> (+ Literal(1)) rdflib.term.Literal('1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) @@ -980,7 +996,7 @@ def __pos__(self) -> "Literal": else: raise TypeError("Not a number; %s" % repr(self)) - def __abs__(self) -> "Literal": + def __abs__(self) -> Literal: """ >>> abs(Literal(-1)) rdflib.term.Literal('1', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) @@ -999,7 +1015,7 @@ def __abs__(self) -> "Literal": else: raise TypeError("Not a number; %s" % repr(self)) - def __invert__(self) -> "Literal": + def __invert__(self) -> Literal: """ >>> ~(Literal(-1)) rdflib.term.Literal('0', datatype=rdflib.term.URIRef('http://www.w3.org/2001/XMLSchema#integer')) @@ -1387,7 +1403,7 @@ def eq(self, other: Any) -> bool: def neq(self, other: Any) -> bool: return not self.eq(other) - def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: r''' Returns a representation in the N3 format. 
@@ -1448,7 +1464,7 @@ def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: def _literal_n3( self, use_plain: bool = False, - qname_callback: Optional[Callable[[str], str]] = None, + qname_callback: Optional[Callable[[URIRef], Optional[str]]] = None, ) -> str: """ Using plain literal (shorthand) output:: @@ -1609,19 +1625,34 @@ def _parseXML(xmlstring: str) -> xml.dom.minidom.Document: # noqa: N802 return retval -def _parseHTML(htmltext: str) -> xml.dom.minidom.DocumentFragment: # noqa: N802 - try: - import html5lib - - parser = html5lib.HTMLParser(tree=html5lib.treebuilders.getTreeBuilder("dom")) - retval = parser.parseFragment(htmltext) - retval.normalize() - return retval - except ImportError: - raise ImportError( - "HTML5 parser not available. Try installing" - + " html5lib " - ) +def _parse_html(lexical_form: str) -> xml.dom.minidom.DocumentFragment: + """ + Parse the lexical form of an HTML literal into a document fragment + using the ``dom`` from html5lib tree builder. + + :param lexical_form: The lexical form of the HTML literal. + :return: A document fragment representing the HTML literal. + :raises: `html5lib.html5parser.ParseError` if the lexical form is + not valid HTML. + """ + parser = html5lib.HTMLParser( + tree=html5lib.treebuilders.getTreeBuilder("dom"), strict=True + ) + result: xml.dom.minidom.DocumentFragment = parser.parseFragment(lexical_form) + result.normalize() + return result + + +def _write_html(value: xml.dom.minidom.DocumentFragment) -> bytes: + """ + Serialize a document fragment representing an HTML literal into + its lexical form. + + :param value: A document fragment representing an HTML literal. + :return: The lexical form of the HTML literal. 
+ """ + result = html5lib.serialize(value, tree="dom") + return result def _writeXML( # noqa: N802 @@ -1881,8 +1912,8 @@ def _well_formed_negative_integer(lexical: Union[str, bytes], value: Any) -> boo def _py2literal( obj: Any, pType: Any, # noqa: N803 - castFunc: Optional[Callable[[Any], Any]], - dType: Optional[_StrT], + castFunc: Optional[Callable[[Any], Any]], # noqa: N803 + dType: Optional[_StrT], # noqa: N803 ) -> Tuple[Any, Optional[_StrT]]: if castFunc is not None: return castFunc(obj), dType @@ -1937,14 +1968,21 @@ def _castPythonToLiteral( # noqa: N802 (Duration, (lambda i: duration_isoformat(i), _XSD_DURATION)), (timedelta, (lambda i: duration_isoformat(i), _XSD_DAYTIMEDURATION)), (xml.dom.minidom.Document, (_writeXML, _RDF_XMLLITERAL)), - # this is a bit dirty - by accident the html5lib parser produces - # DocumentFragments, and the xml parser Documents, letting this - # decide what datatype to use makes roundtripping easier, but it a - # bit random - (xml.dom.minidom.DocumentFragment, (_writeXML, _RDF_HTMLLITERAL)), (Fraction, (None, _OWL_RATIONAL)), ] +if html5lib is not None: + # This is a bit dirty, by accident the html5lib parser produces + # DocumentFragments, and the xml parser Documents, letting this + # decide what datatype to use makes roundtripping easier, but it a + # bit random. + # + # This must happen before _GenericPythonToXSDRules is assigned to + # _OriginalGenericPythonToXSDRules. + _GenericPythonToXSDRules.append( + (xml.dom.minidom.DocumentFragment, (_write_html, _RDF_HTMLLITERAL)) + ) + _OriginalGenericPythonToXSDRules = list(_GenericPythonToXSDRules) _SpecificPythonToXSDRules: List[ @@ -1995,9 +2033,13 @@ def _castPythonToLiteral( # noqa: N802 URIRef(_XSD_PFX + "base64Binary"): b64decode, URIRef(_XSD_PFX + "anyURI"): None, _RDF_XMLLITERAL: _parseXML, - _RDF_HTMLLITERAL: _parseHTML, } +if html5lib is not None: + # It is probably best to keep this close to the definition of + # _GenericPythonToXSDRules so nobody misses it. 
+ XSDToPython[_RDF_HTMLLITERAL] = _parse_html + _check_well_formed_types: Dict[URIRef, Callable[[Union[str, bytes], Any], bool]] = { URIRef(_XSD_PFX + "boolean"): _well_formed_boolean, URIRef(_XSD_PFX + "nonPositiveInteger"): _well_formed_non_positive_integer, @@ -2141,7 +2183,7 @@ class Variable(Identifier): __slots__ = () - def __new__(cls, value: str) -> "Variable": + def __new__(cls, value: str) -> Variable: if len(value) == 0: raise Exception("Attempted to create variable with empty string as name!") if value[0] == "?": @@ -2159,10 +2201,10 @@ def __repr__(self) -> str: def toPython(self) -> str: # noqa: N802 return "?%s" % self - def n3(self, namespace_manager: Optional["NamespaceManager"] = None) -> str: + def n3(self, namespace_manager: Optional[NamespaceManager] = None) -> str: return "?%s" % self - def __reduce__(self) -> Tuple[Type["Variable"], Tuple[str]]: + def __reduce__(self) -> Tuple[Type[Variable], Tuple[str]]: return (Variable, (str(self),)) @@ -2212,9 +2254,11 @@ def recurse(): # length would be unnecessary. In Python 3, # the semantics of map has changed (why, oh why???) and the check # for the length becomes necessary... 
- if len(node.childNodes) != len(other.childNodes): + # type error: Item "None" of "Union[None, Attr, Comment, Document, DocumentFragment, DocumentType, Element, Entity, Notation, ProcessingInstruction, Text]" has no attribute "childNodes" + if len(node.childNodes) != len(other.childNodes): # type: ignore[union-attr] return False - for nc, oc in map(lambda x, y: (x, y), node.childNodes, other.childNodes): + # type error: Item "None" of "Union[None, Attr, Comment, Document, DocumentFragment, DocumentType, Element, Entity, Notation, ProcessingInstruction, Text]" has no attribute "childNodes" + for nc, oc in map(lambda x, y: (x, y), node.childNodes, other.childNodes): # type: ignore[union-attr] if not _isEqualXMLNode(nc, oc): return False # if we got here then everything is fine: diff --git a/rdflib/tools/chunk_serializer.py b/rdflib/tools/chunk_serializer.py index 9f9f133a9..e5a6155b1 100644 --- a/rdflib/tools/chunk_serializer.py +++ b/rdflib/tools/chunk_serializer.py @@ -6,6 +6,8 @@ file, which will be a Turtle file. 
""" +from __future__ import annotations + from contextlib import ExitStack, contextmanager from pathlib import Path from typing import TYPE_CHECKING, BinaryIO, Generator, Optional, Tuple diff --git a/rdflib/tools/csv2rdf.py b/rdflib/tools/csv2rdf.py index fe740356a..9c3560f42 100644 --- a/rdflib/tools/csv2rdf.py +++ b/rdflib/tools/csv2rdf.py @@ -7,6 +7,8 @@ """ +from __future__ import annotations + import codecs import configparser import csv @@ -17,11 +19,12 @@ import sys import time import warnings +from typing import Any, Dict, List, Optional, Tuple, Union from urllib.parse import quote import rdflib -from rdflib import RDF, RDFS -from rdflib.namespace import split_uri +from rdflib.namespace import RDF, RDFS, split_uri +from rdflib.term import URIRef __all__ = ["CSV2RDF"] @@ -88,10 +91,10 @@ """ # bah - ugly global -uris = {} +uris: Dict[Any, Tuple[URIRef, Optional[URIRef]]] = {} -def toProperty(label): +def toProperty(label: str): # noqa: N802 """ CamelCase + lowercase initial a string @@ -103,17 +106,18 @@ def toProperty(label): """ label = re.sub(r"[^\w]", " ", label) label = re.sub("([a-z])([A-Z])", "\\1 \\2", label) - label = label.split(" ") + # type error: Incompatible types in assignment (expression has type "None", variable has type "BinaryIO") + label = label.split(" ") # type: ignore[assignment] return "".join([label[0].lower()] + [x.capitalize() for x in label[1:]]) -def toPropertyLabel(label): +def toPropertyLabel(label): # noqa: N802 if not label[1:2].isupper(): return label[0:1].lower() + label[1:] return label -def index(l_, i): +def index(l_: List[int], i: Tuple[int, ...]) -> Tuple[int, ...]: """return a set of indexes from a list >>> index([1,2,3],(0,2)) (1, 3) @@ -127,7 +131,7 @@ def csv_reader(csv_data, dialect=csv.excel, **kwargs): yield row -def prefixuri(x, prefix, class_=None): +def prefixuri(x, prefix, class_: Optional[URIRef] = None): if prefix: r = rdflib.URIRef(prefix + quote(x.encode("utf8").replace(" ", "_"), safe="")) else: @@ 
-143,12 +147,13 @@ class NodeMaker: def range(self): return rdflib.RDFS.Literal - def __call__(self, x): + def __call__(self, x: Any): return rdflib.Literal(x) class NodeUri(NodeMaker): def __init__(self, prefix, class_): + self.class_: Optional[URIRef] = None self.prefix = prefix if class_: self.class_ = rdflib.URIRef(class_) @@ -301,7 +306,7 @@ def __init__(self): self.CLASS = None self.BASE = None self.PROPBASE = None - self.IDENT = "auto" + self.IDENT: Union[Tuple[str, ...], str] = "auto" self.LABEL = None self.DEFINECLASS = False self.SKIP = 0 @@ -373,15 +378,20 @@ def convert(self, csvreader): uri = self.BASE[ "_".join( [ - quote(x.encode("utf8").replace(" ", "_"), safe="") - for x in index(l_, self.IDENT) + # type error: "int" has no attribute "encode" + quote(x.encode("utf8").replace(" ", "_"), safe="") # type: ignore[attr-defined] + # type error: Argument 2 to "index" has incompatible type "Union[Tuple[str, ...], str]"; expected "Tuple[int, ...]" + for x in index(l_, self.IDENT) # type: ignore[arg-type] ] ) ] if self.LABEL: self.triple( - uri, RDFS.label, rdflib.Literal(" ".join(index(l_, self.LABEL))) + # type error: Argument 1 to "join" of "str" has incompatible type "Tuple[int, ...]"; expected "Iterable[str]" + uri, + RDFS.label, + rdflib.Literal(" ".join(index(l_, self.LABEL))), # type: ignore[arg-type] ) if self.CLASS: @@ -389,7 +399,8 @@ def convert(self, csvreader): self.triple(uri, RDF.type, self.CLASS) for i, x in enumerate(l_): - x = x.strip() + # type error: "int" has no attribute "strip" + x = x.strip() # type: ignore[attr-defined] if x != "": if self.COLUMNS.get(i, self.DEFAULT) == "ignore": continue @@ -405,7 +416,8 @@ def convert(self, csvreader): warnings.warn( "Could not process value for column " + "%d:%s in row %d, ignoring: %s " - % (i, headers[i], rows, e.message) + # type error: "Exception" has no attribute "message" + % (i, headers[i], rows, e.message) # type: ignore[attr-defined] ) rows += 1 @@ -420,13 +432,19 @@ def convert(self, 
csvreader): # output types/labels for generated URIs classes = set() - for l_, x in uris.items(): - u, c = x - self.triple(u, RDFS.label, rdflib.Literal(l_)) - if c: - c = rdflib.URIRef(c) + # type error: Incompatible types in assignment (expression has type "Tuple[URIRef, Optional[URIRef]]", variable has type "int") + for l_, x in uris.items(): # type: ignore[assignment] + # type error: "int" object is not iterable + u, c = x # type: ignore[misc] + # type error: Cannot determine type of "u" + self.triple(u, RDFS.label, rdflib.Literal(l_)) # type: ignore[has-type] + # type error: Cannot determine type of "c" + if c: # type: ignore[has-type] + # type error: Cannot determine type of "c" + c = rdflib.URIRef(c) # type: ignore[has-type] classes.add(c) - self.triple(u, RDF.type, c) + # type error: Cannot determine type of "u" + self.triple(u, RDF.type, c) # type: ignore[has-type] for c in classes: self.triple(c, RDF.type, RDFS.Class) @@ -439,6 +457,7 @@ def convert(self, csvreader): def main(): csv2rdf = CSV2RDF() + opts: Union[Dict[str, str], List[Tuple[str, str]]] opts, files = getopt.getopt( sys.argv[1:], "hc:b:p:i:o:Cf:l:s:d:D:", diff --git a/rdflib/tools/defined_namespace_creator.py b/rdflib/tools/defined_namespace_creator.py index dcc6a3be7..cddc57c2f 100644 --- a/rdflib/tools/defined_namespace_creator.py +++ b/rdflib/tools/defined_namespace_creator.py @@ -1,5 +1,3 @@ -from __future__ import annotations - """ This rdflib Python script creates a DefinedNamespace Python file from a given RDF file @@ -12,17 +10,17 @@ Nicholas J. 
Car, Dec, 2021 """ + +from __future__ import annotations + import argparse import datetime -import sys from pathlib import Path from typing import TYPE_CHECKING, Iterable, List, Tuple -sys.path.append(str(Path(__file__).parent.absolute().parent.parent)) - -from rdflib.graph import Graph # noqa: E402 -from rdflib.namespace import DCTERMS, OWL, RDFS, SKOS # noqa: E402 -from rdflib.util import guess_format # noqa: E402 +from rdflib.graph import Graph +from rdflib.namespace import DCTERMS, OWL, RDFS, SKOS +from rdflib.util import guess_format if TYPE_CHECKING: from rdflib.query import ResultRow diff --git a/rdflib/tools/graphisomorphism.py b/rdflib/tools/graphisomorphism.py index 004b567b8..01f6adf5a 100644 --- a/rdflib/tools/graphisomorphism.py +++ b/rdflib/tools/graphisomorphism.py @@ -49,7 +49,7 @@ def vhashtriple(self, triple, term, done): else: yield self.vhash(triple[p], done=True) - def __eq__(self, G): + def __eq__(self, G): # noqa: N803 """Graph isomorphism testing.""" if not isinstance(G, IsomorphicTestableGraph): return False @@ -59,7 +59,7 @@ def __eq__(self, G): return True # @@ return self.internal_hash() == G.internal_hash() - def __ne__(self, G): + def __ne__(self, G): # noqa: N803 """Negative graph isomorphism testing.""" return not self.__eq__(G) @@ -91,7 +91,7 @@ def main(): (options, args) = op.parse_args() graphs = [] - graph2FName = {} + graph2FName = {} # noqa: N806 if options.stdin: graph = IsomorphicTestableGraph().parse(sys.stdin, format=options.inputFormat) graphs.append(graph) diff --git a/rdflib/tools/rdf2dot.py b/rdflib/tools/rdf2dot.py index 0ca1fa1e0..5f78f4076 100644 --- a/rdflib/tools/rdf2dot.py +++ b/rdflib/tools/rdf2dot.py @@ -9,13 +9,18 @@ """ +from __future__ import annotations + import collections import html import sys +from typing import Any, Dict, TextIO import rdflib import rdflib.extras.cmdlineutils from rdflib import XSD +from rdflib.graph import Graph +from rdflib.term import Literal, Node, URIRef LABEL_PROPERTIES = [ 
rdflib.RDFS.label, @@ -77,31 +82,32 @@ ISACOLOR = "black" -def rdf2dot(g, stream, opts={}): +def rdf2dot(g: Graph, stream: TextIO, opts: Dict[str, Any] = {}): """ Convert the RDF graph to DOT writes the dot output to the stream """ fields = collections.defaultdict(set) - nodes = {} + nodes: Dict[Node, str] = {} - def node(x): + def node(x: Node) -> str: if x not in nodes: nodes[x] = "node%d" % len(nodes) return nodes[x] - def label(x, g): - for labelProp in LABEL_PROPERTIES: + def label(x: Node, g: Graph): + for labelProp in LABEL_PROPERTIES: # noqa: N806 l_ = g.value(x, labelProp) if l_: return l_ try: - return g.namespace_manager.compute_qname(x)[2] + # type error: Argument 1 to "compute_qname" of "NamespaceManager" has incompatible type "Node"; expected "str" + return g.namespace_manager.compute_qname(x)[2] # type: ignore[arg-type] except Exception: return x - def formatliteral(l, g): + def formatliteral(l: Literal, g): # noqa: E741 v = html.escape(l) if l.datatype: return ""%s"^^%s" % (v, qname(l.datatype, g)) @@ -109,7 +115,7 @@ def formatliteral(l, g): return ""%s"@%s" % (v, l.language) return ""%s"" % v - def qname(x, g): + def qname(x: URIRef, g: Graph) -> str: try: q = g.compute_qname(x) return q[0] + ":" + q[2] @@ -131,9 +137,11 @@ def color(p): "\t%s -> %s [ color=%s, label=< %s > ] ;\n" ) - stream.write(opstr % (sn, on, color(p), qname(p, g))) + # type error: Argument 1 to "qname" has incompatible type "Node"; expected "URIRef" + stream.write(opstr % (sn, on, color(p), qname(p, g))) # type: ignore[arg-type] else: - fields[sn].add((qname(p, g), formatliteral(o, g))) + # type error: Argument 1 to "qname" has incompatible type "Node"; expected "URIRef" + fields[sn].add((qname(p, g), formatliteral(o, g))) # type: ignore[arg-type] for u, n in nodes.items(): stream.write("# %s %s\n" % (u, n)) @@ -151,7 +159,8 @@ def color(p): ) stream.write( opstr - % (n, NODECOLOR, html.escape(label(u, g)), u, html.escape(u), "".join(f)) + # type error: Value of type 
variable "AnyStr" of "escape" cannot be "Node" + % (n, NODECOLOR, html.escape(label(u, g)), u, html.escape(u), "".join(f)) # type: ignore[type-var] ) stream.write("}\n") diff --git a/rdflib/tools/rdfpipe.py b/rdflib/tools/rdfpipe.py index f93138423..118cd8b98 100644 --- a/rdflib/tools/rdfpipe.py +++ b/rdflib/tools/rdfpipe.py @@ -1,13 +1,15 @@ #!/usr/bin/env python -# -*- coding: UTF-8 -*- + """ A commandline tool for parsing RDF in different formats and serializing the resulting graph to a chosen format. """ +from __future__ import annotations import logging import sys from optparse import OptionParser +from typing import BinaryIO, Optional import rdflib from rdflib import plugin @@ -189,7 +191,7 @@ def main(): pfx, uri = ns_kw.split("=") ns_bindings[pfx] = uri - outfile = sys.stdout.buffer + outfile: Optional[BinaryIO] = sys.stdout.buffer if opts.no_out: outfile = None diff --git a/rdflib/tools/rdfs2dot.py b/rdflib/tools/rdfs2dot.py index 4e639b48d..8368c9319 100644 --- a/rdflib/tools/rdfs2dot.py +++ b/rdflib/tools/rdfs2dot.py @@ -9,12 +9,16 @@ rdf2dot my_rdfs_file.rdf | dot -Tpng | display """ +from __future__ import annotations + import collections import itertools import sys +from typing import Dict import rdflib.extras.cmdlineutils from rdflib import RDF, RDFS, XSD +from rdflib.term import Identifier XSDTERMS = [ XSD[x] @@ -75,7 +79,7 @@ def rdfs2dot(g, stream, opts={}): """ fields = collections.defaultdict(set) - nodes = {} + nodes: Dict[Identifier, str] = {} def node(nd): if nd not in nodes: diff --git a/rdflib/util.py b/rdflib/util.py index 2442b3728..ab594b5be 100644 --- a/rdflib/util.py +++ b/rdflib/util.py @@ -1,5 +1,3 @@ -from __future__ import annotations - """ Some utility functions. 
@@ -22,6 +20,8 @@ """ +from __future__ import annotations + from calendar import timegm from os.path import splitext @@ -53,6 +53,7 @@ if TYPE_CHECKING: from rdflib.graph import Graph + __all__ = [ "list2set", "first", @@ -409,10 +410,10 @@ def _get_ext(fpath: str, lower: bool = True) -> str: def find_roots( - graph: "Graph", - prop: "rdflib.term.URIRef", - roots: Optional[Set["rdflib.term.Node"]] = None, -) -> Set["rdflib.term.Node"]: + graph: Graph, + prop: rdflib.term.URIRef, + roots: Optional[Set[rdflib.term.Node]] = None, +) -> Set[rdflib.term.Node]: """ Find the roots in some sort of transitive hierarchy. @@ -437,14 +438,14 @@ def find_roots( def get_tree( - graph: "Graph", - root: "rdflib.term.Node", - prop: "rdflib.term.URIRef", - mapper: Callable[["rdflib.term.Node"], "rdflib.term.Node"] = lambda x: x, + graph: Graph, + root: rdflib.term.Node, + prop: rdflib.term.URIRef, + mapper: Callable[[rdflib.term.Node], rdflib.term.Node] = lambda x: x, sortkey: Optional[Callable[[Any], Any]] = None, - done: Optional[Set["rdflib.term.Node"]] = None, + done: Optional[Set[rdflib.term.Node]] = None, dir: str = "down", -) -> Optional[Tuple["rdflib.term.Node", List[Any]]]: +) -> Optional[Tuple[rdflib.term.Node, List[Any]]]: """ Return a nested list/tuple structure representing the tree built by the transitive property given, starting from the root given @@ -486,15 +487,13 @@ def get_tree( @overload -def _coalesce(*args: Optional[_AnyT], default: _AnyT) -> _AnyT: - ... +def _coalesce(*args: Optional[_AnyT], default: _AnyT) -> _AnyT: ... @overload def _coalesce( *args: Optional[_AnyT], default: Optional[_AnyT] = ... -) -> Optional[_AnyT]: - ... +) -> Optional[_AnyT]: ... 
def _coalesce( diff --git a/rdflib/void.py b/rdflib/void.py index 8a123e5f5..fac16d010 100644 --- a/rdflib/void.py +++ b/rdflib/void.py @@ -1,12 +1,18 @@ +from __future__ import annotations + import collections +from typing import DefaultDict, Dict, Optional, Set -from rdflib.graph import Graph +from rdflib.graph import Graph, _ObjectType, _PredicateType, _SubjectType from rdflib.namespace import RDF, VOID -from rdflib.term import Literal, URIRef +from rdflib.term import IdentifiedNode, Literal, URIRef def generateVoID( # noqa: N802 - g, dataset=None, res=None, distinctForPartitions=True # noqa: N803 + g: Graph, + dataset: Optional[IdentifiedNode] = None, + res: Optional[Graph] = None, + distinctForPartitions: bool = True, # noqa: N803 ): """ Returns a new graph with a VoID description of the passed dataset @@ -27,18 +33,24 @@ def generateVoID( # noqa: N802 """ - typeMap = collections.defaultdict(set) # noqa: N806 - classes = collections.defaultdict(set) # noqa: N806 + typeMap: Dict[_SubjectType, Set[_SubjectType]] = ( # noqa: N806 + collections.defaultdict(set) + ) + classes: Dict[_ObjectType, Set[_SubjectType]] = collections.defaultdict(set) for e, c in g.subject_objects(RDF.type): classes[c].add(e) typeMap[e].add(c) triples = 0 - subjects = set() - objects = set() - properties = set() - classCount = collections.defaultdict(int) # noqa: N806 - propCount = collections.defaultdict(int) # noqa: N806 + subjects: Set[_SubjectType] = set() + objects: Set[_ObjectType] = set() + properties: Set[_PredicateType] = set() + classCount: DefaultDict[_SubjectType, int] = collections.defaultdict( # noqa: N806 + int + ) + propCount: DefaultDict[_PredicateType, int] = collections.defaultdict( # noqa: N806 + int + ) classProps = collections.defaultdict(set) # noqa: N806 classObjects = collections.defaultdict(set) # noqa: N806 diff --git a/run_tests.py b/run_tests.py index c8db26375..c3ef4acd4 100755 --- a/run_tests.py +++ b/run_tests.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python """ 
Testing with pytest ================= @@ -43,6 +42,6 @@ ) exit(1) - finalArgs = sys.argv[1:] + finalArgs = sys.argv[1:] # noqa: N816 print("Running pytest with:", json.dumps(finalArgs)) sys.exit(pytest.main(args=finalArgs)) diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index dd7baf900..000000000 --- a/setup.cfg +++ /dev/null @@ -1,28 +0,0 @@ -[options.package_data] -rdflib = py.typed - -# https://flake8.pycqa.org/en/latest/user/configuration.html -[flake8] -exclude = - .git, - __pycache__, - .venv, - .var, - .tox, - var, - .mypy_cache, - test/data/suites/, # does not contain python - test/jsonld/1.1/, # does not contain python - test/jsonld/test-suite/, # does not contain python - test/data/variants/, # does not contain python - test/data/translate_algebra/, # does not contain python - docs/rdf_terms.rst, # This file is causing an error on GitHub actions -extend-ignore = - # Disabled so that black can control line length. - E501, # line too long - # Disabled based on black recommendations - # https://black.readthedocs.io/en/stable/faq.html#why-are-flake8-s-e203-and-w503-violated - E203, # Whitespace before ':' - W503, # Line break occurred before a binary operator - # Disabled because this bumps heads with black - E231, # missing whitespace after ',' diff --git a/test/conftest.py b/test/conftest.py index 01153f9fa..f3431c927 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,15 +1,16 @@ +from __future__ import annotations + import sys from contextlib import ExitStack import pytest +# This is here so that asserts from these modules are formatted for human +# readibility. 
pytest.register_assert_rewrite("test.utils") -from pathlib import Path # noqa: E402 -from test.utils.audit import AuditHookDispatcher # noqa: E402 -from test.utils.http import ctx_http_server # noqa: E402 -from test.utils.httpfileserver import HTTPFileServer # noqa: E402 -from typing import ( # noqa: E402 +from pathlib import Path +from typing import ( Collection, Dict, Generator, @@ -20,16 +21,16 @@ ) from rdflib import Graph +from test.utils.audit import AuditHookDispatcher +from test.utils.http import ctx_http_server +from test.utils.httpfileserver import HTTPFileServer from .data import TEST_DATA_DIR -from .utils.earl import EARLReporter # noqa: E402 -from .utils.httpservermock import ServedBaseHTTPServerMock # noqa: E402 +from .utils.earl import EARLReporter +from .utils.httpservermock import ServedBaseHTTPServerMock pytest_plugins = [EARLReporter.__module__] -# This is here so that asserts from these modules are formatted for human -# readibility. - @pytest.fixture(scope="session") def http_file_server() -> Generator[HTTPFileServer, None, None]: diff --git a/test/data.py b/test/data.py index 779c522ae..f5f986782 100644 --- a/test/data.py +++ b/test/data.py @@ -1,39 +1,42 @@ from pathlib import Path from rdflib import URIRef -from rdflib.graph import Graph +from test.utils.graph import cached_graph TEST_DIR = Path(__file__).parent TEST_DATA_DIR = TEST_DIR / "data" -alice_uri = URIRef("http://example.org/alice") -bob_uri = URIRef("http://example.org/bob") - -michel = URIRef("urn:example:michel") -tarek = URIRef("urn:example:tarek") -bob = URIRef("urn:example:bob") -likes = URIRef("urn:example:likes") -hates = URIRef("urn:example:hates") -pizza = URIRef("urn:example:pizza") -cheese = URIRef("urn:example:cheese") - -context0 = URIRef("urn:example:context-0") -context1 = URIRef("urn:example:context-1") -context2 = URIRef("urn:example:context-2") - - -simple_triple_graph = Graph().add( - ( - URIRef("http://example.org/subject"), - 
URIRef("http://example.org/predicate"), - URIRef("http://example.org/object"), - ) -) -""" -A simple graph with a single triple. This is equivalent to the following RDF files: - -* ``test/data/variants/simple_triple.nq`` -* ``test/data/variants/simple_triple.nt`` -* ``test/data/variants/simple_triple.ttl`` -* ``test/data/variants/simple_triple.xml`` -""" +ALICE_URI = URIRef("http://example.org/alice") +BOB_URI = URIRef("http://example.org/bob") + +MICHEL = URIRef("urn:example:michel") +TAREK = URIRef("urn:example:tarek") +BOB = URIRef("urn:example:bob") +LIKES = URIRef("urn:example:likes") +HATES = URIRef("urn:example:hates") +PIZZA = URIRef("urn:example:pizza") +CHEESE = URIRef("urn:example:cheese") +CONTEXT0 = URIRef("urn:example:context-0") +CONTEXT1 = URIRef("urn:example:context-1") +CONTEXT2 = URIRef("urn:example:context-2") + + +SIMPLE_TRIPLE_GRAPH = cached_graph((TEST_DATA_DIR / "variants" / "simple_triple.py",)) + +__all__ = [ + "TEST_DIR", + "TEST_DATA_DIR", + "SIMPLE_TRIPLE_GRAPH", + "ALICE_URI", + "BOB_URI", + "MICHEL", + "TAREK", + "BOB", + "LIKES", + "HATES", + "PIZZA", + "CHEESE", + "CONTEXT0", + "CONTEXT1", + "CONTEXT2", +] diff --git a/test/data/fetcher.py b/test/data/fetcher.py index 1ea8e337c..c54c0bd66 100755 --- a/test/data/fetcher.py +++ b/test/data/fetcher.py @@ -1,4 +1,6 @@ #!/usr/bin/env python +from __future__ import annotations + import argparse import enum import logging diff --git a/test/data/suites/trix/test_trix.py b/test/data/suites/trix/test_trix.py index 5e553c5c8..8b7c08621 100644 --- a/test/data/suites/trix/test_trix.py +++ b/test/data/suites/trix/test_trix.py @@ -1,8 +1,8 @@ """This runs the TriX tests for RDFLib's informally-assembled TriX test suite.""" -from test.data import TEST_DATA_DIR -from test.utils.manifest import RDFTest, read_manifest -from test.utils.namespace import RDFT + +from __future__ import annotations + from typing import Callable, Dict import pytest @@ -11,6 +11,9 @@ from rdflib.compare import graph_diff, 
isomorphic from rdflib.namespace import split_uri from rdflib.term import Node, URIRef +from test.data import TEST_DATA_DIR +from test.utils.manifest import RDFTest, read_manifest +from test.utils.namespace import RDFT verbose = False diff --git a/test/data/suites/w3c/dawg-data-r2/sort/.manifest.ttl.swp b/test/data/suites/w3c/dawg-data-r2/sort/.manifest.ttl.swp deleted file mode 100644 index 9a14952b6..000000000 Binary files a/test/data/suites/w3c/dawg-data-r2/sort/.manifest.ttl.swp and /dev/null differ diff --git a/test/data/variants/blank_and_base_prefix-asserts.json b/test/data/variants/blank_and_base_prefix-meta.json similarity index 100% rename from test/data/variants/blank_and_base_prefix-asserts.json rename to test/data/variants/blank_and_base_prefix-meta.json diff --git a/test/data/variants/diverse_quads-asserts.json b/test/data/variants/diverse_quads-meta.json similarity index 100% rename from test/data/variants/diverse_quads-asserts.json rename to test/data/variants/diverse_quads-meta.json diff --git a/test/data/variants/diverse_quads-variant-typed.jsonld b/test/data/variants/diverse_quads-variant-typed.jsonld new file mode 100644 index 000000000..58cb3fb83 --- /dev/null +++ b/test/data/variants/diverse_quads-variant-typed.jsonld @@ -0,0 +1,73 @@ +{ + "@graph": [ + { + "@graph": [ + { + "@id": "egschema:subject", + "egschema:predicate": [ + 12, + { + "@id": "egschema:object" + } + ] + }, + { + "@id": "eghttp:subject", + "predicate": { + "@language": "jpx", + "@value": "日本語の表記体系" + } + }, + { + "@id": "egurn:subject", + "egschema:predicate": { + "@id": "egschema:subject" + } + } + ], + "@id": "egschema:graph" + }, + { + "@id": "egschema:subject", + "egschema:predicate": { + "@id": "egschema:object" + } + }, + { + "@id": "eghttp:subject", + "predicate": "typeless" + }, + { + "@graph": [ + { + "@id": "egschema:subject", + "egschema:predicate": { + "@id": "egschema:object" + }, + "predicate": [ + {"@value": "XSD string", "@type": "xsd:string"}, + { + "@id": 
"eghttp:object" + } + ] + } + ], + "@id": "egurn:graph" + }, + { + "@id": "egurn:subject", + "egurn:predicate": { + "@id": "egurn:object" + } + } + ], + "@context": { + "predicate": { + "@id": "http://example.com/predicate" + }, + "egurn": "urn:example:", + "xsd": "http://www.w3.org/2001/XMLSchema#", + "egschema": "example:", + "eghttp": "http://example.com/" + } +} diff --git a/test/data/variants/diverse_quads-variant-typed.nq b/test/data/variants/diverse_quads-variant-typed.nq new file mode 100644 index 000000000..574bd5f31 --- /dev/null +++ b/test/data/variants/diverse_quads-variant-typed.nq @@ -0,0 +1,10 @@ + "日本語の表記体系"@jpx . + . + . + "12"^^ . + . + . + "XSD string"^^ . + . + "typeless" . + . diff --git a/test/data/variants/diverse_quads.py b/test/data/variants/diverse_quads.py new file mode 100644 index 000000000..4c92851f1 --- /dev/null +++ b/test/data/variants/diverse_quads.py @@ -0,0 +1,30 @@ +from rdflib.graph import ConjunctiveGraph, Graph +from rdflib.namespace import XSD +from rdflib.term import Literal +from test.utils.namespace import EGDC, EGSCHEME, EGURN + + +def populate_graph(graph: Graph) -> None: + assert isinstance(graph, ConjunctiveGraph) + + graph.add((EGSCHEME.subject, EGSCHEME.predicate, EGSCHEME.object)) + graph.add((EGDC.subject, EGDC.predicate, Literal("typeless"))) + graph.add((EGURN.subject, EGURN.predicate, EGURN.object)) + + egscheme_graph = graph.get_context(EGSCHEME.graph) + egscheme_graph.add( + (EGDC.subject, EGDC.predicate, Literal("日本語の表記体系", lang="jpx")) + ) + egscheme_graph.add((EGURN.subject, EGSCHEME.predicate, EGSCHEME.subject)) + egscheme_graph.add((EGSCHEME.subject, EGSCHEME.predicate, EGSCHEME.object)) + egscheme_graph.add((EGSCHEME.subject, EGSCHEME.predicate, Literal(12))) + + egurn_graph = graph.get_context(EGURN.graph) + egurn_graph.add((EGSCHEME.subject, EGSCHEME.predicate, EGSCHEME.object)) + egurn_graph.add((EGSCHEME.subject, EGDC.predicate, EGDC.object)) + egurn_graph.add( + (EGSCHEME.subject, EGDC.predicate, 
Literal("XSD string", datatype=XSD.string)) + ) + + +__all__ = ["populate_graph"] diff --git a/test/data/variants/diverse_triples-asserts.json b/test/data/variants/diverse_triples-meta.json similarity index 100% rename from test/data/variants/diverse_triples-asserts.json rename to test/data/variants/diverse_triples-meta.json diff --git a/test/data/variants/diverse_triples.py b/test/data/variants/diverse_triples.py new file mode 100644 index 000000000..c77675278 --- /dev/null +++ b/test/data/variants/diverse_triples.py @@ -0,0 +1,17 @@ +from rdflib.graph import Graph +from rdflib.term import Literal +from test.utils.namespace import EGDC, EGSCHEME, EGURN + + +def populate_graph(graph: Graph) -> None: + assert isinstance(graph, Graph) + + graph.add((EGDC.subject, EGDC.predicate, Literal("日本語の表記体系", lang="jpx"))) + graph.add((EGURN.subject, EGSCHEME.predicate, EGSCHEME.subject)) + + graph.add((EGSCHEME.object, EGDC.predicate, Literal("XSD string"))) + graph.add((EGSCHEME.subject, EGSCHEME.predicate, EGSCHEME.object)) + graph.add((EGSCHEME.subject, EGSCHEME.predicate, Literal(12))) + + +__all__ = ["populate_graph"] diff --git a/test/data/variants/forward_slash-asserts.json b/test/data/variants/forward_slash-meta.json similarity index 100% rename from test/data/variants/forward_slash-asserts.json rename to test/data/variants/forward_slash-meta.json diff --git a/test/data/variants/more_quads-asserts.json b/test/data/variants/more_quads-meta.json similarity index 100% rename from test/data/variants/more_quads-asserts.json rename to test/data/variants/more_quads-meta.json diff --git a/test/data/variants/rdf11trig_eg2-asserts.json b/test/data/variants/rdf11trig_eg2-meta.json similarity index 100% rename from test/data/variants/rdf11trig_eg2-asserts.json rename to test/data/variants/rdf11trig_eg2-meta.json diff --git a/test/data/variants/rdf_prefix-asserts.json b/test/data/variants/rdf_prefix-meta.json similarity index 100% rename from 
test/data/variants/rdf_prefix-asserts.json rename to test/data/variants/rdf_prefix-meta.json diff --git a/test/data/variants/relative_triple-meta.json b/test/data/variants/relative_triple-meta.json new file mode 100644 index 000000000..b70798523 --- /dev/null +++ b/test/data/variants/relative_triple-meta.json @@ -0,0 +1,8 @@ +{ + "public_id": "http://example.org/variants/relative_triples", + "quad_count": 1, + "exact_match" : true, + "has_subject_iris": [ + "http://example.org/variants/path/subject" + ] +} diff --git a/test/data/variants/relative_triple.nt b/test/data/variants/relative_triple.nt new file mode 100644 index 000000000..dd393cec6 --- /dev/null +++ b/test/data/variants/relative_triple.nt @@ -0,0 +1 @@ + . diff --git a/test/data/variants/relative_triple.ttl b/test/data/variants/relative_triple.ttl new file mode 100644 index 000000000..f9cbabd61 --- /dev/null +++ b/test/data/variants/relative_triple.ttl @@ -0,0 +1,3 @@ +@prefix anchor: . + +anchor:subject anchor:predicate anchor:object . 
diff --git a/test/data/variants/schema_only_base-asserts.json b/test/data/variants/schema_only_base-meta.json similarity index 100% rename from test/data/variants/schema_only_base-asserts.json rename to test/data/variants/schema_only_base-meta.json diff --git a/test/data/variants/simple_quad-asserts.json b/test/data/variants/simple_quad-meta.json similarity index 100% rename from test/data/variants/simple_quad-asserts.json rename to test/data/variants/simple_quad-meta.json diff --git a/test/data/variants/simple_quad.py b/test/data/variants/simple_quad.py new file mode 100644 index 000000000..8d5cccc77 --- /dev/null +++ b/test/data/variants/simple_quad.py @@ -0,0 +1,12 @@ +from rdflib.graph import ConjunctiveGraph, Graph +from test.utils.namespace import EGDO + + +def populate_graph(graph: Graph) -> None: + assert isinstance(graph, ConjunctiveGraph) + + egdo_graph = graph.get_context(EGDO.graph) + egdo_graph.add((EGDO.subject, EGDO.predicate, EGDO.object)) + + +__all__ = ["populate_graph"] diff --git a/test/data/variants/simple_triple-asserts.json b/test/data/variants/simple_triple-meta.json similarity index 100% rename from test/data/variants/simple_triple-asserts.json rename to test/data/variants/simple_triple-meta.json diff --git a/test/data/variants/simple_triple.py b/test/data/variants/simple_triple.py new file mode 100644 index 000000000..369aca359 --- /dev/null +++ b/test/data/variants/simple_triple.py @@ -0,0 +1,9 @@ +from rdflib.graph import Graph +from test.utils.namespace import EGDO + + +def populate_graph(graph: Graph) -> None: + graph.add((EGDO.subject, EGDO.predicate, EGDO.object)) + + +__all__ = ["populate_graph"] diff --git a/test/data/variants/special_chars-asserts.json b/test/data/variants/special_chars-meta.json similarity index 100% rename from test/data/variants/special_chars-asserts.json rename to test/data/variants/special_chars-meta.json diff --git a/test/data/variants/xml_literal-asserts.json b/test/data/variants/xml_literal-meta.json 
similarity index 100% rename from test/data/variants/xml_literal-asserts.json rename to test/data/variants/xml_literal-meta.json diff --git a/test/jsonld/__init__.py b/test/jsonld/__init__.py index b082da4f8..8c3c44665 100644 --- a/test/jsonld/__init__.py +++ b/test/jsonld/__init__.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import List from rdflib import parser, plugin, serializer diff --git a/test/jsonld/runner.py b/test/jsonld/runner.py index 77a80ed4f..a8237fd95 100644 --- a/test/jsonld/runner.py +++ b/test/jsonld/runner.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- import json from rdflib import BNode, ConjunctiveGraph @@ -23,6 +22,9 @@ def _preserving_nodeid(self, bnode_context=None): def make_fake_urlinputsource(input_uri, format=None, suite_base=None, options={}): local_url = input_uri.replace("https://w3c.github.io/json-ld-api/tests/", "./") + if (index := local_url.find("#")) > -1: + # Strip off the optional fragment identifier + local_url = local_url[0:index] try: f = open(local_url, "rb") except FileNotFoundError: @@ -34,6 +36,8 @@ def make_fake_urlinputsource(input_uri, format=None, suite_base=None, options={} source.links = [] if local_url.endswith((".jsonld", ".jldt")): source.content_type = "application/ld+json" + elif local_url.endswith(".html"): + source.content_type = "text/html" else: source.content_type = "application/json" source.format = format @@ -173,6 +177,52 @@ def do_test_serializer(suite_base, cat, num, inputpath, expectedpath, context, o _compare_json(expected_json, result_json) +def do_test_html(suite_base, cat, num, inputpath, expectedpath, context, options): + input_uri = suite_base + inputpath + input_graph = ConjunctiveGraph() + + input_src = make_fake_urlinputsource( + input_uri, format="json-ld", suite_base=suite_base, options=options + ) + + context = _load_json(context) if context else context + + # Get test options from the manifest + base = options.get("base", input_src.getPublicId()) + 
extract_all_scripts = options.get("extractAllScripts", False) + + p = JsonLDParser() + p.parse( + input_src, + input_graph, + base=base, + context=context, + generalized_rdf=True, + extract_all_scripts=extract_all_scripts, + ) + + if expectedpath.endswith(".nq"): + expected_graph = _load_nquads(expectedpath) + + elif expectedpath.endswith(".jsonld"): + expected_graph = ConjunctiveGraph() + with open(expectedpath) as f: + data = f.read() + expected_graph.parse(data=data, format="json-ld") + + # TODO: Change test from graph comparison to json comparison + # The html test cases combine testing for JSON-LD extraction from the HTML + # along with testing for other algorithms (compact/flatten), which we do + # not currently support. In order to test extraction only, we currently + # perform a graph comparison. Consider changing this to a json comparison + # once the processing algorithms are implemented. + + assert isomorphic(input_graph, expected_graph), "Expected:\n%s\nGot:\n%s" % ( + expected_graph.serialize(), + input_graph.serialize(), + ) + + def _load_nquads(source): graph = ConjunctiveGraph() with open(source) as f: diff --git a/test/jsonld/test_api.py b/test/jsonld/test_api.py index 5beab1fd9..eb8a0cf80 100644 --- a/test/jsonld/test_api.py +++ b/test/jsonld/test_api.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from rdflib import Graph, Literal, URIRef diff --git a/test/jsonld/test_compaction.py b/test/jsonld/test_compaction.py index e76de5580..c565c7e0e 100644 --- a/test/jsonld/test_compaction.py +++ b/test/jsonld/test_compaction.py @@ -1,8 +1,9 @@ -# -*- coding: UTF-8 -*- +from __future__ import annotations import itertools import json import re +from typing import Any, Dict, List, Tuple import pytest @@ -13,11 +14,11 @@ register("json-ld", Serializer, "rdflib.plugins.serializers.jsonld", "JsonLDSerializer") -cases = [] +cases: List[Tuple[str, Dict[str, Any]]] = [] -def case(*args): - cases.append(args) +def case(source: str, data: Dict[str, Any]): + 
cases.append((source, data)) case( diff --git a/test/jsonld/test_context.py b/test/jsonld/test_context.py index c26fcb0ca..cdecd6363 100644 --- a/test/jsonld/test_context.py +++ b/test/jsonld/test_context.py @@ -2,6 +2,8 @@ JSON-LD Context Spec """ +from __future__ import annotations + import json from functools import wraps from pathlib import Path @@ -19,7 +21,7 @@ def _try_wrapper(f): def _try(): try: f() - assert e == expected_error + assert e == expected_error # noqa: F821 except Exception as e: success = e == expected_error else: @@ -142,7 +144,7 @@ def _mock_source_loader(f): @wraps(f) def _wrapper(): try: - context.source_to_json = SOURCES.get + context.source_to_json = lambda source: (SOURCES.get(source), None) f() finally: context.source_to_json = _source_to_json @@ -208,7 +210,7 @@ def test_ignore_base_remote_context(): def test_recursive_context_inclusion_error(): ctx_url = "http://example.org/recursive.jsonld" SOURCES[ctx_url] = {"@context": ctx_url} - ctx = Context(ctx_url) + ctx = Context(ctx_url) # noqa: F841 @_expect_exception(errors.INVALID_REMOTE_CONTEXT) @@ -216,7 +218,7 @@ def test_recursive_context_inclusion_error(): def test_invalid_remote_context(): ctx_url = "http://example.org/recursive.jsonld" SOURCES[ctx_url] = {"key": "value"} - ctx = Context(ctx_url) + ctx = Context(ctx_url) # noqa: F841 def test_file_source(tmp_path: Path) -> None: diff --git a/test/jsonld/test_named_graphs.py b/test/jsonld/test_named_graphs.py index 1d1bd6265..5705a847a 100644 --- a/test/jsonld/test_named_graphs.py +++ b/test/jsonld/test_named_graphs.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from rdflib import ConjunctiveGraph, Dataset, Graph, URIRef data = """ diff --git a/test/jsonld/test_nested_arrays.py b/test/jsonld/test_nested_arrays.py new file mode 100644 index 000000000..1207f0e37 --- /dev/null +++ b/test/jsonld/test_nested_arrays.py @@ -0,0 +1,71 @@ +from __future__ import annotations + +from rdflib import Graph, Literal +from rdflib.collection import 
Collection +from test.utils.namespace import EGDC + +prop = EGDC["props/a"] +res = EGDC["res"] + +DATA_NO_CONTAINER = """ +{ + "@context": { + "egdc": "http://example.com/", + "a": { + "@id": "egdc:props/a" + } + }, + "a": [ + [[1, 2, 3], ["4", 5]], + 6, + [7, { "@id": "egdc:res" }] + ] +} +""" + +DATA_LIST = """ +{ + "@context": { + "egdc": "http://example.com/", + "a": { + "@id": "egdc:props/a", + "@container": "@list" + } + }, + "a": [ + [[1, 2, 3], ["4", 5]], + 6, + [7, { "@id": "egdc:res" }] + ] +} +""" + + +def test_container_list(): + g = Graph() + g.parse(data=DATA_LIST, format="application/ld+json") + + outer = Collection(g, next(g.objects(predicate=prop))) + assert len(outer) == 3 + inner1, inner2, inner3 = outer + + inner1 = Collection(g, inner1) + inner1_1, inner1_2 = map(lambda l: Collection(g, l), inner1) # noqa: E741 + assert list(inner1_1) == [Literal(x) for x in (1, 2, 3)] + assert list(inner1_2) == [Literal(x) for x in ("4", 5)] + + assert inner2 == Literal(6) + + inner3 = Collection(g, inner3) + assert list(inner3) == [Literal(7), res] + + +def test_no_container(): + g = Graph() + g.parse(data=DATA_NO_CONTAINER, format="application/ld+json") + + assert len(g) == 8 + + objects = set(g.objects(predicate=prop)) + assert len(objects) == 8 + assert objects == set([Literal(x) for x in (1, 2, 3, "4", 5, 6, 7)] + [res]) diff --git a/test/jsonld/test_onedotone.py b/test/jsonld/test_onedotone.py index 4c555d1ec..840a657cb 100644 --- a/test/jsonld/test_onedotone.py +++ b/test/jsonld/test_onedotone.py @@ -1,5 +1,8 @@ +from __future__ import annotations + import json import os +import re from os import chdir, environ, getcwd from os import path as p from typing import Tuple @@ -21,7 +24,6 @@ "remote", ) unsupported_tests += ("flatten", "compact", "expand") -unsupported_tests += ("html",) unsupported_tests += ("fromRdf",) # The JSON-LD 1.1 enhancement applies to parsing only known_bugs: Tuple[str, ...] 
= ( @@ -136,7 +138,8 @@ "toRdf/tn02-in", # TODO: Rdflib should silently reject bad predicate URIs "toRdf/wf02-in", - # TODO: we don't extract context or json-ld that's embedded in HTML + # TODO: Determine why f004 expects to extract all scripts + "html/f004-in", "remote-doc/0013-in", "remote-doc/la01-in", "remote-doc/la02-in", @@ -191,7 +194,7 @@ def read_manifest(skiptests): else: inputpath = test.get("input") expectedpath = test.get("expect") - expected_error = test.get("expect") # TODO: verify error + expected_error = test.get("expect") # TODO: verify error # noqa: F841 context = test.get("context", False) options = test.get("option") or {} if expectedpath: @@ -217,6 +220,10 @@ def get_test_suite_cases(): func = runner.do_test_json else: # toRdf func = runner.do_test_parser + elif re.search( + r"\.html(#.*)?$", inputpath + ): # html (with optional fragment identifier) + func = runner.do_test_html else: # fromRdf func = runner.do_test_serializer rdf_test_uri = URIRef("{0}{1}-manifest#t{2}".format(TC_BASE, cat, num)) diff --git a/test/jsonld/test_testsuite.py b/test/jsonld/test_testsuite.py index 2be7a64b0..25da92327 100644 --- a/test/jsonld/test_testsuite.py +++ b/test/jsonld/test_testsuite.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json from os import chdir, environ, getcwd from os import path as p @@ -72,7 +74,7 @@ def read_manifest(skiptests): else: inputpath = test.get("input") expectedpath = test.get("expect") - expected_error = test.get("expect") # TODO: verify error + expected_error = test.get("expect") # TODO: verify error # noqa: F841 context = test.get("context", False) options = test.get("option") or {} if expectedpath: diff --git a/test/plugins/parser/example/rdflib/plugin/parser/__init__.py b/test/plugins/parser/example/rdflib/plugin/parser/__init__.py index a6aefa291..715bc0ec9 100644 --- a/test/plugins/parser/example/rdflib/plugin/parser/__init__.py +++ b/test/plugins/parser/example/rdflib/plugin/parser/__init__.py @@ -1,10 
+1,12 @@ +from __future__ import annotations + from typing import TYPE_CHECKING, Set, Tuple +from rdflib.namespace import Namespace from rdflib.parser import Parser if TYPE_CHECKING: from rdflib.graph import Graph - from rdflib.namespace import Namespace from rdflib.parser import InputSource from rdflib.term import URIRef @@ -13,19 +15,16 @@ class ExampleParser(Parser): def __init__(self): super().__init__() - def parse(self, source: "InputSource", sink: "Graph"): + def parse(self, source: InputSource, sink: Graph): for triple in self.constant_output(): sink.add(triple) @classmethod - def namespace(cls) -> "Namespace": + def namespace(cls) -> Namespace: return Namespace("example:rdflib:plugin:parser:") @classmethod def constant_output( cls, - ) -> Set[Tuple["URIRef", "URIRef", "URIRef"]]: + ) -> Set[Tuple[URIRef, URIRef, URIRef]]: return {(cls.namespace().subj, cls.namespace().pred, cls.namespace().obj)} - - -from rdflib.namespace import Namespace diff --git a/test/plugins/sparqleval/example/rdflib/plugin/sparqleval/__init__.py b/test/plugins/sparqleval/example/rdflib/plugin/sparqleval/__init__.py index a4a87725b..c21086e2c 100644 --- a/test/plugins/sparqleval/example/rdflib/plugin/sparqleval/__init__.py +++ b/test/plugins/sparqleval/example/rdflib/plugin/sparqleval/__init__.py @@ -1,5 +1,10 @@ from typing import Any +from rdflib import Namespace +from rdflib.plugins.sparql.evaluate import evalPart +from rdflib.plugins.sparql.evalutils import _eval +from rdflib.plugins.sparql.sparql import SPARQLError + def custom_eval_extended(ctx: Any, extend: Any) -> Any: for c in evalPart(ctx, extend.p): @@ -24,11 +29,6 @@ def custom_eval(ctx: Any, part: Any) -> Any: raise NotImplementedError() -from rdflib import Namespace -from rdflib.plugins.sparql.evaluate import evalPart -from rdflib.plugins.sparql.evalutils import _eval -from rdflib.plugins.sparql.sparql import SPARQLError - namespace = Namespace("example:rdflib:plugin:sparqleval:") function_uri = namespace["function"] 
function_result = namespace["result"] diff --git a/test/test_conjunctivegraph/test_conjunctive_graph.py b/test/test_conjunctivegraph/test_conjunctive_graph.py index bbaedcdee..0fdb0af1d 100644 --- a/test/test_conjunctivegraph/test_conjunctive_graph.py +++ b/test/test_conjunctivegraph/test_conjunctive_graph.py @@ -2,7 +2,6 @@ Tests for ConjunctiveGraph that do not depend on the underlying store """ - import pytest from rdflib import ConjunctiveGraph, Graph @@ -58,6 +57,13 @@ def test_context_namespaces(): assert ("ex", ns) in g.namespace_manager.namespaces() +def test_deprecated(): + with pytest.warns( + DeprecationWarning, match="ConjunctiveGraph is deprecated, use Dataset instead." + ): + ConjunctiveGraph() + + def get_graph_ids_tests(): def check(kws): cg = ConjunctiveGraph() diff --git a/test/test_conjunctivegraph/test_conjunctivegraph_generators.py b/test/test_conjunctivegraph/test_conjunctivegraph_generators.py index 853320dd3..1cdc73358 100644 --- a/test/test_conjunctivegraph/test_conjunctivegraph_generators.py +++ b/test/test_conjunctivegraph/test_conjunctivegraph_generators.py @@ -1,7 +1,7 @@ import os -from test.data import TEST_DATA_DIR from rdflib import ConjunctiveGraph, URIRef +from test.data import TEST_DATA_DIR timblcardn3 = open(os.path.join(TEST_DATA_DIR, "timbl-card.n3")).read() diff --git a/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py b/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py index dd95dd7b0..f1914b56d 100644 --- a/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py +++ b/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py @@ -1,102 +1,111 @@ import os -from test.data import TEST_DATA_DIR, cheese, likes, michel, pizza, tarek from rdflib import ConjunctiveGraph, Graph +from test.data import CHEESE, LIKES, MICHEL, PIZZA, TAREK, TEST_DATA_DIR sportquadstrig = open(os.path.join(TEST_DATA_DIR, "sportquads.trig")).read() def 
test_operators_with_conjunctivegraph_and_graph(): cg = ConjunctiveGraph() - cg.add((tarek, likes, pizza)) - cg.add((tarek, likes, michel)) + cg.add((TAREK, LIKES, PIZZA)) + cg.add((TAREK, LIKES, MICHEL)) g = Graph() - g.add([tarek, likes, pizza]) - g.add([tarek, likes, cheese]) + g.add([TAREK, LIKES, PIZZA]) + g.add([TAREK, LIKES, CHEESE]) - assert len(cg + g) == 3 # adds cheese as liking + assert len(cg + g) == 3 # adds CHEESE as liking - assert len(cg - g) == 1 # removes pizza + assert len(cg - g) == 1 # removes PIZZA - assert len(cg * g) == 1 # only pizza + assert len(cg * g) == 1 # only PIZZA - assert len(cg ^ g) == 2 # removes pizza, adds cheese + assert len(cg ^ g) == 2 # removes PIZZA, adds CHEESE def test_reversed_operators_with_conjunctivegraph_and_graph(): cg = ConjunctiveGraph() - cg.add((tarek, likes, pizza)) - cg.add((tarek, likes, michel)) + cg.add((TAREK, LIKES, PIZZA)) + cg.add((TAREK, LIKES, MICHEL)) g = Graph() - g.add([tarek, likes, pizza]) - g.add([tarek, likes, cheese]) + g.add([TAREK, LIKES, PIZZA]) + g.add([TAREK, LIKES, CHEESE]) - assert len(g + cg) == 3 # adds cheese as liking + assert len(g + cg) == 3 # adds CHEESE as liking - assert len(g - cg) == 1 # removes pizza + assert len(g - cg) == 1 # removes PIZZA - assert len(g * cg) == 1 # only pizza + assert len(g * cg) == 1 # only PIZZA - assert len(g ^ cg) == 2 # removes pizza, adds cheese + assert len(g ^ cg) == 2 # removes PIZZA, adds CHEESE def test_reversed_operators_with_conjunctivegraph_with_contexts_and_graph(): cg = ConjunctiveGraph() - cg.add((tarek, likes, pizza)) - cg.add((tarek, likes, michel)) + cg.add((TAREK, LIKES, PIZZA)) + cg.add((TAREK, LIKES, MICHEL)) cg.parse(data=sportquadstrig, format="trig") g = Graph() - g.add([tarek, likes, pizza]) - g.add([tarek, likes, cheese]) + g.add([TAREK, LIKES, PIZZA]) + g.add([TAREK, LIKES, CHEESE]) - assert len(g + cg) == 10 # adds cheese as liking plus sevenquads + assert len(g + cg) == 10 # adds CHEESE as liking plus sevenquads assert 
len(list((g + cg).triples((None, None, None)))) == 10 - assert len(g - cg) == 1 # removes pizza + assert len(g - cg) == 1 # removes PIZZA - assert len(g * cg) == 1 # only pizza + assert len(g * cg) == 1 # only PIZZA - assert len(g ^ cg) == 9 # removes pizza, adds cheese and sevenquads + assert len(g ^ cg) == 9 # removes PIZZA, adds CHEESE and sevenquads def test_operators_with_two_conjunctivegraphs(): cg1 = ConjunctiveGraph() - cg1.add([tarek, likes, pizza]) - cg1.add([tarek, likes, michel]) + cg1.add([TAREK, LIKES, PIZZA]) + cg1.add([TAREK, LIKES, MICHEL]) cg2 = ConjunctiveGraph() - cg2.add([tarek, likes, pizza]) - cg2.add([tarek, likes, cheese]) + cg2.add([TAREK, LIKES, PIZZA]) + cg2.add([TAREK, LIKES, CHEESE]) - assert len(cg1 + cg2) == 3 # adds cheese as liking + assert len(cg1 + cg2) == 3 # adds CHEESE as liking - assert len(cg1 - cg2) == 1 # removes pizza from cg1 + assert len(cg1 - cg2) == 1 # removes PIZZA from cg1 - assert len(cg1 * cg2) == 1 # only pizza + assert len(cg1 * cg2) == 1 # only PIZZA - assert len(cg1 + cg2) == 3 # adds cheese as liking + assert len(cg1 + cg2) == 3 # adds CHEESE as liking - assert len(cg1 ^ cg2) == 2 # removes pizza, adds cheese + assert len(cg1 ^ cg2) == 2 # removes PIZZA, adds CHEESE def test_operators_with_two_conjunctivegraphs_one_with_contexts(): cg1 = ConjunctiveGraph() - cg1.add([tarek, likes, pizza]) - cg1.add([tarek, likes, michel]) + cg1.add([TAREK, LIKES, PIZZA]) + cg1.add([TAREK, LIKES, MICHEL]) cg2 = ConjunctiveGraph() - cg2.add([tarek, likes, pizza]) - cg2.add([tarek, likes, cheese]) + cg2.add([TAREK, LIKES, PIZZA]) + cg2.add([TAREK, LIKES, CHEESE]) cg2.parse(data=sportquadstrig, format="trig") - assert len(cg1 + cg2) == 10 # adds cheese as liking and all seven quads + assert len(cg1 + cg2) == 10 # adds CHEESE as liking and all seven quads - assert len(cg1 - cg2) == 1 # removes pizza + assert len(cg1 - cg2) == 1 # removes PIZZA - assert len(cg1 * cg2) == 1 # only pizza + assert len(cg1 * cg2) == 1 # only PIZZA - 
assert len(cg1 ^ cg2) == 9 # removes pizza + assert len(cg1 ^ cg2) == 9 # removes PIZZA + + +def test_operators_returning_correct_type(): + g1 = ConjunctiveGraph() + g2 = ConjunctiveGraph() + assert type(g1 + g2) is ConjunctiveGraph + assert type(g1 - g2) is ConjunctiveGraph + assert type(g1 * g2) is ConjunctiveGraph + assert type(g1 ^ g2) is ConjunctiveGraph diff --git a/test/test_dataset/test_dataset.py b/test/test_dataset/test_dataset.py index 18c2920ee..19b9fe830 100644 --- a/test/test_dataset/test_dataset.py +++ b/test/test_dataset/test_dataset.py @@ -1,15 +1,15 @@ -# -*- coding: utf-8 -*- import os import shutil import tempfile -from test.data import context1, likes, pizza, tarek +import warnings import pytest from rdflib import URIRef, plugin from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph -from rdflib.namespace import Namespace from rdflib.store import Store +from test.data import CONTEXT1, LIKES, PIZZA, TAREK +from test.utils.namespace import EGSCHEME # Will also run SPARQLUpdateStore tests against local SPARQL1.1 endpoint if # available. This assumes SPARQL1.1 query/update endpoints running locally at @@ -54,7 +54,7 @@ def get_dataset(request): store = request.param try: - dataset = Dataset(store=store) + dataset = Dataset(store=store) # noqa: F841 except ImportError: pytest.skip("Dependencies for store '%s' not available!" % store) @@ -115,30 +115,30 @@ def test_graph_aware(get_dataset): if not dataset.store.graph_aware: return - g1 = dataset.graph(context1) + g1 = dataset.graph(CONTEXT1) # Some SPARQL endpoint backends (e.g. 
TDB) do not consider # empty named graphs if store != "SPARQLUpdateStore": # added graph exists assert set(x.identifier for x in dataset.contexts()) == set( - [context1, DATASET_DEFAULT_GRAPH_ID] + [CONTEXT1, DATASET_DEFAULT_GRAPH_ID] ) # added graph is empty assert len(g1) == 0 - g1.add((tarek, likes, pizza)) + g1.add((TAREK, LIKES, PIZZA)) # added graph still exists assert set(x.identifier for x in dataset.contexts()) == set( - [context1, DATASET_DEFAULT_GRAPH_ID] + [CONTEXT1, DATASET_DEFAULT_GRAPH_ID] ) # added graph contains one triple assert len(g1) == 1 - g1.remove((tarek, likes, pizza)) + g1.remove((TAREK, LIKES, PIZZA)) # added graph is empty assert len(g1) == 0 @@ -148,10 +148,10 @@ def test_graph_aware(get_dataset): if store != "SPARQLUpdateStore": # graph still exists, although empty assert set(x.identifier for x in dataset.contexts()) == set( - [context1, DATASET_DEFAULT_GRAPH_ID] + [CONTEXT1, DATASET_DEFAULT_GRAPH_ID] ) - dataset.remove_graph(context1) + dataset.remove_graph(CONTEXT1) # graph is gone assert set(x.identifier for x in dataset.contexts()) == set( @@ -170,7 +170,7 @@ def test_default_graph(get_dataset): "is supported by your SPARQL endpoint" ) - dataset.add((tarek, likes, pizza)) + dataset.add((TAREK, LIKES, PIZZA)) assert len(dataset) == 1 # only default exists assert list(dataset.contexts()) == [dataset.default_context] @@ -193,11 +193,11 @@ def test_not_union(get_dataset): "its default graph as the union of the named graphs" ) - subgraph1 = dataset.graph(context1) - subgraph1.add((tarek, likes, pizza)) + subgraph1 = dataset.graph(CONTEXT1) + subgraph1.add((TAREK, LIKES, PIZZA)) - assert list(dataset.objects(tarek, None)) == [] - assert list(subgraph1.objects(tarek, None)) == [pizza] + assert list(dataset.objects(TAREK, None)) == [] + assert list(subgraph1.objects(TAREK, None)) == [PIZZA] def test_iter(get_dataset): @@ -232,9 +232,6 @@ def test_iter(get_dataset): assert i_new == i_trad # both should be 3 -EGSCHEMA = Namespace("example:") 
- - def test_subgraph_without_identifier() -> None: """ Graphs with no identifies assigned are identified by Skolem IRIs with a @@ -257,7 +254,7 @@ def test_subgraph_without_identifier() -> None: ) subgraph: Graph = dataset.graph() - subgraph.add((EGSCHEMA["subject"], EGSCHEMA["predicate"], EGSCHEMA["object"])) + subgraph.add((EGSCHEME["subject"], EGSCHEME["predicate"], EGSCHEME["object"])) namespaces = set(nman.namespaces()) assert next( @@ -265,3 +262,14 @@ def test_subgraph_without_identifier() -> None: ) == ("genid", genid_prefix) assert f"{subgraph.identifier}".startswith(genid_prefix) + + +def test_not_deprecated(): + """ + Ensure Dataset does not trigger the deprecation warning + from the ConjunctiveGraph superclass. + """ + + with warnings.catch_warnings(): + warnings.simplefilter("error") + Dataset() diff --git a/test/test_dataset/test_dataset_default_graph.py b/test/test_dataset/test_dataset_default_graph.py index fb219770c..a1de4ebbe 100644 --- a/test/test_dataset/test_dataset_default_graph.py +++ b/test/test_dataset/test_dataset_default_graph.py @@ -1,6 +1,7 @@ +from __future__ import annotations + import itertools import logging -from test.data import TEST_DATA_DIR from typing import Iterable, Type, Union import pytest @@ -8,6 +9,7 @@ from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, ConjunctiveGraph, Dataset from rdflib.term import BNode, URIRef +from test.data import TEST_DATA_DIR def make_load_default_and_named() -> Iterable[ParameterSet]: diff --git a/test/test_dataset/test_dataset_generators.py b/test/test_dataset/test_dataset_generators.py index 165ba9b1b..1b3bd313c 100644 --- a/test/test_dataset/test_dataset_generators.py +++ b/test/test_dataset/test_dataset_generators.py @@ -1,23 +1,23 @@ import os -from test.data import TEST_DATA_DIR, bob, cheese, hates, likes, michel, pizza, tarek from rdflib import Dataset, URIRef +from test.data import BOB, CHEESE, HATES, LIKES, MICHEL, PIZZA, TAREK, TEST_DATA_DIR timblcardn3 = 
open(os.path.join(TEST_DATA_DIR, "timbl-card.n3")).read() def add_stuff(graph): - graph.add((tarek, likes, pizza)) - graph.add((tarek, likes, cheese)) - graph.add((tarek, likes, bob)) - graph.add((tarek, likes, michel)) - graph.add((michel, likes, pizza)) - graph.add((michel, likes, cheese)) - graph.add((michel, likes, tarek)) - graph.add((bob, likes, cheese)) - graph.add((bob, hates, pizza)) - graph.add((bob, hates, michel)) - graph.add((bob, likes, tarek)) + graph.add((TAREK, LIKES, PIZZA)) + graph.add((TAREK, LIKES, CHEESE)) + graph.add((TAREK, LIKES, BOB)) + graph.add((TAREK, LIKES, MICHEL)) + graph.add((MICHEL, LIKES, PIZZA)) + graph.add((MICHEL, LIKES, CHEESE)) + graph.add((MICHEL, LIKES, TAREK)) + graph.add((BOB, LIKES, CHEESE)) + graph.add((BOB, HATES, PIZZA)) + graph.add((BOB, HATES, MICHEL)) + graph.add((BOB, LIKES, TAREK)) def test_unique_subjects(): diff --git a/test/test_examples.py b/test/test_examples.py index 9a85de6e2..39b967c5c 100644 --- a/test/test_examples.py +++ b/test/test_examples.py @@ -1,3 +1,4 @@ +import logging import subprocess import sys from pathlib import Path @@ -29,4 +30,20 @@ def test_example(example_file: Path) -> None: # this example requires a berkeleydb installation pytest.skip("The BerkeleyDB example is not working correctly.") - subprocess.run([sys.executable, f"{example_file}"], check=True) + result = subprocess.run( + [sys.executable, f"{example_file}"], + capture_output=True, + ) + + logging.debug("result = %s", result) + + try: + result.check_returncode() + except subprocess.CalledProcessError: + if ( + example_file.stem == "sparqlstore_example" + and "http.client.RemoteDisconnected: Remote end closed connection without response" + in result.stderr.decode("utf-8") + ): + pytest.skip("this test uses dbpedia which is down sometimes") + raise diff --git a/test/test_extras/test_extras_external_graph_libs.py b/test/test_extras/test_extras_external_graph_libs.py index 9c610b50e..62a19b886 100644 --- 
a/test/test_extras/test_extras_external_graph_libs.py +++ b/test/test_extras/test_extras_external_graph_libs.py @@ -5,7 +5,7 @@ def test_rdflib_to_networkx(): try: - import networkx + import networkx # noqa: F401 except ImportError: pytest.skip("couldn't find networkx") from rdflib.extras.external_graph_libs import ( @@ -15,7 +15,7 @@ def test_rdflib_to_networkx(): ) g = Graph() - a, b, l = URIRef("a"), URIRef("b"), Literal("l") + a, b, l = URIRef("a"), URIRef("b"), Literal("l") # noqa: E741 p, q = URIRef("p"), URIRef("q") edges = [(a, p, b), (a, q, b), (b, p, a), (b, p, l)] for t in edges: @@ -62,7 +62,7 @@ def test_rdflib_to_graphtool(): from rdflib.extras.external_graph_libs import rdflib_to_graphtool g = Graph() - a, b, l = URIRef("a"), URIRef("b"), Literal("l") + a, b, l = URIRef("a"), URIRef("b"), Literal("l") # noqa: E741 p, q = URIRef("p"), URIRef("q") edges = [(a, p, b), (a, q, b), (b, p, a), (b, p, l)] for t in edges: @@ -74,7 +74,7 @@ def test_rdflib_to_graphtool(): vpterm = mdg.vertex_properties["term"] va = gt_util.find_vertex(mdg, vpterm, a)[0] vb = gt_util.find_vertex(mdg, vpterm, b)[0] - vl = gt_util.find_vertex(mdg, vpterm, l)[0] + vl = gt_util.find_vertex(mdg, vpterm, l)[0] # noqa: F841 assert (va, vb) in [(e.source(), e.target()) for e in list(mdg.edges())] epterm = mdg.edge_properties["term"] @@ -82,7 +82,7 @@ def test_rdflib_to_graphtool(): assert len(list(gt_util.find_edge(mdg, epterm, q))) == 1 mdg = rdflib_to_graphtool( - g, e_prop_names=[str("name")], transform_p=lambda s, p, o: {str("name"): str(p)} + g, e_prop_names=["name"], transform_p=lambda s, p, o: {"name": str(p)} ) epterm = mdg.edge_properties["name"] assert len(list(gt_util.find_edge(mdg, epterm, str(p)))) == 3 diff --git a/test/test_extras/test_infixowl/test_basic.py b/test/test_extras/test_infixowl/test_basic.py index af9545499..805ea62f5 100644 --- a/test/test_extras/test_infixowl/test_basic.py +++ b/test/test_extras/test_infixowl/test_basic.py @@ -1,5 +1,3 @@ -from test.data 
import context0 - import pytest from rdflib import OWL, Graph, Literal, Namespace @@ -12,12 +10,13 @@ max, some, ) +from test.data import CONTEXT0 EXNS = Namespace("http://example.org/vocab/") def test_lshift_rlshift_delimiters(): - g = Graph(identifier=context0) + g = Graph(identifier=CONTEXT0) g.bind("ex", EXNS) Individual.factoryGraph = g @@ -29,18 +28,18 @@ def test_lshift_rlshift_delimiters(): classF = Class(EXNS.F) # noqa: N806 anonClass = EXNS.someProp << some >> classD # noqa: N806 - classF += anonClass + classF += anonClass # noqa: N806 assert str(list(anonClass.subClassOf)) == "[Class: ex:F ]" classA = classE | classF | anonClass # noqa: N806 - classB += classA + classB += classA # noqa: N806 classA.equivalentClass = [Class()] classB.subClassOf = [EXNS.someProp << some >> classC] assert str(classA) == "( ex:E OR ex:F OR ( ex:someProp SOME ex:D ) )" def test_matmul_rmatmul_delimiters(): - g = Graph(identifier=context0) + g = Graph(identifier=CONTEXT0) g.bind("ex", EXNS) Individual.factoryGraph = g @@ -52,11 +51,11 @@ def test_matmul_rmatmul_delimiters(): classF = Class(EXNS.F) # noqa: N806 anonClass = EXNS.someProp @ some @ classD # noqa: N806 - classF += anonClass + classF += anonClass # noqa: N806 assert str(list(anonClass.subClassOf)) == "[Class: ex:F ]" classA = classE | classF | anonClass # noqa: N806 - classB += classA + classB += classA # noqa: N806 classA.equivalentClass = [Class()] classB.subClassOf = [EXNS.someProp @ some @ classC] assert str(classA) == "( ex:E OR ex:F OR ( ex:someProp SOME ex:D ) )" @@ -83,7 +82,7 @@ def test_infixowl_serialization(): @pytest.mark.webtest def test_infix_owl_example1(): - g = Graph(identifier=context0) + g = Graph(identifier=CONTEXT0) g.bind("ex", EXNS) Individual.factoryGraph = g diff --git a/test/test_extras/test_infixowl/test_class.py b/test/test_extras/test_infixowl/test_class.py index f82f63c2f..a1ba88945 100644 --- a/test/test_extras/test_infixowl/test_class.py +++ 
b/test/test_extras/test_infixowl/test_class.py @@ -1,5 +1,3 @@ -from test.data import context0, context1 - import pytest from rdflib import OWL, RDFS, BNode, Graph, Literal, Namespace, URIRef, Variable @@ -12,6 +10,7 @@ max, ) from rdflib.util import first +from test.data import CONTEXT0, CONTEXT1 EXNS = Namespace("http://example.org/vocab/") PZNS = Namespace( @@ -154,7 +153,7 @@ def test_class_getparents(graph): assert list(sibling.subSumpteeIds()) == [] - assert str(brother.__repr__(full=True)) == "Class: ex:Brother " + assert str(brother.manchesterClass(full=True)) == "Class: ex:Brother " assert graph.serialize(format="ttl") == ( "@prefix ex: .\n" @@ -216,7 +215,7 @@ def test_class_serialize(graph): nameIsLabel=True, ) - g1 = Graph(identifier=context1) + g1 = Graph(identifier=CONTEXT1) owlc.serialize(g1) @@ -228,9 +227,9 @@ def test_class_serialize(graph): owlc.extent = None - owlc.extent = [context1] + owlc.extent = [CONTEXT1] - assert list(owlc.extent) == [context1] + assert list(owlc.extent) == [CONTEXT1] pred = RDFS.comment @@ -252,7 +251,7 @@ def test_class_serialize(graph): assert str(owlc.__invert__()) == "Some Class DisjointWith ( NOT ex:Sister )\n" - assert owlc.__repr__(full=True) == ( + assert owlc.manchesterClass(full=True) == ( "Class: ex:test \n" " ## A Defined Class (Man) ##\n" " This is a Man\n" @@ -273,7 +272,7 @@ def test_class_serialize(graph): def test_class_nameislabel(): - g = Graph(identifier=context0) + g = Graph(identifier=CONTEXT0) g.bind("ex", EXNS) Individual.factoryGraph = g @@ -316,7 +315,7 @@ def test_class_nameislabel(): def test_class_nameisnotlabel(): - g = Graph(identifier=context0) + g = Graph(identifier=CONTEXT0) g.bind("ex", EXNS) Individual.factoryGraph = g diff --git a/test/test_extras/test_infixowl/test_cover.py b/test/test_extras/test_infixowl/test_cover.py index 9dd8b1614..53fbb1ec4 100644 --- a/test/test_extras/test_infixowl/test_cover.py +++ b/test/test_extras/test_infixowl/test_cover.py @@ -1,5 +1,3 @@ -from 
test.data import TEST_DATA_DIR, context0 - import pytest from rdflib import OWL, RDF, RDFS, XSD, Graph, Literal, Namespace, URIRef, logger @@ -28,6 +26,7 @@ some, value, ) +from test.data import CONTEXT0, TEST_DATA_DIR EXNS = Namespace("http://example.org/vocab/") PZNS = Namespace( @@ -105,14 +104,14 @@ def test_allclasses(): def test_check_allclasses(): - from test.data import bob, michel, tarek + from test.data import BOB, MICHEL, TAREK g = Graph() g.bind("ex", PZNS) - g.add((tarek, RDF.type, OWL.Class)) - g.add((michel, RDF.type, OWL.Class)) - g.add((bob, RDF.type, OWL.Class)) + g.add((TAREK, RDF.type, OWL.Class)) + g.add((MICHEL, RDF.type, OWL.Class)) + g.add((BOB, RDF.type, OWL.Class)) assert set(g.subjects(predicate=RDF.type, object=OWL.Class)) == { URIRef("urn:example:bob"), @@ -230,7 +229,7 @@ def test_owlrdfproxylist(): ogbujiBros.append(fred) - ogbujiBros += bert + ogbujiBros += bert # noqa: N806 assert ogbujiBros[1] == EXNS.uche @@ -284,7 +283,7 @@ def test_deepclassclear(): classA = classE | classF | anonClass # noqa: N806 classG = Class(EXNS.G, complementOf=classA) # noqa: N806 - classB += classA + classB += classA # noqa: N806 classA.equivalentClass = [Class()] classA.complementOf = classG classB.subClassOf = [EXNS.someProp << some >> classC] @@ -329,7 +328,7 @@ def test_changeoperator(): # Converts a unionOf / intersectionOf class expression into one # that instead uses the given operator - g = Graph(identifier=context0) + g = Graph(identifier=CONTEXT0) g.bind("ex", EXNS) Individual.factoryGraph = g diff --git a/test/test_extras/test_infixowl/test_individual.py b/test/test_extras/test_infixowl/test_individual.py index 3937fdffb..b07e16852 100644 --- a/test/test_extras/test_infixowl/test_individual.py +++ b/test/test_extras/test_infixowl/test_individual.py @@ -1,9 +1,8 @@ -from test.data import context0, pizza - import pytest from rdflib import OWL, RDFS, BNode, Graph, Literal, Namespace, URIRef from rdflib.extras.infixowl import Class, Individual 
+from test.data import CONTEXT0, PIZZA EXNS = Namespace("http://example.org/vocab/") @@ -57,7 +56,7 @@ def test_individual_type_settergetter(graph): "\n" ) - b.replace(Class(identifier=context0)) + b.replace(Class(identifier=CONTEXT0)) assert graph.serialize(format="ttl") == ( "@prefix ns1: .\n" @@ -75,9 +74,9 @@ def test_individual_identity__settergetter(graph): b.identifier = URIRef("http://www.w3.org/2002/07/owl#Restriction") - b.identifier = pizza + b.identifier = PIZZA - assert b.identifier == pizza + assert b.identifier == PIZZA b.identifier = URIRef("http://www.w3.org/2002/07/owl#Restriction") @@ -93,12 +92,12 @@ def test_individual_sameas__settergetter(graph): assert list(b.sameAs) == [] - b.sameAs = pizza + b.sameAs = PIZZA - assert list(b.sameAs) == [pizza] + assert list(b.sameAs) == [PIZZA] bnodeid = BNode("harry") - b.sameAs = [pizza, bnodeid] + b.sameAs = [PIZZA, bnodeid] - assert list(b.sameAs) == [pizza, bnodeid] + assert list(b.sameAs) == [PIZZA, bnodeid] diff --git a/test/test_extras/test_infixowl/test_logic_structuring.py b/test/test_extras/test_infixowl/test_logic_structuring.py index de3134f39..5bbf0f8ce 100644 --- a/test/test_extras/test_infixowl/test_logic_structuring.py +++ b/test/test_extras/test_infixowl/test_logic_structuring.py @@ -42,7 +42,7 @@ def test_logic_structuring(graph): structure += joint locatedInLeg = hasLocation @ some @ leg # noqa: N806 - locatedInLeg += knee + locatedInLeg += knee # noqa: N806 assert graph.serialize(format="ttl") == ( "@prefix ex: .\n" diff --git a/test/test_extras/test_infixowl/test_manchester_syntax.py b/test/test_extras/test_infixowl/test_manchester_syntax.py index 52669af6e..40f1cef10 100644 --- a/test/test_extras/test_infixowl/test_manchester_syntax.py +++ b/test/test_extras/test_infixowl/test_manchester_syntax.py @@ -1,9 +1,8 @@ -from test.data import TEST_DATA_DIR - import pytest from rdflib import OWL, RDFS, Graph, Literal, Namespace from rdflib.extras.infixowl import Class, Individual, 
manchesterSyntax +from test.data import TEST_DATA_DIR EXNS = Namespace("http://example.org/vocab/") PZNS = Namespace( diff --git a/test/test_extras/test_infixowl/test_rubric.py b/test/test_extras/test_infixowl/test_rubric.py index 7e97ed7d9..50d165f66 100644 --- a/test/test_extras/test_infixowl/test_rubric.py +++ b/test/test_extras/test_infixowl/test_rubric.py @@ -1,4 +1,12 @@ -from rdflib import BNode, ConjunctiveGraph, Dataset, Graph, Literal, Namespace, URIRef +from rdflib import ( # noqa: I001 + BNode, + ConjunctiveGraph, + Dataset, + Graph, + Literal, + Namespace, + URIRef, +) from rdflib.extras.infixowl import Class, Property, classOrTerm from rdflib.extras.infixowl import generateQName as generate_qname # noqa: N813 from rdflib.extras.infixowl import propertyOrIdentifier diff --git a/test/test_extras/test_shacl_extras.py b/test/test_extras/test_shacl_extras.py new file mode 100644 index 000000000..417e75b68 --- /dev/null +++ b/test/test_extras/test_shacl_extras.py @@ -0,0 +1,218 @@ +from __future__ import annotations + +from typing import Union + +import pytest + +from rdflib import Graph, URIRef +from rdflib.extras.shacl import SHACLPathError, parse_shacl_path +from rdflib.namespace import SH, Namespace +from rdflib.paths import Path + +EX = Namespace("http://example.org/") + + +# Create a graph that gets loaded only once +@pytest.fixture(scope="module") +def path_source_data(): + data = """ + @prefix ex: . + @prefix rdfs: . + @prefix sh: . + + + ex:TestPropShape1 + sh:path ex:pred1 ; + . + ex:TestPropShape2a + sh:path ( + ex:pred1 + ex:pred2 + ex:pred3 + ) ; + . + ex:TestPropShape2b + sh:path ( + ( + ex:pred1 + ex:pred2 + ) + ex:pred3 + ) ; + . + ex:TestPropShape3 + sh:path [ + sh:inversePath ex:pred1 ; + ] ; + . + ex:TestPropShape4a + sh:path [ + sh:alternativePath ( + ex:pred1 + ex:pred2 + ex:pred3 + ) ; + ] ; + . + ex:TestPropShape4b + sh:path [ + sh:alternativePath ( + [ + sh:alternativePath ( + ex:pred1 + ex:pred2 + ) ; + ] + ex:pred3 + ) ; + ] ; + . 
+ ex:TestPropShape5 + sh:path [ + sh:zeroOrMorePath ex:pred1 ; + ] ; + . + ex:TestPropShape6 + sh:path [ + sh:oneOrMorePath ex:pred1 ; + ] ; + . + ex:TestPropShape7 + sh:path [ + sh:zeroOrOnePath ex:pred1 ; + ] ; + . + ex:TestPropShape8 + sh:path [ + sh:zeroOrMorePath [ + sh:inversePath ex:pred1 ; + ] ; + ] ; + . + ex:TestPropShape9 + sh:path [ + sh:alternativePath ( + [ + sh:inversePath ex:pred1 ; + ] + ( + ex:pred1 + ex:pred2 + ) + [ + sh:alternativePath ( + ex:pred1 + ex:pred2 + ex:pred3 + ) ; + ] + ) ; + ] ; + . + ex:TestPropShape10 + sh:path ( + [ + sh:zeroOrMorePath [ + sh:inversePath ex:pred1 ; + ] ; + ] + [ + sh:alternativePath ( + [ + sh:zeroOrMorePath [ + sh:inversePath ex:pred1 ; + ] ; + ] + [ + sh:alternativePath ( + ex:pred1 + [ + sh:oneOrMorePath ex:pred2 ; + ] + [ + sh:zeroOrMorePath ex:pred3 ; + ] + ) ; + ] + ) ; + ] + ) ; + . + ex:InvalidTestPropShape1 + sh:path () ; + . + ex:InvalidTestPropShape2 + sh:path ( + ex:pred1 + ) ; + . + ex:InvalidTestPropShape3 + sh:path [ + sh:alternativePath () ; + ] ; + . + ex:InvalidTestPropShape4 + sh:path [ + sh:alternativePath ( + ex:pred1 + ) ; + ] ; + . + ex:InvalidTestPropShape5 + sh:path [ + ex:invalidShaclPathProperty ex:pred1 + ] ; + . + ex:InvalidTestPropShape6 + sh:path "This can't be a literal!"; + . 
+ """ + g = Graph() + g.parse(data=data, format="turtle") + yield g + + +@pytest.mark.parametrize( + ("resource", "expected"), + ( + # Single SHACL Path + (EX.TestPropShape1, EX.pred1), + (EX.TestPropShape2a, EX.pred1 / EX.pred2 / EX.pred3), + (EX.TestPropShape2b, EX.pred1 / EX.pred2 / EX.pred3), + (EX.TestPropShape3, ~EX.pred1), + (EX.TestPropShape4a, EX.pred1 | EX.pred2 | EX.pred3), + (EX.TestPropShape4b, EX.pred1 | EX.pred2 | EX.pred3), + (EX.TestPropShape5, EX.pred1 * "*"), # type: ignore[operator] + (EX.TestPropShape6, EX.pred1 * "+"), # type: ignore[operator] + (EX.TestPropShape7, EX.pred1 * "?"), # type: ignore[operator] + # SHACL Path Combinations + (EX.TestPropShape8, ~EX.pred1 * "*"), + ( + EX.TestPropShape9, + ~EX.pred1 | EX.pred1 / EX.pred2 | EX.pred1 | EX.pred2 | EX.pred3, + ), + ( + EX.TestPropShape10, + ~EX.pred1 + * "*" + / (~EX.pred1 * "*" | EX.pred1 | EX.pred2 * "+" | EX.pred3 * "*"), # type: ignore[operator] + ), + # Invalid Operations + (EX.InvalidTestPropShape1, SHACLPathError), + (EX.InvalidTestPropShape2, SHACLPathError), + (EX.InvalidTestPropShape3, SHACLPathError), + (EX.InvalidTestPropShape4, SHACLPathError), + (EX.InvalidTestPropShape5, SHACLPathError), + (EX.InvalidTestPropShape6, TypeError), + ), +) +def test_parse_shacl_path( + path_source_data: Graph, resource: URIRef, expected: Union[URIRef, Path] +): + path_root = path_source_data.value(resource, SH.path) + + if isinstance(expected, type): + with pytest.raises(expected): # type: ignore[arg-type] + parse_shacl_path(path_source_data, path_root) # type: ignore[arg-type] + else: + assert parse_shacl_path(path_source_data, path_root) == expected # type: ignore[arg-type] diff --git a/test/test_graph/test_aggregate_graphs.py b/test/test_graph/test_aggregate_graphs.py index 12309684c..fd45c4a14 100644 --- a/test/test_graph/test_aggregate_graphs.py +++ b/test/test_graph/test_aggregate_graphs.py @@ -6,7 +6,7 @@ from rdflib.store import Store from rdflib.term import URIRef -testGraph1N3 = """ 
+TEST_GRAPH_1N3 = """ @prefix rdf: . @prefix rdfs: . @prefix : . @@ -16,7 +16,7 @@ """ -testGraph2N3 = """ +TEST_GRAPH_2N3 = """ @prefix rdf: . @prefix rdfs: . @prefix : . @@ -26,7 +26,7 @@ :a :d :e. """ -testGraph3N3 = """ +TEST_GRAPH_3N3 = """ @prefix rdf: . @prefix rdfs: . @prefix log: . @@ -34,7 +34,7 @@ <> a log:N3Document. """ -sparqlQ = """ +SPARQL_Q = """ PREFIX rdfs: SELECT * FROM NAMED @@ -44,12 +44,12 @@ WHERE {?sub ?pred rdfs:Class }""" -sparqlQ2 = """ +SPARQL_Q2 = """ PREFIX rdfs: SELECT ?class WHERE { GRAPH ?graph { ?member a ?class } }""" -sparqlQ3 = """ +SPARQL_Q3 = """ PREFIX rdfs: PREFIX log: SELECT ?n3Doc @@ -57,58 +57,58 @@ def test_aggregate_raw(): - memStore = plugin.get("Memory", Store)() - graph1 = Graph(memStore) - graph2 = Graph(memStore) - graph3 = Graph(memStore) - - for n3Str, graph in [ - (testGraph1N3, graph1), - (testGraph2N3, graph2), - (testGraph3N3, graph3), + mem_store = plugin.get("Memory", Store)() + graph1 = Graph(mem_store) + graph2 = Graph(mem_store) + graph3 = Graph(mem_store) + + for n3_str, graph in [ + (TEST_GRAPH_1N3, graph1), + (TEST_GRAPH_2N3, graph2), + (TEST_GRAPH_3N3, graph3), ]: - graph.parse(StringIO(n3Str), format="n3") + graph.parse(StringIO(n3_str), format="n3") - G = ReadOnlyGraphAggregate([graph1, graph2, graph3]) + g = ReadOnlyGraphAggregate([graph1, graph2, graph3]) # Test triples - assert len(list(G.triples((None, RDF.type, None)))) == 4 - assert len(list(G.triples((URIRef("http://test/bar"), None, None)))) == 2 - assert len(list(G.triples((None, URIRef("http://test/d"), None)))) == 3 + assert len(list(g.triples((None, RDF.type, None)))) == 4 + assert len(list(g.triples((URIRef("http://test/bar"), None, None)))) == 2 + assert len(list(g.triples((None, URIRef("http://test/d"), None)))) == 3 # Test __len__ - assert len(G) == 8 + assert len(g) == 8 # assert context iteration - for g in G.contexts(): + for g in g.contexts(): assert isinstance(g, Graph) # Test __contains__ - assert (URIRef("http://test/foo"), 
RDF.type, RDFS.Resource) in G + assert (URIRef("http://test/foo"), RDF.type, RDFS.Resource) in g - barPredicates = [URIRef("http://test/d"), RDFS.isDefinedBy] + bar_predicates = [URIRef("http://test/d"), RDFS.isDefinedBy] assert ( - len(list(G.triples_choices((URIRef("http://test/bar"), barPredicates, None)))) + len(list(g.triples_choices((URIRef("http://test/bar"), bar_predicates, None)))) == 2 ) def test_aggregate2(): - memStore = plugin.get("Memory", Store)() - graph1 = Graph(memStore, URIRef("http://example.com/graph1")) - graph2 = Graph(memStore, URIRef("http://example.com/graph2")) - graph3 = Graph(memStore, URIRef("http://example.com/graph3")) - - for n3Str, graph in [ - (testGraph1N3, graph1), - (testGraph2N3, graph2), - (testGraph3N3, graph3), + mem_store = plugin.get("Memory", Store)() + graph1 = Graph(mem_store, URIRef("http://example.com/graph1")) + graph2 = Graph(mem_store, URIRef("http://example.com/graph2")) + graph3 = Graph(mem_store, URIRef("http://example.com/graph3")) + + for n3_str, graph in [ + (TEST_GRAPH_1N3, graph1), + (TEST_GRAPH_2N3, graph2), + (TEST_GRAPH_3N3, graph3), ]: - graph.parse(StringIO(n3Str), format="n3") + graph.parse(StringIO(n3_str), format="n3") - graph4 = Graph(memStore, RDFS) - graph4.parse(data=testGraph1N3, format="n3") - g = ConjunctiveGraph(memStore) + graph4 = Graph(mem_store, RDFS) + graph4.parse(data=TEST_GRAPH_1N3, format="n3") + g = ConjunctiveGraph(mem_store) assert g is not None assert len(list(g.quads((None, None, None, None)))) == 11 assert len(list(g.contexts())) == 4 diff --git a/test/test_graph/test_batch_add.py b/test/test_graph/test_batch_add.py index 112a8903c..a6ab52fba 100644 --- a/test/test_graph/test_batch_add.py +++ b/test/test_graph/test_batch_add.py @@ -52,7 +52,7 @@ def test_add_quad_for_non_conjunctive_pass_on_context_matches(self): cut.add((URIRef("a"), URIRef("b"), URIRef("c"), g)) assert 1 == len(g) - def test_no_addN_on_exception(self): + def test_no_addN_on_exception(self): # noqa: N802 """ 
Even if we've added triples so far, it may be that attempting to add the last batch is the cause of our exception, so we don't want to attempt again @@ -71,12 +71,12 @@ def test_no_addN_on_exception(self): pass assert 10 == len(g) - def test_addN_batching_addN(self): + def test_addN_batching_addN(self): # noqa: N802 class MockGraph: def __init__(self): self.counts = [] - def addN(self, quads): + def addN(self, quads): # noqa: N802 self.counts.append(sum(1 for _ in quads)) g = MockGraph() diff --git a/test/test_graph/test_canonicalization.py b/test/test_graph/test_canonicalization.py index b7b8228bf..58a5802a8 100644 --- a/test/test_graph/test_canonicalization.py +++ b/test/test_graph/test_canonicalization.py @@ -1,6 +1,5 @@ from collections import Counter from io import StringIO -from test.utils import GraphHelper from typing import TYPE_CHECKING, Set import pytest @@ -10,6 +9,7 @@ from rdflib.compare import to_canonical_graph, to_isomorphic from rdflib.namespace import FOAF from rdflib.plugins.stores.memory import Memory +from test.utils import GraphHelper if TYPE_CHECKING: from rdflib.graph import _TripleType @@ -27,55 +27,42 @@ def get_digest_value(rdf, mimetype): def negative_graph_match_test(): """Test of FRIR identifiers against tricky RDF graphs with blank nodes.""" - testInputs = [ + testInputs = [ # noqa: N806 [ - str( - """@prefix : . + """@prefix : . :rel [ :label "Same" ]. - """ - ), - str( - """@prefix : . + """, + """@prefix : . :rel [ :label "Same" ], [ :label "Same" ]. - """ - ), + """, False, ], [ - str( - """@prefix : . + """@prefix : . :rel . - """ - ), - str( - """@prefix : . + """, + """@prefix : . :rel , . - """ - ), + """, True, ], [ - str( - """@prefix : . + """@prefix : . :linear_two_step_symmetry_start :related [ :related [ :related :linear_two_step_symmatry_end]], - [ :related [ :related :linear_two_step_symmatry_end]].""" - ), - str( - """@prefix : . + [ :related [ :related :linear_two_step_symmatry_end]].""", + """@prefix : . 
:linear_two_step_symmetry_start :related [ :related [ :related :linear_two_step_symmatry_end]], - [ :related [ :related :linear_two_step_symmatry_end]].""" - ), + [ :related [ :related :linear_two_step_symmatry_end]].""", True, ], [ - str( - """@prefix : . + """@prefix : . _:a :rel [ :rel [ :rel [ @@ -84,10 +71,8 @@ def negative_graph_match_test(): ]; ]; ]; - ].""" - ), - str( - """@prefix : . + ].""", + """@prefix : . _:a :rel [ :rel [ :rel [ @@ -98,14 +83,12 @@ def negative_graph_match_test(): ]; ]; ]; - ].""" - ), + ].""", False, ], # This test fails because the algorithm purposefully breaks the symmetry of symetric [ - str( - """@prefix : . + """@prefix : . _:a :rel [ :rel [ :rel [ @@ -114,10 +97,8 @@ def negative_graph_match_test(): ]; ]; ]; - ].""" - ), - str( - """@prefix : . + ].""", + """@prefix : . _:a :rel [ :rel [ :rel [ @@ -126,13 +107,11 @@ def negative_graph_match_test(): ]; ]; ]; - ].""" - ), + ].""", True, ], [ - str( - """@prefix : . + """@prefix : . _:a :rel [ :rel [ :label "foo"; @@ -142,10 +121,8 @@ def negative_graph_match_test(): ]; ]; ]; - ].""" - ), - str( - """@prefix : . + ].""", + """@prefix : . _:a :rel [ :rel [ :rel [ @@ -154,13 +131,11 @@ def negative_graph_match_test(): ]; ]; ]; - ].""" - ), + ].""", False, ], [ - str( - """@prefix : . + """@prefix : . _:0001 :rel _:0003, _:0004. _:0002 :rel _:0005, _:0006. _:0003 :rel _:0001, _:0007, _:0010. @@ -171,10 +146,8 @@ def negative_graph_match_test(): _:0008 :rel _:0004, _:0006, _:0010. _:0009 :rel _:0004, _:0005, _:0007. _:0010 :rel _:0003, _:0006, _:0008. - """ - ), - str( - """@prefix : . + """, + """@prefix : . _:0001 :rel _:0003, _:0004. _:0002 :rel _:0005, _:0006. _:0003 :rel _:0001, _:0007, _:0010. @@ -185,8 +158,7 @@ def negative_graph_match_test(): _:0005 :rel _:0002, _:0007, _:0009. _:0006 :rel _:0002, _:0008, _:0010. _:0007 :rel _:0003, _:0005, _:0009. 
- """ - ), + """, True, ], ] @@ -455,7 +427,7 @@ def test_issue725_collapsing_bnodes_2(): [] a rdf:Statement ; rdf:object _:v1 ; rdf:predicate [ ] ; - rdf:subject .""" + rdf:subject .""" # noqa: F841 # g = Graph() # g.parse(data=turtle, format='turtle') diff --git a/test/test_graph/test_container.py b/test/test_graph/test_container.py index cfbace415..5e24978f1 100644 --- a/test/test_graph/test_container.py +++ b/test/test_graph/test_container.py @@ -1,3 +1,5 @@ +import pytest + from rdflib import Graph from rdflib.container import Alt, Bag, Seq from rdflib.term import BNode, Literal @@ -18,45 +20,45 @@ def setup_class(cls): cls.g, BNode(), [Literal("1"), Literal("2"), Literal("3"), Literal("4")] ) - def testA(self): + def test_a(self): assert len(self.c1) == 0 - def testB(self): + def test_b(self): assert len(self.c2) == 4 - def testC(self): + def test_c(self): self.c2.append(Literal("5")) del self.c2[2] assert len(self.c2) == 4 - def testD(self): + def test_d(self): assert self.c2.index(Literal("5")) == 4 - def testE(self): + def test_e(self): assert self.c2[2] == Literal("3") - def testF(self): + def test_f(self): self.c2[2] = Literal("9") assert self.c2[2] == Literal("9") - def testG(self): + def test_g(self): self.c2.clear() assert len(self.c2) == 0 - def testH(self): + def test_h(self): self.c2.append_multiple([Literal("80"), Literal("90")]) assert self.c2[1] == Literal("80") - def testI(self): + def test_i(self): assert self.c2[2] == Literal("90") - def testJ(self): + def test_j(self): assert len(self.c2) == 2 - def testK(self): + def test_k(self): assert self.c2.end() == 2 - def testL(self): + def test_l(self): assert self.c3.anyone() in [ Literal("1"), Literal("2"), @@ -64,18 +66,27 @@ def testL(self): Literal("4"), ] - def testM(self): + def test_m(self): self.c4.add_at_position(3, Literal("60")) assert len(self.c4) == 5 - def testN(self): + def test_n(self): assert self.c4.index(Literal("60")) == 3 - def testO(self): + def test_o(self): assert 
self.c4.index(Literal("3")) == 4 - def testP(self): + def test_p(self): assert self.c4.index(Literal("4")) == 5 - def testQ(self): + def test_q(self): assert self.c2.index(Literal("1000")) != 3 + + def test_r(self): + match = "rdflib.container.Container.type_of_conatiner is deprecated. Use type_of_container method instead." + with pytest.warns(DeprecationWarning, match=match): + assert self.c1.type_of_container() == self.c1.type_of_conatiner() + with pytest.warns(DeprecationWarning, match=match): + assert self.c3.type_of_container() == self.c3.type_of_conatiner() + with pytest.warns(DeprecationWarning, match=match): + assert self.c4.type_of_container() == self.c4.type_of_conatiner() diff --git a/test/test_graph/test_diff.py b/test/test_graph/test_diff.py index 587281872..696d8be89 100644 --- a/test/test_graph/test_diff.py +++ b/test/test_graph/test_diff.py @@ -1,13 +1,6 @@ +from __future__ import annotations + from dataclasses import dataclass, field -from test.utils import ( - COLLAPSED_BNODE, - BNodeHandling, - GHQuad, - GHTriple, - GraphHelper, - MarksType, - MarkType, -) from typing import TYPE_CHECKING, Collection, Set, Tuple, Type, Union, cast import pytest @@ -17,8 +10,17 @@ from rdflib import Graph from rdflib.compare import graph_diff from rdflib.graph import ConjunctiveGraph, Dataset -from rdflib.namespace import FOAF, RDF, Namespace +from rdflib.namespace import FOAF, RDF from rdflib.term import BNode, Literal +from test.utils import ( + COLLAPSED_BNODE, + BNodeHandling, + GHQuad, + GHTriple, + GraphHelper, + MarksType, + MarkType, +) if TYPE_CHECKING: from rdflib.graph import _TripleType @@ -128,9 +130,6 @@ def as_params(self) -> ParameterSet: return pytest.param(self, marks=self.marks) -EGSCHEME = Namespace("example:") - - @pytest.mark.parametrize( "test_case", [ diff --git a/test/test_graph/test_graph.py b/test/test_graph/test_graph.py index 289d577ab..639aa710c 100644 --- a/test/test_graph/test_graph.py +++ b/test/test_graph/test_graph.py @@ -1,13 
+1,9 @@ -# -*- coding: utf-8 -*- +from __future__ import annotations + import logging import os -from contextlib import ExitStack from pathlib import Path -from test.data import TEST_DATA_DIR, bob, cheese, hates, likes, michel, pizza, tarek -from test.utils import GraphHelper, get_unique_plugin_names -from test.utils.exceptions import ExceptionChecker -from test.utils.httpfileserver import HTTPFileServer, ProtoFileResource -from typing import Callable, Optional, Set, Tuple, Union +from typing import Callable, Optional, Set, Tuple from urllib.error import HTTPError, URLError import pytest @@ -18,6 +14,10 @@ from rdflib.plugin import PluginException from rdflib.store import Store from rdflib.term import BNode +from test.data import BOB, CHEESE, HATES, LIKES, MICHEL, PIZZA, TAREK, TEST_DATA_DIR +from test.utils import GraphHelper, get_unique_plugin_names +from test.utils.httpfileserver import HTTPFileServer, ProtoFileResource +from test.utils.outcome import ExceptionChecker, OutcomeChecker, OutcomePrimitive def test_property_store() -> None: @@ -105,23 +105,23 @@ def make_graph() -> Graph: def populate_graph(graph: Graph): - graph.add((tarek, likes, pizza)) - graph.add((tarek, likes, cheese)) - graph.add((michel, likes, pizza)) - graph.add((michel, likes, cheese)) - graph.add((bob, likes, cheese)) - graph.add((bob, hates, pizza)) - graph.add((bob, hates, michel)) # gasp! + graph.add((TAREK, LIKES, PIZZA)) + graph.add((TAREK, LIKES, CHEESE)) + graph.add((MICHEL, LIKES, PIZZA)) + graph.add((MICHEL, LIKES, CHEESE)) + graph.add((BOB, LIKES, CHEESE)) + graph.add((BOB, HATES, PIZZA)) + graph.add((BOB, HATES, MICHEL)) # gasp! def depopulate_graph(graph: Graph): - graph.remove((tarek, likes, pizza)) - graph.remove((tarek, likes, cheese)) - graph.remove((michel, likes, pizza)) - graph.remove((michel, likes, cheese)) - graph.remove((bob, likes, cheese)) - graph.remove((bob, hates, pizza)) - graph.remove((bob, hates, michel)) # gasp! 
+ graph.remove((TAREK, LIKES, PIZZA)) + graph.remove((TAREK, LIKES, CHEESE)) + graph.remove((MICHEL, LIKES, PIZZA)) + graph.remove((MICHEL, LIKES, CHEESE)) + graph.remove((BOB, LIKES, CHEESE)) + graph.remove((BOB, HATES, PIZZA)) + graph.remove((BOB, HATES, MICHEL)) # gasp! def test_add(make_graph: GraphFactory): @@ -138,41 +138,41 @@ def test_remove(make_graph: GraphFactory): def test_triples(make_graph: GraphFactory): graph = make_graph() triples = graph.triples - Any = None + Any = None # noqa: N806 populate_graph(graph) # unbound subjects - assert len(list(triples((Any, likes, pizza)))) == 2 - assert len(list(triples((Any, hates, pizza)))) == 1 - assert len(list(triples((Any, likes, cheese)))) == 3 - assert len(list(triples((Any, hates, cheese)))) == 0 + assert len(list(triples((Any, LIKES, PIZZA)))) == 2 + assert len(list(triples((Any, HATES, PIZZA)))) == 1 + assert len(list(triples((Any, LIKES, CHEESE)))) == 3 + assert len(list(triples((Any, HATES, CHEESE)))) == 0 # unbound objects - assert len(list(triples((michel, likes, Any)))) == 2 - assert len(list(triples((tarek, likes, Any)))) == 2 - assert len(list(triples((bob, hates, Any)))) == 2 - assert len(list(triples((bob, likes, Any)))) == 1 + assert len(list(triples((MICHEL, LIKES, Any)))) == 2 + assert len(list(triples((TAREK, LIKES, Any)))) == 2 + assert len(list(triples((BOB, HATES, Any)))) == 2 + assert len(list(triples((BOB, LIKES, Any)))) == 1 # unbound predicates - assert len(list(triples((michel, Any, cheese)))) == 1 - assert len(list(triples((tarek, Any, cheese)))) == 1 - assert len(list(triples((bob, Any, pizza)))) == 1 - assert len(list(triples((bob, Any, michel)))) == 1 + assert len(list(triples((MICHEL, Any, CHEESE)))) == 1 + assert len(list(triples((TAREK, Any, CHEESE)))) == 1 + assert len(list(triples((BOB, Any, PIZZA)))) == 1 + assert len(list(triples((BOB, Any, MICHEL)))) == 1 # unbound subject, objects - assert len(list(triples((Any, hates, Any)))) == 2 - assert len(list(triples((Any, likes, 
Any)))) == 5 + assert len(list(triples((Any, HATES, Any)))) == 2 + assert len(list(triples((Any, LIKES, Any)))) == 5 # unbound predicates, objects - assert len(list(triples((michel, Any, Any)))) == 2 - assert len(list(triples((bob, Any, Any)))) == 3 - assert len(list(triples((tarek, Any, Any)))) == 2 + assert len(list(triples((MICHEL, Any, Any)))) == 2 + assert len(list(triples((BOB, Any, Any)))) == 3 + assert len(list(triples((TAREK, Any, Any)))) == 2 # unbound subjects, predicates - assert len(list(triples((Any, Any, pizza)))) == 3 - assert len(list(triples((Any, Any, cheese)))) == 3 - assert len(list(triples((Any, Any, michel)))) == 1 + assert len(list(triples((Any, Any, PIZZA)))) == 3 + assert len(list(triples((Any, Any, CHEESE)))) == 3 + assert len(list(triples((Any, Any, MICHEL)))) == 1 # all unbound assert len(list(triples((Any, Any, Any)))) == 7 @@ -188,7 +188,7 @@ def test_connected(make_graph: GraphFactory): jeroen = URIRef("jeroen") unconnected = URIRef("unconnected") - graph.add((jeroen, likes, unconnected)) + graph.add((jeroen, LIKES, unconnected)) assert graph.connected() is False @@ -197,82 +197,82 @@ def test_graph_sub(make_graph: GraphFactory): g1 = make_graph() g2 = make_graph() - g1.add((tarek, likes, pizza)) - g1.add((bob, likes, cheese)) + g1.add((TAREK, LIKES, PIZZA)) + g1.add((BOB, LIKES, CHEESE)) - g2.add((bob, likes, cheese)) + g2.add((BOB, LIKES, CHEESE)) g3 = g1 - g2 assert len(g3) == 1 - assert (tarek, likes, pizza) in g3 - assert (tarek, likes, cheese) not in g3 + assert (TAREK, LIKES, PIZZA) in g3 + assert (TAREK, LIKES, CHEESE) not in g3 - assert (bob, likes, cheese) not in g3 + assert (BOB, LIKES, CHEESE) not in g3 g1 -= g2 assert len(g1) == 1 - assert (tarek, likes, pizza) in g1 - assert (tarek, likes, cheese) not in g1 + assert (TAREK, LIKES, PIZZA) in g1 + assert (TAREK, LIKES, CHEESE) not in g1 - assert (bob, likes, cheese) not in g1 + assert (BOB, LIKES, CHEESE) not in g1 def test_graph_add(make_graph: GraphFactory): g1 = 
make_graph() g2 = make_graph() - g1.add((tarek, likes, pizza)) - g2.add((bob, likes, cheese)) + g1.add((TAREK, LIKES, PIZZA)) + g2.add((BOB, LIKES, CHEESE)) g3 = g1 + g2 assert len(g3) == 2 - assert (tarek, likes, pizza) in g3 - assert (tarek, likes, cheese) not in g3 + assert (TAREK, LIKES, PIZZA) in g3 + assert (TAREK, LIKES, CHEESE) not in g3 - assert (bob, likes, cheese) in g3 + assert (BOB, LIKES, CHEESE) in g3 g1 += g2 assert len(g1) == 2 - assert (tarek, likes, pizza) in g1 - assert (tarek, likes, cheese) not in g1 + assert (TAREK, LIKES, PIZZA) in g1 + assert (TAREK, LIKES, CHEESE) not in g1 - assert (bob, likes, cheese) in g1 + assert (BOB, LIKES, CHEESE) in g1 def test_graph_intersection(make_graph: GraphFactory): g1 = make_graph() g2 = make_graph() - g1.add((tarek, likes, pizza)) - g1.add((michel, likes, cheese)) + g1.add((TAREK, LIKES, PIZZA)) + g1.add((MICHEL, LIKES, CHEESE)) - g2.add((bob, likes, cheese)) - g2.add((michel, likes, cheese)) + g2.add((BOB, LIKES, CHEESE)) + g2.add((MICHEL, LIKES, CHEESE)) g3 = g1 * g2 assert len(g3) == 1 - assert (tarek, likes, pizza) not in g3 - assert (tarek, likes, cheese) not in g3 + assert (TAREK, LIKES, PIZZA) not in g3 + assert (TAREK, LIKES, CHEESE) not in g3 - assert (bob, likes, cheese) not in g3 + assert (BOB, LIKES, CHEESE) not in g3 - assert (michel, likes, cheese) in g3 + assert (MICHEL, LIKES, CHEESE) in g3 g1 *= g2 assert len(g1) == 1 - assert (tarek, likes, pizza) not in g1 - assert (tarek, likes, cheese) not in g1 + assert (TAREK, LIKES, PIZZA) not in g1 + assert (TAREK, LIKES, CHEESE) not in g1 - assert (bob, likes, cheese) not in g1 + assert (BOB, LIKES, CHEESE) not in g1 - assert (michel, likes, cheese) in g1 + assert (MICHEL, LIKES, CHEESE) in g1 def test_guess_format_for_parse( @@ -373,7 +373,7 @@ def test_guess_format_for_parse_http( http_file_server: HTTPFileServer, file: Path, content_type: Optional[str], - expected_result: Union[int, ExceptionChecker], + expected_result: OutcomePrimitive[int], 
) -> None: graph = make_graph() headers: Tuple[Tuple[str, str], ...] = tuple() @@ -384,25 +384,28 @@ def test_guess_format_for_parse_http( ProtoFileResource(headers, file), suffix=f"/{file.name}", ) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - + checker = OutcomeChecker.from_primitive(expected_result) assert 0 == len(graph) - with ExitStack() as exit_stack: - if isinstance(expected_result, ExceptionChecker): - catcher = exit_stack.enter_context(pytest.raises(expected_result.type)) + with checker.context(): graph.parse(location=file_info.request_url) + checker.check(len(graph)) + - if catcher is not None: - # assert catcher.value is not None - assert isinstance(expected_result, ExceptionChecker) - logging.debug("graph = %s", list(graph.triples((None, None, None)))) - else: - assert isinstance(expected_result, int) - assert expected_result == len(graph) +@pytest.mark.webtest +def test_guess_format_for_parse_http_text_plain(): + # Any raw url of a file from GitHub will return the content-type with text/plain. + url = "https://raw.githubusercontent.com/AGLDWG/vocpub-profile/master/validators/validator.ttl" + graph = Graph().parse(url) + assert len(graph) > 0 + + # A url that returns content-type text/html. 
+ url = "https://github.com/RDFLib/rdflib/issues/2734" + with pytest.raises(PluginException): + graph = Graph().parse(url) def test_parse_file_uri(make_graph: GraphFactory): - EG = Namespace("http://example.org/#") + EG = Namespace("http://example.org/#") # noqa: N806 g = make_graph() g.parse( Path(os.path.join(TEST_DATA_DIR, "suites", "nt_misc", "simple-04.nt")) diff --git a/test/test_graph/test_graph_cbd.py b/test/test_graph/test_graph_cbd.py index cb9e3761b..d871bdd59 100644 --- a/test/test_graph/test_graph_cbd.py +++ b/test/test_graph/test_graph_cbd.py @@ -1,16 +1,16 @@ -from test.data import TEST_DATA_DIR -from test.utils import BNodeHandling, GraphHelper +"""Tests the Graph class' cbd() function""" import pytest from rdflib import Graph, Namespace from rdflib.namespace import RDF, RDFS from rdflib.term import Literal, URIRef +from test.data import TEST_DATA_DIR +from test.utils import BNodeHandling, GraphHelper EXAMPLE_GRAPH_FILE_PATH = TEST_DATA_DIR / "spec" / "cbd" / "example_graph.rdf" EXAMPLE_GRAPH_CBD_FILE_PATH = TEST_DATA_DIR / "spec" / "cbd" / "example_graph_cbd.rdf" -"""Tests the Graph class' cbd() function""" EX = Namespace("http://ex/") @@ -52,7 +52,7 @@ def get_graph(): g.close() -def testCbd(get_graph): +def testCbd(get_graph): # noqa: N802 g = get_graph assert len(g.cbd(EX.R1)) == 3, "cbd() for R1 should return 3 triples" @@ -63,7 +63,7 @@ def testCbd(get_graph): assert len(g.cbd(EX.R4)) == 0, "cbd() for R4 should return 0 triples" -def testCbdReified(get_graph): +def testCbdReified(get_graph): # noqa: N802 g = get_graph # add some reified triples to the testing graph g.parse( diff --git a/test/test_graph/test_graph_context.py b/test/test_graph/test_graph_context.py index f6ef5c3e4..7d0a90f7c 100644 --- a/test/test_graph/test_graph_context.py +++ b/test/test_graph/test_graph_context.py @@ -1,8 +1,11 @@ +from __future__ import annotations + import os import shutil import sys import unittest from tempfile import mkdtemp, mkstemp +from typing import 
Optional import pytest @@ -13,7 +16,7 @@ class ContextTestCase(unittest.TestCase): store = "default" slow = True - tmppath = None + tmppath: Optional[str] = None def setUp(self): try: @@ -46,7 +49,7 @@ def tearDown(self): else: os.remove(self.tmppath) - def addStuff(self): + def add_stuff(self): tarek = self.tarek michel = self.michel bob = self.bob @@ -65,7 +68,7 @@ def addStuff(self): graph.add((bob, hates, pizza)) graph.add((bob, hates, michel)) # gasp! - def removeStuff(self): + def remove_stuff(self): tarek = self.tarek michel = self.michel bob = self.bob @@ -84,7 +87,7 @@ def removeStuff(self): graph.remove((bob, hates, pizza)) graph.remove((bob, hates, michel)) # gasp! - def addStuffInMultipleContexts(self): + def add_stuff_in_multiple_contexts(self): c1 = self.c1 c2 = self.c2 triple = (self.pizza, self.hates, self.tarek) # revenge! @@ -98,58 +101,58 @@ def addStuffInMultipleContexts(self): graph = Graph(self.graph.store, c2) graph.add(triple) - def testConjunction(self): + def test_conjunction(self): if self.store == "SQLite": pytest.skip("Skipping known issue with __len__") - self.addStuffInMultipleContexts() + self.add_stuff_in_multiple_contexts() triple = (self.pizza, self.likes, self.pizza) # add to context 1 graph = Graph(self.graph.store, self.c1) graph.add(triple) self.assertEqual(len(self.graph), len(graph)) - def testAdd(self): - self.addStuff() + def test_add(self): + self.add_stuff() - def testRemove(self): - self.addStuff() - self.removeStuff() + def test_remove(self): + self.add_stuff() + self.remove_stuff() - def testLenInOneContext(self): + def test_len_in_one_context(self): c1 = self.c1 # make sure context is empty self.graph.remove_context(self.graph.get_context(c1)) graph = Graph(self.graph.store, c1) - oldLen = len(self.graph) + old_len = len(self.graph) for i in range(0, 10): graph.add((BNode(), self.hates, self.hates)) - self.assertEqual(len(graph), oldLen + 10) - self.assertEqual(len(self.graph.get_context(c1)), oldLen + 10) + 
self.assertEqual(len(graph), old_len + 10) + self.assertEqual(len(self.graph.get_context(c1)), old_len + 10) self.graph.remove_context(self.graph.get_context(c1)) - self.assertEqual(len(self.graph), oldLen) + self.assertEqual(len(self.graph), old_len) self.assertEqual(len(graph), 0) - def testLenInMultipleContexts(self): + def test_len_in_multiple_contexts(self): if self.store == "SQLite": pytest.skip("Skipping known issue with __len__") - oldLen = len(self.graph) - self.addStuffInMultipleContexts() + old_len = len(self.graph) + self.add_stuff_in_multiple_contexts() - # addStuffInMultipleContexts is adding the same triple to + # add_stuff_in_multiple_contexts is adding the same triple to # three different contexts. So it's only + 1 - self.assertEqual(len(self.graph), oldLen + 1) + self.assertEqual(len(self.graph), old_len + 1) graph = Graph(self.graph.store, self.c1) - self.assertEqual(len(graph), oldLen + 1) + self.assertEqual(len(graph), old_len + 1) - def testRemoveInMultipleContexts(self): + def test_remove_in_multiple_contexts(self): c1 = self.c1 c2 = self.c2 triple = (self.pizza, self.hates, self.tarek) # revenge! - self.addStuffInMultipleContexts() + self.add_stuff_in_multiple_contexts() # triple should be still in store after removing it from c1 + c2 self.assertTrue(triple in self.graph) @@ -164,14 +167,14 @@ def testRemoveInMultipleContexts(self): self.assertTrue(triple not in self.graph) # add again and see if remove without context removes all triples! - self.addStuffInMultipleContexts() + self.add_stuff_in_multiple_contexts() self.graph.remove(triple) self.assertTrue(triple not in self.graph) - def testContexts(self): + def test_contexts(self): triple = (self.pizza, self.hates, self.tarek) # revenge! 
- self.addStuffInMultipleContexts() + self.add_stuff_in_multiple_contexts() def cid(c): return c.identifier @@ -179,27 +182,27 @@ def cid(c): self.assertTrue(self.c1 in map(cid, self.graph.contexts())) self.assertTrue(self.c2 in map(cid, self.graph.contexts())) - contextList = list(map(cid, list(self.graph.contexts(triple)))) - self.assertTrue(self.c1 in contextList, (self.c1, contextList)) - self.assertTrue(self.c2 in contextList, (self.c2, contextList)) + context_list = list(map(cid, list(self.graph.contexts(triple)))) + self.assertTrue(self.c1 in context_list, (self.c1, context_list)) + self.assertTrue(self.c2 in context_list, (self.c2, context_list)) - def testRemoveContext(self): + def test_remove_context(self): c1 = self.c1 - self.addStuffInMultipleContexts() + self.add_stuff_in_multiple_contexts() self.assertEqual(len(Graph(self.graph.store, c1)), 1) self.assertEqual(len(self.graph.get_context(c1)), 1) self.graph.remove_context(self.graph.get_context(c1)) self.assertTrue(self.c1 not in self.graph.contexts()) - def testRemoveAny(self): - Any = None - self.addStuffInMultipleContexts() - self.graph.remove((Any, Any, Any)) + def test_remove_any(self): + any = None + self.add_stuff_in_multiple_contexts() + self.graph.remove((any, any, any)) self.assertEqual(len(self.graph), 0) - def testTriples(self): + def test_triples(self): tarek = self.tarek michel = self.michel bob = self.bob @@ -213,78 +216,78 @@ def testTriples(self): graph = self.graph c1graph = Graph(self.graph.store, c1) c1triples = c1graph.triples - Any = None + any = None - self.addStuff() + self.add_stuff() # unbound subjects with context - asserte(len(list(c1triples((Any, likes, pizza)))), 2) - asserte(len(list(c1triples((Any, hates, pizza)))), 1) - asserte(len(list(c1triples((Any, likes, cheese)))), 3) - asserte(len(list(c1triples((Any, hates, cheese)))), 0) + asserte(len(list(c1triples((any, likes, pizza)))), 2) + asserte(len(list(c1triples((any, hates, pizza)))), 1) + 
asserte(len(list(c1triples((any, likes, cheese)))), 3) + asserte(len(list(c1triples((any, hates, cheese)))), 0) # unbound subjects without context, same results! - asserte(len(list(triples((Any, likes, pizza)))), 2) - asserte(len(list(triples((Any, hates, pizza)))), 1) - asserte(len(list(triples((Any, likes, cheese)))), 3) - asserte(len(list(triples((Any, hates, cheese)))), 0) + asserte(len(list(triples((any, likes, pizza)))), 2) + asserte(len(list(triples((any, hates, pizza)))), 1) + asserte(len(list(triples((any, likes, cheese)))), 3) + asserte(len(list(triples((any, hates, cheese)))), 0) # unbound objects with context - asserte(len(list(c1triples((michel, likes, Any)))), 2) - asserte(len(list(c1triples((tarek, likes, Any)))), 2) - asserte(len(list(c1triples((bob, hates, Any)))), 2) - asserte(len(list(c1triples((bob, likes, Any)))), 1) + asserte(len(list(c1triples((michel, likes, any)))), 2) + asserte(len(list(c1triples((tarek, likes, any)))), 2) + asserte(len(list(c1triples((bob, hates, any)))), 2) + asserte(len(list(c1triples((bob, likes, any)))), 1) # unbound objects without context, same results! 
- asserte(len(list(triples((michel, likes, Any)))), 2) - asserte(len(list(triples((tarek, likes, Any)))), 2) - asserte(len(list(triples((bob, hates, Any)))), 2) - asserte(len(list(triples((bob, likes, Any)))), 1) + asserte(len(list(triples((michel, likes, any)))), 2) + asserte(len(list(triples((tarek, likes, any)))), 2) + asserte(len(list(triples((bob, hates, any)))), 2) + asserte(len(list(triples((bob, likes, any)))), 1) # unbound predicates with context - asserte(len(list(c1triples((michel, Any, cheese)))), 1) - asserte(len(list(c1triples((tarek, Any, cheese)))), 1) - asserte(len(list(c1triples((bob, Any, pizza)))), 1) - asserte(len(list(c1triples((bob, Any, michel)))), 1) + asserte(len(list(c1triples((michel, any, cheese)))), 1) + asserte(len(list(c1triples((tarek, any, cheese)))), 1) + asserte(len(list(c1triples((bob, any, pizza)))), 1) + asserte(len(list(c1triples((bob, any, michel)))), 1) # unbound predicates without context, same results! - asserte(len(list(triples((michel, Any, cheese)))), 1) - asserte(len(list(triples((tarek, Any, cheese)))), 1) - asserte(len(list(triples((bob, Any, pizza)))), 1) - asserte(len(list(triples((bob, Any, michel)))), 1) + asserte(len(list(triples((michel, any, cheese)))), 1) + asserte(len(list(triples((tarek, any, cheese)))), 1) + asserte(len(list(triples((bob, any, pizza)))), 1) + asserte(len(list(triples((bob, any, michel)))), 1) # unbound subject, objects with context - asserte(len(list(c1triples((Any, hates, Any)))), 2) - asserte(len(list(c1triples((Any, likes, Any)))), 5) + asserte(len(list(c1triples((any, hates, any)))), 2) + asserte(len(list(c1triples((any, likes, any)))), 5) # unbound subject, objects without context, same results! 
- asserte(len(list(triples((Any, hates, Any)))), 2) - asserte(len(list(triples((Any, likes, Any)))), 5) + asserte(len(list(triples((any, hates, any)))), 2) + asserte(len(list(triples((any, likes, any)))), 5) # unbound predicates, objects with context - asserte(len(list(c1triples((michel, Any, Any)))), 2) - asserte(len(list(c1triples((bob, Any, Any)))), 3) - asserte(len(list(c1triples((tarek, Any, Any)))), 2) + asserte(len(list(c1triples((michel, any, any)))), 2) + asserte(len(list(c1triples((bob, any, any)))), 3) + asserte(len(list(c1triples((tarek, any, any)))), 2) # unbound predicates, objects without context, same results! - asserte(len(list(triples((michel, Any, Any)))), 2) - asserte(len(list(triples((bob, Any, Any)))), 3) - asserte(len(list(triples((tarek, Any, Any)))), 2) + asserte(len(list(triples((michel, any, any)))), 2) + asserte(len(list(triples((bob, any, any)))), 3) + asserte(len(list(triples((tarek, any, any)))), 2) # unbound subjects, predicates with context - asserte(len(list(c1triples((Any, Any, pizza)))), 3) - asserte(len(list(c1triples((Any, Any, cheese)))), 3) - asserte(len(list(c1triples((Any, Any, michel)))), 1) + asserte(len(list(c1triples((any, any, pizza)))), 3) + asserte(len(list(c1triples((any, any, cheese)))), 3) + asserte(len(list(c1triples((any, any, michel)))), 1) # unbound subjects, predicates without context, same results! - asserte(len(list(triples((Any, Any, pizza)))), 3) - asserte(len(list(triples((Any, Any, cheese)))), 3) - asserte(len(list(triples((Any, Any, michel)))), 1) + asserte(len(list(triples((any, any, pizza)))), 3) + asserte(len(list(triples((any, any, cheese)))), 3) + asserte(len(list(triples((any, any, michel)))), 1) # all unbound with context - asserte(len(list(c1triples((Any, Any, Any)))), 7) + asserte(len(list(c1triples((any, any, any)))), 7) # all unbound without context, same result! 
- asserte(len(list(triples((Any, Any, Any)))), 7) + asserte(len(list(triples((any, any, any)))), 7) for c in [graph, self.graph.get_context(c1)]: # unbound subjects @@ -356,9 +359,9 @@ def testTriples(self): ) # remove stuff and make sure the graph is empty again - self.removeStuff() - asserte(len(list(c1triples((Any, Any, Any)))), 0) - asserte(len(list(triples((Any, Any, Any)))), 0) + self.remove_stuff() + asserte(len(list(c1triples((any, any, any)))), 0) + asserte(len(list(triples((any, any, any)))), 0) # dynamically create classes for each registered Store diff --git a/test/test_graph/test_graph_formula.py b/test/test_graph/test_graph_formula.py index 32b3aef71..0f77dc294 100644 --- a/test/test_graph/test_graph_formula.py +++ b/test/test_graph/test_graph_formula.py @@ -13,13 +13,13 @@ @prefix : . {:a :b :c;a :foo} => {:a :d :c,?y}. _:foo a rdfs:Class. -:a :d :c.""" +:a :d :c.""" # noqa: N816 # Thorough test suite for formula-aware store -def checkFormulaStore(store="default", configString=None): +def checkFormulaStore(store="default", configString=None): # noqa: N802, N803 try: g = ConjunctiveGraph(store=store) except ImportError: @@ -38,10 +38,10 @@ def checkFormulaStore(store="default", configString=None): g.parse(data=testN3, format="n3") try: for s, p, o in g.triples((None, implies, None)): - formulaA = s - formulaB = o + formulaA = s # noqa: N806 + formulaB = o # noqa: N806 - assert type(formulaA) == QuotedGraph and type(formulaB) == QuotedGraph + assert type(formulaA) is QuotedGraph and type(formulaB) is QuotedGraph # a = URIRef('http://test/a') b = URIRef("http://test/b") c = URIRef("http://test/c") diff --git a/test/test_graph/test_graph_generators.py b/test/test_graph/test_graph_generators.py index 8d623e27c..0d89c9b7f 100644 --- a/test/test_graph/test_graph_generators.py +++ b/test/test_graph/test_graph_generators.py @@ -1,23 +1,23 @@ import os -from test.data import TEST_DATA_DIR, bob, cheese, hates, likes, michel, pizza, tarek from rdflib import 
Graph +from test.data import BOB, CHEESE, HATES, LIKES, MICHEL, PIZZA, TAREK, TEST_DATA_DIR timblcardn3 = open(os.path.join(TEST_DATA_DIR, "timbl-card.n3")).read() def add_stuff(graph): - graph.add((tarek, likes, pizza)) - graph.add((tarek, likes, cheese)) - graph.add((tarek, likes, bob)) - graph.add((tarek, likes, michel)) - graph.add((michel, likes, pizza)) - graph.add((michel, likes, cheese)) - graph.add((michel, likes, tarek)) - graph.add((bob, likes, cheese)) - graph.add((bob, hates, pizza)) - graph.add((bob, hates, michel)) - graph.add((bob, likes, tarek)) + graph.add((TAREK, LIKES, PIZZA)) + graph.add((TAREK, LIKES, CHEESE)) + graph.add((TAREK, LIKES, BOB)) + graph.add((TAREK, LIKES, MICHEL)) + graph.add((MICHEL, LIKES, PIZZA)) + graph.add((MICHEL, LIKES, CHEESE)) + graph.add((MICHEL, LIKES, TAREK)) + graph.add((BOB, LIKES, CHEESE)) + graph.add((BOB, HATES, PIZZA)) + graph.add((BOB, HATES, MICHEL)) + graph.add((BOB, LIKES, TAREK)) def test_unique_subjects(): diff --git a/test/test_graph/test_graph_http.py b/test/test_graph/test_graph_http.py index 4d5ed09e0..11eebe38b 100644 --- a/test/test_graph/test_graph_http.py +++ b/test/test_graph/test_graph_http.py @@ -1,6 +1,13 @@ +from __future__ import annotations + import logging import re from http.server import BaseHTTPRequestHandler +from urllib.error import HTTPError + +import pytest + +from rdflib import Graph from test.data import TEST_DATA_DIR from test.utils import GraphHelper from test.utils.graph import cached_graph @@ -12,12 +19,8 @@ ctx_http_handler, ) from test.utils.httpservermock import ServedBaseHTTPServerMock +from test.utils.namespace import EGDO from test.utils.wildcard import URL_PARSE_RESULT_WILDCARD -from urllib.error import HTTPError - -import pytest - -from rdflib import Graph, Namespace """ Test that correct content negotiation headers are passed @@ -61,8 +64,6 @@ ] """ -EG = Namespace("http://example.org/") - class ContentNegotiationHandler(BaseHTTPRequestHandler): def do_GET(self): # 
noqa: N802 @@ -106,7 +107,7 @@ def log_message(self, *args): class TestGraphHTTP: def test_content_negotiation(self) -> None: expected = Graph() - expected.add((EG.a, EG.b, EG.c)) + expected.add((EGDO.a, EGDO.b, EGDO.c)) expected_triples = GraphHelper.triple_set(expected) with ctx_http_handler(ContentNegotiationHandler) as server: @@ -121,7 +122,7 @@ def test_content_negotiation(self) -> None: def test_content_negotiation_no_format(self) -> None: expected = Graph() - expected.add((EG.a, EG.b, EG.c)) + expected.add((EGDO.a, EGDO.b, EGDO.c)) expected_triples = GraphHelper.triple_set(expected) with ctx_http_handler(ContentNegotiationHandler) as server: @@ -135,7 +136,7 @@ def test_content_negotiation_no_format(self) -> None: def test_source(self) -> None: expected = Graph() - expected.add((EG["a"], EG["b"], EG["c"])) + expected.add((EGDO["a"], EGDO["b"], EGDO["c"])) expected_triples = GraphHelper.triple_set(expected) with ServedBaseHTTPServerMock() as httpmock: @@ -145,7 +146,7 @@ def test_source(self) -> None: MockHTTPResponse( 200, "OK", - f"<{EG['a']}> <{EG['b']}> <{EG['c']}>.".encode(), + f"<{EGDO['a']}> <{EGDO['b']}> <{EGDO['c']}>.".encode(), {"Content-Type": ["text/turtle"]}, ) ) @@ -155,7 +156,7 @@ def test_source(self) -> None: def test_3xx(self) -> None: expected = Graph() - expected.add((EG["a"], EG["b"], EG["c"])) + expected.add((EGDO["a"], EGDO["b"], EGDO["c"])) expected_triples = GraphHelper.triple_set(expected) with ServedBaseHTTPServerMock() as httpmock: @@ -166,7 +167,7 @@ def test_3xx(self) -> None: MockHTTPResponse( 302, "FOUND", - "".encode(), + b"", {"Location": [f"{url}/loc/302/{idx}"]}, ) ) @@ -175,7 +176,7 @@ def test_3xx(self) -> None: MockHTTPResponse( 303, "See Other", - "".encode(), + b"", {"Location": [f"{url}/loc/303/{idx}"]}, ) ) @@ -184,7 +185,7 @@ def test_3xx(self) -> None: MockHTTPResponse( 308, "Permanent Redirect", - "".encode(), + b"", {"Location": [f"{url}/loc/308/{idx}"]}, ) ) @@ -193,7 +194,7 @@ def test_3xx(self) -> None: 
MockHTTPResponse( 200, "OK", - f"<{EG['a']}> <{EG['b']}> <{EG['c']}>.".encode(), + f"<{EGDO['a']}> <{EGDO['b']}> <{EGDO['c']}>.".encode(), {"Content-Type": ["text/turtle"]}, ) ) @@ -228,7 +229,7 @@ def test_5xx(self): with ServedBaseHTTPServerMock() as httpmock: url = httpmock.url httpmock.responses[MethodName.GET].append( - MockHTTPResponse(500, "Internal Server Error", "".encode(), {}) + MockHTTPResponse(500, "Internal Server Error", b"", {}) ) graph = Graph() diff --git a/test/test_graph/test_graph_items.py b/test/test_graph/test_graph_items.py index 55f81e7fe..4984792a9 100644 --- a/test/test_graph/test_graph_items.py +++ b/test/test_graph/test_graph_items.py @@ -1,4 +1,40 @@ -from rdflib import RDF, Graph +from rdflib import RDF, Graph, Namespace + +EX = Namespace("http://example.org/") + + +def test_items(): + g = Graph().parse( + data=""" + @prefix : . + + <> :value ( + + + ). + + <> :value ( + + ). + + <> :value (). + + <> :value ( + + + + ). + """, + format="turtle", + ) + + values = {tuple(g.items(v)) for v in g.objects(None, RDF.value)} + assert values == { + (EX.thing1, EX.thing2), + (EX.thing3,), + (), + (EX.thing4, EX.thing5, EX.thing6), + } def test_recursive_list_detection(): @@ -22,7 +58,7 @@ def test_recursive_list_detection(): for v in g.objects(None, RDF.value): try: list(g.items(v)) - except ValueError as e: + except ValueError as e: # noqa: F841 pass else: assert False, "Expected detection of recursive rdf:rest reference" diff --git a/test/test_graph/test_graph_redirect.py b/test/test_graph/test_graph_redirect.py index c61adbc59..b91623baa 100644 --- a/test/test_graph/test_graph_redirect.py +++ b/test/test_graph/test_graph_redirect.py @@ -1,11 +1,13 @@ -from test.data import TEST_DATA_DIR, simple_triple_graph -from test.utils import GraphHelper -from test.utils.http import MethodName, MockHTTPResponse -from test.utils.httpservermock import ServedBaseHTTPServerMock +from __future__ import annotations + from typing import Tuple from urllib.parse 
import urlparse from rdflib.graph import Graph +from test.data import SIMPLE_TRIPLE_GRAPH, TEST_DATA_DIR +from test.utils import GraphHelper +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock def test_graph_redirect_new_host( @@ -38,7 +40,7 @@ def test_graph_redirect_new_host( graph = Graph() graph.parse(location=f"{mock_a.url}/a/data.ttl") - GraphHelper.assert_sets_equals(graph, simple_triple_graph) + GraphHelper.assert_sets_equals(graph, SIMPLE_TRIPLE_GRAPH) for mock in function_httpmocks: assert 1 == len(mock.requests[MethodName.GET]) for request in mock.requests[MethodName.GET]: diff --git a/test/test_graph/test_graph_store.py b/test/test_graph/test_graph_store.py index 144434cc6..9a3397702 100644 --- a/test/test_graph/test_graph_store.py +++ b/test/test_graph/test_graph_store.py @@ -2,6 +2,8 @@ Tests for usage of the Store interface from Graph/NamespaceManager. """ +from __future__ import annotations + import itertools import logging from typing import ( @@ -10,21 +12,26 @@ Callable, Dict, Iterable, + Mapping, Optional, Sequence, Tuple, Type, Union, ) +from unittest.mock import patch import pytest import rdflib.namespace -from rdflib.graph import Graph +from rdflib.graph import ConjunctiveGraph, Dataset, Graph from rdflib.namespace import Namespace +from rdflib.plugins.sparql.sparql import Query from rdflib.plugins.stores.memory import Memory +from rdflib.query import Result from rdflib.store import Store -from rdflib.term import URIRef +from rdflib.term import Identifier, URIRef, Variable +from test.data import SIMPLE_TRIPLE_GRAPH if TYPE_CHECKING: from _pytest.mark.structures import ParameterSet @@ -69,10 +76,10 @@ def bind(self, prefix, namespace, override=True, replace=False) -> None: EGNS_V2 = EGNS["v2"] -def make_test_graph_store_bind_cases( +def make_graph_store_bind_cases( store_type: Type[Store] = Memory, graph_type: Type[Graph] = Graph, -) -> Iterable[Union[Tuple[Any, ...], 
"ParameterSet"]]: +) -> Iterable[Union[Tuple[Any, ...], ParameterSet]]: """ Generate test cases for test_graph_store_bind. """ @@ -194,9 +201,9 @@ def _p( @pytest.mark.parametrize( ["graph_factory", "ops", "expected_bindings"], itertools.chain( - make_test_graph_store_bind_cases(), - make_test_graph_store_bind_cases(store_type=MemoryWithoutBindOverride), - make_test_graph_store_bind_cases(graph_type=GraphWithoutBindOverrideFix), + make_graph_store_bind_cases(), + make_graph_store_bind_cases(store_type=MemoryWithoutBindOverride), + make_graph_store_bind_cases(graph_type=GraphWithoutBindOverrideFix), ), ) def test_graph_store_bind( @@ -205,9 +212,111 @@ def test_graph_store_bind( expected_bindings: NamespaceBindings, ) -> None: """ - The expected sequence of graph operations results in the expected namespace bindings. + The expected sequence of graph operations results in the expected + namespace bindings. """ graph = graph_factory() for op in ops: op(graph) check_ns(graph, expected_bindings) + + +@pytest.mark.parametrize( + ("graph_factory", "query_graph"), + [ + (Graph, lambda graph: graph.identifier), + (ConjunctiveGraph, "__UNION__"), + (Dataset, lambda graph: graph.default_context.identifier), + (lambda store: Dataset(store=store, default_union=True), "__UNION__"), + ], +) +def test_query_query_graph( + graph_factory: Callable[[Store], Graph], + query_graph: Union[str, Callable[[Graph], str]], +) -> None: + """ + The `Graph.query` method passes the correct ``queryGraph`` argument + to stores that have implemented a `Store.query` method. 
+ """ + + mock_result = Result("SELECT") + mock_result.vars = [Variable("s"), Variable("p"), Variable("o")] + mock_result.bindings = [ + { + Variable("s"): URIRef("http://example.org/subject"), + Variable("p"): URIRef("http://example.org/predicate"), + Variable("o"): URIRef("http://example.org/object"), + }, + ] + + query_string = r"FAKE QUERY, NOT USED" + store = Memory() + graph = graph_factory(store) + + if callable(query_graph): + query_graph = query_graph(graph) + + def mock_query( + query: Union[Query, str], + initNs: Mapping[str, Any], # noqa: N803 + initBindings: Mapping[str, Identifier], # noqa: N803 + queryGraph: str, # noqa: N803 + **kwargs, + ) -> Result: + assert query_string == query + assert dict(store.namespaces()) == initNs + assert {} == initBindings + assert query_graph == queryGraph + assert {} == kwargs + return mock_result + + with patch.object(store, "query", wraps=mock_query) as wrapped_query: + actual_result = graph.query(query_string) + assert actual_result.type == "SELECT" + assert list(actual_result) == list( + SIMPLE_TRIPLE_GRAPH.triples((None, None, None)) + ) + assert wrapped_query.call_count == 1 + + +@pytest.mark.parametrize( + ("graph_factory", "query_graph"), + [ + (Graph, lambda graph: graph.identifier), + (ConjunctiveGraph, "__UNION__"), + (Dataset, lambda graph: graph.default_context.identifier), + (lambda store: Dataset(store=store, default_union=True), "__UNION__"), + ], +) +def test_update_query_graph( + graph_factory: Callable[[Store], Graph], + query_graph: Union[str, Callable[[Graph], str]], +) -> None: + """ + The `Graph.update` method passes the correct ``queryGraph`` argument + to stores that have implemented a `Store.update` method. 
+ """ + + update_string = r"FAKE UPDATE, NOT USED" + store = Memory() + graph = graph_factory(store) + + if callable(query_graph): + query_graph = query_graph(graph) + + def mock_update( + query: Union[Query, str], + initNs: Mapping[str, Any], # noqa: N803 + initBindings: Mapping[str, Identifier], # noqa: N803 + queryGraph: str, # noqa: N803 + **kwargs, + ) -> None: + assert update_string == query + assert dict(store.namespaces()) == initNs + assert {} == initBindings + assert query_graph == queryGraph + assert {} == kwargs + + with patch.object(store, "update", wraps=mock_update) as wrapped_update: + graph.update(update_string) + assert wrapped_update.call_count == 1 diff --git a/test/test_graph/test_namespace_rebinding.py b/test/test_graph/test_namespace_rebinding.py index 15cf44730..babac1b4f 100644 --- a/test/test_graph/test_namespace_rebinding.py +++ b/test/test_graph/test_namespace_rebinding.py @@ -1,11 +1,10 @@ -from test.data import context1, context2, tarek - import pytest from rdflib import ConjunctiveGraph, Graph, Literal from rdflib.namespace import OWL, Namespace, NamespaceManager from rdflib.plugins.stores.memory import Memory from rdflib.term import URIRef +from test.data import CONTEXT1, CONTEXT2, TAREK foaf1_uri = URIRef("http://xmlns.com/foaf/0.1/") foaf2_uri = URIRef("http://xmlns.com/foaf/2.0/") @@ -180,7 +179,7 @@ def test_binding_replace(): s = g.serialize(format="n3") - for l in expected.split(): + for l in expected.split(): # noqa: E741 assert l in s @@ -249,7 +248,7 @@ def test_automatic_handling_of_unknown_predicates(): g = Graph(bind_namespaces="none") - g.add((tarek, URIRef("http://xmlns.com/foaf/0.1/name"), Literal("Tarek"))) + g.add((TAREK, URIRef("http://xmlns.com/foaf/0.1/name"), Literal("Tarek"))) assert len(list(g.namespaces())) > 0 @@ -257,7 +256,7 @@ def test_automatic_handling_of_unknown_predicates(): def test_automatic_handling_of_unknown_predicates_only_effected_after_serialization(): g = Graph(bind_namespaces="none") - 
g.add((tarek, URIRef("http://xmlns.com/foaf/0.1/name"), Literal("Tarek"))) + g.add((TAREK, URIRef("http://xmlns.com/foaf/0.1/name"), Literal("Tarek"))) assert "@prefix ns1: ." in g.serialize(format="n3") @@ -274,17 +273,17 @@ def test_multigraph_bindings(): store = Memory() - g1 = Graph(store, identifier=context1, bind_namespaces="none") + g1 = Graph(store, identifier=CONTEXT1, bind_namespaces="none") g1.bind("foaf", FOAF1) assert list(g1.namespaces()) == [("foaf", foaf1_uri)] assert list(store.namespaces()) == [("foaf", foaf1_uri)] - g1.add((tarek, FOAF1.name, Literal("tarek"))) + g1.add((TAREK, FOAF1.name, Literal("tarek"))) assert list(store.namespaces()) == [("foaf", foaf1_uri)] - g2 = Graph(store, identifier=context2, bind_namespaces="none") + g2 = Graph(store, identifier=CONTEXT2, bind_namespaces="none") g2.parse(data=data, format="n3") # The parser-caused rebind is in the underlying store and all objects @@ -311,7 +310,7 @@ def test_multigraph_bindings(): format="n3" ) - # In the notation3 format, the statement asserting tarek's name + # In the notation3 format, the statement asserting TAREK's name # now references the changed prefix: assert ' friend-of-a-friend:name "tarek" .' 
in cg.serialize( format="n3" @@ -410,7 +409,7 @@ def test_change_namespace_and_prefix(): assert list(g.namespaces()) == [("foaf", foaf2_uri), ("foaf1", foaf1_uri)] foaf3_uri = URIRef("http://xmlns.com/foaf/3.0/") - FOAF3 = Namespace("http://xmlns.com/foaf/3.0/") + FOAF3 = Namespace("http://xmlns.com/foaf/3.0/") # noqa: N806 g.bind("foaf", FOAF3) @@ -421,7 +420,7 @@ def test_change_namespace_and_prefix(): ] foaf4_uri = URIRef("http://xmlns.com/foaf/4.0/") - FOAF4 = Namespace("http://xmlns.com/foaf/4.0/") + FOAF4 = Namespace("http://xmlns.com/foaf/4.0/") # noqa: N806 g.bind("foaf", FOAF4) diff --git a/test/test_graph/test_skolemization.py b/test/test_graph/test_skolemization.py index aee86bae4..78da75493 100644 --- a/test/test_graph/test_skolemization.py +++ b/test/test_graph/test_skolemization.py @@ -1,19 +1,19 @@ +from __future__ import annotations + import logging import re -from test.utils import GraphHelper from typing import Pattern, Union import pytest from rdflib import Graph -from rdflib.namespace import Namespace from rdflib.term import BNode, Literal, URIRef - -EG = Namespace("http://example.com/") +from test.utils import GraphHelper +from test.utils.namespace import EGDC base_triples = { - (EG.subject, EG.predicate, EG.object0), - (EG.subject, EG.predicate, EG.object1), + (EGDC.subject, EGDC.predicate, EGDC.object0), + (EGDC.subject, EGDC.predicate, EGDC.object1), } @@ -40,7 +40,7 @@ def test_skolemization( g = Graph() for triple in base_triples: g.add(triple) - g.add((EG.scheck, EG.pcheck, node)) + g.add((EGDC.scheck, EGDC.pcheck, node)) assert len(g) == 3 dsg = g.skolemize() if expected_uri is None: @@ -50,7 +50,7 @@ def test_skolemization( iset = GraphHelper.triple_or_quad_set(dsg) logging.debug("iset = %s", iset) assert iset.issuperset(base_triples) - check_triples = list(dsg.triples((EG.scheck, EG.pcheck, None))) + check_triples = list(dsg.triples((EGDC.scheck, EGDC.pcheck, None))) assert len(check_triples) == 1 sbnode = check_triples[0][2] 
logging.debug("sbnode = %s, sbnode_value = %s", sbnode, f"{sbnode}") @@ -77,7 +77,7 @@ def test_deskolemization( g = Graph() for triple in base_triples: g.add(triple) - g.add((EG.scheck, EG.pcheck, URIRef(iri))) + g.add((EGDC.scheck, EGDC.pcheck, URIRef(iri))) assert len(g) == 3 dsg = g.de_skolemize() if expected_bnode_value is None: @@ -87,7 +87,7 @@ def test_deskolemization( iset = GraphHelper.triple_or_quad_set(dsg) logging.debug("iset = %s", iset) assert iset.issuperset(base_triples) - check_triples = list(dsg.triples((EG.scheck, EG.pcheck, None))) + check_triples = list(dsg.triples((EGDC.scheck, EGDC.pcheck, None))) assert len(check_triples) == 1 bnode = check_triples[0][2] logging.debug("bnode = %s, bnode_value = %s", bnode, f"{bnode}") diff --git a/test/test_graph/test_slice.py b/test/test_graph/test_slice.py index 373b0831f..7e503f8ed 100644 --- a/test/test_graph/test_slice.py +++ b/test/test_graph/test_slice.py @@ -1,6 +1,5 @@ -from test.data import bob, cheese, hates, likes, michel, pizza, tarek - from rdflib import Graph +from test.data import BOB, CHEESE, HATES, LIKES, MICHEL, PIZZA, TAREK class TestGraphSlice: @@ -19,37 +18,37 @@ def soe(x, y): return set([a[2] for a in x]) == set(y) # equals objects g = Graph() - g.add((tarek, likes, pizza)) - g.add((tarek, likes, cheese)) - g.add((michel, likes, pizza)) - g.add((michel, likes, cheese)) - g.add((bob, likes, cheese)) - g.add((bob, hates, pizza)) - g.add((bob, hates, michel)) # gasp! + g.add((TAREK, LIKES, PIZZA)) + g.add((TAREK, LIKES, CHEESE)) + g.add((MICHEL, LIKES, PIZZA)) + g.add((MICHEL, LIKES, CHEESE)) + g.add((BOB, LIKES, CHEESE)) + g.add((BOB, HATES, PIZZA)) + g.add((BOB, HATES, MICHEL)) # gasp! # Single terms are all trivial: # single index slices by subject, i.e. 
return triples((x,None,None)) - # tell me everything about "tarek" - sl(g[tarek], 2) + # tell me everything about "TAREK" + sl(g[TAREK], 2) # single slice slices by s,p,o, with : used to split - # tell me everything about "tarek" (same as above) - sl(g[tarek::], 2) + # tell me everything about "TAREK" (same as above) + sl(g[TAREK::], 2) - # give me every "likes" relationship - sl(g[:likes:], 5) + # give me every "LIKES" relationship + sl(g[:LIKES:], 5) - # give me every relationship to pizza - sl(g[::pizza], 3) + # give me every relationship to PIZZA + sl(g[::PIZZA], 3) - # give me everyone who likes pizza - sl(g[:likes:pizza], 2) + # give me everyone who LIKES PIZZA + sl(g[:LIKES:PIZZA], 2) - # does tarek like pizza? - assert g[tarek:likes:pizza] is True + # does TAREK like PIZZA? + assert g[TAREK:LIKES:PIZZA] is True # More intesting is using paths # everything hated or liked - sl(g[: hates | likes], 7) + sl(g[: HATES | LIKES], 7) diff --git a/test/test_graph/test_variants.py b/test/test_graph/test_variants.py index 09b2a156d..1136753a4 100644 --- a/test/test_graph/test_variants.py +++ b/test/test_graph/test_variants.py @@ -1,16 +1,18 @@ from __future__ import annotations +import dataclasses +import itertools import json import logging import os import re +from collections import defaultdict from dataclasses import dataclass, field from pathlib import Path, PurePath -from test.data import TEST_DATA_DIR -from test.utils import GraphHelper from typing import ( ClassVar, Collection, + DefaultDict, Dict, Iterable, List, @@ -18,6 +20,7 @@ OrderedDict, Pattern, Tuple, + Type, Union, cast, ) @@ -27,10 +30,14 @@ import rdflib.compare import rdflib.util -from rdflib.graph import Dataset +from rdflib.graph import Dataset, _GraphT from rdflib.namespace import XSD from rdflib.term import URIRef -from rdflib.util import guess_format +from test.data import TEST_DATA_DIR +from test.utils import GraphHelper +from test.utils.graph import GraphSource + +MODULE_PATH = 
Path(__file__).parent TEST_DIR = Path(__file__).parent.parent.absolute() VARIANTS_DIR = TEST_DATA_DIR / "variants" @@ -41,27 +48,23 @@ SUFFIX_FORMAT_MAP = {**rdflib.util.SUFFIX_FORMAT_MAP, "hext": "hext"} -@dataclass +@dataclass(frozen=True) class GraphAsserts: """ - A specification of asserts that must be checked against a graph. This is - read in from a JSON dict. + A specification of asserts that must be checked against a graph. """ quad_count: Optional[int] = None - exact_match: bool = False has_subject_iris: Optional[List[str]] = None - def check(self, first_graph: Optional[Dataset], graph: Dataset) -> None: + def check(self, graph: Dataset) -> None: """ if `first_graph` is `None` then this is the first check before any other graphs have been processed. """ if self.quad_count is not None: assert self.quad_count == len(list(graph.quads())) - if first_graph is not None and self.exact_match: - GraphHelper.assert_quad_sets_equals(first_graph, graph) - if first_graph is None and self.has_subject_iris is not None: + if self.has_subject_iris is not None: subjects_iris = { f"{subject}" for subject in graph.subjects() @@ -70,25 +73,61 @@ def check(self, first_graph: Optional[Dataset], graph: Dataset) -> None: assert set(self.has_subject_iris) == subjects_iris @classmethod - def from_path(cls, path: Path) -> GraphAsserts: + def from_path(cls, path: Path): with path.open("r") as f: - return cls(**json.load(f)) + keys = dataclasses.fields(cls) + data = json.load(f) + return cls(**{key.name: data[key.name] for key in keys if key.name in data}) + + +@dataclass(frozen=True) +class GraphVariantsMeta(GraphAsserts): + """ + Meta information about a set of variants. 
+ """ + + public_id: Optional[str] = None + exact_match: bool = False + + +_VARIANT_PREFERENCE: Dict[str, int] = dict( + (format, index) + for index, format in enumerate( + [ + "python", + "nquads", + "nt", + "ntriples", + "turtle", + "ttl", + "trig", + "xml", + "hext", + ] + ) +) @dataclass(order=True) class GraphVariants: """ - Represents a graph with multiple variants in different files. + Represents multiple variants of a single graph in different files. """ key: str - variants: Dict[str, Path] = field(default_factory=OrderedDict) - asserts: GraphAsserts = field(default_factory=lambda: GraphAsserts()) + variants: Dict[str, GraphSource] = field(default_factory=OrderedDict) + meta: GraphVariantsMeta = field(default_factory=GraphVariantsMeta) _variant_regex: ClassVar[Pattern[str]] = re.compile( - r"^(.*?)(|[-]variant-[^/]+|[-]asserts)$" + r"^(.*?)(|[-]variant-[^/]+|[-]asserts|[-]meta)$" ) + def __post_init__(self) -> None: + self.ordered_variants = sorted( + self.variants.items(), + key=lambda variant: _VARIANT_PREFERENCE.get(variant[1].format, 1000), + ) + def pytest_param( self, marks: Optional[ @@ -97,9 +136,20 @@ def pytest_param( ) -> ParameterSet: if marks is None: marks = cast(Tuple[MarkDecorator], tuple()) - logging.debug("self = %s", self) return pytest.param(self, id=self.key, marks=marks) + @property + def public_id(self) -> str: + return self.meta.public_id or f"example:rdflib:test:data:variant:{self.key}" + + @property + def preferred_variant(self) -> Tuple[str, GraphSource]: + return self.ordered_variants[0] + + def load(self, variant_key: str, graph_type: Type[_GraphT]) -> _GraphT: + variant = self.variants[variant_key] + return variant.load(public_id=self.public_id, graph_type=graph_type) + @classmethod def _decompose_path(cls, file_path: Path, basedir: Optional[Path]): if basedir: @@ -116,48 +166,65 @@ def _decompose_path(cls, file_path: Path, basedir: Optional[Path]): @classmethod def for_files( cls, file_paths: Iterable[Path], basedir: 
Optional[Path] = None - ) -> Dict[str, "GraphVariants"]: - graph_varaint_dict: Dict[str, GraphVariants] = {} + ) -> Dict[str, GraphVariants]: + graph_sources: DefaultDict[str, Dict[str, GraphSource]] = defaultdict(dict) + graph_meta: Dict[str, GraphVariantsMeta] = {} for file_path in file_paths: - logging.debug("file_path = %s", file_path) file_key, variant_key = cls._decompose_path(file_path, basedir) - # file_key = f"{file_path.parent / stem}" - if file_key not in graph_varaint_dict: - graph_variant = graph_varaint_dict[file_key] = GraphVariants(file_key) + file_graph_sources = graph_sources[file_key] + if variant_key.endswith("-meta.json"): + if file_key in graph_meta: + raise RuntimeError(f"Duplicate meta for {file_key} in {file_path}") + graph_meta[file_key] = GraphVariantsMeta.from_path(file_path) else: - graph_variant = graph_varaint_dict[file_key] - if variant_key.endswith("-asserts.json"): - graph_variant.asserts = GraphAsserts.from_path(file_path) + if variant_key in file_graph_sources: + raise RuntimeError( + f"Duplicate variant {variant_key} for {file_key} in {file_path}" + ) + file_graph_sources[variant_key] = GraphSource.from_path(file_path) + graph_variant_dict = {} + for file_key, variants in graph_sources.items(): + if file_key in graph_meta: + meta = graph_meta[file_key] + del graph_meta[file_key] else: - graph_variant.variants[variant_key] = file_path - return graph_varaint_dict + meta = GraphVariantsMeta() + if len(variants) < 2: + raise RuntimeError(f"Only one variant for {file_key}") + graph_variant_dict[file_key] = GraphVariants(file_key, variants, meta) + if graph_meta: + raise RuntimeError(f"Unmatched meta {graph_meta}") + return graph_variant_dict @classmethod def for_directory( cls, directory: Path, basedir: Optional[Path] = None - ) -> Dict[str, "GraphVariants"]: + ) -> Dict[str, GraphVariants]: file_paths = [] - for file_path in directory.glob("**/*"): + for file_path in directory.glob("*"): if not file_path.is_file(): continue if 
file_path.name.endswith(".md"): continue file_paths.append(file_path) - logging.debug("file_paths = %s", file_paths) return cls.for_files(file_paths, basedir) -GRAPH_VARIANT_DICT = { +GRAPH_VARIANTS_DICT = { **GraphVariants.for_directory(VARIANTS_DIR, TEST_DATA_DIR), **GraphVariants.for_files(EXTRA_FILES, TEST_DIR), } -EXPECTED_FAILURES = { - ("variants/schema_only_base"): pytest.mark.xfail( +EXPECTED_FAILURES: Dict[Tuple[str, Optional[str]], MarkDecorator] = { + ("variants/schema_only_base", ".ttl"): pytest.mark.xfail( reason="Some issue with handling base URI that does not end with a slash", raises=ValueError, ), - ("variants/rdf11trig_eg2"): pytest.mark.xfail( + ("variants/schema_only_base", ".n3"): pytest.mark.xfail( + reason="Some issue with handling base URI that does not end with a slash", + raises=ValueError, + ), + ("variants/rdf11trig_eg2", ".hext"): pytest.mark.xfail( reason=""" This fails randomly, passing less than 10% of the time, and always failing with comparing hext against trig. Not clear why, it may be a big with hext @@ -180,15 +247,19 @@ def for_directory( """, raises=AssertionError, ), - ("variants/diverse_quads"): pytest.mark.xfail( + ("variants/diverse_quads", ".nq"): pytest.mark.xfail( + reason=""" + Problems with default/implicit datatype of strings. It should be + xsd:string, but for some parsers it is not. See + for more info. + """, + raises=AssertionError, + ), + ("variants/diverse_quads", ".jsonld"): pytest.mark.xfail( reason=""" - TriG parsing gets confused about what graph 'XSD string' appears in: - (rdflib.term.URIRef('example:subject'), - rdflib.term.URIRef('http://example.com/predicate'), - rdflib.term.Literal('XSD string'), - - rdflib.term.URIRef('example:graph')), - + rdflib.term.URIRef('urn:example:graph')), - ? ++++ + Problems with default/implicit datatype of strings. It should be + xsd:string, but for some parsers it is not. See + for more info. 
""", raises=AssertionError, ), @@ -198,52 +269,71 @@ def for_directory( def tests_found() -> None: logging.debug("VARIANTS_DIR = %s", VARIANTS_DIR) logging.debug("EXTRA_FILES = %s", EXTRA_FILES) - assert len(GRAPH_VARIANT_DICT) >= 1 - logging.debug("ALL_VARIANT_GRAPHS = %s", GRAPH_VARIANT_DICT) - xml_literal = GRAPH_VARIANT_DICT.get("variants/xml_literal") + assert len(GRAPH_VARIANTS_DICT) >= 1 + logging.debug("ALL_VARIANT_GRAPHS = %s", GRAPH_VARIANTS_DICT) + xml_literal = GRAPH_VARIANTS_DICT.get("variants/xml_literal") assert xml_literal is not None assert len(xml_literal.variants) >= 5 - assert xml_literal.asserts.quad_count == 1 + assert xml_literal.meta.quad_count == 1 -@pytest.mark.parametrize( - "graph_variant", - [ - graph_variant.pytest_param(EXPECTED_FAILURES.get(graph_variant.key)) - for graph_variant in GRAPH_VARIANT_DICT.values() - ], -) -def test_variants(graph_variant: GraphVariants) -> None: +_PREFERRED_GRAPHS: Dict[str, Dataset] = {} + + +def load_preferred(graph_variants: GraphVariants) -> Dataset: + if graph_variants.key in _PREFERRED_GRAPHS: + return _PREFERRED_GRAPHS[graph_variants.key] + preferred_variant = graph_variants.preferred_variant + preferred_graph = graph_variants.load(preferred_variant[0], Dataset) + GraphHelper.strip_literal_datatypes(preferred_graph, {XSD.string}) + _PREFERRED_GRAPHS[graph_variants.key] = preferred_graph + return preferred_graph + + +def make_variant_source_cases() -> Iterable[ParameterSet]: + for graph_variants in GRAPH_VARIANTS_DICT.values(): + variants = graph_variants.ordered_variants + preferred_variant = variants[0] + preferred_key = preferred_variant[0] + + for variant_key in itertools.chain([None], (i[0] for i in variants[1:])): + marks = [] + if (graph_variants.key, variant_key) in EXPECTED_FAILURES: + marks.append(EXPECTED_FAILURES[(graph_variants.key, variant_key)]) + yield pytest.param( + graph_variants, + variant_key, + marks=marks, + id=f"{graph_variants.key}-{preferred_key}-{variant_key}", + ) + + 
+@pytest.mark.parametrize(["graph_variants", "variant_key"], make_variant_source_cases()) +def test_variant_source( + graph_variants: GraphVariants, variant_key: Optional[str] +) -> None: """ - All variants of a graph are isomorphic with the first variant, and thus - eachother. + All variants of a graph are isomorphic with the preferred variant, + and thus eachother. """ - logging.debug("graph_variant = %s", graph_variant) - public_id = URIRef(f"example:{graph_variant.key}") - assert len(graph_variant.variants) > 0 - first_graph: Optional[Dataset] = None - first_path: Optional[Path] = None - logging.debug("graph_variant.asserts = %s", graph_variant.asserts) - - for variant_key, variant_path in graph_variant.variants.items(): - logging.debug("variant_path = %s", variant_path) - format = guess_format(variant_path.name, fmap=SUFFIX_FORMAT_MAP) - assert format is not None, f"could not determine format for {variant_path.name}" - graph = Dataset() - graph.parse(variant_path, format=format, publicID=public_id) - # Stripping data types as different parsers (e.g. hext) have different - # opinions of when a bare string is of datatype XSD.string or not. - # Probably something that needs more investigation. - GraphHelper.strip_literal_datatypes(graph, {XSD.string}) - graph_variant.asserts.check(first_graph, graph) - if first_graph is None: - first_graph = graph - first_path = variant_path + preferred_path = graph_variants.preferred_variant[1].path + preferred_graph: Dataset = load_preferred(graph_variants) + + if variant_key is None: + # Only check asserts against the preferred variant, and only + # when not comparing variants. 
+ graph_variants.meta.check(preferred_graph) + else: + variant_path = graph_variants.variants[variant_key].path + variant_graph = graph_variants.load(variant_key, Dataset) + GraphHelper.strip_literal_datatypes(variant_graph, {XSD.string}) + + if graph_variants.meta.exact_match: + GraphHelper.assert_quad_sets_equals(preferred_graph, variant_graph) else: - assert first_path is not None GraphHelper.assert_cgraph_isomorphic( - first_graph, - graph, + preferred_graph, + variant_graph, False, - f"checking {variant_path.relative_to(VARIANTS_DIR)} against {first_path.relative_to(VARIANTS_DIR)}", + f"checking {variant_path.relative_to(VARIANTS_DIR)} against {preferred_path.relative_to(VARIANTS_DIR)}", ) diff --git a/test/test_issues/test_issue1043.py b/test/test_issues/test_issue1043.py index 896529e5d..09c54af3f 100644 --- a/test/test_issues/test_issue1043.py +++ b/test/test_issues/test_issue1043.py @@ -1,30 +1,24 @@ import io import sys -import unittest -from rdflib import RDFS, XSD, Graph, Literal, Namespace +from rdflib import RDFS, XSD, Graph, Literal +from test.utils.namespace import EGDO -class TestIssue1043(unittest.TestCase): - def test_issue_1043(self): - expected = """@prefix rdfs: . +def test_issue_1043(): + expected = """@prefix rdfs: . @prefix xsd: . rdfs:label 4e-08 . 
""" - capturedOutput = io.StringIO() - sys.stdout = capturedOutput - g = Graph() - g.bind("xsd", XSD) - g.bind("rdfs", RDFS) - n = Namespace("http://example.org/") - g.add((n.number, RDFS.label, Literal(0.00000004, datatype=XSD.decimal))) - g.print() - sys.stdout = sys.__stdout__ - self.assertEqual(capturedOutput.getvalue(), expected) - - -if __name__ == "__main__": - unittest.main() + capturedOutput = io.StringIO() # noqa: N806 + sys.stdout = capturedOutput + g = Graph() + g.bind("xsd", XSD) + g.bind("rdfs", RDFS) + g.add((EGDO.number, RDFS.label, Literal(0.00000004, datatype=XSD.decimal))) + g.print() + sys.stdout = sys.__stdout__ + assert capturedOutput.getvalue() == expected diff --git a/test/test_issues/test_issue1141.py b/test/test_issues/test_issue1141.py index 85a1c2c6e..60f71582f 100644 --- a/test/test_issues/test_issue1141.py +++ b/test/test_issues/test_issue1141.py @@ -1,57 +1,56 @@ -import unittest - from rdflib import Graph from rdflib.plugins.stores.auditable import AuditableStore from rdflib.plugins.stores.memory import Memory, SimpleMemory +""" +Tests is Turtle and TriG parsing works with a store with or without formula support +""" -class TestIssue1141(unittest.TestCase): - """ - Tests is Turtle and TriG parsing works with a store with or without formula support - """ - - def test_issue_1141_1(self): - file = b"@prefix : . :s :p :o ." - - for format in ("turtle", "trig"): - # with formula - graph = Graph() - self.assertTrue(graph.store.formula_aware) - graph.parse(data=file, format=format) - self.assertEqual(len(graph), 1) - # without - graph = Graph(store=AuditableStore(Memory())) - self.assertFalse(graph.store.formula_aware) - graph.parse(data=file, format=format) - self.assertEqual(len(graph), 1) +def test_issue_1141_1(): + file = b"@prefix : . :s :p :o ." - def test_issue_1141_2(self): - file = b"@prefix : . :s :p :o ." 
+ for format in ("turtle", "trig"): # with formula - graph = Graph(store=Memory()) - self.assertTrue(graph.store.formula_aware) - graph.parse(data=file, format="turtle") - self.assertEqual(len(graph), 1) - - # without - graph = Graph(store=SimpleMemory()) - self.assertFalse(graph.store.formula_aware) - graph.parse(data=file, format="turtle") - self.assertEqual(len(graph), 1) - - def test_issue_1141_3(self): - file = b" ." - # with contexts - graph = Graph(store=Memory()) - self.assertTrue(graph.store.context_aware) - self.assertTrue(graph.store.formula_aware) - graph.parse(data=file, format="nt") - self.assertEqual(len(graph), 1) + graph = Graph() + assert graph.store.formula_aware + graph.parse(data=file, format=format) + assert len(graph) == 1 # without - graph = Graph(store=SimpleMemory()) - self.assertFalse(graph.store.context_aware) - self.assertFalse(graph.store.formula_aware) - graph.parse(data=file, format="nt") - self.assertEqual(len(graph), 1) + graph = Graph(store=AuditableStore(Memory())) + assert not graph.store.formula_aware + graph.parse(data=file, format=format) + assert len(graph) == 1 + + +def test_issue_1141_2(): + file = b"@prefix : . :s :p :o ." + # with formula + graph = Graph(store=Memory()) + assert graph.store.formula_aware + graph.parse(data=file, format="turtle") + assert len(graph) == 1 + + # without + graph = Graph(store=SimpleMemory()) + assert not graph.store.formula_aware + graph.parse(data=file, format="turtle") + assert len(graph) == 1 + + +def test_issue_1141_3(): + file = b" ." 
+ # with contexts + graph = Graph(store=Memory()) + assert graph.store.context_aware + assert graph.store.formula_aware + graph.parse(data=file, format="nt") + assert len(graph) == 1 + + # without + graph = Graph(store=SimpleMemory()) + assert not graph.store.context_aware + assert not graph.store.formula_aware + graph.parse(data=file, format="nt") + assert len(graph) == 1 diff --git a/test/test_issues/test_issue1160.py b/test/test_issues/test_issue1160.py index 2c7a5cfe4..10f8f146d 100644 --- a/test/test_issues/test_issue1160.py +++ b/test/test_issues/test_issue1160.py @@ -1,6 +1,7 @@ -import unittest from unittest import mock +import pytest + import rdflib from rdflib import ConjunctiveGraph @@ -15,16 +16,16 @@ """ -class NamedGraphWithFragmentTest(unittest.TestCase): - def test_named_graph_with_fragment(self): - """Test that fragment part of the URL is not erased.""" - graph = ConjunctiveGraph() +def test_named_graph_with_fragment(): + """Test that fragment part of the URL is not erased.""" + graph = ConjunctiveGraph() - with mock.patch("rdflib.parser.URLInputSource") as load_mock: - # We have to expect an exception here. - self.assertRaises(Exception, graph.query, QUERY) + with mock.patch("rdflib.parser.URLInputSource") as load_mock: + # We have to expect an exception here. + with pytest.raises(Exception): + graph.query(QUERY) - load_mock.assert_called_with( - rdflib.URIRef("http://ns.example.com/named#"), - "nt", - ) + load_mock.assert_called_with( + rdflib.URIRef("http://ns.example.com/named#"), + "nt", + ) diff --git a/test/test_issues/test_issue1404.py b/test/test_issues/test_issue1404.py index e6f292340..750ecf0a1 100644 --- a/test/test_issues/test_issue1404.py +++ b/test/test_issues/test_issue1404.py @@ -31,7 +31,7 @@ def test_skolem_de_skolem_roundtrip(): # Check the BNode is now a URIRef after skolemization. 
skolem_bnode = skolemized_graph.value(**query) - assert type(skolem_bnode) == URIRef + assert type(skolem_bnode) is URIRef # Check that the original bnode id exists somewhere in the uri. assert bnode_id in skolem_bnode diff --git a/test/test_issues/test_issue1484.py b/test/test_issues/test_issue1484.py index d0353a99d..87fdb2080 100644 --- a/test/test_issues/test_issue1484.py +++ b/test/test_issues/test_issue1484.py @@ -1,66 +1,58 @@ import io import json -import unittest -from rdflib import RDF, RDFS, Graph, Namespace +from rdflib import RDF, RDFS, Graph +from test.utils.namespace import EGDO -class TestIssue1484_json(unittest.TestCase): - def test_issue_1484_json(self): - """ - Test JSON-LD parsing of result from json.dump - """ - n = Namespace("http://example.org/") - jsondata = {"@id": n.s, "@type": [n.t], n.p: {"@id": n.o}} +def test_issue_1484_json(): + """ + Test JSON-LD parsing of result from json.dump + """ + jsondata = {"@id": EGDO.s, "@type": [EGDO.t], EGDO.p: {"@id": EGDO.o}} - s = io.StringIO() - json.dump(jsondata, s, indent=2, separators=(",", ": ")) - s.seek(0) - - DEBUG = False - if DEBUG: - print("S: ", s.read()) - s.seek(0) - - b = n.base - g = Graph() - g.bind("rdf", RDF) - g.bind("rdfs", RDFS) - g.parse(source=s, publicID=b, format="json-ld") - - assert (n.s, RDF.type, n.t) in g - assert (n.s, n.p, n.o) in g + s = io.StringIO() + json.dump(jsondata, s, indent=2, separators=(",", ": ")) + s.seek(0) + DEBUG = False # noqa: N806 + if DEBUG: + print("S: ", s.read()) + s.seek(0) -class TestIssue1484_str(unittest.TestCase): - def test_issue_1484_str(self): - """ - Test JSON-LD parsing of result from string (used by round tripping tests) - - (Previously passes, but broken by earlier fix for above.) 
- """ - n = Namespace("http://example.org/") - jsonstr = """ - { - "@id": "http://example.org/s", - "@type": [ - "http://example.org/t" - ], - "http://example.org/p": { - "@id": "http://example.org/o" - } + b = EGDO.base + g = Graph() + g.bind("rdf", RDF) + g.bind("rdfs", RDFS) + g.parse(source=s, publicID=b, format="json-ld") + + assert (EGDO.s, RDF.type, EGDO.t) in g + assert (EGDO.s, EGDO.p, EGDO.o) in g + + +def test_issue_1484_str(): + """ + Test JSON-LD parsing of result from string (used by round tripping tests) + + (Previously passes, but broken by earlier fix for above.) + """ + jsonstr = """ + { + "@id": "http://example.org/s", + "@type": [ + "http://example.org/t" + ], + "http://example.org/p": { + "@id": "http://example.org/o" } - """ - - b = n.base - g = Graph() - g.bind("rdf", RDF) - g.bind("rdfs", RDFS) - g.parse(data=jsonstr, publicID=b, format="json-ld") - - assert (n.s, RDF.type, n.t) in g - assert (n.s, n.p, n.o) in g + } + """ + b = EGDO.base + g = Graph() + g.bind("rdf", RDF) + g.bind("rdfs", RDFS) + g.parse(data=jsonstr, publicID=b, format="json-ld") -if __name__ == "__main__": - unittest.main() + assert (EGDO.s, RDF.type, EGDO.t) in g + assert (EGDO.s, EGDO.p, EGDO.o) in g diff --git a/test/test_issues/test_issue160.py b/test/test_issues/test_issue160.py index c9fddcb9c..c59498b35 100644 --- a/test/test_issues/test_issue160.py +++ b/test/test_issues/test_issue160.py @@ -1,5 +1,3 @@ -from unittest import TestCase - from rdflib import ConjunctiveGraph, Literal, Namespace from rdflib.collection import Collection @@ -42,36 +40,35 @@ """ -class CollectionTest(TestCase): - def test_collection_render(self): - foo = Namespace("http://www.example.org/foo/ns/") - ex = Namespace("http://www.example.org/example/foo/") - rdf = Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") +def test_collection_render(): + foo = Namespace("http://www.example.org/foo/ns/") + ex = Namespace("http://www.example.org/example/foo/") + rdf = 
Namespace("http://www.w3.org/1999/02/22-rdf-syntax-ns#") - # Works: x a rdf:List, a foo:Other ; - # Fails: y a foo:Wrapper, foo:wraps x; x a rdf:List, a foo:Other ; + # Works: x a rdf:List, a foo:Other ; + # Fails: y a foo:Wrapper, foo:wraps x; x a rdf:List, a foo:Other ; - target1 = ConjunctiveGraph() - target1.parse(data=target1xml, format="xml") - target2 = ConjunctiveGraph() - target2.parse(data=target2xml, format="xml") + target1 = ConjunctiveGraph() + target1.parse(data=target1xml, format="xml") + target2 = ConjunctiveGraph() + target2.parse(data=target2xml, format="xml") - g = ConjunctiveGraph() - bits = [ex["a"], ex["b"], ex["c"]] - l = Collection(g, ex["thing"], bits) - triple = (ex["thing"], rdf["type"], foo["Other"]) - g.add(triple) - triple = (ex["thing"], foo["property"], Literal("Some Value")) + g = ConjunctiveGraph() + bits = [ex["a"], ex["b"], ex["c"]] + l = Collection(g, ex["thing"], bits) # noqa: E741, F841 + triple = (ex["thing"], rdf["type"], foo["Other"]) + g.add(triple) + triple = (ex["thing"], foo["property"], Literal("Some Value")) + g.add(triple) + for b in bits: + triple = (b, rdf["type"], foo["Item"]) g.add(triple) - for b in bits: - triple = (b, rdf["type"], foo["Item"]) - g.add(triple) - self.assertEqual(g.isomorphic(target1), True) + assert g.isomorphic(target1) is True - # g.add((ex['wrapper'], rdf['type'], foo['Wrapper'])) - # g.add((ex['wrapper'], foo['wraps'], ex['thing'])) - # # resn3 = g.serialize(format="n3") - # # print(resn3) - # resxml = g.serialize(format="pretty-xml") - # # print(resxml) - # self.assertEqual(g.isomorphic(target2), True) + # g.add((ex['wrapper'], rdf['type'], foo['Wrapper'])) + # g.add((ex['wrapper'], foo['wraps'], ex['thing'])) + # # resn3 = g.serialize(format="n3") + # # print(resn3) + # resxml = g.serialize(format="pretty-xml") + # # print(resxml) + # self.assertEqual(g.isomorphic(target2), True) diff --git a/test/test_issues/test_issue161.py b/test/test_issues/test_issue161.py index b1df9bca2..fa2eabc05 
100644 --- a/test/test_issues/test_issue161.py +++ b/test/test_issues/test_issue161.py @@ -1,30 +1,27 @@ -from unittest import TestCase - from rdflib.graph import ConjunctiveGraph -class EntityTest(TestCase): - def test_turtle_namespace_prefixes(self): - g = ConjunctiveGraph() - n3 = """ - @prefix _9: . - @prefix p_9: . - @prefix rdfs: . +def test_turtle_namespace_prefixes(): + g = ConjunctiveGraph() + n3 = """ + @prefix _9: . + @prefix p_9: . + @prefix rdfs: . - p_9:a p_9:b p_9:c . + p_9:a p_9:b p_9:c . - a - ; - rdfs:label "Cecil B. DeMille (Director)"; - _9:director_name "Cecil B. DeMille" .""" + a + ; + rdfs:label "Cecil B. DeMille (Director)"; + _9:director_name "Cecil B. DeMille" .""" - g.parse(data=n3, format="n3") - turtle = g.serialize(format="turtle") + g.parse(data=n3, format="n3") + turtle = g.serialize(format="turtle") - # Check round-tripping, just for kicks. - g = ConjunctiveGraph() - g.parse(data=turtle, format="turtle") - # Shouldn't have got to here - s = g.serialize(format="turtle", encoding="latin-1") + # Check round-tripping, just for kicks. 
+ g = ConjunctiveGraph() + g.parse(data=turtle, format="turtle") + # Shouldn't have got to here + s = g.serialize(format="turtle", encoding="latin-1") - self.assertTrue(b"@prefix _9" not in s) + assert b"@prefix _9" not in s diff --git a/test/test_issues/test_issue190.py b/test/test_issues/test_issue190.py index 5efe3f9dd..018cf36d9 100644 --- a/test/test_issues/test_issue190.py +++ b/test/test_issues/test_issue190.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import textwrap import pytest diff --git a/test/test_issues/test_issue1998.py b/test/test_issues/test_issue1998.py index e3f28a395..5a7ae079c 100644 --- a/test/test_issues/test_issue1998.py +++ b/test/test_issues/test_issue1998.py @@ -3,6 +3,7 @@ https://github.com/RDFLib/rdflib/issues/1998 """ + import rdflib diff --git a/test/test_issues/test_issue200.py b/test/test_issues/test_issue200.py index cfe63c870..66aa7d91c 100644 --- a/test/test_issues/test_issue200.py +++ b/test/test_issues/test_issue200.py @@ -1,46 +1,37 @@ -#!/usr/bin/env python - import os -import unittest import pytest import rdflib -if os.name == "nt": - pytestmark = pytest.mark.skip( - reason="No os.fork() and/or os.pipe() on this platform, skipping" - ) - -class TestRandomSeedInFork(unittest.TestCase): - def test_bnode_id_differs_in_fork(self): - """Checks that os.fork()ed child processes produce a - different sequence of BNode ids from the parent process. 
- """ - r, w = os.pipe() # these are file descriptors, not file objects - pid = os.fork() - if pid: - pb1 = rdflib.term.BNode() - os.close(w) # use os.close() to close a file descriptor - r = os.fdopen(r) # turn r into a file object - txt = r.read() - os.waitpid(pid, 0) # make sure the child process gets cleaned up - r.close() - else: - os.close(r) - w = os.fdopen(w, "w") - cb = rdflib.term.BNode() - w.write(cb) - w.close() - os._exit(0) - assert txt != str( - pb1 - ), "Parent process BNode id: " + "%s, child process BNode id: %s" % ( - txt, - str(pb1), - ) - - -if __name__ == "__main__": - unittest.main() +@pytest.mark.skipif( + condition=(os.name == "nt"), + reason="No os.fork() and/or os.pipe() on this platform, skipping", +) +def test_bnode_id_differs_in_fork(): + """Checks that os.fork()ed child processes produce a + different sequence of BNode ids from the parent process. + """ + r, w = os.pipe() # these are file descriptors, not file objects + pid = os.fork() + if pid: + pb1 = rdflib.term.BNode() + os.close(w) # use os.close() to close a file descriptor + r = os.fdopen(r) # turn r into a file object + txt = r.read() + os.waitpid(pid, 0) # make sure the child process gets cleaned up + r.close() + else: + os.close(r) + w = os.fdopen(w, "w") + cb = rdflib.term.BNode() + w.write(cb) + w.close() + os._exit(0) + assert txt != str( + pb1 + ), "Parent process BNode id: " + "%s, child process BNode id: %s" % ( + txt, + str(pb1), + ) diff --git a/test/test_issues/test_issue209.py b/test/test_issues/test_issue209.py index 3d441d6ce..720c24f74 100644 --- a/test/test_issues/test_issue209.py +++ b/test/test_issues/test_issue209.py @@ -1,24 +1,20 @@ import threading -import unittest import rdflib -def makeNode(): +def make_node(): i = 0 while i < 9999: i += 1 rdflib.term.BNode() -class TestRandomSeedInThread(unittest.TestCase): - def test_bnode_id_gen_in_thread(self): - """ """ - th = threading.Thread(target=makeNode) - th.daemon = True - th.start() - makeNode() - - -if 
__name__ == "__main__": - unittest.main() +def test_bnode_id_gen_in_thread(): + """ + Test a random seed in a thread. + """ + th = threading.Thread(target=make_node) + th.daemon = True + th.start() + make_node() diff --git a/test/test_issues/test_issue247.py b/test/test_issues/test_issue247.py index b44e92f0c..fa6bba9a5 100644 --- a/test/test_issues/test_issue247.py +++ b/test/test_issues/test_issue247.py @@ -1,5 +1,3 @@ -import unittest - import rdflib failxml = """\ @@ -17,16 +15,11 @@ """ -class TestXMLLiteralwithLangAttr(unittest.TestCase): - def test_failing_parse_of_literal_with_xmllang_attr(self): - """ - Show parse of Literal with xmllang attr fails - Parsing an RDF/XML document fails with a KeyError when - it contains a XML Literal with a xml:lang attribute: - """ - g = rdflib.Graph() - g.parse(data=failxml, format="xml") - - -if __name__ == "__main__": - unittest.main() +def test_failing_parse_of_literal_with_xmllang_attr(): + """ + Show parse of Literal with xmllang attr fails + Parsing an RDF/XML document fails with a KeyError when + it contains a XML Literal with a xml:lang attribute: + """ + g = rdflib.Graph() + g.parse(data=failxml, format="xml") diff --git a/test/test_issues/test_issue248.py b/test/test_issues/test_issue248.py index be403494a..9c80b81f9 100644 --- a/test/test_issues/test_issue248.py +++ b/test/test_issues/test_issue248.py @@ -1,85 +1,78 @@ -import unittest - import rdflib -class TestSerialization(unittest.TestCase): - def test_issue_248(self): - """ - Ed Summers Thu, 24 May 2007 12:21:17 -0700 - - As discussed with eikeon in #redfoot it appears that the n3 serializer - is ignoring the base option to Graph.serialize...example follows: - - -- +def test_issue_248(): + """ + Ed Summers Thu, 24 May 2007 12:21:17 -0700 - #!/usr/bin/env python + As discussed with eikeon in #redfoot it appears that the n3 serializer + is ignoring the base option to Graph.serialize...example follows: - from rdflib.Graph import Graph - from rdflib.URIRef 
import URIRef - from rdflib import Literal, Namespace, RDF + -- - graph = Graph() - DC = Namespace('http://purl.org/dc/terms/') - SKOS = Namespace('http://www.w3.org/2004/02/skos/core#') - LCCO = Namespace('http://loc.gov/catdir/cpso/lcco/') + #!/usr/bin/env python - graph.bind('dc', DC) - graph.bind('skos', SKOS) - graph.bind('lcco', LCCO) + from rdflib.Graph import Graph + from rdflib.URIRef import URIRef + from rdflib import Literal, Namespace, RDF - concept = URIRef(LCCO['1']) - graph.add((concept, RDF.type, SKOS['Concept'])) - graph.add((concept, SKOS['prefLabel'], Literal('Scrapbooks'))) - graph.add((concept, DC['LCC'], Literal('AC999.0999 - AC999999.Z9999'))) + graph = Graph() + DC = Namespace('http://purl.org/dc/terms/') + SKOS = Namespace('http://www.w3.org/2004/02/skos/core#') + LCCO = Namespace('http://loc.gov/catdir/cpso/lcco/') - print graph.serialize(format='n3', base=LCCO) + graph.bind('dc', DC) + graph.bind('skos', SKOS) + graph.bind('lcco', LCCO) - -- + concept = URIRef(LCCO['1']) + graph.add((concept, RDF.type, SKOS['Concept'])) + graph.add((concept, SKOS['prefLabel'], Literal('Scrapbooks'))) + graph.add((concept, DC['LCC'], Literal('AC999.0999 - AC999999.Z9999'))) - Which generates: + print graph.serialize(format='n3', base=LCCO) - -- + -- - @prefix dc: . - @prefix rdf: . - @prefix skos: . + Which generates: - a skos:Concept; - dc:LCC "AC999.0999 - AC999999.Z9999"; - skos:prefLabel "Scrapbooks". + -- - -- + @prefix dc: . + @prefix rdf: . + @prefix skos: . - Notice + a skos:Concept; + dc:LCC "AC999.0999 - AC999999.Z9999"; + skos:prefLabel "Scrapbooks". 
- a skos:Concept; + -- - instead of: + Notice - <1> a skos:Concept; + a skos:Concept; - //Ed + instead of: - """ - graph = rdflib.Graph() - DC = rdflib.Namespace("http://purl.org/dc/terms/") - SKOS = rdflib.Namespace("http://www.w3.org/2004/02/skos/core#") - LCCO = rdflib.Namespace("http://loc.gov/catdir/cpso/lcco/") + <1> a skos:Concept; - graph.bind("dc", DC) - graph.bind("skos", SKOS) - graph.bind("lcco", LCCO) + //Ed - concept = rdflib.URIRef(LCCO["1"]) - graph.add((concept, rdflib.RDF.type, SKOS["Concept"])) - graph.add((concept, SKOS["prefLabel"], rdflib.Literal("Scrapbooks"))) - graph.add((concept, DC["LCC"], rdflib.Literal("AC999.0999 - AC999999.Z9999"))) - sg = graph.serialize(format="n3", base=LCCO) - # See issue 248 - # Actual test should be the inverse of the below ... - self.assertTrue("<1> a skos:Concept ;" in sg, sg) + """ + graph = rdflib.Graph() + DC = rdflib.Namespace("http://purl.org/dc/terms/") # noqa: N806 + SKOS = rdflib.Namespace("http://www.w3.org/2004/02/skos/core#") # noqa: N806 + LCCO = rdflib.Namespace("http://loc.gov/catdir/cpso/lcco/") # noqa: N806 + graph.bind("dc", DC) + graph.bind("skos", SKOS) + graph.bind("lcco", LCCO) -if __name__ == "__main__": - unittest.main() + concept = rdflib.URIRef(LCCO["1"]) + graph.add((concept, rdflib.RDF.type, SKOS["Concept"])) + graph.add((concept, SKOS["prefLabel"], rdflib.Literal("Scrapbooks"))) + graph.add((concept, DC["LCC"], rdflib.Literal("AC999.0999 - AC999999.Z9999"))) + sg = graph.serialize(format="n3", base=LCCO) + # See issue 248 + # Actual test should be the inverse of the below ... 
+ assert "<1> a skos:Concept ;" in sg, sg diff --git a/test/test_issues/test_issue274.py b/test/test_issues/test_issue274.py index a982577f8..bb66f976a 100644 --- a/test/test_issues/test_issue274.py +++ b/test/test_issues/test_issue274.py @@ -1,25 +1,25 @@ -from test.utils import eq_ from unittest import TestCase import pytest -from rdflib import RDFS, XSD, BNode, Graph, Literal, Namespace +from rdflib import RDFS, XSD, BNode, Graph, Literal from rdflib.plugins.sparql.operators import ( register_custom_function, unregister_custom_function, ) +from test.utils import eq_ +from test.utils.namespace import EGDO -EX = Namespace("http://example.org/") G = Graph() G.add((BNode(), RDFS.label, Literal("bnode"))) NS = { - "ex": EX, + "ex": EGDO, "rdfs": RDFS, "xsd": XSD, } -def query(querystr, initNs=NS, initBindings=None): +def query(querystr, initNs=NS, initBindings=None): # noqa: N803 return G.query(querystr, initNs=initNs, initBindings=initBindings) @@ -176,21 +176,21 @@ def f(x, y): return Literal("%s %s" % (x, y), datatype=XSD.string) def setUp(self): - register_custom_function(EX.f, self.f) + register_custom_function(EGDO.f, self.f) def tearDown(self): - unregister_custom_function(EX.f, self.f) + unregister_custom_function(EGDO.f, self.f) def test_register_twice_fail(self): with self.assertRaises(ValueError): - register_custom_function(EX.f, self.f) + register_custom_function(EGDO.f, self.f) def test_register_override(self): - register_custom_function(EX.f, self.f, override=True) + register_custom_function(EGDO.f, self.f, override=True) def test_wrong_unregister_warns(self): with pytest.warns(UserWarning): - unregister_custom_function(EX.notexist) + unregister_custom_function(EGDO.notexist) def test_f(self): res = query("""SELECT (ex:f(42, "hello") as ?x) {}""") diff --git a/test/test_issues/test_issue379.py b/test/test_issues/test_issue379.py index 348e3d0f4..bd9b4ec67 100644 --- a/test/test_issues/test_issue379.py +++ b/test/test_issues/test_issue379.py @@ -17,7 
+17,7 @@ class TestCase(unittest.TestCase): - def assertIsInstance(self, obj, cls, msg=None, *args, **kwargs): + def assertIsInstance(self, obj, cls, msg=None, *args, **kwargs): # noqa: N802 """Python < v2.7 compatibility. Assert 'obj' is instance of 'cls'""" try: f = super(TestCase, self).assertIsInstance diff --git a/test/test_issues/test_issue381.py b/test/test_issues/test_issue381.py index a5997515d..190ee506b 100644 --- a/test/test_issues/test_issue381.py +++ b/test/test_issues/test_issue381.py @@ -1,7 +1,6 @@ -from rdflib import BNode, Graph, Namespace +from rdflib import BNode, Graph from rdflib.compare import isomorphic - -NS = Namespace("http://example.org/") +from test.utils.namespace import EGDO def test_no_spurious_semicolon(): @@ -15,8 +14,8 @@ def test_no_spurious_semicolon(): expected.addN( t + (expected,) for t in [ - (NS.a, NS.b, NS.c), - (NS.a, NS.d, NS.e), + (EGDO.a, EGDO.b, EGDO.c), + (EGDO.a, EGDO.d, EGDO.e), ] ) got = Graph().query(sparql).graph @@ -34,8 +33,8 @@ def test_one_spurious_semicolon(): expected.addN( t + (expected,) for t in [ - (NS.a, NS.b, NS.c), - (NS.a, NS.d, NS.e), + (EGDO.a, EGDO.b, EGDO.c), + (EGDO.a, EGDO.d, EGDO.e), ] ) got = Graph().query(sparql).graph @@ -53,8 +52,8 @@ def test_one_spurious_semicolon_no_perdiod(): expected.addN( t + (expected,) for t in [ - (NS.a, NS.b, NS.c), - (NS.a, NS.d, NS.e), + (EGDO.a, EGDO.b, EGDO.c), + (EGDO.a, EGDO.d, EGDO.e), ] ) got = Graph().query(sparql).graph @@ -72,8 +71,8 @@ def test_two_spurious_semicolons_no_period(): expected.addN( t + (expected,) for t in [ - (NS.a, NS.b, NS.c), - (NS.a, NS.d, NS.e), + (EGDO.a, EGDO.b, EGDO.c), + (EGDO.a, EGDO.d, EGDO.e), ] ) got = Graph().query(sparql).graph @@ -91,8 +90,8 @@ def test_one_spurious_semicolons_bnode(): expected.addN( t + (expected,) for t in [ - (BNode("a"), NS.b, NS.c), - (BNode("a"), NS.d, NS.e), + (BNode("a"), EGDO.b, EGDO.c), + (BNode("a"), EGDO.d, EGDO.e), ] ) got = Graph().query(sparql).graph @@ -116,9 +115,9 @@ def 
test_pathological(): expected.addN( t + (expected,) for t in [ - (NS.a, NS.b, NS.c), - (NS.a, NS.d, NS.e), - (NS.a, NS.f, NS.g), + (EGDO.a, EGDO.b, EGDO.c), + (EGDO.a, EGDO.d, EGDO.e), + (EGDO.a, EGDO.f, EGDO.g), ] ) got = Graph().query(sparql).graph @@ -137,9 +136,9 @@ def test_mixing_spurious_semicolons_and_commas(): expected.addN( t + (expected,) for t in [ - (NS.a, NS.b, NS.c), - (NS.a, NS.d, NS.e), - (NS.a, NS.d, NS.f), + (EGDO.a, EGDO.b, EGDO.c), + (EGDO.a, EGDO.d, EGDO.e), + (EGDO.a, EGDO.d, EGDO.f), ] ) got = Graph().query(sparql).graph diff --git a/test/test_issues/test_issue446.py b/test/test_issues/test_issue446.py index 710a0d417..bf4992936 100644 --- a/test/test_issues/test_issue446.py +++ b/test/test_issues/test_issue446.py @@ -1,4 +1,3 @@ -# coding=utf-8 # test for https://github.com/RDFLib/rdflib/issues/446 from rdflib import Graph, URIRef diff --git a/test/test_issues/test_issue492.py b/test/test_issues/test_issue492.py index 83d2d938f..eb865313d 100644 --- a/test/test_issues/test_issue492.py +++ b/test/test_issues/test_issue492.py @@ -17,4 +17,4 @@ def test_issue492(): g = rdflib.Graph() # raised a TypeError: unorderable types: SequencePath() < SequencePath() - result = g.query(query) + result = g.query(query) # noqa: F841 diff --git a/test/test_issues/test_issue532.py b/test/test_issues/test_issue532.py index 0e9fa89f0..12c0d3e73 100644 --- a/test/test_issues/test_issue532.py +++ b/test/test_issues/test_issue532.py @@ -48,7 +48,7 @@ def test_issue532(): FILTER (?date >= "2004-06-20"^^xsd:date) } - """ + """ # noqa: N806 result = list(g.query(getnewMeps)) assert len(result) == 1 diff --git a/test/test_issues/test_issue535.py b/test/test_issues/test_issue535.py index dbb7113ae..03f75d601 100644 --- a/test/test_issues/test_issue535.py +++ b/test/test_issues/test_issue535.py @@ -10,10 +10,10 @@ def test_nquads_default_graph(): . 
""" - publicID = URIRef("http://example.org/g0") + publicID = URIRef("http://example.org/g0") # noqa: N806 ds.parse(data=data, format="nquads", publicID=publicID) - assert len(ds) == 3, len(g) + assert len(ds) == 3, len(g) # noqa: F821 assert len(list(ds.contexts())) == 2, len(list(ds.contexts())) assert len(ds.default_context) == 2, len(ds.get_context(publicID)) diff --git a/test/test_issues/test_issue545.py b/test/test_issues/test_issue545.py index 3c6efbc49..b623ec297 100644 --- a/test/test_issues/test_issue545.py +++ b/test/test_issues/test_issue545.py @@ -3,7 +3,7 @@ def test_issue(): - query = sparql.prepareQuery( + query = sparql.prepareQuery( # noqa: F841 """ SELECT DISTINCT ?property ?parent WHERE{ diff --git a/test/test_issues/test_issue563.py b/test/test_issues/test_issue563.py index 2b3abe728..879151733 100644 --- a/test/test_issues/test_issue563.py +++ b/test/test_issues/test_issue563.py @@ -1,6 +1,5 @@ -from rdflib import Graph, Literal, Namespace +from rdflib import Graph, Literal -EX = Namespace("http://example.org/") QUERY = """ PREFIX rdf: PREFIX rdfs: diff --git a/test/test_issues/test_issue604.py b/test/test_issues/test_issue604.py index cb5aaac99..905d6b82e 100644 --- a/test/test_issues/test_issue604.py +++ b/test/test_issues/test_issue604.py @@ -3,12 +3,12 @@ def test_issue604(): - EX = Namespace("http://ex.co/") + EX = Namespace("http://ex.co/") # noqa: N806 g = Graph() bn = BNode() g.add((EX.s, EX.p, bn)) c = Collection(g, bn, map(Literal, [1, 2, 4])) c[2] = Literal(3) got = list(g.objects(bn, RDF.rest / RDF.rest / RDF.first)) - expected = [Literal(3)] + expected = [Literal(3)] # noqa: F841 assert got == [Literal(3)], got diff --git a/test/test_issues/test_issue655.py b/test/test_issues/test_issue655.py index 0e852ddc4..323617655 100644 --- a/test/test_issues/test_issue655.py +++ b/test/test_issues/test_issue655.py @@ -1,46 +1,38 @@ -import unittest from decimal import Decimal from rdflib import XSD, Graph, Literal, Namespace, URIRef from 
rdflib.compare import to_isomorphic -class TestIssue655(unittest.TestCase): - def test_issue655(self): - # make sure that inf and nan are serialized correctly - dt = XSD["double"].n3() - self.assertEqual(Literal(float("inf"))._literal_n3(True), '"INF"^^%s' % dt) - self.assertEqual(Literal(float("-inf"))._literal_n3(True), '"-INF"^^%s' % dt) - self.assertEqual(Literal(float("nan"))._literal_n3(True), '"NaN"^^%s' % dt) +def test_issue655(): + # make sure that inf and nan are serialized correctly + dt = XSD["double"].n3() + assert Literal(float("inf"))._literal_n3(True) == '"INF"^^%s' % dt + assert Literal(float("-inf"))._literal_n3(True) == '"-INF"^^%s' % dt + assert Literal(float("nan"))._literal_n3(True) == '"NaN"^^%s' % dt - dt = XSD["decimal"].n3() - self.assertEqual(Literal(Decimal("inf"))._literal_n3(True), '"INF"^^%s' % dt) - self.assertEqual(Literal(Decimal("-inf"))._literal_n3(True), '"-INF"^^%s' % dt) - self.assertEqual(Literal(Decimal("nan"))._literal_n3(True), '"NaN"^^%s' % dt) + dt = XSD["decimal"].n3() + assert Literal(Decimal("inf"))._literal_n3(True) == '"INF"^^%s' % dt + assert Literal(Decimal("-inf"))._literal_n3(True) == '"-INF"^^%s' % dt + assert Literal(Decimal("nan"))._literal_n3(True) == '"NaN"^^%s' % dt - self.assertEqual( - Literal("inf", datatype=XSD["decimal"])._literal_n3(True), '"INF"^^%s' % dt - ) + assert Literal("inf", datatype=XSD["decimal"])._literal_n3(True) == '"INF"^^%s' % dt - # assert that non-numerical aren't changed - self.assertEqual(Literal("inf")._literal_n3(True), '"inf"') - self.assertEqual(Literal("nan")._literal_n3(True), '"nan"') + # assert that non-numerical aren't changed + assert Literal("inf")._literal_n3(True) == '"inf"' + assert Literal("nan")._literal_n3(True) == '"nan"' - PROV = Namespace("http://www.w3.org/ns/prov#") + PROV = Namespace("http://www.w3.org/ns/prov#") # noqa: N806 - bob = URIRef("http://example.org/object/Bob") + bob = URIRef("http://example.org/object/Bob") - # g1 is a simple graph with an 
infinite and a nan values - g1 = Graph() - g1.add((bob, PROV.value, Literal(float("inf")))) - g1.add((bob, PROV.value, Literal(float("nan")))) + # g1 is a simple graph with an infinite and a nan values + g1 = Graph() + g1.add((bob, PROV.value, Literal(float("inf")))) + g1.add((bob, PROV.value, Literal(float("nan")))) - # Build g2 out of the deserialisation of g1 serialisation - g2 = Graph() - g2.parse(data=g1.serialize(format="turtle"), format="turtle") + # Build g2 out of the deserialisation of g1 serialisation + g2 = Graph() + g2.parse(data=g1.serialize(format="turtle"), format="turtle") - self.assertTrue(to_isomorphic(g1) == to_isomorphic(g2)) - - -if __name__ == "__main__": - unittest.main() + assert to_isomorphic(g1) == to_isomorphic(g2) diff --git a/test/test_issues/test_issue733.py b/test/test_issues/test_issue733.py index 3c4340249..502cd07ef 100644 --- a/test/test_issues/test_issue733.py +++ b/test/test_issues/test_issue733.py @@ -5,73 +5,65 @@ subject or the object. """ -import unittest - from rdflib import Graph -from rdflib.namespace import Namespace - +from test.utils.namespace import EGDO -class TestIssue733(unittest.TestCase): - def test_issue_733(self): - g = Graph() - example = Namespace("http://example.org/") - g.add((example.S, example.P, example.O1)) - g.add((example.S, example.P, example.O2)) - q = """ - prefix ex: - select ?lexical_or_value ?ot ?gt where { - {SELECT (count(*) as ?lexical_or_value) where { - ?s ?p ?o . - FILTER (?s=ex:S) - }} - {SELECT (count(*) as ?ot) where { - ?s ?p ?o . - FILTER (?o=ex:O1) - }} - {SELECT (count(*) as ?gt) where { - ?s ?p ?o . 
- FILTER (?o!=ex:O1 && ?s!=ex:O2) - }} - } - """ - res = g.query(q) - assert len(res) == 1 - results = [[lit.toPython() for lit in line] for line in res] - assert results[0][0] == 2 - assert results[0][1] == 1 - assert results[0][2] == 1 - def test_issue_733_independant(self): - g = Graph() - example = Namespace("http://example.org/") - g.add((example.S, example.P, example.O1)) - g.add((example.S, example.P, example.O2)) - q = """ - prefix ex: - select ?lexical_or_value where { - {SELECT (count(*) as ?lexical_or_value) where { - ?s ?p ?o . - FILTER (?s=ex:S) - }} - } - """ - res = g.query(q) - assert len(res) == 1 - results = [[lit.toPython() for lit in line] for line in res] - assert results[0][0] == 2 - q = """ - prefix ex: - select ?lexical_or_value where { - {SELECT (count(*) as ?lexical_or_value) where { - ?s ?p ?o . - FILTER (?o=ex:O1) - }} - } - """ - res = g.query(q) - results = [[lit.toPython() for lit in line] for line in res] - assert results[0][0] == 1 +def test_issue_733(): + g = Graph() + g.add((EGDO.S, EGDO.P, EGDO.O1)) + g.add((EGDO.S, EGDO.P, EGDO.O2)) + q = """ + prefix ex: + select ?lexical_or_value ?ot ?gt where { + {SELECT (count(*) as ?lexical_or_value) where { + ?s ?p ?o . + FILTER (?s=ex:S) + }} + {SELECT (count(*) as ?ot) where { + ?s ?p ?o . + FILTER (?o=ex:O1) + }} + {SELECT (count(*) as ?gt) where { + ?s ?p ?o . + FILTER (?o!=ex:O1 && ?s!=ex:O2) + }} + } + """ + res = g.query(q) + assert len(res) == 1 + results = [[lit.toPython() for lit in line] for line in res] + assert results[0][0] == 2 + assert results[0][1] == 1 + assert results[0][2] == 1 -if __name__ == "__main__": - unittest.main() +def test_issue_733_independant(): + g = Graph() + g.add((EGDO.S, EGDO.P, EGDO.O1)) + g.add((EGDO.S, EGDO.P, EGDO.O2)) + q = """ + prefix ex: + select ?lexical_or_value where { + {SELECT (count(*) as ?lexical_or_value) where { + ?s ?p ?o . 
+ FILTER (?s=ex:S) + }} + } + """ + res = g.query(q) + assert len(res) == 1 + results = [[lit.toPython() for lit in line] for line in res] + assert results[0][0] == 2 + q = """ + prefix ex: + select ?lexical_or_value where { + {SELECT (count(*) as ?lexical_or_value) where { + ?s ?p ?o . + FILTER (?o=ex:O1) + }} + } + """ + res = g.query(q) + results = [[lit.toPython() for lit in line] for line in res] + assert results[0][0] == 1 diff --git a/test/test_issues/test_issue801.py b/test/test_issues/test_issue801.py index 0dfac052b..564711c73 100644 --- a/test/test_issues/test_issue801.py +++ b/test/test_issues/test_issue801.py @@ -1,22 +1,14 @@ """ Issue 801 - Problem with prefixes created for URIs containing %20 """ -import unittest -from rdflib import BNode, Graph, Literal, Namespace +from rdflib import BNode, Graph, Literal +from test.utils.namespace import EGDO -class TestIssue801(unittest.TestCase): - def test_issue_801(self): - g = Graph() - example = Namespace("http://example.org/") - g.bind("", example) - node = BNode() - g.add((node, example["first%20name"], Literal("John"))) - self.assertEqual( - g.serialize(format="turtle").split("\n")[-3], '[] :first%20name "John" .' - ) - - -if __name__ == "__main__": - unittest.main() +def test_issue_801(): + g = Graph() + g.bind("", EGDO) + node = BNode() + g.add((node, EGDO["first%20name"], Literal("John"))) + assert g.serialize(format="turtle").split("\n")[-3] == '[] :first%20name "John" .' 
diff --git a/test/test_issues/test_issue910.py b/test/test_issues/test_issue910.py index 2d6082fa9..a703e2f2f 100644 --- a/test/test_issues/test_issue910.py +++ b/test/test_issues/test_issue910.py @@ -1,66 +1,62 @@ -import unittest - from rdflib import Graph -class TestIssue910(unittest.TestCase): - def testA(self): - g = Graph() - q = g.query( - """ - SELECT * { - { BIND ("a" AS ?a) } - UNION - { BIND ("a" AS ?a) } - } - """ - ) - self.assertEqual(len(q) == 2, True) - - def testB(self): - g = Graph() - q = g.query( - """ - SELECT * { - { BIND ("a" AS ?a) } - UNION - { VALUES ?a { "a" } } - UNION - { SELECT ("a" AS ?a) {} } - } - """ - ) - self.assertEqual(len(q) == 3, True) - - def testC(self): - g = Graph() - q = g.query( - """ - SELECT * { - { BIND ("a" AS ?a) } - UNION - { VALUES ?a { "a" } } - UNION - { SELECT ("b" AS ?a) {} } - } - """ - ) - self.assertEqual(len(q) == 3, True) - - def testD(self): - g = Graph() - q = g.query( - """SELECT * { - { BIND ("a" AS ?a) } - UNION - { VALUES ?a { "b" } } - UNION - { SELECT ("c" AS ?a) {} } - } - """ - ) - self.assertEqual(len(q) == 3, True) - - -if __name__ == "__main__": - unittest.main() +def test_a(): + g = Graph() + q = g.query( + """ + SELECT * { + { BIND ("a" AS ?a) } + UNION + { BIND ("a" AS ?a) } + } + """ + ) + assert len(q) == 2 + + +def test_b(): + g = Graph() + q = g.query( + """ + SELECT * { + { BIND ("a" AS ?a) } + UNION + { VALUES ?a { "a" } } + UNION + { SELECT ("a" AS ?a) {} } + } + """ + ) + assert len(q) == 3 + + +def test_c(): + g = Graph() + q = g.query( + """ + SELECT * { + { BIND ("a" AS ?a) } + UNION + { VALUES ?a { "a" } } + UNION + { SELECT ("b" AS ?a) {} } + } + """ + ) + assert len(q) == 3 + + +def test_d(): + g = Graph() + q = g.query( + """SELECT * { + { BIND ("a" AS ?a) } + UNION + { VALUES ?a { "b" } } + UNION + { SELECT ("c" AS ?a) {} } + } + """ + ) + assert len(q) == 3 diff --git a/test/test_issues/test_issue920.py b/test/test_issues/test_issue920.py index 1375012a0..53b379156 100644 
--- a/test/test_issues/test_issue920.py +++ b/test/test_issues/test_issue920.py @@ -9,28 +9,22 @@ g.parse(data=' .', format='n3') """ -import unittest from rdflib import Graph -class TestIssue920(unittest.TestCase): - def test_issue_920(self): - g = Graph() - # NT tests - g.parse(data=" .", format="nt") - g.parse(data=" .", format="nt") - g.parse(data=" .", format="nt") +def test_issue_920(): + g = Graph() + # NT tests + g.parse(data=" .", format="nt") + g.parse(data=" .", format="nt") + g.parse(data=" .", format="nt") - # related parser tests - g.parse(data=" .", format="turtle") - g.parse(data=" .", format="turtle") - g.parse(data=" .", format="turtle") + # related parser tests + g.parse(data=" .", format="turtle") + g.parse(data=" .", format="turtle") + g.parse(data=" .", format="turtle") - g.parse(data=" .", format="n3") - g.parse(data=" .", format="n3") - g.parse(data=" .", format="n3") - - -if __name__ == "__main__": - unittest.main() + g.parse(data=" .", format="n3") + g.parse(data=" .", format="n3") + g.parse(data=" .", format="n3") diff --git a/test/test_issues/test_issue923.py b/test/test_issues/test_issue923.py index 0d9d709f2..87bb4b827 100644 --- a/test/test_issues/test_issue923.py +++ b/test/test_issues/test_issue923.py @@ -1,6 +1,7 @@ """ Issue 923: split charset off of Content-Type before looking up Result-parsing plugin. 
""" + from io import StringIO from rdflib.query import Result diff --git a/test/test_issues/test_issue953.py b/test/test_issues/test_issue953.py index 1d3cbda87..289712aec 100644 --- a/test/test_issues/test_issue953.py +++ b/test/test_issues/test_issue953.py @@ -1,15 +1,9 @@ -import unittest from fractions import Fraction from rdflib import Literal, URIRef -class TestIssue953(unittest.TestCase): - def test_issue_939(self): - lit = Literal(Fraction("2/3")) - assert lit.datatype == URIRef("http://www.w3.org/2002/07/owl#rational") - assert lit.n3() == '"2/3"^^' - - -if __name__ == "__main__": - unittest.main() +def test_issue_939(): + lit = Literal(Fraction("2/3")) + assert lit.datatype == URIRef("http://www.w3.org/2002/07/owl#rational") + assert lit.n3() == '"2/3"^^' diff --git a/test/test_issues/test_issue977.py b/test/test_issues/test_issue977.py index 8800a8dfb..d3f4c83ef 100644 --- a/test/test_issues/test_issue977.py +++ b/test/test_issues/test_issue977.py @@ -1,41 +1,34 @@ -import unittest +import pytest from rdflib import RDFS, Graph, Literal, URIRef -class TestIssue977(unittest.TestCase): - def setUp(self): - self.graph = Graph() - # Bind prefixes. - self.graph.bind("isbn", "urn:isbn:") - self.graph.bind("webn", "http://w3c.org/example/isbn/") - # Populate graph. - self.graph.add( - (URIRef("urn:isbn:1503280780"), RDFS.label, Literal("Moby Dick")) - ) - self.graph.add( - ( - URIRef("http://w3c.org/example/isbn/1503280780"), - RDFS.label, - Literal("Moby Dick"), - ) +@pytest.fixture(scope="function") +def graph() -> Graph: + graph = Graph() + # Bind prefixes. + graph.bind("isbn", "urn:isbn:") + graph.bind("webn", "http://w3c.org/example/isbn/") + # Populate graph. 
+ graph.add((URIRef("urn:isbn:1503280780"), RDFS.label, Literal("Moby Dick"))) + graph.add( + ( + URIRef("http://w3c.org/example/isbn/1503280780"), + RDFS.label, + Literal("Moby Dick"), ) + ) + return graph - def test_namespace_manager(self): - assert "isbn", "urn:isbn:" in tuple(self.graph.namespaces()) - assert "webn", "http://w3c.org/example/isbn/" in tuple(self.graph.namespaces()) - def test_turtle_serialization(self): - serialization = self.graph.serialize(None, format="turtle") - print(f"Test Issue 977, serialization output:\n---\n{serialization}---") - # Test serialization. - assert ( - "@prefix webn:" in serialization - ), "Prefix webn not found in serialization!" - assert ( - "@prefix isbn:" in serialization - ), "Prefix isbn not found in serialization!" +def test_namespace_manager(graph: Graph): + assert "isbn", "urn:isbn:" in tuple(graph.namespaces()) + assert "webn", "http://w3c.org/example/isbn/" in tuple(graph.namespaces()) -if __name__ == "__main__": - unittest.main() +def test_turtle_serialization(graph: Graph): + serialization = graph.serialize(None, format="turtle") + print(f"Test Issue 977, serialization output:\n---\n{serialization}---") + # Test serialization. + assert "@prefix webn:" in serialization, "Prefix webn not found in serialization!" + assert "@prefix isbn:" in serialization, "Prefix isbn not found in serialization!" 
diff --git a/test/test_issues/test_issue_git_336.py b/test/test_issues/test_issue_git_336.py index 511e68614..212f77b0f 100644 --- a/test/test_issues/test_issue_git_336.py +++ b/test/test_issues/test_issue_git_336.py @@ -27,7 +27,7 @@ def test_ns_localname_roundtrip() -> None: - XNS = rdflib.Namespace("http://example.net/fs") + XNS = rdflib.Namespace("http://example.net/fs") # noqa: N806 g = rdflib.Graph() g.bind("xns", str(XNS)) diff --git a/test/test_literal/test_datetime.py b/test/test_literal/test_datetime.py index a92a93dc1..0f24d73ff 100644 --- a/test/test_literal/test_datetime.py +++ b/test/test_literal/test_datetime.py @@ -17,7 +17,7 @@ def test_equality(self): def test_microseconds(self): dt1 = datetime(2009, 6, 15, 23, 37, 6, 522630) - l = Literal(dt1) + l = Literal(dt1) # noqa: E741 # datetime with microseconds should be cast as a literal with using # XML Schema dateTime as the literal datatype @@ -29,14 +29,18 @@ def test_microseconds(self): def test_to_python(self): dt = "2008-12-01T18:02:00" - l = Literal(dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime")) + l = Literal( # noqa: E741 + dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime") + ) assert isinstance(l.toPython(), datetime) assert l.toPython().isoformat() == dt def test_timezone_z(self): dt = "2008-12-01T18:02:00.522630Z" - l = Literal(dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime")) + l = Literal( # noqa: E741 + dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime") + ) assert isinstance(l.toPython(), datetime) assert datetime_isoformat( @@ -46,21 +50,27 @@ def test_timezone_z(self): def test_timezone_offset(self): dt = "2010-02-10T12:36:00+03:00" - l = Literal(dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime")) + l = Literal( # noqa: E741 + dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime") + ) assert isinstance(l.toPython(), datetime) assert l.toPython().isoformat() == dt def test_timezone_offset_to_utc(self): 
dt = "2010-02-10T12:36:00+03:00" - l = Literal(dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime")) + l = Literal( # noqa: E741 + dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime") + ) utc_dt = l.toPython().astimezone(UTC) assert datetime_isoformat(utc_dt) == "2010-02-10T09:36:00Z" def test_timezone_offset_millisecond(self): dt = "2011-01-16T19:39:18.239743+01:00" - l = Literal(dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime")) + l = Literal( # noqa: E741 + dt, datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime") + ) assert isinstance(l.toPython(), datetime) assert l.toPython().isoformat() == dt diff --git a/test/test_literal/test_duration.py b/test/test_literal/test_duration.py index c4e199c9a..8973e6b65 100644 --- a/test/test_literal/test_duration.py +++ b/test/test_literal/test_duration.py @@ -8,17 +8,17 @@ class TestDuration: def test_to_python_timedelta(self): - l = Literal("P4DT5H6M7S", datatype=XSD.dayTimeDuration) + l = Literal("P4DT5H6M7S", datatype=XSD.dayTimeDuration) # noqa: E741 assert isinstance(l.toPython(), timedelta) assert l.toPython() == parse_duration("P4DT5H6M7S") def test_to_python_ym_duration(self): - l = Literal("P1Y2M", datatype=XSD.yearMonthDuration) + l = Literal("P1Y2M", datatype=XSD.yearMonthDuration) # noqa: E741 assert isinstance(l.toPython(), Duration) assert l.toPython() == parse_duration("P1Y2M") def test_to_python_ymdhms_duration(self): - l = Literal("P1Y2M4DT5H6M7S", datatype=XSD.duration) + l = Literal("P1Y2M4DT5H6M7S", datatype=XSD.duration) # noqa: E741 assert isinstance(l.toPython(), Duration) assert l.toPython() == parse_duration("P1Y2M4DT5H6M7S") diff --git a/test/test_literal/test_hex_binary.py b/test/test_literal/test_hex_binary.py index 029579237..2ade8beda 100644 --- a/test/test_literal/test_hex_binary.py +++ b/test/test_literal/test_hex_binary.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import binascii from rdflib import XSD, Literal @@ -17,7 +15,7 @@ def 
_test_integer(self, i): len_hex_i = len(hex_i) hex_i = hex_i.zfill(len_hex_i + len_hex_i % 2) - l = Literal(hex_i, datatype=XSD.hexBinary) + l = Literal(hex_i, datatype=XSD.hexBinary) # noqa: E741 bin_i = l.toPython() assert int(binascii.hexlify(bin_i), 16) == i diff --git a/test/test_literal/test_literal.py b/test/test_literal/test_literal.py index 074abe1e6..59ebab5dd 100644 --- a/test/test_literal/test_literal.py +++ b/test/test_literal/test_literal.py @@ -1,3 +1,16 @@ +from __future__ import annotations + +import builtins +import datetime +import logging +from decimal import Decimal +from typing import Any, Callable, Generator, Optional, Type, Union + +from test.utils import affix_tuples +from test.utils.literal import LiteralChecker, literal_idfn +from test.utils.namespace import EGDC +from test.utils.outcome import OutcomeChecker, OutcomePrimitive, OutcomePrimitives + # NOTE: The config below enables strict mode for mypy. # mypy: no_ignore_errors # mypy: warn_unused_configs, disallow_any_generics @@ -7,20 +20,20 @@ # mypy: no_implicit_optional, warn_redundant_casts, warn_unused_ignores # mypy: warn_return_any, no_implicit_reexport, strict_equality -import datetime -import logging -from contextlib import ExitStack -from decimal import Decimal -from test.utils import affix_tuples -from test.utils.literal import LiteralChecker -from typing import Any, Callable, Generator, Iterable, Optional, Type, Union + +try: + import html5lib as _ # noqa: F401 + + _HAVE_HTML5LIB = True +except ImportError: + _HAVE_HTML5LIB = False import isodate import pytest import rdflib # needed for eval(repr(...)) below from rdflib import XSD -from rdflib.namespace import RDF, Namespace +from rdflib.namespace import RDF from rdflib.term import ( _XSD_BOOLEAN, _XSD_DATE, @@ -38,8 +51,6 @@ bind, ) -EGNS = Namespace("http://example.com/") - @pytest.fixture() def clear_bindings() -> Generator[None, None, None]: @@ -49,894 +60,845 @@ def clear_bindings() -> Generator[None, None, None]: 
_reset_bindings() -class TestLiteral: - def test_repr_apostrophe(self) -> None: - a = rdflib.Literal("'") - b = eval(repr(a)) - assert a == b +def test_repr_apostrophe() -> None: + a = rdflib.Literal("'") + b = eval(repr(a)) + assert a == b + - def test_repr_quote(self) -> None: - a = rdflib.Literal('"') - b = eval(repr(a)) - assert a == b +def test_repr_quote() -> None: + a = rdflib.Literal('"') + b = eval(repr(a)) + assert a == b - def test_backslash(self) -> None: - d = r""" + +def test_backslash() -> None: + d = r""" - - a\b - +xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" +xmlns:foo="http://example.org/foo#"> + + a\b + """ - g = rdflib.Graph() - g.parse(data=d, format="xml") - a = rdflib.Literal("a\\b") - b = list(g.objects())[0] - assert a == b - - def test_literal_from_bool(self) -> None: - _l = rdflib.Literal(True) - assert _l.datatype == rdflib.XSD["boolean"] - - -class TestNewPT: - # NOTE: TestNewPT is written for pytest so that pytest features like - # parametrize can be used. - # New tests should be added here instead of in TestNew. - @pytest.mark.parametrize( - "lang, exception_type", - [ - ({}, TypeError), - ([], TypeError), - (1, TypeError), - (b"en", TypeError), - ("999", ValueError), - ("-", ValueError), - ], - ) - def test_cant_pass_invalid_lang( - self, - lang: Any, - exception_type: Type[Exception], - ) -> None: - """ - Construction of Literal fails if the language tag is invalid. 
- """ - with pytest.raises(exception_type): - Literal("foo", lang=lang) - - @pytest.mark.parametrize( - "lexical, datatype, is_ill_typed", - [ - ("true", XSD.boolean, False), - ("1", XSD.boolean, False), - (b"false", XSD.boolean, False), - (b"0", XSD.boolean, False), - ("yes", XSD.boolean, True), - ("200", XSD.byte, True), - (b"-128", XSD.byte, False), - ("127", XSD.byte, False), - ("255", XSD.unsignedByte, False), - ("-100", XSD.unsignedByte, True), - (b"200", XSD.unsignedByte, False), - (b"64300", XSD.short, True), - ("-6000", XSD.short, False), - ("1000000", XSD.nonNegativeInteger, False), - ("-100", XSD.nonNegativeInteger, True), - ("a", XSD.double, True), - ("0", XSD.double, False), - ("0.1", XSD.double, False), - ("0.1", XSD.decimal, False), - ("0.g", XSD.decimal, True), - ("b", XSD.integer, True), - ("2147483647", XSD.int, False), - ("2147483648", XSD.int, True), - ("2147483648", XSD.integer, False), - ("valid ASCII", XSD.string, False), - pytest.param("هذا رجل ثلج⛄", XSD.string, False, id="snowman-ar"), - ("More ASCII", None, None), - ("Not a valid time", XSD.time, True), - ("Not a valid date", XSD.date, True), - ("7264666c6962", XSD.hexBinary, False), - # RDF.langString is not a recognized datatype IRI as we assign no literal value to it, though this should likely change. - ("English string", RDF.langString, None), - # The datatypes IRIs below should never be recognized. - ("[p]", EGNS.unrecognized, None), - ], - ) - def test_ill_typed_literals( - self, - lexical: Union[bytes, str], - datatype: Optional[URIRef], - is_ill_typed: Optional[bool], - ) -> None: - """ - ill_typed has the correct value. - """ - lit = Literal(lexical, datatype=datatype) - assert lit.ill_typed is is_ill_typed - if is_ill_typed is False: - # If the literal is not ill typed it should have a value associated with it. 
- assert lit.value is not None - - @pytest.mark.parametrize( - "a, b, op, expected_result", - [ - pytest.param( - Literal("20:00:00", datatype=_XSD_STRING), - Literal("23:30:00", datatype=_XSD_STRING), - "bminusa", - TypeError(r"unsupported operand type\(s\) for -: 'str' and 'str'"), - id="Attempt to subtract strings", - ), - pytest.param( - Literal("20:00:00", datatype=_XSD_TIME), - Literal("23:30:00", datatype=_XSD_STRING), - "aplusb", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#string to a Literal of datatype http://www.w3.org/2001/XMLSchema#time" - ), - id="Attempt to add string to time", - ), - pytest.param( - Literal("20:00:00", datatype=_XSD_TIME), - Literal("23:30:00", datatype=_XSD_STRING), - "bminusa", - TypeError( - "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#time from a Literal of datatype http://www.w3.org/2001/XMLSchema#string" - ), - id="Attempt to subtract string from time", - ), - pytest.param( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("12", datatype=_XSD_INTEGER), - "aplusb", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#integer to a Literal of datatype http://www.w3.org/2001/XMLSchema#time" - ), - id="Attempt to add integer to time", + g = rdflib.Graph() + g.parse(data=d, format="xml") + a = rdflib.Literal("a\\b") + b = list(g.objects())[0] + assert a == b + + +def test_literal_from_bool() -> None: + _l = rdflib.Literal(True) + assert _l.datatype == rdflib.XSD["boolean"] + + +@pytest.mark.parametrize( + "lang, exception_type", + [ + ({}, TypeError), + ([], TypeError), + (1, TypeError), + (b"en", TypeError), + ("999", ValueError), + ("-", ValueError), + ], +) +def test_cant_pass_invalid_lang( + lang: Any, + exception_type: Type[Exception], +) -> None: + """ + Construction of Literal fails if the language tag is invalid. 
+ """ + with pytest.raises(exception_type): + Literal("foo", lang=lang) + + +@pytest.mark.parametrize( + "lexical, datatype, is_ill_typed", + [ + ("true", XSD.boolean, False), + ("1", XSD.boolean, False), + (b"false", XSD.boolean, False), + (b"0", XSD.boolean, False), + ("yes", XSD.boolean, True), + ("200", XSD.byte, True), + (b"-128", XSD.byte, False), + ("127", XSD.byte, False), + ("255", XSD.unsignedByte, False), + ("-100", XSD.unsignedByte, True), + (b"200", XSD.unsignedByte, False), + (b"64300", XSD.short, True), + ("-6000", XSD.short, False), + ("1000000", XSD.nonNegativeInteger, False), + ("-100", XSD.nonNegativeInteger, True), + ("a", XSD.double, True), + ("0", XSD.double, False), + ("0.1", XSD.double, False), + ("0.1", XSD.decimal, False), + ("0.g", XSD.decimal, True), + ("b", XSD.integer, True), + ("2147483647", XSD.int, False), + ("2147483648", XSD.int, True), + ("2147483648", XSD.integer, False), + ("valid ASCII", XSD.string, False), + pytest.param("هذا رجل ثلج⛄", XSD.string, False, id="snowman-ar"), + ("More ASCII", None, None), + ("Not a valid time", XSD.time, True), + ("Not a valid date", XSD.date, True), + ("7264666c6962", XSD.hexBinary, False), + # RDF.langString is not a recognized datatype IRI as we assign no literal value to it, though this should likely change. + ("English string", RDF.langString, None), + # The datatypes IRIs below should never be recognized. + ("[p]", EGDC.unrecognized, None), + ], +) +def test_ill_typed_literals( + lexical: Union[bytes, str], + datatype: Optional[URIRef], + is_ill_typed: Optional[bool], +) -> None: + """ + ill_typed has the correct value. + """ + lit = Literal(lexical, datatype=datatype) + assert lit.ill_typed is is_ill_typed + if is_ill_typed is False: + # If the literal is not ill typed it should have a value associated with it. 
+ assert lit.value is not None + + +@pytest.mark.parametrize( + "a, b, op, expected_result", + [ + pytest.param( + Literal("20:00:00", datatype=_XSD_STRING), + Literal("23:30:00", datatype=_XSD_STRING), + "bminusa", + TypeError(r"unsupported operand type\(s\) for -: 'str' and 'str'"), + id="Attempt to subtract strings", + ), + pytest.param( + Literal("20:00:00", datatype=_XSD_TIME), + Literal("23:30:00", datatype=_XSD_STRING), + "aplusb", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#string to a Literal of datatype http://www.w3.org/2001/XMLSchema#time" ), - pytest.param( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("12", datatype=_XSD_INTEGER), - "bplusa", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#time to a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" - ), - id="Attempt to add time to integer", + id="Attempt to add string to time", + ), + pytest.param( + Literal("20:00:00", datatype=_XSD_TIME), + Literal("23:30:00", datatype=_XSD_STRING), + "bminusa", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#time from a Literal of datatype http://www.w3.org/2001/XMLSchema#string" ), - pytest.param( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("12", datatype=_XSD_INTEGER), - "aminusb", - TypeError( - "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#integer from a Literal of datatype http://www.w3.org/2001/XMLSchema#time" - ), - id="Attempt to subtract integer from time", + id="Attempt to subtract string from time", + ), + pytest.param( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("12", datatype=_XSD_INTEGER), + "aplusb", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#integer to a Literal of datatype http://www.w3.org/2001/XMLSchema#time" ), - pytest.param( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("12", datatype=_XSD_INTEGER), - "bminusa", - TypeError( - 
"Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#time from a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" - ), - id="Attempt to subtract time from integer", + id="Attempt to add integer to time", + ), + pytest.param( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("12", datatype=_XSD_INTEGER), + "bplusa", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#time to a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" ), - pytest.param( - Literal("12", datatype=_XSD_INTEGER), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "aplusb", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#duration to a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" - ), - id="Attempt to add duration to integer", + id="Attempt to add time to integer", + ), + pytest.param( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("12", datatype=_XSD_INTEGER), + "aminusb", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#integer from a Literal of datatype http://www.w3.org/2001/XMLSchema#time" ), - pytest.param( - Literal("12", datatype=_XSD_INTEGER), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "bplusa", - TypeError( - "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#integer to a Literal of datatype http://www.w3.org/2001/XMLSchema#duration" - ), - id="Attempt to add integer to duration", + id="Attempt to subtract integer from time", + ), + pytest.param( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("12", datatype=_XSD_INTEGER), + "bminusa", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#time from a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" ), - pytest.param( - Literal("12", datatype=_XSD_INTEGER), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "aminusb", - TypeError( - "Cannot subtract a Literal of datatype 
http://www.w3.org/2001/XMLSchema#duration from a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" - ), - id="Attempt to subtract duration from integer", + id="Attempt to subtract time from integer", + ), + pytest.param( + Literal("12", datatype=_XSD_INTEGER), + Literal("P122DT15H58M", datatype=_XSD_DURATION), + "aplusb", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#duration to a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" ), - pytest.param( - Literal("12", datatype=_XSD_INTEGER), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "bminusa", - TypeError( - "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#integer from a Literal of datatype http://www.w3.org/2001/XMLSchema#duration" - ), - id="Attempt to subtract integer from duration", + id="Attempt to add duration to integer", + ), + pytest.param( + Literal("12", datatype=_XSD_INTEGER), + Literal("P122DT15H58M", datatype=_XSD_DURATION), + "bplusa", + TypeError( + "Cannot add a Literal of datatype http://www.w3.org/2001/XMLSchema#integer to a Literal of datatype http://www.w3.org/2001/XMLSchema#duration" ), - ( - Literal("2006-01-01T20:50:00", datatype=_XSD_DATETIME), - Literal("2006-02-01T20:50:00", datatype=_XSD_DATETIME), - "bminusa", - Literal("P31D", datatype=_XSD_DURATION), + id="Attempt to add integer to duration", + ), + pytest.param( + Literal("12", datatype=_XSD_INTEGER), + Literal("P122DT15H58M", datatype=_XSD_DURATION), + "aminusb", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#duration from a Literal of datatype http://www.w3.org/2001/XMLSchema#integer" ), - ( - Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), - Literal("2006-05-01T20:50:00", datatype=_XSD_DATETIME), - "bminusa", - Literal("P119D", datatype=_XSD_DURATION), + id="Attempt to subtract duration from integer", + ), + pytest.param( + Literal("12", datatype=_XSD_INTEGER), + Literal("P122DT15H58M", 
datatype=_XSD_DURATION), + "bminusa", + TypeError( + "Cannot subtract a Literal of datatype http://www.w3.org/2001/XMLSchema#integer from a Literal of datatype http://www.w3.org/2001/XMLSchema#duration" ), - ( - Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), - Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), - "aminusb", - Literal("-P122DT15H58M", datatype=_XSD_DURATION), + id="Attempt to subtract integer from duration", + ), + ( + Literal("2006-01-01T20:50:00", datatype=_XSD_DATETIME), + Literal("2006-02-01T20:50:00", datatype=_XSD_DATETIME), + "bminusa", + Literal("P31D", datatype=_XSD_DURATION), + ), + ( + Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), + Literal("2006-05-01T20:50:00", datatype=_XSD_DATETIME), + "bminusa", + Literal("P119D", datatype=_XSD_DURATION), + ), + ( + Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), + Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), + "aminusb", + Literal("-P122DT15H58M", datatype=_XSD_DURATION), + ), + ( + Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), + Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), + "bminusa", + Literal("P122DT15H58M", datatype=_XSD_DURATION), + ), + ( + Literal("2006-07-01T20:52:00", datatype=_XSD_DATE), + Literal("2006-11-01T12:50:00", datatype=_XSD_DATE), + "bminusa", + Literal("P123D", datatype=_XSD_DURATION), + ), + ( + Literal("2006-08-01", datatype=_XSD_DATE), + Literal("2006-11-01", datatype=_XSD_DATE), + "bminusa", + Literal("P92D", datatype=_XSD_DURATION), + ), + ( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("12:50:00", datatype=_XSD_TIME), + "bminusa", + Literal("-PT8H2M", datatype=_XSD_DURATION), + ), + ( + Literal("20:00:00", datatype=_XSD_TIME), + Literal("23:30:00", datatype=_XSD_TIME), + "bminusa", + Literal("PT3H30M", datatype=_XSD_DURATION), + ), + ( + Literal("2006-01-01T20:50:00", datatype=_XSD_DATETIME), + Literal("P31D", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-02-01T20:50:00", datatype=_XSD_DATETIME), 
+ ), + ( + Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), + Literal("P119D", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-05-01T20:50:00", datatype=_XSD_DATETIME), + ), + ( + Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), + Literal("P122DT15H58M", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), + ), + ( + Literal("2006-07-01T20:52:00", datatype=_XSD_DATE), + Literal("P123D", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-11-01T12:50:00", datatype=_XSD_DATE), + ), + ( + Literal("2006-08-01", datatype=_XSD_DATE), + Literal("P92D", datatype=_XSD_DURATION), + "aplusb", + Literal("2006-11-01", datatype=_XSD_DATE), + ), + ( + Literal("20:52:00", datatype=_XSD_TIME), + Literal("-PT8H2M", datatype=_XSD_DURATION), + "aplusb", + Literal("12:50:00", datatype=_XSD_TIME), + ), + ( + Literal("20:00:00", datatype=_XSD_TIME), + Literal("PT3H30M", datatype=_XSD_DURATION), + "aplusb", + Literal("23:30:00", datatype=_XSD_TIME), + ), + ( + Literal("3", datatype=_XSD_INTEGER), + Literal("5", datatype=_XSD_INTEGER), + "aplusb", + Literal("8", datatype=_XSD_INTEGER), + ), + ( + Literal("3", datatype=_XSD_INTEGER), + Literal("5", datatype=_XSD_INTEGER), + "bminusa", + Literal("2", datatype=_XSD_INTEGER), + ), + ( + Literal("5.3", datatype=_XSD_FLOAT), + Literal("8.5", datatype=_XSD_FLOAT), + "bminusa", + Literal("3.2", datatype=_XSD_FLOAT), + ), + ( + Literal("5.3", datatype=_XSD_DECIMAL), + Literal("8.5", datatype=_XSD_DECIMAL), + "bminusa", + Literal("3.2", datatype=_XSD_DECIMAL), + ), + ( + Literal("5.3", datatype=_XSD_DOUBLE), + Literal("8.5", datatype=_XSD_DOUBLE), + "aminusb", + Literal("-3.2", datatype=_XSD_DOUBLE), + ), + ( + Literal("8.5", datatype=_XSD_DOUBLE), + Literal("5.3", datatype=_XSD_DOUBLE), + "aminusb", + Literal("3.2", datatype=_XSD_DOUBLE), + ), + ( + Literal(isodate.Duration(hours=1)), + Literal(isodate.Duration(hours=1)), + "aplusb", + Literal(isodate.Duration(hours=2)), + ), + ( + 
Literal(datetime.timedelta(days=1)), + Literal(datetime.timedelta(days=1)), + "aplusb", + Literal(datetime.timedelta(days=2)), + ), + ( + Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(isodate.Duration(hours=1)), + "aplusb", + Literal("05:23:01.000384", datatype=XSD.time), + ), + ( + Literal(datetime.date.fromisoformat("2011-11-04")), + Literal(isodate.Duration(days=1)), + "aplusb", + Literal("2011-11-05", datatype=XSD.date), + ), + ( + Literal(datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00")), + Literal(isodate.Duration(days=1)), + "aplusb", + Literal("2011-11-05T00:05:23.283000+00:00", datatype=XSD.dateTime), + ), + ( + Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(datetime.timedelta(hours=1)), + "aplusb", + Literal("05:23:01.000384", datatype=XSD.time), + ), + ( + Literal(datetime.date.fromisoformat("2011-11-04")), + Literal(datetime.timedelta(days=1)), + "aplusb", + Literal("2011-11-05", datatype=XSD.date), + ), + ( + Literal(datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00")), + Literal(datetime.timedelta(days=1)), + "aplusb", + Literal("2011-11-05T00:05:23.283000+00:00", datatype=XSD.dateTime), + ), + ( + Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(isodate.Duration(hours=1)), + "aminusb", + Literal("03:23:01.000384", datatype=XSD.time), + ), + ( + Literal(datetime.date.fromisoformat("2011-11-04")), + Literal(isodate.Duration(days=1)), + "aminusb", + Literal("2011-11-03", datatype=XSD.date), + ), + ( + Literal(datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00")), + Literal(isodate.Duration(days=1)), + "aminusb", + Literal("2011-11-03T00:05:23.283000+00:00", datatype=XSD.dateTime), + ), + ( + Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(datetime.timedelta(hours=1)), + "aminusb", + Literal("03:23:01.000384", datatype=XSD.time), + ), + ( + Literal(datetime.date.fromisoformat("2011-11-04")), + Literal(datetime.timedelta(days=1)), + 
"aminusb", + Literal("2011-11-03", datatype=XSD.date), + ), + ( + Literal(datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00")), + Literal(datetime.timedelta(days=1)), + "aminusb", + Literal("2011-11-03T00:05:23.283000+00:00", datatype=XSD.dateTime), + ), + ( + Literal("5", datatype=XSD.integer), + Literal("10", datatype=XSD.integer), + "bminusa", + Literal("5", datatype=XSD.integer), + ), + ( + Literal("5"), + Literal("10", datatype=_XSD_INTEGER), + "aminusb", + TypeError( + "Minuend Literal must have Numeric, Date, Datetime or Time datatype." ), - ( - Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), - Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), - "bminusa", - Literal("P122DT15H58M", datatype=_XSD_DURATION), + ), + ( + Literal("5"), + Literal("10", datatype=_XSD_INTEGER), + "bminusa", + TypeError( + "Subtrahend Literal must have Numeric, Date, Datetime or Time datatype." ), + ), + *affix_tuples( ( - Literal("2006-07-01T20:52:00", datatype=_XSD_DATE), - Literal("2006-11-01T12:50:00", datatype=_XSD_DATE), - "bminusa", - Literal("P123D", datatype=_XSD_DURATION), + Literal("5", datatype=_XSD_INTEGER), + Literal("10", datatype=_XSD_FLOAT), ), + [ + ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), + ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), + ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), + ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), + ], + None, + ), + *affix_tuples( ( - Literal("2006-08-01", datatype=_XSD_DATE), - Literal("2006-11-01", datatype=_XSD_DATE), - "bminusa", - Literal("P92D", datatype=_XSD_DURATION), + Literal("5", datatype=_XSD_FLOAT), + Literal("10", datatype=_XSD_DECIMAL), ), + [ + ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), + ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), + ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), + ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), + ], + None, + ), + *affix_tuples( ( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("12:50:00", datatype=_XSD_TIME), 
- "bminusa", - Literal("-PT8H2M", datatype=_XSD_DURATION), + Literal("5", datatype=_XSD_FLOAT), + Literal("10", datatype=_XSD_DOUBLE), ), + [ + ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), + ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), + ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), + ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), + ], + None, + ), + *affix_tuples( ( - Literal("20:00:00", datatype=_XSD_TIME), - Literal("23:30:00", datatype=_XSD_TIME), - "bminusa", - Literal("PT3H30M", datatype=_XSD_DURATION), + Literal(Decimal("1.2121214312312")), + Literal(1), ), + [ + ("aminusb", Literal(Decimal("0.212121"))), + ("aplusb", Literal(Decimal("2.212121"))), + ("bminusa", Literal(Decimal("-0.212121"))), + ("bplusa", Literal(Decimal("2.212121"))), + ], + None, + ), + *affix_tuples( ( - Literal("2006-01-01T20:50:00", datatype=_XSD_DATETIME), Literal("P31D", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-02-01T20:50:00", datatype=_XSD_DATETIME), + Literal("P5D", datatype=_XSD_DURATION), ), + [ + ("aplusb", Literal("P36D", datatype=_XSD_DURATION)), + ("aminusb", Literal("P26D", datatype=_XSD_DURATION)), + ], + None, + ), + *affix_tuples( ( - Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), Literal("P119D", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-05-01T20:50:00", datatype=_XSD_DATETIME), - ), - ( - Literal("2006-07-01T20:52:00", datatype=_XSD_DATETIME), - Literal("P122DT15H58M", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-11-01T12:50:00", datatype=_XSD_DATETIME), - ), - ( - Literal("2006-07-01T20:52:00", datatype=_XSD_DATE), - Literal("P123D", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-11-01T12:50:00", datatype=_XSD_DATE), - ), - ( - Literal("2006-08-01", datatype=_XSD_DATE), - Literal("P92D", datatype=_XSD_DURATION), - "aplusb", - Literal("2006-11-01", datatype=_XSD_DATE), - ), - ( - Literal("20:52:00", datatype=_XSD_TIME), - Literal("-PT8H2M", datatype=_XSD_DURATION), - "aplusb", - Literal("12:50:00", 
datatype=_XSD_TIME), - ), - ( - Literal("20:00:00", datatype=_XSD_TIME), - Literal("PT3H30M", datatype=_XSD_DURATION), - "aplusb", - Literal("23:30:00", datatype=_XSD_TIME), - ), - ( - Literal("3", datatype=_XSD_INTEGER), - Literal("5", datatype=_XSD_INTEGER), - "aplusb", - Literal("8", datatype=_XSD_INTEGER), - ), - ( - Literal("3", datatype=_XSD_INTEGER), - Literal("5", datatype=_XSD_INTEGER), - "bminusa", - Literal("2", datatype=_XSD_INTEGER), - ), - ( - Literal("5.3", datatype=_XSD_FLOAT), - Literal("8.5", datatype=_XSD_FLOAT), - "bminusa", - Literal("3.2", datatype=_XSD_FLOAT), - ), - ( - Literal("5.3", datatype=_XSD_DECIMAL), - Literal("8.5", datatype=_XSD_DECIMAL), - "bminusa", - Literal("3.2", datatype=_XSD_DECIMAL), - ), - ( - Literal("5.3", datatype=_XSD_DOUBLE), - Literal("8.5", datatype=_XSD_DOUBLE), - "aminusb", - Literal("-3.2", datatype=_XSD_DOUBLE), - ), - ( - Literal("8.5", datatype=_XSD_DOUBLE), - Literal("5.3", datatype=_XSD_DOUBLE), - "aminusb", - Literal("3.2", datatype=_XSD_DOUBLE), - ), - ( - Literal(isodate.Duration(hours=1)), - Literal(isodate.Duration(hours=1)), - "aplusb", - Literal(isodate.Duration(hours=2)), + Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), ), + [ + ("aplusb", TypeError(r".*datatype.*")), + ("aminusb", TypeError(r".*datatype.*")), + ], + None, + ), + *affix_tuples( ( + Literal(isodate.Duration(days=4)), Literal(datetime.timedelta(days=1)), - Literal(datetime.timedelta(days=1)), - "aplusb", - Literal(datetime.timedelta(days=2)), - ), - ( - Literal(datetime.time.fromisoformat("04:23:01.000384")), - Literal(isodate.Duration(hours=1)), - "aplusb", - Literal("05:23:01.000384", datatype=XSD.time), ), - ( - Literal(datetime.date.fromisoformat("2011-11-04")), - Literal(isodate.Duration(days=1)), - "aplusb", - Literal("2011-11-05", datatype=XSD.date), - ), - ( - Literal( - datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00") + [ + ( + "aplusb", + TypeError( + r"Cannot add a Literal of datatype.*to a Literal 
of datatype.*" + ), ), - Literal(isodate.Duration(days=1)), - "aplusb", - Literal("2011-11-05T00:05:23.283000+00:00", datatype=XSD.dateTime), - ), - ( - Literal(datetime.time.fromisoformat("04:23:01.000384")), - Literal(datetime.timedelta(hours=1)), - "aplusb", - Literal("05:23:01.000384", datatype=XSD.time), - ), - ( - Literal(datetime.date.fromisoformat("2011-11-04")), - Literal(datetime.timedelta(days=1)), - "aplusb", - Literal("2011-11-05", datatype=XSD.date), - ), - ( - Literal( - datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00") + ( + "aminusb", + TypeError( + r"Cannot subtract a Literal of datatype.*from a Literal of datatype.*" + ), ), - Literal(datetime.timedelta(days=1)), - "aplusb", - Literal("2011-11-05T00:05:23.283000+00:00", datatype=XSD.dateTime), - ), - ( - Literal(datetime.time.fromisoformat("04:23:01.000384")), - Literal(isodate.Duration(hours=1)), - "aminusb", - Literal("03:23:01.000384", datatype=XSD.time), - ), - ( - Literal(datetime.date.fromisoformat("2011-11-04")), - Literal(isodate.Duration(days=1)), - "aminusb", - Literal("2011-11-03", datatype=XSD.date), - ), + ], + None, + ), + *affix_tuples( ( - Literal( - datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00") - ), + Literal(isodate.Duration(days=4)), Literal(isodate.Duration(days=1)), - "aminusb", - Literal("2011-11-03T00:05:23.283000+00:00", datatype=XSD.dateTime), ), + [ + ("aplusb", Literal(isodate.Duration(days=5))), + ("aminusb", Literal(isodate.Duration(days=3))), + ], + None, + ), + *affix_tuples( ( - Literal(datetime.time.fromisoformat("04:23:01.000384")), + Literal(datetime.timedelta(hours=4)), Literal(datetime.timedelta(hours=1)), - "aminusb", - Literal("03:23:01.000384", datatype=XSD.time), - ), - ( - Literal(datetime.date.fromisoformat("2011-11-04")), - Literal(datetime.timedelta(days=1)), - "aminusb", - Literal("2011-11-03", datatype=XSD.date), - ), - ( - Literal( - datetime.datetime.fromisoformat("2011-11-04 00:05:23.283+00:00") - ), - 
Literal(datetime.timedelta(days=1)), - "aminusb", - Literal("2011-11-03T00:05:23.283000+00:00", datatype=XSD.dateTime), - ), - ( - Literal("5", datatype=XSD.integer), - Literal("10", datatype=XSD.integer), - "bminusa", - Literal("5", datatype=XSD.integer), - ), - ( - Literal("5"), - Literal("10", datatype=_XSD_INTEGER), - "aminusb", - TypeError( - "Minuend Literal must have Numeric, Date, Datetime or Time datatype." - ), - ), - ( - Literal("5"), - Literal("10", datatype=_XSD_INTEGER), - "bminusa", - TypeError( - "Subtrahend Literal must have Numeric, Date, Datetime or Time datatype." - ), - ), - *affix_tuples( - ( - Literal("5", datatype=_XSD_INTEGER), - Literal("10", datatype=_XSD_FLOAT), - ), - [ - ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), - ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), - ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), - ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), - ], - None, - ), - *affix_tuples( - ( - Literal("5", datatype=_XSD_FLOAT), - Literal("10", datatype=_XSD_DECIMAL), - ), - [ - ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), - ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), - ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), - ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), - ], - None, - ), - *affix_tuples( - ( - Literal("5", datatype=_XSD_FLOAT), - Literal("10", datatype=_XSD_DOUBLE), - ), - [ - ("aminusb", Literal("-5", datatype=_XSD_DECIMAL)), - ("aplusb", Literal("15", datatype=_XSD_DECIMAL)), - ("bminusa", Literal("5", datatype=_XSD_DECIMAL)), - ("bplusa", Literal("15", datatype=_XSD_DECIMAL)), - ], - None, - ), - *affix_tuples( - ( - Literal(Decimal("1.2121214312312")), - Literal(1), - ), - [ - ("aminusb", Literal(Decimal("0.212121"))), - ("aplusb", Literal(Decimal("2.212121"))), - ("bminusa", Literal(Decimal("-0.212121"))), - ("bplusa", Literal(Decimal("2.212121"))), - ], - None, - ), - *affix_tuples( - ( - Literal("P31D", datatype=_XSD_DURATION), - Literal("P5D", datatype=_XSD_DURATION), - ), - [ - 
("aplusb", Literal("P36D", datatype=_XSD_DURATION)), - ("aminusb", Literal("P26D", datatype=_XSD_DURATION)), - ], - None, - ), - *affix_tuples( - ( - Literal("P119D", datatype=_XSD_DURATION), - Literal("2006-01-02T20:50:00", datatype=_XSD_DATETIME), - ), - [ - ("aplusb", TypeError(r".*datatype.*")), - ("aminusb", TypeError(r".*datatype.*")), - ], - None, - ), - *affix_tuples( - ( - Literal(isodate.Duration(days=4)), - Literal(datetime.timedelta(days=1)), - ), - [ - ( - "aplusb", - TypeError( - r"Cannot add a Literal of datatype.*to a Literal of datatype.*" - ), - ), - ( - "aminusb", - TypeError( - r"Cannot subtract a Literal of datatype.*from a Literal of datatype.*" - ), - ), - ], - None, - ), - *affix_tuples( - ( - Literal(isodate.Duration(days=4)), - Literal(isodate.Duration(days=1)), - ), - [ - ("aplusb", Literal(isodate.Duration(days=5))), - ("aminusb", Literal(isodate.Duration(days=3))), - ], - None, - ), - *affix_tuples( - ( - Literal(datetime.timedelta(hours=4)), - Literal(datetime.timedelta(hours=1)), - ), - [ - ("aplusb", Literal(datetime.timedelta(hours=5))), - ("aminusb", Literal(datetime.timedelta(hours=3))), - ], - None, ), - ], - ) - def test_literal_addsub( - self, - a: Literal, - b: Literal, - op: str, - expected_result: Union[Literal, Type[Exception], Exception], - ) -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - expected_exception: Optional[Exception] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass( - expected_result, Exception - ): - catcher = xstack.enter_context(pytest.raises(expected_result)) - elif isinstance(expected_result, Exception): - expected_exception = expected_result - catcher = xstack.enter_context(pytest.raises(type(expected_exception))) - if op == "aplusb": - result = a + b - - elif op == "aminusb": - result = a - b - elif op == "bminusa": - result = b - a - elif op == "bplusa": - result = b + a - else: - raise ValueError(f"invalid operation {op}") - 
logging.debug("result = %r", result) - if catcher is not None or expected_exception is not None: - assert catcher is not None - assert catcher.value is not None - if expected_exception is not None: - assert catcher.match(expected_exception.args[0]) + [ + ("aplusb", Literal(datetime.timedelta(hours=5))), + ("aminusb", Literal(datetime.timedelta(hours=3))), + ], + None, + ), + ], +) +def test_literal_addsub( + a: Literal, + b: Literal, + op: str, + expected_result: OutcomePrimitive[Literal], +) -> None: + checker = OutcomeChecker[Literal].from_primitive(expected_result) + with checker.context(): + if op == "aplusb": + result = a + b + + elif op == "aminusb": + result = a - b + elif op == "bminusa": + result = b - a + elif op == "bplusa": + result = b + a else: - assert isinstance(expected_result, Literal) - assert expected_result == result - - @pytest.mark.parametrize( - "a_value, b_value, result_value, datatype", - [ - [3, 5, 2, XSD.integer], - [5.3, 8.5, 3.2, XSD.decimal], - [5.3, 8.5, 3.2, XSD.double], - [5.3, 8.5, 3.2, XSD.float], - # [XSD.byte")], - [3, 5, 2, XSD.int], - [5.3, 8.5, 3.2, XSD.long], - [-3, -5, -2, XSD.negativeInteger], - [3, 5, 2, XSD.nonNegativeInteger], - [-5.3, -8.5, -3.2, XSD.nonPositiveInteger], - [3, 5, 2, XSD.positiveInteger], - [3, 5, 2, XSD.short], - [0, 0, 0, XSD.unsignedByte], - [3, 5, 2, XSD.unsignedInt], - [5.3, 8.5, 3.2, XSD.unsignedLong], - [5.3, 8.5, 3.2, XSD.unsignedShort], - ], - ) - def test_numeric_literals( - self, - a_value: Union[int, float], - b_value: Union[int, float], - result_value: Union[int, float], - datatype: URIRef, - ) -> None: - a = Literal(a_value, datatype=datatype) - b = Literal(b_value, datatype=datatype) - - result = b - a - expected = Literal(result_value, datatype=datatype) - assert result == expected, repr(result) - - -class TestNew: - # NOTE: Please use TestNewPT for new tests instead of this which is written - # for unittest. 
- def test_cant_pass_lang_and_datatype(self) -> None: - with pytest.raises(TypeError): - Literal("foo", lang="en", datatype=URIRef("http://example.com/")) - - def test_cant_pass_invalid_lang(self) -> None: - with pytest.raises(ValueError): - Literal("foo", lang="999") - - def test_from_other_literal(self) -> None: - l = Literal(1) - l2 = Literal(l) - assert isinstance(l.value, int) - assert isinstance(l2.value, int) - - # change datatype - l = Literal("1") - l2 = Literal(l, datatype=rdflib.XSD.integer) - assert isinstance(l2.value, int) - - def test_datatype_gets_auto_uri_ref_conversion(self) -> None: - # drewp disapproves of this behavior, but it should be - # represented in the tests - x = Literal("foo", datatype="http://example.com/") - assert isinstance(x.datatype, URIRef) - - x = Literal("foo", datatype=Literal("pennies")) - assert x.datatype == URIRef("pennies") - - -class TestRepr: - def test_omits_missing_datatype_and_lang(self) -> None: - assert repr(Literal("foo")) == "rdflib.term.Literal('foo')" - - def test_omits_missing_datatype(self) -> None: - assert ( - repr(Literal("foo", lang="en")) == "rdflib.term.Literal('foo', lang='en')" - ) - - def test_omits_missing_lang(self) -> None: - assert ( - repr(Literal("foo", datatype=URIRef("http://example.com/"))) - == "rdflib.term.Literal('foo', datatype=rdflib.term.URIRef('http://example.com/'))" - ) - - def test_subclass_name_appears_in_repr(self) -> None: - class MyLiteral(Literal): - pass - - x = MyLiteral("foo") - assert repr(x) == "MyLiteral('foo')" - - -class TestDoubleOutput: - def test_no_dangling_point(self) -> None: - """confirms the fix for https://github.com/RDFLib/rdflib/issues/237""" - vv = Literal("0.88", datatype=_XSD_DOUBLE) - out = vv._literal_n3(use_plain=True) - assert out in ["8.8e-01", "0.88"], out - - -class TestParseBoolean: - """confirms the fix for https://github.com/RDFLib/rdflib/issues/913""" - - def test_true_boolean(self) -> None: - test_value = Literal("tRue", 
datatype=_XSD_BOOLEAN) - assert test_value.value - test_value = Literal("1", datatype=_XSD_BOOLEAN) - assert test_value.value - - def test_false_boolean(self) -> None: - test_value = Literal("falsE", datatype=_XSD_BOOLEAN) - assert test_value.value is False - test_value = Literal("0", datatype=_XSD_BOOLEAN) - assert test_value.value is False - - def test_non_false_boolean(self) -> None: - with pytest.warns( - UserWarning, - match=r"Parsing weird boolean, 'abcd' does not map to True or False", - ): - test_value = Literal("abcd", datatype=_XSD_BOOLEAN) - assert test_value.value is False - - with pytest.warns( - UserWarning, - match=r"Parsing weird boolean, '10' does not map to True or False", - ): - test_value = Literal("10", datatype=_XSD_BOOLEAN) - assert test_value.value is False - - -class TestBindings: - def test_binding(self, clear_bindings: None) -> None: - class a: - def __init__(self, v: str) -> None: - self.v = v[3:-3] - - def __str__(self) -> str: - return "<<<%s>>>" % self.v - - dtA = rdflib.URIRef("urn:dt:a") - bind(dtA, a) - - va = a("<<<2>>>") - la = Literal(va, normalize=True) - assert la.value == va - assert la.datatype == dtA - - la2 = Literal("<<<2>>>", datatype=dtA) - assert isinstance(la2.value, a) - assert la2.value.v == va.v - - class b: - def __init__(self, v: str) -> None: - self.v = v[3:-3] - - def __str__(self) -> str: - return "B%s" % self.v - - dtB = rdflib.URIRef("urn:dt:b") - bind(dtB, b, None, lambda x: "<<<%s>>>" % x) - - vb = b("<<<3>>>") - lb = Literal(vb, normalize=True) - assert lb.value == vb - assert lb.datatype == dtB - - def test_specific_binding(self, clear_bindings: None) -> None: - def lexify(s: str) -> str: - return "--%s--" % s - - def unlexify(s: str) -> str: - return s[2:-2] - - datatype = rdflib.URIRef("urn:dt:mystring") - - # Datatype-specific rule - bind(datatype, str, unlexify, lexify, datatype_specific=True) - - s = "Hello" - normal_l = Literal(s) - assert str(normal_l) == s - assert normal_l.toPython() == s - 
assert normal_l.datatype is None - - specific_l = Literal("--%s--" % s, datatype=datatype) - assert str(specific_l) == lexify(s) - assert specific_l.toPython() == s - assert specific_l.datatype == datatype - - -class TestXsdLiterals: - @pytest.mark.parametrize( - ["lexical", "literal_type", "value_cls"], - [ - # these literals do not get converted to Python types - ("ABCD", XSD.integer, None), - ("ABCD", XSD.gYear, None), - ("-10000", XSD.gYear, None), - ("-1921-00", XSD.gYearMonth, None), - ("1921-00", XSD.gMonthDay, None), - ("1921-13", XSD.gMonthDay, None), - ("-1921-00", XSD.gMonthDay, None), - ("10", XSD.gDay, None), - ("-1", XSD.gDay, None), - ("0000", XSD.gYear, None), - ("0000-00-00", XSD.date, None), - ("NOT A VALID HEX STRING", XSD.hexBinary, None), - ("NOT A VALID BASE64 STRING", XSD.base64Binary, None), - # these literals get converted to python types - ("1921-05-01", XSD.date, datetime.date), - ("1921-05-01T00:00:00", XSD.dateTime, datetime.datetime), - ("1921-05", XSD.gYearMonth, datetime.date), - ("0001-01", XSD.gYearMonth, datetime.date), - ("0001-12", XSD.gYearMonth, datetime.date), - ("2002-01", XSD.gYearMonth, datetime.date), - ("9999-01", XSD.gYearMonth, datetime.date), - ("9999-12", XSD.gYearMonth, datetime.date), - ("1921", XSD.gYear, datetime.date), - ("2000", XSD.gYear, datetime.date), - ("0001", XSD.gYear, datetime.date), - ("9999", XSD.gYear, datetime.date), - ("1982", XSD.gYear, datetime.date), - ("2002", XSD.gYear, datetime.date), - ("1921-05-01T00:00:00+00:30", XSD.dateTime, datetime.datetime), - ("1921-05-01T00:00:00-00:30", XSD.dateTime, datetime.datetime), - ("true", XSD.boolean, bool), - ("abcdef0123", XSD.hexBinary, bytes), - ("", XSD.hexBinary, bytes), - ("UkRGTGli", XSD.base64Binary, bytes), - ("", XSD.base64Binary, bytes), - ("0.0000000000000000000000000000001", XSD.decimal, Decimal), - ("0.1", XSD.decimal, Decimal), - ("1", XSD.integer, int), - ], - ) - def test_make_literals( - self, lexical: str, literal_type: URIRef, 
value_cls: Optional[type] - ) -> None: - """ - Tests literal construction. - """ - self.check_make_literals(lexical, literal_type, value_cls) - - @pytest.mark.parametrize( - ["lexical", "literal_type", "value_cls"], - [ - pytest.param(*params, marks=pytest.mark.xfail(raises=AssertionError)) - for params in [ - ("1921-01Z", XSD.gYearMonth, datetime.date), - ("1921Z", XSD.gYear, datetime.date), - ("1921-00", XSD.gYearMonth, datetime.date), - ("1921-05-01Z", XSD.date, datetime.date), - ("1921-05-01+00:30", XSD.date, datetime.date), - ("1921-05-01+00:30", XSD.date, datetime.date), - ("1921-05-01+00:00", XSD.date, datetime.date), - ("1921-05-01+00:00", XSD.date, datetime.date), - ("1921-05-01T00:00:00Z", XSD.dateTime, datetime.datetime), - ("1e-31", XSD.decimal, None), # This is not a valid decimal value - ] - ], + raise ValueError(f"invalid operation {op}") + logging.debug("result = %r", result) + checker.check(result) + + +@pytest.mark.parametrize( + "a_value, b_value, result_value, datatype", + [ + [3, 5, 2, XSD.integer], + [5.3, 8.5, 3.2, XSD.decimal], + [5.3, 8.5, 3.2, XSD.double], + [5.3, 8.5, 3.2, XSD.float], + # [XSD.byte")], + [3, 5, 2, XSD.int], + [5.3, 8.5, 3.2, XSD.long], + [-3, -5, -2, XSD.negativeInteger], + [3, 5, 2, XSD.nonNegativeInteger], + [-5.3, -8.5, -3.2, XSD.nonPositiveInteger], + [3, 5, 2, XSD.positiveInteger], + [3, 5, 2, XSD.short], + [0, 0, 0, XSD.unsignedByte], + [3, 5, 2, XSD.unsignedInt], + [5.3, 8.5, 3.2, XSD.unsignedLong], + [5.3, 8.5, 3.2, XSD.unsignedShort], + ], +) +def test_numeric_literals( + a_value: Union[int, float], + b_value: Union[int, float], + result_value: Union[int, float], + datatype: URIRef, +) -> None: + a = Literal(a_value, datatype=datatype) + b = Literal(b_value, datatype=datatype) + + result = b - a + expected = Literal(result_value, datatype=datatype) + assert result == expected, repr(result) + + +def test_cant_pass_lang_and_datatype() -> None: + with pytest.raises(TypeError): + Literal("foo", lang="en", 
datatype=URIRef("http://example.com/")) + + +def test_cant_pass_invalid_lang_int() -> None: + with pytest.raises(ValueError): + Literal("foo", lang="999") + + +def test_from_other_literal() -> None: + l = Literal(1) # noqa: E741 + l2 = Literal(l) + assert isinstance(l.value, int) + assert isinstance(l2.value, int) + + # change datatype + l = Literal("1") # noqa: E741 + l2 = Literal(l, datatype=rdflib.XSD.integer) + assert isinstance(l2.value, int) + + +def test_datatype_gets_auto_uri_ref_conversion() -> None: + # drewp disapproves of this behavior, but it should be + # represented in the tests + x = Literal("foo", datatype="http://example.com/") + assert isinstance(x.datatype, URIRef) + + x = Literal("foo", datatype=Literal("pennies")) + assert x.datatype == URIRef("pennies") + + +def test_omits_missing_datatype_and_lang() -> None: + assert repr(Literal("foo")) == "rdflib.term.Literal('foo')" + + +def test_omits_missing_datatype() -> None: + assert repr(Literal("foo", lang="en")) == "rdflib.term.Literal('foo', lang='en')" + + +def test_omits_missing_lang() -> None: + assert ( + repr(Literal("foo", datatype=URIRef("http://example.com/"))) + == "rdflib.term.Literal('foo', datatype=rdflib.term.URIRef('http://example.com/'))" ) - def test_make_literals_ki( - self, lexical: str, literal_type: URIRef, value_cls: Optional[type] - ) -> None: - """ - Known issues with literal construction. 
- """ - self.check_make_literals(lexical, literal_type, value_cls) - - @classmethod - def check_make_literals( - cls, lexical: str, literal_type: URIRef, value_cls: Optional[type] - ) -> None: - literal = Literal(lexical, datatype=literal_type) - if value_cls is not None: - assert isinstance(literal.value, value_cls) - else: - assert literal.value is None - assert lexical == f"{literal}" + + +def test_subclass_name_appears_in_repr() -> None: + class MyLiteral(Literal): + pass + + x = MyLiteral("foo") + assert repr(x) == "MyLiteral('foo')" + + +def test_no_dangling_point() -> None: + """confirms the fix for https://github.com/RDFLib/rdflib/issues/237""" + vv = Literal("0.88", datatype=_XSD_DOUBLE) + out = vv._literal_n3(use_plain=True) + assert out in ["8.8e-01", "0.88"], out + + +def test_true_boolean() -> None: + test_value = Literal("tRue", datatype=_XSD_BOOLEAN) + assert test_value.value + test_value = Literal("1", datatype=_XSD_BOOLEAN) + assert test_value.value + + +def test_false_boolean() -> None: + test_value = Literal("falsE", datatype=_XSD_BOOLEAN) + assert test_value.value is False + test_value = Literal("0", datatype=_XSD_BOOLEAN) + assert test_value.value is False + + +def test_non_false_boolean() -> None: + with pytest.warns( + UserWarning, + match=r"Parsing weird boolean, 'abcd' does not map to True or False", + ): + test_value = Literal("abcd", datatype=_XSD_BOOLEAN) + assert test_value.value is False + + with pytest.warns( + UserWarning, + match=r"Parsing weird boolean, '10' does not map to True or False", + ): + test_value = Literal("10", datatype=_XSD_BOOLEAN) + assert test_value.value is False + + +def test_binding(clear_bindings: None) -> None: + class a: # noqa: N801 + def __init__(self, v: str) -> None: + self.v = v[3:-3] + + def __str__(self) -> str: + return "<<<%s>>>" % self.v + + dtA = rdflib.URIRef("urn:dt:a") # noqa: N806 + bind(dtA, a) + + va = a("<<<2>>>") + la = Literal(va, normalize=True) + assert la.value == va + assert la.datatype 
== dtA + + la2 = Literal("<<<2>>>", datatype=dtA) + assert isinstance(la2.value, a) + assert la2.value.v == va.v + + class b: # noqa: N801 + def __init__(self, v: str) -> None: + self.v = v[3:-3] + + def __str__(self) -> str: + return "B%s" % self.v + + dtB = rdflib.URIRef("urn:dt:b") # noqa: N806 + bind(dtB, b, None, lambda x: "<<<%s>>>" % x) + + vb = b("<<<3>>>") + lb = Literal(vb, normalize=True) + assert lb.value == vb + assert lb.datatype == dtB + + +def test_specific_binding(clear_bindings: None) -> None: + def lexify(s: str) -> str: + return "--%s--" % s + + def unlexify(s: str) -> str: + return s[2:-2] + + datatype = rdflib.URIRef("urn:dt:mystring") + + # Datatype-specific rule + bind(datatype, str, unlexify, lexify, datatype_specific=True) + + s = "Hello" + normal_l = Literal(s) + assert str(normal_l) == s + assert normal_l.toPython() == s + assert normal_l.datatype is None + + specific_l = Literal("--%s--" % s, datatype=datatype) + assert str(specific_l) == lexify(s) + assert specific_l.toPython() == s + assert specific_l.datatype == datatype + + +@pytest.mark.parametrize( + ["lexical", "literal_type", "value_cls"], + [ + # these literals do not get converted to Python types + ("ABCD", XSD.integer, None), + ("ABCD", XSD.gYear, None), + ("-10000", XSD.gYear, None), + ("-1921-00", XSD.gYearMonth, None), + ("1921-00", XSD.gMonthDay, None), + ("1921-13", XSD.gMonthDay, None), + ("-1921-00", XSD.gMonthDay, None), + ("10", XSD.gDay, None), + ("-1", XSD.gDay, None), + ("0000", XSD.gYear, None), + ("0000-00-00", XSD.date, None), + ("NOT A VALID HEX STRING", XSD.hexBinary, None), + ("NOT A VALID BASE64 STRING", XSD.base64Binary, None), + # these literals get converted to python types + ("1921-05-01", XSD.date, datetime.date), + ("1921-05-01T00:00:00", XSD.dateTime, datetime.datetime), + ("1921-05", XSD.gYearMonth, datetime.date), + ("0001-01", XSD.gYearMonth, datetime.date), + ("0001-12", XSD.gYearMonth, datetime.date), + ("2002-01", XSD.gYearMonth, 
datetime.date), + ("9999-01", XSD.gYearMonth, datetime.date), + ("9999-12", XSD.gYearMonth, datetime.date), + ("1921", XSD.gYear, datetime.date), + ("2000", XSD.gYear, datetime.date), + ("0001", XSD.gYear, datetime.date), + ("9999", XSD.gYear, datetime.date), + ("1982", XSD.gYear, datetime.date), + ("2002", XSD.gYear, datetime.date), + ("1921-05-01T00:00:00+00:30", XSD.dateTime, datetime.datetime), + ("1921-05-01T00:00:00-00:30", XSD.dateTime, datetime.datetime), + ("true", XSD.boolean, bool), + ("abcdef0123", XSD.hexBinary, bytes), + ("", XSD.hexBinary, bytes), + ("UkRGTGli", XSD.base64Binary, bytes), + ("", XSD.base64Binary, bytes), + ("0.0000000000000000000000000000001", XSD.decimal, Decimal), + ("0.1", XSD.decimal, Decimal), + ("1", XSD.integer, int), + ] + + [ + pytest.param(*params, marks=pytest.mark.xfail(raises=AssertionError)) + for params in [ + ("1921-01Z", XSD.gYearMonth, datetime.date), + ("1921Z", XSD.gYear, datetime.date), + ("1921-00", XSD.gYearMonth, datetime.date), + ("1921-05-01Z", XSD.date, datetime.date), + ("1921-05-01+00:30", XSD.date, datetime.date), + ("1921-05-01+00:30", XSD.date, datetime.date), + ("1921-05-01+00:00", XSD.date, datetime.date), + ("1921-05-01+00:00", XSD.date, datetime.date), + ("1921-05-01T00:00:00Z", XSD.dateTime, datetime.datetime), + ("1e-31", XSD.decimal, None), # This is not a valid decimal value + ] + ], +) +def test_literal_construction_value_class( + lexical: str, literal_type: URIRef, value_cls: Optional[type] +) -> None: + literal = Literal(lexical, datatype=literal_type) + if value_cls is not None: + assert isinstance(literal.value, value_cls) + else: + assert literal.value is None + assert lexical == f"{literal}" def test_exception_in_converter( @@ -965,8 +927,23 @@ def unlexify(s: str) -> str: ) +class _UnknownType: + """ + A class that is not known to rdflib, used to test the how + rdflib.term.Literal handles unknown python types. 
+ """ + + def __repr__(self) -> str: + return "_UnknownType()" + + def __eq__(self, __value: object) -> bool: + if isinstance(__value, _UnknownType): + return True + return False + + @pytest.mark.parametrize( - ["literal_maker", "checks"], + ["literal_maker", "outcome"], [ ( lambda: Literal("foo"), @@ -1001,36 +978,74 @@ def unlexify(s: str) -> str: lambda: Literal(Literal("blue sky", "en")), Literal("blue sky", "en"), ), + ( + lambda: Literal("", datatype=RDF.HTML), + LiteralChecker( + ..., None, RDF.HTML, True if _HAVE_HTML5LIB else None, "" + ), + ), + ( + lambda: Literal("
", datatype=RDF.HTML), + LiteralChecker( + ..., + None, + RDF.HTML, + False if _HAVE_HTML5LIB else None, + "
", + ), + ), + ( + lambda: Literal(_UnknownType(), datatype=EGDC.UnknownType), + LiteralChecker( + _UnknownType(), None, EGDC.UnknownType, None, "_UnknownType()" + ), + ), ], + ids=literal_idfn, ) def test_literal_construction( literal_maker: Callable[[], Literal], - checks: Union[ - Iterable[Union[LiteralChecker, Literal]], - LiteralChecker, - Literal, - Type[Exception], + outcome: OutcomePrimitives[Literal], +) -> None: + checker = OutcomeChecker[Literal].from_primitives(outcome) + with checker.context(): + actual_outcome = literal_maker() + checker.check(actual_outcome) + + +@pytest.mark.parametrize( + ["literal_maker", "normalize_literals", "outcome"], + [ + ( + lambda: Literal("001000", datatype=XSD.integer), + ..., + LiteralChecker(1000, None, XSD.integer, False, "1000"), + ), + ( + lambda: Literal("001000", datatype=XSD.integer), + True, + LiteralChecker(1000, None, XSD.integer, False, "1000"), + ), + ( + lambda: Literal("001000", datatype=XSD.integer), + False, + LiteralChecker(1000, None, XSD.integer, False, "001000"), + ), ], + ids=literal_idfn, +) +def test_global_normalize( + literal_maker: Callable[[], Literal], + normalize_literals: Union[builtins.ellipsis, bool], + outcome: OutcomePrimitives[Literal], ) -> None: - check_error: Optional[Type[Exception]] = None - if isinstance(checks, type) and issubclass(checks, Exception): - check_error = checks - checks = [] - elif not isinstance(checks, Iterable): - checks = [checks] - - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - with ExitStack() as xstack: - if check_error is not None: - catcher = xstack.enter_context(pytest.raises(check_error)) - literal = literal_maker() - - if check_error is not None: - assert catcher is not None - assert catcher.value is not None - - for check in checks: - if isinstance(check, LiteralChecker): - check.check(literal) - else: - check = literal + _normalize_literals = rdflib.NORMALIZE_LITERALS + try: + if normalize_literals is not ...: + rdflib.NORMALIZE_LITERALS 
= normalize_literals + checker = OutcomeChecker[Literal].from_primitives(outcome) + with checker.context(): + actual_outcome = literal_maker() + checker.check(actual_outcome) + finally: + rdflib.NORMALIZE_LITERALS = _normalize_literals diff --git a/test/test_literal/test_literal_html5lib.py b/test/test_literal/test_literal_html5lib.py new file mode 100644 index 000000000..ce22568db --- /dev/null +++ b/test/test_literal/test_literal_html5lib.py @@ -0,0 +1,79 @@ +import xml.dom.minidom +from typing import Callable + +import pytest + +import rdflib.term +from rdflib.namespace import RDF +from rdflib.term import Literal +from test.utils.literal import LiteralChecker +from test.utils.outcome import OutcomeChecker, OutcomePrimitives + +try: + import html5lib as _ # noqa: F401 +except ImportError: + pytest.skip("html5lib not installed", allow_module_level=True) + + +def test_has_html5lib() -> None: + assert rdflib.term._HAS_HTML5LIB is True + assert RDF.HTML in rdflib.term.XSDToPython + rule = next( + ( + item + for item in rdflib.term._GenericPythonToXSDRules + if item[0] is xml.dom.minidom.DocumentFragment + ), + None, + ) + assert rule is not None + assert rule[1][1] == RDF.HTML + + +@pytest.mark.parametrize( + ["factory", "outcome"], + [ + # Ill-typed literals, these have lexical forms that result in + # errors when parsed as HTML by html5lib. + ( + lambda: Literal("

Hello, World!

", datatype=RDF.HTML), + LiteralChecker( + ..., None, RDF.HTML, True, "

Hello, World!

" + ), + ), + ( + lambda: Literal("", datatype=RDF.HTML), + LiteralChecker(..., None, RDF.HTML, True, ""), + ), + ( + lambda: Literal("THE TEXT IS IN HERE", datatype=RDF.HTML), + LiteralChecker( + ..., None, RDF.HTML, True, "THE TEXT IS IN HERE" + ), + ), + # Well-typed literals, these have lexical forms that parse + # without errors with html5lib. + ( + lambda: Literal("
", datatype=RDF.HTML), + LiteralChecker(..., None, RDF.HTML, False, "
"), + ), + ( + lambda: Literal("
", datatype=RDF.HTML, normalize=True), + LiteralChecker(..., None, RDF.HTML, False, "
"), + ), + ( + lambda: Literal( + "
", datatype=RDF.HTML, normalize=False + ), + LiteralChecker(..., None, RDF.HTML, False, "
"), + ), + ], +) +def test_literal_construction( + factory: Callable[[], Literal], + outcome: OutcomePrimitives[Literal], +) -> None: + checker = OutcomeChecker[Literal].from_primitives(outcome) + with checker.context(): + actual_outcome = factory() + checker.check(actual_outcome) diff --git a/test/test_literal/test_term.py b/test/test_literal/test_term.py index ca2a972f3..b395d64b1 100644 --- a/test/test_literal/test_term.py +++ b/test/test_literal/test_term.py @@ -32,7 +32,7 @@ def test_graceful_ordering(self): a = u > u a = u > BNode() a = u > QuotedGraph(g.store, u) - a = u > g + a = u > g # noqa: F841 class TestBNodeRepr: @@ -124,8 +124,8 @@ def isclose(a, b, rel_tol=1e-09, abs_tol=0.0): Literal(Decimal(2.1), datatype=XSD.decimal), ), (7, Literal(Decimal(1.1)), Literal(Decimal(1.1)), Literal(Decimal(2.2))), - (8, Literal(float(1)), Literal(float(1.1)), Literal(float(2.1))), - (9, Literal(float(1.1)), Literal(float(1.1)), Literal(float(2.2))), + (8, Literal(float(1)), Literal(1.1), Literal(2.1)), + (9, Literal(1.1), Literal(1.1), Literal(2.2)), (10, Literal(-1), Literal(-1), Literal(-2)), (12, Literal(Decimal(-1)), Literal(Decimal(-1)), Literal(Decimal(-2))), (13, Literal(float(-1)), Literal(float(-1)), Literal(float(-2))), @@ -138,14 +138,14 @@ def isclose(a, b, rel_tol=1e-09, abs_tol=0.0): Literal(Decimal(-1.1)), Literal(Decimal(-2.2)), ), - (18, Literal(float(-1)), Literal(float(-1.1)), Literal(float(-2.1))), - (19, Literal(float(-1.1)), Literal(float(-1.1)), Literal(float(-2.2))), + (18, Literal(float(-1)), Literal(-1.1), Literal(-2.1)), + (19, Literal(-1.1), Literal(-1.1), Literal(-2.2)), (20, Literal(1), Literal(1.0), Literal(2.0)), (21, Literal(1.0), Literal(1.0), Literal(2.0)), (22, Literal(Decimal(1)), Literal(Decimal(1.0)), Literal(Decimal(2.0))), (23, Literal(Decimal(1.0)), Literal(Decimal(1.0)), Literal(Decimal(2.0))), - (24, Literal(float(1)), Literal(float(1.0)), Literal(float(2.0))), - (25, Literal(float(1.0)), Literal(float(1.0)), 
Literal(float(2.0))), + (24, Literal(float(1)), Literal(1.0), Literal(2.0)), + (25, Literal(1.0), Literal(1.0), Literal(2.0)), ( 26, Literal(1, datatype=XSD.integer), @@ -206,8 +206,8 @@ def isclose(a, b, rel_tol=1e-09, abs_tol=0.0): (38, Literal(1.0), 1.0, Literal(2.0)), (39, Literal(Decimal(1.0)), Decimal(1), Literal(Decimal(2.0))), (40, Literal(Decimal(1.0)), Decimal(1.0), Literal(Decimal(2.0))), - (41, Literal(float(1.0)), float(1), Literal(float(2.0))), - (42, Literal(float(1.0)), float(1.0), Literal(float(2.0))), + (41, Literal(1.0), float(1), Literal(2.0)), + (42, Literal(1.0), 1.0, Literal(2.0)), ( 43, Literal(1, datatype=XSD.integer), diff --git a/test/test_literal/test_uriref_literal_comparison.py b/test/test_literal/test_uriref_literal_comparison.py index 2dfdb734d..62c00ba72 100644 --- a/test/test_literal/test_uriref_literal_comparison.py +++ b/test/test_literal/test_uriref_literal_comparison.py @@ -32,37 +32,37 @@ def setup_method(self): self.python_literal = "http://example.org/" self.python_literal_2 = "foo" - def testA(self): + def test_a(self): assert self.uriref != self.literal - def testB(self): + def test_b(self): assert self.literal != self.uriref - def testC(self): + def test_c(self): assert self.uriref != self.python_literal - def testD(self): + def test_d(self): assert self.python_literal != self.uriref - def testE(self): + def test_e(self): assert self.literal != self.python_literal - def testE2(self): + def test_e2(self): assert self.literal.eq(self.python_literal) - def testF(self): + def test_f(self): assert self.python_literal != self.literal - def testG(self): + def test_g(self): assert "foo" not in CORE_SYNTAX_TERMS - def testH(self): + def test_h(self): assert ( URIRef("http://www.w3.org/1999/02/22-rdf-syntax-ns#RDF") in CORE_SYNTAX_TERMS ) - def testI(self): + def test_i(self): g = Graph() g.add((self.uriref, RDF.value, self.literal)) g.add((self.uriref, RDF.value, self.uriref)) diff --git a/test/test_literal/test_xmlliterals.py 
b/test/test_literal/test_xmlliterals.py index 626755e24..38ae549d4 100644 --- a/test/test_literal/test_xmlliterals.py +++ b/test/test_literal/test_xmlliterals.py @@ -16,7 +16,7 @@ have_html5lib = False -def testPythonRoundtrip(): +def testPythonRoundtrip(): # noqa: N802 l1 = Literal("hello", datatype=RDF.XMLLiteral) assert l1.value is not None, "xml must have been parsed" assert l1.datatype == RDF.XMLLiteral, "literal must have right datatype" @@ -42,7 +42,7 @@ def testPythonRoundtrip(): rdflib.NORMALIZE_LITERALS = True -def testRDFXMLParse(): +def testRDFXMLParse(): # noqa: N802 rdfxml = """\ hello", datatype=RDF.XMLLiteral) assert l1.value is not None, "xml must have been parsed" assert l1.datatype == RDF.XMLLiteral, "literal must have right datatype" @@ -100,6 +100,11 @@ def testHTML(): assert l2.value is not None, "xml must have been parsed" assert l2.datatype == RDF.HTML, "literal must have right datatype" + l3 = Literal(">> assert is_ncname('') == False >>> assert is_ncname('999') == False >>> assert is_ncname('x') == True - >>> assert is_ncname(u'x') == True - >>> assert is_ncname(u'Michèle') == True + >>> assert is_ncname('Michèle') == True However, vanilla uuid4s are not necessarily NCNames: diff --git a/test/test_misc/test_events.py b/test/test_misc/test_events.py index 7e6849ae6..15fc73cc9 100644 --- a/test/test_misc/test_events.py +++ b/test/test_misc/test_events.py @@ -51,7 +51,7 @@ def __contains__(self, key): class TestEvent: - def testEvents(self): + def testEvents(self): # noqa: N802 c1 = Cache() c2 = Cache() c3 = Cache() diff --git a/test/test_misc/test_input_source.py b/test/test_misc/test_input_source.py index 90e6e238a..ce01cdaf9 100644 --- a/test/test_misc/test_input_source.py +++ b/test/test_misc/test_input_source.py @@ -7,20 +7,11 @@ import re from contextlib import ExitStack, contextmanager from dataclasses import dataclass - -# from itertools import product +from io import BytesIO, StringIO, TextIOWrapper from pathlib import Path -from 
test.utils import GraphHelper -from test.utils.exceptions import ExceptionChecker -from test.utils.httpfileserver import ( - HTTPFileInfo, - HTTPFileServer, - LocationType, - ProtoFileResource, - ProtoRedirectResource, -) from typing import ( # Callable, IO, + TYPE_CHECKING, BinaryIO, Collection, ContextManager, @@ -40,12 +31,22 @@ from rdflib.graph import Graph from rdflib.parser import ( + BytesIOWrapper, FileInputSource, InputSource, StringInputSource, URLInputSource, create_input_source, ) +from test.utils import GraphHelper +from test.utils.httpfileserver import ( + HTTPFileInfo, + HTTPFileServer, + LocationType, + ProtoFileResource, + ProtoRedirectResource, +) +from test.utils.outcome import ExceptionChecker from ..data import TEST_DATA_DIR @@ -648,9 +649,7 @@ def test_create_input_source( input_source: Optional[InputSource] = None with ExitStack() as xstack: if isinstance(test_params.expected_result, ExceptionChecker): - catcher = xstack.enter_context( - pytest.raises(test_params.expected_result.type) - ) + catcher = xstack.enter_context(test_params.expected_result.context()) input_source = xstack.enter_context( call_create_input_source( @@ -671,7 +670,64 @@ def test_create_input_source( logging.debug("input_source = %s, catcher = %s", input_source, catcher) - if isinstance(test_params.expected_result, ExceptionChecker): - assert catcher is not None - assert input_source is None - test_params.expected_result.check(catcher.value) + +def test_bytesio_wrapper(): + wrapper = BytesIOWrapper("hello world") + assert wrapper.seekable() + assert wrapper.read(1) == b"h" + assert wrapper.read(1) == b"e" + assert wrapper.seek(0) == 0 + assert wrapper.read() == b"hello world" + wrapper.seek(0) + ba = bytearray(7) + assert wrapper.readinto(ba) == 7 + assert ba == b"hello w" + assert not wrapper.closed + wrapper.close() + assert wrapper.closed + + text_stream = TextIOWrapper(BytesIO(b"hello world")) + wrapper = BytesIOWrapper(text_stream) + assert wrapper.seekable() + 
assert wrapper.read(1) == b"h" + assert wrapper.read(1) == b"e" + assert wrapper.tell() == 2 + assert wrapper.seek(0) == 0 + assert wrapper.read() == b"hello world" + ba = bytearray(7) + assert wrapper.readinto(ba) == 0 + wrapper.seek(0) + assert wrapper.readinto(ba) == 7 + assert ba == b"hello w" + + text_stream = StringIO("h∈llo world") + wrapper = BytesIOWrapper(text_stream) + assert wrapper.seekable() + assert wrapper.read(1) == b"h" + assert wrapper.read(1) == b"\xe2" + assert wrapper.read(1) == b"\x88" + assert wrapper.tell() == 3 + assert wrapper.read(2) == b"\x88l" + assert wrapper.seek(0) == 0 + assert wrapper.read() == b"h\xe2\x88\x88llo world" + ba = bytearray(7) + assert wrapper.readinto(ba) == 0 + wrapper.seek(0) + assert wrapper.readinto(ba) == 7 + assert ba == b"h\xe2\x88\x88llo" + nquads_dir = TEST_DATA_DIR.relative_to(Path.cwd()) / "nquads.rdflib" + + with open(Path(nquads_dir / "test1.nquads"), "r") as f: + # not binary file, opened as a TextIO + if TYPE_CHECKING: + assert isinstance(f, TextIOWrapper) + wrapper = BytesIOWrapper(f) + assert not wrapper.closed + assert wrapper.name == str(nquads_dir / "test1.nquads") + assert wrapper.seekable() + assert wrapper.read(1) == b"<" + assert wrapper.read(1) == b"h" + assert wrapper.tell() == 2 + assert wrapper.seek(0) == 0 + assert wrapper.read(1) == b"<" + assert wrapper.closed diff --git a/test/test_misc/test_networking_redirect.py b/test/test_misc/test_networking_redirect.py index acde10d71..0412f9bf7 100644 --- a/test/test_misc/test_networking_redirect.py +++ b/test/test_misc/test_networking_redirect.py @@ -1,7 +1,7 @@ +from __future__ import annotations + from contextlib import ExitStack from copy import deepcopy -from test.utils.exceptions import ExceptionChecker -from test.utils.http import headers_as_message as headers_as_message from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union from urllib.error import HTTPError from urllib.request import HTTPRedirectHandler, Request @@ -10,6 
+10,8 @@ from _pytest.mark.structures import ParameterSet from rdflib._networking import _make_redirect_request +from test.utils.http import headers_as_message as headers_as_message +from test.utils.outcome import ExceptionChecker AnyT = TypeVar("AnyT") @@ -197,14 +199,13 @@ def test_make_redirect_request( result: Optional[Request] = None with ExitStack() as stack: if isinstance(expected_result, ExceptionChecker): - catcher = stack.enter_context(pytest.raises(expected_result.type)) + catcher = stack.enter_context(expected_result.context()) elif expected_result is RaisesIdentity: catcher = stack.enter_context(pytest.raises(HTTPError)) result = _make_redirect_request(http_request, http_error) if isinstance(expected_result, ExceptionChecker): assert catcher is not None - expected_result.check(catcher.value) elif isinstance(expected_result, type): assert catcher is not None assert http_error is catcher.value diff --git a/test/test_misc/test_parse_file_guess_format.py b/test/test_misc/test_parse_file_guess_format.py index b9e2c0658..ca35cb090 100644 --- a/test/test_misc/test_parse_file_guess_format.py +++ b/test/test_misc/test_parse_file_guess_format.py @@ -12,13 +12,13 @@ from pathlib import Path from shutil import copyfile from tempfile import TemporaryDirectory -from test.data import TEST_DATA_DIR import pytest from rdflib import Graph from rdflib.exceptions import ParserError from rdflib.util import guess_format +from test.data import TEST_DATA_DIR class TestFileParserGuessFormat: @@ -61,9 +61,9 @@ def test_n3(self) -> None: g.parse(os.path.join(TEST_DATA_DIR, "example-lots_of_graphs.n3")), Graph ) - def test_warning(self) -> None: + def test_warning(self, caplog: pytest.LogCaptureFixture) -> None: g = Graph() - graph_logger = logging.getLogger("rdflib") + graph_logger = logging.getLogger("rdflib") # noqa: F841 with TemporaryDirectory() as tmpdirname: newpath = Path(tmpdirname).joinpath("no_file_ext") @@ -78,9 +78,16 @@ def test_warning(self) -> None: ), 
str(newpath), ) - with pytest.raises(ParserError, match=r"Could not guess RDF format"): - with pytest.warns( - UserWarning, - match="does not look like a valid URI, trying to serialize this will break.", - ) as logwarning: - g.parse(str(newpath)) + with pytest.raises( + ParserError, match=r"Could not guess RDF format" + ), caplog.at_level("WARNING"): + g.parse(str(newpath)) + + assert any( + rec.levelno == logging.WARNING + and ( + "does not look like a valid URI, trying to serialize this will break." + in rec.message + ) + for rec in caplog.records + ) diff --git a/test/test_misc/test_plugins.py b/test/test_misc/test_plugins.py index 7263bc738..317c2618b 100644 --- a/test/test_misc/test_plugins.py +++ b/test/test_misc/test_plugins.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import importlib import logging import shutil diff --git a/test/test_misc/test_prefix_types.py b/test/test_misc/test_prefix_types.py index b168eef80..1498ca43e 100644 --- a/test/test_misc/test_prefix_types.py +++ b/test/test_misc/test_prefix_types.py @@ -14,7 +14,6 @@ class TestPrefixTypes: - """N3/Turtle serializers should use prefixes, also for types and datatypes diff --git a/test/test_misc/test_security.py b/test/test_misc/test_security.py index 652de6e73..090785a2d 100644 --- a/test/test_misc/test_security.py +++ b/test/test_misc/test_security.py @@ -1,12 +1,11 @@ +from __future__ import annotations + import enum import http.client import itertools import logging from contextlib import ExitStack from pathlib import Path -from test.utils.audit import AuditHookDispatcher -from test.utils.httpfileserver import HTTPFileServer, ProtoFileResource -from test.utils.urlopen import context_urlopener from textwrap import dedent from typing import Any, Iterable, Tuple from urllib.request import HTTPHandler, OpenerDirector, Request @@ -15,13 +14,14 @@ from _pytest.mark.structures import ParameterSet from rdflib import Graph -from rdflib.namespace import Namespace +from test.utils.audit 
import AuditHookDispatcher +from test.utils.httpfileserver import HTTPFileServer, ProtoFileResource +from test.utils.namespace import EGDO +from test.utils.urlopen import context_urlopener from ..utils import GraphHelper from ..utils.path import ctx_chdir -EGNS = Namespace("http://example.org/") - JSONLD_CONTEXT = """ { "@context": { @@ -30,7 +30,7 @@ } """ -EXPECTED_GRAPH = Graph().add((EGNS.subject, EGNS.predicate, EGNS.object)) +EXPECTED_GRAPH = Graph().add((EGDO.subject, EGDO.predicate, EGDO.object)) def test_default(tmp_path: Path) -> None: diff --git a/test/test_n3.py b/test/test_n3.py index c22278efd..f3d7eeb07 100644 --- a/test/test_n3.py +++ b/test/test_n3.py @@ -1,6 +1,5 @@ import itertools import os -from test import TEST_DIR from urllib.error import URLError import pytest @@ -8,6 +7,7 @@ from rdflib.graph import ConjunctiveGraph, Graph from rdflib.plugins.parsers.notation3 import BadSyntax, exponent_syntax from rdflib.term import Literal, URIRef +from test import TEST_DIR test_data = """ # Definitions of terms describing the n3 model diff --git a/test/test_namespace/test_definednamespace.py b/test/test_namespace/test_definednamespace.py index 3a6fdd600..ea8e12969 100644 --- a/test/test_namespace/test_definednamespace.py +++ b/test/test_namespace/test_definednamespace.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import inspect import logging import subprocess @@ -6,7 +8,6 @@ from contextlib import ExitStack from dataclasses import dataclass from pathlib import Path -from test.data import TEST_DATA_DIR from typing import Optional, Type import pytest @@ -14,6 +15,7 @@ from rdflib import RDF, SKOS from rdflib.namespace import DefinedNamespace, Namespace from rdflib.term import URIRef +from test.data import TEST_DATA_DIR def test_definednamespace_creator_qb(): @@ -39,9 +41,8 @@ def test_definednamespace_creator_qb(): "http://purl.org/linked-data/cube#", "QB", ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, + 
capture_output=True, + text=True, ) assert completed.returncode == 0, "subprocess exited incorrectly" assert Path.is_file(Path("_QB.py")), "_QB.py file not created" @@ -87,9 +88,8 @@ def test_definednamespace_creator_fake(): "http://purl.org/linked-data/cube#", "QB", ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, + capture_output=True, + text=True, ) assert completed.returncode == 1, "subprocess exited incorrectly (failure expected)" @@ -118,9 +118,8 @@ def test_definednamespace_creator_bad_ns(): "http://purl.org/linked-data/cube", "QB", ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, + capture_output=True, + text=True, ) assert completed.returncode == 1, "subprocess exited incorrectly (failure expected)" diff --git a/test/test_namespace/test_definednamespace_creator.py b/test/test_namespace/test_definednamespace_creator.py index 3a76dbc18..4a77057f9 100644 --- a/test/test_namespace/test_definednamespace_creator.py +++ b/test/test_namespace/test_definednamespace_creator.py @@ -31,9 +31,8 @@ def test_definednamespace_creator_qb(): "http://purl.org/linked-data/cube#", "QB", ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, + capture_output=True, + text=True, ) assert completed.returncode == 0, "subprocess exited incorrectly" assert Path.is_file(Path("_QB.py")), "_QB.py file not created" @@ -81,9 +80,8 @@ def test_definednamespace_creator_fake(): "http://purl.org/linked-data/cube#", "QB", ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, + capture_output=True, + text=True, ) assert completed.returncode == 1, "subprocess exited incorrectly (failure expected)" @@ -112,9 +110,8 @@ def test_definednamespace_creator_bad_ns(): "http://purl.org/linked-data/cube", "QB", ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, + capture_output=True, + text=True, ) assert completed.returncode == 1, "subprocess exited 
incorrectly (failure expected)" @@ -145,9 +142,8 @@ def test_definednamespace_creator_multiple_comments(): "http://example.org/multiline-string-example#", "MULTILINESTRINGEXAMPLE", ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, + capture_output=True, + text=True, ) assert completed.returncode == 0, "subprocess exited incorrectly" assert Path.is_file( diff --git a/test/test_namespace/test_namespace.py b/test/test_namespace/test_namespace.py index 3f439133c..409d703f3 100644 --- a/test/test_namespace/test_namespace.py +++ b/test/test_namespace/test_namespace.py @@ -1,5 +1,6 @@ -from contextlib import ExitStack -from typing import Any, Optional, Type, Union +from __future__ import annotations + +from typing import Any, Optional from warnings import warn import pytest @@ -18,6 +19,7 @@ URIPattern, ) from rdflib.term import BNode, Literal, URIRef +from test.utils.outcome import OutcomeChecker, OutcomePrimitive class TestNamespace: @@ -238,7 +240,7 @@ def test_contains_method(self): ref = URIRef("http://www.w3.org/ns/shacl#Info") assert ( - type(SH) == DefinedNamespaceMeta + type(SH) is DefinedNamespaceMeta ), f"SH no longer a DefinedNamespaceMeta (instead it is now {type(SH)}, update test." 
assert ref in SH, "sh:Info not in SH" @@ -306,22 +308,15 @@ def test_expand_curie_exception_messages(self) -> None: ], ) def test_expand_curie( - self, curie: Any, expected_result: Union[Type[Exception], URIRef, None] + self, curie: Any, expected_result: OutcomePrimitive[URIRef] ) -> None: g = Graph(bind_namespaces="none") nsm = g.namespace_manager nsm.bind("ex", "urn:example:") + + checker = OutcomeChecker.from_primitive(expected_result) + result: Optional[URIRef] = None - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass( - expected_result, Exception - ): - catcher = xstack.enter_context(pytest.raises(expected_result)) + with checker.context(): result = g.namespace_manager.expand_curie(curie) - - if catcher is not None: - assert result is None - assert catcher.value is not None - else: - assert expected_result == result + checker.check(result) diff --git a/test/test_namespace/test_namespacemanager.py b/test/test_namespace/test_namespacemanager.py index a35f3ac63..4ca182ae2 100644 --- a/test/test_namespace/test_namespacemanager.py +++ b/test/test_namespace/test_namespacemanager.py @@ -1,25 +1,14 @@ from __future__ import annotations import logging -import re -import sys from contextlib import ExitStack -from pathlib import Path -from test.utils.exceptions import ExceptionChecker from typing import TYPE_CHECKING, Any, Dict, Mapping, Optional, Set, Tuple, Type, Union import pytest +from rdflib import Graph from rdflib.graph import Dataset -from rdflib.term import URIRef - -if TYPE_CHECKING: - from rdflib._type_checking import _NamespaceSetString - - -sys.path.append(str(Path(__file__).parent.parent.absolute())) -from rdflib import Graph # noqa: E402 -from rdflib.namespace import ( # noqa: E402 +from rdflib.namespace import ( _NAMESPACE_PREFIXES_CORE, _NAMESPACE_PREFIXES_RDFLIB, OWL, @@ -27,6 +16,11 @@ Namespace, NamespaceManager, ) +from rdflib.term import URIRef +from 
test.utils.outcome import ExceptionChecker, OutcomeChecker, OutcomePrimitive + +if TYPE_CHECKING: + from rdflib._type_checking import _NamespaceSetString def test_core_prefixes_bound(): @@ -374,7 +368,7 @@ def test_compute_qname( manager_prefixes: Optional[Mapping[str, Namespace]], graph_prefixes: Optional[Mapping[str, Namespace]], store_prefixes: Optional[Mapping[str, Namespace]], - expected_result: Union[Tuple[str, URIRef, str], Type[Exception], Exception], + expected_result: OutcomePrimitive[Tuple[str, URIRef, str]], ) -> None: """ :param uri: argument to compute_qname() @@ -403,25 +397,13 @@ def test_compute_qname( nm.bind(prefix, ns) def check() -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass( - expected_result, Exception - ): - catcher = xstack.enter_context(pytest.raises(expected_result)) - if isinstance(expected_result, Exception): - catcher = xstack.enter_context(pytest.raises(type(expected_result))) + checker = OutcomeChecker[Tuple[str, URIRef, str]].from_primitive( + expected_result + ) + with checker.context(): actual_result = nm.compute_qname(uri, generate) logging.debug("actual_result = %s", actual_result) - if catcher is not None: - assert catcher is not None - assert catcher.value is not None - if isinstance(expected_result, Exception): - assert re.match(expected_result.args[0], f"{catcher.value}") - else: - assert isinstance(expected_result, tuple) - assert isinstance(actual_result, tuple) - assert actual_result == expected_result + checker.check(actual_result) check() # Run a second time to check caching @@ -452,7 +434,7 @@ def test_compute_qname_strict( generate: bool, bind_namespaces: _NamespaceSetString, additional_prefixes: Optional[Mapping[str, Namespace]], - expected_result: Union[Tuple[str, URIRef, str], Type[Exception], Exception], + expected_result: OutcomePrimitive[Tuple[str, str, str]], ) -> None: graph = 
Graph(bind_namespaces=bind_namespaces) nm = graph.namespace_manager @@ -462,25 +444,11 @@ def test_compute_qname_strict( nm.bind(prefix, ns) def check() -> None: - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass( - expected_result, Exception - ): - catcher = xstack.enter_context(pytest.raises(expected_result)) - if isinstance(expected_result, Exception): - catcher = xstack.enter_context(pytest.raises(type(expected_result))) + checker = OutcomeChecker[Tuple[str, str, str]].from_primitive(expected_result) + with checker.context(): actual_result = nm.compute_qname_strict(uri, generate) logging.debug("actual_result = %s", actual_result) - if catcher is not None: - assert catcher is not None - assert catcher.value is not None - if isinstance(expected_result, Exception): - assert re.match(expected_result.args[0], f"{catcher.value}") - else: - assert isinstance(expected_result, tuple) - assert isinstance(actual_result, tuple) - assert actual_result == expected_result + checker.check(actual_result) check() # Run a second time to check caching @@ -538,16 +506,15 @@ def test_nsm_function() -> NamespaceManager: def test_expand_curie( test_nsm_session: NamespaceManager, curie: str, - expected_result: Union[ExceptionChecker, str], + expected_result: OutcomePrimitive[str], ) -> None: nsm = test_nsm_session - with ExitStack() as xstack: - if isinstance(expected_result, ExceptionChecker): - xstack.enter_context(expected_result) - result = nsm.expand_curie(curie) - - if not isinstance(expected_result, ExceptionChecker): - assert URIRef(expected_result) == result + if isinstance(expected_result, str): + expected_result = URIRef(expected_result) + checker = OutcomeChecker[str].from_primitive(expected_result) + with checker.context(): + actual_result = nsm.expand_curie(curie) + checker.check(actual_result) @pytest.mark.parametrize( @@ -578,7 +545,7 @@ def test_generate_curie( 
test_nsm_function: NamespaceManager, uri: str, generate: Optional[bool], - expected_result: Union[ExceptionChecker, str], + expected_result: OutcomePrimitive[str], ) -> None: """ .. note:: @@ -587,13 +554,10 @@ def test_generate_curie( effects and will modify the namespace manager. """ nsm = test_nsm_function - with ExitStack() as xstack: - if isinstance(expected_result, ExceptionChecker): - xstack.enter_context(expected_result) + checker = OutcomeChecker[str].from_primitive(expected_result) + with checker.context(): if generate is None: - result = nsm.curie(uri) + actual_result = nsm.curie(uri) else: - result = nsm.curie(uri, generate=generate) - - if not isinstance(expected_result, ExceptionChecker): - assert expected_result == result + actual_result = nsm.curie(uri, generate=generate) + checker.check(actual_result) diff --git a/test/test_nt_misc.py b/test/test_nt_misc.py index 90a6e93a2..7f48b61c1 100644 --- a/test/test_nt_misc.py +++ b/test/test_nt_misc.py @@ -2,13 +2,13 @@ import os import re from pathlib import Path -from test.data import TEST_DATA_DIR from urllib.request import urlopen import pytest from rdflib import Graph, Literal, URIRef from rdflib.plugins.parsers import ntriples +from test.data import TEST_DATA_DIR log = logging.getLogger(__name__) @@ -19,253 +19,273 @@ def nt_file(fn): return os.path.join(NT_PATH, fn) -class TestNT: - def testIssue859(self): - graphA = Graph() - graphB = Graph() - graphA.parse(nt_file("quote-01.nt"), format="ntriples") - graphB.parse(nt_file("quote-02.nt"), format="ntriples") - for subjectA, predicateA, objA in graphA: - for subjectB, predicateB, objB in graphB: - assert subjectA == subjectB - assert predicateA == predicateB - assert objA == objB - - def testIssue78(self): - g = Graph() - g.add((URIRef("foo"), URIRef("foo"), Literal("R\u00E4ksm\u00F6rg\u00E5s"))) - s = g.serialize(format="nt") - assert type(s) == str - assert "R\u00E4ksm\u00F6rg\u00E5s" in s - - def testIssue146(self): - g = Graph() - 
g.add((URIRef("foo"), URIRef("foo"), Literal("test\n", lang="en"))) - s = g.serialize(format="nt").strip() - assert s == ' "test\\n"@en .' - - def testIssue1144_rdflib(self): - fname = nt_file("lists-02.nt") - g1 = Graph() - with open(fname, "r") as f: - g1.parse(f, format="nt") - assert 14 == len(g1) - g2 = Graph() - with open(fname, "rb") as fb: - g2.parse(fb, format="nt") - assert 14 == len(g2) - - def testIssue1144_w3c(self): - fname = nt_file("lists-02.nt") - sink1 = ntriples.NTGraphSink(Graph()) - p1 = ntriples.W3CNTriplesParser(sink1) - with open(fname, "r") as f: - p1.parse(f) - assert 14 == len(sink1.g) - sink2 = ntriples.NTGraphSink(Graph()) - p2 = ntriples.W3CNTriplesParser(sink2) - with open(fname, "rb") as f: - p2.parse(f) - assert 14 == len(sink2.g) - - def test_sink(self): - s = ntriples.DummySink() - assert s.length == 0 - s.triple(None, None, None) - assert s.length == 1 - - def test_nonvalidating_unquote(self): - safe = """ .""" - ntriples.validate = False - res = ntriples.unquote(safe) - assert isinstance(res, str) - - def test_validating_unquote(self): - quot = """ .""" - ntriples.validate = True - res = ntriples.unquote(quot) - # revert to default - ntriples.validate = False - log.debug("restype %s" % type(res)) - - def test_validating_unquote_raises(self): - ntriples.validate = True - uniquot = """ "R\\u00E4ksm\\u00F6rg\\u00E5s" .""" - with pytest.raises(ntriples.ParseError): - ntriples.unquote(uniquot) - uniquot = """ "R\\\\u00E4ksm\\u00F6rg\\u00E5s" .""" - with pytest.raises(ntriples.ParseError): - ntriples.unquote(uniquot) - # revert to default - ntriples.validate = False - - def test_nonvalidating_uriquote(self): - ntriples.validate = False - safe = """ .""" - res = ntriples.uriquote(safe) - assert res == safe - - def test_validating_uriquote(self): - ntriples.validate = True - uniquot = """ "R\\u00E4ksm\\u00F6rg\\u00E5s" .""" - res = ntriples.uriquote(uniquot) - # revert to default - ntriples.validate = False - assert res == uniquot - - 
def test_W3CNTriplesParser_fpath(self): - fpath = os.path.join(nt_file(os.listdir(NT_PATH)[0])) - p = ntriples.W3CNTriplesParser() - with pytest.raises(ntriples.ParseError): - p.parse(fpath) - - def test_W3CNTriplesParser_parsestring(self): - p = ntriples.W3CNTriplesParser() - data = 3 - with pytest.raises(ntriples.ParseError): - p.parsestring(data) - with open(nt_file("lists-02.nt"), "r") as f: - data = f.read() - p = ntriples.W3CNTriplesParser() - res = p.parsestring(data) - assert res is None - - def test_w3_ntriple_variants(self): - uri = Path(nt_file("test.nt")).absolute().as_uri() - - parser = ntriples.W3CNTriplesParser() - u = urlopen(uri) - sink = parser.parse(u) - u.close() - # ATM we are only really interested in any exceptions thrown - assert sink is not None - - def test_bad_line(self): - data = ( - """ 3 .\n""" - ) - p = ntriples.W3CNTriplesParser() - with pytest.raises(ntriples.ParseError): - p.parsestring(data) - - def test_cover_eat(self): - data = ( - """ 3 .\n""" - ) - p = ntriples.W3CNTriplesParser() - p.line = data - with pytest.raises(ntriples.ParseError): - p.eat(re.compile("")) - - def test_cover_subjectobjectliteral(self): - # data = ''' 3 .\n''' - p = ntriples.W3CNTriplesParser() - p.line = "baz" - with pytest.raises(ntriples.ParseError): - p.subject() - with pytest.raises(ntriples.ParseError): - p.object() - # p.line = '"baz"@fr^^' - # self.assertRaises(ntriples.ParseError, p.literal) - - -class TestBNodeContext: - def test_bnode_shared_across_instances(self): - my_sink = FakeSink() - bnode_context = dict() - p = ntriples.W3CNTriplesParser(my_sink, bnode_context=bnode_context) - p.parsestring( - """ - _:0 . 
+# Test NT + + +def test_issue859(): + graph_a = Graph() + graph_b = Graph() + graph_a.parse(nt_file("quote-01.nt"), format="ntriples") + graph_b.parse(nt_file("quote-02.nt"), format="ntriples") + for subject_a, predicate_a, obj_a in graph_a: + for subject_b, predicate_b, obj_b in graph_b: + assert subject_a == subject_b + assert predicate_a == predicate_b + assert obj_a == obj_b + + +def test_issue78(): + g = Graph() + g.add((URIRef("foo"), URIRef("foo"), Literal("R\u00E4ksm\u00F6rg\u00E5s"))) + s = g.serialize(format="nt") + assert type(s) == str # noqa: E721 + assert "R\u00E4ksm\u00F6rg\u00E5s" in s + + +def test_issue146(): + g = Graph() + g.add((URIRef("foo"), URIRef("foo"), Literal("test\n", lang="en"))) + s = g.serialize(format="nt").strip() + assert s == ' "test\\n"@en .' + + +def test_issue1144_rdflib(): + fname = nt_file("lists-02.nt") + g1 = Graph() + with open(fname, "r") as f: + g1.parse(f, format="nt") + assert 14 == len(g1) + g2 = Graph() + with open(fname, "rb") as fb: + g2.parse(fb, format="nt") + assert 14 == len(g2) + + +def test_issue1144_w3c(): + fname = nt_file("lists-02.nt") + sink1 = ntriples.NTGraphSink(Graph()) + p1 = ntriples.W3CNTriplesParser(sink1) + with open(fname, "r") as f: + p1.parse(f) + assert 14 == len(sink1.g) + sink2 = ntriples.NTGraphSink(Graph()) + p2 = ntriples.W3CNTriplesParser(sink2) + with open(fname, "rb") as f: + p2.parse(f) + assert 14 == len(sink2.g) + + +def test_sink(): + s = ntriples.DummySink() + assert s.length == 0 + s.triple(None, None, None) + assert s.length == 1 + + +def test_nonvalidating_unquote(): + safe = """ .""" + ntriples.validate = False + res = ntriples.unquote(safe) + assert isinstance(res, str) + + +def test_validating_unquote(): + quot = """ .""" + ntriples.validate = True + res = ntriples.unquote(quot) + # revert to default + ntriples.validate = False + log.debug("restype %s" % type(res)) + + +def test_validating_unquote_raises(): + ntriples.validate = True + uniquot = """ 
"R\\u00E4ksm\\u00F6rg\\u00E5s" .""" + with pytest.raises(ntriples.ParseError): + ntriples.unquote(uniquot) + uniquot = """ "R\\\\u00E4ksm\\u00F6rg\\u00E5s" .""" + with pytest.raises(ntriples.ParseError): + ntriples.unquote(uniquot) + # revert to default + ntriples.validate = False + + +def test_nonvalidating_uriquote(): + ntriples.validate = False + safe = """ .""" + res = ntriples.uriquote(safe) + assert res == safe + + +def test_validating_uriquote(): + ntriples.validate = True + uniquot = """ "R\\u00E4ksm\\u00F6rg\\u00E5s" .""" + res = ntriples.uriquote(uniquot) + # revert to default + ntriples.validate = False + assert res == uniquot + + +def test_w3d_ntriples_parser_fpath(): + fpath = os.path.join(nt_file(os.listdir(NT_PATH)[0])) + p = ntriples.W3CNTriplesParser() + with pytest.raises(ntriples.ParseError): + p.parse(fpath) + + +def test_w3c_ntriples_parser_parsestring(): + p = ntriples.W3CNTriplesParser() + data = 3 + with pytest.raises(ntriples.ParseError): + p.parsestring(data) + with open(nt_file("lists-02.nt"), "r") as f: + data = f.read() + p = ntriples.W3CNTriplesParser() + res = p.parsestring(data) + assert res is None + + +def test_w3_ntriple_variants(): + uri = Path(nt_file("test.nt")).absolute().as_uri() + + parser = ntriples.W3CNTriplesParser() + u = urlopen(uri) + sink = parser.parse(u) + u.close() + # ATM we are only really interested in any exceptions thrown + assert sink is not None + + +def test_bad_line(): + data = """ 3 .\n""" + p = ntriples.W3CNTriplesParser() + with pytest.raises(ntriples.ParseError): + p.parsestring(data) + + +def test_cover_eat(): + data = """ 3 .\n""" + p = ntriples.W3CNTriplesParser() + p.line = data + with pytest.raises(ntriples.ParseError): + p.eat(re.compile("")) + + +def test_cover_subjectobjectliteral(): + # data = ''' 3 .\n''' + p = ntriples.W3CNTriplesParser() + p.line = "baz" + with pytest.raises(ntriples.ParseError): + p.subject() + with pytest.raises(ntriples.ParseError): + p.object() + # p.line = '"baz"@fr^^' 
+ # self.assertRaises(ntriples.ParseError, p.literal) + + +# Test BNode context + + +def test_bnode_shared_across_instances(): + my_sink = FakeSink() + bnode_context = dict() + p = ntriples.W3CNTriplesParser(my_sink, bnode_context=bnode_context) + p.parsestring( """ - ) + _:0 . + """ + ) - q = ntriples.W3CNTriplesParser(my_sink, bnode_context=bnode_context) - q.parsestring( - """ - _:0 . + q = ntriples.W3CNTriplesParser(my_sink, bnode_context=bnode_context) + q.parsestring( """ - ) + _:0 . + """ + ) - assert len(my_sink.subs) == 1 + assert len(my_sink.subs) == 1 - def test_bnode_distinct_across_instances(self): - my_sink = FakeSink() - p = ntriples.W3CNTriplesParser(my_sink) - p.parsestring( - """ - _:0 . + +def test_bnode_distinct_across_instances(): + my_sink = FakeSink() + p = ntriples.W3CNTriplesParser(my_sink) + p.parsestring( """ - ) + _:0 . + """ + ) - q = ntriples.W3CNTriplesParser(my_sink) - q.parsestring( - """ - _:0 . + q = ntriples.W3CNTriplesParser(my_sink) + q.parsestring( """ - ) + _:0 . + """ + ) + + assert len(my_sink.subs) == 2 - assert len(my_sink.subs) == 2 - def test_bnode_distinct_across_parse(self): - my_sink = FakeSink() - p = ntriples.W3CNTriplesParser(my_sink) +def test_bnode_distinct_across_parse(): + my_sink = FakeSink() + p = ntriples.W3CNTriplesParser(my_sink) - p.parsestring( - """ - _:0 . - """, - bnode_context=dict(), - ) + p.parsestring( + """ + _:0 . + """, + bnode_context=dict(), + ) + + p.parsestring( + """ + _:0 . + """, + bnode_context=dict(), + ) - p.parsestring( - """ - _:0 . - """, - bnode_context=dict(), - ) + assert len(my_sink.subs) == 2 - assert len(my_sink.subs) == 2 - def test_bnode_shared_across_parse(self): - my_sink = FakeSink() - p = ntriples.W3CNTriplesParser(my_sink) +def test_bnode_shared_across_parse(): + my_sink = FakeSink() + p = ntriples.W3CNTriplesParser(my_sink) - p.parsestring( - """ - _:0 . + p.parsestring( """ - ) + _:0 . + """ + ) - p.parsestring( - """ - _:0 . 
+ p.parsestring( """ - ) - - assert len(my_sink.subs) == 1 - - def test_bnode_shared_across_instances_with_parse_option(self): - my_sink = FakeSink() - bnode_ctx = dict() - - p = ntriples.W3CNTriplesParser(my_sink) - p.parsestring( - """ - _:0 . - """, - bnode_context=bnode_ctx, - ) - - q = ntriples.W3CNTriplesParser(my_sink) - q.parsestring( - """ - _:0 . - """, - bnode_context=bnode_ctx, - ) - - assert len(my_sink.subs) == 1 + _:0 . + """ + ) + + assert len(my_sink.subs) == 1 + + +def test_bnode_shared_across_instances_with_parse_option(): + my_sink = FakeSink() + bnode_ctx = dict() + + p = ntriples.W3CNTriplesParser(my_sink) + p.parsestring( + """ + _:0 . + """, + bnode_context=bnode_ctx, + ) + + q = ntriples.W3CNTriplesParser(my_sink) + q.parsestring( + """ + _:0 . + """, + bnode_context=bnode_ctx, + ) + + assert len(my_sink.subs) == 1 class FakeSink: diff --git a/test/test_parsers/test_broken_parse_data_from_jena.py b/test/test_parsers/test_broken_parse_data_from_jena.py index ec176a4cc..353593837 100644 --- a/test/test_parsers/test_broken_parse_data_from_jena.py +++ b/test/test_parsers/test_broken_parse_data_from_jena.py @@ -1,9 +1,9 @@ import os -from test.data import TEST_DATA_DIR import pytest import rdflib +from test.data import TEST_DATA_DIR # Recovered from # https://github.com/RDFLib/rdflib/tree/6b4607018ebf589da74aea4c25408999f1acf2e2 diff --git a/test/test_parsers/test_empty_xml_base.py b/test/test_parsers/test_empty_xml_base.py index 1c245b3e5..0f3f18694 100644 --- a/test/test_parsers/test_empty_xml_base.py +++ b/test/test_parsers/test_empty_xml_base.py @@ -30,8 +30,8 @@ """ -baseUri = URIRef("http://example.com/") -baseUri2 = URIRef("http://example.com/foo/bar") +baseUri = URIRef("http://example.com/") # noqa: N816 +baseUri2 = URIRef("http://example.com/foo/bar") # noqa: N816 class TestEmptyBase: @@ -55,7 +55,7 @@ def test_relative_base_ref(self): assert ( len(self.graph) > 0 ), "There should be at least one statement in the graph" - resolvedBase = 
URIRef("http://example.com/baz") + resolvedBase = URIRef("http://example.com/baz") # noqa: N806 assert ( resolvedBase, RDF.type, diff --git a/test/test_parsers/test_n3parse_of_rdf_lists.py b/test/test_parsers/test_n3parse_of_rdf_lists.py index 88cffffb3..a886a8c1a 100644 --- a/test/test_parsers/test_n3parse_of_rdf_lists.py +++ b/test/test_parsers/test_n3parse_of_rdf_lists.py @@ -29,7 +29,7 @@ class TestOWLCollectionTest: def test_collection_rdfxml(self): g = Graph().parse(data=DATA, format="nt") g.namespace_manager.bind("owl", URIRef("http://www.w3.org/2002/07/owl#")) - s = g.serialize(format="pretty-xml") + s = g.serialize(format="pretty-xml") # noqa: F841 class TestListTest: diff --git a/test/test_parsers/test_nquads.py b/test/test_parsers/test_nquads.py index 0f80ed504..ad17b5aee 100644 --- a/test/test_parsers/test_nquads.py +++ b/test/test_parsers/test_nquads.py @@ -1,7 +1,7 @@ import os -from test.data import TEST_DATA_DIR from rdflib import ConjunctiveGraph, Namespace, URIRef +from test.data import TEST_DATA_DIR TEST_BASE = os.path.join(TEST_DATA_DIR, "nquads.rdflib") @@ -36,7 +36,7 @@ def test_03_get_value(self): g = self._load_example() s = URIRef("http://bibliographica.org/entity/E10009") - FOAF = Namespace("http://xmlns.com/foaf/0.1/") + FOAF = Namespace("http://xmlns.com/foaf/0.1/") # noqa: N806 assert g.value(s, FOAF.name).eq("Arco Publications") def test_context_is_optional(self): diff --git a/test/test_parsers/test_parse_with_skolemize.py b/test/test_parsers/test_parse_with_skolemize.py new file mode 100644 index 000000000..4b5261abd --- /dev/null +++ b/test/test_parsers/test_parse_with_skolemize.py @@ -0,0 +1,128 @@ +import pytest + +from rdflib import BNode, Dataset, Graph +from rdflib.compare import isomorphic + + +@pytest.mark.parametrize( + "data, data_format, expected_data, expected_data_format", + [ + [ + """ + _:internal-bnode-id-1 . + _:internal-bnode-id-1 "..." . + """, + "ntriples", + """ + . + "..." . 
+ """, + "ntriples", + ] + ], +) +def test_parse_with_skolemize_triples( + data: str, data_format: str, expected_data: str, expected_data_format: str +): + graph = Graph().parse(data=data, format=data_format, skolemize=True) + assert len(graph) + + expected_graph = Graph().parse(data=expected_data, format=expected_data_format) + assert len(expected_graph) + + assert isomorphic(graph, expected_graph) + + de_skolem_graph = graph.de_skolemize() + expected_de_skolem_graph = expected_graph.de_skolemize() + assert isomorphic(de_skolem_graph, expected_de_skolem_graph) + + +@pytest.mark.parametrize( + "data, data_format, expected_data, expected_data_format, anonymous_graph_name", + [ + [ + """ + _:internal-bnode-id-1 _:graph-id . + _:internal-bnode-id-1 "..." _:graph-id . + """, + "nquads", + """ + . + "..." . + """, + "nquads", + "graph-id", + ], + [ + """ + ["urn:object", "urn:hasPart", "_:internal-bnode-id-1", "localId", "", "_:graph-id"] + ["_:internal-bnode-id-1", "urn:value", "...", "http://www.w3.org/2001/XMLSchema#string", "", "_:graph-id"] + """, + "hext", + """ + . + "..."^^ . + """, + "nquads", + "graph-id", + ], + [ + """ + [ + { + "@id": "_:graph-id", + "@graph": [ + { + "@id": "urn:object", + "urn:hasPart": { + "@id": "_:internal-bnode-id-1" + } + }, + { + "@id": "_:internal-bnode-id-1", + "urn:value": "..." + } + ] + } + ] + """, + "json-ld", + """ + . + "..." . 
+ """, + "nquads", + "graph-id", + ], + ], +) +def test_parse_with_skolemize_quads( + data: str, + data_format: str, + expected_data: str, + expected_data_format: str, + anonymous_graph_name, +): + ds = Dataset(default_union=True) + ds.parse(data=data, format=data_format, skolemize=True) + assert len(ds) + + expected_ds = Dataset(default_union=True) + expected_ds.parse(data=expected_data, format=expected_data_format) + assert len(expected_ds) + + graph_name = BNode(anonymous_graph_name) + skolem_graph_name = graph_name.skolemize() + + skolem_graph = ds.graph(skolem_graph_name) + expected_skolem_graph = expected_ds.graph(skolem_graph_name) + assert len(skolem_graph) + assert len(expected_skolem_graph) + assert isomorphic(skolem_graph, expected_skolem_graph) + assert isomorphic(skolem_graph.de_skolemize(), expected_skolem_graph.de_skolemize()) + + # Note: Datasets must have default_union set to True, otherwise calling + # de_skolemize returns an empty graph. + assert isomorphic(ds.de_skolemize(), expected_ds.de_skolemize()) + + # TODO: There's no way to roundtrip datasets with skolemization? 
diff --git a/test/test_parsers/test_parser.py b/test/test_parsers/test_parser.py index f351a31cd..77ea2ef51 100644 --- a/test/test_parsers/test_parser.py +++ b/test/test_parsers/test_parser.py @@ -14,7 +14,7 @@ def setup_method(self): def teardown_method(self): self.graph.close() - def testNoPathWithHash(self): + def testNoPathWithHash(self): # noqa: N802 g = self.graph g.parse( data="""\ diff --git a/test/test_parsers/test_parser_hext.py b/test/test_parsers/test_parser_hext.py index 5f4a180b7..908c4950d 100644 --- a/test/test_parsers/test_parser_hext.py +++ b/test/test_parsers/test_parser_hext.py @@ -1,13 +1,39 @@ from pathlib import Path -from rdflib import ConjunctiveGraph, Dataset, Literal +from rdflib import BNode, ConjunctiveGraph, Dataset, Literal, URIRef +from rdflib.compare import isomorphic +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID from rdflib.namespace import XSD +def test_named_and_anonymous_graph_roundtrip(): + s = """ + ["http://example.com/s01", "http://example.com/a", "http://example.com/Type1", "globalId", "", "https://example.com/graph/1"] + ["http://example.com/s01", "http://example.com/label", "This is a Label", "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", "en", "_:graph-2"] + ["http://example.com/s01", "http://example.com/comment", "This is a comment", "http://www.w3.org/2001/XMLSchema#string", "", ""] + """ + d = Dataset() + d.parse(data=s, format="hext") + + new_s = d.serialize(format="hext") + new_d = Dataset() + new_d.parse(data=new_s, format="hext") + + named_graph = URIRef("https://example.com/graph/1") + assert isomorphic(d.graph(named_graph), new_d.graph(named_graph)) + + anonymous_graph = BNode("graph-2") + assert isomorphic(d.graph(anonymous_graph), new_d.graph(anonymous_graph)) + + assert isomorphic( + d.graph(DATASET_DEFAULT_GRAPH_ID), new_d.graph(DATASET_DEFAULT_GRAPH_ID) + ) + + def test_small_string(): s = """ - ["http://example.com/s01", "http://example.com/a", "http://example.com/Type1", "globalId", "", ""] 
- ["http://example.com/s01", "http://example.com/label", "This is a Label", "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", "en", ""] + ["http://example.com/s01", "http://example.com/a", "http://example.com/Type1", "globalId", "", "https://example.com/graph/1"] + ["http://example.com/s01", "http://example.com/label", "This is a Label", "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", "en", "_:graph-2"] ["http://example.com/s01", "http://example.com/comment", "This is a comment", "http://www.w3.org/2001/XMLSchema#string", "", ""] ["http://example.com/s01", "http://example.com/creationDate", "2021-12-01", "http://www.w3.org/2001/XMLSchema#date", "", ""] ["http://example.com/s01", "http://example.com/creationTime", "2021-12-01T12:13:00", "http://www.w3.org/2001/XMLSchema#dateTime", "", ""] @@ -17,14 +43,24 @@ def test_small_string(): ["http://example.com/s01", "http://example.com/op1", "http://example.com/o2", "globalId", "", ""] ["http://example.com/s01", "http://example.com/op2", "http://example.com/o3", "globalId", "", ""] """ - d = Dataset().parse(data=s, format="hext") + d = Dataset() + d.parse(data=s, format="hext") + + expected_graph_names = ( + URIRef(DATASET_DEFAULT_GRAPH_ID), + URIRef("https://example.com/graph/1"), + BNode("graph-2"), + ) + for graph in d.contexts(): + assert graph.identifier in expected_graph_names + assert len(d) == 10 def test_small_string_cg(): s = """ - ["http://example.com/s01", "http://example.com/a", "http://example.com/Type1", "globalId", "", ""] - ["http://example.com/s01", "http://example.com/label", "This is a Label", "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", "en", ""] + ["http://example.com/s01", "http://example.com/a", "http://example.com/Type1", "globalId", "", "https://example.com/graph/1"] + ["http://example.com/s01", "http://example.com/label", "This is a Label", "http://www.w3.org/1999/02/22-rdf-syntax-ns#langString", "en", "_:graph-2"] ["http://example.com/s01", 
"http://example.com/comment", "This is a comment", "http://www.w3.org/2001/XMLSchema#string", "", ""] ["http://example.com/s01", "http://example.com/creationDate", "2021-12-01", "http://www.w3.org/2001/XMLSchema#date", "", ""] ["http://example.com/s01", "http://example.com/creationTime", "2021-12-01T12:13:00", "http://www.w3.org/2001/XMLSchema#dateTime", "", ""] @@ -34,7 +70,17 @@ def test_small_string_cg(): ["http://example.com/s01", "http://example.com/op1", "http://example.com/o2", "globalId", "", ""] ["http://example.com/s01", "http://example.com/op2", "http://example.com/o3", "globalId", "", ""] """ - d = ConjunctiveGraph().parse(data=s, format="hext") + d = ConjunctiveGraph(identifier=DATASET_DEFAULT_GRAPH_ID) + d.parse(data=s, format="hext") + + expected_graph_names = ( + URIRef(DATASET_DEFAULT_GRAPH_ID), + URIRef("https://example.com/graph/1"), + BNode("graph-2"), + ) + for graph in d.contexts(): + assert graph.identifier in expected_graph_names + assert len(d) == 10 @@ -89,7 +135,7 @@ def test_small_file_multigraph_cg(): def test_roundtrip(): # these are some RDF files that HexT can round-trip since the have no # literals with no datatype declared: - TEST_DIR = Path(__file__).parent.absolute() / "nt" + TEST_DIR = Path(__file__).parent.absolute() / "nt" # noqa: N806 files_to_skip = { "paths-04.nt": "subject literal", "even_more_literals.nt": "JSON decoding error", @@ -128,7 +174,7 @@ def test_roundtrip(): if cg2.context_aware: for context in cg2.contexts(): for triple in context.triples((None, None, None)): - if type(triple[2]) == Literal: + if type(triple[2]) is Literal: if triple[2].datatype == XSD.string: context.remove((triple[0], triple[1], triple[2])) context.add( @@ -136,7 +182,7 @@ def test_roundtrip(): ) else: for triple in cg2.triples((None, None, None)): - if type(triple[2]) == Literal: + if type(triple[2]) is Literal: if triple[2].datatype == XSD.string: cg2.remove((triple[0], triple[1], triple[2])) cg2.add((triple[0], triple[1], 
Literal(str(triple[2])))) diff --git a/test/test_parsers/test_parser_turtlelike.py b/test/test_parsers/test_parser_turtlelike.py index e74a55e78..2f0002b26 100644 --- a/test/test_parsers/test_parser_turtlelike.py +++ b/test/test_parsers/test_parser_turtlelike.py @@ -3,6 +3,8 @@ Turtle, NTriples, NQauds and TriG. """ +from __future__ import annotations + import enum import itertools from dataclasses import dataclass, field @@ -11,11 +13,10 @@ import pytest from _pytest.mark.structures import Mark, MarkDecorator, ParameterSet -from rdflib import XSD, Graph, Literal, Namespace +from rdflib import XSD, Graph, Literal from rdflib.term import Identifier from rdflib.util import from_n3 - -EGNS = Namespace("http://example.com/") +from test.utils.namespace import EGDC class FormatTrait(enum.Enum): @@ -65,14 +66,14 @@ class Format: def parse_identifier(identifier_string: str, format: str) -> Identifier: g = Graph() g.parse( - data=f"""<{EGNS.subject}> <{EGNS.predicate}> {identifier_string} .""", + data=f"""<{EGDC.subject}> <{EGDC.predicate}> {identifier_string} .""", format=format, ) triples = list(g.triples((None, None, None))) assert len(triples) == 1 (subj, pred, obj) = triples[0] - assert subj == EGNS.subject - assert pred == EGNS.predicate + assert subj == EGDC.subject + assert pred == EGDC.predicate assert isinstance(obj, Identifier) return obj diff --git a/test/test_parsers/test_swap_n3.py b/test/test_parsers/test_swap_n3.py index e173b8452..5de06f927 100644 --- a/test/test_parsers/test_swap_n3.py +++ b/test/test_parsers/test_swap_n3.py @@ -1,9 +1,9 @@ import os -from test.data import TEST_DATA_DIR import pytest import rdflib +from test.data import TEST_DATA_DIR """ SWAP N3 parser test suite diff --git a/test/test_parsers/test_trix_parse.py b/test/test_parsers/test_trix_parse.py index 89b744657..e6f2ae91b 100644 --- a/test/test_parsers/test_trix_parse.py +++ b/test/test_parsers/test_trix_parse.py @@ -1,8 +1,7 @@ -#!/usr/bin/env python import os -from test.data import 
TEST_DATA_DIR from rdflib.graph import ConjunctiveGraph +from test.data import TEST_DATA_DIR class TestTrixParse: @@ -12,7 +11,7 @@ def setup_method(self): def teardown_method(self): pass - def testAperture(self): + def testAperture(self): # noqa: N802 g = ConjunctiveGraph() trix_path = os.path.relpath( @@ -29,7 +28,7 @@ def testAperture(self): # print "Parsed %d triples"%t - def testSpec(self): + def testSpec(self): # noqa: N802 g = ConjunctiveGraph() trix_path = os.path.relpath( @@ -40,7 +39,7 @@ def testSpec(self): # print "Parsed %d triples"%len(g) - def testNG4j(self): + def testNG4j(self): # noqa: N802 g = ConjunctiveGraph() trix_path = os.path.relpath( diff --git a/test/test_path.py b/test/test_path.py index ad967849f..1af022786 100644 --- a/test/test_path.py +++ b/test/test_path.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging from typing import Union @@ -54,15 +56,40 @@ "rdfs:subClassOf?", ), ( - RDF.type / RDFS.subClassOf * "*", + RDF.type / MulPath(RDFS.subClassOf, "*"), + f"<{RDF.type}>/<{RDFS.subClassOf}>*", + "rdf:type/rdfs:subClassOf*", + ), + ( + RDF.type / ((SequencePath(RDFS.subClassOf)) * "*"), f"<{RDF.type}>/<{RDFS.subClassOf}>*", "rdf:type/rdfs:subClassOf*", ), + ( + RDF.type / RDFS.subClassOf * "*", + f"(<{RDF.type}>/<{RDFS.subClassOf}>)*", + "(rdf:type/rdfs:subClassOf)*", + ), ( -(RDF.type | RDFS.subClassOf), f"!(<{RDF.type}>|<{RDFS.subClassOf}>)", "!(rdf:type|rdfs:subClassOf)", ), + ( + -(RDF.type | ((SequencePath(RDFS.subClassOf)) * "*")), + f"!(<{RDF.type}>|<{RDFS.subClassOf}>*)", + "!(rdf:type|rdfs:subClassOf*)", + ), + ( + SequencePath(RDFS.subClassOf), + f"<{RDFS.subClassOf}>", + "rdfs:subClassOf", + ), + ( + AlternativePath(RDFS.subClassOf), + f"<{RDFS.subClassOf}>", + "rdfs:subClassOf", + ), ], ) def test_paths_n3( diff --git a/test/test_roundtrip.py b/test/test_roundtrip.py index 5f233ea5a..bff01b285 100644 --- a/test/test_roundtrip.py +++ b/test/test_roundtrip.py @@ -1,26 +1,3 @@ -import enum -import logging 
-import os.path -from pathlib import Path -from test.data import TEST_DATA_DIR -from test.utils import BNodeHandling, GraphHelper -from typing import Callable, Iterable, List, Optional, Set, Tuple, Type, Union -from xml.sax import SAXParseException - -import pytest -from _pytest.mark.structures import Mark, MarkDecorator, ParameterSet - -import rdflib -import rdflib.compare -from rdflib.graph import ConjunctiveGraph, Graph -from rdflib.namespace import XSD -from rdflib.parser import Parser, create_input_source -from rdflib.plugins.parsers.notation3 import BadSyntax -from rdflib.serializer import Serializer -from rdflib.util import guess_format - -logger = logging.getLogger(__name__) - """ Test round-tripping by all serializers/parser that are registered. This means, you may test more than just core rdflib! @@ -43,6 +20,32 @@ """ +from __future__ import annotations + +import enum +import logging +import os.path +from pathlib import Path +from typing import Callable, Iterable, List, Optional, Set, Tuple, Type, Union +from xml.sax import SAXParseException + +import pytest +from _pytest.mark.structures import Mark, MarkDecorator, ParameterSet + +import rdflib +import rdflib.compare +from rdflib.graph import ConjunctiveGraph, Graph +from rdflib.namespace import XSD +from rdflib.parser import Parser, create_input_source +from rdflib.plugins.parsers.notation3 import BadSyntax +from rdflib.serializer import Serializer +from rdflib.util import guess_format +from test.data import TEST_DATA_DIR +from test.utils import BNodeHandling, GraphHelper + +logger = logging.getLogger(__name__) + + NT_DATA_DIR = Path(TEST_DATA_DIR) / "suites" / "nt_misc" INVALID_NT_FILES = { # illegal literal as subject @@ -269,7 +272,7 @@ def roundtrip( for c in g2.contexts(): # type error: Incompatible types in assignment (expression has type "Node", variable has type "str") for s, p, o in c.triples((None, None, None)): # type: ignore[assignment] - if type(o) == rdflib.Literal and o.datatype == 
XSD.string: + if type(o) is rdflib.Literal and o.datatype == XSD.string: # type error: Argument 1 to "remove" of "Graph" has incompatible type "Tuple[str, Node, Literal]"; expected "Tuple[Optional[Node], Optional[Node], Optional[Node]]" c.remove((s, p, o)) # type: ignore[arg-type] # type error: Argument 1 to "add" of "Graph" has incompatible type "Tuple[str, Node, Literal]"; expected "Tuple[Node, Node, Node]" diff --git a/test/test_serializers/test_finalnewline.py b/test/test_serializers/test_finalnewline.py index 4b5eb503f..bc5451397 100644 --- a/test/test_serializers/test_finalnewline.py +++ b/test/test_serializers/test_finalnewline.py @@ -19,7 +19,7 @@ def test_finalnewline(): failed = set() for p in rdflib.plugin.plugins(None, rdflib.plugin.Serializer): v = graph.serialize(format=p.name, encoding="utf-8") - lines = v.split("\n".encode("utf-8")) + lines = v.split(b"\n") if b"\n" not in v or (lines[-1] != b""): failed.add(p.name) # JSON-LD does not require a final newline (because JSON doesn't) diff --git a/test/test_serializers/test_prettyxml.py b/test/test_serializers/test_prettyxml.py index 0084aa249..6c798e825 100644 --- a/test/test_serializers/test_prettyxml.py +++ b/test/test_serializers/test_prettyxml.py @@ -1,4 +1,3 @@ -# -*- coding: UTF-8 -*- from io import BytesIO from rdflib.graph import ConjunctiveGraph @@ -12,12 +11,12 @@ class SerializerTestBase: def setup_method(self): graph = ConjunctiveGraph() - graph.parse(data=self.testContent, format=self.testContentFormat) - self.sourceGraph = graph + graph.parse(data=self.test_content, format=self.test_content_format) + self.source_graph = graph def test_serialize_and_reparse(self): - reparsedGraph = serialize_and_load(self.sourceGraph, self.serializer) - _assert_equal_graphs(self.sourceGraph, reparsedGraph) + reparsed_graph = serialize_and_load(self.source_graph, self.serializer) + _assert_equal_graphs(self.source_graph, reparsed_graph) def test_multiple(self): """Repeats ``test_serialize`` ``self.repeats`` 
times, to reduce sucess based on in-memory ordering.""" @@ -58,25 +57,25 @@ def isbnode(v): return gcopy -def serialize(sourceGraph, makeSerializer, getValue=True, extra_args={}): - serializer = makeSerializer(sourceGraph) +def serialize(source_graph, make_serializer, get_value=True, extra_args={}): + serializer = make_serializer(source_graph) stream = BytesIO() serializer.serialize(stream, **extra_args) - return getValue and stream.getvalue() or stream + return get_value and stream.getvalue() or stream -def serialize_and_load(sourceGraph, makeSerializer): - stream = serialize(sourceGraph, makeSerializer, False) +def serialize_and_load(source_graph, make_serializer): + stream = serialize(source_graph, make_serializer, False) stream.seek(0) - reparsedGraph = ConjunctiveGraph() - reparsedGraph.parse(stream, format="xml") - return reparsedGraph + reparsed_graph = ConjunctiveGraph() + reparsed_graph.parse(stream, format="xml") + return reparsed_graph class TestPrettyXmlSerializer(SerializerTestBase): serializer = PrettyXMLSerializer - testContent = """ + test_content = """ @prefix rdfs: . @prefix owl: . @prefix : . @@ -116,57 +115,57 @@ class TestPrettyXmlSerializer(SerializerTestBase): rdfs:seeAlso _:bnode2 . 
""" - testContentFormat = "n3" + test_content_format = "n3" def test_result_fragments(self): - rdfXml = serialize(self.sourceGraph, self.serializer) + rdf_xml = serialize(self.source_graph, self.serializer) assert ( - ''.encode("latin-1") in rdfXml + ''.encode("latin-1") in rdf_xml ) assert ( ''.encode("latin-1") - in rdfXml + in rdf_xml ) - assert 'Bee'.encode("latin-1") in rdfXml + assert 'Bee'.encode("latin-1") in rdf_xml assert ( '3'.encode( "latin-1" ) - in rdfXml + in rdf_xml ) assert ( - '' in rdfXml, onlyBNodesMsg - # assert not '' in rdf_xml, onlyBNodesMsg + # assert not ''.encode("latin-1") in rdfXml - assert ''.encode("latin-1") in rdfXml + assert 'xml:base="http://example.org/"'.encode("latin-1") in rdf_xml + assert ''.encode("latin-1") in rdf_xml + assert ''.encode("latin-1") in rdf_xml assert ( '3'.encode( "latin-1" ) - in rdfXml + in rdf_xml ) assert ( - '." % (type(o), p) diff --git a/test/test_serializers/test_serializer.py b/test/test_serializers/test_serializer.py index 549a21beb..6fab945f3 100644 --- a/test/test_serializers/test_serializer.py +++ b/test/test_serializers/test_serializer.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum import itertools import logging @@ -7,8 +9,6 @@ from dataclasses import dataclass, field from functools import lru_cache from pathlib import Path, PosixPath, PurePath -from test.utils import GraphHelper, get_unique_plugins -from test.utils.destination import DestinationType, DestParmType, DestRef from typing import ( IO, Callable, @@ -32,10 +32,9 @@ from rdflib import RDF, XSD, Graph, Literal, Namespace, URIRef from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, ConjunctiveGraph, Dataset from rdflib.serializer import Serializer - -EGSCHEMA = Namespace("example:") -EGURN = Namespace("urn:example:") -EGHTTP = Namespace("http://example.com/") +from test.utils import GraphHelper, get_unique_plugins +from test.utils.destination import DestinationType, DestParmType, DestRef +from test.utils.namespace 
import EGDC, EGSCHEME, EGURN @pytest.mark.parametrize( @@ -54,7 +53,7 @@ + [("trig", 3, False)], ) def test_rdf_type(format: str, tuple_index: int, is_keyword: bool) -> None: - NS = Namespace("example:") + NS = Namespace("example:") # noqa: N806 graph = ConjunctiveGraph() graph.bind("eg", NS) nodes = [NS.subj, NS.pred, NS.obj, NS.graph] @@ -85,18 +84,18 @@ def simple_graph() -> Graph: than that it contains no blank nodes. """ graph = Graph() - graph.add((EGSCHEMA.subject, EGSCHEMA.predicate, EGSCHEMA.object)) - graph.add((EGSCHEMA.subject, EGSCHEMA.predicate, Literal(12))) + graph.add((EGSCHEME.subject, EGSCHEME.predicate, EGSCHEME.object)) + graph.add((EGSCHEME.subject, EGSCHEME.predicate, Literal(12))) graph.add( ( - EGHTTP.subject, - EGHTTP.predicate, + EGDC.subject, + EGDC.predicate, Literal("日本語の表記体系", lang="jpx"), ) ) - graph.add((EGURN.subject, EGSCHEMA.predicate, EGSCHEMA.subject)) + graph.add((EGURN.subject, EGSCHEME.predicate, EGSCHEME.subject)) graph.add( - (EGSCHEMA.object, EGHTTP.predicate, Literal("XSD string", datatype=XSD.string)) + (EGSCHEME.object, EGDC.predicate, Literal("XSD string", datatype=XSD.string)) ) return graph @@ -109,33 +108,31 @@ def simple_dataset() -> Dataset: than that it contains no blank nodes. 
""" graph = Dataset() - graph.default_context.add((EGSCHEMA.subject, EGSCHEMA.predicate, EGSCHEMA.object)) + graph.default_context.add((EGSCHEME.subject, EGSCHEME.predicate, EGSCHEME.object)) graph.default_context.add((EGURN.subject, EGURN.predicate, EGURN.object)) - graph.default_context.add((EGHTTP.subject, EGHTTP.predicate, Literal("typeless"))) - graph.get_context(EGSCHEMA.graph).add( - (EGSCHEMA.subject, EGSCHEMA.predicate, EGSCHEMA.object) + graph.default_context.add((EGDC.subject, EGDC.predicate, Literal("typeless"))) + graph.get_context(EGSCHEME.graph).add( + (EGSCHEME.subject, EGSCHEME.predicate, EGSCHEME.object) ) - graph.get_context(EGSCHEMA.graph).add( - (EGSCHEMA.subject, EGSCHEMA.predicate, Literal(12)) + graph.get_context(EGSCHEME.graph).add( + (EGSCHEME.subject, EGSCHEME.predicate, Literal(12)) ) - graph.get_context(EGSCHEMA.graph).add( + graph.get_context(EGSCHEME.graph).add( ( - EGHTTP.subject, - EGHTTP.predicate, + EGDC.subject, + EGDC.predicate, Literal("日本語の表記体系", lang="jpx"), ) ) - graph.get_context(EGSCHEMA.graph).add( - (EGURN.subject, EGSCHEMA.predicate, EGSCHEMA.subject) - ) - graph.get_context(EGURN.graph).add( - (EGSCHEMA.subject, EGSCHEMA.predicate, EGSCHEMA.object) + graph.get_context(EGSCHEME.graph).add( + (EGURN.subject, EGSCHEME.predicate, EGSCHEME.subject) ) graph.get_context(EGURN.graph).add( - (EGSCHEMA.subject, EGHTTP.predicate, EGHTTP.object) + (EGSCHEME.subject, EGSCHEME.predicate, EGSCHEME.object) ) + graph.get_context(EGURN.graph).add((EGSCHEME.subject, EGDC.predicate, EGDC.object)) graph.get_context(EGURN.graph).add( - (EGSCHEMA.subject, EGHTTP.predicate, Literal("XSD string", datatype=XSD.string)) + (EGSCHEME.subject, EGDC.predicate, Literal("XSD string", datatype=XSD.string)) ) return graph @@ -231,7 +228,7 @@ class GraphFormat(str, enum.Enum): @classmethod @lru_cache(maxsize=None) - def info_dict(cls) -> "GraphFormatInfoDict": + def info_dict(cls) -> GraphFormatInfoDict: return GraphFormatInfoDict.make( GraphFormatInfo( 
GraphFormat.TRIG, @@ -296,18 +293,18 @@ def info_dict(cls) -> "GraphFormatInfoDict": ) @property - def info(self) -> "GraphFormatInfo": + def info(self) -> GraphFormatInfo: return self.info_dict()[self] @classmethod @lru_cache(maxsize=None) - def set(cls) -> Set["GraphFormat"]: + def set(cls) -> Set[GraphFormat]: return set(*cls) @dataclass class GraphFormatInfo: - name: "GraphFormat" + name: GraphFormat graph_types: Set[GraphType] encodings: Set[str] serializer_list: Optional[List[str]] = field( @@ -330,13 +327,13 @@ def __post_init__(self) -> None: ) @property - def serializer(self) -> "str": + def serializer(self) -> str: if not self.serializers: raise RuntimeError("no serializers for {self.name}") return self.serializers[0] @property - def deserializer(self) -> "str": + def deserializer(self) -> str: if not self.deserializers: raise RuntimeError("no deserializer for {self.name}") return self.deserializer[0] @@ -344,7 +341,7 @@ def deserializer(self) -> "str": class GraphFormatInfoDict(Dict[str, GraphFormatInfo]): @classmethod - def make(cls, *graph_format: GraphFormatInfo) -> "GraphFormatInfoDict": + def make(cls, *graph_format: GraphFormatInfo) -> GraphFormatInfoDict: result = cls() for item in graph_format: result[item.name] = item @@ -720,4 +717,5 @@ def test_serialize_to_fileuri_with_authortiy( format=format.info.serializer, ) assert False # this should never happen as serialize should always fail - assert catcher.value is not None + # type error, mypy thinks this line is unreachable, but it works fine + assert catcher.value is not None # type: ignore[unreachable, unused-ignore] diff --git a/test/test_serializers/test_serializer_jsonld.py b/test/test_serializers/test_serializer_jsonld.py index aff0544e3..44b36c9dc 100644 --- a/test/test_serializers/test_serializer_jsonld.py +++ b/test/test_serializers/test_serializer_jsonld.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import json import logging import pprint @@ -6,10 +8,8 @@ import pytest from 
rdflib import Graph -from rdflib.namespace import Namespace from rdflib.plugins.shared.jsonld.context import Context - -EG = Namespace("http://example.org/") +from test.utils.namespace import EGDO @pytest.mark.parametrize( @@ -18,11 +18,11 @@ ( Context( { - "eg": f"{EG}", + "eg": f"{EGDO}", } ), ), - ({"eg": f"{EG}"},), + ({"eg": f"{EGDO}"},), ], ) def test_serialize_context(input: Union[Dict[str, Any], Context]) -> None: @@ -30,15 +30,15 @@ def test_serialize_context(input: Union[Dict[str, Any], Context]) -> None: The JSON-LD serializer accepts and correctly serializes the context argument to the output. """ graph = Graph() - graph.add((EG.subject, EG.predicate, EG.object0)) - graph.add((EG.subject, EG.predicate, EG.object1)) + graph.add((EGDO.subject, EGDO.predicate, EGDO.object0)) + graph.add((EGDO.subject, EGDO.predicate, EGDO.object1)) context = Context( { - "eg": f"{EG}", + "eg": f"{EGDO}", } ) logging.debug("context = %s", pprint.pformat(vars(context))) data = graph.serialize(format="json-ld", context=context) logging.debug("data = %s", data) obj = json.loads(data) - assert obj["@context"] == {"eg": f"{EG}"} + assert obj["@context"] == {"eg": f"{EGDO}"} diff --git a/test/test_serializers/test_serializer_longturtle.py b/test/test_serializers/test_serializer_longturtle.py index cc184787a..847d506ab 100644 --- a/test/test_serializers/test_serializer_longturtle.py +++ b/test/test_serializers/test_serializer_longturtle.py @@ -1,207 +1,255 @@ -# tests for the longturtle Serializer +import difflib +from textwrap import dedent -from rdflib import Graph +from rdflib import Graph, Namespace +from rdflib.namespace import GEO, SDO def test_longturtle(): - g = Graph() + """Compares the output of a longturtle graph serialization to a fixed, hand-typed, target + to test most of the longtertle differences to regular turtle - g.parse( + Includes basic triples, Blank Nodes - 2-levels deep - Collections and so on""" + # load graph with data + g = Graph().parse( data=""" - 
@prefix ex: . - @prefix ex2: . - @prefix rdf: . - @prefix xsd: . + { + "@context": { + "cn": "https://linked.data.gov.au/def/cn/", + "sdo": "https://schema.org/", + "Organization": "sdo:Organization", + "Person": "sdo:Person", + "Place": "sdo:Place", + "PostalAddress": "sdo:PostalAddress", + "address": "sdo:address", + "addressLocality": "sdo:addressLocality", + "addressRegion": "sdo:addressRegion", + "postalCode": "sdo:postalCode", + "addressCountry": "sdo:addressCountry", + "streetAddress": "sdo:streetAddress", + "age": "sdo:age", + "alternateName": "sdo:alternateName", + "geo": "sdo:geo", + "hasPart": "sdo:hasPart", + "identifier": "sdo:identifier", + "location": "sdo:location", + "name": "sdo:name", + "polygon": "sdo:polygon", + "value": "http://www.w3.org/1999/02/22-rdf-syntax-ns#value", + "wktLiteral": "http://www.opengis.net/ont/geosparql#wktLiteral", + "worksFor": "sdo:worksFor" + }, + "@graph": [ + { + "@id": "https://kurrawong.ai", + "@type": "Organization", + "location": { + "@id": "https://kurrawong.ai/hq" + } + }, + { + "@id": "https://kurrawong.ai/hq", + "@type": "Place", + "address": { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab8" + }, + "geo": { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab13" + }, + "name": "KurrawongAI HQ" + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab8", + "@type": "PostalAddress", + "addressCountry": { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab12" + }, + "addressLocality": "Shorncliffe", + "addressRegion": "QLD", + "postalCode": 4017, + "streetAddress": { + "@list": [ + 72, + "Yundah", + "Street" + ] + } + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab12", + "identifier": "au", + "name": "Australia" + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab13", + "polygon": { + "@type": "wktLiteral", + "@value": "POLYGON((153.082403 -27.325801, 153.08241 -27.32582, 153.082943 -27.325612, 153.083010 -27.325742, 153.083543 -27.325521, 153.083456 -27.325365, 153.082403 -27.325801))" + } + }, + { + "@id": 
"http://example.com/nicholas", + "@type": "Person", + "age": 41, + "alternateName": [ + "Nick Car", + "N.J. Car", + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab1" + } + ], + "name": { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab2" + }, + "worksFor": { + "@id": "https://kurrawong.ai" + } + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab1", + "name": "Dr N.J. Car" + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab2", + "@type": "cn:CompoundName", + "hasPart": [{ + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab3" + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab4" + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab5" + } + ] + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab3", + "@type": "cn:CompoundName", + "value": "Nicholas" + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab4", + "@type": "cn:CompoundName", + "value": "John" + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab5", + "@type": "cn:CompoundName", + "hasPart": [{ + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab6" + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab7" + } + ] + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab6", + "@type": "cn:CompoundName", + "value": "Car" + }, + { + "@id": "_:n6924e85bfee648a4a45bac9f4ab9909ab7", + "@type": "cn:CompoundName", + "value": "Maxov" + } + ] + } + """, + format="application/ld+json", + ) + + # declare a few namespaces for Turtle + g.bind("ex", Namespace("http://example.com/")) + g.bind("geo", GEO) + g.bind("cn", Namespace("https://linked.data.gov.au/def/cn/")) + g.bind("sdo", SDO) + + # run the long turtle serializer + output = g.serialize(format="longturtle") + + # fix the target + target = dedent( + """ PREFIX cn: + PREFIX ex: + PREFIX geo: + PREFIX rdf: + PREFIX sdo: + PREFIX xsd: - - a ex:Thing , ex:OtherThing ; - ex:name "Thing", "Other Thing"@en , "もの"@ja , "rzecz"@pl ; - ex:singleValueProp "propval" ; - ex:multiValueProp "propval 1" ; - ex:multiValueProp "propval 2" ; - ex:multiValueProp "propval 3" ; - 
ex:multiValueProp "propval 4" ; - ex:bnObj [ - ex:singleValueProp "propval" ; - ex:multiValueProp "propval 1" ; - ex:multiValueProp "propval 2" ; - ex:bnObj [ - ex:singleValueProp "propval" ; - ex:multiValueProp "propval 1" ; - ex:multiValueProp "propval 2" ; - ex:bnObj [ - ex:singleValueProp "propval" ; - ex:multiValueProp "propval 1" ; - ex:multiValueProp "propval 2" ; + ex:nicholas + a sdo:Person ; + sdo:age 41 ; + sdo:alternateName + [ + sdo:name "Dr N.J. Car" ; + ] , + "N.J. Car" , + "Nick Car" ; + sdo:name + [ + a cn:CompoundName ; + sdo:hasPart + [ + a cn:CompoundName ; + rdf:value "Nicholas" ; ] , [ - ex:singleValueProp "propval" ; - ex:multiValueProp "propval 1" ; - ex:multiValueProp "propval 2" ; + a cn:CompoundName ; + rdf:value "John" ; ] , [ - ex:singleValueProp "propval" ; - ex:multiValueProp "propval 1" ; - ex:multiValueProp "propval 2" ; + a cn:CompoundName ; + sdo:hasPart + [ + a cn:CompoundName ; + rdf:value "Car" ; + ] , + [ + a cn:CompoundName ; + rdf:value "Maxov" ; + ] ; ] ; - ] ; ] ; - . + sdo:worksFor ; + . - ex:b - rdf:type ex:Thing ; - ex:name "B" ; - ex2:name "B" . + + a sdo:Organization ; + sdo:location ; + . - ex:c - rdf:type ex:Thing ; - ex:name "C" ; - ex:lst2 ( - ex:one - ex:two - ex:three - ) ; - ex:lst ( - ex:one - ex:two - ex:three - ) , - ( - ex:four - ex:fize - ex:six - ) ; - ex:bnObj [ - ex:lst ( - ex:one - ex:two - ex:three - ) , - ( - ex:four - ex:fize - ex:six + + a sdo:Place ; + sdo:address + [ + a sdo:PostalAddress ; + sdo:addressCountry + [ + sdo:identifier "au" ; + sdo:name "Australia" ; + ] ; + sdo:addressLocality "Shorncliffe" ; + sdo:addressRegion "QLD" ; + sdo:postalCode 4017 ; + sdo:streetAddress ( + 72 + "Yundah" + "Street" ) ; - ] . 
- """, - format="turtle", - ) - s = g.serialize(format="longturtle") - lines = s.split("\n") - - assert "ex:b" in lines - assert " a ex:Thing ;" in lines - assert ( - """ ex2:name "B" ; -.""" - in s - ) - assert ( - """ ( - ex:one - ex:two - ex:three - ) ,""" - in s + ] ; + sdo:geo + [ + sdo:polygon "POLYGON((153.082403 -27.325801, 153.08241 -27.32582, 153.082943 -27.325612, 153.083010 -27.325742, 153.083543 -27.325521, 153.083456 -27.325365, 153.082403 -27.325801))"^^geo:wktLiteral ; + ] ; + sdo:name "KurrawongAI HQ" ; + . + """ ) - assert ' ex:singleValueProp "propval" ;' in lines - - expected_s = """PREFIX ex: -PREFIX ex2: -PREFIX rdf: - -ex:b - a ex:Thing ; - ex:name "B" ; - ex2:name "B" ; -. - -ex:c - a ex:Thing ; - ex:bnObj [ - ex:lst - ( - ex:one - ex:two - ex:three - ) , - ( - ex:four - ex:fize - ex:six - ) - ] ; - ex:lst - ( - ex:four - ex:fize - ex:six - ) , - ( - ex:one - ex:two - ex:three - ) ; - ex:lst2 ( - ex:one - ex:two - ex:three - ) ; - ex:name "C" ; -. - - - a - ex:OtherThing , - ex:Thing ; - ex:bnObj [ - ex:bnObj [ - ex:bnObj - [ - ex:multiValueProp - "propval 1" , - "propval 2" ; - ex:singleValueProp "propval" - ] , - [ - ex:multiValueProp - "propval 1" , - "propval 2" ; - ex:singleValueProp "propval" - ] , - [ - ex:multiValueProp - "propval 1" , - "propval 2" ; - ex:singleValueProp "propval" - ] ; - ex:multiValueProp - "propval 1" , - "propval 2" ; - ex:singleValueProp "propval" - ] ; - ex:multiValueProp - "propval 1" , - "propval 2" ; - ex:singleValueProp "propval" - ] ; - ex:multiValueProp - "propval 1" , - "propval 2" , - "propval 3" , - "propval 4" ; - ex:name - "Thing" , - "Other Thing"@en , - "もの"@ja , - "rzecz"@pl ; - ex:singleValueProp "propval" ; -. 
- -""" - assert s == expected_s + # compare output to target + # - any differences will produce output + diff = "\n".join(list(difflib.unified_diff(target.split("\n"), output.split("\n")))) - # re-parse test - g2 = Graph().parse(data=s) # turtle - assert len(g2) == len(g) + assert not diff, diff diff --git a/test/test_serializers/test_serializer_n3.py b/test/test_serializers/test_serializer_n3.py index afbdc6395..f266da715 100644 --- a/test/test_serializers/test_serializer_n3.py +++ b/test/test_serializers/test_serializer_n3.py @@ -1,13 +1,13 @@ import logging -from test.utils import GraphHelper import rdflib import rdflib.term from rdflib import Graph from rdflib.graph import QuotedGraph -from rdflib.namespace import Namespace from rdflib.plugins.parsers.notation3 import LOG_implies_URI from rdflib.term import BNode, URIRef +from test.utils import GraphHelper +from test.utils.namespace import EGDC logger = logging.getLogger(__name__) @@ -55,8 +55,6 @@ def test_implies(): ) in graph2 -EG = Namespace("http://example.com/") - LOG_implies = URIRef(LOG_implies_URI) @@ -70,13 +68,13 @@ def test_merging() -> None: {:a :b :c} => {:d :e :f}. """ graph = Graph() - assert (EG.a, EG.b, EG.c) not in graph + assert (EGDC.a, EGDC.b, EGDC.c) not in graph graph.parse(data=data_a, format="n3") - assert (EG.a, EG.b, EG.c) in graph + assert (EGDC.a, EGDC.b, EGDC.c) in graph graph.parse(data=data_b, format="n3") - assert (EG.a, EG.b, EG.c) in graph + assert (EGDC.a, EGDC.b, EGDC.c) in graph assert len(set(graph.triples((None, LOG_implies, None)))) == 1 data_s = graph.serialize(format="n3") @@ -86,7 +84,7 @@ def test_merging() -> None: graph.parse(data=data_s, format="n3") quad_set = GraphHelper.triple_set(graph) - assert (EG.a, EG.b, EG.c) in graph + assert (EGDC.a, EGDC.b, EGDC.c) in graph assert len(set(graph.triples((None, LOG_implies, None)))) == 1 logging.debug("quad_set = %s", quad_set) @@ -98,10 +96,10 @@ def test_single_simple_triple() -> None: :a :b :c. 
""" graph = Graph() - assert (EG.a, EG.b, EG.c) not in graph + assert (EGDC.a, EGDC.b, EGDC.c) not in graph graph.parse(data=data_a, format="n3") - assert (EG.a, EG.b, EG.c) in graph + assert (EGDC.a, EGDC.b, EGDC.c) in graph data_s = graph.serialize(format="n3") logging.debug("data_s = %s", data_s) @@ -110,13 +108,13 @@ def test_single_simple_triple() -> None: graph.parse(data=data_s, format="n3") quad_set = GraphHelper.triple_set(graph) - assert (EG.a, EG.b, EG.c) in graph + assert (EGDC.a, EGDC.b, EGDC.c) in graph logging.debug("quad_set = %s", quad_set) def test_implies_nothing() -> None: - triple_a = (EG.a, EG.b, EG.c) + triple_a = (EGDC.a, EGDC.b, EGDC.c) graph = Graph() qgraph_a = QuotedGraph(graph.store, BNode()) qgraph_a.add(triple_a) diff --git a/test/test_serializers/test_serializer_trix.py b/test/test_serializers/test_serializer_trix.py index f536c8d06..bdfc91c81 100644 --- a/test/test_serializers/test_serializer_trix.py +++ b/test/test_serializers/test_serializer_trix.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - from io import BytesIO from rdflib.graph import ConjunctiveGraph, Graph diff --git a/test/test_serializers/test_serializer_turtle.py b/test/test_serializers/test_serializer_turtle.py index d629faea8..c91459829 100644 --- a/test/test_serializers/test_serializer_turtle.py +++ b/test/test_serializers/test_serializer_turtle.py @@ -3,7 +3,7 @@ from rdflib.plugins.serializers.turtle import TurtleSerializer -def testTurtleFinalDot(): +def test_turtle_final_dot(): """ https://github.com/RDFLib/rdflib/issues/282 """ @@ -16,7 +16,7 @@ def testTurtleFinalDot(): assert b"ns:bob." not in s -def testTurtleBoolList(): +def test_turtle_bool_list(): subject = URIRef("http://localhost/user") predicate = URIRef("http://localhost/vocab#hasList") g1 = Graph() @@ -40,7 +40,7 @@ def testTurtleBoolList(): assert bool_list == [True, False, True] -def testUnicodeEscaping(): +def test_unicode_escaping(): turtle_string = " . . ." 
g = Graph() @@ -56,21 +56,21 @@ def testUnicodeEscaping(): def test_turtle_valid_list(): - NS = Namespace("http://example.org/ns/") + ns = Namespace("http://example.org/ns/") g = Graph() g.parse( data=""" @prefix : <{0}> . :s :p (""), (0), (false) . """.format( - NS + ns ), format="turtle", ) turtle_serializer = TurtleSerializer(g) - for o in g.objects(NS.s, NS.p): + for o in g.objects(ns.s, ns.p): assert turtle_serializer.isValidList(o) diff --git a/test/test_serializers/test_serializer_xml.py b/test/test_serializers/test_serializer_xml.py index ad9012939..eda0b3d43 100644 --- a/test/test_serializers/test_serializer_xml.py +++ b/test/test_serializers/test_serializer_xml.py @@ -11,12 +11,12 @@ class SerializerTestBase: def setup_method(self): graph = ConjunctiveGraph() - graph.parse(data=self.testContent, format=self.testContentFormat) - self.sourceGraph = graph + graph.parse(data=self.test_content, format=self.test_content_format) + self.source_graph = graph def test_serialize_and_reparse(self): - reparsedGraph = serialize_and_load(self.sourceGraph, self.serializer) - _assert_equal_graphs(self.sourceGraph, reparsedGraph) + reparsed_graph = serialize_and_load(self.source_graph, self.serializer) + _assert_equal_graphs(self.source_graph, reparsed_graph) def test_multiple(self): """Repeats ``test_serialize`` ``self.repeats`` times, to reduce sucess based on in-memory ordering.""" @@ -57,25 +57,25 @@ def isbnode(v): return gcopy -def serialize(sourceGraph, makeSerializer, getValue=True, extra_args={}): - serializer = makeSerializer(sourceGraph) +def serialize(source_graph, make_serializer, get_value=True, extra_args={}): + serializer = make_serializer(source_graph) stream = BytesIO() serializer.serialize(stream, **extra_args) - return getValue and stream.getvalue() or stream + return get_value and stream.getvalue() or stream -def serialize_and_load(sourceGraph, makeSerializer): - stream = serialize(sourceGraph, makeSerializer, False) +def 
serialize_and_load(source_graph, make_serializer): + stream = serialize(source_graph, make_serializer, False) stream.seek(0) - reparsedGraph = ConjunctiveGraph() - reparsedGraph.parse(stream, publicID=None, format="xml") - return reparsedGraph + reparsed_graph = ConjunctiveGraph() + reparsed_graph.parse(stream, publicID=None, format="xml") + return reparsed_graph class TestXMLSerializer(SerializerTestBase): serializer = XMLSerializer - testContent = """ + test_content = """ @prefix rdfs: . @prefix owl: . @prefix : . @@ -115,41 +115,41 @@ class TestXMLSerializer(SerializerTestBase): rdfs:seeAlso _:bnode2 . """ - testContentFormat = "n3" + test_content_format = "n3" def test_result_fragments(self): - rdfXml = serialize(self.sourceGraph, self.serializer) + rdf_xml = serialize(self.source_graph, self.serializer) # print "--------" - # print rdfXml + # print rdf_xml # print "--------" assert ( ''.encode("latin-1") - in rdfXml + in rdf_xml ) assert ( ''.encode( "latin-1" ) - in rdfXml + in rdf_xml ) assert ( ''.encode("latin-1") - in rdfXml + in rdf_xml ) - assert 'Bee'.encode("latin-1") in rdfXml + assert 'Bee'.encode("latin-1") in rdf_xml assert ( '3'.encode( "latin-1" ) - in rdfXml + in rdf_xml ) assert ( - ''.encode("latin-1") in rdfXml - assert ''.encode("latin-1") in rdfXml - assert ''.encode("latin-1") in rdfXml + assert 'xml:base="http://example.org/"'.encode("latin-1") in rdf_xml + assert ''.encode("latin-1") in rdf_xml + assert ''.encode("latin-1") in rdf_xml + assert ''.encode("latin-1") in rdf_xml assert ( '3'.encode( "latin-1" ) - in rdfXml + in rdf_xml ) assert ( - '." 
% (type(o), p) diff --git a/test/test_sparql/test_datetime_processing.py b/test/test_sparql/test_datetime_processing.py index 9fb0901a8..c934e9543 100644 --- a/test/test_sparql/test_datetime_processing.py +++ b/test/test_sparql/test_datetime_processing.py @@ -1,11 +1,11 @@ import io -from test.utils import eq_ import rdflib from rdflib import Graph +from test.utils import eq_ -def test_dateTime_dateTime_subs_issue(): +def test_datetime_datetime_subs_issue(): """ Test for query mentioned in the Issue #629 https://github.com/RDFLib/rdflib/issues/629 @@ -58,20 +58,20 @@ def test_dateTime_dateTime_subs_issue(): # FirstElement of these tuples will be a node with a path of directory of saved project # Second Element while represent the actual durations - expectedFirstDuration = rdflib.term.Literal( + expected_first_duration = rdflib.term.Literal( "P1DT1M", datatype=rdflib.term.URIRef("http://www.w3.org/2001/XMLSchema#duration"), ) - expectedSecondDuration = rdflib.term.Literal( + expected_second_duration = rdflib.term.Literal( "PT25M", datatype=rdflib.term.URIRef("http://www.w3.org/2001/XMLSchema#duration"), ) - eq_(answer[0][1], expectedFirstDuration) - eq_(answer[1][1], expectedSecondDuration) + eq_(answer[0][1], expected_first_duration) + eq_(answer[1][1], expected_second_duration) -def test_dateTime_duration_subs(): +def test_datetime_duration_subs(): """ Test cases for subtraction operation between dateTime and duration @@ -150,7 +150,7 @@ def test_dateTime_duration_subs(): eq_(list(result2)[1][0], expected[1]) -def test_dateTime_duration_add(): +def test_datetime_duration_add(): """ Test cases for addition operation between dateTime and duration @@ -232,7 +232,7 @@ def test_dateTime_duration_add(): eq_(list(result2)[1][0], expected[1]) -def test_dateTime_dateTime_subs(): +def test_datetime_datetime_subs(): """ Test cases for subtraction operation between dateTime and dateTime diff --git a/test/test_sparql/test_evaluate_bind.py 
b/test/test_sparql/test_evaluate_bind.py index 3ceeffa95..3821f1bf5 100644 --- a/test/test_sparql/test_evaluate_bind.py +++ b/test/test_sparql/test_evaluate_bind.py @@ -2,6 +2,7 @@ Verify evaluation of BIND expressions of different types. See . """ + import pytest from rdflib import Graph, Literal, URIRef, Variable diff --git a/test/test_sparql/test_expressions.py b/test/test_sparql/test_expressions.py index 45573d8a5..200c68543 100644 --- a/test/test_sparql/test_expressions.py +++ b/test/test_sparql/test_expressions.py @@ -1,11 +1,11 @@ from functools import partial -from test.utils import eq_ as eq import rdflib.plugins.sparql.parser as p from rdflib import Literal, Variable from rdflib.plugins.sparql.algebra import translatePName, traverse from rdflib.plugins.sparql.operators import simplify from rdflib.plugins.sparql.sparql import Prologue, QueryContext, SPARQLError +from test.utils import eq_ as eq def _eval(e, ctx=None): @@ -25,122 +25,120 @@ def _translate(e): return simplify(traverse(e, visitPost=partial(translatePName, prologue=Prologue()))) -def testRegex(): - assert _eval(_translate((p.Expression.parseString('REGEX("zxcabczxc","abc")')[0]))) +def test_regex(): + assert _eval(_translate(p.Expression.parseString('REGEX("zxcabczxc","abc")')[0])) eq( - bool(_eval(_translate((p.Expression.parseString('REGEX("zxczxc","abc")')[0])))), + bool(_eval(_translate(p.Expression.parseString('REGEX("zxczxc","abc")')[0]))), False, ) - assert _eval( - _translate((p.Expression.parseString('REGEX("bbbaaaaabbb","ba*b")')[0])) - ) + assert _eval(_translate(p.Expression.parseString('REGEX("bbbaaaaabbb","ba*b")')[0])) def test_arithmetic(): - eq(_eval(_translate((p.Expression.parseString("2+3")[0]))).value, 5) - eq(_eval(_translate((p.Expression.parseString("3-2")[0]))).value, 1) + eq(_eval(_translate(p.Expression.parseString("2+3")[0])).value, 5) + eq(_eval(_translate(p.Expression.parseString("3-2")[0])).value, 1) - 
eq(_eval(_translate((p.Expression.parseString("2*3")[0]))).value, 6) - eq(_eval(_translate((p.Expression.parseString("4/2")[0]))).value, 2) + eq(_eval(_translate(p.Expression.parseString("2*3")[0])).value, 6) + eq(_eval(_translate(p.Expression.parseString("4/2")[0])).value, 2) - eq(_eval(_translate((p.Expression.parseString("2+2+2")[0]))).value, 6) - eq(_eval(_translate((p.Expression.parseString("2-2+2")[0]))).value, 2) - eq(_eval(_translate((p.Expression.parseString("(2-2)+2")[0]))).value, 2) - eq(_eval(_translate((p.Expression.parseString("2-(2+2)")[0]))).value, -2) + eq(_eval(_translate(p.Expression.parseString("2+2+2")[0])).value, 6) + eq(_eval(_translate(p.Expression.parseString("2-2+2")[0])).value, 2) + eq(_eval(_translate(p.Expression.parseString("(2-2)+2")[0])).value, 2) + eq(_eval(_translate(p.Expression.parseString("2-(2+2)")[0])).value, -2) - eq(_eval(_translate((p.Expression.parseString("2*2*2")[0]))).value, 8) - eq(_eval(_translate((p.Expression.parseString("4/2*2")[0]))).value, 4) - eq(_eval(_translate((p.Expression.parseString("8/4*2")[0]))).value, 4) - eq(_eval(_translate((p.Expression.parseString("8/(4*2)")[0]))).value, 1) - eq(_eval(_translate((p.Expression.parseString("(2/2)*2")[0]))).value, 2) - eq(_eval(_translate((p.Expression.parseString("4/(2*2)")[0]))).value, 1) + eq(_eval(_translate(p.Expression.parseString("2*2*2")[0])).value, 8) + eq(_eval(_translate(p.Expression.parseString("4/2*2")[0])).value, 4) + eq(_eval(_translate(p.Expression.parseString("8/4*2")[0])).value, 4) + eq(_eval(_translate(p.Expression.parseString("8/(4*2)")[0])).value, 1) + eq(_eval(_translate(p.Expression.parseString("(2/2)*2")[0])).value, 2) + eq(_eval(_translate(p.Expression.parseString("4/(2*2)")[0])).value, 1) - eq(_eval(_translate((p.Expression.parseString("2+3*2")[0]))).value, 8) - eq(_eval(_translate((p.Expression.parseString("(2+3)*2")[0]))).value, 10) - eq(_eval(_translate((p.Expression.parseString("2+4/2")[0]))).value, 4) - 
eq(_eval(_translate((p.Expression.parseString("(2+4)/2")[0]))).value, 3) + eq(_eval(_translate(p.Expression.parseString("2+3*2")[0])).value, 8) + eq(_eval(_translate(p.Expression.parseString("(2+3)*2")[0])).value, 10) + eq(_eval(_translate(p.Expression.parseString("2+4/2")[0])).value, 4) + eq(_eval(_translate(p.Expression.parseString("(2+4)/2")[0])).value, 3) def test_arithmetic_var(): ctx = QueryContext() ctx[Variable("x")] = Literal(2) - eq(_eval(_translate((p.Expression.parseString("2+?x")[0])), ctx).value, 4) + eq(_eval(_translate(p.Expression.parseString("2+?x")[0]), ctx).value, 4) - eq(_eval(_translate((p.Expression.parseString("?x+3")[0])), ctx).value, 5) - eq(_eval(_translate((p.Expression.parseString("3-?x")[0])), ctx).value, 1) + eq(_eval(_translate(p.Expression.parseString("?x+3")[0]), ctx).value, 5) + eq(_eval(_translate(p.Expression.parseString("3-?x")[0]), ctx).value, 1) - eq(_eval(_translate((p.Expression.parseString("?x*3")[0])), ctx).value, 6) - eq(_eval(_translate((p.Expression.parseString("4/?x")[0])), ctx).value, 2) + eq(_eval(_translate(p.Expression.parseString("?x*3")[0]), ctx).value, 6) + eq(_eval(_translate(p.Expression.parseString("4/?x")[0]), ctx).value, 2) - eq(_eval(_translate((p.Expression.parseString("?x+?x+?x")[0])), ctx).value, 6) - eq(_eval(_translate((p.Expression.parseString("?x-?x+?x")[0])), ctx).value, 2) - eq(_eval(_translate((p.Expression.parseString("(?x-?x)+?x")[0])), ctx).value, 2) - eq(_eval(_translate((p.Expression.parseString("?x-(?x+?x)")[0])), ctx).value, -2) + eq(_eval(_translate(p.Expression.parseString("?x+?x+?x")[0]), ctx).value, 6) + eq(_eval(_translate(p.Expression.parseString("?x-?x+?x")[0]), ctx).value, 2) + eq(_eval(_translate(p.Expression.parseString("(?x-?x)+?x")[0]), ctx).value, 2) + eq(_eval(_translate(p.Expression.parseString("?x-(?x+?x)")[0]), ctx).value, -2) - eq(_eval(_translate((p.Expression.parseString("?x*?x*?x")[0])), ctx).value, 8) - eq(_eval(_translate((p.Expression.parseString("4/?x*?x")[0])), 
ctx).value, 4) - eq(_eval(_translate((p.Expression.parseString("8/4*?x")[0])), ctx).value, 4) - eq(_eval(_translate((p.Expression.parseString("8/(4*?x)")[0])), ctx).value, 1) - eq(_eval(_translate((p.Expression.parseString("(?x/?x)*?x")[0])), ctx).value, 2) - eq(_eval(_translate((p.Expression.parseString("4/(?x*?x)")[0])), ctx).value, 1) + eq(_eval(_translate(p.Expression.parseString("?x*?x*?x")[0]), ctx).value, 8) + eq(_eval(_translate(p.Expression.parseString("4/?x*?x")[0]), ctx).value, 4) + eq(_eval(_translate(p.Expression.parseString("8/4*?x")[0]), ctx).value, 4) + eq(_eval(_translate(p.Expression.parseString("8/(4*?x)")[0]), ctx).value, 1) + eq(_eval(_translate(p.Expression.parseString("(?x/?x)*?x")[0]), ctx).value, 2) + eq(_eval(_translate(p.Expression.parseString("4/(?x*?x)")[0]), ctx).value, 1) def test_comparisons(): - eq(bool(_eval(_translate((p.Expression.parseString("2<3")[0])))), True) - eq(bool(_eval(_translate((p.Expression.parseString("2<3.0")[0])))), True) - eq(bool(_eval(_translate((p.Expression.parseString("2<3e0")[0])))), True) + eq(bool(_eval(_translate(p.Expression.parseString("2<3")[0]))), True) + eq(bool(_eval(_translate(p.Expression.parseString("2<3.0")[0]))), True) + eq(bool(_eval(_translate(p.Expression.parseString("2<3e0")[0]))), True) - eq(bool(_eval(_translate((p.Expression.parseString("4<3")[0])))), False) - eq(bool(_eval(_translate((p.Expression.parseString("4<3.0")[0])))), False) - eq(bool(_eval(_translate((p.Expression.parseString("4<3e0")[0])))), False) + eq(bool(_eval(_translate(p.Expression.parseString("4<3")[0]))), False) + eq(bool(_eval(_translate(p.Expression.parseString("4<3.0")[0]))), False) + eq(bool(_eval(_translate(p.Expression.parseString("4<3e0")[0]))), False) - eq(bool(_eval(_translate((p.Expression.parseString("2<2.1")[0])))), True) - eq(bool(_eval(_translate((p.Expression.parseString("2<21e-1")[0])))), True) + eq(bool(_eval(_translate(p.Expression.parseString("2<2.1")[0]))), True) + 
eq(bool(_eval(_translate(p.Expression.parseString("2<21e-1")[0]))), True) - eq(bool(_eval(_translate((p.Expression.parseString("2=2.0")[0])))), True) - eq(bool(_eval(_translate((p.Expression.parseString("2=2e0")[0])))), True) + eq(bool(_eval(_translate(p.Expression.parseString("2=2.0")[0]))), True) + eq(bool(_eval(_translate(p.Expression.parseString("2=2e0")[0]))), True) - eq(bool(_eval(_translate((p.Expression.parseString('2="cake"')[0])))), False) + eq(bool(_eval(_translate(p.Expression.parseString('2="cake"')[0]))), False) def test_comparisons_var(): ctx = QueryContext() ctx[Variable("x")] = Literal(2) - eq(bool(_eval(_translate((p.Expression.parseString("?x<3")[0])), ctx)), True) - eq(bool(_eval(_translate((p.Expression.parseString("?x<3.0")[0])), ctx)), True) - eq(bool(_eval(_translate((p.Expression.parseString("?x<3e0")[0])), ctx)), True) + eq(bool(_eval(_translate(p.Expression.parseString("?x<3")[0]), ctx)), True) + eq(bool(_eval(_translate(p.Expression.parseString("?x<3.0")[0]), ctx)), True) + eq(bool(_eval(_translate(p.Expression.parseString("?x<3e0")[0]), ctx)), True) - eq(bool(_eval(_translate((p.Expression.parseString("?x<2.1")[0])), ctx)), True) - eq(bool(_eval(_translate((p.Expression.parseString("?x<21e-1")[0])), ctx)), True) + eq(bool(_eval(_translate(p.Expression.parseString("?x<2.1")[0]), ctx)), True) + eq(bool(_eval(_translate(p.Expression.parseString("?x<21e-1")[0]), ctx)), True) - eq(bool(_eval(_translate((p.Expression.parseString("?x=2.0")[0])), ctx)), True) - eq(bool(_eval(_translate((p.Expression.parseString("?x=2e0")[0])), ctx)), True) + eq(bool(_eval(_translate(p.Expression.parseString("?x=2.0")[0]), ctx)), True) + eq(bool(_eval(_translate(p.Expression.parseString("?x=2e0")[0]), ctx)), True) - eq(bool(_eval(_translate((p.Expression.parseString('?x="cake"')[0])), ctx)), False) + eq(bool(_eval(_translate(p.Expression.parseString('?x="cake"')[0]), ctx)), False) ctx = QueryContext() ctx[Variable("x")] = Literal(4) - 
eq(bool(_eval(_translate((p.Expression.parseString("?x<3")[0])), ctx)), False) - eq(bool(_eval(_translate((p.Expression.parseString("?x<3.0")[0])), ctx)), False) - eq(bool(_eval(_translate((p.Expression.parseString("?x<3e0")[0])), ctx)), False) + eq(bool(_eval(_translate(p.Expression.parseString("?x<3")[0]), ctx)), False) + eq(bool(_eval(_translate(p.Expression.parseString("?x<3.0")[0]), ctx)), False) + eq(bool(_eval(_translate(p.Expression.parseString("?x<3e0")[0]), ctx)), False) def test_and_or(): - eq(bool(_eval(_translate((p.Expression.parseString("3>2 && 3>1")[0])))), True) + eq(bool(_eval(_translate(p.Expression.parseString("3>2 && 3>1")[0]))), True) eq( - bool(_eval(_translate((p.Expression.parseString("3>2 && 3>4 || 2>1")[0])))), + bool(_eval(_translate(p.Expression.parseString("3>2 && 3>4 || 2>1")[0]))), True, ) eq( - bool(_eval(_translate((p.Expression.parseString("2>1 || 3>2 && 3>4")[0])))), + bool(_eval(_translate(p.Expression.parseString("2>1 || 3>2 && 3>4")[0]))), True, ) eq( - bool(_eval(_translate((p.Expression.parseString("(2>1 || 3>2) && 3>4")[0])))), + bool(_eval(_translate(p.Expression.parseString("(2>1 || 3>2) && 3>4")[0]))), False, ) diff --git a/test/test_sparql/test_forward_slash_escapes.py b/test/test_sparql/test_forward_slash_escapes.py index a33e43832..d7c72d07b 100644 --- a/test/test_sparql/test_forward_slash_escapes.py +++ b/test/test_sparql/test_forward_slash_escapes.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python3 - # This software was developed at the National Institute of Standards # and Technology by employees of the Federal Government in the course # of their official duties. Pursuant to title 17 Section 105 of the @@ -18,9 +16,8 @@ prefixed concepts, e.g. "application/json" somehow being "mime:application/json". 
""" +from __future__ import annotations -from test.data import TEST_DATA_DIR -from test.utils.graph import cached_graph from typing import Set import pytest @@ -29,6 +26,8 @@ from rdflib.plugins.sparql.processor import prepareQuery from rdflib.plugins.sparql.sparql import Query from rdflib.query import ResultRow +from test.data import TEST_DATA_DIR +from test.utils.graph import cached_graph query_string_expanded = r""" SELECT ?nIndividual diff --git a/test/test_sparql/test_initbindings.py b/test/test_sparql/test_initbindings.py index 9080c3bb0..0cb55f344 100644 --- a/test/test_sparql/test_initbindings.py +++ b/test/test_sparql/test_initbindings.py @@ -1,10 +1,11 @@ -from rdflib import ConjunctiveGraph, Literal, Namespace, URIRef, Variable +from rdflib import ConjunctiveGraph, Literal, URIRef, Variable from rdflib.plugins.sparql import prepareQuery +from test.utils.namespace import EGDC g = ConjunctiveGraph() -def testStr(): +def test_str(): a = set( g.query( "SELECT (STR(?target) AS ?r) WHERE { }", @@ -19,7 +20,7 @@ def testStr(): assert a == b, "STR: %r != %r" % (a, b) -def testIsIRI(): +def test_is_iri(): a = set( g.query( "SELECT (isIRI(?target) AS ?r) WHERE { }", @@ -34,7 +35,7 @@ def testIsIRI(): assert a == b, "isIRI: %r != %r" % (a, b) -def testIsBlank(): +def test_is_blank(): a = set( g.query( "SELECT (isBlank(?target) AS ?r) WHERE { }", @@ -49,7 +50,7 @@ def testIsBlank(): assert a == b, "isBlank: %r != %r" % (a, b) -def testIsLiteral(): +def test_is_literal(): a = set( g.query( "SELECT (isLiteral(?target) AS ?r) WHERE { }", @@ -64,7 +65,7 @@ def testIsLiteral(): assert a == b, "isLiteral: %r != %r" % (a, b) -def testUCase(): +def test_ucase(): a = set( g.query( "SELECT (UCASE(?target) AS ?r) WHERE { }", @@ -79,7 +80,7 @@ def testUCase(): assert a == b, "UCASE: %r != %r" % (a, b) -def testNoFunc(): +def test_no_func(): a = set( g.query("SELECT ?target WHERE { }", initBindings={"target": Literal("example")}) ) @@ -87,7 +88,7 @@ def testNoFunc(): assert a == 
b, "no func: %r != %r" % (a, b) -def testOrderBy(): +def test_order_by(): a = set( g.query( "SELECT ?target WHERE { } ORDER BY ?target", @@ -102,7 +103,7 @@ def testOrderBy(): assert a == b, "orderby: %r != %r" % (a, b) -def testOrderByFunc(): +def test_order_by_func(): a = set( g.query( "SELECT (UCASE(?target) as ?r) WHERE { } ORDER BY ?target", @@ -117,7 +118,7 @@ def testOrderByFunc(): assert a == b, "orderbyFunc: %r != %r" % (a, b) -def testNoFuncLimit(): +def test_no_func_limit(): a = set( g.query( "SELECT ?target WHERE { } LIMIT 1", @@ -128,7 +129,7 @@ def testNoFuncLimit(): assert a == b, "limit: %r != %r" % (a, b) -def testOrderByLimit(): +def test_order_by_limit(): a = set( g.query( "SELECT ?target WHERE { } ORDER BY ?target LIMIT 1", @@ -143,7 +144,7 @@ def testOrderByLimit(): assert a == b, "orderbyLimit: %r != %r" % (a, b) -def testOrderByFuncLimit(): +def test_order_by_func_limit(): a = set( g.query( "SELECT (UCASE(?target) as ?r) WHERE { } ORDER BY ?target LIMIT 1", @@ -158,7 +159,7 @@ def testOrderByFuncLimit(): assert a == b, "orderbyFuncLimit: %r != %r" % (a, b) -def testNoFuncOffset(): +def test_no_func_offset(): a = set( g.query( "SELECT ?target WHERE { } OFFSET 1", @@ -169,7 +170,7 @@ def testNoFuncOffset(): assert a == b, "offset: %r != %r" % (a, b) -def testNoFuncLimitOffset(): +def test_no_func_limit_offset(): a = set( g.query( "SELECT ?target WHERE { } LIMIT 1 OFFSET 1", @@ -184,7 +185,7 @@ def testNoFuncLimitOffset(): assert a == b, "limitOffset: %r != %r" % (a, b) -def testOrderByLimitOffset(): +def test_order_by_limit_offset(): a = set( g.query( "SELECT ?target WHERE { } ORDER BY ?target LIMIT 1 OFFSET 1", @@ -199,7 +200,7 @@ def testOrderByLimitOffset(): assert a == b, "orderbyLimitOffset: %r != %r" % (a, b) -def testOrderByFuncLimitOffset(): +def test_order_by_func_limit_offset(): a = set( g.query( "SELECT (UCASE(?target) as ?r) WHERE { } ORDER BY ?target LIMIT 1 OFFSET 1", @@ -214,7 +215,7 @@ def testOrderByFuncLimitOffset(): assert a 
== b, "orderbyFuncLimitOffset: %r != %r" % (a, b) -def testDistinct(): +def test_distinct(): a = set( g.query( "SELECT DISTINCT ?target WHERE { }", @@ -225,7 +226,7 @@ def testDistinct(): assert a == b, "distinct: %r != %r" % (a, b) -def testDistinctOrderBy(): +def test_distinct_order_by(): a = set( g.query( "SELECT DISTINCT ?target WHERE { } ORDER BY ?target", @@ -240,7 +241,7 @@ def testDistinctOrderBy(): assert a == b, "distinctOrderby: %r != %r" % (a, b) -def testDistinctOrderByLimit(): +def test_distinct_order_by_limit(): a = set( g.query( "SELECT DISTINCT ?target WHERE { } ORDER BY ?target LIMIT 1", @@ -255,7 +256,7 @@ def testDistinctOrderByLimit(): assert a == b, "distinctOrderbyLimit: %r != %r" % (a, b) -def testPrepare(): +def test_prepare(): q = prepareQuery("SELECT ?target WHERE { }") r = list(g.query(q)) e = [] @@ -270,7 +271,7 @@ def testPrepare(): assert r == e, "prepare: %r != %r" % (r, e) -def testData(): +def test_data(): data = ConjunctiveGraph() data += [ (URIRef("urn:a"), URIRef("urn:p"), Literal("a")), @@ -290,54 +291,55 @@ def testData(): assert a == b, "data: %r != %r" % (a, b) -def testAsk(): +def test_ask(): a = set(g.query("ASK { }", initBindings={"target": Literal("example")})) b = set(g.query("ASK { } VALUES (?target) {('example')}")) assert a == b, "ask: %r != %r" % (a, b) -EX = Namespace("http://example.com/") g2 = ConjunctiveGraph() -g2.bind("", EX) -g2.add((EX["s1"], EX["p"], EX["o1"])) -g2.add((EX["s2"], EX["p"], EX["o2"])) +g2.bind("", EGDC) +g2.add((EGDC["s1"], EGDC["p"], EGDC["o1"])) +g2.add((EGDC["s2"], EGDC["p"], EGDC["o2"])) -def testStringKey(): +def test_string_key(): results = list( - g2.query("SELECT ?o WHERE { ?s :p ?o }", initBindings={"s": EX["s1"]}) + g2.query("SELECT ?o WHERE { ?s :p ?o }", initBindings={"s": EGDC["s1"]}) ) assert len(results) == 1, results -def testStringKeyWithQuestionMark(): +def test_string_key_with_question_mark(): results = list( - g2.query("SELECT ?o WHERE { ?s :p ?o }", initBindings={"?s": 
EX["s1"]}) + g2.query("SELECT ?o WHERE { ?s :p ?o }", initBindings={"?s": EGDC["s1"]}) ) assert len(results) == 1, results -def testVariableKey(): +def test_variable_key(): results = list( - g2.query("SELECT ?o WHERE { ?s :p ?o }", initBindings={Variable("s"): EX["s1"]}) + g2.query( + "SELECT ?o WHERE { ?s :p ?o }", initBindings={Variable("s"): EGDC["s1"]} + ) ) assert len(results) == 1, results -def testVariableKeyWithQuestionMark(): +def test_variable_key_with_question_mark(): results = list( g2.query( - "SELECT ?o WHERE { ?s :p ?o }", initBindings={Variable("?s"): EX["s1"]} + "SELECT ?o WHERE { ?s :p ?o }", initBindings={Variable("?s"): EGDC["s1"]} ) ) assert len(results) == 1, results -def testFilter(): +def test_filter(): results = list( g2.query( "SELECT ?o WHERE { ?s :p ?o FILTER (?s = ?x)}", - initBindings={Variable("?x"): EX["s1"]}, + initBindings={Variable("?x"): EGDC["s1"]}, ) ) assert len(results) == 1, results diff --git a/test/test_sparql/test_nested_filters.py b/test/test_sparql/test_nested_filters.py new file mode 100644 index 000000000..eadd5442e --- /dev/null +++ b/test/test_sparql/test_nested_filters.py @@ -0,0 +1,348 @@ +#!/usr/bin/env python3 + +# Portions of this script contributed by NIST are governed by the +# following license: +# +# This software was developed at the National Institute of Standards +# and Technology by employees of the Federal Government in the course +# of their official duties. Pursuant to title 17 Section 105 of the +# United States Code this software is not subject to copyright +# protection and is in the public domain. NIST assumes no +# responsibility whatsoever for its use by other parties, and makes +# no guarantees, expressed or implied, about its quality, +# reliability, or any other characteristic. +# +# We would appreciate acknowledgement if the software is used. 
+ +from __future__ import annotations + +import logging +from typing import Set, Tuple + +from rdflib import Graph, URIRef +from rdflib.query import ResultRow + + +def test_nested_filter_outer_binding_propagation() -> None: + expected: Set[URIRef] = { + URIRef("http://example.org/Superclass"), + } + computed: Set[URIRef] = set() + graph_data = """\ +@prefix ex: . +@prefix owl: . +@prefix rdfs: . + +ex:Superclass + a owl:Class ; +. +ex:Subclass1 + a owl:Class ; + rdfs:subClassOf ex:Superclass ; +. +ex:Subclass2 + a owl:Class ; + rdfs:subClassOf ex:Superclass ; + owl:deprecated true ; +. +""" + query = """\ +SELECT ?class +WHERE { + ?class a owl:Class . + FILTER EXISTS { + ?subclass rdfs:subClassOf ?class . + FILTER NOT EXISTS { ?subclass owl:deprecated true } + } +} +""" + graph = Graph() + graph.parse(data=graph_data) + for result in graph.query(query): + assert isinstance(result, ResultRow) + assert isinstance(result[0], URIRef) + computed.add(result[0]) + assert expected == computed + + +def test_nested_filter_outermost_binding_propagation() -> None: + """ + This test implements a query that requires functionality of nested FILTER NOT EXISTS query components. + + It encodes a single ground truth positive query result, a tuple where: + * The first member is a HistoricAction, + * The second member is a wholly redundant HistoricRecord in consideration of latter HistoricRecords that cover all non-HistoricRecord inputs to the Action, and + * The third member is the superseding record. + """ + expected: Set[Tuple[URIRef, URIRef, URIRef]] = { + ( + URIRef("http://example.org/kb/action-1-2"), + URIRef("http://example.org/kb/record-123-1"), + URIRef("http://example.org/kb/record-1-2"), + ) + } + computed: Set[Tuple[URIRef, URIRef, URIRef]] = set() + + historic_ontology_graph_data = """\ +@prefix case-investigation: . +@prefix ex: . +@prefix kb: . +@prefix owl: . +@prefix prov: . +@prefix rdf: . +@prefix rdfs: . +@prefix xsd: . 
+ + + a owl:Ontology ; + rdfs:comment "This example ontology represents a history-analyzing application, where notes of things' handling are created and accompany the things as they are used in actions. For the sake of demonstration, classes and properties implemented here are simplifications of other ontologies' classes and properties. Otherwise, this ontology is narrowly similar to an application of the CASE and PROV-O ontologies."@en ; + rdfs:seeAlso ; + . + +# Begin ontology (TBox). + +ex:HistoricThing + a owl:Class ; + rdfs:subClassOf owl:Thing ; + rdfs:comment "A thing generated by some HistoricAction with an accompanying HistoricRecord, and is the input to other HistoricActions. When a HistoricThing is the input to a HistoricAction, a new HistoricRecord should be emitted by the HistoricAction."@en ; + rdfs:seeAlso prov:Entity ; + . + +ex:HistoricRecord + a owl:Class ; + rdfs:subClassOf ex:HistoricThing ; + rdfs:comment + "An example class analagous to PROV-O's Collection and CASE's ProvenanceRecord."@en , + "Only the latest HistoricRecord for an object should be an input to a HistoricAction."@en + ; + rdfs:seeAlso + case-investigation:ProvenanceRecord , + prov:Collection + ; + . + +ex:HistoricAction + a owl:Class ; + rdfs:subClassOf owl:Thing ; + rdfs:comment "An example class analagous to PROV-O's Activity and CASE's InvestigativeAction."@en ; + rdfs:seeAlso + case-investigation:InvestigativeAction , + prov:Activity + ; + owl:disjointWith ex:HistoricThing ; + . + +ex:hadMember + a owl:ObjectProperty ; + rdfs:domain ex:HistoricRecord ; + rdfs:range ex:HistoricThing ; + rdfs:seeAlso prov:hadMember ; + . + +ex:generated + a owl:ObjectProperty ; + rdfs:domain ex:HistoricAction ; + rdfs:range ex:HistoricThing ; + rdfs:seeAlso prov:wasGeneratedBy ; + . + +ex:used + a owl:ObjectProperty ; + rdfs:domain ex:HistoricAction ; + rdfs:range ex:HistoricThing ; + rdfs:seeAlso prov:used ; + . 
+ +ex:wasDerivedFrom + a owl:ObjectProperty ; + rdfs:domain owl:Thing ; + rdfs:range owl:Thing ; + rdfs:seeAlso prov:wasDerivedFrom ; + . + +# Begin knowledge base (ABox). + +kb:record-123-1 + a ex:HistoricRecord ; + rdfs:comment "This is a first record of having handled thing-1, thing-2, and thing-3."@en ; + ex:hadMember + kb:thing-1 , + kb:thing-2 + ; + . + +kb:record-1-2 + a ex:HistoricRecord ; + rdfs:comment "This is a second record of having handled thing-1."@en ; + ex:hadMember kb:thing-1 ; + ex:wasDerivedFrom kb:record-123-1 ; + . + +kb:record-2-2 + a ex:HistoricRecord ; + rdfs:comment "This is a second record of having handled thing-2."@en ; + ex:hadMember kb:thing-2 ; + ex:wasDerivedFrom kb:record-123-1 ; + . + +kb:record-4-1 + a ex:HistoricRecord ; + rdfs:comment "This is a first record of having handled thing-4. thing-4 is independent in history of thing-1 and thing-2."@en ; + ex:hadMember kb:thing-4 ; + . + +kb:thing-1 + a ex:HistoricThing ; + . + +kb:thing-2 + a ex:HistoricThing ; + . + +kb:thing-3 + a ex:HistoricThing ; + . + +kb:thing-4 + a ex:HistoricThing ; + . + +kb:action-123-0 + a ex:HistoricAction ; + rdfs:comment "Generate things 1, 2, and 3."@en ; + ex:generated + kb:record-123-1 , + kb:thing-1 , + kb:thing-2 , + kb:thing-3 + . + +kb:action-4-0 + a ex:HistoricAction ; + rdfs:comment "Generate thing 4."@en ; + ex:generated + kb:record-4-1 , + kb:thing-4 + . + +kb:action-1-1 + a ex:HistoricAction ; + rdfs:comment "Handle thing-1."@en ; + ex:used + kb:record-123-1 , + kb:thing-1 + ; + ex:generated kb:record-1-2 ; + . + +kb:action-2-1 + a ex:HistoricAction ; + rdfs:comment "Handle thing-2."@en ; + ex:used + kb:record-123-1 , + kb:thing-2 + ; + ex:generated kb:record-2-2 ; + . + +kb:action-1-2 + a ex:HistoricAction ; + rdfs:comment "This node SHOULD be found by the query. record-123-1 is wholly redundant with record-1-2 with respect to the collective whole of action inputs."@en ; + ex:used + kb:record-123-1 , + kb:record-1-2 , + kb:thing-1 + ; + . 
+ +kb:action-12-2 + a ex:HistoricAction ; + rdfs:comment "This node SHOULD NOT be found by the query. record-123-1 is partially, but not wholly, redundant with record-1-2, due to to thing-2 having record-123-1 as its only accompanying historic record."@en ; + ex:used + kb:record-123-1 , + kb:record-1-2 , + kb:thing-1 , + kb:thing-2 + ; + . + +kb:action-123-2 + a ex:HistoricAction ; + rdfs:comment "This node SHOULD NOT be found by the query. record-123-1 is partially, but not wholly, redundant with record-1-2 and record-2-2, due to thing-3 having record-123-1 as its only accompanying historic record."@en ; + ex:used + kb:record-123-1 , + kb:record-1-2 , + kb:record-2-2 , + kb:thing-1 , + kb:thing-2 , + kb:thing-3 + ; + . + +kb:action-1234-2 + a ex:HistoricAction ; + rdfs:comment "This node SHOULD NOT be found by the query. record-123-1 is partially, but not wholly, redundant with record-1-2 and record-2-2, due to thing-3 having record-123-1 as its only accompanying historic record. thing-4 also has no shared history with thing-1, -2, or -3."@en ; + ex:used + kb:record-123-1 , + kb:record-1-2 , + kb:record-2-2 , + kb:record-4-1 , + kb:thing-1 , + kb:thing-2 , + kb:thing-3 , + kb:thing-4 + ; + . +""" + + # See 'TEST OBJECTIVE' annotation. + query = """\ +PREFIX ex: +SELECT ?nAction ?nRedundantRecord ?nSupersedingRecord +WHERE { + ?nAction + ex:used + ?nThing1 , + ?nRedundantRecord , + ?nSupersedingRecord + ; + . + ?nRedundantRecord + a ex:HistoricRecord ; + ex:hadMember ?nThing1 ; + . + ?nSupersedingRecord + a ex:HistoricRecord ; + ex:wasDerivedFrom+ ?nRedundantRecord ; + ex:hadMember ?nThing1 ; + . + FILTER NOT EXISTS { + ?nAction ex:used ?nThing2 . + ?nRedundantRecord ex:hadMember ?nThing2 . + FILTER ( ?nThing1 != ?nThing2 ) + FILTER NOT EXISTS { + #### + # + # TEST OBJECTIVE: + # nThing2 must be passed from the outermost context. + # + #### + ?nSupersedingRecord ex:hadMember ?nThing2 . 
+ } + } +} +""" + + graph = Graph() + graph.parse(data=historic_ontology_graph_data) + logging.debug(len(graph)) + + for result in graph.query(query): + assert isinstance(result, ResultRow) + assert isinstance(result[0], URIRef) + assert isinstance(result[1], URIRef) + assert isinstance(result[2], URIRef) + + computed.add((result[0], result[1], result[2])) + + assert expected == computed diff --git a/test/test_sparql/test_prefixed_name.py b/test/test_sparql/test_prefixed_name.py index 99d2fb108..ae869bf4a 100644 --- a/test/test_sparql/test_prefixed_name.py +++ b/test/test_sparql/test_prefixed_name.py @@ -1,16 +1,16 @@ +from __future__ import annotations + import itertools import logging -from contextlib import ExitStack -from typing import Type, Union import pyparsing import pytest -from pyparsing import Optional import rdflib from rdflib import Graph from rdflib.namespace import Namespace -from rdflib.term import URIRef +from rdflib.term import Node, URIRef +from test.utils.outcome import OutcomeChecker, OutcomePrimitive RESERVED_PCHARS = [ "%20", @@ -100,17 +100,15 @@ def blank_graph() -> Graph: def test_pnames( pname_ns: str, pname: str, - expected_result: Union[URIRef, Type[Exception]], + expected_result: OutcomePrimitive[Node], blank_graph: Graph, ) -> None: """ The given pname produces the expected result. 
""" - catcher: Optional[pytest.ExceptionInfo[Exception]] = None + checker = OutcomeChecker[Node].from_primitive(expected_result) - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass(expected_result, Exception): - catcher = xstack.enter_context(pytest.raises(expected_result)) + with checker.context(): query_string = f"""\ PREFIX {pname_ns}: <{PNAME_PREFIX}> @@ -126,10 +124,4 @@ def test_pnames( triple = triples[0] result = triple[2] logging.debug("result = %s", result) - - if catcher is not None: - assert isinstance(catcher, pytest.ExceptionInfo) - assert catcher.value is not None - else: - assert isinstance(expected_result, URIRef) - assert expected_result == result + checker.check(result) diff --git a/test/test_sparql/test_result.py b/test/test_sparql/test_result.py index 8f211e337..ddf9a781c 100644 --- a/test/test_sparql/test_result.py +++ b/test/test_sparql/test_result.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum import inspect import itertools @@ -7,13 +9,6 @@ from contextlib import ExitStack from io import BytesIO, StringIO from pathlib import Path, PosixPath, PurePath -from test.utils.destination import DestinationType, DestParmType -from test.utils.result import ( - ResultFormat, - ResultFormatInfo, - ResultFormatTrait, - ResultType, -) from typing import ( IO, BinaryIO, @@ -39,6 +34,13 @@ from rdflib.namespace import Namespace from rdflib.query import Result, ResultRow from rdflib.term import BNode, Identifier, Literal, Node, Variable +from test.utils.destination import DestinationType, DestParmType +from test.utils.result import ( + ResultFormat, + ResultFormatInfo, + ResultFormatTrait, + ResultType, +) BindingsType = Sequence[Mapping[Variable, Identifier]] ParseOutcomeType = Union[BindingsType, Type[Exception]] @@ -439,4 +441,5 @@ def test_serialize_to_fileuri_with_authortiy( encoding=encoding, ) assert False # this should never happen as serialize should always fail - assert catcher.value is not None 
+ # type error, mypy thinks this line is unreachable, but it works fine + assert catcher.value is not None # type: ignore[unreachable, unused-ignore] diff --git a/test/test_sparql/test_service.py b/test/test_sparql/test_service.py index 61c317ac6..83ac8388f 100644 --- a/test/test_sparql/test_service.py +++ b/test/test_sparql/test_service.py @@ -1,25 +1,18 @@ +from __future__ import annotations + import json -from contextlib import ExitStack -from test.utils import helper -from test.utils.http import MethodName, MockHTTPResponse -from test.utils.httpservermock import ServedBaseHTTPServerMock -from typing import ( - Dict, - FrozenSet, - List, - Mapping, - Optional, - Sequence, - Tuple, - Type, - Union, -) +from http.client import IncompleteRead, RemoteDisconnected +from typing import Dict, FrozenSet, List, Mapping, Sequence, Tuple, Type, Union import pytest from rdflib import Graph, Literal, URIRef, Variable from rdflib.namespace import XSD from rdflib.term import BNode, Identifier +from test.utils import helper +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock +from test.utils.outcome import OutcomeChecker @pytest.mark.webtest @@ -36,7 +29,10 @@ def test_service(): ?dbpComment . } } } limit 2""" - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") print(results.vars) print(results.bindings) assert len(results) == 2 @@ -61,7 +57,10 @@ def test_service_with_bind(): ?subject . 
} } } limit 2""" - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") assert len(results) == 2 for r in results: @@ -97,7 +96,10 @@ def test_service_with_bound_solutions(): } LIMIT 2 """ - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") assert len(results) == 2 for r in results: @@ -120,7 +122,10 @@ def test_service_with_values(): ?subject . } } } limit 2""" - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") assert len(results) == 2 for r in results: @@ -137,7 +142,10 @@ def test_service_with_implicit_select(): { values (?s ?p ?o) {( 1) ( 2)} }} limit 2""" - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") assert len(results) == 2 for r in results: @@ -155,7 +163,10 @@ def test_service_with_implicit_select_and_prefix(): { values (?s ?p ?o) {(ex:a ex:b 1) ( 2)} }} limit 2""" - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") assert len(results) == 2 for r in results: @@ -173,7 +184,10 @@ def test_service_with_implicit_select_and_base(): { values (?s ?p ?o) {( 1) ( 2)} }} limit 2""" - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") assert len(results) == 2 for r in results: @@ 
-191,7 +205,10 @@ def test_service_with_implicit_select_and_allcaps(): ?s ?sameAs . } } LIMIT 3""" - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") assert len(results) == 3 @@ -218,7 +235,10 @@ def test_simple_not_null(): VALUES (?s ?p ?o) {( "c")} } }""" - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") assert results.bindings[0].get(Variable("o")) == Literal("c") @@ -246,7 +266,10 @@ def test_service_node_types(): } FILTER( ?o IN (, "Simple Literal", "String Literal"^^xsd:string, "String Language"@en) ) }""" - results = helper.query_with_retry(g, q) + try: + results = helper.query_with_retry(g, q) + except (RemoteDisconnected, IncompleteRead): + pytest.skip("this test uses dbpedia which is down sometimes") expected = freeze_bindings( [ @@ -337,19 +360,15 @@ def test_with_mock( # dependent on the size of the service query. 
function_httpmock.responses[MethodName.GET].append(mock_response) function_httpmock.responses[MethodName.POST].append(mock_response) - catcher: Optional[pytest.ExceptionInfo[Exception]] = None - with ExitStack() as xstack: - if isinstance(expected_result, type) and issubclass(expected_result, Exception): - catcher = xstack.enter_context(pytest.raises(expected_result)) - else: - expected_bindings = [{Variable("var"): item} for item in expected_result] + checker = OutcomeChecker[Sequence[Mapping[Variable, Identifier]]].from_primitive( + [{Variable("var"): item} for item in expected_result] + if isinstance(expected_result, List) + else expected_result + ) + with checker.context(): bindings = graph.query(query).bindings - if catcher is not None: - assert catcher is not None - assert catcher.value is not None - else: - assert expected_bindings == bindings + checker.check(bindings) if __name__ == "__main__": diff --git a/test/test_sparql/test_sparql.py b/test/test_sparql/test_sparql.py index 807686040..a42f7ffa5 100644 --- a/test/test_sparql/test_sparql.py +++ b/test/test_sparql/test_sparql.py @@ -1,6 +1,6 @@ +from __future__ import annotations + import logging -from test.utils import eq_ -from test.utils.result import assert_bindings_collections_equal from typing import Any, Callable, Mapping, Sequence, Type import pytest @@ -21,6 +21,9 @@ from rdflib.plugins.sparql.sparql import SPARQLError from rdflib.query import Result, ResultRow from rdflib.term import Identifier, Variable +from test.utils import eq_ +from test.utils.namespace import EGDC +from test.utils.result import assert_bindings_collections_equal def test_graph_prefix(): @@ -156,7 +159,7 @@ def test_sparql_update_with_bnode_serialize_parse(): raised = False try: Graph().parse(data=string, format="ntriples") - except Exception as e: + except Exception as e: # noqa: F841 raised = True assert not raised @@ -344,9 +347,8 @@ def test_custom_eval() -> None: """ SPARQL custom eval function works as expected. 
""" - eg = Namespace("http://example.com/") - custom_function_uri = eg["function"] - custom_function_result = eg["result"] + custom_function_uri = EGDC["function"] + custom_function_result = EGDC["result"] def custom_eval_extended(ctx: Any, extend: Any) -> Any: for c in evalPart(ctx, extend.p): @@ -417,8 +419,7 @@ def test_custom_eval_exception( Exception raised from a ``CUSTOM_EVALS`` function during the execution of a query propagates to the caller. """ - eg = Namespace("http://example.com/") - custom_function_uri = eg["function"] + custom_function_uri = EGDC["function"] def custom_eval_extended(ctx: Any, extend: Any) -> Any: for c in evalPart(ctx, extend.p): @@ -845,11 +846,28 @@ def thrower(*args: Any, **kwargs: Any) -> None: ], id="select-group-concat-optional-many", ), + pytest.param( + """ + PREFIX rdf: + + SELECT * WHERE { + BIND(STRDT("", rdf:HTML) as ?tag1) # incorrectly disappearing literal + BIND("" as ?tag2) # correctly appearing literal + } + """, + [ + { + Variable("tag1"): Literal("", datatype=RDF.HTML), + Variable("tag2"): Literal(""), + } + ], + id="select-bind-strdt-html", + ), ], ) def test_queries( query_string: str, - expected_bindings: Sequence[Mapping["Variable", "Identifier"]], + expected_bindings: Sequence[Mapping[Variable, Identifier]], rdfs_graph: Graph, ) -> None: """ diff --git a/test/test_sparql/test_sparql_parser.py b/test/test_sparql/test_sparql_parser.py index 25659a1b1..ac6ff7856 100644 --- a/test/test_sparql/test_sparql_parser.py +++ b/test/test_sparql/test_sparql_parser.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import math import sys from typing import Set, Tuple @@ -24,8 +26,8 @@ def test_insert_large(self) -> None: self.do_insert(200) def do_insert(self, resource_count: int) -> None: - EGV = Namespace("http://example.org/vocab#") - EGI = Namespace("http://example.org/instance#") + EGV = Namespace("http://example.org/vocab#") # noqa: N806 + EGI = Namespace("http://example.org/instance#") # noqa: N806 prop0, 
prop1, prop2 = EGV["prop0"], EGV["prop1"], EGV["prop2"] g0 = Graph() for index in range(resource_count): diff --git a/test/test_sparql/test_translate_algebra.py b/test/test_sparql/test_translate_algebra.py index 20b23327a..59e747f65 100644 --- a/test/test_sparql/test_translate_algebra.py +++ b/test/test_sparql/test_translate_algebra.py @@ -1,9 +1,10 @@ +from __future__ import annotations + import logging import os from dataclasses import dataclass, field from io import StringIO from pathlib import Path -from test.data import TEST_DATA_DIR from typing import Collection, Tuple, Union, cast import pytest @@ -11,7 +12,9 @@ import rdflib.plugins.sparql.algebra as algebra import rdflib.plugins.sparql.parser as parser +from rdflib import Graph, Literal, URIRef from rdflib.plugins.sparql.algebra import translateAlgebra +from test.data import TEST_DATA_DIR @pytest.fixture @@ -304,3 +307,25 @@ def test_roundtrip(test_spec: AlgebraTest, data_path: Path) -> None: # TODO: Execute the raw query (query_text) and the reconstituted query # (query_from_query_from_algebra) against a well defined graph and ensure # they yield the same result. 
+ + +def test_sparql_group_concat(): + """Tests if GROUP_CONCAT correctly uses the separator keyword""" + query = """ + PREFIX : + + SELECT ?subject (GROUP_CONCAT(?object; separator="") + AS ?concatenatedObjects) + WHERE { + VALUES (?subject ?object) { + (:pred "a") + (:pred "b") + (:pred "c") + } + } + GROUP BY ?subject + """ + + g = Graph() + q = dict(g.query(query)) + assert q[URIRef("http://example.org/pred")] == Literal("abc") diff --git a/test/test_sparql/test_update.py b/test/test_sparql/test_update.py new file mode 100644 index 000000000..17c7967fa --- /dev/null +++ b/test/test_sparql/test_update.py @@ -0,0 +1,92 @@ +import itertools +import logging +from typing import Callable + +import pytest + +from rdflib.graph import ConjunctiveGraph, Dataset, Graph +from test.data import TEST_DATA_DIR +from test.utils import GraphHelper +from test.utils.graph import GraphSource +from test.utils.namespace import EGDO + + +@pytest.mark.parametrize( + ("graph_factory", "source"), + itertools.product( + [Graph, ConjunctiveGraph, Dataset], + GraphSource.from_paths( + TEST_DATA_DIR / "variants" / "simple_triple.ttl", + TEST_DATA_DIR / "variants" / "relative_triple.ttl", + ), + ), + ids=GraphSource.idfn, +) +def test_load_into_default( + graph_factory: Callable[[], Graph], source: GraphSource +) -> None: + """ + Evaluation of ``LOAD `` into default graph works correctly. 
+ """ + + expected_graph = graph_factory() + source.load(graph=expected_graph) + + actual_graph = graph_factory() + actual_graph.update(f"LOAD <{source.public_id_or_path_uri()}>") + + if logging.getLogger().isEnabledFor(logging.DEBUG): + debug_format = ( + "nquads" if isinstance(expected_graph, ConjunctiveGraph) else "ntriples" + ) + logging.debug( + "expected_graph = \n%s", expected_graph.serialize(format=debug_format) + ) + logging.debug( + "actual_graph = \n%s", actual_graph.serialize(format=debug_format) + ) + + if isinstance(expected_graph, ConjunctiveGraph): + assert isinstance(actual_graph, ConjunctiveGraph) + GraphHelper.assert_collection_graphs_equal(expected_graph, actual_graph) + else: + GraphHelper.assert_triple_sets_equals(expected_graph, actual_graph) + + +@pytest.mark.parametrize( + ("graph_factory", "source"), + itertools.product( + [ConjunctiveGraph, Dataset], + GraphSource.from_paths( + TEST_DATA_DIR / "variants" / "simple_triple.ttl", + TEST_DATA_DIR / "variants" / "relative_triple.ttl", + ), + ), + ids=GraphSource.idfn, +) +def test_load_into_named( + graph_factory: Callable[[], ConjunctiveGraph], source: GraphSource +) -> None: + """ + Evaluation of ``LOAD INTO GRAPH `` works correctly. 
+ """ + + expected_graph = graph_factory() + source.load(graph=expected_graph.get_context(EGDO.graph)) + + actual_graph = graph_factory() + + actual_graph.update( + f"LOAD <{source.public_id_or_path_uri()}> INTO GRAPH <{EGDO.graph}>" + ) + + if logging.getLogger().isEnabledFor(logging.DEBUG): + debug_format = "nquads" + logging.debug( + "expected_graph = \n%s", expected_graph.serialize(format=debug_format) + ) + logging.debug( + "actual_graph = \n%s", actual_graph.serialize(format=debug_format) + ) + + GraphHelper.assert_collection_graphs_equal(expected_graph, actual_graph) diff --git a/test/test_store/test_namespace_binding.py b/test/test_store/test_namespace_binding.py index dfe7ff7ff..ae5ca680a 100644 --- a/test/test_store/test_namespace_binding.py +++ b/test/test_store/test_namespace_binding.py @@ -1,9 +1,10 @@ +from __future__ import annotations + import enum import itertools import logging from dataclasses import dataclass, field from pathlib import Path -from test.utils import pytest_mark_filter from typing import Any, Callable, Dict, Set, Union import pytest @@ -13,6 +14,7 @@ from rdflib.plugins.stores.berkeleydb import has_bsddb from rdflib.store import Store from rdflib.term import IdentifiedNode, URIRef +from test.utils import pytest_mark_filter class StoreTrait(enum.Enum): diff --git a/test/test_store/test_nodepickler.py b/test/test_store/test_nodepickler.py index 840f1d34d..4659ed7b4 100644 --- a/test/test_store/test_nodepickler.py +++ b/test/test_store/test_nodepickler.py @@ -35,7 +35,7 @@ def test_to_bits_from_bits_round_trip(self): def test_literal_cases(self): np = NodePickler() - for l in cases: + for l in cases: # noqa: E741 a = Literal(l) b = np.loads(np.dumps(a)) assert a == b diff --git a/test/test_store/test_store_auditable.py b/test/test_store/test_store_auditable.py index f8d8e3d46..dfb6e90ba 100644 --- a/test/test_store/test_store_auditable.py +++ b/test/test_store/test_store_auditable.py @@ -1,18 +1,16 @@ -# -*- coding=utf8 -*- import 
pytest -from rdflib import Graph, Namespace +from rdflib import Graph from rdflib.plugins.stores.auditable import AuditableStore - -EX = Namespace("http://example.org/") +from test.utils.namespace import EGDO @pytest.fixture def get_graph(): g = Graph("Memory") - g.add((EX.s0, EX.p0, EX.o0)) - g.add((EX.s0, EX.p0, EX.o0bis)) + g.add((EGDO.s0, EGDO.p0, EGDO.o0)) + g.add((EGDO.s0, EGDO.p0, EGDO.o0bis)) t = Graph(AuditableStore(g.store), g.identifier) @@ -21,44 +19,44 @@ def get_graph(): def test_add_commit(get_graph): g, t = get_graph - t.add((EX.s1, EX.p1, EX.o1)) + t.add((EGDO.s1, EGDO.p1, EGDO.o1)) assert set(t) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), + (EGDO.s1, EGDO.p1, EGDO.o1), ] ) t.commit() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), + (EGDO.s1, EGDO.p1, EGDO.o1), ] ) def test_remove_commit(get_graph): g, t = get_graph - t.remove((EX.s0, EX.p0, EX.o0)) + t.remove((EGDO.s0, EGDO.p0, EGDO.o0)) assert set(t) == set( [ - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) t.commit() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) def test_multiple_remove_commit(get_graph): g, t = get_graph - t.remove((EX.s0, EX.p0, None)) + t.remove((EGDO.s0, EGDO.p0, None)) assert set(t) == set([]) t.commit() assert set(g) == set([]) @@ -66,163 +64,163 @@ def test_multiple_remove_commit(get_graph): def test_noop_add_commit(get_graph): g, t = get_graph - t.add((EX.s0, EX.p0, EX.o0)) + t.add((EGDO.s0, EGDO.p0, EGDO.o0)) assert set(t) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) t.commit() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, 
EGDO.p0, EGDO.o0bis), ] ) def test_noop_remove_commit(get_graph): g, t = get_graph - t.add((EX.s0, EX.p0, EX.o0)) + t.add((EGDO.s0, EGDO.p0, EGDO.o0)) assert set(t) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) t.commit() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) def test_add_remove_commit(get_graph): g, t = get_graph - t.add((EX.s1, EX.p1, EX.o1)) - t.remove((EX.s1, EX.p1, EX.o1)) + t.add((EGDO.s1, EGDO.p1, EGDO.o1)) + t.remove((EGDO.s1, EGDO.p1, EGDO.o1)) assert set(t) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) t.commit() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) def test_remove_add_commit(get_graph): g, t = get_graph - t.remove((EX.s1, EX.p1, EX.o1)) - t.add((EX.s1, EX.p1, EX.o1)) + t.remove((EGDO.s1, EGDO.p1, EGDO.o1)) + t.add((EGDO.s1, EGDO.p1, EGDO.o1)) assert set(t) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), + (EGDO.s1, EGDO.p1, EGDO.o1), ] ) t.commit() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), + (EGDO.s1, EGDO.p1, EGDO.o1), ] ) def test_add_rollback(get_graph): g, t = get_graph - t.add((EX.s1, EX.p1, EX.o1)) + t.add((EGDO.s1, EGDO.p1, EGDO.o1)) t.rollback() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) def test_remove_rollback(get_graph): g, t = get_graph - t.remove((EX.s0, EX.p0, EX.o0)) + t.remove((EGDO.s0, EGDO.p0, EGDO.o0)) t.rollback() assert 
set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) def test_multiple_remove_rollback(get_graph): g, t = get_graph - t.remove((EX.s0, EX.p0, None)) + t.remove((EGDO.s0, EGDO.p0, None)) t.rollback() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) def test_noop_add_rollback(get_graph): g, t = get_graph - t.add((EX.s0, EX.p0, EX.o0)) + t.add((EGDO.s0, EGDO.p0, EGDO.o0)) t.rollback() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) def test_noop_remove_rollback(get_graph): g, t = get_graph - t.add((EX.s0, EX.p0, EX.o0)) + t.add((EGDO.s0, EGDO.p0, EGDO.o0)) t.rollback() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) def test_add_remove_rollback(get_graph): g, t = get_graph - t.add((EX.s1, EX.p1, EX.o1)) - t.remove((EX.s1, EX.p1, EX.o1)) + t.add((EGDO.s1, EGDO.p1, EGDO.o1)) + t.remove((EGDO.s1, EGDO.p1, EGDO.o1)) t.rollback() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) def test_remove_add_rollback(get_graph): g, t = get_graph - t.remove((EX.s1, EX.p1, EX.o1)) - t.add((EX.s1, EX.p1, EX.o1)) + t.remove((EGDO.s1, EGDO.p1, EGDO.o1)) + t.add((EGDO.s1, EGDO.p1, EGDO.o1)) t.rollback() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) @@ -238,23 +236,23 @@ def get_empty_graph(): def test_add_commit_empty(get_empty_graph): g, t = get_empty_graph - t.add((EX.s1, EX.p1, EX.o1)) + t.add((EGDO.s1, EGDO.p1, EGDO.o1)) assert set(t) == set( [ - (EX.s1, EX.p1, EX.o1), + (EGDO.s1, EGDO.p1, EGDO.o1), ] ) t.commit() assert 
set(g) == set( [ - (EX.s1, EX.p1, EX.o1), + (EGDO.s1, EGDO.p1, EGDO.o1), ] ) def test_add_rollback_empty(get_empty_graph): g, t = get_empty_graph - t.add((EX.s1, EX.p1, EX.o1)) + t.add((EGDO.s1, EGDO.p1, EGDO.o1)) t.rollback() assert set(g) == set([]) @@ -263,14 +261,14 @@ def test_add_rollback_empty(get_empty_graph): def get_concurrent_graph(): g = Graph("Memory") - g.add((EX.s0, EX.p0, EX.o0)) - g.add((EX.s0, EX.p0, EX.o0bis)) + g.add((EGDO.s0, EGDO.p0, EGDO.o0)) + g.add((EGDO.s0, EGDO.p0, EGDO.o0bis)) t1 = Graph(AuditableStore(g.store), g.identifier) t2 = Graph(AuditableStore(g.store), g.identifier) - t1.add((EX.s1, EX.p1, EX.o1)) - t2.add((EX.s2, EX.p2, EX.o2)) - t1.remove((EX.s0, EX.p0, EX.o0)) - t2.remove((EX.s0, EX.p0, EX.o0bis)) + t1.add((EGDO.s1, EGDO.p1, EGDO.o1)) + t2.add((EGDO.s2, EGDO.p2, EGDO.o2)) + t1.remove((EGDO.s0, EGDO.p0, EGDO.o0)) + t2.remove((EGDO.s0, EGDO.p0, EGDO.o0bis)) yield g, t1, t2 @@ -281,8 +279,8 @@ def test_commit_commit(get_concurrent_graph): t2.commit() assert set(g) == set( [ - (EX.s1, EX.p1, EX.o1), - (EX.s2, EX.p2, EX.o2), + (EGDO.s1, EGDO.p1, EGDO.o1), + (EGDO.s2, EGDO.p2, EGDO.o2), ] ) @@ -293,8 +291,8 @@ def test_commit_rollback(get_concurrent_graph): t2.rollback() assert set(g) == set( [ - (EX.s1, EX.p1, EX.o1), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s1, EGDO.p1, EGDO.o1), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) @@ -305,8 +303,8 @@ def test_rollback_commit(get_concurrent_graph): t2.commit() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s2, EGDO.p2, EGDO.o2), ] ) @@ -317,8 +315,8 @@ def test_rollback_rollback(get_concurrent_graph): t2.rollback() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) @@ -327,16 +325,16 @@ def test_rollback_rollback(get_concurrent_graph): def get_embedded_graph(): g = Graph("Memory") - g.add((EX.s0, EX.p0, EX.o0)) - g.add((EX.s0, EX.p0, 
EX.o0bis)) + g.add((EGDO.s0, EGDO.p0, EGDO.o0)) + g.add((EGDO.s0, EGDO.p0, EGDO.o0bis)) t1 = Graph(AuditableStore(g.store), g.identifier) - t1.add((EX.s1, EX.p1, EX.o1)) - t1.remove((EX.s0, EX.p0, EX.o0bis)) + t1.add((EGDO.s1, EGDO.p1, EGDO.o1)) + t1.remove((EGDO.s0, EGDO.p0, EGDO.o0bis)) t2 = Graph(AuditableStore(t1.store), t1.identifier) - t2.add((EX.s2, EX.p2, EX.o2)) - t2.remove((EX.s1, EX.p1, EX.o1)) + t2.add((EGDO.s2, EGDO.p2, EGDO.o2)) + t2.remove((EGDO.s1, EGDO.p1, EGDO.o1)) yield g, t1, t2 @@ -345,22 +343,22 @@ def test_commit_commit_embedded(get_embedded_graph): g, t1, t2 = get_embedded_graph assert set(t2) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s2, EGDO.p2, EGDO.o2), ] ) t2.commit() assert set(t1) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s2, EGDO.p2, EGDO.o2), ] ) t1.commit() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s2, EGDO.p2, EGDO.o2), ] ) @@ -371,8 +369,8 @@ def test_commit_rollback_embedded(get_embedded_graph): t1.rollback() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) @@ -382,15 +380,15 @@ def test_rollback_commit_embedded(get_embedded_graph): t2.rollback() assert set(t1) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s1, EX.p1, EX.o1), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s1, EGDO.p1, EGDO.o1), ] ) t1.commit() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s1, EX.p1, EX.o1), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s1, EGDO.p1, EGDO.o1), ] ) @@ -401,7 +399,7 @@ def test_rollback_rollback_embedded(get_embedded_graph): t1.rollback() assert set(g) == set( [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), + (EGDO.s0, EGDO.p0, EGDO.o0), + (EGDO.s0, EGDO.p0, EGDO.o0bis), ] ) diff --git a/test/test_store/test_store_berkeleydb.py 
b/test/test_store/test_store_berkeleydb.py index a0edecc54..35a81972b 100644 --- a/test/test_store/test_store_berkeleydb.py +++ b/test/test_store/test_store_berkeleydb.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import logging import tempfile from typing import Iterable, Optional, Tuple diff --git a/test/test_store/test_store_sparqlstore.py b/test/test_store/test_store_sparqlstore.py index 5d8629354..325f3b651 100644 --- a/test/test_store/test_store_sparqlstore.py +++ b/test/test_store/test_store_sparqlstore.py @@ -1,10 +1,9 @@ +from __future__ import annotations + import logging import re import socket from http.server import BaseHTTPRequestHandler, HTTPServer -from test.utils import helper -from test.utils.http import MethodName, MockHTTPResponse -from test.utils.httpservermock import ServedBaseHTTPServerMock from threading import Thread from typing import Callable, ClassVar, Type from unittest.mock import patch @@ -14,6 +13,9 @@ from rdflib import Graph, Literal, URIRef from rdflib.namespace import FOAF, RDF, RDFS, XMLNS, XSD from rdflib.plugins.stores.sparqlconnector import SPARQLConnector +from test.utils import helper +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock class TestSPARQLStoreGraph: @@ -76,7 +78,7 @@ def setup_method(self): def teardown_method(self): self.graph.close() - def test_Query(self): + def test_query(self): query = "select distinct ?Concept where {[] a ?Concept} LIMIT 1" _query = SPARQLConnector.query self.httpmock.responses[MethodName.GET].append( @@ -105,7 +107,7 @@ def test_Query(self): count = 0 for i in res: count += 1 - assert type(i[0]) == URIRef, i[0].n3() + assert type(i[0]) is URIRef, i[0].n3() assert count > 0 mock.assert_called_once() args, kwargs = mock.call_args @@ -121,7 +123,7 @@ def unpacker(query, default_graph=None, named_graph=None): assert re.match(r"^/sparql", req.path) assert query in req.path_query["query"][0] - def 
test_initNs(self): + def test_init_ns(self): query = """\ SELECT ?label WHERE { ?s a xyzzy:Concept ; xyzzy:prefLabel ?label . } LIMIT 10 @@ -167,9 +169,7 @@ def test_initNs(self): 1899–1900 in Belgian football -""".encode( - "utf8" - ), +""".encode(), {"Content-Type": ["application/sparql-results+xml; charset=UTF-8"]}, ) ) @@ -177,14 +177,14 @@ def test_initNs(self): query, initNs={"xyzzy": "http://www.w3.org/2004/02/skos/core#"} ) for i in res: - assert type(i[0]) == Literal, i[0].n3() + assert type(i[0]) is Literal, i[0].n3() assert self.httpmock.mocks[MethodName.GET].call_count == 1 req = self.httpmock.requests[MethodName.GET].pop(0) assert re.match(r"^/sparql", req.path) assert query in req.path_query["query"][0] - def test_noinitNs(self): + def test_noinit_ns(self): query = """\ SELECT ?label WHERE { ?s a xyzzy:Concept ; xyzzy:prefLabel ?label . } LIMIT 10 @@ -257,15 +257,13 @@ def test_query_with_added_prolog(self): 1899–1900 in Belgian football -""".encode( - "utf8" - ), +""".encode(), {"Content-Type": ["application/sparql-results+xml; charset=UTF-8"]}, ) ) res = helper.query_with_retry(self.graph, prologue + query) for i in res: - assert type(i[0]) == Literal, i[0].n3() + assert type(i[0]) is Literal, i[0].n3() assert self.httpmock.mocks[MethodName.GET].call_count == 1 req = self.httpmock.requests[MethodName.GET].pop(0) assert re.match(r"^/sparql", req.path) @@ -321,15 +319,13 @@ def test_query_with_added_rdf_prolog(self): 1899–1900 in Belgian football -""".encode( - "utf8" - ), +""".encode(), {"Content-Type": ["application/sparql-results+xml; charset=UTF-8"]}, ) ) res = helper.query_with_retry(self.graph, prologue + query) for i in res: - assert type(i[0]) == Literal, i[0].n3() + assert type(i[0]) is Literal, i[0].n3() assert self.httpmock.mocks[MethodName.GET].call_count == 1 req = self.httpmock.requests[MethodName.GET].pop(0) assert re.match(r"^/sparql", req.path) @@ -349,7 +345,7 @@ def test_counting_graph_and_store_queries(self): response = 
MockHTTPResponse( 200, "OK", - """\ + b"""\ @@ -371,9 +367,7 @@ def test_counting_graph_and_store_queries(self): http://www.openlinksw.com/virtrdf-data-formats#default-iid-nonblank - """.encode( - "utf8" - ), + """, {"Content-Type": ["application/sparql-results+xml; charset=UTF-8"]}, ) @@ -440,14 +434,14 @@ def setup_mocked_endpoint(self): def teardown_method(self): self.graph.close() - def test_Query(self): + def test_query(self): query = "insert data { }" res = self.graph.update(query) print(res) class SPARQL11ProtocolStoreMock(BaseHTTPRequestHandler): - def do_POST(self): + def do_POST(self): # noqa: N802 """ If the body should be analysed as well, just use: ``` @@ -484,7 +478,7 @@ def do_POST(self): self.end_headers() return - def do_GET(self): + def do_GET(self): # noqa: N802 # Process an HTTP GET request and return a response with an HTTP 200 status. self.send_response(200, "OK") self.end_headers() diff --git a/test/test_store/test_store_sparqlstore_query.py b/test/test_store/test_store_sparqlstore_query.py index b22585921..b8089e4d9 100644 --- a/test/test_store/test_store_sparqlstore_query.py +++ b/test/test_store/test_store_sparqlstore_query.py @@ -2,15 +2,15 @@ import itertools import logging -from test.utils import GraphHelper -from test.utils.http import MethodName, MockHTTPResponse -from test.utils.httpservermock import ServedBaseHTTPServerMock from typing import Dict, Iterable, List, Optional, Set, Tuple import pytest from _pytest.mark.structures import ParameterSet from rdflib.graph import Graph +from test.utils import GraphHelper +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock def make_test_query_construct_format_cases() -> Iterable[ParameterSet]: diff --git a/test/test_store/test_store_sparqlstore_sparqlconnector.py b/test/test_store/test_store_sparqlstore_sparqlconnector.py index 992ef2b07..acc14389f 100644 --- a/test/test_store/test_store_sparqlstore_sparqlconnector.py +++ 
b/test/test_store/test_store_sparqlstore_sparqlconnector.py @@ -2,14 +2,14 @@ import json import logging -from test.utils.http import MethodName, MockHTTPResponse -from test.utils.httpservermock import ServedBaseHTTPServerMock from typing import Optional import pytest from rdflib.graph import Graph from rdflib.plugins.stores.sparqlstore import SPARQLStore +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock @pytest.mark.parametrize( diff --git a/test/test_store/test_store_sparqlupdatestore.py b/test/test_store/test_store_sparqlupdatestore.py index c55b3ac62..5c9c38005 100644 --- a/test/test_store/test_store_sparqlupdatestore.py +++ b/test/test_store/test_store_sparqlupdatestore.py @@ -1,12 +1,10 @@ -# -*- coding: utf-8 -*- - import re -from test.data import bob, cheese, hates, likes, michel, pizza, tarek from urllib.request import urlopen import pytest from rdflib import BNode, ConjunctiveGraph, Graph, Literal, URIRef +from test.data import BOB, CHEESE, HATES, LIKES, MICHEL, PIZZA, TAREK HOST = "http://localhost:3031" DB = "/db/" @@ -34,7 +32,7 @@ @pytest.fixture def get_graph(): - longMessage = True + longMessage = True # noqa: F841 graph = ConjunctiveGraph("SPARQLUpdateStore") root = HOST + DB @@ -53,48 +51,48 @@ def get_graph(): def test_simple_graph(get_graph): graph = get_graph g = graph.get_context(graphuri) - g.add((tarek, likes, pizza)) - g.add((bob, likes, pizza)) - g.add((bob, likes, cheese)) + g.add((TAREK, LIKES, PIZZA)) + g.add((BOB, LIKES, PIZZA)) + g.add((BOB, LIKES, CHEESE)) g2 = graph.get_context(othergraphuri) - g2.add((michel, likes, pizza)) + g2.add((MICHEL, LIKES, PIZZA)) assert len(g) == 3, "graph contains 3 triples" assert len(g2) == 1, "other graph contains 1 triple" r = g.query("SELECT * WHERE { ?s . 
}") - assert len(list(r)) == 2, "two people like pizza" + assert len(list(r)) == 2, "two people like PIZZA" - r = g.triples((None, likes, pizza)) - assert len(list(r)) == 2, "two people like pizza" + r = g.triples((None, LIKES, PIZZA)) + assert len(list(r)) == 2, "two people like PIZZA" # Test initBindings r = g.query( "SELECT * WHERE { ?s . }", - initBindings={"s": tarek}, + initBindings={"s": TAREK}, ) - assert len(list(r)) == 1, "i was asking only about tarek" + assert len(list(r)) == 1, "i was asking only about TAREK" - r = g.triples((tarek, likes, pizza)) - assert len(list(r)) == 1, "i was asking only about tarek" + r = g.triples((TAREK, LIKES, PIZZA)) + assert len(list(r)) == 1, "i was asking only about TAREK" - r = g.triples((tarek, likes, cheese)) - assert len(list(r)) == 0, "tarek doesn't like cheese" + r = g.triples((TAREK, LIKES, CHEESE)) + assert len(list(r)) == 0, "TAREK doesn't like CHEESE" - g2.add((tarek, likes, pizza)) - g.remove((tarek, likes, pizza)) + g2.add((TAREK, LIKES, PIZZA)) + g.remove((TAREK, LIKES, PIZZA)) r = g.query("SELECT * WHERE { ?s . }") - assert len(list(r)) == 1, "only bob likes pizza" + assert len(list(r)) == 1, "only BOB LIKES PIZZA" def test_conjunctive_default(get_graph): graph = get_graph g = graph.get_context(graphuri) - g.add((tarek, likes, pizza)) + g.add((TAREK, LIKES, PIZZA)) g2 = graph.get_context(othergraphuri) - g2.add((bob, likes, pizza)) - g.add((tarek, hates, cheese)) + g2.add((BOB, LIKES, PIZZA)) + g.add((TAREK, HATES, CHEESE)) assert 2 == len(g), "graph contains 2 triples" @@ -115,27 +113,27 @@ def test_conjunctive_default(get_graph): ) r = graph.query("SELECT * WHERE { ?s . }") - assert len(list(r)) == 2, "two people like pizza" + assert len(list(r)) == 2, "two people like PIZZA" r = graph.query( "SELECT * WHERE { ?s . 
}", - initBindings={"s": tarek}, + initBindings={"s": TAREK}, ) - assert len(list(r)) == 1, "i was asking only about tarek" + assert len(list(r)) == 1, "i was asking only about TAREK" - r = graph.triples((tarek, likes, pizza)) - assert len(list(r)) == 1, "i was asking only about tarek" + r = graph.triples((TAREK, LIKES, PIZZA)) + assert len(list(r)) == 1, "i was asking only about TAREK" - r = graph.triples((tarek, likes, cheese)) - assert len(list(r)) == 0, "tarek doesn't like cheese" + r = graph.triples((TAREK, LIKES, CHEESE)) + assert len(list(r)) == 0, "TAREK doesn't like CHEESE" - g2.remove((bob, likes, pizza)) + g2.remove((BOB, LIKES, PIZZA)) r = graph.query("SELECT * WHERE { ?s . }") - assert len(list(r)) == 1, "only tarek likes pizza" + assert len(list(r)) == 1, "only TAREK LIKES PIZZA" -def testU_update(get_graph): +def test_u_update(get_graph): graph = get_graph graph.update( "INSERT DATA { GRAPH { . } }" @@ -145,7 +143,7 @@ def testU_update(get_graph): assert 1 == len(g), "graph contains 1 triples" -def testU_update_with_initns(get_graph): +def test_u_update_with_initns(get_graph): graph = get_graph graph.update( "INSERT DATA { GRAPH ns:graph { ns:michel ns:likes ns:pizza . 
} }", @@ -154,8 +152,8 @@ def testU_update_with_initns(get_graph): g = graph.get_context(graphuri) assert set(g.triples((None, None, None))) == set( - [(michel, likes, pizza)] - ), "only michel likes pizza" + [(MICHEL, LIKES, PIZZA)] + ), "only MICHEL LIKES PIZZA" def test_update_with_init_bindings(get_graph): @@ -171,8 +169,8 @@ def test_update_with_init_bindings(get_graph): g = graph.get_context(graphuri) assert set(g.triples((None, None, None))) == set( - [(michel, likes, pizza)] - ), "only michel likes pizza" + [(MICHEL, LIKES, PIZZA)] + ), "only MICHEL LIKES PIZZA" def test_update_with_blank_node(get_graph): @@ -187,7 +185,7 @@ def test_update_with_blank_node(get_graph): assert t[2].n3() == "" -def test_updateW_with_blank_node_serialize_and_parse(get_graph): +def test_update_w_with_blank_node_serialize_and_parse(get_graph): graph = get_graph graph.update( "INSERT DATA { GRAPH { _:blankA } }" @@ -197,7 +195,7 @@ def test_updateW_with_blank_node_serialize_and_parse(get_graph): raised = False try: Graph().parse(data=string, format="ntriples") - except Exception as e: + except Exception as e: # noqa: F841 raised = True assert raised is False, "Exception raised when parsing: " + string @@ -217,8 +215,8 @@ def test_multiple_update_with_init_bindings(get_graph): g = graph.get_context(graphuri) assert set(g.triples((None, None, None))) == set( - [(michel, likes, pizza), (bob, likes, pizza)] - ), "michel and bob like pizza" + [(MICHEL, LIKES, PIZZA), (BOB, LIKES, PIZZA)] + ), "MICHEL and BOB like PIZZA" def test_named_graph_update(get_graph): @@ -227,8 +225,8 @@ def test_named_graph_update(get_graph): r1 = "INSERT DATA { }" g.update(r1) assert set(g.triples((None, None, None))) == set( - [(michel, likes, pizza)] - ), "only michel likes pizza" + [(MICHEL, LIKES, PIZZA)] + ), "only MICHEL LIKES PIZZA" r2 = ( "DELETE { } " @@ -236,8 +234,8 @@ def test_named_graph_update(get_graph): ) g.update(r2) assert set(g.triples((None, None, None))) == set( - [(bob, likes, pizza)] - 
), "only bob likes pizza" + [(BOB, LIKES, PIZZA)] + ), "only BOB LIKES PIZZA" says = URIRef("urn:says") @@ -252,7 +250,7 @@ def test_named_graph_update(get_graph): g.update(r3) values = set() - for v in g.objects(bob, says): + for v in g.objects(BOB, says): values.add(str(v)) assert values == set(tricky_strs) @@ -277,7 +275,7 @@ def test_named_graph_update(get_graph): ) g.update(r4) values = set() - for v in g.objects(michel, says): + for v in g.objects(MICHEL, says): values.add(str(v)) assert values == set( [ @@ -299,7 +297,7 @@ def test_named_graph_update(get_graph): g.update(r5) values = set() - for v in g.objects(michel, hates): + for v in g.objects(MICHEL, HATES): values.add(str(v)) assert values == set(["urn:example:foo'bar?baz;a=1&b=2#fragment", "'}"]) @@ -313,19 +311,19 @@ def test_named_graph_update(get_graph): g.update(r6) values = set() - for v in g.objects(bob, hates): + for v in g.objects(BOB, HATES): values.add(v) - assert values == set([bob, michel]) + assert values == set([BOB, MICHEL]) def test_named_graph_update_with_init_bindings(get_graph): graph = get_graph g = graph.get_context(graphuri) r = "INSERT { ?a ?b ?c } WHERE {}" - g.update(r, initBindings={"a": michel, "b": likes, "c": pizza}) + g.update(r, initBindings={"a": MICHEL, "b": LIKES, "c": PIZZA}) assert set(g.triples((None, None, None))) == set( - [(michel, likes, pizza)] - ), "only michel likes pizza" + [(MICHEL, LIKES, PIZZA)] + ), "only MICHEL LIKES PIZZA" def test_empty_named_graph(get_graph): diff --git a/test/test_store/test_store_sparqlupdatestore_mock.py b/test/test_store/test_store_sparqlupdatestore_mock.py index 16af87743..5d7e13eb8 100644 --- a/test/test_store/test_store_sparqlupdatestore_mock.py +++ b/test/test_store/test_store_sparqlupdatestore_mock.py @@ -1,12 +1,10 @@ -from test.utils.http import MethodName, MockHTTPResponse -from test.utils.httpservermock import ServedBaseHTTPServerMock from typing import ClassVar -from rdflib import Namespace from rdflib.graph import 
ConjunctiveGraph from rdflib.plugins.stores.sparqlstore import SPARQLUpdateStore - -EG = Namespace("http://example.org/") +from test.utils.http import MethodName, MockHTTPResponse +from test.utils.httpservermock import ServedBaseHTTPServerMock +from test.utils.namespace import EGDO class TestSPARQLConnector: @@ -37,7 +35,9 @@ def teardown_method(self): def test_graph_update(self): graph = ConjunctiveGraph("SPARQLUpdateStore") graph.open((self.query_endpoint, self.update_endpoint)) - update_statement = f"INSERT DATA {{ {EG['subj']} {EG['pred']} {EG['obj']}. }}" + update_statement = ( + f"INSERT DATA {{ {EGDO['subj']} {EGDO['pred']} {EGDO['obj']}. }}" + ) self.httpmock.responses[MethodName.POST].append( MockHTTPResponse( @@ -60,7 +60,9 @@ def test_graph_update(self): def test_update_encoding(self): graph = ConjunctiveGraph("SPARQLUpdateStore") graph.open((self.query_endpoint, self.update_endpoint)) - update_statement = f"INSERT DATA {{ {EG['subj']} {EG['pred']} {EG['obj']}. }}" + update_statement = ( + f"INSERT DATA {{ {EGDO['subj']} {EGDO['pred']} {EGDO['obj']}. }}" + ) self.httpmock.responses[MethodName.POST].append( MockHTTPResponse( @@ -84,7 +86,9 @@ def test_content_type(self): store = SPARQLUpdateStore( self.query_endpoint, self.update_endpoint, auth=("admin", "admin") ) - update_statement = f"INSERT DATA {{ {EG['subj']} {EG['pred']} {EG['obj']}. }}" + update_statement = ( + f"INSERT DATA {{ {EGDO['subj']} {EGDO['pred']} {EGDO['obj']}. }}" + ) for _ in range(2): # run it twice so we can pick up issues with order both ways. 
diff --git a/test/test_tools/test_chunk_serializer.py b/test/test_tools/test_chunk_serializer.py index 4f582b192..d683ca606 100644 --- a/test/test_tools/test_chunk_serializer.py +++ b/test/test_tools/test_chunk_serializer.py @@ -4,17 +4,17 @@ import os from contextlib import ExitStack from pathlib import Path -from test.data import TEST_DATA_DIR -from test.utils import GraphHelper -from test.utils.graph import cached_graph -from test.utils.namespace import MF -from test.utils.path import ctx_chdir from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union import pytest from rdflib import Graph from rdflib.tools.chunk_serializer import serialize_in_chunks +from test.data import TEST_DATA_DIR +from test.utils import GraphHelper +from test.utils.graph import cached_graph +from test.utils.namespace import MF +from test.utils.path import ctx_chdir if TYPE_CHECKING: from builtins import ellipsis diff --git a/test/test_tools/test_csv2rdf.py b/test/test_tools/test_csv2rdf.py index 48d32a545..450aa31bf 100644 --- a/test/test_tools/test_csv2rdf.py +++ b/test/test_tools/test_csv2rdf.py @@ -3,6 +3,7 @@ import subprocess import sys from tempfile import mkstemp + from test.data import TEST_DATA_DIR REALESTATE_FILE_PATH = os.path.join(TEST_DATA_DIR, "csv", "realestate.csv") @@ -17,9 +18,8 @@ def test_csv2rdf_cli(self): "rdflib.tools.csv2rdf", str(REALESTATE_FILE_PATH), ], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - universal_newlines=True, + capture_output=True, + text=True, ) assert completed.returncode == 0 assert "Converted 19 rows into 228 triples." in completed.stderr diff --git a/test/test_trig.py b/test/test_trig.py index de5c2108f..1c158fa86 100644 --- a/test/test_trig.py +++ b/test/test_trig.py @@ -60,15 +60,15 @@ def test_remember_namespace(): # prefix for the graph but later serialize() calls would work. first_out = g.serialize(format="trig", encoding="latin-1") second_out = g.serialize(format="trig", encoding="latin-1") - assert b"@prefix ns1: ." 
in second_out - assert b"@prefix ns1: ." in first_out + assert b"@prefix ns1: ." not in second_out + assert b"@prefix ns1: ." not in first_out def test_graph_qname_syntax(): g = rdflib.ConjunctiveGraph() g.add(TRIPLE + (rdflib.URIRef("http://example.com/graph1"),)) out = g.serialize(format="trig", encoding="latin-1") - assert b"ns1:graph1 {" in out + assert b"ns1:graph1 {" not in out def test_graph_uri_syntax(): @@ -178,9 +178,9 @@ def test_prefixes(): cg.parse(data=data, format="trig") data = cg.serialize(format="trig", encoding="latin-1") - assert "ns2: ".encode("latin-1") not in data, data - assert "ns2:document1".encode("latin-1") in data, data + assert "ns2:document1".encode("latin-1") not in data, data def test_issue_2154(): diff --git a/test/test_turtle_quoting.py b/test/test_turtle_quoting.py index 14d82bca2..aa523f57b 100644 --- a/test/test_turtle_quoting.py +++ b/test/test_turtle_quoting.py @@ -3,6 +3,8 @@ formats that are related to turtle, such as ntriples, nquads, trig and n3. 
""" +from __future__ import annotations + import itertools import logging import re @@ -10,10 +12,10 @@ import pytest -from rdflib import Namespace from rdflib.graph import ConjunctiveGraph, Graph from rdflib.plugins.parsers import ntriples from rdflib.term import Literal, URIRef +from test.utils.namespace import EGDC from .utils import GraphHelper @@ -155,9 +157,6 @@ def test_parse_correctness( assert obj.value == unquoted -EGNS = Namespace("http://example.com/") - - @pytest.mark.parametrize( "format, char, escaped", [ @@ -175,11 +174,11 @@ def test_parse_correctness( def test_pname_escaping(format: str, char: str, escaped: str) -> None: graph = Graph() triple = ( - URIRef(EGNS["prefix/John_Doe"]), - URIRef(EGNS[f"prefix/prop{char}"]), + URIRef(EGDC["prefix/John_Doe"]), + URIRef(EGDC[f"prefix/prop{char}"]), Literal("foo", lang="en"), ) - graph.bind("egns", EGNS["prefix/"]) + graph.bind("egns", EGDC["prefix/"]) graph.add(triple) data = graph.serialize(format=format) pattern = re.compile(f"\\segns:prop{re.escape(escaped)}\\s") @@ -206,12 +205,12 @@ def test_pname_escaping(format: str, char: str, escaped: str) -> None: def test_serialize_roundtrip(format: str, char: str) -> None: graph = Graph() triple = ( - URIRef(EGNS["prefix/John_Doe"]), - URIRef(EGNS[f"prefix/prop{char}"]), + URIRef(EGDC["prefix/John_Doe"]), + URIRef(EGDC[f"prefix/prop{char}"]), Literal("foo", lang="en"), ) graph.add(triple) - graph.bind("egns", EGNS["prefix/"]) + graph.bind("egns", EGDC["prefix/"]) data = graph.serialize(format=format) logging.debug("format = %s, char = %s, data = %s", format, char, data) parsed_graph = Graph() diff --git a/test/test_typing.py b/test/test_typing.py index 1b9113025..4934b330a 100644 --- a/test/test_typing.py +++ b/test/test_typing.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python3 - # This software was developed at the National Institute of Standards # and Technology by employees of the Federal Government in the course # of their official duties. 
Pursuant to title 17 Section 105 of the @@ -19,6 +17,7 @@ # mypy: check_untyped_defs, disallow_untyped_decorators # mypy: no_implicit_optional, warn_redundant_casts, warn_unused_ignores # mypy: warn_return_any, no_implicit_reexport, strict_equality +from __future__ import annotations from typing import Set, Tuple diff --git a/test/test_util.py b/test/test_util.py index 37d1db291..63c085033 100644 --- a/test/test_util.py +++ b/test/test_util.py @@ -1,12 +1,9 @@ -# -*- coding: utf-8 -*- +from __future__ import annotations import logging import time from contextlib import ExitStack from pathlib import Path -from test.data import TEST_DATA_DIR -from test.utils.graph import cached_graph -from test.utils.namespace import RDFT from typing import Any, Collection, List, Optional, Set, Tuple, Type, Union import pytest @@ -16,6 +13,9 @@ from rdflib.namespace import RDF, RDFS from rdflib.term import BNode, IdentifiedNode, Literal, Node, URIRef from rdflib.util import _coalesce, _iri2uri, find_roots, get_tree +from test.data import TEST_DATA_DIR +from test.utils.graph import cached_graph +from test.utils.namespace import RDFT n3source = """\ @prefix : . 
@@ -133,7 +133,7 @@ def setup_method(self): datatype=URIRef("http://www.w3.org/2001/XMLSchema#dateTime"), ) - def test_util_to_term_sisNone(self): + def test_util_to_term_sisNone(self): # noqa: N802 s = None assert util.to_term(s) == s assert util.to_term(s, default="") == "" diff --git a/test/test_w3c_spec/test_n3_w3c.py b/test/test_w3c_spec/test_n3_w3c.py index 61b851a70..a1558255d 100644 --- a/test/test_w3c_spec/test_n3_w3c.py +++ b/test/test_w3c_spec/test_n3_w3c.py @@ -1,10 +1,10 @@ """This runs the nt tests for the W3C RDF Working Group's N-Quads test suite.""" + +from __future__ import annotations + import itertools import os -from test.data import TEST_DATA_DIR -from test.utils.manifest import RDFTest, read_manifest -from test.utils.namespace import RDFT from typing import Callable, Dict import pytest @@ -13,6 +13,9 @@ from rdflib.compare import graph_diff, isomorphic from rdflib.namespace import split_uri from rdflib.term import Node, URIRef +from test.data import TEST_DATA_DIR +from test.utils.manifest import RDFTest, read_manifest +from test.utils.namespace import RDFT verbose = False diff --git a/test/test_w3c_spec/test_nquads_w3c.py b/test/test_w3c_spec/test_nquads_w3c.py index 0c6cd3989..255602b73 100644 --- a/test/test_w3c_spec/test_nquads_w3c.py +++ b/test/test_w3c_spec/test_nquads_w3c.py @@ -1,17 +1,20 @@ """This runs the nquads tests for the W3C RDF Working Group's N-Quads test suite.""" + +from __future__ import annotations + import logging from contextlib import ExitStack -from test.data import TEST_DATA_DIR -from test.utils import BNodeHandling, GraphHelper, ensure_suffix -from test.utils.dawg_manifest import ManifestEntry, params_from_sources -from test.utils.iri import URIMapper -from test.utils.namespace import RDFT from typing import Optional import pytest from rdflib.graph import Dataset +from test.data import TEST_DATA_DIR +from test.utils import BNodeHandling, GraphHelper, ensure_suffix +from test.utils.dawg_manifest import ManifestEntry, 
params_from_sources +from test.utils.iri import URIMapper +from test.utils.namespace import RDFT logger = logging.getLogger(__name__) diff --git a/test/test_w3c_spec/test_nt_w3c.py b/test/test_w3c_spec/test_nt_w3c.py index b857f0459..a37114211 100644 --- a/test/test_w3c_spec/test_nt_w3c.py +++ b/test/test_w3c_spec/test_nt_w3c.py @@ -1,17 +1,20 @@ """This runs the nt tests for the W3C RDF Working Group's N-Triples test suite.""" + +from __future__ import annotations + import logging from contextlib import ExitStack -from test.data import TEST_DATA_DIR -from test.utils import BNodeHandling, GraphHelper, ensure_suffix -from test.utils.dawg_manifest import ManifestEntry, params_from_sources -from test.utils.iri import URIMapper -from test.utils.namespace import RDFT from typing import Optional import pytest from rdflib.graph import Graph +from test.data import TEST_DATA_DIR +from test.utils import BNodeHandling, GraphHelper, ensure_suffix +from test.utils.dawg_manifest import ManifestEntry, params_from_sources +from test.utils.iri import URIMapper +from test.utils.namespace import RDFT logger = logging.getLogger(__name__) diff --git a/test/test_w3c_spec/test_rdfxml_w3c.py b/test/test_w3c_spec/test_rdfxml_w3c.py index a3b8e5029..cc3974ce9 100644 --- a/test/test_w3c_spec/test_rdfxml_w3c.py +++ b/test/test_w3c_spec/test_rdfxml_w3c.py @@ -1,15 +1,17 @@ +from __future__ import annotations + import logging from contextlib import ExitStack -from test.data import TEST_DATA_DIR -from test.utils import BNodeHandling, GraphHelper, ensure_suffix -from test.utils.dawg_manifest import ManifestEntry, params_from_sources -from test.utils.iri import URIMapper -from test.utils.namespace import RDFT from typing import Optional import pytest from rdflib.graph import Graph +from test.data import TEST_DATA_DIR +from test.utils import BNodeHandling, GraphHelper, ensure_suffix +from test.utils.dawg_manifest import ManifestEntry, params_from_sources +from test.utils.iri import URIMapper +from 
test.utils.namespace import RDFT logger = logging.getLogger(__name__) diff --git a/test/test_w3c_spec/test_sparql10_w3c.py b/test/test_w3c_spec/test_sparql10_w3c.py index 70df2d066..3f33ca006 100644 --- a/test/test_w3c_spec/test_sparql10_w3c.py +++ b/test/test_w3c_spec/test_sparql10_w3c.py @@ -1,7 +1,13 @@ """ Runs the SPARQL 1.0 test suite from. """ + from contextlib import ExitStack +from typing import Generator + +import pytest +from pytest import MonkeyPatch + from test.data import TEST_DATA_DIR from test.utils import ensure_suffix from test.utils.dawg_manifest import MarksDictType, params_from_sources @@ -12,10 +18,6 @@ check_entry, ctx_configure_rdflib, ) -from typing import Generator - -import pytest -from pytest import MonkeyPatch REMOTE_BASE_IRI = "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/" LOCAL_BASE_DIR = TEST_DATA_DIR / "suites/w3c/dawg-data-r2/" @@ -23,10 +25,14 @@ (REMOTE_BASE_IRI, ensure_suffix(LOCAL_BASE_DIR.as_uri(), "/")), ) MARK_DICT: MarksDictType = { - f"{REMOTE_BASE_IRI}basic/manifest#term-6": pytest.mark.xfail( - reason="query misinterpreted." + f"{REMOTE_BASE_IRI}basic/manifest#term-6": pytest.mark.skip( + reason="using Sparql 1.1 which is not backwards compatible. " + "'456.' will be interpreted differently in query and data." + ), + f"{REMOTE_BASE_IRI}basic/manifest#term-7": pytest.mark.skip( + reason="using Sparql 1.1 which is not backwards compatible. " + "'456.' will be interpreted differently in query and data." 
), - f"{REMOTE_BASE_IRI}basic/manifest#term-7": pytest.mark.xfail(reason="..."), f"{REMOTE_BASE_IRI}expr-builtin/manifest#dawg-datatype-2": pytest.mark.xfail( reason="additional row in output" ), @@ -112,9 +118,11 @@ def configure_rdflib() -> Generator[None, None, None]: LOCAL_BASE_DIR / "manifest-syntax.ttl", mark_dict=MARK_DICT, markers=( - lambda entry: pytest.mark.skip(reason="tester not implemented") - if entry.type in SKIP_TYPES - else None, + lambda entry: ( + pytest.mark.skip(reason="tester not implemented") + if entry.type in SKIP_TYPES + else None + ), ), report_prefix="rdflib_w3c_sparql10", ), diff --git a/test/test_w3c_spec/test_sparql11_w3c.py b/test/test_w3c_spec/test_sparql11_w3c.py index 2afcf910a..f68227470 100644 --- a/test/test_w3c_spec/test_sparql11_w3c.py +++ b/test/test_w3c_spec/test_sparql11_w3c.py @@ -1,7 +1,13 @@ """ Runs the SPARQL 1.1 test suite from. """ + from contextlib import ExitStack +from typing import Generator + +import pytest +from pytest import MonkeyPatch + from test.data import TEST_DATA_DIR from test.utils import ensure_suffix from test.utils.dawg_manifest import MarksDictType, params_from_sources @@ -12,10 +18,6 @@ check_entry, ctx_configure_rdflib, ) -from typing import Generator - -import pytest -from pytest import MonkeyPatch REMOTE_BASE_IRI = "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/" LOCAL_BASE_DIR = TEST_DATA_DIR / "suites/w3c/sparql11/" @@ -253,9 +255,11 @@ def configure_rdflib() -> Generator[None, None, None]: LOCAL_BASE_DIR / "manifest-all.ttl", mark_dict=MARK_DICT, markers=( - lambda entry: pytest.mark.skip(reason="tester not implemented") - if entry.type in SKIP_TYPES - else None, + lambda entry: ( + pytest.mark.skip(reason="tester not implemented") + if entry.type in SKIP_TYPES + else None + ), ), report_prefix="rdflib_w3c_sparql11", ), diff --git a/test/test_w3c_spec/test_sparql_rdflib.py b/test/test_w3c_spec/test_sparql_rdflib.py index 73809109a..9e23f6d05 100644 --- 
a/test/test_w3c_spec/test_sparql_rdflib.py +++ b/test/test_w3c_spec/test_sparql_rdflib.py @@ -1,7 +1,13 @@ """ Runs the RDFLib SPARQL test suite. """ + from contextlib import ExitStack +from typing import Generator + +import pytest +from pytest import MonkeyPatch + from test.data import TEST_DATA_DIR from test.utils import ensure_suffix from test.utils.dawg_manifest import MarksDictType, params_from_sources @@ -12,10 +18,6 @@ check_entry, ctx_configure_rdflib, ) -from typing import Generator - -import pytest -from pytest import MonkeyPatch REMOTE_BASE_IRI = ( "http://raw.github.com/RDFLib/rdflib/main/test/data/suites/rdflib/sparql/" @@ -55,9 +57,11 @@ def configure_rdflib() -> Generator[None, None, None]: LOCAL_BASE_DIR / "manifest.ttl", mark_dict=MARK_DICT, markers=( - lambda entry: pytest.mark.skip(reason="tester not implemented") - if entry.type in SKIP_TYPES - else None, + lambda entry: ( + pytest.mark.skip(reason="tester not implemented") + if entry.type in SKIP_TYPES + else None + ), ), report_prefix="rdflib_sparql", ), diff --git a/test/test_w3c_spec/test_trig_w3c.py b/test/test_w3c_spec/test_trig_w3c.py index ea2b02edd..f3cf18ad6 100644 --- a/test/test_w3c_spec/test_trig_w3c.py +++ b/test/test_w3c_spec/test_trig_w3c.py @@ -1,18 +1,21 @@ """Runs the tests for the W3C RDF Working Group's TriG test suite. 
""" + +from __future__ import annotations + import logging from contextlib import ExitStack -from test.data import TEST_DATA_DIR -from test.utils import BNodeHandling, GraphHelper, ensure_suffix -from test.utils.dawg_manifest import ManifestEntry, params_from_sources -from test.utils.iri import URIMapper -from test.utils.namespace import RDFT from typing import Optional import pytest from rdflib.graph import Dataset +from test.data import TEST_DATA_DIR +from test.utils import BNodeHandling, GraphHelper, ensure_suffix +from test.utils.dawg_manifest import ManifestEntry, params_from_sources +from test.utils.iri import URIMapper +from test.utils.namespace import RDFT logger = logging.getLogger(__name__) @@ -173,12 +176,6 @@ def check_entry(entry: ManifestEntry) -> None: f"{REMOTE_BASE_IRI}#trig-syntax-bad-list-04": pytest.mark.xfail( reason="ignores badly formed quad" ), - f"{REMOTE_BASE_IRI}#trig-graph-bad-01": pytest.mark.xfail( - reason="accepts GRAPH with no name" - ), - f"{REMOTE_BASE_IRI}#trig-graph-bad-07": pytest.mark.xfail( - reason="accepts nested GRAPH" - ), } diff --git a/test/test_w3c_spec/test_turtle_w3c.py b/test/test_w3c_spec/test_turtle_w3c.py index 5d5d06d2a..bec4754a5 100644 --- a/test/test_w3c_spec/test_turtle_w3c.py +++ b/test/test_w3c_spec/test_turtle_w3c.py @@ -1,18 +1,20 @@ """This runs the turtle tests for the W3C RDF Working Group's Turtle test suite.""" +from __future__ import annotations + import logging from contextlib import ExitStack -from test.data import TEST_DATA_DIR -from test.utils import BNodeHandling, GraphHelper, ensure_suffix -from test.utils.dawg_manifest import ManifestEntry, params_from_sources -from test.utils.iri import URIMapper -from test.utils.namespace import RDFT from typing import Optional import pytest from rdflib.graph import Graph +from test.data import TEST_DATA_DIR +from test.utils import BNodeHandling, GraphHelper, ensure_suffix +from test.utils.dawg_manifest import ManifestEntry, params_from_sources +from 
test.utils.iri import URIMapper +from test.utils.namespace import RDFT logger = logging.getLogger(__name__) diff --git a/test/utils/__init__.py b/test/utils/__init__.py index dc27251a3..cdcedda9c 100644 --- a/test/utils/__init__.py +++ b/test/utils/__init__.py @@ -5,7 +5,7 @@ (``test/utils/tests/``). """ -from __future__ import print_function +from __future__ import annotations import enum import pprint @@ -19,6 +19,7 @@ Iterable, List, Optional, + Sequence, Set, Tuple, Type, @@ -35,7 +36,7 @@ from rdflib import BNode, ConjunctiveGraph, Graph from rdflib.graph import Dataset from rdflib.plugin import Plugin -from rdflib.term import Identifier, Literal, Node, URIRef +from rdflib.term import IdentifiedNode, Identifier, Literal, Node, URIRef PluginT = TypeVar("PluginT") @@ -257,6 +258,23 @@ def assert_quad_sets_equals( else: assert lhs_set != rhs_set + @classmethod + def assert_collection_graphs_equal( + cls, lhs: ConjunctiveGraph, rhs: ConjunctiveGraph + ) -> None: + """ + Assert that all graphs in the provided collections are equal, + comparing named graphs with identically named graphs. 
+ """ + cls.assert_triple_sets_equals(lhs.default_context, rhs.default_context) + graph_names = cls.non_default_graph_names(lhs) | cls.non_default_graph_names( + rhs + ) + for identifier in graph_names: + cls.assert_triple_sets_equals( + lhs.get_context(identifier), rhs.get_context(identifier) + ) + @classmethod def assert_sets_equals( cls, @@ -381,6 +399,21 @@ def strip_literal_datatypes(cls, graph: Graph, datatypes: Set[URIRef]) -> None: if object.datatype in datatypes: object._datatype = None + @classmethod + def non_default_graph_names( + cls, container: ConjunctiveGraph + ) -> Set[IdentifiedNode]: + return set(context.identifier for context in container.contexts()) - { + container.default_context.identifier + } + + @classmethod + def non_default_graphs(cls, container: ConjunctiveGraph) -> Sequence[Graph]: + result = [] + for name in cls.non_default_graph_names(container): + result.append(container.get_context(name)) + return result + def eq_(lhs, rhs, msg=None): """ @@ -455,4 +488,24 @@ def ensure_suffix(value: str, suffix: str) -> str: return value +def idfns(*idfns: Callable[[Any], Optional[str]]) -> Callable[[Any], Optional[str]]: + """ + Returns an ID function which will try each of the provided ID + functions in order. + + :param idfns: The ID functions to try. + :return: An ID function which will try each of the provided ID + functions. 
+ """ + + def _idfns(value: Any) -> Optional[str]: + for idfn in idfns: + result = idfn(value) + if result is not None: + return result + return None + + return _idfns + + from test.utils.iri import file_uri_to_path # noqa: E402 diff --git a/test/utils/dawg_manifest.py b/test/utils/dawg_manifest.py index ccb29b99b..ccb96bd7c 100644 --- a/test/utils/dawg_manifest.py +++ b/test/utils/dawg_manifest.py @@ -1,9 +1,7 @@ +from __future__ import annotations + import logging from dataclasses import dataclass, field -from test.utils import MarkListType, marks_to_list -from test.utils.graph import GraphSource, GraphSourceType -from test.utils.iri import URIMapper -from test.utils.namespace import MF from typing import ( Callable, Collection, @@ -24,6 +22,10 @@ from rdflib.graph import Graph from rdflib.namespace import RDF from rdflib.term import IdentifiedNode, Identifier, URIRef +from test.utils import MarkListType, marks_to_list +from test.utils.graph import GraphSource, GraphSourceType +from test.utils.iri import URIMapper +from test.utils.namespace import MF POFilterType = Tuple[Optional[URIRef], Optional[URIRef]] POFiltersType = Iterable[POFilterType] @@ -38,7 +40,7 @@ @dataclass class ManifestEntry: - manifest: "Manifest" + manifest: Manifest identifier: URIRef type: IdentifiedNode = field(init=False) action: Optional[IdentifiedNode] = field(init=False) @@ -46,12 +48,12 @@ class ManifestEntry: result_cardinality: Optional[URIRef] = field(init=False) def __post_init__(self) -> None: - type = self.value(RDF.type, IdentifiedNode) + type = self.value(RDF.type, IdentifiedNode) # type: ignore[type-abstract] assert type is not None self.type = type - self.action = self.value(MF.action, IdentifiedNode) - self.result = self.value(MF.result, IdentifiedNode) + self.action = self.value(MF.action, IdentifiedNode) # type: ignore[type-abstract] + self.result = self.value(MF.result, IdentifiedNode) # type: ignore[type-abstract] self.result_cardinality = 
self.value(MF.resultCardinality, URIRef) if self.result_cardinality is not None: assert self.result_cardinality == MF.LaxCardinality @@ -109,7 +111,7 @@ def from_graph( uri_mapper: URIMapper, graph: Graph, report_prefix: Optional[str] = None, - ) -> Generator["Manifest", None, None]: + ) -> Generator[Manifest, None, None]: for identifier in graph.subjects(RDF.type, MF.Manifest): assert isinstance(identifier, IdentifiedNode) manifest = Manifest( @@ -127,7 +129,7 @@ def from_sources( uri_mapper: URIMapper, *sources: GraphSourceType, report_prefix: Optional[str] = None, - ) -> Generator["Manifest", None, None]: + ) -> Generator[Manifest, None, None]: for source in sources: logging.debug("source(%s) = %r", id(source), source) source = GraphSource.from_source(source) @@ -141,14 +143,14 @@ def from_sources( local_base, public_id, ) - graph = source.load(public_id=public_id) + graph: Graph = source.load(public_id=public_id) yield from cls.from_graph( uri_mapper, graph, report_prefix, ) - def included(self) -> Generator["Manifest", None, None]: + def included(self) -> Generator[Manifest, None, None]: for includes in self.graph.objects(self.identifier, MF.include): for include in self.graph.items(includes): assert isinstance(include, str) @@ -161,10 +163,10 @@ def included(self) -> Generator["Manifest", None, None]: def entires( self, - entry_type: Type["ManifestEntryT"], + entry_type: Type[ManifestEntryT], exclude: Optional[POFiltersType] = None, include: Optional[POFiltersType] = None, - ) -> Generator["ManifestEntryT", None, None]: + ) -> Generator[ManifestEntryT, None, None]: for entries in self.graph.objects(self.identifier, MF.entries): for entry_iri in self.graph.items(entries): assert isinstance(entry_iri, URIRef) @@ -177,26 +179,26 @@ def entires( def params( self, - entry_type: Type["ManifestEntryT"], + entry_type: Type[ManifestEntryT], exclude: Optional[POFiltersType] = None, include: Optional[POFiltersType] = None, mark_dict: Optional[MarksDictType] = None, 
markers: Optional[Iterable[ManifestEntryMarkerType]] = None, - ) -> Generator["ParameterSet", None, None]: + ) -> Generator[ParameterSet, None, None]: for entry in self.entires(entry_type, exclude, include): yield entry.param(mark_dict, markers) def params_from_sources( uri_mapper: URIMapper, - entry_type: Type["ManifestEntryT"], + entry_type: Type[ManifestEntryT], *sources: GraphSourceType, exclude: Optional[POFiltersType] = None, include: Optional[POFiltersType] = None, mark_dict: Optional[MarksDictType] = None, markers: Optional[Iterable[ManifestEntryMarkerType]] = None, report_prefix: Optional[str] = None, -) -> Generator["ParameterSet", None, None]: +) -> Generator[ParameterSet, None, None]: for manifest in Manifest.from_sources( uri_mapper, *sources, report_prefix=report_prefix ): diff --git a/test/utils/destination.py b/test/utils/destination.py index ad767d1c4..7e5499e19 100644 --- a/test/utils/destination.py +++ b/test/utils/destination.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum from contextlib import contextmanager from dataclasses import dataclass @@ -27,7 +29,7 @@ def make_ref( self, tmp_path: Path, encoding: Optional[str] = None, - path_factory: Callable[[Path, "DestinationType", Optional[str]], Path] = ( + path_factory: Callable[[Path, DestinationType, Optional[str]], Path] = ( lambda tmp_path, type, encoding: tmp_path / f"file-{type.name}-{encoding}" ), ) -> Generator[Optional[DestRef], None, None]: diff --git a/test/utils/earl.py b/test/utils/earl.py index 7628c7870..2c9e605be 100644 --- a/test/utils/earl.py +++ b/test/utils/earl.py @@ -1,14 +1,14 @@ """ PYTEST_DONT_REWRITE """ + +from __future__ import annotations + import enum import logging from dataclasses import dataclass, field from datetime import datetime from pathlib import Path -from test.utils import GraphHelper -from test.utils.dawg_manifest import ManifestEntry -from test.utils.namespace import EARL, MF, RDFT from typing import ( TYPE_CHECKING, Callable, @@ 
-30,6 +30,9 @@ from rdflib import RDF, BNode, Graph, Literal, URIRef from rdflib.namespace import DC, DOAP, FOAF from rdflib.plugins.stores.memory import Memory +from test.utils import GraphHelper +from test.utils.dawg_manifest import ManifestEntry +from test.utils.namespace import EARL, MF, RDFT if TYPE_CHECKING: from _pytest.main import Session @@ -56,16 +59,16 @@ class EARLReport: This is a helper class for building an EARL report graph. """ - reporter: "EARLReporter" + reporter: EARLReporter output_file: Path - assertions: List[Tuple[URIRef, Set["_TripleType"]]] = field( + assertions: List[Tuple[URIRef, Set[_TripleType]]] = field( init=False, default_factory=list, repr=False ) def add_test_outcome( self, test_id: URIRef, outcome: URIRef, info: Optional[Literal] = None ): - triples: Set["_TripleType"] = set() + triples: Set[_TripleType] = set() assertion = BNode(f"{test_id}") triples.add((assertion, RDF.type, EARL.Assertion)) triples.add((assertion, EARL.test, test_id)) @@ -226,7 +229,7 @@ class TestResult(enum.Enum): class TestReportHelper: @classmethod - def get_rdf_test_uri(cls, report: "TestReport") -> Optional[URIRef]: + def get_rdf_test_uri(cls, report: TestReport) -> Optional[URIRef]: return next( ( cast(URIRef, item[1]) @@ -237,7 +240,7 @@ def get_rdf_test_uri(cls, report: "TestReport") -> Optional[URIRef]: ) @classmethod - def get_manifest_entry(cls, report: "TestReport") -> Optional[ManifestEntry]: + def get_manifest_entry(cls, report: TestReport) -> Optional[ManifestEntry]: return next( ( cast(ManifestEntry, item[1]) @@ -263,7 +266,7 @@ class EARLReporter: assertor_name: Optional[Literal] = None assertor_homepage: Optional[URIRef] = None add_datetime: bool = True - extra_triples: Set["_TripleType"] = field(default_factory=set) + extra_triples: Set[_TripleType] = field(default_factory=set) prefix_reports: Dict[str, EARLReport] = field(init=True, default_factory=dict) report: Optional[EARLReport] = field(init=True, default=None) @@ -332,22 +335,22 @@ 
def get_report_for(self, entry: Optional[ManifestEntry]) -> Optional[EARLReport] return None report = self.prefix_reports.get(manifest.report_prefix) if report is None: - report = self.prefix_reports[ - manifest.report_prefix - ] = self.make_report_with_prefix(manifest.report_prefix) + report = self.prefix_reports[manifest.report_prefix] = ( + self.make_report_with_prefix(manifest.report_prefix) + ) return report @pytest.hookimpl(hookwrapper=True) def pytest_runtest_makereport( - self, item: Item, call: "CallInfo[None]" - ) -> Generator[None, "_Result", None]: + self, item: Item, call: CallInfo[None] + ) -> Generator[None, _Result, None]: result = yield - report: "TestReport" = result.get_result() + report: TestReport = result.get_result() if not hasattr(item, "callspec"): return - callspec: "CallSpec2" = getattr(item, "callspec") + callspec: CallSpec2 = getattr(item, "callspec") rdf_test_uri = callspec.params.get("rdf_test_uri") if rdf_test_uri is not None: if isinstance(rdf_test_uri, str): @@ -373,7 +376,7 @@ def get_rdf_test_uri( return manifest_entry.identifier return None - def append_result(self, report: "TestReport", test_result: TestResult) -> None: + def append_result(self, report: TestReport, test_result: TestResult) -> None: rdf_test_uri = TestReportHelper.get_rdf_test_uri(report) manifest_entry = TestReportHelper.get_manifest_entry(report) rdf_test_uri = self.get_rdf_test_uri(rdf_test_uri, manifest_entry) @@ -396,7 +399,7 @@ def append_result(self, report: "TestReport", test_result: TestResult) -> None: else: earl_report.add_test_outcome(rdf_test_uri, EARL.cantTell) - def pytest_runtest_logreport(self, report: "TestReport") -> None: + def pytest_runtest_logreport(self, report: TestReport) -> None: logger.debug( "report: passed = %s, failed = %s, skipped = %s, when = %s, outcome = %s, keywords = %s", report.passed, @@ -422,7 +425,7 @@ def pytest_runtest_logreport(self, report: "TestReport") -> None: else: self.append_result(report, TestResult.ERROR) - 
def pytest_sessionfinish(self, session: "Session"): + def pytest_sessionfinish(self, session: Session): if self.report is not None: self.report.write() for report in self.prefix_reports.values(): diff --git a/test/utils/exceptions.py b/test/utils/exceptions.py deleted file mode 100644 index 94cfd9c29..000000000 --- a/test/utils/exceptions.py +++ /dev/null @@ -1,57 +0,0 @@ -from __future__ import annotations - -import logging -import re -from dataclasses import dataclass -from types import TracebackType -from typing import Any, ContextManager, Dict, Optional, Pattern, Type, Union - -import pytest -from pytest import ExceptionInfo - - -@dataclass -class ExceptionChecker(ContextManager[ExceptionInfo[Exception]]): - type: Type[Exception] - pattern: Optional[Union[Pattern[str], str]] = None - attributes: Optional[Dict[str, Any]] = None - - def __post_init__(self) -> None: - self._catcher = pytest.raises(self.type, match=self.pattern) - self._exception_info: Optional[ExceptionInfo[Exception]] = None - - def _check_attributes(self, exception: Exception) -> None: - if self.attributes is not None: - for key, value in self.attributes.items(): - logging.debug("checking exception attribute %s=%r", key, value) - assert hasattr(exception, key) - assert getattr(exception, key) == value - - def check(self, exception: Exception) -> None: - logging.debug("checking exception %s/%r", type(exception), exception) - pattern = self.pattern - if pattern is not None and not isinstance(pattern, re.Pattern): - pattern = re.compile(pattern) - try: - assert isinstance(exception, self.type) - if pattern is not None: - assert pattern.match(f"{exception}") - self._check_attributes(exception) - except Exception: - logging.error("problem checking exception", exc_info=exception) - raise - - def __enter__(self) -> ExceptionInfo[Exception]: - self._exception_info = self._catcher.__enter__() - return self._exception_info - - def __exit__( - self, - __exc_type: Optional[Type[BaseException]], - 
__exc_value: Optional[BaseException], - __traceback: Optional[TracebackType], - ) -> bool: - result = self._catcher.__exit__(__exc_type, __exc_value, __traceback) - if self._exception_info is not None: - self._check_attributes(self._exception_info.value) - return result diff --git a/test/utils/graph.py b/test/utils/graph.py index 42f62a189..4ea1cfbd4 100644 --- a/test/utils/graph.py +++ b/test/utils/graph.py @@ -1,14 +1,21 @@ +from __future__ import annotations + import logging from dataclasses import dataclass from functools import lru_cache from pathlib import Path -from typing import Optional, Tuple, Union +from runpy import run_path +from typing import Any, Optional, Tuple, Type, Union -from rdflib.graph import Graph +import rdflib.util +import test.data +from rdflib.graph import Graph, _GraphT from rdflib.util import guess_format GraphSourceType = Union["GraphSource", Path] +SUFFIX_FORMAT_MAP = {**rdflib.util.SUFFIX_FORMAT_MAP, "hext": "hext"} + @dataclass(frozen=True) class GraphSource: @@ -17,15 +24,26 @@ class GraphSource: public_id: Optional[str] = None @classmethod - def from_path(cls, path: Path, public_id: Optional[str] = None) -> "GraphSource": - format = guess_format(f"{path}") + def guess_format(cls, path: Path) -> Optional[str]: + format: Optional[str] + if path.suffix == ".py": + format = "python" + else: + format = guess_format(f"{path}", SUFFIX_FORMAT_MAP) + return format + + @classmethod + def from_path( + cls, path: Path, public_id: Optional[str] = None, format: Optional[str] = None + ) -> GraphSource: + if format is None: + format = cls.guess_format(path) if format is None: raise ValueError(f"could not guess format for source {path}") - return cls(path, format, public_id) @classmethod - def from_paths(cls, *paths: Path) -> Tuple["GraphSource", ...]: + def from_paths(cls, *paths: Path) -> Tuple[GraphSource, ...]: result = [] for path in paths: result.append(cls.from_path(path)) @@ -34,32 +52,63 @@ def from_paths(cls, *paths: Path) -> 
Tuple["GraphSource", ...]: @classmethod def from_source( cls, source: GraphSourceType, public_id: Optional[str] = None - ) -> "GraphSource": + ) -> GraphSource: logging.debug("source(%s) = %r", id(source), source) if isinstance(source, Path): source = GraphSource.from_path(source) return source + def public_id_or_path_uri(self) -> str: + if self.public_id is not None: + self.public_id + return self.path.as_uri() + def load( - self, graph: Optional[Graph] = None, public_id: Optional[str] = None - ) -> Graph: + self, + graph: Optional[_GraphT] = None, + public_id: Optional[str] = None, + # type error: Incompatible default for argument "graph_type" (default has type "Type[Graph]", argument has type "Type[_GraphT]") + # see https://github.com/python/mypy/issues/3737 + graph_type: Type[_GraphT] = Graph, # type: ignore[assignment] + ) -> _GraphT: if graph is None: - graph = Graph() - graph.parse( - source=self.path, - format=self.format, - publicID=self.public_id if public_id is None else public_id, - ) + graph = graph_type() + if self.format == "python": + load_from_python(self.path, graph, graph_type) + else: + graph.parse( + source=self.path, + format=self.format, + publicID=self.public_id if public_id is None else public_id, + ) return graph + @classmethod + def idfn(cls, val: Any) -> Optional[str]: + """ + ID function for GraphSource objects. + + :param val: The value to try to generate an identifier for. + :return: A string identifying the given value if the value is a + `GraphSource`, otherwise return `None`.
+ """ + if isinstance(val, cls): + try: + path_string = f"{val.path.relative_to(test.data.TEST_DATA_DIR)}" + except ValueError: + path_string = f"{val.path}" + return f"GS({path_string}, {val.format}, {val.public_id})" + return None + def load_sources( *sources: GraphSourceType, - graph: Optional[Graph] = None, + graph: Optional[_GraphT] = None, public_id: Optional[str] = None, -) -> Graph: + graph_type: Type[_GraphT] = Graph, # type: ignore[assignment] +) -> _GraphT: if graph is None: - graph = Graph() + graph = graph_type() for source in sources: GraphSource.from_source(source).load(graph, public_id) return graph @@ -67,6 +116,23 @@ def load_sources( @lru_cache(maxsize=None) def cached_graph( - sources: Tuple[Union[GraphSource, Path], ...], public_id: Optional[str] = None -) -> Graph: - return load_sources(*sources, public_id=public_id) + sources: Tuple[Union[GraphSource, Path], ...], + public_id: Optional[str] = None, + graph_type: Type[_GraphT] = Graph, # type: ignore[assignment] +) -> _GraphT: + return load_sources(*sources, public_id=public_id, graph_type=graph_type) + + +def load_from_python( + path: Path, + graph: Optional[_GraphT] = None, + graph_type: Type[_GraphT] = Graph, # type: ignore[assignment] +) -> _GraphT: + if graph is None: + graph = graph_type() + + mod = run_path(f"{path}") + if "populate_graph" not in mod: + raise ValueError(f"{path} does not contain a `populate_graph` function") + mod["populate_graph"](graph) + return graph diff --git a/test/utils/http.py b/test/utils/http.py index e40d2a8c8..a345a672b 100644 --- a/test/utils/http.py +++ b/test/utils/http.py @@ -1,10 +1,11 @@ +from __future__ import annotations + import collections import email.message import enum import random from contextlib import contextmanager from http.server import BaseHTTPRequestHandler, HTTPServer -from test.utils.wildcard import EQ_WILDCARD from threading import Thread from typing import ( Dict, @@ -20,6 +21,8 @@ ) from urllib.parse import ParseResult +from 
test.utils.wildcard import EQ_WILDCARD + __all__: List[str] = [] HeadersT = Union[Dict[str, List[str]], Iterable[Tuple[str, str]]] diff --git a/test/utils/httpfileserver.py b/test/utils/httpfileserver.py index 49c92e807..7dee493ab 100644 --- a/test/utils/httpfileserver.py +++ b/test/utils/httpfileserver.py @@ -7,11 +7,12 @@ from functools import lru_cache from http.server import BaseHTTPRequestHandler, HTTPServer from pathlib import Path -from test.utils.http import HeadersT, MethodName, MockHTTPRequest, apply_headers_to from typing import Dict, List, Optional, Sequence, Type from urllib.parse import parse_qs, urljoin, urlparse from uuid import uuid4 +from test.utils.http import HeadersT, MethodName, MockHTTPRequest, apply_headers_to + __all__: List[str] = [ "LocationType", "ProtoResource", diff --git a/test/utils/httpservermock.py b/test/utils/httpservermock.py index 6a87bf19c..41f6d5b19 100644 --- a/test/utils/httpservermock.py +++ b/test/utils/httpservermock.py @@ -1,13 +1,8 @@ +from __future__ import annotations + import logging from collections import defaultdict from http.server import BaseHTTPRequestHandler, HTTPServer -from test.utils.http import ( - MethodName, - MockHTTPRequest, - MockHTTPResponse, - apply_headers_to, - get_random_ip, -) from threading import Thread from types import TracebackType from typing import ( @@ -26,6 +21,14 @@ from unittest.mock import MagicMock, Mock from urllib.parse import parse_qs, urlparse +from test.utils.http import ( + MethodName, + MockHTTPRequest, + MockHTTPResponse, + apply_headers_to, + get_random_ip, +) + __all__: List[str] = ["make_spypair", "BaseHTTPServerMock", "ServedBaseHTTPServerMock"] if TYPE_CHECKING: @@ -42,7 +45,7 @@ def wrapper(self: Any, *args: Any, **kwargs: Any) -> Any: m(*args, **kwargs) return method(self, *args, **kwargs) - setattr(wrapper, "mock", m) # noqa + setattr(wrapper, "mock", m) return cast(GenericT, wrapper), m @@ -148,7 +151,7 @@ def address_string(self) -> str: def url(self) -> str: 
return f"http://{self.address_string}" - def __enter__(self) -> "ServedBaseHTTPServerMock": + def __enter__(self) -> ServedBaseHTTPServerMock: return self def __exit__( @@ -156,6 +159,6 @@ def __exit__( __exc_type: Optional[Type[BaseException]], __exc_value: Optional[BaseException], __traceback: Optional[TracebackType], - ) -> "te.Literal[False]": + ) -> te.Literal[False]: self.stop() return False diff --git a/test/utils/iri.py b/test/utils/iri.py index ad7419d59..4259e8762 100644 --- a/test/utils/iri.py +++ b/test/utils/iri.py @@ -2,19 +2,21 @@ Various utilities for working with IRIs and URIs. """ +from __future__ import annotations + import email.utils import http.client import logging import mimetypes from dataclasses import dataclass +from nturl2path import url2pathname as nt_url2pathname from pathlib import Path, PurePath, PurePosixPath, PureWindowsPath -from test.utils import ensure_suffix from typing import Callable, Optional, Set, Tuple, Type, TypeVar, Union from urllib.parse import quote, unquote, urljoin, urlparse, urlsplit, urlunsplit from urllib.request import BaseHandler, OpenerDirector, Request from urllib.response import addinfourl -from nturl2path import url2pathname as nt_url2pathname +from test.utils import ensure_suffix PurePathT = TypeVar("PurePathT", bound=PurePath) @@ -100,7 +102,7 @@ class URIMapping: local: str @classmethod - def from_tuple(cls, value: URIMappingTupleType) -> "URIMapping": + def from_tuple(cls, value: URIMappingTupleType) -> URIMapping: return cls(value[0], value[1]) @@ -145,8 +147,8 @@ def _map(self, value: Union[str, PurePath], to_local: bool = True) -> str: @classmethod def from_mappings( - cls, *values: Union["URIMapping", "URIMappingTupleType"] - ) -> "URIMapper": + cls, *values: Union[URIMapping, URIMappingTupleType] + ) -> URIMapper: result = set() for value in values: if isinstance(value, tuple): diff --git a/test/utils/literal.py b/test/utils/literal.py index 1b3f37988..75792f97a 100644 --- a/test/utils/literal.py 
+++ b/test/utils/literal.py @@ -1,14 +1,17 @@ from __future__ import annotations import builtins +import logging from dataclasses import dataclass -from typing import Any, Union +from typing import Any, Optional, Union +from xml.dom.minidom import DocumentFragment from rdflib.term import Literal, URIRef +from test.utils.outcome import NoExceptionChecker -@dataclass -class LiteralChecker: +@dataclass(frozen=True) +class LiteralChecker(NoExceptionChecker[Literal]): value: Union[builtins.ellipsis, Any] = ... language: Union[builtins.ellipsis, str, None] = ... datatype: Union[builtins.ellipsis, URIRef, None] = ... @@ -16,13 +19,40 @@ class LiteralChecker: lexical: Union[builtins.ellipsis, str] = ... def check(self, actual: Literal) -> None: + logging.debug( + "actual = %r, value = %r, ill_typed = %r", + actual, + actual.value, + actual.ill_typed, + ) if self.value is not Ellipsis: + if callable(self.value): + logging.debug(f"Checking value {actual.value} with {self.value}") + if isinstance(actual.value, DocumentFragment): + logging.debug(f"childNodes = {actual.value.childNodes}") + assert self.value(actual.value) + else: + assert self.value == actual.value assert self.value == actual.value if self.lexical is not Ellipsis: - assert self.lexical == f"{actual}" + assert self.lexical == f"{actual}", "Literal lexical form does not match" if self.ill_typed is not Ellipsis: - assert self.ill_typed == actual.ill_typed + assert ( + self.ill_typed == actual.ill_typed + ), "Literal ill_typed flag does not match" if self.language is not Ellipsis: - assert self.language == actual.language + assert self.language == actual.language, "Literal language does not match" if self.datatype is not Ellipsis: - assert self.datatype == actual.datatype + assert self.datatype == actual.datatype, "Literal datatype does not match" + + +def literal_idfn(value: Any) -> Optional[str]: + if callable(value): + try: + literal = value() + except Exception: + return None + return f"{literal}" + if 
isinstance(value, LiteralChecker): + return f"{value}" + return None diff --git a/test/utils/manifest.py b/test/utils/manifest.py index 940f7f8b7..8fc092d2f 100644 --- a/test/utils/manifest.py +++ b/test/utils/manifest.py @@ -1,11 +1,11 @@ from __future__ import annotations import logging -from test.utils.namespace import DAWGT, MF, QT, RDFT, UT from typing import Iterable, List, NamedTuple, Optional, Tuple, Union, cast from rdflib import RDF, RDFS, Graph from rdflib.term import Identifier, Node, URIRef +from test.utils.namespace import DAWGT, MF, QT, RDFT, UT logger = logging.getLogger(__name__) diff --git a/test/utils/namespace/_DAWGT.py b/test/utils/namespace/_DAWGT.py index d8863f4b7..64c5c8a3e 100644 --- a/test/utils/namespace/_DAWGT.py +++ b/test/utils/namespace/_DAWGT.py @@ -8,12 +8,16 @@ class DAWGT(DefinedNamespace): ResultForm: URIRef # Super class of all result forms Status: URIRef # Super class of all test status classes - approval: URIRef # The approval status of the test with respect to the working group. + approval: ( + URIRef # The approval status of the test with respect to the working group. + ) approvedBy: URIRef # Contains a reference to the minutes of the RDF Data Access Working Group where the test case status was last changed. description: URIRef # A human-readable summary of the test case. issue: URIRef # Contains a pointer to the associated issue on the RDF Data Access Working Group Tracking document. resultForm: URIRef # None - warning: URIRef # Indicates that while the test should pass, it may generate a warning. + warning: ( + URIRef # Indicates that while the test should pass, it may generate a warning. 
+ ) NotClassified: URIRef # Class of tests that have not been classified Approved: URIRef # Class of tests that are Approved Rejected: URIRef # Class of tests that are Rejected diff --git a/test/utils/namespace/_EARL.py b/test/utils/namespace/_EARL.py index 9d3dd970c..5699973b5 100644 --- a/test/utils/namespace/_EARL.py +++ b/test/utils/namespace/_EARL.py @@ -16,26 +16,38 @@ class EARL(DefinedNamespace): Fail: URIRef # the class of outcomes to denote failing a test inapplicable: URIRef # the test is not applicable to the subject info: URIRef # additional warnings or error messages in a human-readable form - mainAssertor: URIRef # assertor that is primarily responsible for performing the test + mainAssertor: ( + URIRef # assertor that is primarily responsible for performing the test + ) manual: URIRef # where the test was carried out by human evaluators mode: URIRef # mode in which the test was performed NotApplicable: URIRef # the class of outcomes to denote the test is not applicable - NotTested: URIRef # the class of outcomes to denote the test has not been carried out + NotTested: ( + URIRef # the class of outcomes to denote the test has not been carried out + ) outcome: URIRef # outcome of performing the test OutcomeValue: URIRef # a discrete value that describes a resulting condition from carrying out the test passed: URIRef # the subject passed the test Pass: URIRef # the class of outcomes to denote passing a test - pointer: URIRef # location within a test subject that are most relevant to a test result + pointer: ( + URIRef # location within a test subject that are most relevant to a test result + ) result: URIRef # result of an assertion semiAuto: URIRef # where the test was partially carried out by software tools, but where human input or judgment was still required to decide or help decide the outcome of the test Software: URIRef # any piece of software such as an authoring tool, browser, or evaluation tool subject: URIRef # test subject of an assertion - 
TestCase: URIRef # an atomic test, usually one that is a partial test for a requirement - TestCriterion: URIRef # a testable statement, usually one that can be passed or failed + TestCase: ( + URIRef # an atomic test, usually one that is a partial test for a requirement + ) + TestCriterion: ( + URIRef # a testable statement, usually one that can be passed or failed + ) TestMode: URIRef # describes how a test was carried out TestRequirement: URIRef # a higher-level requirement that is tested by executing one or more sub-tests TestResult: URIRef # the actual result of performing the test - TestSubject: URIRef # the class of things that have been tested against some test criterion + TestSubject: ( + URIRef # the class of things that have been tested against some test criterion + ) test: URIRef # test criterion of an assertion undisclosed: URIRef # where the exact testing process is undisclosed unknownMode: URIRef # where the testing process is unknown or undetermined diff --git a/test/utils/namespace/_MF.py b/test/utils/namespace/_MF.py index af3215fe0..36cacb616 100644 --- a/test/utils/namespace/_MF.py +++ b/test/utils/namespace/_MF.py @@ -6,7 +6,9 @@ class MF(DefinedNamespace): _fail = True _NS = Namespace("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#") - IllFormedLiterals: URIRef # Tests that involve lexical forms which are illegal for the datatype + IllFormedLiterals: ( + URIRef # Tests that involve lexical forms which are illegal for the datatype + ) KnownTypesDefault2Neq: URIRef # Values in disjoint value spaces are not equal LangTagAwareness: URIRef # Tests that require langauge tag handling in FILTERs LaxCardinality: URIRef # The given mf:result for a test with an mf:resultCardinality of mf:ReducedCardinalityTest is the results as if the REDUCED keyword were omitted. 
To pass such a test, an implementation must produce a result set with each solution in the expected results appearing at least once and no more than the number of times it appears in the expected results. Of course, there must also be no results produced that are not in the expected results. diff --git a/test/utils/namespace/__init__.py b/test/utils/namespace/__init__.py index f22db1363..397c97895 100644 --- a/test/utils/namespace/__init__.py +++ b/test/utils/namespace/__init__.py @@ -1,3 +1,5 @@ +from rdflib.namespace import Namespace + from ._DAWGT import DAWGT from ._EARL import EARL from ._MF import MF @@ -5,6 +7,12 @@ from ._RDFT import RDFT from ._UT import UT +EGDC = Namespace("http://example.com/") +EGDO = Namespace("http://example.org/") +EGSCHEME = Namespace("example:") +EGURN = Namespace("urn:example:") + + __all__ = [ "EARL", "RDFT", @@ -12,4 +20,8 @@ "DAWGT", "QT", "UT", + "EGDC", + "EGDO", + "EGSCHEME", + "EGURN", ] diff --git a/test/utils/outcome.py b/test/utils/outcome.py new file mode 100644 index 000000000..82a96138c --- /dev/null +++ b/test/utils/outcome.py @@ -0,0 +1,213 @@ +from __future__ import annotations + +import abc +import contextlib +import logging +from collections.abc import Iterable as IterableABC +from dataclasses import dataclass +from typing import ( + Any, + Callable, + Dict, + Generator, + Generic, + Iterable, + NoReturn, + Optional, + Pattern, + Sequence, + Type, + TypeVar, + Union, + cast, +) + +import pytest +from pytest import ExceptionInfo + +AnyT = TypeVar("AnyT") + +OutcomePrimitive = Union[ + AnyT, Callable[[AnyT], None], "OutcomeChecker[AnyT]", Type[Exception], Exception +] + +OutcomePrimitives = Union[ + Iterable[Union[AnyT, Callable[[AnyT], None], "OutcomeChecker[AnyT]"]], + OutcomePrimitive, +] + + +class OutcomeChecker(abc.ABC, Generic[AnyT]): + """ + Validates expected outcomes for tests. + + Useful for parameterized test that can result in values or + exceptions. 
+ """ + + @abc.abstractmethod + def check(self, actual: AnyT) -> None: + """ + Check the actual outcome against the expectation. + + This should run inside the checker's context. + + :param outcome: The actual outcome of the test. + :raises AssertionError: If the outcome does not match the + expectation. + :raises RuntimeError: If this method is called when no outcome + is expected. + """ + ... + + @contextlib.contextmanager + @abc.abstractmethod + def context(self) -> Generator[Optional[ExceptionInfo[Exception]], None, None]: + """ + The context in which the test code should run. + + This is necessary for checking exception outcomes. + + :return: A context manager that yields the exception info for + any exceptions that were raised in this context. + :raises AssertionError: If the test does not raise an exception + when one is expected, or if the exception does not match the + expectation. + """ + ... + + @classmethod + def from_primitive( + cls, + primitive: OutcomePrimitive[AnyT], + ) -> OutcomeChecker[AnyT]: + checker = cls._from_special(primitive) + if checker is not None: + return checker + return ValueChecker(cast(AnyT, primitive)) + + @classmethod + def _from_special( + cls, + primitive: Union[ + AnyT, + Callable[[AnyT], None], + OutcomeChecker[AnyT], + Type[Exception], + Exception, + ], + ) -> Optional[OutcomeChecker[AnyT]]: + if isinstance(primitive, OutcomeChecker): + return primitive + if isinstance(primitive, type) and issubclass(primitive, Exception): + return ExceptionChecker(primitive) + if isinstance(primitive, Exception): + return ExceptionChecker(type(primitive), match=primitive.args[0]) + if callable(primitive): + return CallableChecker(cast(Callable[[AnyT], None], primitive)) + return None + + @classmethod + def from_primitives( + cls, + primitives: OutcomePrimitives[AnyT], + ) -> OutcomeChecker[AnyT]: + checker = cls._from_special(primitives) # type: ignore[arg-type] + if checker is not None: + return checker + if isinstance(primitives, 
IterableABC) and not isinstance( + primitives, (str, bytes) + ): + primitives = iter(primitives) + return AggregateChecker([cls.from_primitive(p) for p in primitives]) + return ValueChecker(cast(AnyT, primitives)) + + +@dataclass(frozen=True) +class NoExceptionChecker(OutcomeChecker[AnyT]): + """ + Base class for checkers that do not expect exceptions. + """ + + @contextlib.contextmanager + def context(self) -> Generator[None, None, None]: + yield None + + +@dataclass(frozen=True) +class AggregateChecker(NoExceptionChecker[AnyT]): + """ + Validates that the outcome matches all of the given checkers. + """ + + checkers: Sequence[OutcomeChecker[AnyT]] + + def check(self, actual: AnyT) -> None: + for checker in self.checkers: + if isinstance(checker, ExceptionChecker): + raise ValueError( + "AggregateChecker should never contain ExceptionChecker" + ) + checker.check(actual) + + +@dataclass(frozen=True) +class ValueChecker(NoExceptionChecker[AnyT]): + """ + Validates that the outcome is a specific value. + + :param value: The expected value. + """ + + expected: AnyT + + def check(self, actual: AnyT) -> None: + assert self.expected == actual + + +@dataclass(frozen=True) +class CallableChecker(NoExceptionChecker[AnyT]): + """ + Validates the outcome with a callable. + + :param callable: The callable that will be called with the outcome + to validate it. + """ + + callable: Callable[[AnyT], None] + + def check(self, actual: AnyT) -> None: + self.callable(actual) + + +@dataclass(frozen=True) +class ExceptionChecker(OutcomeChecker[AnyT]): + """ + Validates that the outcome is a specific exception. + + :param type: The expected exception type. + :param match: A regular expression or string that the exception + message must match. + :param attributes: A dictionary of attributes that the exception + must have and their expected values. 
+ """ + + type: Type[Exception] + match: Optional[Union[Pattern[str], str]] = None + attributes: Optional[Dict[str, Any]] = None + + def check(self, actual: AnyT) -> NoReturn: + raise RuntimeError("ExceptionResult.check_result should never be called") + + def _check_attributes(self, exception: Exception) -> None: + if self.attributes is not None: + for key, value in self.attributes.items(): + logging.debug("checking exception attribute %s=%r", key, value) + assert hasattr(exception, key) + assert getattr(exception, key) == value + + @contextlib.contextmanager + def context(self) -> Generator[ExceptionInfo[Exception], None, None]: + with pytest.raises(self.type, match=self.match) as catcher: + yield catcher + self._check_attributes(catcher.value) diff --git a/test/utils/result.py b/test/utils/result.py index 0a634a722..6fb8b6e80 100644 --- a/test/utils/result.py +++ b/test/utils/result.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import enum import logging import pprint @@ -25,7 +27,7 @@ class ResultType(str, enum.Enum): @classmethod @lru_cache(maxsize=None) - def info_dict(cls) -> "ResultTypeInfoDict": + def info_dict(cls) -> ResultTypeInfoDict: return ResultTypeInfo.make_dict( ResultTypeInfo(ResultType.CONSTRUCT, {ResultTypeTrait.GRAPH_RESULT}), ResultTypeInfo(ResultType.DESCRIBE, {ResultTypeTrait.GRAPH_RESULT}), @@ -34,12 +36,12 @@ def info_dict(cls) -> "ResultTypeInfoDict": ) @property - def info(self) -> "ResultTypeInfo": + def info(self) -> ResultTypeInfo: return self.info_dict()[self] @classmethod @lru_cache(maxsize=None) - def set(cls) -> Set["ResultType"]: + def set(cls) -> Set[ResultType]: return set(*cls) @@ -49,7 +51,7 @@ class ResultTypeInfo: traits: Set[ResultTypeTrait] @classmethod - def make_dict(cls, *items: "ResultTypeInfo") -> ResultTypeInfoDict: + def make_dict(cls, *items: ResultTypeInfo) -> ResultTypeInfoDict: return dict((info.type, info) for info in items) @@ -188,7 +190,7 @@ class ResultFormat(str, enum.Enum): @classmethod 
@lru_cache(maxsize=None) - def info_dict(cls) -> "ResultFormatInfoDict": + def info_dict(cls) -> ResultFormatInfoDict: return ResultFormatInfo.make_dict( ResultFormatInfo( ResultFormat.CSV, @@ -246,17 +248,17 @@ def info_dict(cls) -> "ResultFormatInfoDict": ) @property - def info(self) -> "ResultFormatInfo": + def info(self) -> ResultFormatInfo: return self.info_dict()[self] @classmethod @lru_cache(maxsize=None) - def set(cls) -> Set["ResultFormat"]: + def set(cls) -> Set[ResultFormat]: return set(cls) @classmethod @lru_cache(maxsize=None) - def info_set(cls) -> Set["ResultFormatInfo"]: + def info_set(cls) -> Set[ResultFormatInfo]: return {format.info for format in cls.set()} @@ -268,9 +270,9 @@ class ResultFormatInfo: encodings: FrozenSet[str] @classmethod - def make_dict(cls, *items: "ResultFormatInfo") -> ResultFormatInfoDict: + def make_dict(cls, *items: ResultFormatInfo) -> ResultFormatInfoDict: return dict((info.format, info) for info in items) @property - def name(self) -> "str": + def name(self) -> str: return f"{self.format.value}" diff --git a/test/utils/sparql_checker.py b/test/utils/sparql_checker.py index 477c9d3c8..79a4e71e9 100644 --- a/test/utils/sparql_checker.py +++ b/test/utils/sparql_checker.py @@ -1,17 +1,14 @@ """This runs the nt tests for the W3C RDF Working Group's N-Quads test suite.""" + +from __future__ import annotations + import enum import logging import pprint from contextlib import ExitStack, contextmanager from dataclasses import dataclass, field from io import BytesIO, StringIO -from test.utils import BNodeHandling, GraphHelper -from test.utils.dawg_manifest import Manifest, ManifestEntry -from test.utils.iri import URIMapper -from test.utils.namespace import MF, QT, UT -from test.utils.result import ResultType, assert_bindings_collections_equal -from test.utils.urlopen import context_urlopener from typing import Dict, Generator, Optional, Set, Tuple, Type, Union, cast from urllib.parse import urljoin @@ -28,6 +25,12 @@ from 
rdflib.query import Result from rdflib.term import BNode, IdentifiedNode, Identifier, Literal, Node, URIRef from rdflib.util import guess_format +from test.utils import BNodeHandling, GraphHelper +from test.utils.dawg_manifest import Manifest, ManifestEntry +from test.utils.iri import URIMapper +from test.utils.namespace import MF, QT, UT +from test.utils.result import ResultType, assert_bindings_collections_equal +from test.utils.urlopen import context_urlopener logger = logging.getLogger(__name__) @@ -69,7 +72,7 @@ def __post_init__(self) -> None: self.expected_outcome_property = UT.result @classmethod - def make_dict(cls, *test_types: "TypeInfo") -> Dict[Identifier, "TypeInfo"]: + def make_dict(cls, *test_types: TypeInfo) -> Dict[Identifier, TypeInfo]: return dict((test_type.id, test_type) for test_type in test_types) @@ -97,7 +100,7 @@ class GraphData: label: Optional[Literal] = None @classmethod - def from_graph(cls, graph: Graph, identifier: Identifier) -> "GraphData": + def from_graph(cls, graph: Graph, identifier: Identifier) -> GraphData: if isinstance(identifier, URIRef): return cls(identifier) elif isinstance(identifier, BNode): @@ -290,11 +293,11 @@ def check_syntax(monkeypatch: MonkeyPatch, entry: SPARQLEntry) -> None: if entry.type_info.negative: catcher = xstack.enter_context(pytest.raises(Exception)) if entry.type_info.query_type is QueryType.UPDATE: - tree = parseUpdate(query_text) - translateUpdate(tree) + parse_tree = parseUpdate(query_text) + translateUpdate(parse_tree) elif entry.type_info.query_type is QueryType.QUERY: - tree = parseQuery(query_text) - translateQuery(tree) + query_tree = parseQuery(query_text) + translateQuery(query_tree) if catcher is not None: assert catcher.value is not None logging.info("catcher.value = %s", catcher.value) diff --git a/test/utils/test/test_httpservermock.py b/test/utils/test/test_httpservermock.py index fe147c9ec..3b06de32c 100644 --- a/test/utils/test/test_httpservermock.py +++ 
b/test/utils/test/test_httpservermock.py @@ -1,10 +1,11 @@ -from test.utils.http import MethodName, MockHTTPResponse, ctx_http_handler -from test.utils.httpservermock import BaseHTTPServerMock, ServedBaseHTTPServerMock from urllib.error import HTTPError from urllib.request import Request, urlopen import pytest +from test.utils.http import MethodName, MockHTTPResponse, ctx_http_handler +from test.utils.httpservermock import BaseHTTPServerMock, ServedBaseHTTPServerMock + def test_base() -> None: httpmock = BaseHTTPServerMock() diff --git a/test/utils/test/test_iri.py b/test/utils/test/test_iri.py index 43c121337..3554887b9 100644 --- a/test/utils/test/test_iri.py +++ b/test/utils/test/test_iri.py @@ -1,11 +1,14 @@ +from __future__ import annotations + import logging from contextlib import ExitStack from pathlib import PurePath, PurePosixPath, PureWindowsPath -from test.utils.iri import file_uri_to_path, rebase_url from typing import Optional, Type, Union import pytest +from test.utils.iri import file_uri_to_path, rebase_url + @pytest.mark.parametrize( ["file_uri", "path_class", "expected_result"], diff --git a/test/utils/test/test_outcome.py b/test/utils/test/test_outcome.py new file mode 100644 index 000000000..8299bcaca --- /dev/null +++ b/test/utils/test/test_outcome.py @@ -0,0 +1,74 @@ +from __future__ import annotations + +from contextlib import ExitStack +from typing import Any, Callable, NoReturn, Optional, Type, Union, cast + +import pytest + +from test.utils.outcome import ExceptionChecker, OutcomeChecker + + +def _raise( + what: Union[Type[Exception], Callable[..., Exception]], + *args: Any, + **kwargs: Any, +) -> NoReturn: + if isinstance(what, type) and issubclass(what, Exception): + raise what(*args, **kwargs) + elif callable(what): + what_fn: Callable[..., Exception] = cast(Callable[..., Exception], what) + raise what_fn(*args, **kwargs) + + +@pytest.mark.parametrize( + ("action", "checker", "expected_exception"), + [ + (lambda: _raise(ValueError), 
ExceptionChecker(ValueError), None), + (None, ExceptionChecker(ValueError), RuntimeError), + ( + lambda: _raise(ValueError, "zzz"), + OutcomeChecker.from_primitive(ValueError(r"z.z")), + None, + ), + ( + lambda: _raise(ValueError, "zzz"), + OutcomeChecker.from_primitive(ValueError(r"zaz")), + AssertionError, + ), + ( + lambda: _raise(ValueError, "ae"), + ExceptionChecker(ValueError, r"ae", {"Not": "Found"}), + AssertionError, + ), + (33, OutcomeChecker.from_primitive(33), None), + (33, OutcomeChecker.from_primitive(44), AssertionError), + ( + lambda: _raise(TypeError, "something"), + OutcomeChecker.from_primitive(TypeError), + None, + ), + ( + lambda: 3, + OutcomeChecker.from_primitive(TypeError), + RuntimeError, + ), + ], +) +def test_checker( + action: Union[Callable[[], Any], Any], + checker: ExceptionChecker, + expected_exception: Optional[Type[BaseException]], +) -> None: + """ + Given the action, the checker raises the expected exception, or does + not raise anything if ``expected_exception`` is None. 
+ """ + with ExitStack() as xstack: + if expected_exception is not None: + xstack.enter_context(pytest.raises(expected_exception)) + with checker.context(): + if callable(action): + actual_result = action() + else: + actual_result = action + checker.check(actual_result) diff --git a/test/utils/test/test_result.py b/test/utils/test/test_result.py index 1d9325791..432ea4755 100644 --- a/test/utils/test/test_result.py +++ b/test/utils/test/test_result.py @@ -1,12 +1,13 @@ +from __future__ import annotations + from contextlib import ExitStack -from test.utils.result import BindingsCollectionType, assert_bindings_collections_equal -from typing import Type, Union +from typing import Optional, Type, Union import pytest -from pyparsing import Optional from rdflib.namespace import XSD from rdflib.term import BNode, Literal, URIRef, Variable +from test.utils.result import BindingsCollectionType, assert_bindings_collections_equal @pytest.mark.parametrize( diff --git a/test/utils/test/test_testutils.py b/test/utils/test/test_testutils.py index 44a0292ec..4de78bcf4 100644 --- a/test/utils/test/test_testutils.py +++ b/test/utils/test/test_testutils.py @@ -1,7 +1,15 @@ +from __future__ import annotations + import os from contextlib import ExitStack from dataclasses import dataclass from pathlib import PurePosixPath, PureWindowsPath +from typing import Any, List, Optional, Tuple, Type, Union + +import pytest + +from rdflib.graph import ConjunctiveGraph, Dataset, Graph +from rdflib.term import URIRef from test.utils import ( COLLAPSED_BNODE, BNodeHandling, @@ -9,12 +17,6 @@ affix_tuples, file_uri_to_path, ) -from typing import Any, List, Optional, Tuple, Type, Union - -import pytest - -from rdflib.graph import ConjunctiveGraph, Dataset, Graph -from rdflib.term import URIRef def check( diff --git a/test/utils/urlopen.py b/test/utils/urlopen.py index fb6597077..9cfe76a88 100644 --- a/test/utils/urlopen.py +++ b/test/utils/urlopen.py @@ -1,3 +1,5 @@ +from __future__ import annotations 
+ import urllib.request from contextlib import contextmanager from typing import Generator, Optional diff --git a/test_reports/rdflib_w3c_sparql10-HEAD.ttl b/test_reports/rdflib_w3c_sparql10-HEAD.ttl index f43162420..78997b01c 100644 --- a/test_reports/rdflib_w3c_sparql10-HEAD.ttl +++ b/test_reports/rdflib_w3c_sparql10-HEAD.ttl @@ -323,7 +323,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:untested ] ; earl:subject ; earl:test . @@ -331,7 +331,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:untested ] ; earl:subject ; earl:test . diff --git a/test_reports/rdflib_w3c_trig-HEAD.ttl b/test_reports/rdflib_w3c_trig-HEAD.ttl index 7c22104d2..02e67f8f2 100644 --- a/test_reports/rdflib_w3c_trig-HEAD.ttl +++ b/test_reports/rdflib_w3c_trig-HEAD.ttl @@ -923,7 +923,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:passed ] ; earl:subject ; earl:test . @@ -971,7 +971,7 @@ earl:assertedBy ; earl:mode earl:automatic ; earl:result [ a earl:TestResult ; - earl:outcome earl:failed ] ; + earl:outcome earl:passed ] ; earl:subject ; earl:test . 
diff --git a/tox.ini b/tox.ini index d2ecc891a..e5baffcc5 100644 --- a/tox.ini +++ b/tox.ini @@ -1,9 +1,9 @@ # https://tox.wiki/en/latest/user_guide.html # https://tox.wiki/en/latest/config.html [tox] +minversion = 4.0.0 envlist = - flake8,py3{7,8,9,10,11},covreport,docs,precommit -isolated_build = True + lint,py3{8,9,10,11},covreport,docs,precommit toxworkdir={env:TOX_WORK_DIR:{tox_root}/.tox} [testenv] @@ -15,13 +15,17 @@ setenv = COVERAGE_FILE = {env:COVERAGE_FILE:{toxinidir}/.coverage.{envname}} MYPY_CACHE_DIR = {envdir}/.mypy_cache docs: POETRY_ARGS_docs = --only=docs - extensive: POETRY_ARGS_extensive = --extras=berkeleydb --extras=networkx + extensive: POETRY_ARGS_extensive = --extras=berkeleydb --extras=networkx --extras=html lxml: POETRY_ARGS_lxml = --extras=lxml commands_pre = - py3{7,8,9,10,11}: python -c 'import os; print("\n".join(f"{key}={value}" for key, value in os.environ.items()))' - py3{7,8,9,10,11}: poetry lock --check - py3{7,8,9,10,11}: poetry install --no-root --only=main --only=dev --only=flake8 --only=tests --extras=html {env:POETRY_ARGS_docs:} {env:POETRY_ARGS_extensive:} {env:POETRY_ARGS_lxml:} {env:POETRY_ARGS:} --sync + py3{8,9,10,11}: python -c 'import os; print("\n".join(f"{key}={value}" for key, value in os.environ.items()))' + py3{8,9,10,11}: poetry check --lock + py3{8,9,10,11}: poetry install --no-root --only=main --only=dev --only=lint --only=tests {env:POETRY_ARGS_docs:} {env:POETRY_ARGS_extensive:} {env:POETRY_ARGS_lxml:} {env:POETRY_ARGS:} --sync commands = + min: python -c 'import sys; print("min qualifier not supported on this environment"); sys.exit(1);' + poetry config --list + poetry env info + poetry run python -m pip freeze {env:TOX_EXTRA_COMMAND:} {env:TOX_MYPY_COMMAND:poetry run python -m mypy --show-error-context --show-error-codes --junit-xml=test_reports/{env:TOX_JUNIT_XML_PREFIX:}mypy-junit.xml} {posargs:poetry run {env:TOX_TEST_HARNESS:} pytest -ra --tb=native 
{env:TOX_PYTEST_ARGS:--junit-xml=test_reports/{env:TOX_JUNIT_XML_PREFIX:}pytest-junit.xml --cov --cov-report=} {env:TOX_PYTEST_EXTRA_ARGS:}} @@ -30,7 +34,7 @@ commands = [testenv:covreport] skip_install = true parallel_show_output = true -depends = py3{7,8,9,10}{-extensive,}{-docs,} +depends = py3{8,9,10,11}{-extensive,}{-docs,} setenv = COVERAGE_FILE= commands_pre = @@ -39,12 +43,12 @@ commands = poetry run python -m coverage combine poetry run python -m coverage report -[testenv:flake8] +[testenv:lint] skip_install = true commands_pre = - poetry install --no-root --only=flake8 + poetry install --no-root --only=lint commands = - poetry run python -m flakeheaven lint + poetry run ruff check . [testenv:docs] @@ -60,7 +64,7 @@ commands_pre = commands = poetry run sphinx-build -T -W -b html -d {envdir}/doctree docs docs/_build/html -[testenv:py37-extensive-min] +[testenv:py38-extensive-min] base = void deps = pytest==7.*