git reimport
This commit is contained in:
29
Pipfile
Normal file
29
Pipfile
Normal file
@@ -0,0 +1,29 @@
|
||||
[[source]]
|
||||
url = "https://pypi.org/simple"
|
||||
verify_ssl = true
|
||||
name = "pypi"
|
||||
|
||||
[packages]
|
||||
requests = "*"
|
||||
aiohttp = ">=3.4.4"
|
||||
pytest = "*"
|
||||
sqlalchemy = "*"
|
||||
alembic = "*"
|
||||
pandas = "*"
|
||||
"bs4" = "*"
|
||||
numpy = "*"
|
||||
regex = "*"
|
||||
flask = "*"
|
||||
pillow = "*"
|
||||
xmltodict = "*"
|
||||
tqdm = "*"
|
||||
pytest-asyncio = "*"
|
||||
pyyaml = "*"
|
||||
coverage = "*"
|
||||
python-slugify = "*"
|
||||
protobuf = "*"
|
||||
|
||||
[dev-packages]
|
||||
|
||||
[requires]
|
||||
python_version = "3.7"
|
546
Pipfile.lock
generated
Normal file
546
Pipfile.lock
generated
Normal file
@@ -0,0 +1,546 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "70b1c1d266d50df177d71c905bd2ce853fdbf5fff46d3832f4c090f18ec3870d"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.7"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"aiohttp": {
|
||||
"hashes": [
|
||||
"sha256:00d198585474299c9c3b4f1d5de1a576cc230d562abc5e4a0e81d71a20a6ca55",
|
||||
"sha256:0155af66de8c21b8dba4992aaeeabf55503caefae00067a3b1139f86d0ec50ed",
|
||||
"sha256:09654a9eca62d1bd6d64aa44db2498f60a5c1e0ac4750953fdd79d5c88955e10",
|
||||
"sha256:199f1d106e2b44b6dacdf6f9245493c7d716b01d0b7fbe1959318ba4dc64d1f5",
|
||||
"sha256:296f30dedc9f4b9e7a301e5cc963012264112d78a1d3094cd83ef148fdf33ca1",
|
||||
"sha256:368ed312550bd663ce84dc4b032a962fcb3c7cae099dbbd48663afc305e3b939",
|
||||
"sha256:40d7ea570b88db017c51392349cf99b7aefaaddd19d2c78368aeb0bddde9d390",
|
||||
"sha256:629102a193162e37102c50713e2e31dc9a2fe7ac5e481da83e5bb3c0cee700aa",
|
||||
"sha256:6d5ec9b8948c3d957e75ea14d41e9330e1ac3fed24ec53766c780f82805140dc",
|
||||
"sha256:87331d1d6810214085a50749160196391a712a13336cd02ce1c3ea3d05bcf8d5",
|
||||
"sha256:9a02a04bbe581c8605ac423ba3a74999ec9d8bce7ae37977a3d38680f5780b6d",
|
||||
"sha256:9c4c83f4fa1938377da32bc2d59379025ceeee8e24b89f72fcbccd8ca22dc9bf",
|
||||
"sha256:9cddaff94c0135ee627213ac6ca6d05724bfe6e7a356e5e09ec57bd3249510f6",
|
||||
"sha256:a25237abf327530d9561ef751eef9511ab56fd9431023ca6f4803f1994104d72",
|
||||
"sha256:a5cbd7157b0e383738b8e29d6e556fde8726823dae0e348952a61742b21aeb12",
|
||||
"sha256:a97a516e02b726e089cffcde2eea0d3258450389bbac48cbe89e0f0b6e7b0366",
|
||||
"sha256:acc89b29b5f4e2332d65cd1b7d10c609a75b88ef8925d487a611ca788432dfa4",
|
||||
"sha256:b05bd85cc99b06740aad3629c2585bda7b83bd86e080b44ba47faf905fdf1300",
|
||||
"sha256:c2bec436a2b5dafe5eaeb297c03711074d46b6eb236d002c13c42f25c4a8ce9d",
|
||||
"sha256:cc619d974c8c11fe84527e4b5e1c07238799a8c29ea1c1285149170524ba9303",
|
||||
"sha256:d4392defd4648badaa42b3e101080ae3313e8f4787cb517efd3f5b8157eaefd6",
|
||||
"sha256:e1c3c582ee11af7f63a34a46f0448fca58e59889396ffdae1f482085061a2889"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.5.4"
|
||||
},
|
||||
"alembic": {
|
||||
"hashes": [
|
||||
"sha256:16505782b229007ae905ef9e0ae6e880fddafa406f086ac7d442c1aaf712f8c2"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.0.7"
|
||||
},
|
||||
"async-timeout": {
|
||||
"hashes": [
|
||||
"sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f",
|
||||
"sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3"
|
||||
],
|
||||
"version": "==3.0.1"
|
||||
},
|
||||
"atomicwrites": {
|
||||
"hashes": [
|
||||
"sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
|
||||
"sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
|
||||
],
|
||||
"version": "==1.3.0"
|
||||
},
|
||||
"attrs": {
|
||||
"hashes": [
|
||||
"sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69",
|
||||
"sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"
|
||||
],
|
||||
"version": "==18.2.0"
|
||||
},
|
||||
"beautifulsoup4": {
|
||||
"hashes": [
|
||||
"sha256:034740f6cb549b4e932ae1ab975581e6103ac8f942200a0e9759065984391858",
|
||||
"sha256:945065979fb8529dd2f37dbb58f00b661bdbcbebf954f93b32fdf5263ef35348",
|
||||
"sha256:ba6d5c59906a85ac23dadfe5c88deaf3e179ef565f4898671253e50a78680718"
|
||||
],
|
||||
"version": "==4.7.1"
|
||||
},
|
||||
"bs4": {
|
||||
"hashes": [
|
||||
"sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.0.1"
|
||||
},
|
||||
"certifi": {
|
||||
"hashes": [
|
||||
"sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7",
|
||||
"sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"
|
||||
],
|
||||
"version": "==2018.11.29"
|
||||
},
|
||||
"chardet": {
|
||||
"hashes": [
|
||||
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
|
||||
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
|
||||
],
|
||||
"version": "==3.0.4"
|
||||
},
|
||||
"click": {
|
||||
"hashes": [
|
||||
"sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13",
|
||||
"sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
|
||||
],
|
||||
"version": "==7.0"
|
||||
},
|
||||
"coverage": {
|
||||
"hashes": [
|
||||
"sha256:09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f",
|
||||
"sha256:0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe",
|
||||
"sha256:0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d",
|
||||
"sha256:10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0",
|
||||
"sha256:1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607",
|
||||
"sha256:1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d",
|
||||
"sha256:2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b",
|
||||
"sha256:447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3",
|
||||
"sha256:46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e",
|
||||
"sha256:4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815",
|
||||
"sha256:510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36",
|
||||
"sha256:5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1",
|
||||
"sha256:5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14",
|
||||
"sha256:5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c",
|
||||
"sha256:6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794",
|
||||
"sha256:6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b",
|
||||
"sha256:77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840",
|
||||
"sha256:828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd",
|
||||
"sha256:85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82",
|
||||
"sha256:8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952",
|
||||
"sha256:a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389",
|
||||
"sha256:aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f",
|
||||
"sha256:ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4",
|
||||
"sha256:b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da",
|
||||
"sha256:bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647",
|
||||
"sha256:c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d",
|
||||
"sha256:d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42",
|
||||
"sha256:d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478",
|
||||
"sha256:da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b",
|
||||
"sha256:ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb",
|
||||
"sha256:ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==4.5.2"
|
||||
},
|
||||
"flask": {
|
||||
"hashes": [
|
||||
"sha256:2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48",
|
||||
"sha256:a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.0.2"
|
||||
},
|
||||
"idna": {
|
||||
"hashes": [
|
||||
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
|
||||
"sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
|
||||
],
|
||||
"version": "==2.8"
|
||||
},
|
||||
"itsdangerous": {
|
||||
"hashes": [
|
||||
"sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
|
||||
"sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
|
||||
],
|
||||
"version": "==1.1.0"
|
||||
},
|
||||
"jinja2": {
|
||||
"hashes": [
|
||||
"sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd",
|
||||
"sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"
|
||||
],
|
||||
"version": "==2.10"
|
||||
},
|
||||
"mako": {
|
||||
"hashes": [
|
||||
"sha256:4e02fde57bd4abb5ec400181e4c314f56ac3e49ba4fb8b0d50bba18cb27d25ae"
|
||||
],
|
||||
"version": "==1.0.7"
|
||||
},
|
||||
"markupsafe": {
|
||||
"hashes": [
|
||||
"sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432",
|
||||
"sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b",
|
||||
"sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9",
|
||||
"sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af",
|
||||
"sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834",
|
||||
"sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd",
|
||||
"sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d",
|
||||
"sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7",
|
||||
"sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b",
|
||||
"sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3",
|
||||
"sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c",
|
||||
"sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2",
|
||||
"sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7",
|
||||
"sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36",
|
||||
"sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1",
|
||||
"sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e",
|
||||
"sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1",
|
||||
"sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c",
|
||||
"sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856",
|
||||
"sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550",
|
||||
"sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492",
|
||||
"sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672",
|
||||
"sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401",
|
||||
"sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6",
|
||||
"sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6",
|
||||
"sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c",
|
||||
"sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd",
|
||||
"sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1"
|
||||
],
|
||||
"version": "==1.1.0"
|
||||
},
|
||||
"more-itertools": {
|
||||
"hashes": [
|
||||
"sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
|
||||
"sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
|
||||
"sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
|
||||
],
|
||||
"version": "==5.0.0"
|
||||
},
|
||||
"multidict": {
|
||||
"hashes": [
|
||||
"sha256:024b8129695a952ebd93373e45b5d341dbb87c17ce49637b34000093f243dd4f",
|
||||
"sha256:041e9442b11409be5e4fc8b6a97e4bcead758ab1e11768d1e69160bdde18acc3",
|
||||
"sha256:045b4dd0e5f6121e6f314d81759abd2c257db4634260abcfe0d3f7083c4908ef",
|
||||
"sha256:047c0a04e382ef8bd74b0de01407e8d8632d7d1b4db6f2561106af812a68741b",
|
||||
"sha256:068167c2d7bbeebd359665ac4fff756be5ffac9cda02375b5c5a7c4777038e73",
|
||||
"sha256:148ff60e0fffa2f5fad2eb25aae7bef23d8f3b8bdaf947a65cdbe84a978092bc",
|
||||
"sha256:1d1c77013a259971a72ddaa83b9f42c80a93ff12df6a4723be99d858fa30bee3",
|
||||
"sha256:1d48bc124a6b7a55006d97917f695effa9725d05abe8ee78fd60d6588b8344cd",
|
||||
"sha256:31dfa2fc323097f8ad7acd41aa38d7c614dd1960ac6681745b6da124093dc351",
|
||||
"sha256:34f82db7f80c49f38b032c5abb605c458bac997a6c3142e0d6c130be6fb2b941",
|
||||
"sha256:3d5dd8e5998fb4ace04789d1d008e2bb532de501218519d70bb672c4c5a2fc5d",
|
||||
"sha256:4a6ae52bd3ee41ee0f3acf4c60ceb3f44e0e3bc52ab7da1c2b2aa6703363a3d1",
|
||||
"sha256:4b02a3b2a2f01d0490dd39321c74273fed0568568ea0e7ea23e02bd1fb10a10b",
|
||||
"sha256:4b843f8e1dd6a3195679d9838eb4670222e8b8d01bc36c9894d6c3538316fa0a",
|
||||
"sha256:5de53a28f40ef3c4fd57aeab6b590c2c663de87a5af76136ced519923d3efbb3",
|
||||
"sha256:61b2b33ede821b94fa99ce0b09c9ece049c7067a33b279f343adfe35108a4ea7",
|
||||
"sha256:6a3a9b0f45fd75dc05d8e93dc21b18fc1670135ec9544d1ad4acbcf6b86781d0",
|
||||
"sha256:76ad8e4c69dadbb31bad17c16baee61c0d1a4a73bed2590b741b2e1a46d3edd0",
|
||||
"sha256:7ba19b777dc00194d1b473180d4ca89a054dd18de27d0ee2e42a103ec9b7d014",
|
||||
"sha256:7c1b7eab7a49aa96f3db1f716f0113a8a2e93c7375dd3d5d21c4941f1405c9c5",
|
||||
"sha256:7fc0eee3046041387cbace9314926aa48b681202f8897f8bff3809967a049036",
|
||||
"sha256:8ccd1c5fff1aa1427100ce188557fc31f1e0a383ad8ec42c559aabd4ff08802d",
|
||||
"sha256:8e08dd76de80539d613654915a2f5196dbccc67448df291e69a88712ea21e24a",
|
||||
"sha256:c18498c50c59263841862ea0501da9f2b3659c00db54abfbf823a80787fde8ce",
|
||||
"sha256:c49db89d602c24928e68c0d510f4fcf8989d77defd01c973d6cbe27e684833b1",
|
||||
"sha256:ce20044d0317649ddbb4e54dab3c1bcc7483c78c27d3f58ab3d0c7e6bc60d26a",
|
||||
"sha256:d1071414dd06ca2eafa90c85a079169bfeb0e5f57fd0b45d44c092546fcd6fd9",
|
||||
"sha256:d3be11ac43ab1a3e979dac80843b42226d5d3cccd3986f2e03152720a4297cd7",
|
||||
"sha256:db603a1c235d110c860d5f39988ebc8218ee028f07a7cbc056ba6424372ca31b"
|
||||
],
|
||||
"version": "==4.5.2"
|
||||
},
|
||||
"numpy": {
|
||||
"hashes": [
|
||||
"sha256:0cdbbaa30ae69281b18dd995d3079c4e552ad6d5426977f66b9a2a95f11f552a",
|
||||
"sha256:2b0cca1049bd39d1879fa4d598624cafe82d35529c72de1b3d528d68031cdd95",
|
||||
"sha256:31d3fe5b673e99d33d70cfee2ea8fe8dccd60f265c3ed990873a88647e3dd288",
|
||||
"sha256:34dd4922aab246c39bf5df03ca653d6265e65971deca6784c956bf356bca6197",
|
||||
"sha256:384e2dfa03da7c8d54f8f934f61b6a5e4e1ebb56a65b287567629d6c14578003",
|
||||
"sha256:392e2ea22b41a22c0289a88053204b616181288162ba78e6823e1760309d5277",
|
||||
"sha256:4341a39fc085f31a583be505eabf00e17c619b469fef78dc7e8241385bfddaa4",
|
||||
"sha256:45080f065dcaa573ebecbfe13cdd86e8c0a68c4e999aa06bd365374ea7137706",
|
||||
"sha256:485cb1eb4c9962f4cd042fed9424482ec1d83fee5dc2ef3f2552ac47852cb259",
|
||||
"sha256:575cefd28d3e0da85b0864506ae26b06483ee4a906e308be5a7ad11083f9d757",
|
||||
"sha256:62784b35df7de7ca4d0d81c5b6af5983f48c5cdef32fc3635b445674e56e3266",
|
||||
"sha256:69c152f7c11bf3b4fc11bc4cc62eb0334371c0db6844ebace43b7c815b602805",
|
||||
"sha256:6ccfdcefd287f252cf1ea7a3f1656070da330c4a5658e43ad223269165cdf977",
|
||||
"sha256:7298fbd73c0b3eff1d53dc9b9bdb7add8797bb55eeee38c8ccd7906755ba28af",
|
||||
"sha256:79463d918d1bf3aeb9186e3df17ddb0baca443f41371df422f99ee94f4f2bbfe",
|
||||
"sha256:8bbee788d82c0ac656536de70e817af09b7694f5326b0ef08e5c1014fcb96bb3",
|
||||
"sha256:a863957192855c4c57f60a75a1ac06ce5362ad18506d362dd807e194b4baf3ce",
|
||||
"sha256:ae602ba425fb2b074e16d125cdce4f0194903da935b2e7fe284ebecca6d92e76",
|
||||
"sha256:b13faa258b20fa66d29011f99fdf498641ca74a0a6d9266bc27d83c70fea4a6a",
|
||||
"sha256:c2c39d69266621dd7464e2bb740d6eb5abc64ddc339cc97aa669f3bb4d75c103",
|
||||
"sha256:e9c88f173d31909d881a60f08a8494e63f1aff2a4052476b24d4f50e82c47e24",
|
||||
"sha256:f1a29267ac29fff0913de0f11f3a9edfcd3f39595f467026c29376fad243ebe3",
|
||||
"sha256:f69dde0c5a137d887676a8129373e44366055cf19d1b434e853310c7a1e68f93"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.16.1"
|
||||
},
|
||||
"pandas": {
|
||||
"hashes": [
|
||||
"sha256:02c830f951f3dc8c3164e2639a8961881390f7492f71a7835c2330f54539ad57",
|
||||
"sha256:179015834c72a577486337394493cc2969feee9a04a2ea09f50c724e4b52ab42",
|
||||
"sha256:3894960d43c64cfea5142ac783b101362f5008ee92e962392156a3f8d1558995",
|
||||
"sha256:435821cb2501eabbcee7e83614bd710940dc0cf28b5afbc4bdb816c31cec71af",
|
||||
"sha256:8294dea9aa1811f93558702856e3b68dd1dfd7e9dbc8e0865918a07ee0f21c2c",
|
||||
"sha256:844e745ab27a9a01c86925fe776f9d2e09575e65f0bf8eba5090edddd655dffc",
|
||||
"sha256:a08d49f5fa2a2243262fe5581cb89f6c0c7cc525b8d6411719ab9400a9dc4a82",
|
||||
"sha256:a435c251246075337eb9fdc4160fd15c8a87cc0679d8d61fb5255d8d5a12f044",
|
||||
"sha256:a799f03c0ec6d8687f425d7d6c075e8055a9a808f1ba87604d91f20507631d8d",
|
||||
"sha256:aea72ce5b3a016b578cc05c04a2f68d9cafacf5d784b6fe832e66381cb62c719",
|
||||
"sha256:c145e94c6da2af7eaf1fd827293ac1090a61a9b80150bebe99f8966a02378db9",
|
||||
"sha256:c8a7b470c88c779301b73b23cabdbbd94b83b93040b2ccffa409e06df23831c0",
|
||||
"sha256:c9e31b36abbd7b94c547d9047f13e1546e3ba967044cf4f9718575fcb7b81bb6",
|
||||
"sha256:d960b7a03c33c328c723cfc2f8902a6291645f4efa0a5c1d4c5fa008cdc1ea77",
|
||||
"sha256:da21fae4c173781b012217c9444f13c67449957a4d45184a9718268732c09564",
|
||||
"sha256:db26c0fea0bd7d33c356da98bafd2c0dfb8f338e45e2824ff8f4f3e61b5c5f25",
|
||||
"sha256:dc296c3f16ec620cfb4daf0f672e3c90f3920ece8261b2760cd0ebd9cd4daa55",
|
||||
"sha256:e8da67cb2e9333ec30d53cfb96e27a4865d1648688e5471699070d35d8ab38cf",
|
||||
"sha256:fb4f047a63f91f22aade4438aaf790400b96644e802daab4293e9b799802f93f",
|
||||
"sha256:fef9939176cba0c2526ebeefffb8b9807543dc0954877b7226f751ec1294a869"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.24.1"
|
||||
},
|
||||
"pillow": {
|
||||
"hashes": [
|
||||
"sha256:051de330a06c99d6f84bcf582960487835bcae3fc99365185dc2d4f65a390c0e",
|
||||
"sha256:0ae5289948c5e0a16574750021bd8be921c27d4e3527800dc9c2c1d2abc81bf7",
|
||||
"sha256:0b1efce03619cdbf8bcc61cfae81fcda59249a469f31c6735ea59badd4a6f58a",
|
||||
"sha256:163136e09bd1d6c6c6026b0a662976e86c58b932b964f255ff384ecc8c3cefa3",
|
||||
"sha256:18e912a6ccddf28defa196bd2021fe33600cbe5da1aa2f2e2c6df15f720b73d1",
|
||||
"sha256:24ec3dea52339a610d34401d2d53d0fb3c7fd08e34b20c95d2ad3973193591f1",
|
||||
"sha256:267f8e4c0a1d7e36e97c6a604f5b03ef58e2b81c1becb4fccecddcb37e063cc7",
|
||||
"sha256:3273a28734175feebbe4d0a4cde04d4ed20f620b9b506d26f44379d3c72304e1",
|
||||
"sha256:4c678e23006798fc8b6f4cef2eaad267d53ff4c1779bd1af8725cc11b72a63f3",
|
||||
"sha256:4d4bc2e6bb6861103ea4655d6b6f67af8e5336e7216e20fff3e18ffa95d7a055",
|
||||
"sha256:505738076350a337c1740a31646e1de09a164c62c07db3b996abdc0f9d2e50cf",
|
||||
"sha256:5233664eadfa342c639b9b9977190d64ad7aca4edc51a966394d7e08e7f38a9f",
|
||||
"sha256:5d95cb9f6cced2628f3e4de7e795e98b2659dfcc7176ab4a01a8b48c2c2f488f",
|
||||
"sha256:7eda4c737637af74bac4b23aa82ea6fbb19002552be85f0b89bc27e3a762d239",
|
||||
"sha256:801ddaa69659b36abf4694fed5aa9f61d1ecf2daaa6c92541bbbbb775d97b9fe",
|
||||
"sha256:825aa6d222ce2c2b90d34a0ea31914e141a85edefc07e17342f1d2fdf121c07c",
|
||||
"sha256:9c215442ff8249d41ff58700e91ef61d74f47dfd431a50253e1a1ca9436b0697",
|
||||
"sha256:a3d90022f2202bbb14da991f26ca7a30b7e4c62bf0f8bf9825603b22d7e87494",
|
||||
"sha256:a631fd36a9823638fe700d9225f9698fb59d049c942d322d4c09544dc2115356",
|
||||
"sha256:a6523a23a205be0fe664b6b8747a5c86d55da960d9586db039eec9f5c269c0e6",
|
||||
"sha256:a756ecf9f4b9b3ed49a680a649af45a8767ad038de39e6c030919c2f443eb000",
|
||||
"sha256:b117287a5bdc81f1bac891187275ec7e829e961b8032c9e5ff38b70fd036c78f",
|
||||
"sha256:ba04f57d1715ca5ff74bb7f8a818bf929a204b3b3c2c2826d1e1cc3b1c13398c",
|
||||
"sha256:cd878195166723f30865e05d87cbaf9421614501a4bd48792c5ed28f90fd36ca",
|
||||
"sha256:cee815cc62d136e96cf76771b9d3eb58e0777ec18ea50de5cfcede8a7c429aa8",
|
||||
"sha256:d1722b7aa4b40cf93ac3c80d3edd48bf93b9208241d166a14ad8e7a20ee1d4f3",
|
||||
"sha256:d7c1c06246b05529f9984435fc4fa5a545ea26606e7f450bdbe00c153f5aeaad",
|
||||
"sha256:e9c8066249c040efdda84793a2a669076f92a301ceabe69202446abb4c5c5ef9",
|
||||
"sha256:f227d7e574d050ff3996049e086e1f18c7bd2d067ef24131e50a1d3fe5831fbc",
|
||||
"sha256:fc9a12aad714af36cf3ad0275a96a733526571e52710319855628f476dcb144e"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==5.4.1"
|
||||
},
|
||||
"pluggy": {
|
||||
"hashes": [
|
||||
"sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616",
|
||||
"sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a"
|
||||
],
|
||||
"version": "==0.8.1"
|
||||
},
|
||||
"protobuf": {
|
||||
"hashes": [
|
||||
"sha256:10394a4d03af7060fa8a6e1cbf38cea44be1467053b0aea5bbfcb4b13c4b88c4",
|
||||
"sha256:1489b376b0f364bcc6f89519718c057eb191d7ad6f1b395ffd93d1aa45587811",
|
||||
"sha256:1931d8efce896981fe410c802fd66df14f9f429c32a72dd9cfeeac9815ec6444",
|
||||
"sha256:196d3a80f93c537f27d2a19a4fafb826fb4c331b0b99110f985119391d170f96",
|
||||
"sha256:46e34fdcc2b1f2620172d3a4885128705a4e658b9b62355ae5e98f9ea19f42c2",
|
||||
"sha256:4b92e235a3afd42e7493b281c8b80c0c65cbef45de30f43d571d1ee40a1f77ef",
|
||||
"sha256:574085a33ca0d2c67433e5f3e9a0965c487410d6cb3406c83bdaf549bfc2992e",
|
||||
"sha256:59cd75ded98094d3cf2d79e84cdb38a46e33e7441b2826f3838dcc7c07f82995",
|
||||
"sha256:5ee0522eed6680bb5bac5b6d738f7b0923b3cafce8c4b1a039a6107f0841d7ed",
|
||||
"sha256:65917cfd5da9dfc993d5684643063318a2e875f798047911a9dd71ca066641c9",
|
||||
"sha256:685bc4ec61a50f7360c9fd18e277b65db90105adbf9c79938bd315435e526b90",
|
||||
"sha256:92e8418976e52201364a3174e40dc31f5fd8c147186d72380cbda54e0464ee19",
|
||||
"sha256:9335f79d1940dfb9bcaf8ec881fb8ab47d7a2c721fb8b02949aab8bbf8b68625",
|
||||
"sha256:a7ee3bb6de78185e5411487bef8bc1c59ebd97e47713cba3c460ef44e99b3db9",
|
||||
"sha256:ceec283da2323e2431c49de58f80e1718986b79be59c266bb0509cbf90ca5b9e",
|
||||
"sha256:fcfc907746ec22716f05ea96b7f41597dfe1a1c088f861efb8a0d4f4196a6f10"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.6.1"
|
||||
},
|
||||
"py": {
|
||||
"hashes": [
|
||||
"sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694",
|
||||
"sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6"
|
||||
],
|
||||
"version": "==1.7.0"
|
||||
},
|
||||
"pytest": {
|
||||
"hashes": [
|
||||
"sha256:65aeaa77ae87c7fc95de56285282546cfa9c886dc8e5dc78313db1c25e21bc07",
|
||||
"sha256:6ac6d467d9f053e95aaacd79f831dbecfe730f419c6c7022cb316b365cd9199d"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==4.2.0"
|
||||
},
|
||||
"pytest-asyncio": {
|
||||
"hashes": [
|
||||
"sha256:9fac5100fd716cbecf6ef89233e8590a4ad61d729d1732e0a96b84182df1daaf",
|
||||
"sha256:d734718e25cfc32d2bf78d346e99d33724deeba774cc4afdf491530c6184b63b"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.10.0"
|
||||
},
|
||||
"python-dateutil": {
|
||||
"hashes": [
|
||||
"sha256:7e6584c74aeed623791615e26efd690f29817a27c73085b78e4bad02493df2fb",
|
||||
"sha256:c89805f6f4d64db21ed966fda138f8a5ed7a4fdbc1a8ee329ce1b74e3c74da9e"
|
||||
],
|
||||
"version": "==2.8.0"
|
||||
},
|
||||
"python-editor": {
|
||||
"hashes": [
|
||||
"sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d",
|
||||
"sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b",
|
||||
"sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"
|
||||
],
|
||||
"version": "==1.0.4"
|
||||
},
|
||||
"python-slugify": {
|
||||
"hashes": [
|
||||
"sha256:d3e034397236020498e677a35e5c05dcc6ba1624b608b9ef7e5fe3090ccbd5a8"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.0.1"
|
||||
},
|
||||
"pytz": {
|
||||
"hashes": [
|
||||
"sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9",
|
||||
"sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c"
|
||||
],
|
||||
"version": "==2018.9"
|
||||
},
|
||||
"pyyaml": {
|
||||
"hashes": [
|
||||
"sha256:3d7da3009c0f3e783b2c873687652d83b1bbfd5c88e9813fb7e5b03c0dd3108b",
|
||||
"sha256:3ef3092145e9b70e3ddd2c7ad59bdd0252a94dfe3949721633e41344de00a6bf",
|
||||
"sha256:40c71b8e076d0550b2e6380bada1f1cd1017b882f7e16f09a65be98e017f211a",
|
||||
"sha256:558dd60b890ba8fd982e05941927a3911dc409a63dcb8b634feaa0cda69330d3",
|
||||
"sha256:a7c28b45d9f99102fa092bb213aa12e0aaf9a6a1f5e395d36166639c1f96c3a1",
|
||||
"sha256:aa7dd4a6a427aed7df6fb7f08a580d68d9b118d90310374716ae90b710280af1",
|
||||
"sha256:bc558586e6045763782014934bfaf39d48b8ae85a2713117d16c39864085c613",
|
||||
"sha256:d46d7982b62e0729ad0175a9bc7e10a566fc07b224d2c79fafb5e032727eaa04",
|
||||
"sha256:d5eef459e30b09f5a098b9cea68bebfeb268697f78d647bd255a085371ac7f3f",
|
||||
"sha256:e01d3203230e1786cd91ccfdc8f8454c8069c91bee3962ad93b87a4b2860f537",
|
||||
"sha256:e170a9e6fcfd19021dd29845af83bb79236068bf5fd4df3327c1be18182b2531"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.13"
|
||||
},
|
||||
"regex": {
|
||||
"hashes": [
|
||||
"sha256:017bf6f893db109dc5f82b902019f6fe089e605af5e1f0f6f7271f936b411eb4",
|
||||
"sha256:0969fdb610435d7f49dc5518f7642d7b1319ef19f0c3f1bd4d972afbb9877aa7",
|
||||
"sha256:3679f269790c87bd04e003e60e098b1be5392f17c48d28c2a3b9d16b3dcbca2a",
|
||||
"sha256:37150aee3411f38d08733edb5f3faa656f96ddae00ee7713e01d7423f0f72815",
|
||||
"sha256:4a1a1d963f462c13722b34ef1f82c4707091b0a3fb9b5fd79b6670c38b734095",
|
||||
"sha256:5da76d468d048fb163bcaedd5c0832a3ab95da1034598a6c673bf999ae61f259",
|
||||
"sha256:72dda5123ee45cde10031576710ca0c4972757c94a60b75023a45d8069da34ca",
|
||||
"sha256:7f40b720b81f6614a34a8857d2417fbe619734629f9d0627e2cc9e493979401d",
|
||||
"sha256:a22a11e9dd6e46529dc4409bd6c449f3e7525aa4b0d5e9b23363302cfe4db8e4"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2019.2.7"
|
||||
},
|
||||
"requests": {
|
||||
"hashes": [
|
||||
"sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e",
|
||||
"sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.21.0"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
"sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c",
|
||||
"sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"
|
||||
],
|
||||
"version": "==1.12.0"
|
||||
},
|
||||
"soupsieve": {
|
||||
"hashes": [
|
||||
"sha256:466910df7561796a60748826781ebe9a888f7a1668a636ae86783f44d10aae73",
|
||||
"sha256:87db12ae79194f0ff9808d2b1641c4f031ae39ffa3cab6b907ea7c1e5e5ed445"
|
||||
],
|
||||
"version": "==1.7.3"
|
||||
},
|
||||
"sqlalchemy": {
|
||||
"hashes": [
|
||||
"sha256:52a42dbf02d0562d6e90e7af59f177f1cc027e72833cc29c3a821eefa009c71d"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.2.17"
|
||||
},
|
||||
"tqdm": {
|
||||
"hashes": [
|
||||
"sha256:d385c95361699e5cf7622485d9b9eae2d4864b21cd5a2374a9c381ffed701021",
|
||||
"sha256:e22977e3ebe961f72362f6ddfb9197cc531c9737aaf5f607ef09740c849ecd05"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==4.31.1"
|
||||
},
|
||||
"unidecode": {
|
||||
"hashes": [
|
||||
"sha256:092cdf7ad9d1052c50313426a625b717dab52f7ac58f859e09ea020953b1ad8f",
|
||||
"sha256:8b85354be8fd0c0e10adbf0675f6dc2310e56fda43fa8fe049123b6c475e52fb"
|
||||
],
|
||||
"version": "==1.0.23"
|
||||
},
|
||||
"urllib3": {
|
||||
"hashes": [
|
||||
"sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39",
|
||||
"sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"
|
||||
],
|
||||
"version": "==1.24.1"
|
||||
},
|
||||
"werkzeug": {
|
||||
"hashes": [
|
||||
"sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c",
|
||||
"sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b"
|
||||
],
|
||||
"version": "==0.14.1"
|
||||
},
|
||||
"xmltodict": {
|
||||
"hashes": [
|
||||
"sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21",
|
||||
"sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.12.0"
|
||||
},
|
||||
"yarl": {
|
||||
"hashes": [
|
||||
"sha256:024ecdc12bc02b321bc66b41327f930d1c2c543fa9a561b39861da9388ba7aa9",
|
||||
"sha256:2f3010703295fbe1aec51023740871e64bb9664c789cba5a6bdf404e93f7568f",
|
||||
"sha256:3890ab952d508523ef4881457c4099056546593fa05e93da84c7250516e632eb",
|
||||
"sha256:3e2724eb9af5dc41648e5bb304fcf4891adc33258c6e14e2a7414ea32541e320",
|
||||
"sha256:5badb97dd0abf26623a9982cd448ff12cb39b8e4c94032ccdedf22ce01a64842",
|
||||
"sha256:73f447d11b530d860ca1e6b582f947688286ad16ca42256413083d13f260b7a0",
|
||||
"sha256:7ab825726f2940c16d92aaec7d204cfc34ac26c0040da727cf8ba87255a33829",
|
||||
"sha256:b25de84a8c20540531526dfbb0e2d2b648c13fd5dd126728c496d7c3fea33310",
|
||||
"sha256:c6e341f5a6562af74ba55205dbd56d248daf1b5748ec48a0200ba227bb9e33f4",
|
||||
"sha256:c9bb7c249c4432cd47e75af3864bc02d26c9594f49c82e2a28624417f0ae63b8",
|
||||
"sha256:e060906c0c585565c718d1c3841747b61c5439af2211e185f6739a9412dfbde1"
|
||||
],
|
||||
"version": "==1.3.0"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
}
|
74
alembic.ini
Normal file
74
alembic.ini
Normal file
@@ -0,0 +1,74 @@
|
||||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = alembic
|
||||
|
||||
# template used to generate migration files
|
||||
file_template = %%(year)d-%%(month).2d-%%(day).2d_%%(hour).2d:%%(minute).2d:%%(second).2d_%%(slug)s_%%(rev)s
|
||||
|
||||
# timezone to use when rendering the date
|
||||
# within the migration file as well as the filename.
|
||||
# string value is passed to dateutil.tz.gettz()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
#truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; this defaults
|
||||
# to alembic/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path
|
||||
# version_locations = %(here)s/bar %(here)s/bat alembic/versions
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
sqlalchemy.url = sqlite:///zvk.db
|
||||
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
1
alembic/README
Normal file
1
alembic/README
Normal file
@@ -0,0 +1 @@
|
||||
Generic single-database configuration.
|
83
alembic/env.py
Normal file
83
alembic/env.py
Normal file
@@ -0,0 +1,83 @@
|
||||
from __future__ import with_statement
|
||||
|
||||
from logging.config import fileConfig
|
||||
|
||||
from alembic import context
|
||||
from sqlalchemy import engine_from_config, pool
|
||||
|
||||
from main import read_config
|
||||
from zvk.bot.bot import Bot
|
||||
from zvk.util.db import DBBase
|
||||
|
||||
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.

config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata

# Building a Bot wires up the Database, so DBBase.metadata below is
# populated for autogenerate support.
# NOTE(review): this executes the full Bot.__init__ (including plugin
# loading) on every alembic invocation -- confirm that side effect is
# intended here.
bot = Bot(config=read_config())
db = bot.db
target_metadata = DBBase.metadata
||||
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    # render_as_batch makes ALTER TABLE emulation work on SQLite.
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        render_as_batch=True,
    )

    with context.begin_transaction():
        context.run_migrations()
||||
|
||||
|
||||
def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    # Build an Engine from the [alembic] section of the ini file; NullPool
    # because a single short-lived connection is all we need.
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    with connectable.connect() as connection:
        # render_as_batch makes ALTER TABLE emulation work on SQLite.
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
            render_as_batch=True
        )

        with context.begin_transaction():
            context.run_migrations()
||||
|
||||
|
||||
# Alembic invokes env.py in one of two modes depending on the command line;
# dispatch accordingly.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
24
alembic/script.py.mako
Normal file
24
alembic/script.py.mako
Normal file
@@ -0,0 +1,24 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade():
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade():
|
||||
${downgrades if downgrades else "pass"}
|
28
alembic/versions/2018-10-19_23:23:23_init_7cc70eff053e.py
Normal file
28
alembic/versions/2018-10-19_23:23:23_init_7cc70eff053e.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""init
|
||||
|
||||
Revision ID: 7cc70eff053e
|
||||
Revises:
|
||||
Create Date: 2018-10-19 23:23:23.765848
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '7cc70eff053e'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Initial empty revision; actual schema changes start in later revisions."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade():
    """Nothing to undo for the initial empty revision."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
39
alembic/versions/2018-10-19_23:24:04_message_fbc835042266.py
Normal file
39
alembic/versions/2018-10-19_23:24:04_message_fbc835042266.py
Normal file
@@ -0,0 +1,39 @@
|
||||
"""Message
|
||||
|
||||
Revision ID: fbc835042266
|
||||
Revises: 7cc70eff053e
|
||||
Create Date: 2018-10-19 23:24:04.641316
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'fbc835042266'
|
||||
down_revision = '7cc70eff053e'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the Message_auto table storing VK messages."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('Message_auto',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('message_id', sa.Integer(), nullable=False),
    sa.Column('from_id', sa.Integer(), nullable=False),
    sa.Column('to_id', sa.Integer(), nullable=False),
    sa.Column('flags', sa.Integer(), nullable=False),
    sa.Column('timestamp', sa.Integer(), nullable=False),
    sa.Column('attachments', sa.String(), nullable=False),
    sa.Column('random_id', sa.Integer(), nullable=False),
    sa.PrimaryKeyConstraint('id'),
    sa.UniqueConstraint('message_id')
    )
    # ### end Alembic commands ###


def downgrade():
    """Drop the Message_auto table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('Message_auto')
    # ### end Alembic commands ###
33
alembic/versions/2018-10-20_01:10:54_vkevent_cbb2ece35ae0.py
Normal file
33
alembic/versions/2018-10-20_01:10:54_vkevent_cbb2ece35ae0.py
Normal file
@@ -0,0 +1,33 @@
|
||||
"""VKEvent
|
||||
|
||||
Revision ID: cbb2ece35ae0
|
||||
Revises: fbc835042266
|
||||
Create Date: 2018-10-20 01:10:54.532015
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = 'cbb2ece35ae0'
|
||||
down_revision = 'fbc835042266'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the VKEvent_auto table storing raw VK events with JSON args."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('VKEvent_auto',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('vk_event_type_id', sa.Integer(), nullable=False),
    sa.Column('vk_event_args_json', sa.String(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    """Drop the VKEvent_auto table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('VKEvent_auto')
    # ### end Alembic commands ###
@@ -0,0 +1,38 @@
|
||||
"""Message update
|
||||
|
||||
Revision ID: 27bdc8eab961
|
||||
Revises: cbb2ece35ae0
|
||||
Create Date: 2018-10-20 02:46:05.272680
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '27bdc8eab961'
|
||||
down_revision = 'cbb2ece35ae0'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Replace Message_auto.attachments with JSON columns and add a text column."""
    # ### commands auto generated by Alembic - please adjust! ###
    # batch_alter_table recreates the table, which is how SQLite supports
    # these ALTERs.
    with op.batch_alter_table('Message_auto', schema=None) as batch_op:
        batch_op.add_column(sa.Column('attachments_json', sa.String(), nullable=False))
        batch_op.add_column(sa.Column('extra_fields_json', sa.String(), nullable=False))
        batch_op.add_column(sa.Column('text', sa.String(), nullable=False))
        batch_op.drop_column('attachments')

    # ### end Alembic commands ###


def downgrade():
    """Restore the plain attachments column, dropping the JSON/text columns."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('Message_auto', schema=None) as batch_op:
        batch_op.add_column(sa.Column('attachments', sa.VARCHAR(), nullable=False))
        batch_op.drop_column('text')
        batch_op.drop_column('extra_fields_json')
        batch_op.drop_column('attachments_json')

    # ### end Alembic commands ###
@@ -0,0 +1,33 @@
|
||||
"""Message more fields
|
||||
|
||||
Revision ID: 952c5d97aba3
|
||||
Revises: 27bdc8eab961
|
||||
Create Date: 2018-10-23 23:37:23.112830
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '952c5d97aba3'
|
||||
down_revision = '27bdc8eab961'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Add is_bot_message / is_outgoing boolean flags to Message_auto."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('Message_auto', schema=None) as batch_op:
        batch_op.add_column(sa.Column('is_bot_message', sa.Boolean(), nullable=False, default=False))
        batch_op.add_column(sa.Column('is_outgoing', sa.Boolean(), nullable=False, default=False))

    # ### end Alembic commands ###


def downgrade():
    """Drop the is_outgoing / is_bot_message flags from Message_auto."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('Message_auto', schema=None) as batch_op:
        batch_op.drop_column('is_outgoing')
        batch_op.drop_column('is_bot_message')

    # ### end Alembic commands ###
@@ -0,0 +1,34 @@
|
||||
"""UserActivityPeriod
|
||||
|
||||
Revision ID: 20e307621653
|
||||
Revises: 952c5d97aba3
|
||||
Create Date: 2018-10-26 10:24:37.417570
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '20e307621653'
|
||||
down_revision = '952c5d97aba3'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the UserActivityPeriod_auto table (per-user activity intervals)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('UserActivityPeriod_auto',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('user_id', sa.Integer(), nullable=False),
    sa.Column('period_start', sa.DateTime(), nullable=False),
    sa.Column('period_end', sa.DateTime(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    """Drop the UserActivityPeriod_auto table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('UserActivityPeriod_auto')
    # ### end Alembic commands ###
@@ -0,0 +1,41 @@
|
||||
"""CachedDownload and CachedUpload
|
||||
|
||||
Revision ID: 253bd36e1037
|
||||
Revises: 20e307621653
|
||||
Create Date: 2018-10-27 02:06:35.782235
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '253bd36e1037'
|
||||
down_revision = '20e307621653'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the CachedDownload_auto and CachedUpload_auto cache tables."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('CachedDownload_auto',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('url', sa.String(), nullable=False),
    sa.Column('params_json', sa.String(), nullable=False),
    sa.Column('local_path', sa.String(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_table('CachedUpload_auto',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('local_path', sa.String(), nullable=False),
    sa.Column('vk_object', sa.String(), nullable=False),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    """Drop both cache tables (reverse creation order)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('CachedUpload_auto')
    op.drop_table('CachedDownload_auto')
    # ### end Alembic commands ###
@@ -0,0 +1,32 @@
|
||||
"""add peer_id to Message
|
||||
|
||||
Revision ID: 3283fcaa655e
|
||||
Revises: 253bd36e1037
|
||||
Create Date: 2018-11-09 20:29:11.432988
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '3283fcaa655e'
|
||||
down_revision = '253bd36e1037'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Add a peer_id column to Message_auto (defaulting existing rows to 0)."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('Message_auto', schema=None) as batch_op:
        # server_default='0' lets the NOT NULL column be added to a table
        # that already contains rows.
        batch_op.add_column(sa.Column('peer_id', sa.Integer(), nullable=False, server_default='0'))

    # ### end Alembic commands ###


def downgrade():
    """Drop the peer_id column from Message_auto."""
    # ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('Message_auto', schema=None) as batch_op:
        batch_op.drop_column('peer_id')

    # ### end Alembic commands ###
@@ -0,0 +1,32 @@
|
||||
"""add TimetableJson
|
||||
|
||||
Revision ID: 70e4787571c6
|
||||
Revises: 3283fcaa655e
|
||||
Create Date: 2019-03-02 18:49:05.324919
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '70e4787571c6'
|
||||
down_revision = '3283fcaa655e'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the TimetableJson_auto table (single JSON blob per row)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('TimetableJson_auto',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('json', sa.String(), nullable=True),
    sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    """Drop the TimetableJson_auto table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('TimetableJson_auto')
    # ### end Alembic commands ###
50
deploy.sh
Executable file
50
deploy.sh
Executable file
@@ -0,0 +1,50 @@
|
||||
#!/bin/zsh

# Deploy script: pull the latest code, restart the systemd unit, verify
# it came up.
# -e: abort on error, -u: unset vars are errors, -x: trace commands.
set -eux
set -o pipefail

cd /home/misc-user/zvk

echo "Pulling"
if git pull; then
    echo "OK"
else
    echo "-----"
    echo "-----Could not pull-----"
    echo "-----"
    exit 1
fi

echo "Restarting"
if sudo /bin/systemctl restart a-zvk; then
    echo "OK"
else
    echo "-----"
    echo "-----Could not restart-----"
    echo "-----"
    exit 1
fi

# Give the service a moment to crash on startup if it is going to.
echo "Waiting"
sleep 3

echo "Checking"
if sudo /bin/systemctl is-active a-zvk; then
    echo "OK"
else
    echo "-----"
    echo "-----Could not start up-----"
    echo "-----"
    # Dump the unit status for debugging before bailing out.
    sudo /bin/systemctl status a-zvk;
    exit 1
fi

echo "Deploy successful"

# Commit summary intended for the chat notification below.
info=$'🤖 zvk change 🤖\n'$(git show --stat)

sleep 5

#curl -X POST -F "peer_id=9002294" -F "message=$info" localhost:32155/message/send

# NOTE(review): the curl above is commented out, so no notification is
# actually sent despite the message below -- confirm this is intentional.
echo "Notification sent"
27
example_config.yaml
Normal file
27
example_config.yaml
Normal file
@@ -0,0 +1,27 @@
|
||||
db_url: "sqlite:///zvk.db"
|
||||
|
||||
net:
|
||||
timeout: 60
|
||||
|
||||
api:
|
||||
access_token: "token"
|
||||
|
||||
permissions:
|
||||
admin:
|
||||
- 0
|
||||
mod:
|
||||
- 0
|
||||
- 50951365 # Nika
|
||||
user:
|
||||
- 0
|
||||
- 50951365 # Nika
|
||||
- 172350539 # Vladlen
|
||||
- 173489181 # Disa
|
||||
|
||||
plugins:
|
||||
api_keys:
|
||||
wolfram: "321321"
|
||||
open_weather: "321321321"
|
||||
yandex_dict: "321321"
|
||||
blacklist: []
|
||||
whitelist: []
|
BIN
relics/6407-2.ftt
Normal file
BIN
relics/6407-2.ftt
Normal file
Binary file not shown.
16
run.sh
Executable file
16
run.sh
Executable file
@@ -0,0 +1,16 @@
|
||||
#!/usr/bin/env zsh

# Abort on error (-e), treat unset variables as errors (-u), trace
# commands (-x), and fail a pipeline if any stage fails (pipefail).
# BUG FIX: the original `set euxo PIPEFAIL` had no dash, which merely
# reassigns the positional parameters ($1=euxo, $2=PIPEFAIL) and enables
# no shell option at all.
set -euxo pipefail

# Keep the virtualenv inside the project dir; make the bot's source,
# tests and alembic scripts importable.
export PIPENV_VENV_IN_PROJECT=1
export PYTHONPATH=src/:tests/:alembic/

git pull

pipenv install

# Bring the database schema up to date before running anything.
pipenv run alembic upgrade head

# Refuse to start the bot if the test suite fails (set -e aborts here).
pipenv run pytest tests/

pipenv run python3.7 src/main.py
40
src/main.py
Normal file
40
src/main.py
Normal file
@@ -0,0 +1,40 @@
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import yaml
|
||||
|
||||
from zvk.bot.bot import Bot
|
||||
from zvk.util.paths import CONFIG_PATH
|
||||
from zvk.util.zlogging import logger, formatter
|
||||
|
||||
|
||||
def read_config():
    """Read and parse the YAML config file at CONFIG_PATH.

    Returns:
        The parsed config (a dict for configs like example_config.yaml).

    Raises:
        Exception: re-raises anything that goes wrong (missing file,
            malformed YAML) after logging it with a traceback.
    """
    try:
        with open(CONFIG_PATH) as file:
            # safe_load refuses to construct arbitrary Python objects from
            # the config file; yaml.load without an explicit Loader is
            # unsafe and deprecated since PyYAML 5.1.
            return yaml.safe_load(file)
    except Exception:
        logger.exception('Could not read config')
        raise
||||
|
||||
|
||||
def prod_logging():
    """Attach production file handlers to the shared logger.

    info.log receives INFO and above, warning.log receives WARNING and
    above; both use the project-wide formatter.
    """
    handler_specs = (
        ('info.log', logging.INFO),
        ('warning.log', logging.WARNING),
    )

    for filename, level in handler_specs:
        file_handler = logging.FileHandler(filename)
        file_handler.setLevel(level)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
||||
|
||||
|
||||
if __name__ == '__main__':
    # Set up file logging first so startup errors are captured on disk.
    prod_logging()

    bot = Bot(config=read_config())

    try:
        # asyncio.run creates the event loop and blocks until the bot's
        # main event queue finishes.
        asyncio.run(bot.run())
    except KeyboardInterrupt:
        logger.info('KeyboardInterrupt, shutting down')
0
src/zvk/__init__.py
Normal file
0
src/zvk/__init__.py
Normal file
0
src/zvk/bot/__init__.py
Normal file
0
src/zvk/bot/__init__.py
Normal file
116
src/zvk/bot/bot.py
Normal file
116
src/zvk/bot/bot.py
Normal file
@@ -0,0 +1,116 @@
|
||||
import glob
|
||||
from typing import Any, Dict
|
||||
|
||||
from zvk.bot.event_type import BotEventType
|
||||
from zvk.bot.plugin import Plugin
|
||||
from zvk.bot.trunk import Trunk
|
||||
from zvk.event.event import Event
|
||||
from zvk.event.queue import EventQueue
|
||||
from zvk.plugins.vk.api import VKApi
|
||||
from zvk.util.db import Database
|
||||
from zvk.util.network import Network
|
||||
from zvk.util.paths import PLUGIN_GLOB
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
class Bot:
    """
    An instance of a bot running on a specified account.

    Attributes:
        config: A config dict, holding information about keys, plugins, etc.
        net: Network communication abstraction.
        api: An interface to interact with VK servers.
        event_queue: EventQueue that processes various events happening inside the bot.
        plugins: All non-degenerate plugins discovered on disk, by name.
        db: Database wrapper built from config['db_url'].
        trunk: Shared future-based key/value store for cross-plugin data.
    """

    config: Dict[str, Any]
    net: Network
    api: VKApi
    event_queue: EventQueue
    plugins: Dict[str, Plugin]
    db: Database
    trunk: Trunk

    def __init__(self, config):
        """
        Initialize the bot, but do not start yet.

        Args:
            config: A config dict.
        """

        self.config = config
        self.net = Network(self.config)
        self.api = VKApi(self.config, self.net)
        self.db = Database(self.config['db_url'])
        self.trunk = Trunk()
        self.event_queue = EventQueue()
        self.plugins = dict()

        # Plugins are discovered and registered eagerly at construction time.
        self._load_plugins()

    def starting_env(self):
        """Return the environment injected into the STARTUP event consumers."""
        return dict(
            bot=self,
            config=self.config,
            api=self.api,
            net=self.net,
            db=self.db,
            trunk=self.trunk,
        )

    async def initialize(self) -> None:
        """Perform async setup that needs a running event loop."""
        self.net.initialize()
        self.trunk.initialize()

    async def run(self) -> bool:
        """
        Asynchronously run the bot until the event queue finishes.

        Returns:
            True if the queue finished cleanly, False if any consumer died
            (event_queue.is_dirty was set).
        """

        logger.info('Initializing the bot')
        await self.initialize()
        logger.info('Initialization finished')

        # A single STARTUP event seeds the queue; everything else cascades
        # from the consumers it triggers.
        logger.info('Starting the bot')
        await self.event_queue.run([Event(BotEventType.STARTUP, **self.starting_env())])
        logger.info('Main queue finished, shutting down')

        return not self.event_queue.is_dirty

    def die(self) -> None:
        """
        Schedules an end event to happen in the queue.
        """

        logger.info('Suicide by forcibly stopping the queue')

        self.event_queue.omae_wa_mou_shindeiru()

    def _load_plugins(self):
        """Discover plugin modules on disk, read them and activate the allowed ones."""
        paths = glob.glob(PLUGIN_GLOB, recursive=True)

        whitelist = self.config['plugins']['whitelist']
        blacklist = self.config['plugins']['blacklist']

        for path in paths:
            plugin = Plugin(path)
            plugin.read()

            # A plugin with no event consumers is useless; drop it entirely.
            if plugin.is_degenerate:
                continue

            # Blacklisted/non-whitelisted plugins are kept in self.plugins
            # but never activated.
            self.plugins[plugin.name] = plugin

            if plugin.name in blacklist:
                logger.info(f'Plugin {plugin.name} is blacklisted')
                continue

            # An empty whitelist means "allow everything".
            if whitelist and plugin.name not in whitelist:
                logger.info(f'Plugin {plugin.name} is not whitelisted')
                continue

            plugin.activate(self.event_queue)

        logger.info(f'{len(self.plugins)} plugins loaded')
5
src/zvk/bot/event_type.py
Normal file
5
src/zvk/bot/event_type.py
Normal file
@@ -0,0 +1,5 @@
|
||||
from enum import Enum, auto
|
||||
|
||||
|
||||
class BotEventType(Enum):
    """Event types emitted by the bot core itself (see Bot.run)."""

    # Fired once when the bot's event queue starts; plugins hook this to
    # begin their work.
    STARTUP = auto()
64
src/zvk/bot/plugin.py
Normal file
64
src/zvk/bot/plugin.py
Normal file
@@ -0,0 +1,64 @@
|
||||
import importlib
|
||||
import re
|
||||
from typing import Set
|
||||
|
||||
from zvk.event.consumer import EventConsumer
|
||||
from zvk.event.queue import EventQueue
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
class Plugin:
    """A plugin module discovered on disk, holding its event consumers.

    Lifecycle: construct with a file path, `read()` to import the module
    and collect EventConsumer objects, then `activate()`/`deactivate()`
    to (de)register those consumers with an EventQueue.

    Attributes:
        path: Filesystem path the plugin was discovered at.
        import_path: Dotted module path derived from `path`.
        name: Plugin name -- the import path minus the `zvk.plugins.` prefix.
        consumers: EventConsumer objects found in the module.
        is_degenerate: True when read() found no consumers at all.
        is_activated: True while the consumers are registered with a queue.
    """

    path: str
    import_path: str
    name: str

    consumers: Set[EventConsumer]
    is_degenerate: bool
    is_activated: bool

    def __init__(self, path):
        """Derive import path and name from a source file path.

        Args:
            path: Path like 'src/zvk/plugins/foo/bar.py'.
        """
        self.path = path

        # 'src/zvk/plugins/foo.py' -> 'zvk.plugins.foo'
        self.import_path = self.path
        self.import_path = re.sub(r'/', '.', self.import_path)
        # BUG FIX: the original pattern r'.py$' left the dot unescaped, so
        # it matched ANY character before a trailing 'py' (mangling e.g. a
        # path ending in 'happy' into 'ha').  Escape it to match only a
        # literal '.py' extension.
        self.import_path = re.sub(r'\.py$', '', self.import_path)
        self.import_path = re.sub(r'^src\.', '', self.import_path)

        self.name = re.sub(r'^zvk\.plugins\.', '', self.import_path)

        self.consumers = set()
        self.is_degenerate = False
        self.is_activated = False

    def read(self):
        """Import the plugin module and collect its EventConsumer objects."""
        logger.info(f'Reading plugin {self.name}')

        module = importlib.import_module(self.import_path)

        for sub_name in dir(module):
            sub = getattr(module, sub_name)

            if isinstance(sub, EventConsumer):
                self.consumers.add(sub)

                logger.debug(f'Found a consumer {self.name} -> {sub_name} = {sub}')

        # No consumers means this module is not really a plugin.
        if len(self.consumers) == 0:
            self.is_degenerate = True

    def activate(self, queue: EventQueue):
        """Register all of this plugin's consumers with `queue`."""
        for consumer in self.consumers:
            queue.register_consumer(consumer)

        logger.info(f'Plugin {self.name} activated')
        self.is_activated = True

    def deactivate(self, queue: EventQueue):
        """Deregister all of this plugin's consumers from `queue`."""
        for consumer in self.consumers:
            queue.deregister_consumer(consumer)

        logger.info(f'Plugin {self.name} deactivated')
        self.is_activated = False

    def __str__(self):
        return self.name
26
src/zvk/bot/trunk.py
Normal file
26
src/zvk/bot/trunk.py
Normal file
@@ -0,0 +1,26 @@
|
||||
import asyncio
|
||||
from typing import Dict, Any
|
||||
|
||||
|
||||
class Trunk:
    """An async key/value store whose readers can await not-yet-set keys.

    `get` awaits a per-key future, so a consumer may ask for a key before
    any producer has set it; the awaiting coroutine wakes once `set` runs.
    """

    # key -> future that holds (or will hold) the key's value
    contents: Dict[str, asyncio.Future]
    # the running event loop; None until initialize() is called
    loop: asyncio.AbstractEventLoop

    def __init__(self):
        self.contents = dict()
        self.loop = None

    def initialize(self) -> None:
        """Bind to the running event loop; must be called from inside it."""
        self.loop = asyncio.get_running_loop()

    def set(self, key, value) -> None:
        """Publish `value` under `key`, waking any coroutines awaiting it.

        BUG FIX: setting an already-set key used to raise
        asyncio.InvalidStateError (set_result on a done future).  A done
        future is now replaced, so repeated sets update the value; pending
        futures are kept so existing waiters are woken by set_result.
        """
        future = self.contents.get(key)

        if future is None or future.done():
            future = self.loop.create_future()
            self.contents[key] = future

        future.set_result(value)

    async def get(self, key) -> Any:
        """Await and return the value stored under `key`."""
        if key not in self.contents:
            self.contents[key] = self.loop.create_future()

        return await self.contents[key]
0
src/zvk/event/__init__.py
Normal file
0
src/zvk/event/__init__.py
Normal file
87
src/zvk/event/consumer.py
Normal file
87
src/zvk/event/consumer.py
Normal file
@@ -0,0 +1,87 @@
|
||||
from typing import List, AsyncGenerator
|
||||
|
||||
from zvk.bot.event_type import BotEventType
|
||||
from zvk.event.event import EventType, CoroutineFactory, Event
|
||||
from zvk.event.reflection import run_with_env
|
||||
|
||||
|
||||
async def async_generator_adapter(coro) -> AsyncGenerator[Event, None]:
    """Normalize an awaitable into an async generator of events.

    An async generator has its items forwarded downstream; a plain
    awaitable is simply awaited and yields nothing.

    Raises:
        ValueError: if `coro` is neither an async generator nor awaitable.
    """
    is_async_generator = hasattr(coro, '__aiter__') and hasattr(coro, '__anext__')

    if is_async_generator:
        # Forward every event the generator produces.
        async for produced in coro:
            yield produced
        return

    if hasattr(coro, '__await__'):
        # A plain awaitable: run it purely for its side effects.
        await coro
        return

    raise ValueError(f'{coro} is not an awaitable at all')
||||
|
||||
|
||||
class EventConsumer:
    """
    Wrapper around a coroutine factory (async def ...) that feeds it events of declared types.

    Attributes:
        consumes: List of `EventType`s that this consumer is going to trigger on.
        coroutine_factory: `Callable` that is going to be called when corresponding events happen.
    """

    consumes: List[EventType]
    coroutine_factory: CoroutineFactory

    def __init__(self, consumes: List[EventType]):
        self.consumes = consumes
        # Filled in by the first __call__, i.e. when used as a decorator.
        self.coroutine_factory = None

    def __call__(self, *args, **kwargs):
        # Dual-purpose call:
        # 1) decorator application -- the first call receives the wrapped
        #    async function and stores it, returning self so the decorated
        #    name in the plugin module is bound to this EventConsumer;
        if self.coroutine_factory is None:
            self.coroutine_factory = args[0]
            return self

        # 2) afterwards, calling the consumer just invokes the wrapped
        #    factory directly.
        return self.coroutine_factory(*args, **kwargs)

    async def consume(self, event: Event) -> AsyncGenerator[Event, None]:
        """
        This is an async generator that consumes an event and optionally generates a sequence of other events.

        Args:
            event: Event to consume.

        Yields:
            Events produced by the consumer.
        """

        # run_with_env presumably matches the factory's parameters against
        # the event's env by name (see zvk.event.reflection) -- TODO confirm.
        coro = run_with_env(event.env, self.coroutine_factory)

        # The adapter makes plain coroutines and async generators look alike.
        async for event in async_generator_adapter(coro):
            yield event
||||
|
||||
|
||||
# def async_gen_adapter()
|
||||
|
||||
def event_consumer(consumes: List[str]) -> EventConsumer:
    """Parameterized decorator turning an async function into an EventConsumer.

    Must be used with arguments, `@event_consumer([...])`; bare decoration
    (`@event_consumer`) is rejected.

    Args:
        consumes: List of `EventType`s this consumer subscribes to.

    Returns:
        A fresh EventConsumer that captures the wrapped coroutine factory
        on its first call.

    Raises:
        TypeError: when applied directly to a function without arguments.
    """
    if not callable(consumes):
        # Normal path: return the consumer object to act as the decorator.
        return EventConsumer(consumes=consumes)

    # `consumes` is the decorated function itself, i.e. @event_consumer
    # without parentheses -- this API forbids that.
    raise TypeError('Direct decoration is forbidden')
||||
|
||||
|
||||
def on_startup(func=None) -> EventConsumer:
    """Decorator registering a coroutine to run on bot startup.

    Works both bare (`@on_startup`) and called (`@on_startup()`).
    """
    startup_consumer = EventConsumer(consumes=[BotEventType.STARTUP])

    # Bare form: `func` is the coroutine factory -- bind it immediately.
    # Called form: `func` is None -- return the consumer to act as decorator.
    return startup_consumer(func) if callable(func) else startup_consumer
50
src/zvk/event/event.py
Normal file
50
src/zvk/event/event.py
Normal file
@@ -0,0 +1,50 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from typing import Dict, Callable, Hashable, Any
|
||||
|
||||
EventType = Hashable
|
||||
CoroutineFactory = Callable
|
||||
|
||||
|
||||
@dataclass
|
||||
class Event:
|
||||
"""
|
||||
Describes a pretty general event.
|
||||
|
||||
Attributes:
|
||||
event_type: A reference for event consumers. Can be anything hashable.
|
||||
env: Environment that the event consumers are going to run inside. Will be used for fancy parameter substitution.
|
||||
"""
|
||||
|
||||
event_type: EventType
|
||||
env: Dict[str, Any]
|
||||
|
||||
def __init__(self, event_type, **kwargs):
|
||||
if not hasattr(event_type, '__hash__'):
|
||||
raise ValueError(f'Bad event type {event_type}')
|
||||
|
||||
self.event_type = event_type
|
||||
|
||||
self.env = dict()
|
||||
self.env['env'] = self.env
|
||||
self.env['event'] = self
|
||||
self.env['event_type'] = event_type
|
||||
|
||||
self.env.update(kwargs)
|
||||
|
||||
def prepopulate_env_from(self, previous_event: Event) -> None:
|
||||
"""
|
||||
If an event was produced by a consumer from another event, populate its environment with old values.
|
||||
|
||||
Args:
|
||||
previous_event: Event that produced `self`.
|
||||
"""
|
||||
|
||||
new_env = dict(previous_event.env)
|
||||
new_env.update(self.env)
|
||||
self.env = new_env
|
||||
|
||||
def __str__(self):
|
||||
return f'Event(event_type={self.event_type}, env=#{len(self.env)})'
|
||||
|
49
src/zvk/event/periodic.py
Normal file
49
src/zvk/event/periodic.py
Normal file
@@ -0,0 +1,49 @@
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum, auto
|
||||
|
||||
from zvk.bot.event_type import BotEventType
|
||||
from zvk.event.consumer import EventConsumer
|
||||
from zvk.event.event import Event
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
class PeriodicEventConsumer(EventConsumer):
    """An EventConsumer that re-triggers itself every `period_secs` seconds."""

    def __init__(self, period_secs):
        # A class defined per-instance gives each periodic consumer its own
        # unique tick event type, so ticks route only back to this consumer.
        class SpecificPeriodicTick(Enum):
            TICK = auto()

        self.tick_event_type = SpecificPeriodicTick.TICK

        # First run is driven by STARTUP; every later run by our own tick.
        super().__init__(consumes=[
            BotEventType.STARTUP,
            SpecificPeriodicTick.TICK
        ])
        self.period = timedelta(seconds=period_secs)

        # Timestamp of the current tick's start; set at the top of consume().
        self.tick_started_at = None

    async def consume(self, event: Event):
        self.tick_started_at = datetime.utcnow()

        # Run the wrapped factory and forward whatever it produces.
        async for output_event in super().consume(event):
            yield output_event

        # Sleep only for the remainder of the period, so the tick cadence
        # accounts for how long the work itself took.
        exec_duration = datetime.utcnow() - self.tick_started_at
        wait_duration = self.period - exec_duration

        logger.debug(f'A periodic hook is going to sleep for {wait_duration}')
        if wait_duration > timedelta():
            await asyncio.sleep(wait_duration.total_seconds())

        logger.debug(f'A periodic hook woke up')

        # Emitting our private tick event schedules the next iteration.
        yield Event(self.tick_event_type)
||||
|
||||
|
||||
def periodic(period_secs) -> EventConsumer:
    """Decorator factory: run the wrapped coroutine every `period_secs` seconds.

    Must be called with a period (`@periodic(60)`); bare decoration
    (`@periodic`) is rejected.

    Raises:
        TypeError: when used directly on a function without a period.
    """
    if not callable(period_secs):
        return PeriodicEventConsumer(period_secs)

    # The "period" turned out to be the decorated function itself -- i.e.
    # @periodic without parentheses, which this API forbids.
    raise TypeError('Direct decoration is forbidden')
152
src/zvk/event/queue.py
Normal file
152
src/zvk/event/queue.py
Normal file
@@ -0,0 +1,152 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Set, Dict, List
|
||||
|
||||
from zvk.event.consumer import EventConsumer
|
||||
from zvk.event.event import Event, EventType
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
class Task:
    """One in-flight execution of a consumer against a single event.

    Wraps an asyncio.Task and keeps the owning queue's bookkeeping
    (all_running_tasks, consumer_to_running_tasks, has_finished) in sync.
    """

    queue: EventQueue
    consumer: EventConsumer
    event: Event
    # set by schedule(); None before that
    asyncio_task: asyncio.Task
    # guards finalize() against running twice (run()'s finally + cancel())
    is_finalized: bool

    def __init__(self, queue, consumer, event):
        self.queue = queue
        self.consumer = consumer
        self.event = event
        self.asyncio_task = None
        self.is_finalized = False

    def finalize(self):
        """Remove this task from the queue's bookkeeping (idempotent)."""
        if self.is_finalized:
            return

        self.queue.all_running_tasks.remove(self)

        self.queue.consumer_to_running_tasks[self.consumer].remove(self)

        # Last task out signals the queue that it may be done.
        if len(self.queue.all_running_tasks) == 0:
            self.queue.has_finished.set()

        self.is_finalized = True

    def schedule(self):
        """Start run() on the event loop and register with the queue."""
        self.asyncio_task = asyncio.create_task(self.run())

        self.queue.all_running_tasks.add(self)

        self.queue.consumer_to_running_tasks[self.consumer].add(self)

        # New work in flight, so the queue is no longer finished.
        self.queue.has_finished.clear()

    async def run(self):
        """Feed the event to the consumer and route any events it yields."""
        try:
            output_events_generator = self.consumer.consume(self.event)
            async for output_event in output_events_generator:
                logger.debug(f'Process {self.event} -> {output_event}')
                # Child events inherit the parent's env (own values win).
                output_event.prepopulate_env_from(self.event)
                self.queue._route_event(output_event)
        except asyncio.CancelledError:
            logger.warning(f'A meal was cancelled, perhaps we are shutting down?')
        except Exception:
            # A crashing consumer is removed entirely rather than retried,
            # and the queue is marked dirty so Bot.run() can report failure.
            # TODO: consumer banning/revival
            logger.exception(f'A consumer died eating his cake. What am I gonna do?')

            logger.info(f'Killing offending consumer {self.consumer}')
            self.queue.deregister_consumer(consumer=self.consumer)
            self.queue.is_dirty = True
        finally:
            self.finalize()

    def cancel(self):
        """Cancel the underlying asyncio task and clean up immediately."""
        self.asyncio_task.cancel()
        self.finalize()
||||
|
||||
|
||||
class EventQueue:
    """Routes events to subscribed consumers and tracks the spawned tasks."""

    # Every consumer currently registered.
    all_consumers: Set[EventConsumer]
    # Subscription index: event type -> consumers of that type.
    # Entries are deleted once their consumer set becomes empty.
    event_type_to_consumers: Dict[EventType, Set[EventConsumer]]

    # These hold the Task wrappers defined above, not bare asyncio.Tasks.
    all_running_tasks: Set[Task]
    consumer_to_running_tasks: Dict[EventConsumer, Set[Task]]

    # NOTE(review): is_dead is initialized but never written elsewhere in
    # this chunk — presumably toggled by code outside this view; confirm.
    is_dead: bool
    # Signalled when no tasks remain; created in run(), None before that.
    has_finished: asyncio.Event
    # Set when a consumer crashed and was force-deregistered mid-run.
    is_dirty: bool

    def __init__(self):

        self.all_consumers = set()
        self.event_type_to_consumers = dict()

        self.all_running_tasks = set()
        self.consumer_to_running_tasks = dict()

        self.is_dead = False
        # Created lazily in run() so it binds to the running event loop.
        self.has_finished = None
        self.is_dirty = False

    def _route_event(self, event: Event):
        """Schedule one Task per consumer subscribed to the event's type."""
        logger.debug(f'Routing event {event}')

        if event.event_type not in self.event_type_to_consumers:
            logger.info(f'No consumers defined for event {event}')
            return

        for consumer in self.event_type_to_consumers[event.event_type]:
            Task(self, consumer, event).schedule()

    def register_consumer(self, consumer: EventConsumer) -> None:
        """Add a consumer and subscribe it to every event type it consumes.

        :raises ValueError: if the consumer is already registered.
        """
        if consumer in self.all_consumers:
            raise ValueError(f'Consumer {consumer} is already registered.')

        self.all_consumers.add(consumer)

        # register tasks
        self.consumer_to_running_tasks[consumer] = set()

        # register hooks
        for event_type in consumer.consumes:
            self.event_type_to_consumers.setdefault(event_type, set()).add(consumer)

    def deregister_consumer(self, consumer: EventConsumer) -> None:
        """Remove a consumer, cancelling any of its still-running tasks.

        :raises ValueError: if the consumer is not registered.
        """
        if consumer not in self.all_consumers:
            raise ValueError(f'Consumer {consumer} is not registered.')

        self.all_consumers.remove(consumer)

        # deregister tasks
        # Iterate over a copy: Task.cancel() -> finalize() mutates this set.
        for running_task in set(self.consumer_to_running_tasks[consumer]):
            running_task.cancel()
        del self.consumer_to_running_tasks[consumer]

        # deregister hooks
        for event_type in consumer.consumes:
            self.event_type_to_consumers[event_type].remove(consumer)
            # Drop empty subscription entries so routing lookups stay clean.
            if len(self.event_type_to_consumers[event_type]) == 0:
                del self.event_type_to_consumers[event_type]

    async def run(self, starting_events: List[Event]) -> None:
        """Route the starting events and wait until all spawned work is done."""
        self.has_finished = asyncio.Event()
        # Start in the "finished" state in case routing schedules nothing.
        self.has_finished.set()

        for event in starting_events:
            # Give every root event a handle back to this queue.
            event.env['event_queue'] = self

            self._route_event(event)

        await self.has_finished.wait()

    def omae_wa_mou_shindeiru(self) -> None:
        """Shut down: deregister every consumer, cancelling all their tasks."""
        logger.warning(f'Shutting down the queue')

        for consumer in set(self.all_consumers):
            self.deregister_consumer(consumer)

        assert len(self.all_consumers) == 0
        assert len(self.all_running_tasks) == 0
|
24
src/zvk/event/reflection.py
Normal file
24
src/zvk/event/reflection.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import inspect
|
||||
from typing import Callable
|
||||
|
||||
|
||||
def run_with_env(env: dict, f: Callable):
    """
    Magic to run a function with arguments taken from a dict `env`.

    Each named parameter of `f` is filled from `env`; if `env` lacks it,
    the parameter's own default is used. Variadic parameters
    (``*args`` / ``**kwargs``) cannot be sourced by name, so they are
    skipped and receive nothing (previously such functions always raised).

    :param env: environment with possible arguments.
    :param f: function to run.
    :return: execution result.
    :raises TypeError: if a required parameter is in neither `env` nor
        the function's defaults.
    """
    signature = inspect.signature(f)

    bind = {}
    for name, parameter in signature.parameters.items():
        # *args / **kwargs have no single named value in env; leave them empty.
        if parameter.kind in (inspect.Parameter.VAR_POSITIONAL,
                              inspect.Parameter.VAR_KEYWORD):
            continue
        if name in env:
            bind[name] = env[name]
        elif parameter.default is not inspect.Parameter.empty:
            bind[name] = parameter.default
        else:
            raise TypeError(f'Cannot find desired parameter: {f} wants {name} from {env}')

    return f(**bind)
|
0
src/zvk/misc/__init__.py
Normal file
0
src/zvk/misc/__init__.py
Normal file
427
src/zvk/misc/timetable_pb2.py
Normal file
427
src/zvk/misc/timetable_pb2.py
Normal file
@@ -0,0 +1,427 @@
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: Timetable.proto
# NOTE(review): machine-generated module — change Timetable.proto and
# regenerate with protoc instead of editing this file by hand.

import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor.FileDescriptor(
  name='Timetable.proto',
  package='',
  serialized_pb=_b('\n\x0fTimetable.proto\"\xe2\x01\n\tTimetable\x12\x1a\n\nproperties\x18\x01 \x02(\x0b\x32\x06.Props\x12\x18\n\x07subject\x18\x02 \x03(\x0b\x32\x07.Record\x12\x18\n\x07teacher\x18\x03 \x03(\x0b\x32\x07.Record\x12\x16\n\x05place\x18\x04 \x03(\x0b\x32\x07.Record\x12\x15\n\x04kind\x18\x05 \x03(\x0b\x32\x07.Record\x12\x16\n\x05group\x18\x06 \x03(\x0b\x32\x07.Record\x12\x10\n\x08subgroup\x18\x07 \x03(\t\x12\x17\n\x06lesson\x18\x08 \x03(\x0b\x32\x07.Lesson\x12\x13\n\x04task\x18\t \x03(\x0b\x32\x05.Task\"T\n\x05Props\x12\x12\n\nterm_start\x18\x01 \x02(\x06\x12\x13\n\x0bterm_length\x18\x02 \x02(\x05\x12\x13\n\x0bweeks_count\x18\x03 \x02(\x05\x12\r\n\x05times\x18\x04 \x02(\t\"V\n\x06Record\x12\x0b\n\x03gid\x18\x01 \x02(\x06\x12\x0c\n\x04name\x18\x02 \x02(\t\x12\x11\n\tfull_name\x18\x03 \x01(\t\x12\x10\n\x08\x63olor_id\x18\x04 \x01(\x05\x12\x0c\n\x04link\x18\x05 \x01(\t\"\xc0\x01\n\x06Lesson\x12\x13\n\x0bsubgroup_id\x18\x01 \x01(\x05\x12\x0b\n\x03\x64\x61y\x18\x02 \x02(\x05\x12\x0c\n\x04time\x18\x03 \x02(\t\x12\r\n\x05weeks\x18\x04 \x02(\t\x12\x12\n\nsubject_id\x18\x05 \x02(\x05\x12\x0f\n\x07kind_id\x18\x06 \x01(\x05\x12\x10\n\x08place_id\x18\x07 \x01(\x05\x12\x16\n\nteacher_id\x18\x08 \x03(\x05\x42\x02\x10\x01\x12\x14\n\x08group_id\x18\t \x03(\x05\x42\x02\x10\x01\x12\x12\n\nno_silence\x18\x64 \x01(\x08\"b\n\x04Task\x12\x12\n\nsubject_id\x18\x01 \x02(\x05\x12\x11\n\tday_index\x18\x02 \x02(\x05\x12\r\n\x05title\x18\x03 \x02(\t\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x0f\n\x07\x64one_at\x18\x05 \x01(\x06')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)




# --- message descriptors ---

_TIMETABLE = _descriptor.Descriptor(
  name='Timetable',
  full_name='Timetable',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='properties', full_name='Timetable.properties', index=0,
      number=1, type=11, cpp_type=10, label=2,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='subject', full_name='Timetable.subject', index=1,
      number=2, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='teacher', full_name='Timetable.teacher', index=2,
      number=3, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='place', full_name='Timetable.place', index=3,
      number=4, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kind', full_name='Timetable.kind', index=4,
      number=5, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='group', full_name='Timetable.group', index=5,
      number=6, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='subgroup', full_name='Timetable.subgroup', index=6,
      number=7, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='lesson', full_name='Timetable.lesson', index=7,
      number=8, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='task', full_name='Timetable.task', index=8,
      number=9, type=11, cpp_type=10, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=20,
  serialized_end=246,
)


_PROPS = _descriptor.Descriptor(
  name='Props',
  full_name='Props',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='term_start', full_name='Props.term_start', index=0,
      number=1, type=6, cpp_type=4, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='term_length', full_name='Props.term_length', index=1,
      number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weeks_count', full_name='Props.weeks_count', index=2,
      number=3, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='times', full_name='Props.times', index=3,
      number=4, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=248,
  serialized_end=332,
)


_RECORD = _descriptor.Descriptor(
  name='Record',
  full_name='Record',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='gid', full_name='Record.gid', index=0,
      number=1, type=6, cpp_type=4, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='name', full_name='Record.name', index=1,
      number=2, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='full_name', full_name='Record.full_name', index=2,
      number=3, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='color_id', full_name='Record.color_id', index=3,
      number=4, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='link', full_name='Record.link', index=4,
      number=5, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=334,
  serialized_end=420,
)


_LESSON = _descriptor.Descriptor(
  name='Lesson',
  full_name='Lesson',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='subgroup_id', full_name='Lesson.subgroup_id', index=0,
      number=1, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='day', full_name='Lesson.day', index=1,
      number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='time', full_name='Lesson.time', index=2,
      number=3, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='weeks', full_name='Lesson.weeks', index=3,
      number=4, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='subject_id', full_name='Lesson.subject_id', index=4,
      number=5, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='kind_id', full_name='Lesson.kind_id', index=5,
      number=6, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='place_id', full_name='Lesson.place_id', index=6,
      number=7, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='teacher_id', full_name='Lesson.teacher_id', index=7,
      number=8, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
    _descriptor.FieldDescriptor(
      name='group_id', full_name='Lesson.group_id', index=8,
      number=9, type=5, cpp_type=1, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=_descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))),
    _descriptor.FieldDescriptor(
      name='no_silence', full_name='Lesson.no_silence', index=9,
      number=100, type=8, cpp_type=7, label=1,
      has_default_value=False, default_value=False,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=423,
  serialized_end=615,
)


_TASK = _descriptor.Descriptor(
  name='Task',
  full_name='Task',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='subject_id', full_name='Task.subject_id', index=0,
      number=1, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='day_index', full_name='Task.day_index', index=1,
      number=2, type=5, cpp_type=1, label=2,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='title', full_name='Task.title', index=2,
      number=3, type=9, cpp_type=9, label=2,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='description', full_name='Task.description', index=3,
      number=4, type=9, cpp_type=9, label=1,
      has_default_value=False, default_value=_b("").decode('utf-8'),
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='done_at', full_name='Task.done_at', index=4,
      number=5, type=6, cpp_type=4, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=617,
  serialized_end=715,
)

# --- cross-links between descriptors ---
_TIMETABLE.fields_by_name['properties'].message_type = _PROPS
_TIMETABLE.fields_by_name['subject'].message_type = _RECORD
_TIMETABLE.fields_by_name['teacher'].message_type = _RECORD
_TIMETABLE.fields_by_name['place'].message_type = _RECORD
_TIMETABLE.fields_by_name['kind'].message_type = _RECORD
_TIMETABLE.fields_by_name['group'].message_type = _RECORD
_TIMETABLE.fields_by_name['lesson'].message_type = _LESSON
_TIMETABLE.fields_by_name['task'].message_type = _TASK
DESCRIPTOR.message_types_by_name['Timetable'] = _TIMETABLE
DESCRIPTOR.message_types_by_name['Props'] = _PROPS
DESCRIPTOR.message_types_by_name['Record'] = _RECORD
DESCRIPTOR.message_types_by_name['Lesson'] = _LESSON
DESCRIPTOR.message_types_by_name['Task'] = _TASK

# --- concrete message classes ---
Timetable = _reflection.GeneratedProtocolMessageType('Timetable', (_message.Message,), dict(
  DESCRIPTOR = _TIMETABLE,
  __module__ = 'Timetable_pb2'
  # @@protoc_insertion_point(class_scope:Timetable)
  ))
_sym_db.RegisterMessage(Timetable)

Props = _reflection.GeneratedProtocolMessageType('Props', (_message.Message,), dict(
  DESCRIPTOR = _PROPS,
  __module__ = 'Timetable_pb2'
  # @@protoc_insertion_point(class_scope:Props)
  ))
_sym_db.RegisterMessage(Props)

Record = _reflection.GeneratedProtocolMessageType('Record', (_message.Message,), dict(
  DESCRIPTOR = _RECORD,
  __module__ = 'Timetable_pb2'
  # @@protoc_insertion_point(class_scope:Record)
  ))
_sym_db.RegisterMessage(Record)

Lesson = _reflection.GeneratedProtocolMessageType('Lesson', (_message.Message,), dict(
  DESCRIPTOR = _LESSON,
  __module__ = 'Timetable_pb2'
  # @@protoc_insertion_point(class_scope:Lesson)
  ))
_sym_db.RegisterMessage(Lesson)

Task = _reflection.GeneratedProtocolMessageType('Task', (_message.Message,), dict(
  DESCRIPTOR = _TASK,
  __module__ = 'Timetable_pb2'
  # @@protoc_insertion_point(class_scope:Task)
  ))
_sym_db.RegisterMessage(Task)


# packed=True option for the repeated int fields.
_LESSON.fields_by_name['teacher_id'].has_options = True
_LESSON.fields_by_name['teacher_id']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
_LESSON.fields_by_name['group_id'].has_options = True
_LESSON.fields_by_name['group_id']._options = _descriptor._ParseOptions(descriptor_pb2.FieldOptions(), _b('\020\001'))
# @@protoc_insertion_point(module_scope)
|
0
src/zvk/plugins/__init__.py
Normal file
0
src/zvk/plugins/__init__.py
Normal file
0
src/zvk/plugins/commands/__init__.py
Normal file
0
src/zvk/plugins/commands/__init__.py
Normal file
55
src/zvk/plugins/commands/control.py
Normal file
55
src/zvk/plugins/commands/control.py
Normal file
@@ -0,0 +1,55 @@
|
||||
from zvk.bot.bot import Bot
|
||||
from zvk.plugins.vk.command import command, Argument
|
||||
from zvk.plugins.vk.command_parser import Echo
|
||||
from zvk.util import emoji
|
||||
|
||||
|
||||
@command('list_plugins',
         permissions=['admin'])
async def command_list_plugins(bot: Bot, echo: Echo):
    """Reply with every loaded plugin and its activation state."""
    plugin_lines = []
    for plugin in bot.plugins.values():
        plugin_lines.append(f'- {plugin} {emoji.CHECK if plugin.is_activated else emoji.CROSS}')

    plugin_str = '\n'.join(sorted(plugin_lines))
    s = f'{len(bot.plugins)} plugins:\n{plugin_str}'

    await echo(s)
|
||||
|
||||
|
||||
async def switch_plugin(bot: Bot, echo: Echo, plugin_name: str, new_state: bool):
    """Activate or deactivate the named plugin, reporting the outcome."""
    plugin = bot.plugins.get(plugin_name)
    if plugin is None:
        await echo('No such plugin')
        return

    if plugin.is_activated == new_state:
        await echo(f'Already {new_state}')
        return

    # Dispatch to the matching transition.
    action = plugin.activate if new_state else plugin.deactivate
    action(bot.event_queue)

    await echo('Ok')
|
||||
|
||||
|
||||
@command('disable_plugin',
         Argument('plugin_name', type=str),
         permissions=['admin'])
async def command_disable_plugin(bot: Bot, echo: Echo, plugin_name: str):
    """Deactivate the named plugin."""
    await switch_plugin(bot, echo, plugin_name, new_state=False)
|
||||
|
||||
|
||||
@command('enable_plugin',
         Argument('plugin_name', type=str),
         permissions=['admin'])
async def command_enable_plugin(bot: Bot, echo: Echo, plugin_name: str):
    """Activate the named plugin."""
    await switch_plugin(bot, echo, plugin_name, new_state=True)
|
||||
|
||||
|
||||
@command('die',
         permissions=['admin'])
async def command_die(bot: Bot, echo: Echo):
    """Acknowledge with a skull, then shut the bot down."""
    await echo(emoji.SKULL)
    bot.die()
|
97
src/zvk/plugins/commands/gi.py
Normal file
97
src/zvk/plugins/commands/gi.py
Normal file
@@ -0,0 +1,97 @@
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from typing import List
|
||||
|
||||
from zvk.plugins.vk.api import VKApi
|
||||
from zvk.plugins.vk.command import command
|
||||
from zvk.plugins.vk.command_parser import Echo
|
||||
from zvk.plugins.vk.upload import upload_image
|
||||
from zvk.util.db import Database
|
||||
from zvk.util.download import download_file
|
||||
from zvk.util.network import Network
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
# Maximum number of images attached to a single `gi` reply.
GI_IMAGES_TO_ATTACH = 10
|
||||
|
||||
|
||||
async def search_google_images(net: Network,
                               q: str,
                               start: int) -> List[str]:
    """Query the Google Custom Search API for images matching `q`.

    :param net: network helper used for the GET request.
    :param q: search query.
    :param start: 1-based index of the first result (the API pages by 10).
    :return: image links; malformed response items are logged and skipped.
    """
    # SECURITY NOTE(review): the API key and cx are hardcoded — they should
    # be moved to configuration/secrets rather than committed in source.
    params = {
        'q': q,
        'searchType': 'image',
        'cx': '000676372658842926074:vgd-eou7wlq',
        'key': 'AIzaSyBO15j9tuSmLhxhXMQyKGL2YzHnFhBP8o4',
        'start': start
    }

    logger.info(f'Calling google api with params {params}')

    _, data = await net.get_json('https://www.googleapis.com/customsearch/v1', params=params)

    results = []
    for item in data.get('items', []):
        link = item.get('link', None)
        if not link:
            logger.warning(f'Unexpected response format: {item}')
            continue
        results.append(link)

    return results
|
||||
|
||||
|
||||
async def download_and_upload_image(db: Database,
                                    net: Network,
                                    api: VKApi,
                                    url: str) -> str:
    """Download `url` locally, then upload it to VK.

    :return: the uploaded attachment identifier, or None when either step
        fails (best-effort: failures are logged, never raised).
    """
    try:
        local_path = await download_file(db, net, url, target_directory='google_images')
        uploaded = await upload_image(db, net, api, local_path)
    except Exception as e:
        logger.warning(f'Could not download/upload {url}: {e}')
        return None
    return uploaded
|
||||
|
||||
|
||||
# Almost no one is evil. Almost everything is broken.
|
||||
|
||||
@command('gi', whole_argstring=True)
async def command_gi(db: Database,
                     net: Network,
                     api: VKApi,
                     argstring: str,
                     echo: Echo):
    """Search Google Images for `argstring` and reply with up to
    GI_IMAGES_TO_ATTACH uploaded images and a timing summary."""
    started_at = datetime.utcnow()

    logger.info(f'Searching google images for request {argstring}')

    # Fetch two API pages (results 1-10 and 11-20) concurrently.
    url_tasks = await asyncio.gather(search_google_images(net, argstring, start=1),
                                     search_google_images(net, argstring, start=11))
    urls = sum(url_tasks, [])

    if len(urls) == 0:
        await echo('No results O_o')
        return

    uploaded_objects = []

    # Start all downloads/uploads at once; keep whichever finish first.
    download_tasks = [download_and_upload_image(db, net, api, url) for url in urls]
    for coro in asyncio.as_completed(download_tasks):
        result = await coro
        # None means that download/upload failed (already logged).
        if result is None:
            continue

        uploaded_objects.append(result)

        if len(uploaded_objects) == GI_IMAGES_TO_ATTACH:
            # NOTE(review): the remaining as_completed tasks keep running in
            # the background after this break — confirm that is intended.
            break

    logger.info(f'From {len(urls)} urls downloaded and uploaded {len(uploaded_objects)} images')

    attachment = ','.join(uploaded_objects)

    await echo(f'"{argstring}": '
               f'{len(uploaded_objects)} images in '
               f'{(datetime.utcnow() - started_at).total_seconds():.2f} secs',
               attachment=attachment)
|
151
src/zvk/plugins/commands/timetable.py
Normal file
151
src/zvk/plugins/commands/timetable.py
Normal file
@@ -0,0 +1,151 @@
|
||||
import json
|
||||
import zipfile
|
||||
import datetime
|
||||
from io import BytesIO
|
||||
|
||||
import pandas
|
||||
from sqlalchemy import Column, Integer, String
|
||||
|
||||
from zvk.misc.timetable_pb2 import Timetable
|
||||
from zvk.plugins.vk.api import VKApi
|
||||
from zvk.plugins.vk.command import command, Argument
|
||||
from zvk.plugins.vk.command_parser import Echo
|
||||
from zvk.util.db import DBBase, Database
|
||||
from zvk.util.network import Network
|
||||
|
||||
|
||||
class TimetableJson(DBBase):
    """Single-row table storing the imported timetable as a JSON blob."""

    id = Column(Integer, primary_key=True)
    # JSON-serialized timetable dict (written by command_import_timetable).
    json = Column(String)
|
||||
|
||||
|
||||
@command('import_timetable',
         permissions=['admin'])
async def command_import_timetable(attachments, echo: Echo, api: VKApi, net: Network, db: Database):
    """Import a timetable from an attached zip containing timetable.pb.

    The zip is fetched via the VK docs API, the protobuf payload is
    converted to a plain dict, and the previous timetable row is replaced.
    """
    if 'attach1' not in attachments:
        await echo('You should attach something')
        return

    doc_id = attachments['attach1']

    # Resolve the attached document to a download URL.
    info = await api.docs.getById(docs=doc_id)

    url = info[0]['url']

    _, timetable_zip_bytes = await net.get_bytes(url)

    timetable_zip = zipfile.ZipFile(BytesIO(timetable_zip_bytes))

    timetable_pb_bytes = timetable_zip.read('timetable.pb')

    timetable = Timetable.FromString(timetable_pb_bytes)

    # term_start is stored in milliseconds since the epoch.
    term_start = datetime.datetime.fromtimestamp(timetable.properties.term_start / 1000)
    weeks_count = timetable.properties.weeks_count

    def convert_weeks(s):
        # Week spec: 'a' = all weeks, 'o' = odd, 'e' = even,
        # 'cN,M,...' = explicit comma-separated week numbers.
        if s == 'a':
            return list(range(1, weeks_count + 1))
        if s == 'o':
            return list(range(1, weeks_count + 1, 2))
        if s == 'e':
            return list(range(2, weeks_count + 1, 2))
        if s.startswith('c'):
            return list(map(int, s[1:].split(',')))

        raise Exception(f'Bad week identifier {s}')

    timetable_dict = {
        'term_start': term_start.timestamp(),
        'weeks_count': weeks_count,
        'lessons': []
    }

    for lesson in timetable.lesson:
        # NOTE(review): ids are used as 1-based indices into the record
        # lists; optional ids (kind_id/place_id) default to 0, which would
        # index -1 (the last element) — confirm the exporter always sets them.
        timetable_dict['lessons'].append({
            'day': lesson.day,
            # 'HHMMHHMM' -> [start, end]
            'time': [lesson.time[:4], lesson.time[4:]],
            'weeks': convert_weeks(lesson.weeks),
            'subject': timetable.subject[lesson.subject_id - 1].name,
            'kind': timetable.kind[lesson.kind_id - 1].name,
            'place': timetable.place[lesson.place_id - 1].name,
            'teachers': [timetable.teacher[teacher_id - 1].name
                         for teacher_id in lesson.teacher_id]
        })

    timetable_json = json.dumps(timetable_dict)

    with db as session:
        # Replace any previously imported timetable wholesale.
        session.query(TimetableJson).delete()

        timetable_json = TimetableJson(json=timetable_json)
        session.add(timetable_json)

    await echo(f'Imported timetable: {len(timetable_dict["lessons"])} lessons')
|
||||
|
||||
|
||||
def get_day_timetable(timetable, now):
    """Pick the lessons for the day containing `now`.

    Day/week numbering is 1-based relative to `timetable['term_start']`
    (a UTC epoch timestamp); weeks are 7 days long.

    :return: (lessons sorted by start time, week number).
    """
    elapsed = now - datetime.datetime.utcfromtimestamp(timetable['term_start'])
    day_index = elapsed // datetime.timedelta(days=1)
    week_number = day_index // 7 + 1
    day = day_index % 7 + 1

    matches = [
        lesson
        for lesson in timetable['lessons']
        if lesson['day'] == day and week_number in lesson['weeks']
    ]
    matches.sort(key=lambda lesson: lesson['time'][0])

    return matches, week_number
|
||||
|
||||
|
||||
def format_lesson(lesson):
    """Render one lesson dict as 'HH:MM->HH:MM kind subject place'."""
    def fmt_time(raw):
        # 'HHMM' -> 'HH:MM'
        return f'{raw[:2]}:{raw[2:]}'

    start, end = lesson["time"]
    return (f'{fmt_time(start)}->{fmt_time(end)} '
            f'{lesson["kind"]} {lesson["subject"]} {lesson["place"]}')
|
||||
|
||||
|
||||
@command('tt',
         Argument('n', type=int, default=2, nargs='?'))
async def command_tt(echo: Echo, db: Database, n):
    """Print the timetable for the next `n` days, one message per day."""
    # Bound n to a sane range (default is 2: today and tomorrow).
    if n <= 0 or n > 10:
        await echo('fuk you')
        return

    with db as session:
        timetable_json = session.query(TimetableJson).first().json

    timetable = json.loads(timetable_json)

    # Human-friendly labels for day offsets relative to today.
    day_names = {
        -1: 'Вчера',
        0: 'Сегодня',
        1: 'Завтра',
        2: 'Послезавтра',
    }

    printed_nothing = True

    # Day offsets 0 .. n-1 (n is validated positive above, so min(0, n) == 0).
    dayds = list(range(min(0, n), max(1, n)))

    for dayd in dayds:
        dt = datetime.datetime.utcnow() + datetime.timedelta(days=dayd)
        # Midnight of that day — used only for the fallback day label below.
        rounded = pandas.Timestamp(dt).floor('d').to_pydatetime()

        tt, week_number = get_day_timetable(timetable, dt)

        fmt = '\n'.join(map(format_lesson, tt))

        # Skip days without lessons entirely.
        if not fmt:
            continue

        day_name = day_names.get(dayd, f'{rounded.strftime("%b %d - %a")}')

        await echo(f'{day_name} Неделя #{week_number}:\n{fmt}')
        printed_nothing = False

    if printed_nothing:
        await echo(f'ниче нету')
|
0
src/zvk/plugins/init/__init__.py
Normal file
0
src/zvk/plugins/init/__init__.py
Normal file
16
src/zvk/plugins/init/identify_self.py
Normal file
16
src/zvk/plugins/init/identify_self.py
Normal file
@@ -0,0 +1,16 @@
|
||||
from zvk.bot.trunk import Trunk
|
||||
from zvk.event.consumer import on_startup
|
||||
from zvk.plugins.vk.api import VKApi
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
@on_startup
async def who_am_i(api: VKApi, trunk: Trunk):
    """Resolve the account that owns the token and publish its identity.

    Stores 'owner_id' and 'owner_name' on the trunk for other plugins.
    """
    me = (await api.users.get())[0]

    uid = me['id']
    full_name = f'{me["first_name"]} {me["last_name"]}'

    trunk.set('owner_id', uid)
    trunk.set('owner_name', full_name)

    logger.info(f'Running @{uid} as {full_name}')
|
41
src/zvk/plugins/init/permissions.py
Normal file
41
src/zvk/plugins/init/permissions.py
Normal file
@@ -0,0 +1,41 @@
|
||||
from typing import List, Set, Dict
|
||||
|
||||
from zvk.bot.trunk import Trunk
|
||||
from zvk.event.consumer import on_startup
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
class PermissionManager:
    """Maps permission-category names to the sets of member user ids."""

    categories: Dict[str, Set[int]]

    def __init__(self, config: dict):
        # Copy config membership lists into mutable sets, one per category.
        self.categories = {
            category: set(members)
            for category, members in config['permissions'].items()
        }

    def update_owner(self, owner_id):
        """Substitute the real owner id for the 0 placeholder everywhere."""
        for members in self.categories.values():
            if 0 in members:
                members.discard(0)
                members.add(owner_id)

    def shall_pass(self, user_id: int, permissions: List[str]):
        """Return True when the user belongs to any of the listed categories."""
        return any(
            user_id in self.categories.get(category, set())
            for category in permissions
        )
|
||||
|
||||
|
||||
@on_startup
async def initialize_permissions(config: dict, trunk: Trunk, bot):
    """Build the PermissionManager from config and expose it on the trunk."""
    manager = PermissionManager(config)

    # Replace the config's 0 placeholder with the resolved owner id.
    manager.update_owner(await trunk.get('owner_id'))

    trunk.set('permissions', manager)

    logger.info(f'{len(manager.categories)} user categories initialized')
|
0
src/zvk/plugins/misc/__init__.py
Normal file
0
src/zvk/plugins/misc/__init__.py
Normal file
9
src/zvk/plugins/misc/always_online.py
Normal file
9
src/zvk/plugins/misc/always_online.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from zvk.event.periodic import periodic
|
||||
from zvk.plugins.vk.api import VKApi
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
@periodic(period_secs=300)
async def periodic_mark_online(api: VKApi):
    """Keep the account shown as online by pinging VK every 5 minutes."""
    # voip=0: plain online mark, no video-call capability advertised.
    await api.account.setOnline(voip=0)
    logger.info('Marked as online')
|
85
src/zvk/plugins/misc/online_tracker.py
Normal file
85
src/zvk/plugins/misc/online_tracker.py
Normal file
@@ -0,0 +1,85 @@
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
from sqlalchemy import DateTime, Integer
|
||||
|
||||
from zvk.bot.trunk import Trunk
|
||||
from zvk.event.consumer import event_consumer
|
||||
from zvk.plugins.vk.api import VKApi
|
||||
from zvk.plugins.vk.event_type import VKEventType
|
||||
from zvk.plugins.vk.longpoll import LongpollEvent
|
||||
from zvk.util.db import DBBase, NNColumn, Database
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
ACTIVITY_BREAK_TIMEOUT = timedelta(minutes=5)
|
||||
|
||||
|
||||
class UserActivityPeriod(DBBase):
    """One contiguous span of time during which a VK user was online."""

    id = NNColumn(Integer, primary_key=True)
    # VK user id this period belongs to.
    user_id = NNColumn(Integer)
    # Start of the online span; set once when the user comes online.
    period_start = NNColumn(DateTime)
    # End of the span; advanced when the user goes offline.
    period_end = NNColumn(DateTime)
|
||||
|
||||
|
||||
@event_consumer(consumes=[LongpollEvent.FIRST_EVENT])
async def online_init(api: VKApi, trunk: Trunk, db: Database):
    """Seed the online-user set from the current state of the friend list.

    Opens a zero-length activity period for every friend already online
    and publishes the set of their ids as 'online_users' on the trunk.
    """
    friends = (await api.friends.get())['items']

    now = datetime.utcnow()
    users = await api.users.get(user_ids=','.join(str(f) for f in friends),
                                fields='online')

    online_users = set()
    with db as session:
        for user in users:
            if user['online'] != 1:
                continue

            online_users.add(user['id'])
            session.add(UserActivityPeriod(
                user_id=user['id'],
                period_start=now,
                period_end=now,
            ))

    logger.info(f'Initialized online tracker: {len(online_users)}')

    trunk.set('online_users', online_users)
|
||||
|
||||
|
||||
@event_consumer(consumes=[VKEventType.USER_CAME_OFFLINE, VKEventType.USER_CAME_ONLINE])
async def online_listener(trunk: Trunk, db: Database, event_type, vk_event_args):
    """Track online/offline transitions and persist activity periods.

    On USER_CAME_ONLINE opens a new UserActivityPeriod; on
    USER_CAME_OFFLINE closes the most recent one.  The 'online_users'
    trunk set mirrors who currently has an open period.
    """
    online_users = await trunk.get('online_users')

    # Longpoll delivers the user id negated for these event types.
    user_id = -vk_event_args[0]
    timestamp = vk_event_args[2]

    with db as session:
        if event_type == VKEventType.USER_CAME_OFFLINE and user_id in online_users:
            # Close the most recent open period for this user.
            last_period = session \
                .query(UserActivityPeriod) \
                .filter_by(user_id=user_id) \
                .order_by(UserActivityPeriod.id.desc()) \
                .first()

            if not last_period:
                return

            # NOTE(review): fromtimestamp() converts to local time while
            # online_init uses utcnow() — confirm the intended timezone.
            last_period.period_end = datetime.fromtimestamp(timestamp)

            online_users.remove(user_id)

        if event_type == VKEventType.USER_CAME_ONLINE and user_id not in online_users:
            session.add(UserActivityPeriod(
                user_id=user_id,
                period_start=datetime.fromtimestamp(timestamp),
                period_end=datetime.fromtimestamp(timestamp),
            ))

            online_users.add(user_id)
|
0
src/zvk/plugins/vk/__init__.py
Normal file
0
src/zvk/plugins/vk/__init__.py
Normal file
86
src/zvk/plugins/vk/api.py
Normal file
86
src/zvk/plugins/vk/api.py
Normal file
@@ -0,0 +1,86 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
|
||||
from zvk.util.network import Network
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
# BOT_MESSAGE_RANDOM_ID_MIN = 1337000000
|
||||
# BOT_MESSAGE_RANDOM_ID_MAX = 1338000000
|
||||
|
||||
API_VERSION = '5.85'
|
||||
|
||||
|
||||
class MagicAccumulatingAttributeCatcher:
    """Accumulates dotted VK method names through attribute access.

    Every attribute lookup returns a new catcher with '.<attr>' appended;
    calling the catcher dispatches the accumulated name on the api.
    """

    api: VKApi
    full_method_name: str

    def __init__(self, api, full_method_name=None):
        self.api = api
        self.full_method_name = '' if full_method_name is None else full_method_name

    def __getattr__(self, item):
        accumulated = f'{self.full_method_name}.{item}'
        return MagicAccumulatingAttributeCatcher(self.api, accumulated)

    async def __call__(self, **kwargs):
        return await self.api.call_method(self.full_method_name, **kwargs)
|
||||
|
||||
|
||||
API_CALL_RETRY_COUNT = 3
|
||||
|
||||
|
||||
class VKApi:
    """
    VK api asynchronous interaction interface. Supports dope syntax like
    `await api.messages.get()`.
    """

    # Token read from config['api']['access_token']; sent with every call.
    _access_token: str
    _net: Network

    def __init__(self, config, net):
        self._access_token = config['api']['access_token']
        self._net = net

    def __getattr__(self, name):
        # Any unknown attribute starts accumulating a dotted method name,
        # e.g. `api.messages.send` -> 'messages.send'.
        return MagicAccumulatingAttributeCatcher(self, name)

    async def call_method(self, full_method_name, **params):
        """Perform one VK API call, retrying on rate-limit errors.

        Returns the 'response' part of the API payload.  Raises
        RuntimeError on any other API error, on a malformed payload, or
        when all API_CALL_RETRY_COUNT attempts are rate-limited.
        """
        url = f'https://api.vk.com/method/{full_method_name}'

        params['access_token'] = self._access_token
        params['v'] = API_VERSION

        # TODO: random_id management
        # if self.name == 'messages.send' and 'random_id' not in params:
        #     params['random_id'] = random.randint(BOT_MESSAGE_RANDOM_ID_MIN, BOT_MESSAGE_RANDOM_ID_MAX)
        # if self.name == 'messages.send' and 'message' in params:
        #     params['message'] = params['message'].replace('--', '--')

        # filter empty values
        params = {k: v for k, v in params.items() if v is not None}

        for retry in range(API_CALL_RETRY_COUNT):
            # sequential=True serializes all API calls through the shared
            # Network lock.
            response, result = await self._net.post_json(url, sequential=True, data=params)

            if 'error' in result:
                error = result['error']
                if error['error_code'] == 6:
                    # VK error 6: too many requests per second — back off.
                    logger.warning('Too many requests, waiting and retrying...')
                    await asyncio.sleep(1)
                    continue

                raise RuntimeError(f'A significant VKApi error occurred {full_method_name}({params}) -> {error}')

            if 'response' not in result:
                raise RuntimeError(f'Malformed api response {full_method_name}({params}) -> {result}')

            logger.debug(f'VKApi call {full_method_name}({params}) -> {result}')

            return result['response']

        raise RuntimeError(f'VKApi call unsuccessful after retries: {full_method_name}({params})')
|
120
src/zvk/plugins/vk/command.py
Normal file
120
src/zvk/plugins/vk/command.py
Normal file
@@ -0,0 +1,120 @@
|
||||
import argparse
|
||||
import shlex
|
||||
from typing import List
|
||||
|
||||
from zvk.event.consumer import EventConsumer
|
||||
from zvk.event.event import Event
|
||||
from zvk.plugins.vk.command_parser import CommandEventType
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
class CommandParseException(Exception):
    """Raised when a command line cannot be parsed.

    `message` is the user-facing text that should be echoed back to chat.
    """

    def __init__(self, message):
        # Forward to Exception so str(e), args and tracebacks carry the
        # message; the original only stored it on self.message.
        super().__init__(message)
        self.message = message
|
||||
|
||||
|
||||
class CommandArgumentParser(argparse.ArgumentParser):
    """ArgumentParser variant that raises instead of printing/exiting.

    Plain argparse prints help/errors to stdout/stderr and calls
    sys.exit(); inside a chat bot both must be turned into exceptions so
    the text can be echoed back to the user.
    """

    def print_help(self, file=None):
        # Suppress direct printing; help text is delivered via exit().
        pass

    def exit(self, status=0, message=None):
        # Called by argparse (e.g. on --help); surface the help text
        # instead of terminating the process.
        raise CommandParseException(self.format_help())

    def error(self, message):
        # Called by argparse on invalid input.
        raise CommandParseException(f'Command .{self.prog} {message}')
|
||||
|
||||
|
||||
class Argument:
    """Deferred spec for a single argparse add_argument() call."""

    def __init__(self, *args, **kwargs):
        # Stored verbatim; CommandEventConsumer later forwards them as
        # parser.add_argument(*self.args, **self.kwargs).
        self.args = args
        self.kwargs = kwargs
|
||||
|
||||
|
||||
class CommandEventConsumer(EventConsumer):
    """Event consumer for a single named chat command.

    Wraps a handler so that incoming command events are permission-checked
    and their argument string is parsed (argparse or raw) before the
    handler runs.
    """

    # Name the command is invoked by (without the leading comma).
    command_name: str
    # When True the raw argument string is passed through unparsed.
    whole_argstring: bool
    parser: CommandArgumentParser
    # None means "no permission check"; otherwise any listed category passes.
    allowed_permission_categories: List[str]

    def __init__(self,
                 command_name: str,
                 *args: Argument,
                 whole_argstring: bool = False,
                 description: str = None,
                 permissions: List[str] = None):
        self.command_name = command_name
        self.whole_argstring = whole_argstring
        self.parser = CommandArgumentParser(prog=self.command_name)

        self.allowed_permission_categories = permissions

        if description is None:
            self.parser.description = 'TODO'
        else:
            self.parser.description = description

        if self.whole_argstring:
            # Raw mode is mutually exclusive with declared arguments.
            if args:
                raise TypeError('Whole argstring cannot have other args')

            self.parser.add_argument('argstring',
                                     type=str,
                                     nargs=argparse.REMAINDER,
                                     metavar='...',
                                     help='the whole command line')

        for arg in args:
            self.parser.add_argument(*arg.args, **arg.kwargs)

        # Subscribe to events keyed by this specific command name.
        super().__init__(consumes=[CommandEventType(command_name=command_name)])

    def parse_argstring(self, argstring: str) -> dict:
        """Parse the raw argument string into handler kwargs.

        Raises CommandParseException on bad quoting or invalid arguments.
        """
        if self.whole_argstring:
            return dict(argstring=argstring)

        try:
            parts = shlex.split(argstring)
        except ValueError as e:
            # shlex raises on unbalanced quotes; relay its message.
            raise CommandParseException(e.args[0])

        namespace = self.parser.parse_args(args=parts)

        return vars(namespace)

    async def consume(self, event: Event):
        """Permission-check and parse the event, then run the handler.

        Yields whatever events the wrapped handler produces.
        """
        if self.allowed_permission_categories is not None:
            from_id = event.env['message'].from_id
            permission_manager = await event.env['trunk'].get('permissions')

            if not permission_manager.shall_pass(from_id, self.allowed_permission_categories):
                # TODO: fancier
                await event.env['echo']('Access denied')
                logger.warning(f'Access denied {from_id} {event}')
                return

        try:
            # Parsed arguments become part of the event environment and
            # are injected into the handler by name.
            event.env.update(self.parse_argstring(event.env['command_argstring']))
        except CommandParseException as e:
            await event.env['echo'](e.message)
            logger.warning(f'Wrong command {event} {e.message}')
            return

        async for output_event in super().consume(event):
            yield output_event
|
||||
|
||||
|
||||
def command(command_name: str,
            *args: Argument,
            whole_argstring: bool = False,
            description: str = None,
            permissions: List[str] = None) -> EventConsumer:
    """Build a CommandEventConsumer for the named chat command.

    Must be called with arguments (`@command('name', ...)`); applying it
    directly as a bare decorator is rejected.
    """
    if callable(command_name):
        # direct decoration
        raise TypeError('Direct command decoration is forbidden')

    consumer = CommandEventConsumer(command_name,
                                    *args,
                                    whole_argstring=whole_argstring,
                                    description=description,
                                    permissions=permissions)
    return consumer
|
66
src/zvk/plugins/vk/command_parser.py
Normal file
66
src/zvk/plugins/vk/command_parser.py
Normal file
@@ -0,0 +1,66 @@
|
||||
import html
|
||||
import json
|
||||
import re
|
||||
from dataclasses import dataclass
|
||||
|
||||
from zvk.event.consumer import event_consumer
|
||||
from zvk.event.event import Event
|
||||
from zvk.plugins.vk.api import VKApi
|
||||
from zvk.plugins.vk.event_type import ParsedEventType
|
||||
from zvk.plugins.vk.message_parser import Message
|
||||
from zvk.util import emoji
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
# old
|
||||
# COMMAND_REGEX = r'\.(\S+)\s*(.*)'
|
||||
|
||||
# starts with comma to avoid collisions with old schema
|
||||
COMMAND_REGEX = r'\,(\S+)\s*(.*)'
|
||||
|
||||
|
||||
def preprocess_argstring(s):
    """Normalize chat text so it can be tokenized like a shell command line."""
    text = html.unescape(s)
    # Undo VK client typography (em-dash, guillemets) and literal <br>
    # tags, in the same order as the original replace chain.
    for old, new in (('—', '--'),
                     ('«', '<<'),
                     ('»', '>>'),
                     ('<br>', '\n')):
        text = text.replace(old, new)
    return text
|
||||
|
||||
|
||||
@dataclass
class CommandEventType:
    """Event-type key identifying one named chat command."""

    command_name: str

    def __hash__(self):
        # @dataclass with eq=True would otherwise set __hash__ to None;
        # instances must stay hashable to serve as event-routing keys.
        return hash(self.command_name)
|
||||
|
||||
|
||||
@dataclass
class Echo:
    """Callable reply helper bound to a single chat (`peer_id`)."""

    api: VKApi
    peer_id: int

    async def __call__(self, message='', notext=False, **kwargs):
        """Send `message` (prefixed with the robot emoji) to the peer.

        With notext=True only the extra kwargs (e.g. attachment=...) are
        sent, with no text body.  Returns the messages.send result.
        """
        if not notext:
            result = await self.api.messages.send(peer_id=self.peer_id, message=f'{emoji.ROBOT}: {message}', **kwargs)
        else:
            result = await self.api.messages.send(peer_id=self.peer_id, **kwargs)

        logger.info(f'Echo to {self.peer_id}: {message}{f"extra: {kwargs}" if kwargs else ""}')

        return result
|
||||
|
||||
|
||||
@event_consumer(consumes=[ParsedEventType.MESSAGE])
async def command_parser(api: VKApi, message: Message):
    """Turn a chat message of the form ',name args' into a command event."""
    match = re.fullmatch(COMMAND_REGEX, message.text)
    if match is None:
        # Not command-shaped; produce no events.
        return

    command_name = match.group(1)
    command_argstring = preprocess_argstring(match.group(2))

    # The event carries an Echo bound to the source chat so handlers can
    # reply, plus the attachments already decoded from JSON.
    yield Event(CommandEventType(command_name),
                command_name=command_name,
                command_argstring=command_argstring,
                echo=Echo(api, message.peer_id),
                attachments=json.loads(message.attachments_json))
|
25
src/zvk/plugins/vk/event_saver.py
Normal file
25
src/zvk/plugins/vk/event_saver.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import json
|
||||
|
||||
from sqlalchemy import Integer, String
|
||||
|
||||
from zvk.event.consumer import event_consumer
|
||||
from zvk.util.db import DBBase, NNColumn, Database
|
||||
from zvk.util.zlogging import logger
|
||||
from zvk.plugins.vk.event_type import VKEventType
|
||||
|
||||
|
||||
class VKEvent(DBBase):
    """Raw longpoll event persisted verbatim for audit/replay."""

    id = NNColumn(Integer, primary_key=True)

    # Numeric VKEventType value of the update.
    vk_event_type_id = NNColumn(Integer)
    # JSON-encoded positional payload of the update.
    vk_event_args_json = NNColumn(String)
|
||||
|
||||
|
||||
@event_consumer(consumes=list(VKEventType))
async def save_event(db: Database, event_type: VKEventType, vk_event_args):
    """Persist every raw longpoll event to the database."""
    logger.debug(f'Persisting event {event_type} {vk_event_args}')

    record = VKEvent(vk_event_type_id=event_type.value,
                     vk_event_args_json=json.dumps(vk_event_args))

    with db as session:
        session.add(record)
|
36
src/zvk/plugins/vk/event_type.py
Normal file
36
src/zvk/plugins/vk/event_type.py
Normal file
@@ -0,0 +1,36 @@
|
||||
from enum import Enum, auto
|
||||
|
||||
|
||||
class VKEventType(Enum):
    """Numeric codes of VK user-longpoll updates (first element of an update)."""

    MESSAGE_FLAG_REPLACEMENT = 1
    MESSAGE_FLAG_SETTING = 2
    MESSAGE_FLAG_REMOVAL = 3

    MESSAGE_NEW = 4
    MESSAGE_EDIT = 5

    MESSAGE_READ_INCOMING = 6
    MESSAGE_READ_OUTGOING = 7

    USER_CAME_ONLINE = 8
    USER_CAME_OFFLINE = 9

    CHAT_FLAG_REPLACEMENT = 10
    CHAT_FLAG_SETTING = 11
    CHAT_FLAG_REMOVAL = 12

    MESSAGE_DELETED = 13
    MESSAGE_RESTORED = 14

    USER_TYPING = 61
    USER_TYPING_CHAT = 62

    CALL_NEW = 70
    UNREAD_COUNTER_UPDATE = 80

    CHAT_NOTIFICATION_SETTINGS_CHANGE = 114
|
||||
|
||||
|
||||
class ParsedEventType(Enum):
    """Higher-level events produced after raw longpoll updates are parsed."""

    MESSAGE = auto()
    MESSAGE_EDIT = auto()
|
94
src/zvk/plugins/vk/longpoll.py
Normal file
94
src/zvk/plugins/vk/longpoll.py
Normal file
@@ -0,0 +1,94 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from enum import Enum, auto
|
||||
|
||||
from zvk.bot.event_type import BotEventType
|
||||
from zvk.util.network import Network
|
||||
from zvk.event.consumer import event_consumer
|
||||
from zvk.event.event import Event
|
||||
from zvk.util.zlogging import logger
|
||||
from zvk.plugins.vk.api import VKApi
|
||||
from zvk.plugins.vk.event_type import VKEventType
|
||||
|
||||
# attachments + extended + extra + random_id
|
||||
LONGPOLL_MODE = 2 | 8 | 64 | 128
|
||||
LONGPOLL_VERSION = 3
|
||||
CALLS_PER_SERVER = 100
|
||||
|
||||
# TODO: increase?
|
||||
WAIT_SECONDS = 25
|
||||
|
||||
|
||||
class LongpollEvent(Enum):
    """Marker events emitted by the longpoll loop itself."""

    # Emitted exactly once, right before the first real update is yielded.
    FIRST_EVENT = auto()
|
||||
|
||||
|
||||
@event_consumer(consumes=[BotEventType.STARTUP])
async def longpoll_loop(bot, net: Network, api: VKApi):
    """Poll the VK user longpoll server forever, yielding VK events.

    The outer loop (re)acquires a longpoll server; the inner loop polls
    it up to CALLS_PER_SERVER times, translating each raw update into an
    Event.  LongpollEvent.FIRST_EVENT is emitted once before the very
    first update.
    """
    last_event_timestamp = None
    sent_announce = False

    while True:
        try:
            server = await api.messages.getLongPollServer()

            # Keep the previous ts across server refreshes so no events
            # are dropped; only seed it on the very first acquisition.
            if last_event_timestamp is None:
                last_event_timestamp = server['ts']

            logger.info(f'Starting longpolling at {last_event_timestamp} from {server["server"]}')

            for longpoll_call in range(CALLS_PER_SERVER):
                poll_url = f'https://{server["server"]}'
                poll_params = {
                    'act': 'a_check',
                    'key': server['key'],
                    'ts': last_event_timestamp,
                    'wait': WAIT_SECONDS,
                    'mode': LONGPOLL_MODE,
                    'version': LONGPOLL_VERSION
                }

                response, payload = await net.get_json(poll_url, params=poll_params)

                failed_code = payload.get('failed', 0)
                if failed_code > 0:
                    # logger.warn is deprecated -> warning
                    logger.warning(f'Longpolling call failed with {payload}')

                    if failed_code == 1:
                        # ts is lost; server supplies a fresh one
                        last_event_timestamp = payload['ts']
                        continue
                    elif failed_code == 2:
                        # key is stale: refetch the server
                        break
                    elif failed_code == 3:
                        # user info is lost: refetch the server
                        break
                    elif failed_code == 4:
                        # bad version: unrecoverable, stop polling entirely.
                        # (Previously fell through to payload['updates'] and
                        # raised a spurious KeyError.)
                        logger.error(f'Bad LP version {LONGPOLL_VERSION}')
                        bot.die()
                        return
                    else:
                        # unknown failure code: refetch the server to be safe
                        break

                logger.debug(f'Longpoll tick @{last_event_timestamp} {len(payload["updates"])} events')

                last_event_timestamp = payload['ts']

                for update in payload['updates']:
                    vk_event_type_id = update[0]
                    vk_event_args = update[1:]

                    vk_event_type = VKEventType(vk_event_type_id)

                    logger.info(f'{vk_event_type} {vk_event_args}')

                    if not sent_announce:
                        yield Event(LongpollEvent.FIRST_EVENT)
                        sent_announce = True

                    yield Event(vk_event_type, vk_event_args=vk_event_args)
        except asyncio.CancelledError:
            raise
        except Exception:
            # Any other failure: log, drop the ts and start over with a
            # fresh server.
            logger.exception(f'Longpoll failed o_O')
            last_event_timestamp = None
|
26
src/zvk/plugins/vk/message_flags.py
Normal file
26
src/zvk/plugins/vk/message_flags.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import Enum
|
||||
from typing import Set
|
||||
|
||||
import numpy
|
||||
|
||||
|
||||
class MessageFlag(Enum):
    """Bit flags attached to longpoll message events (the `flags` field)."""

    UNREAD = 1 << 0
    OUTBOX = 1 << 1
    REPLIED = 1 << 2
    IMPORTANT = 1 << 3
    FRIENDS = 1 << 5
    SPAM = 1 << 6
    DELETED = 1 << 7
    DELETED_FOR_ALL = 1 << 17

    @staticmethod
    def parse_flags(flags: int) -> Set[MessageFlag]:
        """Decode an integer bitmask into the set of flags present."""
        return {i for i in MessageFlag if i.value & flags > 0}

    @staticmethod
    def encode_flags(flags: Set[MessageFlag]) -> int:
        """Encode a set of flags back into an integer bitmask.

        Pure-Python OR fold: always returns a plain int and handles the
        empty set (-> 0), unlike numpy.bitwise_or.reduce which returned a
        numpy scalar and misbehaved on empty input.
        """
        mask = 0
        for flag in flags:
            mask |= flag.value
        return mask
|
130
src/zvk/plugins/vk/message_parser.py
Normal file
130
src/zvk/plugins/vk/message_parser.py
Normal file
@@ -0,0 +1,130 @@
|
||||
import json
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Boolean, DateTime, Integer, String
|
||||
|
||||
from zvk.bot.trunk import Trunk
|
||||
from zvk.event.consumer import event_consumer
|
||||
from zvk.event.event import Event
|
||||
from zvk.plugins.vk.event_type import ParsedEventType, VKEventType
|
||||
from zvk.plugins.vk.message_flags import MessageFlag
|
||||
from zvk.util.db import DBBase, Database, NNColumn
|
||||
|
||||
|
||||
class Message(DBBase):
    """Parsed chat message persisted from longpoll MESSAGE_NEW updates."""

    id = NNColumn(Integer, primary_key=True)

    # VK's own message id; unique across the account.
    message_id = NNColumn(Integer, nullable=False, unique=True)

    from_id = NNColumn(Integer)
    to_id = NNColumn(Integer)
    # Chat/dialog id the message belongs to.
    peer_id = NNColumn(Integer)

    # Raw MessageFlag bitmask exactly as received from longpoll.
    flags = NNColumn(Integer)
    timestamp = NNColumn(DateTime)

    text = NNColumn(String)

    # JSON-encoded longpoll extras (e.g. 'from' for chats) and attachments.
    extra_fields_json = NNColumn(String)
    attachments_json = NNColumn(String)

    random_id = NNColumn(Integer)

    # Derived from MessageFlag.OUTBOX.
    is_outgoing = NNColumn(Boolean)
    # True when the message was produced by the bot itself.
    is_bot_message = NNColumn(Boolean)
|
||||
|
||||
|
||||
@event_consumer(consumes=[VKEventType.MESSAGE_NEW])
async def new_message(db: Database, trunk: Trunk, vk_event_args):
    """Parse a raw MESSAGE_NEW longpoll update, persist it, re-emit it.

    Yields one ParsedEventType.MESSAGE event carrying the stored Message
    row so downstream consumers (e.g. the command parser) can react.
    """
    message_id, flags, peer_id, timestamp, text, extra_fields, attachments, random_id = vk_event_args

    if 'from' in extra_fields:
        # message to a chat: the real sender is carried in extra_fields
        from_id = extra_fields['from']
        to_id = peer_id
    else:
        # direct message: the peer is the sender, the owner the recipient
        from_id = peer_id
        to_id = await trunk.get('owner_id')

    flag_set = MessageFlag.parse_flags(flags)

    is_outgoing = MessageFlag.OUTBOX in flag_set
    is_bot_message = False

    # TODO: api.messages.getById
    # if 'fwd_count' in extra_fields:
    #     pass

    # TODO: also trigger on message edit

    with db as session:
        message = Message(
            message_id=message_id,

            from_id=from_id,
            to_id=to_id,
            peer_id=peer_id,

            flags=flags,
            # NOTE(review): fromtimestamp() converts to local time — confirm
            # this matches how timestamps are consumed elsewhere.
            timestamp=datetime.fromtimestamp(timestamp),

            text=text,

            extra_fields_json=json.dumps(extra_fields),
            attachments_json=json.dumps(attachments),

            random_id=random_id,

            is_outgoing=is_outgoing,
            is_bot_message=is_bot_message,
        )

        session.add(message)

    yield Event(ParsedEventType.MESSAGE, message=message)
|
||||
|
||||
|
||||
# @event_consumer(consumes=[VKEventType.MESSAGE_EDIT])
|
||||
# async def edit_message(db: Database, trunk: Trunk, vk_event_args):
|
||||
# message_id, mask, peer_id, timestamp, new_text, attachments, _ = vk_event_args
|
||||
#
|
||||
#
|
||||
#
|
||||
# flag_set = MessageFlag.parse_flags(flags)
|
||||
#
|
||||
# is_outgoing = MessageFlag.OUTBOX in flag_set
|
||||
# is_bot_message = False
|
||||
#
|
||||
# # TODO: api.messages.getById
|
||||
# # if 'fwd_count' in extra_fields:
|
||||
# # pass
|
||||
#
|
||||
# # TODO: also trigger on message edit
|
||||
#
|
||||
# with db as session:
|
||||
# message = Message(
|
||||
# message_id=message_id,
|
||||
#
|
||||
# from_id=from_id,
|
||||
# to_id=to_id,
|
||||
#
|
||||
# flags=flags,
|
||||
# timestamp=datetime.fromtimestamp(timestamp),
|
||||
#
|
||||
# text=text,
|
||||
#
|
||||
# extra_fields_json=json.dumps(extra_fields),
|
||||
# attachments_json=json.dumps(attachments),
|
||||
#
|
||||
# random_id=random_id,
|
||||
#
|
||||
# is_outgoing=is_outgoing,
|
||||
# is_bot_message=is_bot_message,
|
||||
# )
|
||||
#
|
||||
# session.add(message)
|
||||
#
|
||||
# yield Event(ParsedEventType.MESSAGE, message=message)
|
65
src/zvk/plugins/vk/upload.py
Normal file
65
src/zvk/plugins/vk/upload.py
Normal file
@@ -0,0 +1,65 @@
|
||||
import mimetypes
|
||||
|
||||
from sqlalchemy import Integer, String
|
||||
|
||||
from zvk.plugins.vk.api import VKApi
|
||||
from zvk.util.db import DBBase, NNColumn, Database
|
||||
from zvk.util.network import Network
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
class CachedUpload(DBBase):
    """Cache row mapping a local file path to its uploaded VK object id."""

    id = NNColumn(Integer, primary_key=True)
    local_path = NNColumn(String)
    # VK attachment string, e.g. 'photo<owner>_<id>'.
    vk_object = NNColumn(String)
|
||||
|
||||
|
||||
def is_local_file_an_image(local_path):
    """True when the path's guessed MIME type is JPEG, PNG or GIF."""
    guessed_type, _encoding = mimetypes.guess_type(local_path)
    return guessed_type in ('image/jpeg', 'image/png', 'image/gif')
|
||||
|
||||
|
||||
async def upload_image(db: Database,
                       net: Network,
                       api: VKApi,
                       local_path: str,
                       use_cache: bool = True) -> str:
    """Upload a local image to VK and return its attachment string.

    Returns a 'photo<owner>_<id>' string suitable for messages.send.
    With use_cache=True, previously uploaded paths are served from the
    CachedUpload table without re-uploading.  Raises ValueError for
    non-image paths and RuntimeError when any upload step fails.
    """
    logger.info(f'Uploading local image {local_path}')

    if use_cache:
        with db as session:
            cached_upload = session \
                .query(CachedUpload) \
                .filter_by(local_path=local_path) \
                .first()
            if cached_upload:
                logger.info(f'Found {local_path} in upload cache: {cached_upload.vk_object}')
                return cached_upload.vk_object

    if not is_local_file_an_image(local_path):
        raise ValueError(f'{local_path} does not look like an image!')

    # Step 1: ask VK where to POST the file.
    response = await api.photos.getMessagesUploadServer()
    upload_url = response['upload_url']

    # Step 2: multipart-POST the raw file to the upload server.
    with open(local_path, 'rb') as f:
        files = {'photo': f}

        _, response = await net.post_json(upload_url, data=files)

    if 'server' not in response or 'photo' not in response or 'hash' not in response:
        raise RuntimeError(f'Could not upload {local_path}')

    logger.info(f'Uploaded {local_path} as {response}')

    # Step 3: register the uploaded file as a message photo.
    response = await api.photos.saveMessagesPhoto(**response)
    if len(response) < 1:
        raise RuntimeError(f'Could not save image {local_path}')

    vk_object = f'photo{response[0]["owner_id"]}_{response[0]["id"]}'

    if use_cache:
        with db as session:
            session.add(CachedUpload(local_path=local_path, vk_object=vk_object))

    return vk_object
|
0
src/zvk/util/__init__.py
Normal file
0
src/zvk/util/__init__.py
Normal file
59
src/zvk/util/db.py
Normal file
59
src/zvk/util/db.py
Normal file
@@ -0,0 +1,59 @@
|
||||
from functools import partial
|
||||
from typing import Any, Callable
|
||||
|
||||
from sqlalchemy import create_engine, Column
|
||||
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
|
||||
from sqlalchemy.orm import sessionmaker
|
||||
from sqlalchemy.orm.session import Session
|
||||
|
||||
|
||||
class AutoTableNamer(DeclarativeMeta):
    """Declarative metaclass that derives __tablename__ from the class name."""

    def __new__(cls, name, bases, classdict):
        # Table names are owned by this metaclass; forbid manual overrides.
        if '__tablename__' in classdict:
            raise TypeError(f'Table name already defined for {name}')

        classdict['__tablename__'] = f'{name}_auto'

        res_cls = super().__new__(cls, name, bases, classdict)

        # NOTE(review): purpose of this attribute is unclear — looks like
        # leftover debug state; confirm before removing.
        res_cls.f = 1

        return res_cls
|
||||
|
||||
|
||||
class Database:
    """Thin context-manager wrapper around a SQLAlchemy session factory.

    Usage: ``with db as session: ...`` — commits on normal exit, rolls
    back on exception.  Sessions must not be nested.
    """

    # Currently open session, or None when no `with` block is active.
    session: Session

    def __init__(self, url, **kwargs):
        self.session = None

        self.engine = create_engine(url, **kwargs)

        # expire_on_commit=False lets ORM objects keep their loaded state
        # after the session that produced them has been closed.
        self._session_factory = sessionmaker(bind=self.engine, expire_on_commit=False)

    def __enter__(self) -> Session:
        if self.session is not None:
            raise RuntimeError('nested db sessions')

        self.session = self._session_factory()

        return self.session

    def __exit__(self, exc_type, exc_val, exc_tb):
        # The finally block guarantees the session is closed and the slot
        # freed even if commit()/rollback() itself raises; previously a
        # failing commit left self.session set, making every later
        # `with db` fail with 'nested db sessions'.
        try:
            if exc_type:
                self.session.rollback()
            else:
                self.session.commit()
        finally:
            self.session.close()
            self.session = None

    def create_all(self):
        """Create all tables known to DBBase.metadata on this engine."""
        DBBase.metadata.create_all(bind=self.engine)
|
||||
|
||||
|
||||
# Declarative base whose subclasses get an auto-generated __tablename__.
# (The previous `DBBase: None = ...` annotation wrongly declared the type
# as None; the annotation carried no information and is dropped.)
DBBase = declarative_base(metaclass=AutoTableNamer)

# Column shorthand: non-nullable by default.
NNColumn = partial(Column, nullable=False)
|
98
src/zvk/util/download.py
Normal file
98
src/zvk/util/download.py
Normal file
@@ -0,0 +1,98 @@
|
||||
import json
|
||||
import mimetypes
|
||||
import os
|
||||
|
||||
from slugify import slugify
|
||||
from sqlalchemy import Integer, String
|
||||
|
||||
from zvk.util import paths
|
||||
from zvk.util.db import DBBase, Database, NNColumn
|
||||
from zvk.util.network import Network
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
class CachedDownload(DBBase):
    """Cache row mapping (url, request params) to a local downloaded file."""

    id = NNColumn(Integer, primary_key=True)
    url = NNColumn(String)
    # JSON-encoded query params; part of the cache key together with url.
    params_json = NNColumn(String)
    local_path = NNColumn(String)
|
||||
|
||||
|
||||
async def download_file(db: Database,
                        net: Network,
                        url: str,
                        filename: str = None,
                        target_directory: str = 'unspecified',
                        params: dict = None,
                        use_cache: bool = True) -> str:
    """
    Asynchronously downloads a file from an url and stores it locally.

    Returns the local path of the stored file.  With use_cache=True a
    previous download of the same (url, params) pair is reused; a cache
    row whose file has vanished from disk is purged and re-downloaded.
    Raises RuntimeError on a non-200 response.
    """

    if filename is None:
        # Derive a filesystem-safe name from the url itself.
        filename = slugify(url)

    if params is None:
        params = {}

    # Params are JSON-encoded to serve as part of the cache key.
    params_json = json.dumps(params)

    logger.info(f'Downloading {url}({params}) to {target_directory}/(unknown)')

    if use_cache:
        with db as session:
            cached_download = session \
                .query(CachedDownload) \
                .filter_by(url=url,
                           params_json=params_json) \
                .first()

            if cached_download:
                logger.info(f'Found {url}({params}) in download cache: {cached_download.local_path}')
                if not os.path.exists(cached_download.local_path):
                    # Stale cache entry: file was removed from disk.
                    logger.warning(f'No file from {url}({params}) exists in {cached_download.local_path}, purging from cache')
                    session.delete(cached_download)
                else:
                    return cached_download.local_path

    response, content = await net.get_bytes(url, params=params)

    if response.status != 200:
        raise RuntimeError(f'Got a bad status code {response.status} from {url}({params})')

    logger.info(f'Downloaded {len(content)} bytes from {url}({params})')

    directory = os.path.join(paths.DOWNLOAD_DIR, target_directory)
    os.makedirs(directory, exist_ok=True)

    # Append an extension guessed from the response's content type.
    filename += guess_suffix(response)
    local_path = os.path.join(directory, filename)

    with open(local_path, 'wb') as file:
        file.write(content)

    if use_cache:
        with db as session:
            session.add(CachedDownload(url=url,
                                       params_json=params_json,
                                       local_path=local_path))

    logger.info(f'Saved {len(content)} bytes from {url}({params}) to {local_path}')

    return local_path
|
||||
|
||||
|
||||
def guess_suffix(response):
    """Pick a file suffix for a downloaded file from its MIME type.

    Prefers the Content-Type response header, falling back to guessing
    from the final URL.  Returns '' for unknown or non-image types.
    """
    mimetype = response.headers.get('Content-Type')

    if not mimetype:
        mimetype, _ = mimetypes.guess_type(str(response.real_url))

    # Mapping replaces the previous if/elif chain of f-strings that had
    # no placeholders.
    suffixes = {
        'image/jpeg': '.jpg',
        'image/png': '.png',
        'image/gif': '.gif',
    }
    return suffixes.get(mimetype, '')
|
14
src/zvk/util/emoji.py
Normal file
14
src/zvk/util/emoji.py
Normal file
@@ -0,0 +1,14 @@
|
||||
# Unicode emoji constants used in bot replies.
THUMBS_UP = '👍'
FLEX = '💪'
HEART = '💗'
RESTART = '🔄'
ROBOT = '🤖'
LEMON = '🍋'
PIE = '🍰'
OK = '👌'
HUNDRED = '💯'
COW_FACE = '🐮'
SKULL = '💀'
WINK = '😉'
CHECK = '✅'
CROSS = '❌'
|
107
src/zvk/util/network.py
Normal file
107
src/zvk/util/network.py
Normal file
@@ -0,0 +1,107 @@
|
||||
import asyncio
|
||||
import json
|
||||
from enum import Enum, auto
|
||||
from typing import Any, Tuple
|
||||
|
||||
import aiohttp
|
||||
|
||||
from zvk.util.zlogging import logger
|
||||
|
||||
|
||||
class ReturnType(Enum):
    """How the body of an HTTP response should be decoded by Network._request."""
    BYTES = auto()  # raw response bytes (response.read())
    TEXT = auto()   # decoded text (response.text())
    JSON = auto()   # text parsed with json.loads
|
||||
|
||||
|
||||
class Network:
    """
    Encapsulation of asynchronous network interaction
    """

    _timeout: int
    _lock: asyncio.Lock

    def __init__(self, config):
        # Total per-request timeout, in seconds, from the 'net' config section.
        self._timeout = config['net']['timeout']

        # Created lazily in initialize() so the lock is bound to the running
        # event loop rather than whichever loop exists at construction time.
        self._lock = None

    def initialize(self):
        """Create the sequencing lock; call from inside the event loop."""
        self._lock = asyncio.Lock()

    async def _request(self, method, url, sequential, return_type, **kwargs):
        """Perform one HTTP request and decode the body per *return_type*.

        When *sequential* is true the request is serialized behind
        self._lock so only one such request is in flight at a time.
        Returns (response, decoded_body).
        """
        if sequential:
            await self._lock.acquire()

        try:
            result = None

            async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=self._timeout)) as session:
                async with session.request(method, url, **kwargs) as response:
                    if return_type == ReturnType.BYTES:
                        result = await response.read()

                    if return_type == ReturnType.TEXT:
                        result = await response.text()

                    if return_type == ReturnType.JSON:
                        # Parse manually instead of response.json() so a wrong
                        # Content-Type header does not reject a valid JSON body.
                        result = json.loads(await response.text())
        finally:
            # BUGFIX: the lock was previously released only on the success
            # path, so any network/timeout exception left it held and
            # deadlocked every later sequential request.
            if sequential:
                self._lock.release()

        return response, result

    async def get_bytes(self, url, sequential=False, **kwargs) -> Tuple[aiohttp.ClientResponse, bytes]:
        """GET *url*, returning (response, raw body bytes)."""
        return await self._request(
            method='GET',
            url=url,
            sequential=sequential,
            return_type=ReturnType.BYTES,
            **kwargs
        )

    async def post_bytes(self, url, sequential=False, **kwargs) -> Tuple[aiohttp.ClientResponse, bytes]:
        """POST to *url*, returning (response, raw body bytes)."""
        return await self._request(
            method='POST',
            url=url,
            sequential=sequential,
            return_type=ReturnType.BYTES,
            **kwargs
        )

    async def get_text(self, url, sequential=False, **kwargs) -> Tuple[aiohttp.ClientResponse, str]:
        """GET *url*, returning (response, decoded text body)."""
        return await self._request(
            method='GET',
            url=url,
            sequential=sequential,
            return_type=ReturnType.TEXT,
            **kwargs
        )

    async def post_text(self, url, sequential=False, **kwargs) -> Tuple[aiohttp.ClientResponse, str]:
        """POST to *url*, returning (response, decoded text body)."""
        return await self._request(
            method='POST',
            url=url,
            sequential=sequential,
            return_type=ReturnType.TEXT,
            **kwargs
        )

    async def get_json(self, url, sequential=False, **kwargs) -> Tuple[aiohttp.ClientResponse, Any]:
        """GET *url*, returning (response, JSON-parsed body)."""
        return await self._request(
            method='GET',
            url=url,
            sequential=sequential,
            return_type=ReturnType.JSON,
            **kwargs
        )

    async def post_json(self, url, sequential=False, **kwargs) -> Tuple[aiohttp.ClientResponse, Any]:
        """POST to *url*, returning (response, JSON-parsed body)."""
        return await self._request(
            method='POST',
            url=url,
            sequential=sequential,
            return_type=ReturnType.JSON,
            **kwargs
        )
|
5
src/zvk/util/paths.py
Normal file
5
src/zvk/util/paths.py
Normal file
@@ -0,0 +1,5 @@
|
||||
# Path to the bot's YAML configuration file, relative to the working directory.
CONFIG_PATH = 'config.yaml'

# Glob pattern used to discover plugin modules to load.
PLUGIN_GLOB = 'src/zvk/plugins/**/*.py'

# Directory where downloaded files are stored/cached.
DOWNLOAD_DIR = 'downloads'
|
25
src/zvk/util/zlogging.py
Normal file
25
src/zvk/util/zlogging.py
Normal file
@@ -0,0 +1,25 @@
|
||||
import logging
import sys

# Single shared application logger; the rest of the project imports `logger`
# from this module instead of calling logging.getLogger themselves.
logger = logging.getLogger('zvk')
logger.setLevel(logging.INFO)

# {}-style format: timestamp with milliseconds, right-aligned level,
# truncated module name with line number, then the message.
formatter = logging.Formatter(
    '{asctime}.{msecs:03.0f} {levelname:>8} {module:>15.15}:{lineno:03d} - {message}',
    style='{',
    datefmt='%Y-%m-%d %H:%M:%S')

# Optional file handlers, currently disabled.
# debug_file = logging.FileHandler('debug.log')
# debug_file.setLevel(logging.DEBUG)
# debug_file.setFormatter(formatter)
# logger.addHandler(debug_file)
#
# warning_file = logging.FileHandler('warning.log')
# warning_file.setLevel(logging.WARNING)
# warning_file.setFormatter(formatter)
# logger.addHandler(warning_file)

# Console output goes to stderr; the handler level is left unset, so it
# inherits whatever level the logger itself is configured with.
info_console = logging.StreamHandler(stream=sys.stderr)
# info_console.setLevel(logging.INFO)
info_console.setFormatter(formatter)
logger.addHandler(info_console)
|
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
0
tests/zvk/__init__.py
Normal file
0
tests/zvk/__init__.py
Normal file
0
tests/zvk/bot/__init__.py
Normal file
0
tests/zvk/bot/__init__.py
Normal file
22
tests/zvk/bot/test_bot.py
Normal file
22
tests/zvk/bot/test_bot.py
Normal file
@@ -0,0 +1,22 @@
|
||||
import asyncio

import pytest

from zvk.event.consumer import on_startup


@on_startup
async def kamikaze(bot):
    # Ask the bot to shut down as soon as it has started.
    bot.die()


@pytest.mark.asyncio
async def test_do_commit_die(bot):
    # The bot should start, run the startup consumer, and exit cleanly.
    bot.event_queue.register_consumer(kamikaze)
    assert await bot.run()


def test_nonasync(bot):
    # Same scenario driven from synchronous code via asyncio.run.
    asyncio.run(bot.run())
|
||||
|
||||
|
32
tests/zvk/bot/test_trunk.py
Normal file
32
tests/zvk/bot/test_trunk.py
Normal file
@@ -0,0 +1,32 @@
|
||||
import asyncio

import pytest

from zvk.bot.trunk import Trunk


@pytest.mark.asyncio
async def test_trunk():
    """Trunk.get should deliver a value both to a reader that starts waiting
    before the value is set and to one that asks after it was set."""
    trunk = Trunk()
    trunk.initialize()

    # Mutable cell so the nested coroutines can accumulate into it.
    counter = [0]

    async def f1():
        # Starts waiting before the value exists.
        counter[0] += await trunk.get('thing')

    async def f2():
        await asyncio.sleep(0.1)
        trunk.set('thing', 1)

    async def f3():
        # Reads after the value has already been set.
        await asyncio.sleep(0.2)
        counter[0] += await trunk.get('thing')

    await asyncio.gather(
        f1(),
        f2(),
        f3(),
    )

    # Both readers received the value 1.
    assert counter[0] == 2
|
134
tests/zvk/conftest.py
Normal file
134
tests/zvk/conftest.py
Normal file
@@ -0,0 +1,134 @@
|
||||
from __future__ import annotations

import json
from dataclasses import dataclass
from datetime import datetime
from logging import DEBUG
from typing import Dict, Any, List

import pytest

from main import read_config
from zvk.bot.bot import Bot
from zvk.event.event import Event
from zvk.plugins.vk.event_type import ParsedEventType
from zvk.plugins.vk.message_parser import Message
from zvk.util.db import Database
from zvk.util.zlogging import logger
from zvk.plugins.vk.api import VKApi


# Run the whole test suite with verbose bot logging.
logger.setLevel(DEBUG)


@pytest.fixture(scope='function')
def db(bot) -> Database:
    # Shortcut fixture: the database owned by the per-test bot.
    return bot.db


@pytest.fixture(scope='function')
def api() -> TestingVKApi:
    # Mock VK API pre-loaded with the users.get call the bot issues on
    # startup to identify itself.
    api = TestingVKApi()

    api.expect('users.get').set_result([{'id': 111, 'first_name': 'testing', 'last_name': 'testing'}])

    return api


@pytest.fixture(scope='function')
def bot(api) -> TestingBot:
    # Fresh bot per test; the forward references in the return annotations
    # are legal because of `from __future__ import annotations` above.
    return TestingBot(api)
|
||||
|
||||
|
||||
class TestingBot(Bot):
    """Bot wired for tests: in-memory DB, minimal plugin set, mock VK API."""

    def __init__(self, api: TestingVKApi):
        test_bot_config = read_config()
        # In-memory SQLite keeps tests isolated and fast.
        test_bot_config['db_url'] = 'sqlite:///:memory:'
        # Load only the core plugins that most tests rely on.
        test_bot_config['plugins']['whitelist'] = [
            'vk.command_parser',
            'vk.event_saver',
            'vk.message_parser',
            'init.identify_self',
            'init.permissions'
        ]

        super().__init__(config=test_bot_config)
        self.db.create_all()

        # Scratch counter for tests to accumulate observable side effects.
        self.testing_counter = 0

        # Replace the real VK API with the mock.
        self.api = api

    def dummy_message_event(self, text: str, from_id=None):
        """Build a parsed-message Event with the given text and sender id
        (sender/peer default to 123 when from_id is omitted)."""
        return Event(ParsedEventType.MESSAGE, message=Message(
            message_id=123,
            from_id=123 if from_id is None else from_id,
            peer_id=123 if from_id is None else from_id,
            to_id=0,
            flags=0,
            timestamp=datetime.utcnow(),
            text=text,
            extra_fields_json='{}',
            attachments_json='{}',
            random_id=0,
            is_outgoing=False,
            is_bot_message=False,
        ))
|
||||
|
||||
|
||||
@dataclass
class MockVKApiCall:
    """One expected VK API call together with the canned result to return.

    An expected parameter value of '*' acts as a wildcard matching anything.
    """

    method_name: str
    params: Dict[str, Any]
    result: Any = None
    single_use: bool = True

    def does_match(self, method_name: str, params: Dict[str, Any]) -> bool:
        """Return True when *method_name*/*params* satisfy this expectation."""
        if method_name != self.method_name:
            return False

        # The actual call must supply exactly the expected parameter names.
        if set(params) != set(self.params):
            return False

        # Every expected value must equal the actual one, unless wildcarded.
        return all(
            expected == '*' or expected == params[key]
            for key, expected in self.params.items()
        )

    def set_result(self, result) -> MockVKApiCall:
        """Set the canned result; returns self so calls can be chained."""
        self.result = result
        return self

    def set_single_use(self, single_use) -> MockVKApiCall:
        """Set whether this expectation is consumed on first match; chains."""
        self.single_use = single_use
        return self
|
||||
|
||||
|
||||
class TestingVKApi(VKApi):
    """VKApi replacement that replays pre-registered expectations instead of
    performing real network calls."""

    _expectations: List[MockVKApiCall]

    # noinspection PyMissingConstructor
    def __init__(self):
        # Deliberately skips VKApi.__init__: no credentials or network needed.
        self._expectations = []

    def expect(self, method_name, **params):
        """Register an expected call; returns it so a result can be chained on."""
        mock_vk_api_call = MockVKApiCall(method_name=method_name, params=params)
        self._expectations.append(mock_vk_api_call)
        return mock_vk_api_call

    async def call_method(self, method_name, **params):
        """Return the canned result of the first matching expectation.

        Single-use expectations are consumed on match. Raises ValueError for
        a call no expectation covers, failing the test loudly.
        """
        for expectation in self._expectations:
            if expectation.does_match(method_name, params):
                if expectation.single_use:
                    # Removing while iterating is safe: we return immediately.
                    self._expectations.remove(expectation)

                logger.debug(f'Expected api call {method_name}({params}) -> {expectation.result}')
                return expectation.result

        raise ValueError(f'Unexpected api call {method_name}({params})')
|
0
tests/zvk/event/__init__.py
Normal file
0
tests/zvk/event/__init__.py
Normal file
28
tests/zvk/event/test_async_gen.py
Normal file
28
tests/zvk/event/test_async_gen.py
Normal file
@@ -0,0 +1,28 @@
|
||||
import pytest

from zvk.util.zlogging import logger


@pytest.mark.asyncio
async def test_async_gen():
    """Pin down how `async for` treats coroutines vs async generators."""
    async def f1():
        # Plain coroutine - iterating it must raise.
        logger.info('hi 1')

    async def f2():
        # Async generator yielding a single value.
        logger.info('hi 2')
        yield 1

    async def f3():
        # The unreachable `yield` still makes this an async generator,
        # so iteration succeeds but the loop body never runs.
        logger.info('hi 3')
        if False:
            yield 1

    with pytest.raises(Exception):
        async for i in f1():
            assert i == 1

    async for i in f2():
        assert i == 1

    async for i in f3():
        assert i == 1
|
74
tests/zvk/event/test_event.py
Normal file
74
tests/zvk/event/test_event.py
Normal file
@@ -0,0 +1,74 @@
|
||||
import pytest

from zvk.event.consumer import event_consumer
from zvk.event.event import Event
from zvk.event.queue import EventQueue
from zvk.util.zlogging import logger


# Increments once per 'event1'.
@event_consumer(consumes=['event1'])
async def consumer_simple(counter):
    logger.debug('simple_consumer')
    counter[0] += 1


# Uses its own default argument (the event does not supply default_inc).
@event_consumer(consumes=['event1'])
async def consumer_defaults(counter, default_inc=1):
    logger.debug('simple_consumer_with_defaults')
    counter[0] += default_inc


# The event carries inc=1, which overrides the default of 100.
@event_consumer(consumes=['event1'])
async def consumer_defaults_overridden(counter, inc=100):
    logger.debug('simple_consumer_with_overridden_defaults')
    counter[0] += inc


# Consumer that also produces a follow-up event.
@event_consumer(consumes=['event1'])
async def consumer_producer(counter):
    logger.debug('consumer_producer')
    counter[0] += 1
    yield Event('event2')


@event_consumer(consumes=['event2'])
async def consumer_new_event(counter):
    logger.debug('another_simple_consumer')
    counter[0] += 1

    yield Event('event3', new_inc=1)


# End of the chain: consumes the payload produced above.
@event_consumer(consumes=['event3'])
async def consumer_finisher(counter, new_inc):
    logger.debug('finisher')
    counter[0] += new_inc

    # await event_queue.omae_wa_mou_shindeiru()


@pytest.mark.asyncio
async def test_queue():
    """Each run of 'event1' fans out to +6 on the counter; deregistering the
    producer removes its whole downstream chain (+3 per run instead)."""
    event_queue = EventQueue()

    counter = [0]

    event_queue.register_consumer(consumer_simple)
    event_queue.register_consumer(consumer_defaults)
    event_queue.register_consumer(consumer_defaults_overridden)
    event_queue.register_consumer(consumer_producer)
    event_queue.register_consumer(consumer_new_event)
    event_queue.register_consumer(consumer_finisher)

    starting_events = [Event('event1', counter=counter, inc=1)]

    await event_queue.run(starting_events)
    assert counter[0] == 6

    await event_queue.run(starting_events)
    await event_queue.run(starting_events)
    assert counter[0] == 18

    event_queue.deregister_consumer(consumer_producer)
    await event_queue.run(starting_events)
    assert counter[0] == 21
|
35
tests/zvk/event/test_event_loop.py
Normal file
35
tests/zvk/event/test_event_loop.py
Normal file
@@ -0,0 +1,35 @@
|
||||
import asyncio

import pytest

from zvk.event.consumer import event_consumer
from zvk.event.event import Event
from zvk.event.queue import EventQueue


# Re-emits the same event it consumed, producing an endless self-loop
# ticking roughly every 0.1s.
@event_consumer(consumes=['event_loop'])
async def consumer_loop(event, counter):
    counter[0] += 1
    await asyncio.sleep(0.1)

    yield event


@pytest.mark.asyncio
async def test_event_cancellation():
    """Shutting the queue down must break an otherwise infinite event loop."""
    event_queue = EventQueue()

    counter = [0]

    event_queue.register_consumer(consumer_loop)

    starting_events = [Event('event_loop', counter=counter)]

    queue_task = asyncio.create_task(event_queue.run(starting_events))

    # Let the loop tick a few times (0.25s / 0.1s per tick -> 3 iterations).
    await asyncio.sleep(0.25)

    event_queue.omae_wa_mou_shindeiru()
    await queue_task

    assert counter[0] == 3
|
36
tests/zvk/event/test_periodic.py
Normal file
36
tests/zvk/event/test_periodic.py
Normal file
@@ -0,0 +1,36 @@
|
||||
import asyncio

import pytest

from zvk.bot.event_type import BotEventType
from zvk.event.event import Event
from zvk.event.periodic import periodic
from zvk.event.queue import EventQueue
from zvk.util.zlogging import logger


# Fires every 0.1s once the queue sees the startup event.
@periodic(period_secs=0.1)
async def periodic_f(counter):
    counter[0] += 1
    logger.debug('tick')


@pytest.mark.asyncio
async def test_periodic():
    """A periodic consumer ticks on schedule and stops when the queue dies."""
    event_queue = EventQueue()
    event_queue.register_consumer(periodic_f)

    counter = [0]

    starting_events = [Event(BotEventType.STARTUP, counter=counter)]

    queue_task = asyncio.create_task(event_queue.run(starting_events))

    # 0.45s at one tick per 0.1s -> 5 ticks (including the immediate first one).
    await asyncio.sleep(0.45)
    assert counter[0] == 5

    event_queue.omae_wa_mou_shindeiru()

    # No further ticks after shutdown.
    assert counter[0] == 5

    await queue_task
|
39
tests/zvk/event/test_reflection.py
Normal file
39
tests/zvk/event/test_reflection.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import pytest

from zvk.event.reflection import run_with_env


def test_reflection():
    """run_with_env injects only the arguments a callable declares, honours
    defaults, and raises TypeError when a required argument is missing."""
    calls = 0

    def no_args():
        nonlocal calls
        calls += 1

    # A callable with no parameters needs no environment at all.
    run_with_env(None, no_args)
    assert calls == 1

    def some_args(a, b=1):
        nonlocal calls
        calls += 1

        assert a == 1
        assert b == 2

    # Both values supplied by the environment; default for b is overridden.
    run_with_env(dict(a=1, b=2), some_args)
    assert calls == 2

    def default_check(a, b=1):
        nonlocal calls
        calls += 1

        assert a == 1
        assert b == 1

    # b absent from the environment - the declared default applies.
    run_with_env(dict(a=1), default_check)
    assert calls == 3

    # Required argument `a` missing entirely - must raise, not call.
    with pytest.raises(TypeError):
        run_with_env(None, default_check)

    assert calls == 3
|
51
tests/zvk/event/test_registration.py
Normal file
51
tests/zvk/event/test_registration.py
Normal file
@@ -0,0 +1,51 @@
|
||||
import asyncio

import pytest

from zvk.bot.event_type import BotEventType
from zvk.event.consumer import event_consumer
from zvk.event.event import Event
from zvk.event.periodic import periodic
from zvk.event.queue import EventQueue


# Two identical periodic producers, each emitting an 'inc' event every 0.1s.
@periodic(period_secs=0.1)
async def loop1():
    yield Event('inc')


@periodic(period_secs=0.1)
async def loop2():
    yield Event('inc')


@event_consumer(consumes=['inc'])
async def inc(counter):
    counter[0] += 1


@pytest.mark.asyncio
async def test_event_cancellation():
    """Deregistering one of two periodic producers halves the tick rate."""
    event_queue = EventQueue()

    counter = [0]

    event_queue.register_consumer(loop1)
    event_queue.register_consumer(loop2)
    event_queue.register_consumer(inc)

    starting_events = [Event(BotEventType.STARTUP, counter=counter)]

    queue_task = asyncio.create_task(event_queue.run(starting_events))

    # Two producers x two ticks in 0.15s -> 4 increments.
    await asyncio.sleep(0.15)
    assert counter[0] == 4

    # One producer remaining -> two more increments over the next 0.2s.
    event_queue.deregister_consumer(loop2)
    await asyncio.sleep(0.2)
    assert counter[0] == 6

    event_queue.omae_wa_mou_shindeiru()
    await queue_task

    assert counter[0] == 6
|
0
tests/zvk/plugins/__init__.py
Normal file
0
tests/zvk/plugins/__init__.py
Normal file
0
tests/zvk/plugins/commands/__init__.py
Normal file
0
tests/zvk/plugins/commands/__init__.py
Normal file
56
tests/zvk/plugins/commands/test_timetable.py
Normal file
56
tests/zvk/plugins/commands/test_timetable.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import json
from datetime import datetime

from zvk.misc.timetable_pb2 import Timetable
import zipfile

import numpy as np


def test_read_timetable_pb():
    """Parse a real .ftt fixture (a zip containing a timetable.pb protobuf)
    into a plain dict and check a known teacher appears in it."""
    zfile = zipfile.ZipFile('relics/6407-2.ftt')

    timetable_pb_bytes = zfile.read('timetable.pb')

    timetable = Timetable.FromString(timetable_pb_bytes)

    # term_start is stored as milliseconds since the epoch.
    term_start = datetime.fromtimestamp(timetable.properties.term_start / 1000)
    weeks_count = timetable.properties.weeks_count

    def convert_weeks(s):
        # Week spec: 'a' = all weeks, 'o' = odd, 'e' = even,
        # 'cN,M,...' = explicit comma-separated list.
        if s == 'a':
            return list(range(1, weeks_count + 1))
        if s == 'o':
            return list(range(1, weeks_count + 1, 2))
        if s == 'e':
            return list(range(2, weeks_count + 1, 2))
        if s.startswith('c'):
            return list(map(int, s[1:].split(',')))

        raise Exception(f'Bad week identifier {s}')

    timetable_dict = {
        'term_start': term_start.timestamp(),
        'weeks_count': weeks_count,
        'lessons': []
    }

    for lesson in timetable.lesson:
        # Protobuf ids are 1-based indices into the lookup tables.
        # lesson.time packs start/end as a single 'HHMMHHMM'-style string;
        # split it down the middle.
        timetable_dict['lessons'].append({
            'day': lesson.day,
            'time': [lesson.time[:4], lesson.time[4:]],
            'weeks': convert_weeks(lesson.weeks),
            'subject': timetable.subject[lesson.subject_id - 1].name,
            'kind': timetable.kind[lesson.kind_id - 1].name,
            'place': timetable.place[lesson.place_id - 1].name,
            'teachers': [timetable.teacher[teacher_id - 1].name
                         for teacher_id in lesson.teacher_id]
        })

    timetable_json = json.dumps(timetable_dict)
    print(timetable_json)

    # np.sum over a list of lists concatenates them into one flat list,
    # so this checks the teacher appears in any lesson.
    assert 'Дегтярев А. А.' in np.sum([
        i['teachers']
        for i in timetable_dict['lessons']
    ])
|
0
tests/zvk/plugins/vk/__init__.py
Normal file
0
tests/zvk/plugins/vk/__init__.py
Normal file
33
tests/zvk/plugins/vk/test_command.py
Normal file
33
tests/zvk/plugins/vk/test_command.py
Normal file
@@ -0,0 +1,33 @@
|
||||
import pytest

from zvk.event.consumer import on_startup
from zvk.plugins.vk.command import Argument, command


# Emits four command invocations: two valid (from the admin user 111),
# one with a bad argument value, one from a non-admin sender (123).
@on_startup
async def event_emitter(bot):
    yield bot.dummy_message_event(',a --inc=100', from_id=111)
    yield bot.dummy_message_event(',a --inc=10', from_id=111)
    yield bot.dummy_message_event(',a --inc=a', from_id=111)
    yield bot.dummy_message_event(',a --inc=1', from_id=123)


@command('a', Argument('--inc', type=int, default=0), permissions=['admin'])
async def command_a(bot, inc):
    bot.testing_counter += inc


@pytest.mark.asyncio
async def test_command(bot, api):
    # The parse error and the permission failure are reported back via
    # messages.send; only the two valid calls reach the counter.
    api.expect(method_name='messages.send', peer_id=111,
               message="🤖: Command .a argument --inc: invalid int value: 'a'")

    api.expect(method_name='messages.send', peer_id=123,
               message="🤖: Access denied")

    bot.event_queue.register_consumer(event_emitter)
    bot.event_queue.register_consumer(command_a)

    assert await bot.run()

    assert bot.testing_counter == 110
|
59
tests/zvk/plugins/vk/test_command_parser.py
Normal file
59
tests/zvk/plugins/vk/test_command_parser.py
Normal file
@@ -0,0 +1,59 @@
|
||||
import pytest

from zvk.bot.bot import Bot
from zvk.event.consumer import event_consumer, on_startup
from zvk.event.event import Event
from zvk.plugins.vk.api import VKApi
from zvk.plugins.vk.command_parser import CommandEventType
from zvk.plugins.vk.event_type import VKEventType


# Replays raw VK long-poll MESSAGE_NEW payloads captured from a real session:
# a mix of plain chatter (ignored) and ','-prefixed commands.
@on_startup
async def vk_event_emitter():
    yield Event(VKEventType.MESSAGE_NEW,
                vk_event_args=[528220, 33, 50951365, 1539933254, 'Я в автобусе щас ваще', {'title': ' ... '}, {}, 0])
    yield Event(VKEventType.MESSAGE_NEW,
                vk_event_args=[528392, 532481, 2000000049, 1539947094, ',command1', {'from': '363656437'}, {}, 0])
    yield Event(VKEventType.MESSAGE_NEW,
                vk_event_args=[528393, 532481, 2000000049, 1539947094, ',command2', {'from': '363656437'}, {}, 0])
    yield Event(VKEventType.MESSAGE_NEW,
                vk_event_args=[528397, 33, 173489181, 1539955700, 'Я литералли ходил на перекур с преподом',
                               {'fwd_all_count': '0', 'fwd_count': '1', 'title': ' ... '}, {'fwd': '0_0'}, 0])

    yield Event(VKEventType.MESSAGE_NEW,
                vk_event_args=[540583, 35, 50951365, 1541763627, ',command2 sponge bob square pants', {'title': ' ... '},
                               {}, 431864521])

    yield Event(VKEventType.MESSAGE_NEW,
                vk_event_args=[540974, 8227, 2000000055, 1541779800, ',command3 thinking stock imagte',
                               {'from': '9002294'},
                               {}, 1729925714])


@event_consumer(consumes=[CommandEventType(command_name='command1')])
async def consumer1(bot):
    bot.testing_counter += 1


@event_consumer(consumes=[CommandEventType(command_name='command2')])
async def consumer2(bot):
    bot.testing_counter += 2


@event_consumer(consumes=[CommandEventType(command_name='command3')])
async def consumer3(bot, echo):
    # echo replies into the chat the command came from.
    await echo('chat reply')


@pytest.mark.asyncio
async def test_command_parser(bot: Bot, api: VKApi):
    """command1 fires once (+1), command2 twice (+2 each); command3 replies
    via the mocked messages.send."""
    api.expect('messages.send', peer_id=2000000055, message='🤖: chat reply')

    bot.event_queue.register_consumer(vk_event_emitter)
    bot.event_queue.register_consumer(consumer1)
    bot.event_queue.register_consumer(consumer2)
    bot.event_queue.register_consumer(consumer3)

    assert await bot.run()

    assert bot.testing_counter == 5
|
27
tests/zvk/plugins/vk/test_echo.py
Normal file
27
tests/zvk/plugins/vk/test_echo.py
Normal file
@@ -0,0 +1,27 @@
|
||||
import pytest

from zvk.bot.bot import Bot
from zvk.event.consumer import event_consumer, on_startup
from zvk.plugins.vk.api import VKApi
from zvk.plugins.vk.command_parser import CommandEventType
from zvk.util import emoji


@on_startup
async def vk_event_emitter(bot):
    # Inject one parsed command message at startup.
    yield bot.dummy_message_event('.command1')


@event_consumer(consumes=[CommandEventType(command_name='command1')])
async def command_consumer(echo):
    # echo returns the API call's result - the mock below cans it as 1.
    assert (await echo('hi')) == 1


@pytest.mark.asyncio
async def test_echo(bot: Bot, api: VKApi):
    """echo() should send a robot-prefixed message back to the source peer."""
    api.expect('messages.send', peer_id=123, message=f'{emoji.ROBOT}: hi').set_result(1)

    bot.event_queue.register_consumer(vk_event_emitter)
    bot.event_queue.register_consumer(command_consumer)

    assert await bot.run()
|
49
tests/zvk/plugins/vk/test_signatures.py
Normal file
49
tests/zvk/plugins/vk/test_signatures.py
Normal file
@@ -0,0 +1,49 @@
|
||||
import pytest

from zvk.plugins.vk.command import Argument, CommandEventConsumer, CommandParseException


def test_signatures_easy():
    """A single required positional int argument."""
    a = CommandEventConsumer('a', Argument('n', type=int))

    assert a.parse_argstring('1') == {'n': 1}

    with pytest.raises(Exception):
        assert a.parse_argstring('dsa') == {'n': 1}

    # Bad/missing/unknown arguments all surface as CommandParseException
    # rather than argparse's SystemExit.
    with pytest.raises(CommandParseException):
        assert a.parse_argstring('-h')

    with pytest.raises(CommandParseException):
        assert a.parse_argstring('dsa')

    with pytest.raises(CommandParseException):
        assert a.parse_argstring('')

    with pytest.raises(CommandParseException):
        assert a.parse_argstring('--arg=1')

    # Shell-style quoting is stripped before type conversion.
    assert a.parse_argstring('"1"') == {'n': 1}

    # Unbalanced quote is a parse error, not a crash.
    with pytest.raises(CommandParseException):
        assert a.parse_argstring('"')


def test_signatures_complex():
    """Optional positional, flag with dest, store_true switch, str option."""
    a = CommandEventConsumer('a',
                             Argument('n', nargs='?', type=int, default=0),
                             Argument('-n', '--n', dest='m', type=int, default=2),
                             Argument('-v', action='store_true'),
                             Argument('--s', type=str))

    assert a.parse_argstring('1') == {'n': 1, 'm': 2, 'v': False, 's': None}
    assert a.parse_argstring('--n=1') == {'n': 0, 'm': 1, 'v': False, 's': None}
    assert a.parse_argstring('--n 1') == {'n': 0, 'm': 1, 'v': False, 's': None}
    assert a.parse_argstring('-n 1') == {'n': 0, 'm': 1, 'v': False, 's': None}
    # Combined short options: -v plus -n1.
    assert a.parse_argstring('-vn1') == {'n': 0, 'm': 1, 'v': True, 's': None}


def test_signatures_whole():
    """whole_argstring commands receive the raw string without any parsing."""
    a = CommandEventConsumer('a', whole_argstring=True)

    assert a.parse_argstring('d ksja jd j jj jj --n -h 2') == {'argstring': 'd ksja jd j jj jj --n -h 2'}
|
27
tests/zvk/plugins/vk/test_testing_api.py
Normal file
27
tests/zvk/plugins/vk/test_testing_api.py
Normal file
@@ -0,0 +1,27 @@
|
||||
import pytest

from zvk.event.consumer import on_startup


@on_startup
async def inc(api, bot):
    # Expectations are consumed in registration order; the third is a
    # wildcard. A fourth call has no expectation left and must raise.
    bot.counter += await api.get_magic.inc(type='test')
    bot.counter += await api.get_magic.inc(type='test')
    bot.counter += await api.get_magic.inc(type='test')

    with pytest.raises(ValueError):
        await api.get_magic.inc(type='test')


@pytest.mark.asyncio
async def test_api(bot):
    """Exercise the TestingVKApi mock: ordering, wildcards, exhaustion."""
    bot.api.expect('get_magic.inc', type='test').set_result(1)
    bot.api.expect('get_magic.inc', type='test').set_result(2)
    bot.api.expect('get_magic.inc', type='*').set_result(3)

    bot.counter = 0
    bot.event_queue.register_consumer(inc)

    assert await bot.run()

    assert bot.counter == 6
|
28
tests/zvk/plugins/vk/test_vk_events.py
Normal file
28
tests/zvk/plugins/vk/test_vk_events.py
Normal file
@@ -0,0 +1,28 @@
|
||||
import pytest

from zvk.event.consumer import on_startup
from zvk.event.event import Event
from zvk.plugins.vk.event_saver import VKEvent
from zvk.plugins.vk.message_parser import Message
from zvk.util.db import Database
from zvk.plugins.vk.event_type import VKEventType


# Replays captured long-poll events: one counter update and three messages.
@on_startup
async def vk_event_emitter():
    yield Event(VKEventType.UNREAD_COUNTER_UPDATE, vk_event_args=[3, 0])
    yield Event(VKEventType.MESSAGE_NEW, vk_event_args=[528220, 33, 50951365, 1539933254, 'Я в автобусе щас ваще', {'title': ' ... '}, {}, 0])
    yield Event(VKEventType.MESSAGE_NEW, vk_event_args=[528392, 532481, 2000000049, 1539947094, 'Где философия?', {'from': '363656437'}, {}, 0])
    yield Event(VKEventType.MESSAGE_NEW, vk_event_args=[528397, 33, 173489181, 1539955700, 'Я литералли ходил на перекур с преподом', {'fwd_all_count': '0', 'fwd_count': '1', 'title': ' ... '}, {'fwd': '0_0'}, 0])


@pytest.mark.asyncio
async def test(db: Database, bot, api):
    """All four raw events are saved; only the three messages are parsed."""
    bot.event_queue.register_consumer(vk_event_emitter)

    assert await bot.run()

    with db as session:
        assert session.query(VKEvent).count() == 4

        assert session.query(Message).count() == 3
|
1
tests/zvk/plugins/vk/util.py
Normal file
1
tests/zvk/plugins/vk/util.py
Normal file
@@ -0,0 +1 @@
|
||||
# Placeholder module content - presumably keeps the package importable or
# marks it for future helpers; confirm before removing.
a = 1
|
0
tests/zvk/util/__init__.py
Normal file
0
tests/zvk/util/__init__.py
Normal file
23
tests/zvk/util/test_db.py
Normal file
23
tests/zvk/util/test_db.py
Normal file
@@ -0,0 +1,23 @@
|
||||
from pytest import fixture
from sqlalchemy import Integer, String, Column

from zvk.util.db import Database, DBBase


# Minimal mapped table used only by this test.
class SomeTable(DBBase):
    id = Column(Integer, primary_key=True)

    s = Column(String)


def test_db(db):
    """Rows added inside the session context are visible to a query."""
    db.create_all()

    with db as session:
        session.add(SomeTable(s='123'))
        session.add(SomeTable(s='132'))
        session.add(SomeTable(s='1232132132'))

        assert len(session.query(SomeTable).all()) == 3
|
28
tests/zvk/util/test_download.py
Normal file
28
tests/zvk/util/test_download.py
Normal file
@@ -0,0 +1,28 @@
|
||||
import pytest

from zvk.event.consumer import on_startup
from zvk.util.download import download_file
from zvk.util.zlogging import logger


# NOTE(review): named test_* so pytest may try to collect this startup
# consumer directly as a test too - confirm that is intended.
@on_startup
async def test_action(bot, net, db):
    filename = await download_file(
        db,
        net,
        'https://www.google.com/images/branding/googlelogo/2x/googlelogo_color_272x92dp.png',
    )
    # NOTE(review): this f-string has no placeholders; it presumably was
    # meant to log `filename` - confirm against the original commit.
    logger.info(f'(unknown)')
    filename = await download_file(
        db,
        net,
        'https://yastatic.net/www/_/x/Q/xk8YidkhGjIGOrFm_dL5781YA.svg',
    )
    logger.info(f'(unknown)')


@pytest.mark.asyncio
async def test_download(bot):
    # Live-network test: downloads two real URLs during bot startup.
    bot.event_queue.register_consumer(test_action)

    assert await bot.run()
|
19
tests/zvk/util/test_network.py
Normal file
19
tests/zvk/util/test_network.py
Normal file
@@ -0,0 +1,19 @@
|
||||
import pytest

from zvk.util.network import Network


@pytest.mark.asyncio
async def test_request():
    """Live-network smoke test: GET a real page and sanity-check the body."""
    net = Network({
        'net': {
            'timeout': 10
        }
    })
    net.initialize()

    response, text = await net.get_text('http://ya.ru')

    assert response.status == 200

    assert len(text) > 200
|
Reference in New Issue
Block a user