kopia lustrzana https://github.com/bugout-dev/moonstream
Porównaj commity
1237 Commity
moonstream
...
main
Autor | SHA1 | Data |
---|---|---|
Andrey Dolgolev | 263ee77d22 | |
Andrey | c9319cdbeb | |
Andrey | 0f61a486bc | |
Andrey | 1ab1a9cc38 | |
Andrey Dolgolev | cd7ab58ec1 | |
kompotkot | a7712e405d | |
Andrey | 7de295bf0d | |
Sergei Sumarokov | 565e24231e | |
kompotkot | 34aba694c4 | |
Sergei Sumarokov | a1e3bb44fe | |
kompotkot | b67e0eb908 | |
Andrey Dolgolev | 7962bc1f90 | |
Andrey | 3a2c01f47b | |
kompotkot | 0d955a56c3 | |
Andrey Dolgolev | 62adb3becd | |
Andrey | 436ce38640 | |
Andrey Dolgolev | 6f29d15d07 | |
kompotkot | 87985fd51e | |
Andrey | 37c5b6e4cf | |
Andrey | fa0b626683 | |
kompotkot | ead383414f | |
Sergei Sumarokov | 3634763d81 | |
kompotkot | a741ad720d | |
kompotkot | 6695a3b353 | |
Sergei Sumarokov | e27476ac53 | |
kompotkot | 12dc59ee9b | |
kompotkot | 7faa533046 | |
Andrey | d45333dc0e | |
Andrey | 776a97a8f2 | |
Andrey | abcdc9f7ed | |
Andrey | 19944c124f | |
kompotkot | 7e346cc2ce | |
Andrey Dolgolev | c35b88d25d | |
Andrey | 95aec6bece | |
Andrey | 568250396d | |
Andrey | 54c83a371a | |
Andrey Dolgolev | 1d4ef733c8 | |
Andrey | 348d115819 | |
Sergei Sumarokov | 7db8b88a2f | |
Andrey Dolgolev | da2da94b55 | |
Andrey | 18e650d233 | |
kompotkot | 27e9a2242f | |
Sergei Sumarokov | e2fb5b10e5 | |
Andrey Dolgolev | 3e813a21bc | |
Andrey | 2c71082895 | |
kompotkot | daf47028a4 | |
kompotkot | 15567735c9 | |
kompotkot | 516e507123 | |
Andrey | af1ad369a9 | |
Andrey | 819e0b18b2 | |
Andrey | 63383a40e2 | |
kompotkot | 9a31d145e0 | |
kompotkot | f6e541edab | |
kompotkot | e1c62d9932 | |
kompotkot | 12345e42a3 | |
kompotkot | e700efff5b | |
Andrey Dolgolev | 135af1765c | |
Andrey | 6874ff62f4 | |
Andrey | 8506995513 | |
Sergei Sumarokov | 2caaa82953 | |
Sergei Sumarokov | 3299380916 | |
kompotkot | 03a6601ded | |
kompotkot | 1bb23eeda4 | |
Sergei Sumarokov | 262ea4c792 | |
kompotkot | a6ba053f10 | |
Andrey Dolgolev | f3a3601a88 | |
kompotkot | f6bc86f082 | |
Andrey | 3eb0301304 | |
Andrey | a74d5ce76c | |
Andrey | a31ce2c588 | |
Andrey | 0854656a6e | |
Andrey | 3721ec9f6f | |
Andrey | 3bd8a68bd7 | |
Andrey | c42d8ccd10 | |
Andrey | a15b2f94ed | |
Andrey | ce628b6e0b | |
Andrey | 44764abdaa | |
Andrey | c64eec10f6 | |
Andrey | 911f7e7ebe | |
Sergei Sumarokov | 3cab309c42 | |
kompotkot | 8d1f5f4b4c | |
Sergei Sumarokov | f5ff9427fe | |
kompotkot | 9a5bc824c4 | |
Sergei Sumarokov | 41787fafaa | |
kompotkot | e5f324b841 | |
Sergei Sumarokov | 1d0fcda13c | |
kompotkot | bf653b3202 | |
kompotkot | 8a556b0b1f | |
kompotkot | 6a773965b2 | |
kompotkot | 525abb8a5f | |
kompotkot | 3d81cb79fc | |
Sergei Sumarokov | 50d7c826be | |
kompotkot | ece119afdd | |
Sergei Sumarokov | 13054433da | |
kompotkot | 9bb392dc0b | |
kompotkot | df3365167a | |
Sergei Sumarokov | becb5b8843 | |
kompotkot | c4e313e15c | |
kompotkot | 2221b45bc9 | |
Sergei Sumarokov | 163049b9b6 | |
Sergei Sumarokov | 7994da519b | |
kompotkot | f84788a6c1 | |
kompotkot | 784ff2a740 | |
kompotkot | 49b49153ac | |
Sergei Sumarokov | 25fc91569d | |
kompotkot | c06d9df61b | |
Sergei Sumarokov | 7ec2ee5003 | |
kompotkot | 23b20284f9 | |
Sergei Sumarokov | 1f31738b44 | |
Andrey Dolgolev | c139c488c1 | |
Andrey | 1e0ce12634 | |
Andrey | 220e7f1a7d | |
Andrey | aa08db0473 | |
Andrey | c3dc91b0f5 | |
Andrey | 4a41852bea | |
Andrey Dolgolev | d22606508e | |
Andrey | 46c708db4c | |
Andrey Dolgolev | d235d3b9a7 | |
Andrey | 474ee11a2f | |
Andrey | 6f210a2a69 | |
kompotkot | 640a8d12a0 | |
Andrey | 6923754115 | |
Andrey | cc5791578d | |
Andrey | f9258b2eee | |
Andrey | b5f14d5759 | |
Sergei Sumarokov | 19a17f83e2 | |
kompotkot | 5ab43d398e | |
kompotkot | e406fc5b8a | |
kompotkot | 8af25265b3 | |
kompotkot | b8846f49e2 | |
kompotkot | d85870b525 | |
Sergei Sumarokov | cbbac43d05 | |
kompotkot | f60eb178b9 | |
Sergei Sumarokov | e6989aba3d | |
kompotkot | 7f620e997b | |
Sergei Sumarokov | 45acffa709 | |
kompotkot | 0a092e6fac | |
Sergei Sumarokov | c832392b24 | |
kompotkot | 966eaec1e0 | |
Sergei Sumarokov | cff438e0bb | |
kompotkot | 52712aba3b | |
kompotkot | 5cf319305b | |
kompotkot | 67e540b069 | |
kompotkot | 8196912e6e | |
Sergei Sumarokov | 0b9e4b1008 | |
kompotkot | 3b0729f439 | |
kompotkot | f103d6fac8 | |
kompotkot | fe6bba86d2 | |
Sergei Sumarokov | e8051972f0 | |
kompotkot | d4db66c551 | |
Andrey Dolgolev | 160d7d8209 | |
kompotkot | 5030865ef1 | |
Andrey | 6b6b08533c | |
Andrey Dolgolev | a90f73ad39 | |
kompotkot | 51f6e5d02e | |
Andrey | e96bb59965 | |
Andrey | 4dead75827 | |
Andrey | fa87560385 | |
kompotkot | 44b656fb9b | |
kompotkot | d0cd24dd5f | |
kompotkot | 650d45a334 | |
Andrey Dolgolev | e243442f5c | |
Andrey | da0f089c0d | |
Andrey | d7281b6d14 | |
Sergei Sumarokov | d5d46c7521 | |
kompotkot | b09cc626c9 | |
Sergei Sumarokov | bf447b46dd | |
kompotkot | 93252308f4 | |
kompotkot | f913ec73ae | |
Andrey | 7de9bda100 | |
Andrey | c6b689a9a2 | |
Andrey | 4a9034a117 | |
Andrey | 3f9a2923a4 | |
Andrey | 41b817660a | |
Andrey Dolgolev | 49f99bb6e2 | |
Andrey | 4e8b4960e9 | |
Andrey Dolgolev | fce5f904ad | |
Andrey | 1626bf178f | |
Andrey Dolgolev | 62e7db76e6 | |
Andrey | c5581321e6 | |
Andrey | 0b60395120 | |
Andrey | 62857b08d9 | |
Andrey Dolgolev | 3a0ccb889f | |
Andrey Dolgolev | c939d49296 | |
Andrey Dolgolev | 4bf8df636c | |
Andrey | 1e1459071f | |
Andrey | 01e597aeff | |
Andrey | e3cc04f1c5 | |
Andrey | 168fecfa29 | |
Andrey Dolgolev | cae13d46cd | |
Andrey Dolgolev | 5a1e1e155e | |
Andrey Dolgolev | 192961d008 | |
Sergei Sumarokov | 28dc39c71d | |
Sergei Sumarokov | d571ca24c7 | |
kompotkot | 6d0046961e | |
Sergei Sumarokov | 4bda0d01cc | |
kompotkot | de50816edc | |
Andrey | 657ffd93c1 | |
Andrey | 49065314ae | |
Andrey | 64a4036fd7 | |
Andrey | 10f37621e0 | |
Andrey | 3a0aa8993f | |
Andrey | c9ef01b331 | |
Andrey | 84c61dcf70 | |
Andrey | e8e0c0911c | |
Andrey Dolgolev | 38947e1379 | |
Sergei Sumarokov | 9ebe300e45 | |
kompotkot | 52fdbeede9 | |
Sergei Sumarokov | 2e8ae13bb3 | |
kompotkot | 0d36368dd2 | |
Andrey Dolgolev | dce4d052ce | |
kompotkot | 5a24a517bb | |
kompotkot | 683bc121a7 | |
kompotkot | e1c6f7c13d | |
kompotkot | 41264e663a | |
Andrey Dolgolev | 11f5252b8e | |
kompotkot | b9966558c2 | |
Andrey Dolgolev | 72f5eeee35 | |
Andrey | 346bcae89e | |
Andrey | 06d8f63931 | |
Andrey | c7c89aaadd | |
Andrey | e723dabaaf | |
kompotkot | 32b0088fdb | |
Andrey | 243f367d82 | |
Andrey | 0932c94550 | |
Andrey | 613f8abc0c | |
Andrey | e7be016c98 | |
Andrey | 52aafc328b | |
Andrey | b2e67ffec6 | |
Sergei Sumarokov | 974128528e | |
kompotkot | b7aa897c50 | |
Sergei Sumarokov | c95ba1e50c | |
kompotkot | 03c3a2bc34 | |
Sergei Sumarokov | ecbe6482f7 | |
kompotkot | a3745888ae | |
Sergei Sumarokov | 954cf517f7 | |
kompotkot | 00a5b21505 | |
Andrey Dolgolev | cfebe18257 | |
Andrey | cfeae159d4 | |
Andrey Dolgolev | d8baa5b114 | |
Andrey Dolgolev | 854a473043 | |
Andrey | 7a24e2fb85 | |
Andrey | 4d90055813 | |
Andrey | 919c7c6a79 | |
Andrey | ad9e332d15 | |
Andrey | b8b6d0c8c7 | |
Andrey Dolgolev | b84e484f67 | |
Andrey Dolgolev | 03481546e6 | |
Andrey | 8bf0e723f0 | |
Andrey | 73df559c3e | |
Andrey | 859f2ef278 | |
Andrey Dolgolev | b7ea3445ce | |
Andrey | 8a2e624e09 | |
Andrey Dolgolev | 6693a12896 | |
Andrey Dolgolev | 3b1ff405c4 | |
kompotkot | f9a265b97a | |
Andrey | 691bdbf50f | |
Andrey | a8a0b165a2 | |
Andrey | f04ac73865 | |
Andrey | 5a72358e99 | |
Andrey | 21fb0ca0d7 | |
Andrey | 6554b1632d | |
kompotkot | 12bcd84eb1 | |
kompotkot | 065ff03476 | |
kompotkot | f3b4e3e502 | |
kompotkot | 6b749b5fef | |
kompotkot | 4746d5ceec | |
Sergei Sumarokov | 29c8c2c4a0 | |
kompotkot | ede0093414 | |
Andrey | 799353b39a | |
Sergei Sumarokov | 7ef8f61b7b | |
kompotkot | ea04e61f60 | |
kompotkot | 65f253145c | |
Neeraj Kashyap | 44a79272df | |
Neeraj Kashyap | 354ae7ddc7 | |
Neeraj Kashyap | 9bb8c19283 | |
Neeraj Kashyap | 71e2ffbed3 | |
Andrey | e9273e2d38 | |
Andrey | de2afed559 | |
Andrey | 0eb6c3e2b9 | |
Andrey | 5f90fc4b56 | |
Andrey | aef32862c2 | |
Andrey | 6a250fe518 | |
Andrey | e6b75d59d9 | |
Andrey | 77dfd5d070 | |
Andrey | 324ff58466 | |
Andrey | 5878e66aaf | |
Andrey Dolgolev | c4f1c41f36 | |
Andrey | e217434e4e | |
Andrey | 532e1bdae4 | |
Neeraj Kashyap | 5d6fb5620e | |
Neeraj Kashyap | 825b699b3f | |
Neeraj Kashyap | 90246465e9 | |
Neeraj Kashyap | 88f36f5d63 | |
Neeraj Kashyap | 7295b9c023 | |
Andrey | 949f6c4905 | |
Sergei Sumarokov | a70f26b8e1 | |
kompotkot | 1d2c418873 | |
Neeraj Kashyap | 560ccb6784 | |
Andrey Dolgolev | 1091ad699d | |
Andrey Dolgolev | 2c49f975e8 | |
Andrey Dolgolev | a7a2300d73 | |
Sergei Sumarokov | 559c51b2d6 | |
kompotkot | e9c46f7a5b | |
Andrey | 51359548e4 | |
Andrey | befeb057cf | |
Andrey | 3ce6cf70c6 | |
Andrey | d4c3ebd84d | |
kompotkot | c196c9e0d2 | |
Andrey Dolgolev | 63cd514016 | |
Andrey | 1397f87d54 | |
Sergei Sumarokov | 1bb132d63c | |
Andrey Dolgolev | ddfd2e518d | |
Andrey | 4f3688fcbd | |
Andrey Dolgolev | 06629ee283 | |
kompotkot | 017c921a15 | |
Andrey | c192f278ac | |
Andrey | 017154207c | |
Andrey Dolgolev | 6e35ab9927 | |
Andrey | 01eeb2f0e6 | |
Sergei Sumarokov | 78ea9b845e | |
kompotkot | fa3caefe06 | |
kompotkot | 8051e78f5b | |
Andrey Dolgolev | 6949a4dafe | |
kompotkot | 1439fb10fa | |
kompotkot | 8e9023afc5 | |
Andrey | 105f29dfbe | |
kompotkot | 0ed9ad2ecc | |
kompotkot | 74f956ff64 | |
kompotkot | 7fb0963d24 | |
kompotkot | d3effd952c | |
kompotkot | 69e375ab61 | |
kompotkot | 47b76ec26d | |
kompotkot | a92aeed916 | |
kompotkot | a46afe65d7 | |
kompotkot | 1a282c5811 | |
Sergei Sumarokov | 3bc6070768 | |
kompotkot | 3a6ef4fd03 | |
Sergei Sumarokov | a8c96e4ca7 | |
kompotkot | e27308f809 | |
kompotkot | eed1e2297d | |
Sergei Sumarokov | b4ed1e5c80 | |
Andrey | 0c7839cd91 | |
Andrey | 1ab1d022cc | |
kompotkot | 911042da26 | |
Andrey | ac235cd15d | |
Andrey | 04c3c7aad5 | |
Andrey | 1e848f72d4 | |
Andrey | 5a6c8f5219 | |
Andrey | 827823ab7a | |
Andrey | 80efa41b79 | |
Andrey | 697de0bcc1 | |
Andrey | 58258e8296 | |
Andrey Dolgolev | e69d81d1fb | |
Andrey | 21a0fc4157 | |
Andrey | bb4bc7f778 | |
Andrey | d07ec13bf6 | |
Andrey | 2719d9baba | |
Andrey Dolgolev | 6292aecd3a | |
Andrey | 8502889540 | |
Andrey | 0bb88788ca | |
Andrey | 0f608aac77 | |
Andrey | 49802bf589 | |
Andrey | d5e23ed928 | |
Andrey | edae1f91a1 | |
Sergei Sumarokov | 5aaa4f6e93 | |
kompotkot | 2dd2e28958 | |
Sergei Sumarokov | 969f2496a9 | |
kompotkot | d29c40e907 | |
Andrey Dolgolev | 87997add7d | |
Andrey Dolgolev | ae4de8831c | |
kompotkot | 9f747474d7 | |
Andrey Dolgolev | d6aa376b8e | |
kompotkot | 4c5e6cc327 | |
kompotkot | ec918609db | |
Sergei Sumarokov | 3770730e1c | |
kompotkot | e273f34d06 | |
Andrey Dolgolev | 502a7e21d3 | |
Andrey | 1ddf96b258 | |
Andrey | adea116388 | |
Andrey | c3b690c668 | |
Andrey | 3d85f1767d | |
Andrey | f49b7c4a78 | |
Andrey | 5fcffd7d5e | |
Andrey | 0dbbaf7aa2 | |
Andrey | 561ff35f13 | |
Andrey Dolgolev | a65f1559bf | |
Andrey | 6b9fd67d6b | |
Andrey | cc47bc761b | |
Andrey | 28c660c979 | |
Andrey | 9a722abcbb | |
Andrey | d9f82b7e28 | |
Andrey | 95439825a3 | |
Andrey Dolgolev | 397c83a508 | |
Andrey Dolgolev | 19f9ac62c7 | |
Andrey | a991549ea6 | |
Andrey | 23d0670714 | |
Andrey | cebd0cb777 | |
Andrey | 77ee44884e | |
Andrey | b85cbb5f00 | |
Andrey Dolgolev | f8d067bf7d | |
Andrey | fa92e3260d | |
Andrey | 3b52d8f49f | |
Andrey | fa66d1cebe | |
Andrey | 9165a32f5e | |
Andrey | fbf106eba3 | |
Andrey | 3319d4a8a4 | |
Andrey | cb7950d922 | |
Sergei Sumarokov | fbee1a068c | |
kompotkot | a7ab7063b0 | |
kompotkot | 8686b38aa0 | |
kompotkot | 5e03473705 | |
Andrey Dolgolev | 2f21b64f68 | |
Andrey | 5b38cf1eff | |
Andrey | 20149770aa | |
Andrey Dolgolev | f0ed70696f | |
Andrey | f15d70acd7 | |
Andrey | 0c095b7696 | |
Andrey | 9f376e86f7 | |
Andrey | 1ae366ab7d | |
kompotkot | f1a84d8cec | |
kompotkot | 663c2be9d9 | |
Sergei Sumarokov | 9a7b1ea47e | |
kompotkot | f80edef557 | |
kompotkot | 4ee6ce6406 | |
kompotkot | d4ff8e8870 | |
kompotkot | ba78712cc5 | |
kompotkot | 1bd2f3706b | |
kompotkot | eab292e815 | |
Andrey | c417fc14b6 | |
Andrey | ba6fd3222f | |
Andrey | 1d0f01cb09 | |
Andrey | 0403b1ba0c | |
Sergei Sumarokov | a45ea9e36f | |
Sergei Sumarokov | ef57400c52 | |
Sergei Sumarokov | c6b64cf5ba | |
kompotkot | 927eb85c94 | |
Andrey Dolgolev | ad9c22dd40 | |
kompotkot | 1589c8c65d | |
kompotkot | 2d93c307ab | |
Sergei Sumarokov | 096f55ccbb | |
Sergei Sumarokov | 632b8304b9 | |
kompotkot | e5fc6dcc27 | |
kompotkot | 47db2e534c | |
kompotkot | 252da0cd36 | |
kompotkot | 6dfca4a49b | |
kompotkot | b0d8e17254 | |
kompotkot | 39a64cf454 | |
kompotkot | 3b265ad15d | |
kompotkot | 179370affb | |
Andrey | c4b18145b0 | |
Andrey | 9246950293 | |
Andrey | 29e512538e | |
kompotkot | e854978459 | |
kompotkot | 92dbfe5576 | |
kompotkot | 1e01a89b1b | |
kompotkot | 1ee2436e7f | |
kompotkot | 0878d5b798 | |
kompotkot | 5dfcfd634f | |
Andrey Dolgolev | f5b85d0b5f | |
kompotkot | bbe1530eef | |
kompotkot | a3b0841d10 | |
Andrey | e3072a36ad | |
Andrey | 63347d81f3 | |
Andrey | 9d9fef0059 | |
Andrey | bc06edc635 | |
Andrey | 6462d84d65 | |
Andrey Dolgolev | f820129205 | |
Andrey | 323784f8b2 | |
Andrey | f2f078b9c3 | |
Andrey | be7a7e0267 | |
Andrey | bf7f6497c5 | |
Andrey | 3215e5d719 | |
Andrey | 4d1d242c81 | |
Andrey | 6c11fb3d0f | |
Andrey Dolgolev | 330a7feb3f | |
Andrey | 88d0f91706 | |
Andrey | cb1137fd39 | |
Andrey Dolgolev | 25767fb555 | |
Andrey Dolgolev | 24aeada59c | |
Andrey | 9f38e19d9d | |
Andrey | 4891f1b5fe | |
Andrey | 07734d1b2b | |
Andrey | 0e5ea6f4cc | |
Sergei Sumarokov | 61e21e443b | |
kompotkot | 3bbc47a36d | |
Andrey | 60d98a8627 | |
Andrey | 78bc0770f5 | |
Andrey Dolgolev | 48cea0709e | |
Andrey | 0e1a54fda7 | |
Andrey | e76a779400 | |
Sergei Sumarokov | 4a99250889 | |
kompotkot | f768369f05 | |
Sergei Sumarokov | 0d8f9e9211 | |
kompotkot | 45c748c829 | |
Sergei Sumarokov | dfd659e369 | |
kompotkot | 4c60796c41 | |
Sergei Sumarokov | 7f2a0d3c59 | |
kompotkot | ca10fdb37b | |
Andrey Dolgolev | cd7704ecd2 | |
Andrey | e16558bd9e | |
Andrey Dolgolev | fb0f22c052 | |
Andrey | 40f498e5a9 | |
Andrey Dolgolev | 218da46e47 | |
Andrey Dolgolev | 03fcf456c7 | |
Andrey | bb1bade57b | |
Andrey | 0d6259a52d | |
Andrey | b9d0f1ce7d | |
Andrey | edb9ffcc4e | |
Andrey | 49ab67697e | |
Andrey | 03ce50580b | |
Andrey | 93418ebf76 | |
Andrey | d9873b64c4 | |
Andrey | 3ca04573b8 | |
Andrey | 984115dadc | |
Andrey | 40366d5e06 | |
Andrey | 5cd830a3c4 | |
Andrey Dolgolev | 147819e1ac | |
Andrey Dolgolev | b4960019f7 | |
Sergei Sumarokov | e8a1618dfb | |
kompotkot | 3f53809869 | |
Sergei Sumarokov | 0663fc2031 | |
kompotkot | 622218139b | |
Andrey Dolgolev | e960029673 | |
Andrey Dolgolev | 734bbaac5e | |
Sergei Sumarokov | e807e7b493 | |
kompotkot | 2f162cbf91 | |
kompotkot | c44ab9778c | |
Sergei Sumarokov | 2d1a026bf0 | |
Sergei Sumarokov | f89ecd6fe0 | |
Sergei Sumarokov | e11bad0bb3 | |
Sergei Sumarokov | b1f3d24145 | |
kompotkot | 591be28322 | |
kompotkot | 9b57d7e057 | |
kompotkot | e87a92e47e | |
Andrey | 177c2caf28 | |
Andrey | 60aa576a93 | |
Andrey | 22f4af26a2 | |
Andrey Dolgolev | 6de5552092 | |
Andrey Dolgolev | c4470d321a | |
Sergei Sumarokov | df61dede63 | |
kompotkot | 5bd81bfefa | |
Andrey Dolgolev | afc165a716 | |
Andrey | 9fa794a856 | |
Andrey | ff3899d8d5 | |
Sergei Sumarokov | 421d350435 | |
kompotkot | 0c2fe4a68d | |
Sergei Sumarokov | 1f6105afce | |
Sergei Sumarokov | 358aadcec1 | |
kompotkot | a9186181d8 | |
kompotkot | fefd11a5d3 | |
Andrey | 36d7796baf | |
Andrey | 1a68bd7a1f | |
Andrey | 3ea1e29f04 | |
Andrey | 9d577b676e | |
Andrey | 91ec48d7ac | |
Andrey Dolgolev | fa4baa9ba8 | |
Andrey | 355b286930 | |
Andrey | 7eea2c49b5 | |
Andrey | 114420588d | |
Andrey | 212bf0931f | |
Andrey | 6f8e852386 | |
kompotkot | aa5f0b2891 | |
kompotkot | 1b0a8581d3 | |
Sergei Sumarokov | e7e1e72c5c | |
kompotkot | a7f5e6507f | |
omahs | 867534da92 | |
omahs | c6c0a2721e | |
omahs | 2afd172f15 | |
Andrey Dolgolev | 3cc70294a0 | |
Andrey | 4e2100423d | |
Andrey | c8796aa428 | |
Sergei Sumarokov | 88bee3d4f0 | |
Andrey | f7aec850de | |
kompotkot | 0b1a023bf1 | |
Andrey | e329a807b6 | |
Andrey | 93a620dd67 | |
Andrey | 9596479592 | |
Andrey | 41ef7eaa61 | |
Andrey | fc168a619e | |
kompotkot | fe60493dbc | |
Sergei Sumarokov | 02cb2c3cb0 | |
Sergei Sumarokov | 7b43e0614f | |
Sergei Sumarokov | 2d110a4cbb | |
kompotkot | 9e1697665e | |
Andrey Dolgolev | d16c880aab | |
Andrey | b44322a501 | |
Andrey | 6f4828932a | |
Andrey | 9d134f295b | |
Andrey | d3bdc3f28d | |
Andrey | d27e675df0 | |
Sergei Sumarokov | 12376013ae | |
kompotkot | a3d88d9221 | |
kompotkot | 2c51435cf1 | |
kompotkot | b92342e233 | |
Sergei Sumarokov | 73bc3bdc70 | |
kompotkot | d999f43798 | |
Sergei Sumarokov | 74aed08a6d | |
kompotkot | 9eefd7d67d | |
kompotkot | 16881b19a9 | |
kompotkot | dc1b0e8a17 | |
Sergei Sumarokov | 4ffa599d54 | |
kompotkot | f91fb3b14b | |
kompotkot | 7e8bb0bd59 | |
kompotkot | 0a20cae636 | |
kompotkot | 9045a3758b | |
Sergei Sumarokov | 81e683a137 | |
Sergei Sumarokov | e16f451144 | |
Sergei Sumarokov | f5e7ec4901 | |
Sergei Sumarokov | 75fceba337 | |
Andrey Dolgolev | 4fa396d365 | |
kompotkot | 016d9129ae | |
Andrey | 338418ef72 | |
Andrey | 1b3b2a3cdd | |
Andrey | 42849691ff | |
Andrey | 67de697270 | |
Andrey | e26e324a09 | |
Andrey | 2daf6ab755 | |
Andrey | 6a710ef9f7 | |
Andrey | fdb9e88d8f | |
Andrey | 5b4d429199 | |
Andrey | d998ac13b5 | |
kompotkot | 5eca0fb57f | |
Andrey Dolgolev | 068bebb586 | |
Andrey | bb4b6330fb | |
Andrey Dolgolev | 0228db4c0e | |
Andrey | 7aee7e72e3 | |
kompotkot | aca575052b | |
Andrey | 92888ba80d | |
Andrey | 07de2c7382 | |
Andrey | cfa48fccf5 | |
Andrey | 327e46d896 | |
Andrey | b90d92f7b3 | |
Andrey | 9daa2e2805 | |
Andrey | 483a52bed8 | |
Andrey Dolgolev | 51e9a4df58 | |
Andrey | 22d0bfe083 | |
Andrey | 767921a67f | |
Andrey | d0ee979593 | |
Andrey | 1169f7e22b | |
kompotkot | 872c1f6e76 | |
Andrey Dolgolev | d3b2e350f5 | |
Andrey | 8051287d65 | |
kompotkot | 7c39a19b0b | |
Andrey | 9dbd3b2871 | |
Andrey | c848de4714 | |
Andrey | c1fdf25342 | |
Andrey | 59d8342faa | |
Andrey | a8b47b630c | |
Andrey | 39047c8022 | |
Andrey | 0a7a5a7d99 | |
Andrey | d473db954c | |
kompotkot | 360f4d8286 | |
Andrey | 4788a3d8a7 | |
Andrey | b2bf46ad7e | |
Andrey | 8cb7dfb8be | |
Andrey | da9d343577 | |
Andrey | 476f24dd50 | |
Andrey | cadb226744 | |
Sergei Sumarokov | 7bbb0be6df | |
kompotkot | 45dee64e5d | |
kompotkot | d5fff67abd | |
kompotkot | d438801b98 | |
kompotkot | b2bb3b70a9 | |
Andrey Dolgolev | 0a73a800d2 | |
Andrey | 3560be1e55 | |
Andrey | d4e32b520f | |
Andrey | af06bfa0e5 | |
kompotkot | f9646ea8ba | |
Andrey | a4a982b1c1 | |
Andrey | 63105b99e4 | |
Andrey Dolgolev | a54048fb0a | |
Andrey | 264955a2e3 | |
Andrey | de7f01e7f0 | |
Andrey | e1e8a4e45c | |
Andrey | a52910b710 | |
kompotkot | ce99e7140a | |
Andrey | 5519975b55 | |
Sergei Sumarokov | 2f08452fbb | |
kompotkot | 001a01d5d6 | |
kompotkot | 660d7a7f49 | |
Sergei Sumarokov | 8df6ba410d | |
Sergei Sumarokov | 10a3123b30 | |
Sergei Sumarokov | 3d7897b0b1 | |
Sergei Sumarokov | 837d02ffc8 | |
Sergei Sumarokov | 438cb5be69 | |
kompotkot | 897c2faa90 | |
kompotkot | 820adb4ce3 | |
kompotkot | ac1a75d043 | |
kompotkot | 3c240db2e1 | |
kompotkot | a7cbaae00e | |
kompotkot | b07c77c138 | |
kompotkot | 4647ce7426 | |
Sergei Sumarokov | 43a8e09d35 | |
kompotkot | 4682ab3b2b | |
Sergei Sumarokov | 6e3ae7478d | |
kompotkot | a5482d5885 | |
kompotkot | 7663e67b77 | |
Sergei Sumarokov | 2da576ad1c | |
kompotkot | 1e1f1ef2c2 | |
kompotkot | 84d941b0df | |
Sergei Sumarokov | dce18b124d | |
kompotkot | b629e42f17 | |
Andrey Dolgolev | 5a7e328334 | |
Andrey | eb20013588 | |
Andrey | 1eb263b071 | |
Sergei Sumarokov | 245994dbed | |
Andrey Dolgolev | 8c9e1cec65 | |
Andrey | 243338705c | |
Andrey Dolgolev | deb2cc5392 | |
Andrey | 479ad41184 | |
Andrey | c697aad609 | |
Andrey | ee8d1cf457 | |
kompotkot | 9a0b7eb8f0 | |
Andrey Dolgolev | 72ccc6c0e0 | |
kompotkot | 96886a56a0 | |
Andrey | 2e211a6b18 | |
Andrey | 96af5d950c | |
Sergei Sumarokov | 541779c0b5 | |
kompotkot | f6883d87a9 | |
kompotkot | 8cff6ba00f | |
Sergei Sumarokov | 626954484c | |
kompotkot | 5f9d8072c1 | |
Andrey | 9f82dc3b7f | |
Andrey | 8f5ab20ed2 | |
Andrey | ee9fba7469 | |
Andrey | 13fc900bc1 | |
Andrey | 839622df10 | |
Andrey | ad2045dd80 | |
Andrey | 43e0367f17 | |
Andrey | 6f86ad6a02 | |
Andrey | de39b35f69 | |
Andrey | bcc9897fb1 | |
kompotkot | 30a1fee536 | |
kompotkot | 43d7e4a807 | |
kompotkot | bfdc2eb718 | |
kompotkot | 7f89784f2f | |
kompotkot | 2d42ac4cd5 | |
kompotkot | 401ae81dc6 | |
Andrey | cf93f99fb1 | |
kompotkot | 33adf6255a | |
kompotkot | 4c86217439 | |
kompotkot | 1e825c5487 | |
kompotkot | e46eb54f91 | |
kompotkot | e2f454af5f | |
kompotkot | 647cccb99f | |
kompotkot | e199d5bb1a | |
kompotkot | 40c9c21777 | |
kompotkot | 759a51db46 | |
kompotkot | 9da72eac44 | |
kompotkot | 363388857b | |
Andrey | 07ad71fd9c | |
Anton Mushnin | 8e7da79fb1 | |
Andrey Dolgolev | 55cd64721c | |
Andrey | 4a72ec0b5f | |
Andrey | 4b13572040 | |
Andrey | d20c0bf43c | |
Andrey | ba1992c26d | |
Andrey | 4535e7b3c0 | |
Andrey | 50c4720349 | |
Andrey | d2f29f5831 | |
Andrey | 789ed431c1 | |
Andrey | d794a887ce | |
Andrey | c928e11fcb | |
Anton Mushnin | 7ee79d76d2 | |
Sergei Sumarokov | f26c25e3e3 | |
Sergei Sumarokov | da433d7bf7 | |
Sergei Sumarokov | 75b6328b39 | |
Sergei Sumarokov | f6992a5af7 | |
Sergei Sumarokov | f79c064dd8 | |
Andrey Dolgolev | b21740ec38 | |
Andrey | 9f2051688a | |
Andrey | aa0f2458d7 | |
Andrey | 4d31f8394a | |
Andrey | c13ece0580 | |
Andrey | 24cccddb99 | |
Andrey | d7f08a05d3 | |
Sergei Sumarokov | fb359ab40c | |
kompotkot | 1f9608d156 | |
Andrey Dolgolev | 2bdd5c3895 | |
Andrey | fd9495340a | |
Andrey Dolgolev | 7394c969cf | |
Andrey | 14ed5325df | |
Andrey | e2f84bbd51 | |
Andrey | 1c0a6dd467 | |
Andrey | 4d1a558747 | |
Andrey | 222581f1d0 | |
Andrey | d98f3e4c2d | |
Andrey | 0db9de4f56 | |
Andrey | 180cbe455b | |
Andrey | 97153b0c4b | |
Andrey | 3279ab5d25 | |
Andrey | bf2716cfd0 | |
Andrey | 28358c93f5 | |
Andrey | 1c908d73fc | |
Andrey | 1f9bb83cfa | |
Andrey | d7c8a13ba1 | |
Andrey | ee45e08370 | |
Andrey | 2e52e68739 | |
Andrey | f83aec9166 | |
Andrey | 8f36278f1b | |
Andrey | 782c7082ad | |
Andrey | e2c50c26b9 | |
Andrey | ad06bf947f | |
Andrey | 447763ca85 | |
Andrey | da905d8ced | |
Andrey | 231382422e | |
Andrey | ccfae29520 | |
Andrey | 55048fc6c0 | |
Andrey | a54e9bb3b1 | |
Andrey Dolgolev | 514c859430 | |
Andrey | 42cca77868 | |
Andrey Dolgolev | 487e74058a | |
Andrey | c2a02eb70a | |
Sergei Sumarokov | f4d0ce6d9a | |
kompotkot | 8b95f6cece | |
Andrey Dolgolev | 584b2385ec | |
Andrey | 6c8e9bbb5b | |
Andrey | 9ab9e7fd1f | |
Andrey | ada492e372 | |
Andrey | 91ca3d15bf | |
Andrey | ff6a2ed5d5 | |
Andrey | a0b1ee7770 | |
Andrey | 30a9bf9951 | |
Andrey | d2cef2b637 | |
Andrey | b441e316a0 | |
Sergei Sumarokov | 99504a431a | |
kompotkot | f2a69072e0 | |
kompotkot | fd652092cc | |
Andrey Dolgolev | eb350bd7ec | |
Andrey Dolgolev | 048079a6b7 | |
Andrey | 0e3b4928a7 | |
Andrey | 534004851f | |
Andrey | e76669921e | |
Andrey | d910f1bfa7 | |
Andrey Dolgolev | fcc80ac619 | |
Andrey | 4c36794d79 | |
Andrey | 7b3df0912c | |
Andrey | 612da984a7 | |
Andrey | f2bc8dfd17 | |
Andrey | e885cf5fe2 | |
Andrey | 082ca2b867 | |
Sergei Sumarokov | 95588afa30 | |
kompotkot | 99a608c6dc | |
Andrey Dolgolev | b85939595c | |
Andrey | aef0e4743e | |
Andrey | abe8c17e8c | |
Andrey Dolgolev | 1a7a385510 | |
Andrey | a6184b725d | |
Andrey Dolgolev | 677f416420 | |
kompotkot | 59e4ce36fb | |
Andrey Dolgolev | e8a97406d3 | |
Andrey | ce3a5ffd41 | |
Andrey | 3a8069e976 | |
Andrey | 0311551cf6 | |
kompotkot | a51a275daa | |
Andrey | 3fb27c6ffa | |
Andrey | 49c0454e6e | |
Andrey | aae26b67d9 | |
Andrey | 469e1a4d7d | |
Andrey | c23afaa6de | |
Andrey | 695c3e646a | |
Andrey | e7e4ef8916 | |
Andrey | 2b646b200a | |
Andrey | 3f8f63a866 | |
Andrey | 9b56b4d991 | |
Andrey | ccef2daf5e | |
Andrey | 887114b7a5 | |
Andrey | 16a0a804fd | |
Andrey | 84d39d5644 | |
Andrey | b2249af97f | |
Andrey | 3b0eb01c0c | |
Andrey | 7a3a949a93 | |
Andrey | d2836d7d41 | |
Anton Mushnin | 3893e5e272 | |
Andrey Dolgolev | 8201526064 | |
Andrey | 002a1ad88c | |
Andrey | 1e60876519 | |
Anton Mushnin | 69c0347c9c | |
Anton Mushnin | 8d9aab5013 | |
Anton Mushnin | f497b90da2 | |
Andrey Dolgolev | 059d759890 | |
Andrey | b04506dbb6 | |
Andrey | 8745dd1153 | |
Andrey | 60e96f5ad6 | |
Andrey | 2bbfdb92b4 | |
Andrey | cd541cbb74 | |
Andrey Dolgolev | 6f168e37e8 | |
Andrey | cbccf12b56 | |
Andrey | d7e108a1c5 | |
Andrey | 7287e800b8 | |
Andrey | b1ff9eba0a | |
Andrey | 94d33a7482 | |
Andrey | c16d709994 | |
Andrey Dolgolev | 410471b193 | |
Andrey | 6d7cbb0139 | |
Andrey | 1b1e6d6930 | |
Andrey | 6381e29d7a | |
Andrey | 8947f4249e | |
Andrey | 3cc202b88b | |
Andrey | 84ec8c80a8 | |
Andrey | a3038b1f88 | |
Andrey | 86a3d79cdf | |
Andrey | fea92703db | |
Andrey | 63c16da8cf | |
Andrey | c79355e070 | |
Andrey | 1122590c7f | |
Andrey | 4c7a7fdaf6 | |
Andrey | 556e91fba9 | |
Andrey | 9bdd7b504e | |
Andrey | f9c6d29561 | |
Sergei Sumarokov | 9cd4dedabe | |
kompotkot | fd134352a1 | |
Sergei Sumarokov | 91e0ffea5b | |
kompotkot | 43c7df5ecf | |
Andrey | 6a292c6db8 | |
Andrey | 8c69840ea9 | |
kompotkot | c53c4f3648 | |
kompotkot | e5ef9cfe5e | |
Sergei Sumarokov | cdaf64312f | |
Sergei Sumarokov | 3f814c779e | |
Andrey | 69cfe4069b | |
Andrey | 59a3454d71 | |
Andrey | 6ec956c723 | |
Sergei Sumarokov | 2659ad91b9 | |
kompotkot | 87d034f48a | |
kompotkot | b4ca416e1c | |
Anton Mushnin | bd85f4869f | |
Anton Mushnin | 66bc01e229 | |
Anton Mushnin | c6b63361af | |
kompotkot | e40744c04b | |
kompotkot | d3e6070ec9 | |
kompotkot | 382d776f8d | |
Anton Mushnin | 3ca2cec860 | |
Anton Mushnin | 4b2515c4af | |
Anton Mushnin | b572089dc4 | |
Anton Mushnin | 7d2968f9d8 | |
Anton Mushnin | e065414d0c | |
Anton Mushnin | 789035cd06 | |
Anton Mushnin | 7747d49abc | |
Anton Mushnin | 882627aaa6 | |
Anton Mushnin | c61a55a245 | |
Anton Mushnin | 62525f6941 | |
Anton Mushnin | e6cfa39b7d | |
Anton Mushnin | 9b260e5003 | |
Anton Mushnin | 470b5dff74 | |
Anton Mushnin | 17672bf552 | |
Anton Mushnin | f86166e9af | |
Anton Mushnin | d450b625e1 | |
Anton Mushnin | d132481fa6 | |
kompotkot | de609da7cd | |
Andrey Dolgolev | 62d786a8a6 | |
Andrey | fa7c0f9ba1 | |
Andrey | 2691409022 | |
Andrey Dolgolev | 0afd4c6ba3 | |
Andrey | 338b321b1f | |
Andrey Dolgolev | 909745a2e2 | |
Andrey | c7cfe7a160 | |
Andrey | e5b864d9c0 | |
Andrey | 5ec46a6a2f | |
Andrey | b16a4ff1a1 | |
Andrey | e1aafd7552 | |
Andrey | 9cfd12dc8d | |
Andrey | e64bb7fcad | |
Andrey | f68ab988c9 | |
Andrey | 83771549d8 | |
Andrey | a4dd79a663 | |
Andrey | 6a379c1b55 | |
Sergei Sumarokov | 1935d7fbcd | |
kompotkot | 36e58c7275 | |
Neeraj Kashyap | 704c73abde | |
Neeraj Kashyap | d6eb317fc8 | |
daria-bochkareva | 706a16cf88 | |
daria-bochkareva | aa6f4d5184 | |
Andrey Dolgolev | 8fc810b9e9 | |
Andrey | 36888404ef | |
Andrey | f0857b4d73 | |
Andrey | 0aeae49c97 | |
Andrey Dolgolev | e917d9361d | |
Andrey | 5b4ddc6feb | |
Andrey | e34f544099 | |
Anton Mushnin | d463d0a7d1 | |
Anton Mushnin | 2ded714936 | |
Sergei Sumarokov | a866c8afca | |
Anton Mushnin | 849ce50bde | |
kompotkot | f3347224c9 | |
Andrey | c7e8402785 | |
Andrey | 6c8046908d | |
Andrey | f60fe8693c | |
Andrey | b44881a28d | |
Andrey | a66039ad38 | |
Andrey | 7bf1ef00af | |
Andrey | 3800c558b8 | |
Andrey | e8a1187889 | |
Neeraj Kashyap | 48ad43cd9b | |
Anton Mushnin | 3836dafa79 | |
Anton Mushnin | 5c0a7f3ec1 | |
Anton Mushnin | b15f9e7687 | |
Anton Mushnin | 7aed617155 | |
Anton Mushnin | 7280015ff7 | |
Anton Mushnin | 9d566cf62a | |
Anton Mushnin | 22f8012956 | |
Anton Mushnin | 4d333191af | |
Anton Mushnin | 72caf9695e | |
Anton Mushnin | 9ba9e59169 | |
Anton Mushnin | fee4e74861 | |
Anton Mushnin | 9f02ee21b0 | |
Anton Mushnin | 224610088a | |
Anton Mushnin | 779be7c23a | |
Anton Mushnin | c640c38fdb | |
Anton Mushnin | b53b0d4952 | |
Andrey Dolgolev | 100d024101 | |
Andrey | 27f2ea6ec5 | |
Anton Mushnin | f3440eeba0 | |
Andrey | b8b72ea35d | |
Andrey Dolgolev | 9e8a7466b6 | |
Andrey | f54c113e65 | |
Anton Mushnin | 9a8961e3e3 | |
Anton Mushnin | 86102c90c7 | |
Anton Mushnin | 05dd017a50 | |
Anton Mushnin | 6b106eb77f | |
Anton Mushnin | db06fef9c3 | |
Anton Mushnin | 714407702b | |
Anton Mushnin | 0f30e6699e | |
Anton Mushnin | ebc4f461d4 | |
Anton Mushnin | 996f22727e | |
Anton Mushnin | 308bea5ab7 | |
Anton Mushnin | fc7e97661f | |
Andrey Dolgolev | 4639bef6bf | |
Andrey | 1a3509d554 | |
Sergei Sumarokov | 35e416bbf0 | |
kompotkot | 8f98d5feba | |
Andrey Dolgolev | 6f3f62f35e | |
Andrey | aae3b9bd3e | |
Andrey | f5fc52e8be | |
Andrey Dolgolev | ca8aef467f | |
Andrey | 1dff0431ed | |
Andrey | 9d89b6231e | |
Andrey | d23e83e15a | |
Andrey | 9c2b3d42a1 | |
Andrey | 16a3dc24f0 | |
Anton Mushnin | 7f373bbbfe | |
Anton Mushnin | f4052080f9 | |
Anton Mushnin | 05160fb931 | |
Anton Mushnin | 7d4c790cba | |
Anton Mushnin | d6fbfbe747 | |
Anton Mushnin | 8c2f3fb0b4 | |
Anton Mushnin | e1e5ded83d | |
Anton Mushnin | 70a06a468f | |
Andrey | d00c9d0790 | |
Andrey | 4808d600de | |
Anton Mushnin | 9cef6bc9dc | |
Neeraj Kashyap | 7172666a90 | |
Anton Mushnin | 901e85089c | |
Anton Mushnin | be00baca14 | |
Anton Mushnin | 6dbc701f47 | |
Andrey | 5e100228cd | |
Anton Mushnin | a5e158b414 | |
Anton Mushnin | 0a33c1f0df | |
Anton Mushnin | 14b444f13c | |
Anton Mushnin | 306f45772b | |
Anton Mushnin | 9fdae7bf3e | |
Anton Mushnin | bd4dc4b0b7 | |
Anton Mushnin | 912a76c8a4 | |
Anton Mushnin | b8eaa3fdb8 | |
Anton Mushnin | 97c8de3664 | |
Anton Mushnin | f6c069cca8 | |
Anton Mushnin | 3be9a0b190 | |
Anton Mushnin | abec704636 | |
Anton Mushnin | 6855f27394 | |
Anton Mushnin | 5ea0e64e86 | |
Anton Mushnin | ce3578689b | |
Andrey | 69835d09c8 | |
Andrey | 2e65a2b889 | |
Andrey | c992475765 | |
Andrey | ef423b9739 | |
Andrey | 0c38a3d90e | |
Andrey | 75ae88d2c3 | |
Sergei Sumarokov | 93a73f18f2 | |
kompotkot | 8fd1178ab5 | |
Sergei Sumarokov | df4d23a907 | |
kompotkot | 5fa3c07497 | |
Sergei Sumarokov | eadb8aa8ac | |
kompotkot | b5c8e80560 | |
Anton Mushnin | 6ca5895e19 | |
kompotkot | e8e2481428 | |
Andrey | db77374149 | |
Anton Mushnin | 9341a5cd09 | |
kompotkot | 897d2d7321 | |
Andrey | 0c6656fad5 | |
Sergei Sumarokov | ef95486716 | |
kompotkot | b83303f1b5 | |
Sergei Sumarokov | e13ef1cbfb | |
kellan-simiotics | 8886a76ea6 | |
Sergei Sumarokov | bc0f80052b | |
kompotkot | 592d2cf667 | |
Sergei Sumarokov | dc05519738 | |
Kellan Wampler | 4928e59fee | |
kompotkot | 2d1a4c4c9c | |
Kellan Wampler | 3776a90ba1 | |
kompotkot | 8291fbb58e | |
Andrey | e23fd52453 | |
kompotkot | c21dff3df8 | |
Andrey | 920c8941ef | |
kompotkot | 48a1c8fbed | |
kompotkot | c8f4d4efdc | |
Andrey | 0436ab784a | |
kompotkot | b2e598e0f8 | |
kompotkot | 06cdb16042 | |
Andrey | 15126c5086 | |
kompotkot | 43535cae50 | |
Andrey | c580a7d3c2 | |
kellan-simiotics | 01e916ed2f | |
gnawali | 777414c7db | |
Kellan Wampler | ef6a3f3f15 | |
Andrey | 94e98a3059 | |
Sergei Sumarokov | 6fe23253f8 | |
kompotkot | accb5caf03 | |
Sergei Sumarokov | a8470441cb | |
Andrey Dolgolev | 3e2cfae8cf | |
Andrey | 9f441373fe | |
Andrey | b8ce45470b | |
Neeraj Kashyap | 3d7bbb03ce | |
Neeraj Kashyap | 76792ea983 | |
Neeraj Kashyap | ddd70cb451 | |
Kellan Wampler | 327351880f | |
Kellan Wampler | ae5accc568 | |
Kellan Wampler | 4245dd1a12 | |
kellan-simiotics | 1e87b36e13 | |
Kellan Wampler | d64c1a46df | |
Kellan Wampler | 6826dfeda1 | |
Kellan Wampler | 2d6087e828 | |
Kellan Wampler | f47c4469b3 | |
Kellan Wampler | 4fbcdf0422 | |
Andrey Dolgolev | e5240d2fd8 | |
Kellan Wampler | 8b03477763 | |
Andrey | 2f5147a84f | |
Andrey Dolgolev | dbf60c686b | |
Andrey | 20cda62a1b | |
Andrey Dolgolev | 9d93a00f05 | |
Andrey | e5f2c4a550 | |
Andrey | 03af327f8e | |
Andrey | dd9b462c4e | |
Andrey | cccb42156e | |
Andrey | 8c6d22c88b | |
Andrey | 3e3288a55f | |
Andrey | 3f35b47a91 | |
Sergei Sumarokov | 01d0b8dd03 | |
Andrey Dolgolev | 5f18793055 | |
Andrey | bffe318cd9 | |
Sergei Sumarokov | 9f7169df56 | |
kompotkot | b24bdb0f6b | |
kompotkot | a4c04a472a | |
Andrey Dolgolev | 548a89331e | |
kompotkot | 66ac8c9d5e | |
Andrey | 2b8e1f4b7b | |
Andrey | e8f787dcb0 | |
Andrey | 7074eb9923 | |
kellan-simiotics | d5065c42e1 | |
Kellan Wampler | a788aeec7f | |
Andrey | d31ef8b909 | |
Kellan Wampler | 2b60c466d2 | |
Kellan Wampler | fd76439685 | |
Kellan Wampler | bd09e1b18c | |
Kellan Wampler | 7eef5d35c5 | |
Andrey Dolgolev | 8efa9858ac | |
kompotkot | 7dc045c3bf | |
Andrey | d200e82460 | |
Andrey | ec718f0e4c | |
Andrey | 14dedab479 | |
Andrey | 31aa042dfa | |
Andrey Dolgolev | 25066e9ec1 | |
Andrey | b0a605cf90 | |
Andrey Dolgolev | 7252d06cad | |
Andrey | 4a22a593e0 | |
Andrey | 808c604299 | |
Andrey | 95ab437a22 | |
Andrey | 88d1c5a77b | |
kompotkot | 6582d12759 | |
Andrey | def1dc1527 | |
Andrey | 7d073c3e63 | |
Andrey | 8855097728 | |
Andrey Dolgolev | cde854eb71 | |
Andrey | 12cbe3e797 | |
Andrey | d9c80e566c | |
Sergei Sumarokov | 7d235eceff | |
kompotkot | 93bc461a0d | |
Andrey | 59f8f22b2e | |
Andrey | 42ebb66e4d | |
Andrey | aaa6c95953 | |
Andrey | c2d790d08d | |
Andrey | 7fe587fdbc | |
Andrey | 54870630c4 | |
kellan-simiotics | e1477643b9 | |
Kellan Wampler | 94a5812e47 | |
Kellan Wampler | 3b83200c35 | |
Andrey Dolgolev | c822274344 | |
Andrey Dolgolev | 27eed5910f | |
Andrey | b3eba087f0 | |
Andrey | af8e114515 | |
Andrey Dolgolev | 4ac5b9e716 | |
Andrey | 004f662bbb | |
Andrey | 3ed4d3bb74 | |
Andrey | 5646dbf442 | |
Andrey | 77c3e7e2f8 | |
Andrey | 9d8566cb59 | |
Andrey | 935ebe11a2 | |
Andrey | e6a730d760 | |
kellan-simiotics | d2b9aced9b | |
Kellan Wampler | 3c041f4caa | |
Kellan Wampler | 6cf7fcdb5a | |
kellan-simiotics | dd9a34bbf0 | |
Kellan Wampler | 0a10157005 | |
Kellan Wampler | 732c966f90 | |
Neeraj Kashyap | cff3a4c586 | |
Kellan Wampler | fd18c0c503 | |
Kellan Wampler | f3b9128738 | |
Andrey Dolgolev | dbe11f60b1 | |
kompotkot | 1cfd22adb4 | |
kompotkot | 6a140e7cac | |
Andrey | b14eae475c | |
Andrey | 1339de073b | |
Andrey | 870faf15d7 | |
kompotkot | 94c24466c5 | |
Sergei Sumarokov | 43162ac7d2 | |
kompotkot | a2c3078b2a | |
Sergei Sumarokov | 3cc7b5f3c6 | |
Sergei Sumarokov | ec114f5c8a | |
kompotkot | 4cee8eacfc | |
kompotkot | 70f96ab457 | |
kompotkot | 9d52960f9b | |
kompotkot | 19b586760b | |
kompotkot | d36f28e010 | |
kompotkot | 5b912ff0d9 | |
kompotkot | de879a7adf | |
kompotkot | 9496214d58 | |
kompotkot | 7a86f69bf7 |
|
@ -13,9 +13,8 @@ jobs:
|
|||
publish:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: "3.9"
|
||||
- name: Install dependencies
|
||||
|
@ -24,22 +23,8 @@ jobs:
|
|||
pip install -e .[distribute]
|
||||
- name: Build and publish
|
||||
env:
|
||||
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
|
||||
TWINE_USERNAME: "__token__"
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN_MOONSTREAM }}
|
||||
run: |
|
||||
python setup.py sdist bdist_wheel
|
||||
twine upload dist/*
|
||||
create_release:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: actions/create-release@v1
|
||||
id: create_release
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
tag_name: ${{ github.ref }}
|
||||
release_name: "Moonstream Python client library - ${{ github.ref }}"
|
||||
body: |
|
||||
Version ${{ github.ref }} of the Moonstream Python client library.
|
||||
draft: true
|
||||
prerelease: false
|
||||
|
|
|
@ -13,8 +13,8 @@ jobs:
|
|||
publish:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.8'
|
||||
- name: Install dependencies
|
||||
|
@ -23,8 +23,8 @@ jobs:
|
|||
pip install -e .[distribute]
|
||||
- name: Build and publish
|
||||
env:
|
||||
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
|
||||
TWINE_USERNAME: "__token__"
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN_MOONCRAWL }}
|
||||
run: |
|
||||
python setup.py sdist bdist_wheel
|
||||
twine upload dist/*
|
||||
|
|
|
@ -13,8 +13,8 @@ jobs:
|
|||
publish:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.8'
|
||||
- name: Install dependencies
|
||||
|
@ -23,8 +23,8 @@ jobs:
|
|||
pip install -e .[distribute]
|
||||
- name: Build and publish
|
||||
env:
|
||||
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
|
||||
TWINE_USERNAME: "__token__"
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN_MOONSTREAMAPI }}
|
||||
run: |
|
||||
python setup.py sdist bdist_wheel
|
||||
twine upload dist/*
|
||||
|
|
|
@ -7,14 +7,14 @@ on:
|
|||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: db
|
||||
working-directory: moonstreamdb
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-python@v2
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.8'
|
||||
- name: Install dependencies
|
||||
|
@ -23,8 +23,8 @@ jobs:
|
|||
pip install -e .[distribute]
|
||||
- name: Build and publish
|
||||
env:
|
||||
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
|
||||
TWINE_USERNAME: "__token__"
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN_MOONSTREAMDB }}
|
||||
run: |
|
||||
python setup.py sdist bdist_wheel
|
||||
twine upload dist/*
|
||||
|
|
|
@ -0,0 +1,30 @@
|
|||
name: Release moonstreamdbv3 package
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'moonstreamdbv3/v*'
|
||||
|
||||
defaults:
|
||||
run:
|
||||
working-directory: moonstreamdb-v3
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
runs-on: ubuntu-20.04
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.8'
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -e .[distribute]
|
||||
- name: Build and publish
|
||||
env:
|
||||
TWINE_USERNAME: "__token__"
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN_MOONSTREAMDBV3 }}
|
||||
run: |
|
||||
python setup.py sdist bdist_wheel
|
||||
twine upload dist/*
|
35
README.md
35
README.md
|
@ -1,21 +1,42 @@
|
|||
![github read me header](https://user-images.githubusercontent.com/8016073/203381867-f7b56861-04ca-4ae4-a5e6-53e97804817a.png)
|
||||
|
||||
# moonstream
|
||||
|
||||
\[[Live at https://moonstream.to/](https://moonstream.to)\] | \[[Join us on Discord](https://discord.gg/pYE65FuNSz)\]
|
||||
[Website](https://moonstream.to)
|
||||
|
||||
[Join our Discord](https://discord.gg/pYE65FuNSz)
|
||||
|
||||
## What is Moonstream?
|
||||
|
||||
Moonstream makes tools that help people build, manage, and maintain their blockchain economies.
|
||||
Moonstream creates economic infrastructure for web3 projects with a focus on blockchain games.
|
||||
|
||||
In order to provide this functionality, we build a lot of technology to crawl blockchains and makes sense of crawled transactions and events. This repository contains that code.
|
||||
This repository contains Moonstream's complete data analysis stack. The emphasis of it is on collecting actionable data related to the blockchain. The repository contains:
|
||||
|
||||
1. Database management tools
|
||||
2. Blockchain node management tools
|
||||
3. Blockchain data crawlers
|
||||
4. Access-controlled API which exposes collected data
|
||||
|
||||
## Important resources
|
||||
1. [Documentation](https://docs.moonstream.to/)
|
||||
2. [Status page](https://moonstream.to/status/)
|
||||
3. [On-chain mechanics](https://github.com/bugout-dev/engine)
|
||||
4. [How to create a dashboard to analyze a smart contract?](https://voracious-gerbil-120.notion.site/Creating-dashboard-for-a-smart-contract-288b1bfa64984b109b79895f69129fce)
|
||||
|
||||
## Who uses Moonstream?
|
||||
|
||||
Game designers and economists, data scientists, smart contract developers, backend engineers, and teams managing loyalty programs for blockchain projects.
|
||||
People from different backgrounds who are interested in data, crypto and code.
|
||||
Moonstream tools are often used by game designers and economists, data scientists, smart contract developers, backend engineers, and teams managing loyalty programs for blockchain projects.
|
||||
|
||||
Some of our prominents customers:
|
||||
Some projects currently using Moonstream:
|
||||
|
||||
1. [Laguna Games](https://laguna.games), makers of [Crypto Unicorns](https://cryptounicorns.fun)
|
||||
2. [RealtyBits](https://realtybits.com)
|
||||
2. [Game7](https://game7.io)
|
||||
3. [Champions Ascension](https://www.champions.io/)
|
||||
|
||||
Please read [the Game Master's Guide to Moonstream Solutions](https://docs.google.com/document/d/1mjfF8SgRrAZvtCVVxB2qNSUcbbmrH6dTEYSMfHKdEgc/view) if you want to know how Moonstream tools are applied in web3 games.
|
||||
|
||||
[Moonworm tool](https://github.com/bugout-dev/moonworm) is used to build datasets of on-chain data related to market activity. The dataset with on-chain activity from the Ethereum NFT market (April 1 to September 25, 2021) is available [on Kaggle](https://www.kaggle.com/datasets/simiotic/ethereum-nfts). The full report on it is published on [GitHub](https://github.com/bugout-dev/moonstream/blob/main/datasets/nfts/papers/ethereum-nfts.pdf).
|
||||
|
||||
## Free software
|
||||
|
||||
|
@ -62,4 +83,4 @@ docker-compose up --build
|
|||
|
||||
## Contributing
|
||||
|
||||
If you would like to contribute to Moonstream, please reach out to @zomglings on the [Moonstream Discord](https://discord.gg/pYE65FuNSz).
|
||||
We are working on contributing guidelines. In the meantime, please reach out to @zomglings on the [Moonstream Discord](https://discord.gg/pYE65FuNSz).
|
||||
|
|
|
@ -1,28 +0,0 @@
|
|||
# Required environment variables
|
||||
export MOONSTREAM_DB_URI="postgresql://<username>:<password>@<db_host>:<db_port>/<db_name>"
|
||||
export MOONSTREAM_DB_URI_READ_ONLY="postgresql://<username>:<password>@<db_host>:<db_port>/<db_name>"
|
||||
export MOONSTREAM_CORS_ALLOWED_ORIGINS="http://localhost:3000,https://moonstream.to,https://www.moonstream.to"
|
||||
export BUGOUT_BROOD_URL="https://auth.bugout.dev"
|
||||
export BUGOUT_SPIRE_URL="https://spire.bugout.dev"
|
||||
export MOONSTREAM_APPLICATION_ID="<issued_bugout_application_id>"
|
||||
export MOONSTREAM_ADMIN_ACCESS_TOKEN="<Access_token_to_application_resources>"
|
||||
export MOONSTREAM_POOL_SIZE=0
|
||||
export MOONSTREAM_MOONWORM_TASKS_JOURNAL="<Bugout_journal_with_tasks_for_moonworm>"
|
||||
export MOONSTREAM_CRAWLERS_SERVER_URL="<Moonstream_crawlers_server_API_endpoint_URL>"
|
||||
export MOONSTREAM_CRAWLERS_SERVER_PORT="<Moonstream_crawlers_server_port>"
|
||||
|
||||
# Blockchain, txpool, whalewatch data depends variables
|
||||
export MOONSTREAM_DATA_JOURNAL_ID="<bugout_journal_id_to_store_blockchain_data>"
|
||||
export HUMBUG_TXPOOL_CLIENT_ID="<Bugout_Humbug_client_id_for_txpool_transactions_in_journal>"
|
||||
export MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI="https://<connection_path_uri_to_ethereum_node>"
|
||||
export MOONSTREAM_QUERIES_JOURNAL_ID="<bugout_journal_id_where_store_queries_for_executing>"
|
||||
|
||||
# Set following parameters if AWS node instance and S3 smartcontracts configured
|
||||
export MOONSTREAM_S3_SMARTCONTRACTS_BUCKET="<AWS_S3_bucket_to_store_smart_contracts>"
|
||||
export MOONSTREAM_S3_SMARTCONTRACTS_ABI_BUCKET="<AWS_S3_bucket_to_store_smart_contracts_ABI>"
|
||||
export MOONSTREAM_S3_SMARTCONTRACTS_ABI_PREFIX="<Previx_for_AWS_S3_bucket_(prod,dev,..)>"
|
||||
export MOONSTREAM_S3_QUERIES_BUCKET="<AWS_S3_bucket_to_store_sql_queries>"
|
||||
export MOONSTREAM_S3_QUERIES_BUCKET_PREFIX="dev"
|
||||
|
||||
# Set the following variables in the most reasonable manner for your development environment
|
||||
export HUMBUG_REPORTER_BACKEND_TOKEN="<Bugout_umbug_token_for_crash_reports>"
|
|
@ -1,455 +0,0 @@
|
|||
"""
|
||||
Utilities for managing subscription type resources for a Moonstream application.
|
||||
"""
|
||||
import argparse
|
||||
import json
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from bugout.data import BugoutResource, BugoutResources
|
||||
from sqlalchemy.sql.expression import update
|
||||
|
||||
from ..data import SubscriptionTypeResourceData
|
||||
from ..settings import (
|
||||
BUGOUT_REQUEST_TIMEOUT_SECONDS,
|
||||
MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
MOONSTREAM_APPLICATION_ID,
|
||||
)
|
||||
from ..settings import bugout_client as bc
|
||||
|
||||
CANONICAL_SUBSCRIPTION_TYPES = {
|
||||
"ethereum_smartcontract": SubscriptionTypeResourceData(
|
||||
id="ethereum_smartcontract",
|
||||
name="Ethereum smartcontracts",
|
||||
choices=["input:address", "tag:erc721"],
|
||||
description="Contracts events and tx_calls of contract of Ethereum blockchain",
|
||||
icon_url="https://s3.amazonaws.com/static.simiotics.com/moonstream/assets/ethereum/eth-diamond-purple.png",
|
||||
stripe_product_id=None,
|
||||
stripe_price_id=None,
|
||||
active=True,
|
||||
),
|
||||
"polygon_smartcontract": SubscriptionTypeResourceData(
|
||||
id="polygon_smartcontract",
|
||||
name="Polygon smartcontracts",
|
||||
choices=["input:address", "tag:erc721"],
|
||||
description="Contracts events and tx_calls of contract of Polygon blockchain",
|
||||
icon_url="https://s3.amazonaws.com/static.simiotics.com/moonstream/assets/matic-token-inverted-icon.png",
|
||||
stripe_product_id=None,
|
||||
stripe_price_id=None,
|
||||
active=True,
|
||||
),
|
||||
"xdai_smartcontract": SubscriptionTypeResourceData(
|
||||
id="xdai_smartcontract",
|
||||
name="XDai smartcontract",
|
||||
choices=["input:address", "tag:erc721"],
|
||||
description="Contracts events and tx_calls of contract of XDai blockchain.",
|
||||
icon_url="https://s3.amazonaws.com/static.simiotics.com/moonstream/assets/xdai-token-logo.png",
|
||||
stripe_product_id=None,
|
||||
stripe_price_id=None,
|
||||
active=True,
|
||||
),
|
||||
"ethereum_blockchain": SubscriptionTypeResourceData(
|
||||
id="ethereum_blockchain",
|
||||
name="Ethereum transactions",
|
||||
choices=["input:address", "tag:erc721"],
|
||||
description="Transactions that have been mined into the Ethereum blockchain",
|
||||
icon_url="https://s3.amazonaws.com/static.simiotics.com/moonstream/assets/ethereum/eth-diamond-purple.png",
|
||||
stripe_product_id=None,
|
||||
stripe_price_id=None,
|
||||
active=True,
|
||||
),
|
||||
"polygon_blockchain": SubscriptionTypeResourceData(
|
||||
id="polygon_blockchain",
|
||||
name="Polygon transactions",
|
||||
choices=["input:address", "tag:erc721"],
|
||||
description="Transactions that have been mined into the Polygon blockchain",
|
||||
icon_url="https://s3.amazonaws.com/static.simiotics.com/moonstream/assets/matic-token-inverted-icon.png",
|
||||
stripe_product_id=None,
|
||||
stripe_price_id=None,
|
||||
active=True,
|
||||
),
|
||||
"xdai_blockchain": SubscriptionTypeResourceData(
|
||||
id="xdai_blockchain",
|
||||
name="XDai transactions",
|
||||
choices=["input:address", "tag:erc721"],
|
||||
description="Gnosis chain transactions subscription.",
|
||||
icon_url="https://s3.amazonaws.com/static.simiotics.com/moonstream/assets/xdai-token-logo.png",
|
||||
stripe_product_id=None,
|
||||
stripe_price_id=None,
|
||||
active=True,
|
||||
),
|
||||
"ethereum_whalewatch": SubscriptionTypeResourceData(
|
||||
id="ethereum_whalewatch",
|
||||
name="Ethereum whale watch",
|
||||
description="Ethereum accounts that have experienced a lot of recent activity",
|
||||
choices=[],
|
||||
# Icon taken from: https://www.maxpixel.net/Whale-Cetacean-Wildlife-Symbol-Ocean-Sea-Black-99310
|
||||
icon_url="https://s3.amazonaws.com/static.simiotics.com/moonstream/assets/whalewatch.png",
|
||||
stripe_product_id=None,
|
||||
stripe_price_id=None,
|
||||
active=True,
|
||||
),
|
||||
"ethereum_txpool": SubscriptionTypeResourceData(
|
||||
id="ethereum_txpool",
|
||||
name="Ethereum transaction pool",
|
||||
description="Transactions that have been submitted into the Ethereum transaction pool but not necessarily mined yet",
|
||||
choices=["input:address", "tag:erc721"],
|
||||
icon_url="https://s3.amazonaws.com/static.simiotics.com/moonstream/assets/ethereum/eth-diamond-rainbow.png",
|
||||
stripe_product_id=None,
|
||||
stripe_price_id=None,
|
||||
active=True,
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
class ConflictingSubscriptionTypesError(Exception):
|
||||
"""
|
||||
Raised when caller tries to add a resource that conflicts with an existing resource.
|
||||
"""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class SubscriptionTypeNotFoundError(Exception):
|
||||
"""
|
||||
Raised when a subscription type is expected to exist as a Brood resource but is not found.
|
||||
"""
|
||||
|
||||
|
||||
class UnexpectedError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
BUGOUT_RESOURCE_TYPE = "subscription_type"
|
||||
|
||||
|
||||
def create_subscription_type(
|
||||
id: str,
|
||||
name: str,
|
||||
description: str,
|
||||
icon_url: str,
|
||||
choices: List[str] = [],
|
||||
stripe_product_id: Optional[str] = None,
|
||||
stripe_price_id: Optional[str] = None,
|
||||
active: bool = False,
|
||||
) -> BugoutResource:
|
||||
"""
|
||||
Add a new Moonstream subscription type as a Brood resource.
|
||||
|
||||
Args:
|
||||
- id: Moonstream ID for the subscription type. Examples: "ethereum_blockchain", "ethereum_txpool",
|
||||
"ethereum_whalewatch", etc.
|
||||
- name: Human-friendly name for the subscription type, which can be displayed to users.
|
||||
- description: Detailed description of the subscription type for users who would like more
|
||||
information.
|
||||
- icon_url: URL to the icon for this subscription type
|
||||
- stripe_product_id: Optional product ID from Stripe account dashboard.
|
||||
- stripe_price_id: Optional price ID from Stripe account dashboard.
|
||||
- active: Set to True if you would like the subscription type to immediately be available for
|
||||
subscriptions. If you set this to False (which is the default), users will not be able to create
|
||||
subscriptions of this type until you later on set to true.
|
||||
"""
|
||||
params = {"type": BUGOUT_RESOURCE_TYPE, "id": id}
|
||||
|
||||
response: BugoutResources = bc.list_resources(
|
||||
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
params=params,
|
||||
timeout=BUGOUT_REQUEST_TIMEOUT_SECONDS,
|
||||
)
|
||||
if response.resources:
|
||||
raise ConflictingSubscriptionTypesError(
|
||||
f"There is already a subscription_type with id: {id}"
|
||||
)
|
||||
|
||||
subscription_data = {
|
||||
"type": BUGOUT_RESOURCE_TYPE,
|
||||
"id": id,
|
||||
"name": name,
|
||||
"description": description,
|
||||
"choices": choices,
|
||||
"icon_url": icon_url,
|
||||
"stripe_product_id": stripe_product_id,
|
||||
"stripe_price_id": stripe_price_id,
|
||||
"active": active,
|
||||
}
|
||||
|
||||
resource = bc.create_resource(
|
||||
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
application_id=MOONSTREAM_APPLICATION_ID,
|
||||
resource_data=subscription_data,
|
||||
)
|
||||
|
||||
return resource
|
||||
|
||||
|
||||
def cli_create_subscription_type(args: argparse.Namespace) -> None:
|
||||
"""
|
||||
Handler for "mnstr subtypes create".
|
||||
"""
|
||||
result = create_subscription_type(
|
||||
args.id,
|
||||
args.name,
|
||||
args.description,
|
||||
args.icon,
|
||||
args.choices,
|
||||
args.stripe_product_id,
|
||||
args.stripe_price_id,
|
||||
args.active,
|
||||
)
|
||||
print(result.json())
|
||||
|
||||
|
||||
def list_subscription_types(active_only: bool = False) -> BugoutResources:
|
||||
"""
|
||||
Lists all subscription types registered as Brood resources for this Moonstream application.
|
||||
|
||||
Args:
|
||||
- active_only: Set this to true if you only want to list active subscription types. By default,
|
||||
all subscription types are listed, be they active or inactive.
|
||||
"""
|
||||
response = bc.list_resources(
|
||||
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
params={"type": BUGOUT_RESOURCE_TYPE},
|
||||
timeout=BUGOUT_REQUEST_TIMEOUT_SECONDS,
|
||||
)
|
||||
|
||||
# TODO(kompotkot): Currently, we cannot filter using non-string fields in Brood resources. This means
|
||||
# that we have to implement the active_only filter in this API instead of just setting a query parameter
|
||||
# in the Brood API call. This should be fixed.
|
||||
if not active_only:
|
||||
return response
|
||||
|
||||
active_resources = [
|
||||
resource for resource in response.resources if resource.resource_data["active"]
|
||||
]
|
||||
return BugoutResources(resources=active_resources)
|
||||
|
||||
|
||||
def cli_list_subscription_types(args: argparse.Namespace) -> None:
|
||||
"""
|
||||
Handler for "mnstr subtypes list".
|
||||
"""
|
||||
results = list_subscription_types(args.active)
|
||||
print(results.json())
|
||||
|
||||
|
||||
def get_subscription_type(id: str) -> Optional[BugoutResource]:
|
||||
"""
|
||||
Retrieves the resource representing the subscription type with the given ID.
|
||||
|
||||
Args:
|
||||
- id: Moonstream ID for the subscription type (not the Brood resource ID).
|
||||
Examples - "ethereum_blockchain", "ethereum_whalewatch", etc.
|
||||
|
||||
Returns: None if there is no subscription type with that ID. Otherwise, returns the full
|
||||
Brood resource. To access the subscription type itself, use the "resource_data" member of the
|
||||
return value. If more than one subscription type is found with the given ID, raises a
|
||||
ConflictingSubscriptionTypesError.
|
||||
"""
|
||||
response = bc.list_resources(
|
||||
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
params={"type": BUGOUT_RESOURCE_TYPE, "id": id},
|
||||
timeout=BUGOUT_REQUEST_TIMEOUT_SECONDS,
|
||||
)
|
||||
resources = response.resources
|
||||
|
||||
if not resources:
|
||||
return None
|
||||
if len(resources) > 1:
|
||||
raise ConflictingSubscriptionTypesError(
|
||||
f"More than one resource with the given ID:\n{json.dumps(resources, indent=2)}"
|
||||
)
|
||||
return resources[0]
|
||||
|
||||
|
||||
def cli_get_subscription_type(args: argparse.Namespace) -> None:
|
||||
"""
|
||||
Handler for "mnstr subtypes get".
|
||||
"""
|
||||
resource = get_subscription_type(args.id)
|
||||
if resource is None:
|
||||
print(f"Could not find resource with ID: {id}")
|
||||
else:
|
||||
print(resource.json())
|
||||
|
||||
|
||||
def update_subscription_type(
|
||||
id: str,
|
||||
name: Optional[str] = None,
|
||||
description: Optional[str] = None,
|
||||
choices: Optional[List[str]] = None,
|
||||
icon_url: Optional[str] = None,
|
||||
stripe_product_id: Optional[str] = None,
|
||||
stripe_price_id: Optional[str] = None,
|
||||
active: Optional[bool] = None,
|
||||
) -> BugoutResource:
|
||||
"""
|
||||
Update a Moonstream subscription type using the Brood Resources API.
|
||||
|
||||
Args:
|
||||
- id: Moonstream ID for the subscription type. Examples: "ethereum_blockchain", "ethereum_txpool",
|
||||
"ethereum_whalewatch", etc.
|
||||
- name: Human-friendly name for the subscription type, which can be displayed to users.
|
||||
- description: Detailed description of the subscription type for users who would like more
|
||||
information.
|
||||
- icon_url: URL to the icon for this subscription type
|
||||
- stripe_product_id: Optional product ID from Stripe account dashboard.
|
||||
- stripe_price_id: Optional price ID from Stripe account dashboard.
|
||||
- active: Set to True if you would like the subscription type to immediately be available for
|
||||
subscriptions. If you set this to False (which is the default), users will not be able to create
|
||||
subscriptions of this type until you later on set to true.
|
||||
"""
|
||||
|
||||
resource = get_subscription_type(id)
|
||||
if resource is None:
|
||||
raise SubscriptionTypeNotFoundError(
|
||||
f"Could not find subscription type with ID: {id}."
|
||||
)
|
||||
|
||||
brood_resource_id = resource.id
|
||||
updated_resource_data = resource.resource_data
|
||||
if name is not None:
|
||||
updated_resource_data["name"] = name
|
||||
if description is not None:
|
||||
updated_resource_data["description"] = description
|
||||
if choices is not None:
|
||||
updated_resource_data["choices"] = choices
|
||||
if icon_url is not None:
|
||||
updated_resource_data["icon_url"] = icon_url
|
||||
if stripe_product_id is not None:
|
||||
updated_resource_data["stripe_product_id"] = stripe_product_id
|
||||
if stripe_price_id is not None:
|
||||
updated_resource_data["stripe_price_id"] = stripe_price_id
|
||||
if active is not None:
|
||||
updated_resource_data["active"] = active
|
||||
|
||||
# TODO(zomglings): This was written with an outdated bugout-python client.
|
||||
# New client has an update_resource method which is what we should be using
|
||||
# here.
|
||||
|
||||
try:
|
||||
new_resource = bc.update_resource(
|
||||
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
resource_id=brood_resource_id,
|
||||
resource_data={"update": updated_resource_data},
|
||||
timeout=BUGOUT_REQUEST_TIMEOUT_SECONDS,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise ConflictingSubscriptionTypesError(
|
||||
f"Unable to delete old subscription type with ID: {id}. Error:\n{repr(e)}"
|
||||
)
|
||||
|
||||
return new_resource
|
||||
|
||||
|
||||
def cli_update_subscription_type(args: argparse.Namespace) -> None:
|
||||
"""
|
||||
Handler for "mnstr subtypes update".
|
||||
"""
|
||||
result = update_subscription_type(
|
||||
args.id,
|
||||
args.name,
|
||||
args.description,
|
||||
args.choices,
|
||||
args.icon,
|
||||
args.stripe_product_id,
|
||||
args.stripe_price_id,
|
||||
args.active,
|
||||
)
|
||||
print(result.json())
|
||||
|
||||
|
||||
def delete_subscription_type(id: str) -> Optional[BugoutResource]:
|
||||
"""
|
||||
Deletes the subscription type resource with the given ID.
|
||||
|
||||
Args:
|
||||
- id: Moonstream ID of the subscription type you would like to delete. Examples - "ethereum_blockchain",
|
||||
"ethereum_whalewatch", etc.
|
||||
|
||||
Returns: The BugoutResource that was deleted. If no such resource existed in the first place, returns
|
||||
None. If multiple resources existed with the given Moonstream ID, raises a ConflictingSubscriptionTypesError
|
||||
and does not delete anything!
|
||||
"""
|
||||
# ConflictingSubscriptionTypesError raised here if there are multiple resources with the given id.
|
||||
resource = get_subscription_type(id)
|
||||
if resource is None:
|
||||
return None
|
||||
|
||||
resource = bc.delete_resource(
|
||||
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
resource_id=resource.id,
|
||||
timeout=BUGOUT_REQUEST_TIMEOUT_SECONDS,
|
||||
)
|
||||
|
||||
return resource
|
||||
|
||||
|
||||
def cli_delete_subscription_type(args: argparse.Namespace) -> None:
|
||||
"""
|
||||
Handler for "mnstr subtypes delete".
|
||||
"""
|
||||
result = delete_subscription_type(args.id)
|
||||
if result is None:
|
||||
print(f"Could not find resource with ID: {id}")
|
||||
else:
|
||||
print(result.json())
|
||||
|
||||
|
||||
def ensure_canonical_subscription_types() -> BugoutResources:
|
||||
"""
|
||||
Ensures that the connected Brood API has at least the canonical subscription types. If any of the
|
||||
canonical subscription types does not exist as a Brood resource, this API creates the corresponding
|
||||
resource. If any of the canonical subscription types exists as a Brood resource but has been modified,
|
||||
this method does not change it on the server.
|
||||
|
||||
Args: None
|
||||
|
||||
Returns: A list of the resources representing the canonical subscription types as they exist
|
||||
on the connected Brood API.
|
||||
"""
|
||||
existing_canonical_subscription_types: Dict[str, BugoutResource] = {}
|
||||
for id, canonical_subscription_type in CANONICAL_SUBSCRIPTION_TYPES.items():
|
||||
resource = get_subscription_type(canonical_subscription_type.id)
|
||||
if resource is not None:
|
||||
existing_canonical_subscription_types[id] = resource
|
||||
|
||||
for id in CANONICAL_SUBSCRIPTION_TYPES.keys():
|
||||
if existing_canonical_subscription_types.get(id) is None:
|
||||
canonical_subscription_type = CANONICAL_SUBSCRIPTION_TYPES[id]
|
||||
resource = create_subscription_type(
|
||||
id,
|
||||
canonical_subscription_type.name,
|
||||
canonical_subscription_type.description,
|
||||
canonical_subscription_type.icon_url,
|
||||
canonical_subscription_type.choices,
|
||||
canonical_subscription_type.stripe_product_id,
|
||||
canonical_subscription_type.stripe_price_id,
|
||||
canonical_subscription_type.active,
|
||||
)
|
||||
existing_canonical_subscription_types[id] = resource
|
||||
else:
|
||||
canonical_subscription_type = CANONICAL_SUBSCRIPTION_TYPES[id]
|
||||
resource = update_subscription_type(
|
||||
id,
|
||||
canonical_subscription_type.name,
|
||||
canonical_subscription_type.description,
|
||||
canonical_subscription_type.choices,
|
||||
canonical_subscription_type.icon_url,
|
||||
canonical_subscription_type.stripe_product_id,
|
||||
canonical_subscription_type.stripe_price_id,
|
||||
canonical_subscription_type.active,
|
||||
)
|
||||
existing_canonical_subscription_types[id] = resource
|
||||
|
||||
return BugoutResources(
|
||||
resources=list(existing_canonical_subscription_types.values())
|
||||
)
|
||||
|
||||
|
||||
def cli_ensure_canonical_subscription_types(args: argparse.Namespace) -> None:
|
||||
"""
|
||||
Handler for "mnstr subtypes ensure-canonical
|
||||
"""
|
||||
resources = ensure_canonical_subscription_types()
|
||||
print(resources.json())
|
|
@ -1,411 +0,0 @@
|
|||
"""
|
||||
The Moonstream queries HTTP API
|
||||
"""
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional, Tuple, Union
|
||||
from uuid import UUID
|
||||
|
||||
import boto3 # type: ignore
|
||||
from bugout.data import BugoutResources, BugoutJournalEntryContent, BugoutJournalEntry
|
||||
from bugout.exceptions import BugoutResponseException
|
||||
from fastapi import APIRouter, Body, Request
|
||||
import requests
|
||||
|
||||
|
||||
from .. import data
|
||||
from ..actions import get_query_by_name, name_normalization, NameNormalizationException
|
||||
from ..middleware import MoonstreamHTTPException
|
||||
from ..settings import (
|
||||
MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
MOONSTREAM_APPLICATION_ID,
|
||||
MOONSTREAM_CRAWLERS_SERVER_URL,
|
||||
MOONSTREAM_CRAWLERS_SERVER_PORT,
|
||||
MOONSTREAM_S3_QUERIES_BUCKET,
|
||||
MOONSTREAM_S3_QUERIES_BUCKET_PREFIX,
|
||||
MOONSTREAM_QUERIES_JOURNAL_ID,
|
||||
)
|
||||
from ..settings import bugout_client as bc
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(
|
||||
prefix="/queries",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/list", tags=["queries"])
|
||||
async def get_list_of_queries_handler(request: Request) -> List[Dict[str, Any]]:
|
||||
|
||||
token = request.state.token
|
||||
|
||||
# Check already existed queries
|
||||
|
||||
params = {
|
||||
"type": data.BUGOUT_RESOURCE_QUERY_RESOLVER,
|
||||
}
|
||||
try:
|
||||
resources: BugoutResources = bc.list_resources(token=token, params=params)
|
||||
except BugoutResponseException as e:
|
||||
raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
|
||||
except Exception as e:
|
||||
raise MoonstreamHTTPException(status_code=500, internal_error=e)
|
||||
|
||||
users_queries: List[Dict[str, Any]] = [
|
||||
resource.resource_data for resource in resources.resources
|
||||
]
|
||||
return users_queries
|
||||
|
||||
|
||||
@router.post("/", tags=["queries"])
|
||||
async def create_query_handler(
|
||||
request: Request, query_applied: data.PreapprovedQuery = Body(...)
|
||||
) -> BugoutJournalEntry:
|
||||
"""
|
||||
Create query in bugout journal
|
||||
"""
|
||||
|
||||
token = request.state.token
|
||||
|
||||
user = request.state.user
|
||||
|
||||
# Check already existed queries
|
||||
|
||||
params = {
|
||||
"type": data.BUGOUT_RESOURCE_QUERY_RESOLVER,
|
||||
}
|
||||
try:
|
||||
resources: BugoutResources = bc.list_resources(token=token, params=params)
|
||||
except BugoutResponseException as e:
|
||||
raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
|
||||
except Exception as e:
|
||||
raise MoonstreamHTTPException(status_code=500, internal_error=e)
|
||||
|
||||
used_queries: List[str] = [
|
||||
resource.resource_data["name"] for resource in resources.resources
|
||||
]
|
||||
try:
|
||||
query_name = name_normalization(query_applied.name)
|
||||
except NameNormalizationException:
|
||||
raise MoonstreamHTTPException(
|
||||
status_code=403,
|
||||
detail=f"Provided query name can't be normalize please select different.",
|
||||
)
|
||||
|
||||
if query_name in used_queries:
|
||||
|
||||
raise MoonstreamHTTPException(
|
||||
status_code=404,
|
||||
detail=f"Provided query name already use. Please remove it or use PUT /{query_name} for update query",
|
||||
)
|
||||
|
||||
try:
|
||||
# Put query to journal
|
||||
entry = bc.create_entry(
|
||||
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
journal_id=MOONSTREAM_QUERIES_JOURNAL_ID,
|
||||
title=f"Query:{query_name}",
|
||||
tags=["type:query"],
|
||||
content=query_applied.query,
|
||||
)
|
||||
except BugoutResponseException as e:
|
||||
raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
|
||||
except Exception as e:
|
||||
raise MoonstreamHTTPException(status_code=500, internal_error=e)
|
||||
|
||||
try:
|
||||
# create resource query_name_resolver
|
||||
bc.create_resource(
|
||||
token=token,
|
||||
application_id=MOONSTREAM_APPLICATION_ID,
|
||||
resource_data={
|
||||
"type": data.BUGOUT_RESOURCE_QUERY_RESOLVER,
|
||||
"user_id": str(user.id),
|
||||
"user": str(user.username),
|
||||
"name": query_name,
|
||||
"entry_id": str(entry.id),
|
||||
},
|
||||
)
|
||||
except BugoutResponseException as e:
|
||||
logger.error(f"Error creating name resolving resource: {str(e)}")
|
||||
raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
|
||||
except Exception as e:
|
||||
raise MoonstreamHTTPException(status_code=500, internal_error=e)
|
||||
|
||||
try:
|
||||
|
||||
bc.update_tags(
|
||||
token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
|
||||
journal_id=MOONSTREAM_QUERIES_JOURNAL_ID,
|
||||
entry_id=entry.id,
|
||||
tags=[f"query_id:{entry.id}", f"preapprove"],
|
||||
)
|
||||
|
||||
except BugoutResponseException as e:
|
||||
logger.error(f"Error in applind tags to query entry: {str(e)}")
|
||||
raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
|
||||
except Exception as e:
|
||||
raise MoonstreamHTTPException(status_code=500, internal_error=e)
|
||||
|
||||
return entry
|
||||
|
||||
|
||||
@router.get("/{query_name}/query", tags=["queries"])
async def get_query_handler(request: Request, query_name: str) -> BugoutJournalEntry:
    """Look up a stored query by name and return its backing journal entry."""
    token = request.state.token

    # Resolve the human-readable query name to its journal entry id.
    try:
        query_id = get_query_by_name(query_name, token)
    except NameNormalizationException:
        raise MoonstreamHTTPException(
            status_code=403,
            detail=f"Provided query name can't be normalize please select different.",
        )
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    # Fetch the entry itself with admin credentials against the queries journal.
    try:
        return bc.get_entry(
            token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
            journal_id=MOONSTREAM_QUERIES_JOURNAL_ID,
            entry_id=query_id,
        )
    except BugoutResponseException as e:
        logger.error(f"Error in get query: {str(e)}")
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)
|
||||
|
||||
|
||||
@router.put("/{query_name}", tags=["queries"])
async def update_query_handler(
    request: Request,
    query_name: str,
    request_update: data.UpdateQueryRequest = Body(...),
) -> BugoutJournalEntryContent:
    """Replace the body of an existing named query and flag it for re-approval."""
    token = request.state.token

    # Map the query name to the id of the journal entry that stores it.
    try:
        query_id = get_query_by_name(query_name, token)
    except NameNormalizationException:
        raise MoonstreamHTTPException(
            status_code=403,
            detail=f"Provided query name can't be normalize please select different.",
        )
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    # Overwrite the entry content; the "preapprove" tag marks it for review.
    try:
        return bc.update_entry_content(
            token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
            journal_id=MOONSTREAM_QUERIES_JOURNAL_ID,
            entry_id=query_id,
            title=query_name,
            content=request_update.query,
            tags=["preapprove"],
        )
    except BugoutResponseException as e:
        logger.error(f"Error in updating query: {str(e)}")
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)
|
||||
|
||||
|
||||
@router.post(
    "/{query_name}/update_data",
    tags=["queries"],
)
async def update_query_data_handler(
    request: Request,
    query_name: str,
    request_update: data.UpdateDataRequest = Body(...),
) -> Optional[data.QueryPresignUrl]:
    """
    Request update data on S3 bucket.

    Resolves the query name, checks the query has been approved, then asks the
    crawlers service to re-execute it and refresh the results stored on S3.

    Returns the presign-URL payload from the crawlers service, or None when the
    approved entry has no content to execute.
    """
    token = request.state.token

    try:
        query_id = get_query_by_name(query_name, token)
    except NameNormalizationException:
        raise MoonstreamHTTPException(
            status_code=403,
            detail=f"Provided query name can't be normalize please select different.",
        )
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    try:
        # Only queries that passed review carry "approved" without "preapprove".
        entries = bc.search(
            token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
            journal_id=MOONSTREAM_QUERIES_JOURNAL_ID,
            query=f"tag:approved tag:query_id:{query_id} !tag:preapprove",
            limit=1,
            timeout=5,
        )

        if len(entries.results) == 0:
            raise MoonstreamHTTPException(
                status_code=403, detail="Query not approved yet."
            )

        s3_response = None

        if entries.results[0].content:
            content = entries.results[0].content

            tags = entries.results[0].tags

            # Result file format defaults to JSON unless the entry is tagged "ext:csv".
            file_type = "json"

            if "ext:csv" in tags:
                file_type = "csv"

            response = requests.post(
                f"{MOONSTREAM_CRAWLERS_SERVER_URL}:{MOONSTREAM_CRAWLERS_SERVER_PORT}/jobs/{query_id}/query_update",
                json={
                    "query": content,
                    "params": request_update.params,
                    "file_type": file_type,
                },
                timeout=5,
            )

            if response.status_code != 200:
                raise MoonstreamHTTPException(
                    status_code=response.status_code,
                    detail=response.text,
                )

            s3_response = data.QueryPresignUrl(**response.json())
    except MoonstreamHTTPException:
        # Bug fix: re-raise deliberate HTTP errors (403 "not approved yet",
        # proxied crawler errors) instead of letting the broad handler below
        # convert them into opaque 500 responses.
        raise
    except BugoutResponseException as e:
        logger.error(f"Error in updating query: {str(e)}")
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    return s3_response
|
||||
|
||||
|
||||
@router.get("/{query_name}", tags=["queries"])
async def get_access_link_handler(
    request: Request,
    query_name: str,
) -> Optional[data.QueryPresignUrl]:
    """
    Request S3 presign url
    """
    # Resolve the query name to the id of the journal entry that stores it.
    token = request.state.token

    try:
        query_id = get_query_by_name(query_name, token)
    except NameNormalizationException:
        raise MoonstreamHTTPException(
            status_code=403,
            detail=f"Provided query name can't be normalize please select different.",
        )
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    s3 = boto3.client("s3")

    s3_response = None
    try:
        # Only approved (non-preapprove) entries are eligible for data access.
        entries = bc.search(
            token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
            journal_id=MOONSTREAM_QUERIES_JOURNAL_ID,
            query=f"tag:approved tag:query_id:{query_id} !tag:preapprove",
            limit=1,
            timeout=5,
        )

        if entries.results and entries.results[0].content:
            # Stored results are CSV when tagged "ext:csv", JSON otherwise.
            file_type = "csv" if "ext:csv" in entries.results[0].tags else "json"

            stats_presigned_url = s3.generate_presigned_url(
                "get_object",
                Params={
                    "Bucket": MOONSTREAM_S3_QUERIES_BUCKET,
                    "Key": f"{MOONSTREAM_S3_QUERIES_BUCKET_PREFIX}/queries/{query_id}/data.{file_type}",
                },
                ExpiresIn=300000,
                HttpMethod="GET",
            )
            s3_response = data.QueryPresignUrl(url=stats_presigned_url)
    except BugoutResponseException as e:
        logger.error(f"Error in get access link: {str(e)}")
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    return s3_response
|
||||
|
||||
|
||||
@router.delete("/{query_name}", tags=["queries"])
async def remove_query_handler(
    request: Request,
    query_name: str,
) -> BugoutJournalEntry:
    """
    Request delete query from journal
    """
    token = request.state.token

    # Find the user's name-resolver resources matching this query name.
    params = {"type": data.BUGOUT_RESOURCE_QUERY_RESOLVER, "name": query_name}
    try:
        resources: BugoutResources = bc.list_resources(token=token, params=params)
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    # Map query name -> (resolver resource id, backing journal entry id).
    query_ids: Dict[str, Tuple[UUID, Union[UUID, str]]] = {}
    for resource in resources.resources:
        query_ids[resource.resource_data["name"]] = (
            resource.id,
            resource.resource_data["entry_id"],
        )

    if len(query_ids) == 0:
        raise MoonstreamHTTPException(status_code=404, detail="Query does not exists")

    # Remove the resolver resource first, then the journal entry it points to.
    try:
        bc.delete_resource(token=token, resource_id=query_ids[query_name][0])
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    try:
        entry = bc.delete_entry(
            token=MOONSTREAM_ADMIN_ACCESS_TOKEN,
            journal_id=MOONSTREAM_QUERIES_JOURNAL_ID,
            entry_id=query_ids[query_name][1],
        )
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    return entry
|
|
@ -1,397 +0,0 @@
|
|||
"""
The Moonstream subscriptions HTTP API
"""
import hashlib
import json
import logging
from typing import Any, Dict, List, Optional

import boto3  # type: ignore
from bugout.data import BugoutResource, BugoutResources
from bugout.exceptions import BugoutResponseException
from fastapi import APIRouter, BackgroundTasks, Depends, Form, Request
from web3 import Web3

# Bug fix: `upload_abi_to_s3`/`validate_abi_json` and `subscription_types`
# were each imported twice; the duplicates are collapsed into single imports.
from .. import data
from ..actions import apply_moonworm_tasks, upload_abi_to_s3, validate_abi_json
from ..admin import subscription_types
from ..middleware import MoonstreamHTTPException
from ..reporter import reporter
from ..settings import (
    MOONSTREAM_APPLICATION_ID,
    MOONSTREAM_S3_SMARTCONTRACTS_ABI_BUCKET,
    MOONSTREAM_S3_SMARTCONTRACTS_ABI_PREFIX,
)
from ..settings import bugout_client as bc
from ..web3_provider import yield_web3_provider

logger = logging.getLogger(__name__)

router = APIRouter(
    prefix="/subscriptions",
)

# Resource "type" discriminator used for subscription resources in Brood.
BUGOUT_RESOURCE_TYPE_SUBSCRIPTION = "subscription"
|
||||
|
||||
|
||||
@router.post("/", tags=["subscriptions"], response_model=data.SubscriptionResourceData)
async def add_subscription_handler(
    request: Request,  # subscription_data: data.CreateSubscriptionRequest = Body(...)
    background_tasks: BackgroundTasks,
    address: str = Form(...),
    color: str = Form(...),
    label: str = Form(...),
    subscription_type_id: str = Form(...),
    abi: Optional[str] = Form(None),
    web3: Web3 = Depends(yield_web3_provider),
) -> data.SubscriptionResourceData:
    """
    Add subscription to blockchain stream data for user.

    Creates a Brood resource describing the subscription. When an ABI is
    supplied, it is validated, uploaded to S3, its MD5 hash recorded on the
    resource, and moonworm crawl tasks are scheduled in the background.

    Raises MoonstreamHTTPException: 400 on a bad address or malformed ABI,
    404 for an unknown/inactive subscription type, 5xx on upstream errors.
    """
    token = request.state.token

    # Normalize the contract address, except for the whale-watch type which
    # is not tied to a single checksummable contract address.
    if subscription_type_id != "ethereum_whalewatch":
        try:
            address = web3.toChecksumAddress(address)
        except ValueError as e:
            raise MoonstreamHTTPException(
                status_code=400,
                detail=str(e),
                internal_error=e,
            )
        except Exception as e:
            logger.error(f"Failed to convert address to checksum address")
            raise MoonstreamHTTPException(
                status_code=500,
                internal_error=e,
                detail="Currently unable to convert address to checksum address",
            )

    # Reject subscription types that are not currently active.
    active_subscription_types_response = subscription_types.list_subscription_types(
        active_only=True
    )
    available_subscription_type_ids = [
        subscription_type.resource_data.get("id")
        for subscription_type in active_subscription_types_response.resources
        if subscription_type.resource_data.get("id") is not None
    ]

    if subscription_type_id not in available_subscription_type_ids:
        raise MoonstreamHTTPException(
            status_code=404,
            detail=f"Invalid subscription type: {subscription_type_id}.",
        )

    user = request.state.user

    # abi/bucket/s3_path start empty; they are filled in below when an ABI is given.
    resource_data = {
        "type": BUGOUT_RESOURCE_TYPE_SUBSCRIPTION,
        "user_id": str(user.id),
        "subscription_type_id": subscription_type_id,
        "address": address,
        "color": color,
        "label": label,
        "abi": None,
        "bucket": None,
        "s3_path": None,
    }

    try:
        resource: BugoutResource = bc.create_resource(
            token=token,
            application_id=MOONSTREAM_APPLICATION_ID,
            resource_data=resource_data,
        )
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        logger.error(f"Error creating subscription resource: {str(e)}")
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    if abi:

        try:
            json_abi = json.loads(abi)
        except json.JSONDecodeError:
            raise MoonstreamHTTPException(status_code=400, detail="Malformed abi body.")

        validate_abi_json(json_abi)

        # Upload the raw ABI to S3; returns the resource fields to update.
        update_resource = upload_abi_to_s3(resource=resource, abi=abi, update={})

        # Hash a canonical (sorted-keys) serialization so equal ABIs hash equally.
        abi_string = json.dumps(json_abi, sort_keys=True, indent=2)

        hash = hashlib.md5(abi_string.encode("utf-8")).hexdigest()

        update_resource["abi_hash"] = hash

        try:
            updated_resource: BugoutResource = bc.update_resource(
                token=token,
                resource_id=resource.id,
                resource_data=data.SubscriptionUpdate(
                    update=update_resource,
                ).dict(),
            )
            resource = updated_resource
        except BugoutResponseException as e:
            raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
        except Exception as e:
            logger.error(f"Error getting user subscriptions: {str(e)}")
            raise MoonstreamHTTPException(status_code=500, internal_error=e)

        # Schedule moonworm crawl-task creation after the response is sent.
        background_tasks.add_task(
            apply_moonworm_tasks,
            subscription_type_id,
            json_abi,
            address,
        )

    return data.SubscriptionResourceData(
        id=str(resource.id),
        user_id=resource.resource_data["user_id"],
        address=resource.resource_data["address"],
        color=resource.resource_data["color"],
        label=resource.resource_data["label"],
        abi=resource.resource_data.get("abi"),
        subscription_type_id=resource.resource_data["subscription_type_id"],
        updated_at=resource.updated_at,
        created_at=resource.created_at,
    )
|
||||
|
||||
|
||||
@router.delete(
    "/{subscription_id}",
    tags=["subscriptions"],
    response_model=data.SubscriptionResourceData,
)
async def delete_subscription_handler(request: Request, subscription_id: str):
    """
    Delete subscriptions.
    """
    token = request.state.token
    try:
        deleted_resource = bc.delete_resource(token=token, resource_id=subscription_id)
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        logger.error(f"Error deleting subscription: {str(e)}")
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    # Echo the removed subscription back to the caller.
    removed = deleted_resource.resource_data
    return data.SubscriptionResourceData(
        id=str(deleted_resource.id),
        user_id=removed["user_id"],
        address=removed["address"],
        color=removed["color"],
        label=removed["label"],
        abi=removed.get("abi"),
        subscription_type_id=removed["subscription_type_id"],
        updated_at=deleted_resource.updated_at,
        created_at=deleted_resource.created_at,
    )
|
||||
|
||||
|
||||
@router.get("/", tags=["subscriptions"], response_model=data.SubscriptionsListResponse)
async def get_subscriptions_handler(request: Request) -> data.SubscriptionsListResponse:
    """
    Get user's subscriptions.

    Lists Brood resources of type "subscription" belonging to the requesting
    user and maps each to a SubscriptionResourceData payload.
    """
    token = request.state.token
    params = {
        "type": BUGOUT_RESOURCE_TYPE_SUBSCRIPTION,
        "user_id": str(request.state.user.id),
    }
    try:
        resources: BugoutResources = bc.list_resources(token=token, params=params)
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        # Bug fix: the log line previously read `request.user.id`; this app
        # stores the authenticated user on `request.state` (see the params
        # built above), and `request.user` raises unless Starlette's
        # AuthenticationMiddleware is installed — masking the original error.
        logger.error(
            f"Error listing subscriptions for user ({request.state.user.id}) with token ({request.state.token}), error: {str(e)}"
        )
        reporter.error_report(e)
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    return data.SubscriptionsListResponse(
        subscriptions=[
            data.SubscriptionResourceData(
                id=str(resource.id),
                user_id=resource.resource_data["user_id"],
                address=resource.resource_data["address"],
                color=resource.resource_data["color"],
                label=resource.resource_data["label"],
                abi=resource.resource_data.get("abi"),
                subscription_type_id=resource.resource_data["subscription_type_id"],
                updated_at=resource.updated_at,
                created_at=resource.created_at,
            )
            for resource in resources.resources
        ]
    )
|
||||
|
||||
|
||||
@router.put(
    "/{subscription_id}",
    tags=["subscriptions"],
    response_model=data.SubscriptionResourceData,
)
async def update_subscriptions_handler(
    request: Request,
    subscription_id: str,
    background_tasks: BackgroundTasks,
    color: Optional[str] = Form(None),
    label: Optional[str] = Form(None),
    abi: Optional[str] = Form(None),
) -> data.SubscriptionResourceData:
    """
    Get user's subscriptions.

    Updates the subscription's color/label and, optionally, attaches an ABI to
    a subscription that does not have one yet. Attaching an ABI uploads it to
    S3, records its MD5 hash, and schedules moonworm tasks in the background.

    Raises MoonstreamHTTPException: 400 for a malformed ABI or an already-set
    ABI, 5xx on upstream errors.
    """
    token = request.state.token

    # Accumulate only the fields the caller actually supplied.
    update: Dict[str, Any] = {}

    if color:
        update["color"] = color

    if label:
        update["label"] = label

    if abi:

        try:
            json_abi = json.loads(abi)
        except json.JSONDecodeError:
            raise MoonstreamHTTPException(status_code=400, detail="Malformed abi body.")

        validate_abi_json(json_abi)

        # Hash a canonical (sorted-keys) serialization so equal ABIs hash equally.
        abi_string = json.dumps(json_abi, sort_keys=True, indent=2)

        hash = hashlib.md5(abi_string.encode("utf-8")).hexdigest()

        try:
            subscription_resource: BugoutResource = bc.get_resource(
                token=token,
                resource_id=subscription_id,
            )
        except BugoutResponseException as e:
            raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
        except Exception as e:
            logger.error(f"Error creating subscription resource: {str(e)}")
            raise MoonstreamHTTPException(status_code=500, internal_error=e)

        # An ABI is immutable once set; a new subscription is required to change it.
        if subscription_resource.resource_data["abi"] is not None:
            raise MoonstreamHTTPException(
                status_code=400,
                detail="Subscription already have ABI. For add a new ABI create new subscription.",
            )

        update = upload_abi_to_s3(
            resource=subscription_resource, abi=abi, update=update
        )

        update["abi_hash"] = hash

    try:
        resource: BugoutResource = bc.update_resource(
            token=token,
            resource_id=subscription_id,
            resource_data=data.SubscriptionUpdate(
                update=update,
            ).dict(),
        )
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        logger.error(f"Error getting user subscriptions: {str(e)}")
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    if abi:
        # json_abi and subscription_resource exist because abi was truthy above.
        background_tasks.add_task(
            apply_moonworm_tasks,
            subscription_resource.resource_data["subscription_type_id"],
            json_abi,
            subscription_resource.resource_data["address"],
        )

    return data.SubscriptionResourceData(
        id=str(resource.id),
        user_id=resource.resource_data["user_id"],
        address=resource.resource_data["address"],
        color=resource.resource_data["color"],
        label=resource.resource_data["label"],
        abi=resource.resource_data.get("abi"),
        subscription_type_id=resource.resource_data["subscription_type_id"],
        updated_at=resource.updated_at,
        created_at=resource.created_at,
    )
|
||||
|
||||
|
||||
@router.get(
    "/{subscription_id}/abi",
    tags=["subscriptions"],
    response_model=data.SubdcriptionsAbiResponse,
)
async def get_subscription_abi_handler(
    request: Request,
    subscription_id: str,
) -> data.SubdcriptionsAbiResponse:
    """Return a short-lived presigned S3 URL for the subscription's stored ABI."""
    token = request.state.token

    try:
        subscription_resource: BugoutResource = bc.get_resource(
            token=token,
            resource_id=subscription_id,
        )
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        logger.error(f"Error creating subscription resource: {str(e)}")
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    if subscription_resource.resource_data["abi"] is None:
        raise MoonstreamHTTPException(
            status_code=404,
            detail="Subscription abi not exists.",
        )

    # The resource records where the ABI file lives on S3; sign a GET for it.
    s3_client = boto3.client("s3")
    presigned_url = s3_client.generate_presigned_url(
        "get_object",
        Params={
            "Bucket": subscription_resource.resource_data["bucket"],
            "Key": f"{subscription_resource.resource_data['s3_path']}",
        },
        ExpiresIn=300,
        HttpMethod="GET",
    )

    return data.SubdcriptionsAbiResponse(url=presigned_url)
|
||||
|
||||
|
||||
@router.get(
    "/types", tags=["subscriptions"], response_model=data.SubscriptionTypesListResponse
)
async def list_subscription_types() -> data.SubscriptionTypesListResponse:
    """
    Get availables subscription types.
    """
    results: List[data.SubscriptionTypeResourceData] = []
    try:
        response = subscription_types.list_subscription_types()
        # Validate each Brood resource payload into the typed response model.
        for resource in response.resources:
            results.append(
                data.SubscriptionTypeResourceData.validate(resource.resource_data)
            )
    except BugoutResponseException as e:
        raise MoonstreamHTTPException(status_code=e.status_code, detail=e.detail)
    except Exception as e:
        logger.error(f"Error reading subscription types from Brood API: {str(e)}")
        raise MoonstreamHTTPException(status_code=500, internal_error=e)

    return data.SubscriptionTypesListResponse(subscription_types=results)
|
|
@ -1,113 +0,0 @@
|
|||
import os

from bugout.app import Bugout

# Bugout
BUGOUT_BROOD_URL = os.environ.get("BUGOUT_BROOD_URL", "https://auth.bugout.dev")
BUGOUT_SPIRE_URL = os.environ.get("BUGOUT_SPIRE_URL", "https://spire.bugout.dev")


bugout_client = Bugout(brood_api_url=BUGOUT_BROOD_URL, spire_api_url=BUGOUT_SPIRE_URL)

BUGOUT_REQUEST_TIMEOUT_SECONDS = 5

HUMBUG_REPORTER_BACKEND_TOKEN = os.environ.get("HUMBUG_REPORTER_BACKEND_TOKEN")

# Default value is "" instead of None so that mypy understands that MOONSTREAM_APPLICATION_ID is a string
MOONSTREAM_APPLICATION_ID = os.environ.get("MOONSTREAM_APPLICATION_ID", "")
if MOONSTREAM_APPLICATION_ID == "":
    raise ValueError("MOONSTREAM_APPLICATION_ID environment variable must be set")

MOONSTREAM_DATA_JOURNAL_ID = os.environ.get("MOONSTREAM_DATA_JOURNAL_ID", "")
if MOONSTREAM_DATA_JOURNAL_ID == "":
    raise ValueError("MOONSTREAM_DATA_JOURNAL_ID environment variable must be set")


MOONSTREAM_QUERIES_JOURNAL_ID = os.environ.get("MOONSTREAM_QUERIES_JOURNAL_ID", "")
# Bug fix: this guard previously re-checked MOONSTREAM_DATA_JOURNAL_ID, so a
# missing MOONSTREAM_QUERIES_JOURNAL_ID silently slipped through validation.
if MOONSTREAM_QUERIES_JOURNAL_ID == "":
    raise ValueError("MOONSTREAM_QUERIES_JOURNAL_ID environment variable must be set")


MOONSTREAM_ADMIN_ACCESS_TOKEN = os.environ.get("MOONSTREAM_ADMIN_ACCESS_TOKEN", "")
if MOONSTREAM_ADMIN_ACCESS_TOKEN == "":
    raise ValueError("MOONSTREAM_ADMIN_ACCESS_TOKEN environment variable must be set")

# Origin
RAW_ORIGINS = os.environ.get("MOONSTREAM_CORS_ALLOWED_ORIGINS")
if RAW_ORIGINS is None:
    raise ValueError(
        "MOONSTREAM_CORS_ALLOWED_ORIGINS environment variable must be set (comma-separated list of CORS allowed origins)"
    )
ORIGINS = RAW_ORIGINS.split(",")

# OpenAPI
DOCS_TARGET_PATH = "docs"

# Default look-back window (seconds) for stream queries.
DEFAULT_STREAM_TIMEINTERVAL = 5 * 60

HUMBUG_TXPOOL_CLIENT_ID = os.environ.get(
    "HUMBUG_TXPOOL_CLIENT_ID", "client:ethereum-txpool-crawler-0"
)

# S3 Bucket
ETHERSCAN_SMARTCONTRACTS_BUCKET = os.environ.get("MOONSTREAM_S3_SMARTCONTRACTS_BUCKET")
if ETHERSCAN_SMARTCONTRACTS_BUCKET is None:
    raise ValueError("MOONSTREAM_S3_SMARTCONTRACTS_BUCKET is not set")

MOONSTREAM_S3_SMARTCONTRACTS_ABI_BUCKET = os.environ.get(
    "MOONSTREAM_S3_SMARTCONTRACTS_ABI_BUCKET"
)
if MOONSTREAM_S3_SMARTCONTRACTS_ABI_BUCKET is None:
    raise ValueError(
        "MOONSTREAM_S3_SMARTCONTRACTS_ABI_BUCKET environment variable must be set"
    )
MOONSTREAM_S3_SMARTCONTRACTS_ABI_PREFIX = os.environ.get(
    "MOONSTREAM_S3_SMARTCONTRACTS_ABI_PREFIX"
)
if MOONSTREAM_S3_SMARTCONTRACTS_ABI_PREFIX is None:
    raise ValueError(
        "MOONSTREAM_S3_SMARTCONTRACTS_ABI_PREFIX environment variable must be set"
    )
# Normalize: stored without a trailing slash so keys can be built as f"{prefix}/...".
MOONSTREAM_S3_SMARTCONTRACTS_ABI_PREFIX = (
    MOONSTREAM_S3_SMARTCONTRACTS_ABI_PREFIX.rstrip("/")
)

MOONSTREAM_CRAWLERS_SERVER_URL = os.environ.get("MOONSTREAM_CRAWLERS_SERVER_URL")
if MOONSTREAM_CRAWLERS_SERVER_URL is None:
    raise ValueError("MOONSTREAM_CRAWLERS_SERVER_URL environment variable must be set")
MOONSTREAM_CRAWLERS_SERVER_URL = MOONSTREAM_CRAWLERS_SERVER_URL.rstrip("/")

MOONSTREAM_CRAWLERS_SERVER_PORT = os.environ.get("MOONSTREAM_CRAWLERS_SERVER_PORT")
if MOONSTREAM_CRAWLERS_SERVER_PORT is None:
    raise ValueError("MOONSTREAM_CRAWLERS_SERVER_PORT environment variable must be set")
MOONSTREAM_CRAWLERS_SERVER_PORT = MOONSTREAM_CRAWLERS_SERVER_PORT.rstrip("/")


MOONSTREAM_MOONWORM_TASKS_JOURNAL = os.environ.get(
    "MOONSTREAM_MOONWORM_TASKS_JOURNAL", ""
)
if MOONSTREAM_MOONWORM_TASKS_JOURNAL == "":
    raise ValueError(
        "MOONSTREAM_MOONWORM_TASKS_JOURNAL environment variable must be set"
    )

# Web3
MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI = os.environ.get(
    "MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI", ""
)
if MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI == "":
    raise ValueError(
        "MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI environment variable must be set"
    )

MOONSTREAM_S3_QUERIES_BUCKET = os.environ.get("MOONSTREAM_S3_QUERIES_BUCKET", "")
if MOONSTREAM_S3_QUERIES_BUCKET == "":
    raise ValueError("MOONSTREAM_S3_QUERIES_BUCKET environment variable must be set")

MOONSTREAM_S3_QUERIES_BUCKET_PREFIX = os.environ.get(
    "MOONSTREAM_S3_QUERIES_BUCKET_PREFIX", ""
)
if MOONSTREAM_S3_QUERIES_BUCKET_PREFIX == "":
    raise ValueError(
        "MOONSTREAM_S3_QUERIES_BUCKET_PREFIX environment variable must be set"
    )
|
|
@ -1,15 +0,0 @@
|
|||
import logging

from web3 import Web3

from .settings import MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI

logger = logging.getLogger(__name__)

# Single module-level provider shared by all request handlers.
moonstream_web3_provider = Web3(Web3.HTTPProvider(MOONSTREAM_ETHEREUM_WEB3_PROVIDER_URI))


def yield_web3_provider() -> Web3:
    """FastAPI dependency returning the shared module-level Web3 provider."""
    return moonstream_web3_provider
|
|
@ -143,5 +143,10 @@ cython_debug/
|
|||
|
||||
# End of https://www.toptal.com/developers/gitignore/api/python
|
||||
|
||||
# Custom
|
||||
.moonstream-py/
|
||||
.venv/
|
||||
.secrets/
|
||||
prod.env
|
||||
dev.env
|
||||
test.env
|
||||
|
|
|
@ -82,7 +82,7 @@ Stream of event packs will be generating from recent timestamp to older and inne
|
|||
|
||||
**From timestamp to timestamp, from bottom to top**
|
||||
|
||||
When `start_time` is less then `end_time`.
|
||||
When `start_time` is less than `end_time`.
|
||||
|
||||
```python
|
||||
for events in mc.create_stream(
|
||||
|
|
|
@ -0,0 +1,24 @@
|
|||
from typing import Any, Dict, Optional

import boto3


def upload_to_aws_s3_bucket(
    data: str,
    bucket: str,
    key: str,
    metadata: Optional[Dict[str, Any]] = None,
) -> str:
    """
    Push data to AWS S3 bucket and return URL to object.

    The returned value is the "{bucket}/{key}" locator of the stored object,
    which is always uploaded with an application/json content type.

    Bug fix: the previous signature used a mutable default (`metadata={}`),
    which is shared across calls; None is now used as the sentinel.
    """
    s3 = boto3.client("s3")
    s3.put_object(
        Body=data,
        Bucket=bucket,
        Key=key,
        ContentType="application/json",
        Metadata={} if metadata is None else metadata,
    )

    return f"{bucket}/{key}"
|
|
@ -1,41 +1,34 @@
|
|||
import logging
|
||||
import os
|
||||
import time
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Any, Dict, Generator, List, Optional, Tuple
|
||||
import uuid
|
||||
from typing import Any, Dict, Union
|
||||
|
||||
import requests
|
||||
|
||||
from .version import MOONSTREAM_CLIENT_VERSION
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
log_level = logging.INFO
|
||||
if os.environ.get("DEBUG", "").lower() in ["true", "1"]:
|
||||
log_level = logging.DEBUG
|
||||
logger.setLevel(log_level)
|
||||
try:
|
||||
from .aws.bucket import upload_to_aws_s3_bucket
|
||||
except Exception as e:
|
||||
pass
|
||||
from .data import (
|
||||
APISpec,
|
||||
AuthType,
|
||||
Method,
|
||||
MoonstreamQueries,
|
||||
MoonstreamQuery,
|
||||
MoonstreamQueryResultUrl,
|
||||
OutputType,
|
||||
)
|
||||
from .exceptions import MoonstreamResponseException, MoonstreamUnexpectedResponse
|
||||
from .settings import MOONSTREAM_API_URL, MOONSTREAM_REQUEST_TIMEOUT
|
||||
|
||||
ENDPOINT_PING = "/ping"
|
||||
ENDPOINT_VERSION = "/version"
|
||||
ENDPOINT_NOW = "/now"
|
||||
ENDPOINT_TOKEN = "/users/token"
|
||||
ENDPOINT_SUBSCRIPTIONS = "/subscriptions/"
|
||||
ENDPOINT_SUBSCRIPTION_TYPES = "/subscriptions/types"
|
||||
ENDPOINT_STREAMS = "/streams/"
|
||||
ENDPOINT_STREAMS_LATEST = "/streams/latest"
|
||||
ENDPOINT_STREAMS_NEXT = "/streams/next"
|
||||
ENDPOINT_STREAMS_PREVIOUS = "/streams/previous"
|
||||
ENDPOINT_QUERIES = "/queries"
|
||||
|
||||
ENDPOINTS = [
|
||||
ENDPOINT_PING,
|
||||
ENDPOINT_VERSION,
|
||||
ENDPOINT_NOW,
|
||||
ENDPOINT_TOKEN,
|
||||
ENDPOINT_SUBSCRIPTIONS,
|
||||
ENDPOINT_SUBSCRIPTION_TYPES,
|
||||
ENDPOINT_STREAMS,
|
||||
ENDPOINT_STREAMS_LATEST,
|
||||
ENDPOINT_STREAMS_NEXT,
|
||||
ENDPOINT_STREAMS_PREVIOUS,
|
||||
ENDPOINT_QUERIES,
|
||||
]
|
||||
|
||||
|
||||
|
@ -43,473 +36,216 @@ def moonstream_endpoints(url: str) -> Dict[str, str]:
|
|||
"""
|
||||
Creates a dictionary of Moonstream API endpoints at the given Moonstream API URL.
|
||||
"""
|
||||
url_with_protocol = url
|
||||
if not (
|
||||
url_with_protocol.startswith("http://")
|
||||
or url_with_protocol.startswith("https://")
|
||||
):
|
||||
url_with_protocol = f"http://{url_with_protocol}"
|
||||
if not (url.startswith("http://") or url.startswith("https://")):
|
||||
url = f"http://{url}"
|
||||
|
||||
normalized_url = url_with_protocol.rstrip("/")
|
||||
normalized_url = url.rstrip("/")
|
||||
|
||||
return {endpoint: f"{normalized_url}{endpoint}" for endpoint in ENDPOINTS}
|
||||
|
||||
|
||||
class UnexpectedResponse(Exception):
|
||||
"""
|
||||
Raised when a server response cannot be parsed into the appropriate/expected Python structure.
|
||||
"""
|
||||
|
||||
|
||||
class Unauthenticated(Exception):
|
||||
"""
|
||||
Raised when a user tries to make a request that needs to be authenticated by they are not authenticated.
|
||||
"""
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class APISpec:
|
||||
url: str
|
||||
endpoints: Dict[str, str]
|
||||
|
||||
|
||||
class Moonstream:
|
||||
"""
|
||||
A Moonstream client configured to communicate with a given Moonstream API server.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
url: str = "https://api.moonstream.to",
|
||||
timeout: Optional[float] = None,
|
||||
):
|
||||
def __init__(self, moonstream_api_url: str = MOONSTREAM_API_URL):
|
||||
"""
|
||||
Initializes a Moonstream API client.
|
||||
|
||||
Arguments:
|
||||
url - Moonstream API URL. By default this points to the production Moonstream API at https://api.moonstream.to,
|
||||
but you can replace it with the URL of any other Moonstream API instance.
|
||||
timeout - Timeout (in seconds) for Moonstream API requests. Default is None, which means that
|
||||
Moonstream API requests will never time out.
|
||||
|
||||
Returns: A Moonstream client.
|
||||
Arguments:
|
||||
url - Moonstream API URL. By default this points to the production Moonstream API at https://api.moonstream.to,
|
||||
but you can replace it with the URL of any other Moonstream API instance.
|
||||
"""
|
||||
endpoints = moonstream_endpoints(url)
|
||||
self.api = APISpec(url=url, endpoints=endpoints)
|
||||
self.timeout = timeout
|
||||
self._session = requests.Session()
|
||||
self._session.headers.update(
|
||||
{
|
||||
"User-Agent": f"Moonstream Python client (version {MOONSTREAM_CLIENT_VERSION})"
|
||||
}
|
||||
)
|
||||
endpoints = moonstream_endpoints(moonstream_api_url)
|
||||
self.api = APISpec(url=moonstream_api_url, endpoints=endpoints)
|
||||
|
||||
def _call(
|
||||
self,
|
||||
method: Method,
|
||||
url: str,
|
||||
timeout: float = MOONSTREAM_REQUEST_TIMEOUT,
|
||||
**kwargs,
|
||||
):
|
||||
try:
|
||||
response = requests.request(
|
||||
method.value, url=url, timeout=timeout, **kwargs
|
||||
)
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise MoonstreamUnexpectedResponse(str(e))
|
||||
return response.json()
|
||||
|
||||
def ping(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Checks that you have a connection to the Moonstream API.
|
||||
"""
|
||||
r = self._session.get(self.api.endpoints[ENDPOINT_PING])
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
result = self._call(method=Method.GET, url=self.api.endpoints[ENDPOINT_PING])
|
||||
return result
|
||||
|
||||
def version(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Gets the Moonstream API version information from the server.
|
||||
"""
|
||||
r = self._session.get(self.api.endpoints[ENDPOINT_VERSION])
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
result = self._call(method=Method.GET, url=self.api.endpoints[ENDPOINT_VERSION])
|
||||
return result
|
||||
|
||||
def server_time(self) -> float:
|
||||
"""
|
||||
Gets the current time (as microseconds since the Unix epoch) on the server.
|
||||
"""
|
||||
r = self._session.get(self.api.endpoints[ENDPOINT_NOW])
|
||||
r.raise_for_status()
|
||||
result = r.json()
|
||||
raw_epoch_time = result.get("epoch_time")
|
||||
if raw_epoch_time is None:
|
||||
raise UnexpectedResponse(
|
||||
f'Server response does not contain "epoch_time": {result}'
|
||||
)
|
||||
|
||||
try:
|
||||
epoch_time = float(raw_epoch_time)
|
||||
except:
|
||||
raise UnexpectedResponse(
|
||||
f"Could not process epoch time as a float: {raw_epoch_time}"
|
||||
)
|
||||
|
||||
return epoch_time
|
||||
|
||||
def authorize(self, access_token: str) -> None:
|
||||
if not access_token:
|
||||
logger.warning("Setting authorization header to empty token.")
|
||||
self._session.headers.update({"Authorization": f"Bearer {access_token}"})
|
||||
|
||||
def requires_authorization(self):
|
||||
if self._session.headers.get("Authorization") is None:
|
||||
raise Unauthenticated(
|
||||
'This method requires that you authenticate to the API, either by calling the "authorize" method with an API token or by calling the "login" method.'
|
||||
)
|
||||
|
||||
def login(self, username: str, password: Optional[str] = None) -> str:
|
||||
"""
|
||||
Authorizes this client to act as the given user when communicating with the Moonstream API.
|
||||
|
||||
To register an account on the production Moonstream API, go to https://moonstream.to.
|
||||
|
||||
Arguments:
|
||||
username - Username of the user to authenticate as.
|
||||
password - Optional password for the user. If this is not provided, you will be prompted for
|
||||
the password.
|
||||
"""
|
||||
if password is None:
|
||||
password = input(f"Moonstream password for {username}: ")
|
||||
|
||||
r = self._session.post(
|
||||
self.api.endpoints[ENDPOINT_TOKEN],
|
||||
data={"username": username, "password": password},
|
||||
)
|
||||
r.raise_for_status()
|
||||
|
||||
token = r.json()
|
||||
self.authorize(token["id"])
|
||||
return token
|
||||
|
||||
def logout(self) -> None:
|
||||
"""
|
||||
Logs the current user out of the Moonstream client.
|
||||
"""
|
||||
self._session.delete(self.api.endpoints[ENDPOINT_TOKEN])
|
||||
self._session.headers.pop("Authorization")
|
||||
|
||||
def subscription_types(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Gets the currently available subscription types on the Moonstream API.
|
||||
"""
|
||||
r = self._session.get(self.api.endpoints[ENDPOINT_SUBSCRIPTION_TYPES])
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def list_subscriptions(self) -> Dict[str, Any]:
|
||||
"""
|
||||
Gets the currently authorized user's subscriptions from the API server.
|
||||
"""
|
||||
self.requires_authorization()
|
||||
r = self._session.get(self.api.endpoints[ENDPOINT_SUBSCRIPTIONS])
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def create_subscription(
|
||||
self, subscription_type: str, label: str, color: str, specifier: str = ""
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Creates a subscription.
|
||||
|
||||
Arguments:
|
||||
subscription_type - The type of subscription you would like to create. To see the available subscription
|
||||
types, call the "subscription_types" method on this Moonstream client. This argument must be
|
||||
the "id" if the subscription type you want.
|
||||
label - A label for the subscription. This will identify the subscription to you in your stream.
|
||||
color - A hexadecimal color to associate with the subscription.
|
||||
specifier - A specifier for the subscription, which must correspond to one of the choices in the
|
||||
subscription type. This is optional because some subscription types do not require a specifier.
|
||||
|
||||
Returns: The subscription resource that was created on the backend.
|
||||
"""
|
||||
self.requires_authorization()
|
||||
r = self._session.post(
|
||||
self.api.endpoints[ENDPOINT_SUBSCRIPTIONS],
|
||||
data={
|
||||
"subscription_type_id": subscription_type,
|
||||
"label": label,
|
||||
"color": color,
|
||||
"address": specifier,
|
||||
},
|
||||
)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def delete_subscription(self, id: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Delete a subscription by ID.
|
||||
|
||||
Arguments:
|
||||
id - ID of the subscription to delete.
|
||||
|
||||
Returns: The subscription resource that was deleted.
|
||||
"""
|
||||
self.requires_authorization()
|
||||
r = self._session.delete(f"{self.api.endpoints[ENDPOINT_SUBSCRIPTIONS]}{id}")
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def update_subscription(
|
||||
self, id: str, label: Optional[str] = None, color: Optional[str] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""
|
||||
Update a subscription label or color.
|
||||
|
||||
Arguments:
|
||||
label - New label for subscription (optional).
|
||||
color - New color for subscription (optional).
|
||||
|
||||
Returns - If neither label or color are specified, raises a ValueError. Otherwise PUTs the updated
|
||||
information to the server and returns the updated subscription resource.
|
||||
"""
|
||||
if label is None and color is None:
|
||||
raise ValueError(
|
||||
"At least one of the arguments to this method should not be None."
|
||||
)
|
||||
self.requires_authorization()
|
||||
data = {}
|
||||
if label is not None:
|
||||
data["label"] = label
|
||||
if color is not None:
|
||||
data["color"] = color
|
||||
|
||||
r = self._session.put(
|
||||
f"{self.api.endpoints[ENDPOINT_SUBSCRIPTIONS]}{id}", data=data
|
||||
)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def latest_events(self, q: str = "") -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Returns the latest events in your stream. You can optionally provide a query parameter to
|
||||
constrain the query to specific subscription types or to specific subscriptions.
|
||||
|
||||
Arguments:
|
||||
- q - Optional query (default is the empty string). The syntax to constrain to a particular
|
||||
type of subscription is "type:<subscription_type>". For example, to get the latest event from
|
||||
your Ethereum transaction pool subscriptions, you would use "type:ethereum_txpool".
|
||||
|
||||
Returns: A list of the latest events in your stream.
|
||||
"""
|
||||
self.requires_authorization()
|
||||
query_params: Dict[str, str] = {}
|
||||
if q:
|
||||
query_params["q"] = q
|
||||
r = self._session.get(
|
||||
self.api.endpoints[ENDPOINT_STREAMS_LATEST], params=query_params
|
||||
)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def next_event(
|
||||
self, end_time: int, include_end: bool = True, q: str = ""
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Return the earliest event in your stream that occurred after the given end_time.
|
||||
|
||||
Arguments:
|
||||
- end_time - Time after which you want to retrieve the earliest event from your stream.
|
||||
- include_end - If True, the result is the first event that occurred in your stream strictly
|
||||
*after* the end time. If False, then you will get the first event that occurred in your
|
||||
stream *on* or *after* the end time.
|
||||
- q - Optional query to filter over your available subscriptions and subscription types.
|
||||
|
||||
Returns: None if no event has occurred after the given end time, else returns a dictionary
|
||||
representing that event.
|
||||
"""
|
||||
self.requires_authorization()
|
||||
query_params: Dict[str, Any] = {
|
||||
"end_time": end_time,
|
||||
"include_end": include_end,
|
||||
}
|
||||
if q:
|
||||
query_params["q"] = q
|
||||
r = self._session.get(
|
||||
self.api.endpoints[ENDPOINT_STREAMS_NEXT], params=query_params
|
||||
)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def previous_event(
|
||||
self, start_time: int, include_start: bool = True, q: str = ""
|
||||
) -> Optional[Dict[str, Any]]:
|
||||
"""
|
||||
Return the latest event in your stream that occurred before the given start_time.
|
||||
|
||||
Arguments:
|
||||
- start_time - Time before which you want to retrieve the latest event from your stream.
|
||||
- include_start - If True, the result is the last event that occurred in your stream strictly
|
||||
*before* the start time. If False, then you will get the last event that occurred in your
|
||||
stream *on* or *before* the start time.
|
||||
- q - Optional query to filter over your available subscriptions and subscription types.
|
||||
|
||||
Returns: None if no event has occurred before the given start time, else returns a dictionary
|
||||
representing that event.
|
||||
"""
|
||||
self.requires_authorization()
|
||||
query_params: Dict[str, Any] = {
|
||||
"start_time": start_time,
|
||||
"include_start": include_start,
|
||||
}
|
||||
if q:
|
||||
query_params["q"] = q
|
||||
r = self._session.get(
|
||||
self.api.endpoints[ENDPOINT_STREAMS_PREVIOUS], params=query_params
|
||||
)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
|
||||
def events(
|
||||
def create_query(
|
||||
self,
|
||||
start_time: int,
|
||||
end_time: int,
|
||||
include_start: bool = False,
|
||||
include_end: bool = False,
|
||||
q: str = "",
|
||||
) -> Dict[str, Any]:
|
||||
token: Union[str, uuid.UUID],
|
||||
query: str,
|
||||
name: str,
|
||||
public: bool = False,
|
||||
auth_type: AuthType = AuthType.bearer,
|
||||
timeout: float = MOONSTREAM_REQUEST_TIMEOUT,
|
||||
) -> MoonstreamQuery:
|
||||
"""
|
||||
Return all events in your stream that occurred between the given start and end times.
|
||||
|
||||
Arguments:
|
||||
- start_time - Time after which you want to query your stream.
|
||||
- include_start - Whether or not events that occurred exactly at the start_time should be included in the results.
|
||||
- end_time - Time before which you want to query your stream.
|
||||
- include_end - Whether or not events that occurred exactly at the end_time should be included in the results.
|
||||
- q - Optional query to filter over your available subscriptions and subscription types.
|
||||
|
||||
Returns: A dictionary representing the results of your query.
|
||||
Creates new query.
|
||||
"""
|
||||
self.requires_authorization()
|
||||
query_params: Dict[str, Any] = {
|
||||
"start_time": start_time,
|
||||
"include_start": include_start,
|
||||
"end_time": end_time,
|
||||
"include_end": include_end,
|
||||
json = {
|
||||
"query": query,
|
||||
"name": name,
|
||||
"public": public,
|
||||
}
|
||||
if q:
|
||||
query_params["q"] = q
|
||||
headers = {
|
||||
"Authorization": f"{auth_type.value} {token}",
|
||||
}
|
||||
response = self._call(
|
||||
method=Method.POST,
|
||||
url=f"{self.api.endpoints[ENDPOINT_QUERIES]}/",
|
||||
headers=headers,
|
||||
json=json,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
r = self._session.get(self.api.endpoints[ENDPOINT_STREAMS], params=query_params)
|
||||
r.raise_for_status()
|
||||
return r.json()
|
||||
return MoonstreamQuery(
|
||||
id=response["id"],
|
||||
journal_url=response["journal_url"],
|
||||
name=response["title"],
|
||||
query=response["content"],
|
||||
tags=response["tags"],
|
||||
created_at=response["created_at"],
|
||||
updated_at=response["updated_at"],
|
||||
)
|
||||
|
||||
def create_stream(
|
||||
def list_queries(
|
||||
self,
|
||||
start_time: int,
|
||||
end_time: Optional[int] = None,
|
||||
q: str = "",
|
||||
) -> Generator[Dict[str, Any], None, None]:
|
||||
token: Union[str, uuid.UUID],
|
||||
auth_type: AuthType = AuthType.bearer,
|
||||
timeout: float = MOONSTREAM_REQUEST_TIMEOUT,
|
||||
) -> MoonstreamQueries:
|
||||
"""
|
||||
Return a stream of event. Event packs will be generated with 1 hour time range.
|
||||
|
||||
Arguments:
|
||||
- start_time - One of time border.
|
||||
- end_time - Time until the end of stream, if set to None stream will be going forward endlessly.
|
||||
- q - Optional query to filter over your available subscriptions and subscription types.
|
||||
|
||||
Returns: A dictionary stream representing the results of your query.
|
||||
Returns list of all queries available to user.
|
||||
"""
|
||||
# TODO(kompotkot): Add tests
|
||||
shift_two_hours = 2 * 60 * 60 # 2 hours
|
||||
shift_half_hour = 1 * 30 * 30 # 30 min
|
||||
headers = {
|
||||
"Authorization": f"{auth_type.value} {token}",
|
||||
}
|
||||
response = self._call(
|
||||
method=Method.GET,
|
||||
url=f"{self.api.endpoints[ENDPOINT_QUERIES]}/list",
|
||||
headers=headers,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
def fetch_events(
|
||||
modified_start_time: int, modified_end_time: int
|
||||
) -> Generator[Tuple[Dict[str, Any], bool], None, None]:
|
||||
# If it is going from top to bottom in history,
|
||||
# then time_range will be reversed
|
||||
reversed_time = False
|
||||
if modified_start_time > modified_end_time:
|
||||
reversed_time = True
|
||||
max_boundary = max(modified_start_time, modified_end_time)
|
||||
min_boundary = min(modified_start_time, modified_end_time)
|
||||
|
||||
time_range_list = []
|
||||
# 300, 450 with shift 100 => [{"start_time": 300, "end_time": 399}, {"start_time": 400, "end_time": 450}]
|
||||
if max_boundary - min_boundary > shift_half_hour:
|
||||
for i in range(min_boundary, max_boundary, shift_half_hour):
|
||||
end_i = (
|
||||
i + shift_half_hour - 1
|
||||
if i + shift_half_hour <= max_boundary
|
||||
else max_boundary
|
||||
)
|
||||
time_range_list.append({"start_time": i, "end_time": end_i})
|
||||
else:
|
||||
time_range_list.append(
|
||||
{"start_time": min_boundary, "end_time": max_boundary}
|
||||
return MoonstreamQueries(
|
||||
queries=[
|
||||
MoonstreamQuery(
|
||||
id=query["entry_id"],
|
||||
name=query["name"],
|
||||
query_type=query["type"],
|
||||
user=query["user"],
|
||||
user_id=query["user_id"],
|
||||
)
|
||||
if reversed_time:
|
||||
time_range_list.reverse()
|
||||
for query in response
|
||||
]
|
||||
)
|
||||
|
||||
for time_range in time_range_list:
|
||||
r_json = self.events(
|
||||
start_time=time_range["start_time"],
|
||||
end_time=time_range["end_time"],
|
||||
include_start=True,
|
||||
include_end=True,
|
||||
q=q,
|
||||
)
|
||||
def exec_query(
|
||||
self,
|
||||
token: Union[str, uuid.UUID],
|
||||
name: str,
|
||||
params: Dict[str, Any] = {},
|
||||
auth_type: AuthType = AuthType.bearer,
|
||||
timeout: float = MOONSTREAM_REQUEST_TIMEOUT,
|
||||
) -> MoonstreamQueryResultUrl:
|
||||
"""
|
||||
Executes queries and upload data to external storage.
|
||||
"""
|
||||
headers = {
|
||||
"Authorization": f"{auth_type.value} {token}",
|
||||
}
|
||||
json = {
|
||||
"params": params,
|
||||
}
|
||||
response = self._call(
|
||||
method=Method.POST,
|
||||
url=f"{self.api.endpoints[ENDPOINT_QUERIES]}/{name}/update_data",
|
||||
headers=headers,
|
||||
json=json,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
yield r_json, reversed_time
|
||||
return MoonstreamQueryResultUrl(url=response["url"])
|
||||
|
||||
time_range_list = time_range_list[:]
|
||||
|
||||
if end_time is None:
|
||||
float_start_time = start_time
|
||||
|
||||
while True:
|
||||
end_time = int(self.server_time())
|
||||
# If time range is greater then 2 hours,
|
||||
# shift float_start time close to end_time to prevent stream block
|
||||
if end_time - float_start_time > shift_two_hours:
|
||||
float_start_time = shift_two_hours
|
||||
for r_json, reversed_time in fetch_events(float_start_time, end_time):
|
||||
|
||||
yield r_json
|
||||
|
||||
events = r_json.get("events", [])
|
||||
if len(events) > 0:
|
||||
# Updating float_start_time after first iteration to last event time
|
||||
if reversed_time:
|
||||
float_start_time = events[-1].get("event_timestamp") - 1
|
||||
else:
|
||||
float_start_time = events[0].get("event_timestamp") + 1
|
||||
|
||||
else:
|
||||
# If there are no events in response, wait
|
||||
# until new will be added
|
||||
time.sleep(5)
|
||||
else:
|
||||
for r_json, reversed_time in fetch_events(start_time, end_time):
|
||||
yield r_json
|
||||
|
||||
|
||||
def client_from_env() -> Moonstream:
|
||||
"""
|
||||
Produces a Moonstream client instantiated using the following environment variables:
|
||||
- MOONSTREAM_API_URL: Specifies the url parameter on the Moonstream client
|
||||
- MOONSTREAM_TIMEOUT_SECONDS: Specifies the request timeout
|
||||
- MOONSTREAM_ACCESS_TOKEN: If this environment variable is defined, the client sets this token as
|
||||
the authorization header for all Moonstream API requests.
|
||||
"""
|
||||
kwargs: Dict[str, Any] = {}
|
||||
|
||||
url = os.environ.get("MOONSTREAM_API_URL")
|
||||
if url is not None:
|
||||
kwargs["url"] = url
|
||||
|
||||
raw_timeout = os.environ.get("MOONSTREAM_TIMEOUT_SECONDS")
|
||||
timeout: Optional[float] = None
|
||||
if raw_timeout is not None:
|
||||
def download_query_results(
|
||||
self,
|
||||
url: str,
|
||||
output_type: OutputType = OutputType.JSON,
|
||||
timeout: float = MOONSTREAM_REQUEST_TIMEOUT,
|
||||
**kwargs,
|
||||
) -> Any:
|
||||
"""
|
||||
Fetch results of query from url.
|
||||
"""
|
||||
try:
|
||||
timeout = float(raw_timeout)
|
||||
except:
|
||||
raise ValueError(
|
||||
f"Could not convert MOONSTREAM_TIMEOUT_SECONDS ({raw_timeout}) to float."
|
||||
response = requests.request(
|
||||
Method.GET.value, url=url, timeout=timeout, **kwargs
|
||||
)
|
||||
response.raise_for_status()
|
||||
except Exception as e:
|
||||
raise Exception(str(e))
|
||||
|
||||
kwargs["timeout"] = timeout
|
||||
output = response
|
||||
if output_type == OutputType.JSON:
|
||||
output = response.json()
|
||||
|
||||
moonstream_client = Moonstream(**kwargs)
|
||||
return output
|
||||
|
||||
access_token = os.environ.get("MOONSTREAM_ACCESS_TOKEN")
|
||||
if access_token is not None:
|
||||
moonstream_client.authorize(access_token)
|
||||
def upload_query_results(
|
||||
self, data: str, bucket: str, key: str, metadata: Dict[str, Any] = {}
|
||||
) -> str:
|
||||
"""
|
||||
Uploads data to AWS S3 bucket.
|
||||
|
||||
return moonstream_client
|
||||
Requirements: "pip install -e .[aws]" with "boto3" module.
|
||||
"""
|
||||
try:
|
||||
url = upload_to_aws_s3_bucket(
|
||||
data=data, bucket=bucket, key=key, metadata=metadata
|
||||
)
|
||||
except Exception as e:
|
||||
raise Exception(str(e))
|
||||
|
||||
return url
|
||||
|
||||
def delete_query(
|
||||
self,
|
||||
token: Union[str, uuid.UUID],
|
||||
name: str,
|
||||
auth_type: AuthType = AuthType.bearer,
|
||||
timeout: float = MOONSTREAM_REQUEST_TIMEOUT,
|
||||
) -> uuid.UUID:
|
||||
"""
|
||||
Deletes query specified by name.
|
||||
"""
|
||||
headers = {
|
||||
"Authorization": f"{auth_type.value} {token}",
|
||||
}
|
||||
response = self._call(
|
||||
method=Method.DELETE,
|
||||
url=f"{self.api.endpoints[ENDPOINT_QUERIES]}/{name}",
|
||||
headers=headers,
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
return response["id"]
|
||||
|
|
|
@ -0,0 +1,52 @@
|
|||
import uuid
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class APISpec:
|
||||
url: str
|
||||
endpoints: Dict[str, str]
|
||||
|
||||
|
||||
class AuthType(Enum):
|
||||
bearer = "Bearer"
|
||||
web3 = "Web3"
|
||||
|
||||
|
||||
class Method(Enum):
|
||||
DELETE = "delete"
|
||||
GET = "get"
|
||||
POST = "post"
|
||||
PUT = "put"
|
||||
|
||||
|
||||
class OutputType(Enum):
|
||||
CSV = "csv"
|
||||
JSON = "json"
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class MoonstreamQuery:
|
||||
id: uuid.UUID
|
||||
name: str
|
||||
journal_url: Optional[str] = None
|
||||
query: Optional[str] = None
|
||||
tags: Optional[List[str]] = None
|
||||
user: Optional[str] = None
|
||||
user_id: Optional[uuid.UUID] = None
|
||||
query_type: Optional[str] = None
|
||||
created_at: Optional[datetime] = None
|
||||
updated_at: Optional[datetime] = None
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class MoonstreamQueries:
|
||||
queries: List[MoonstreamQuery]
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class MoonstreamQueryResultUrl:
|
||||
url: str
|
|
@ -0,0 +1,24 @@
|
|||
from typing import Any, Optional
|
||||
|
||||
|
||||
class MoonstreamResponseException(Exception):
|
||||
"""
|
||||
Raised when Moonstream server response with error.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
message,
|
||||
status_code: int,
|
||||
detail: Optional[Any] = None,
|
||||
) -> None:
|
||||
super().__init__(message)
|
||||
self.status_code = status_code
|
||||
if detail is not None:
|
||||
self.detail = detail
|
||||
|
||||
|
||||
class MoonstreamUnexpectedResponse(Exception):
|
||||
"""
|
||||
Raised when Moonstream server response is unexpected (e.g. unparseable).
|
||||
"""
|
|
@ -0,0 +1,13 @@
|
|||
import os
|
||||
|
||||
MOONSTREAM_API_URL = os.environ.get("MOONSTREAM_API_URL", "https://api.moonstream.to")
|
||||
|
||||
MOONSTREAM_REQUEST_TIMEOUT = 10
|
||||
MOONSTREAM_REQUEST_TIMEOUT_RAW = os.environ.get("MOONSTREAM_REQUEST_TIMEOUT")
|
||||
try:
|
||||
if MOONSTREAM_REQUEST_TIMEOUT_RAW is not None:
|
||||
MOONSTREAM_REQUEST_TIMEOUT = int(MOONSTREAM_REQUEST_TIMEOUT_RAW)
|
||||
except:
|
||||
raise Exception(
|
||||
f"Could not parse MOONSTREAM_REQUEST_TIMEOUT environment variable as int: {MOONSTREAM_REQUEST_TIMEOUT_RAW}"
|
||||
)
|
|
@ -1,138 +1,54 @@
|
|||
from dataclasses import FrozenInstanceError
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from . import client
|
||||
|
||||
|
||||
class TestMoonstreamClient(unittest.TestCase):
|
||||
def test_client_init(self):
|
||||
m = client.Moonstream()
|
||||
self.assertEqual(m.api.url, "https://api.moonstream.to")
|
||||
self.assertIsNone(m.timeout)
|
||||
self.assertGreater(len(m.api.endpoints), 0)
|
||||
|
||||
def test_client_init_with_timeout(self):
|
||||
timeout = 7
|
||||
m = client.Moonstream(timeout=timeout)
|
||||
self.assertEqual(m.api.url, "https://api.moonstream.to")
|
||||
self.assertEqual(m.timeout, timeout)
|
||||
self.assertGreater(len(m.api.endpoints), 0)
|
||||
|
||||
def test_client_with_custom_url_and_timeout(self):
|
||||
timeout = 9
|
||||
url = "https://my.custom.api.url"
|
||||
m = client.Moonstream(url=url, timeout=timeout)
|
||||
self.assertEqual(m.api.url, url)
|
||||
self.assertEqual(m.timeout, timeout)
|
||||
self.assertGreater(len(m.api.endpoints), 0)
|
||||
|
||||
def test_client_with_custom_messy_url_and_timeout(self):
|
||||
timeout = 3.5
|
||||
url = "https://my.custom.api.url/"
|
||||
m = client.Moonstream(url=url, timeout=timeout)
|
||||
self.assertEqual(m.api.url, url)
|
||||
self.assertEqual(m.timeout, timeout)
|
||||
self.assertGreater(len(m.api.endpoints), 0)
|
||||
|
||||
def test_client_with_custom_messy_url_no_protocol_and_timeout(self):
|
||||
timeout = 5.5
|
||||
url = "my.custom.api.url/"
|
||||
m = client.Moonstream(url=url, timeout=timeout)
|
||||
self.assertEqual(m.api.url, url)
|
||||
self.assertEqual(m.timeout, timeout)
|
||||
self.assertGreater(len(m.api.endpoints), 0)
|
||||
|
||||
def test_immutable_api_url(self):
|
||||
m = client.Moonstream()
|
||||
with self.assertRaises(FrozenInstanceError):
|
||||
m.api.url = "lol"
|
||||
|
||||
def test_immutable_api_endpoints(self):
|
||||
m = client.Moonstream()
|
||||
with self.assertRaises(FrozenInstanceError):
|
||||
m.api.endpoints = {}
|
||||
|
||||
def test_mutable_timeout(self):
|
||||
original_timeout = 5.0
|
||||
updated_timeout = 10.5
|
||||
m = client.Moonstream(timeout=original_timeout)
|
||||
self.assertEqual(m.timeout, original_timeout)
|
||||
m.timeout = updated_timeout
|
||||
self.assertEqual(m.timeout, updated_timeout)
|
||||
|
||||
|
||||
class TestMoonstreamClientFromEnv(unittest.TestCase):
|
||||
class TestMoonstreamCalls(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.old_moonstream_api_url = os.environ.get("MOONSTREAM_API_URL")
|
||||
self.old_moonstream_timeout_seconds = os.environ.get(
|
||||
"MOONSTREAM_TIMEOUT_SECONDS"
|
||||
url = os.environ.get("MOONSTREAM_API_URL", "https://api.moonstream.to")
|
||||
self.token = os.environ.get("MOONSTREAM_ACCESS_TOKEN")
|
||||
if self.token is None:
|
||||
raise Exception("MOONSTREAM_ACCESS_TOKEN should be specified")
|
||||
self.m = client.Moonstream(moonstream_api_url=url)
|
||||
|
||||
queries = self.m.list_queries(self.token)
|
||||
for query in queries.queries:
|
||||
if query.name.startswith("test_query_name"):
|
||||
self.m.delete_query(self.token, query.name)
|
||||
|
||||
def test_ping(self):
|
||||
response = self.m.ping()
|
||||
self.assertEqual(response["status"], "ok")
|
||||
|
||||
def test_create_query(self):
|
||||
query = "SELECT count(*) FROM polygon_blocks"
|
||||
name = "test-query-name-1"
|
||||
response = self.m.create_query(self.token, query, name)
|
||||
self.assertEqual(f"Query:{name.replace('-', '_')}", response.name)
|
||||
|
||||
def test_list_queries(self):
|
||||
query = (
|
||||
"SELECT hash,block_number FROM polygon_blocks WHERE block_number = 21175765"
|
||||
)
|
||||
self.old_moonstream_access_token = os.environ.get("MOONSTREAM_ACCESS_TOKEN")
|
||||
name = "test-query-name-2"
|
||||
response_1 = self.m.create_query(self.token, query, name)
|
||||
self.assertEqual(f"Query:{name.replace('-', '_')}", response_1.name)
|
||||
|
||||
self.moonstream_api_url = "https://custom.example.com"
|
||||
self.moonstream_timeout_seconds = 15.333333
|
||||
self.moonstream_access_token = "1d431ca4-af9b-4c3a-b7b9-3cc79f3b0900"
|
||||
response_2 = self.m.list_queries(self.token)
|
||||
self.assertGreaterEqual(len(response_2.queries), 1)
|
||||
|
||||
os.environ["MOONSTREAM_API_URL"] = self.moonstream_api_url
|
||||
os.environ["MOONSTREAM_TIMEOUT_SECONDS"] = str(self.moonstream_timeout_seconds)
|
||||
os.environ["MOONSTREAM_ACCESS_TOKEN"] = self.moonstream_access_token
|
||||
def test_delete_query(self):
|
||||
query = "SELECT 1"
|
||||
name = "test-query-name-0"
|
||||
response_1 = self.m.create_query(self.token, query, name)
|
||||
self.assertEqual(f"Query:{name.replace('-', '_')}", response_1.name)
|
||||
|
||||
response_2 = self.m.delete_query(self.token, name.replace("-", "_"))
|
||||
self.assertEqual(response_1.id, response_2)
|
||||
|
||||
def tearDown(self) -> None:
|
||||
del os.environ["MOONSTREAM_API_URL"]
|
||||
del os.environ["MOONSTREAM_TIMEOUT_SECONDS"]
|
||||
del os.environ["MOONSTREAM_ACCESS_TOKEN"]
|
||||
|
||||
if self.old_moonstream_api_url is not None:
|
||||
os.environ["MOONSTREAM_API_URL"] = self.old_moonstream_api_url
|
||||
if self.old_moonstream_timeout_seconds is not None:
|
||||
os.environ[
|
||||
"MOONSTREAM_TIMEOUT_SECONDS"
|
||||
] = self.old_moonstream_timeout_seconds
|
||||
if self.old_moonstream_access_token is not None:
|
||||
os.environ["MOONSTREAM_ACCESS_TOKEN"] = self.old_moonstream_access_token
|
||||
|
||||
def test_client_from_env(self):
|
||||
m = client.client_from_env()
|
||||
self.assertEqual(m.api.url, self.moonstream_api_url)
|
||||
self.assertEqual(m.timeout, self.moonstream_timeout_seconds)
|
||||
self.assertIsNone(m.requires_authorization())
|
||||
|
||||
authorization_header = m._session.headers["Authorization"]
|
||||
self.assertEqual(authorization_header, f"Bearer {self.moonstream_access_token}")
|
||||
|
||||
|
||||
class TestMoonstreamEndpoints(unittest.TestCase):
|
||||
def setUp(self):
|
||||
self.url = "https://api.moonstream.to"
|
||||
self.normalized_url = "https://api.moonstream.to"
|
||||
|
||||
def test_moonstream_endpoints(self):
|
||||
endpoints = client.moonstream_endpoints(self.url)
|
||||
self.assertDictEqual(
|
||||
endpoints,
|
||||
{
|
||||
client.ENDPOINT_PING: f"{self.normalized_url}{client.ENDPOINT_PING}",
|
||||
client.ENDPOINT_VERSION: f"{self.normalized_url}{client.ENDPOINT_VERSION}",
|
||||
client.ENDPOINT_NOW: f"{self.normalized_url}{client.ENDPOINT_NOW}",
|
||||
client.ENDPOINT_TOKEN: f"{self.normalized_url}{client.ENDPOINT_TOKEN}",
|
||||
client.ENDPOINT_SUBSCRIPTION_TYPES: f"{self.normalized_url}{client.ENDPOINT_SUBSCRIPTION_TYPES}",
|
||||
client.ENDPOINT_SUBSCRIPTIONS: f"{self.normalized_url}{client.ENDPOINT_SUBSCRIPTIONS}",
|
||||
client.ENDPOINT_STREAMS: f"{self.normalized_url}{client.ENDPOINT_STREAMS}",
|
||||
client.ENDPOINT_STREAMS_LATEST: f"{self.normalized_url}{client.ENDPOINT_STREAMS_LATEST}",
|
||||
client.ENDPOINT_STREAMS_NEXT: f"{self.normalized_url}{client.ENDPOINT_STREAMS_NEXT}",
|
||||
client.ENDPOINT_STREAMS_PREVIOUS: f"{self.normalized_url}{client.ENDPOINT_STREAMS_PREVIOUS}",
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class TestMoonstreamEndpointsMessyURL(TestMoonstreamEndpoints):
|
||||
def setUp(self):
|
||||
self.url = "https://api.moonstream.to/"
|
||||
self.normalized_url = "https://api.moonstream.to"
|
||||
|
||||
|
||||
class TestMoonstreamEndpointsMessyURLWithNoProtocol(TestMoonstreamEndpoints):
|
||||
def setUp(self):
|
||||
self.url = "api.moonstream.to/"
|
||||
self.normalized_url = "http://api.moonstream.to"
|
||||
queries = self.m.list_queries(self.token)
|
||||
for query in queries.queries:
|
||||
if query.name.startswith("test_query_name"):
|
||||
self.m.delete_query(self.token, query.name)
|
||||
|
|
|
@ -1 +1 @@
|
|||
MOONSTREAM_CLIENT_VERSION = "0.0.3"
|
||||
MOONSTREAM_CLIENT_VERSION = "0.1.1"
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
[mypy]
|
||||
|
||||
[mypy-boto3.*]
|
||||
ignore_missing_imports = True
|
|
@ -0,0 +1,3 @@
|
|||
# Tests variables
|
||||
export MOONSTREAM_API_URL="https://api.moonstream.to"
|
||||
export MOONSTREAM_ACCESS_TOKEN="<access_token_for_tests>"
|
|
@ -11,8 +11,9 @@ setup(
|
|||
version=MOONSTREAM_CLIENT_VERSION,
|
||||
packages=find_packages(),
|
||||
package_data={"moonstream": ["py.typed"]},
|
||||
install_requires=["requests", "dataclasses; python_version=='3.6'"],
|
||||
install_requires=["requests", "pydantic", "dataclasses; python_version=='3.6'"],
|
||||
extras_require={
|
||||
"aws": ["boto3"],
|
||||
"dev": [
|
||||
"black",
|
||||
"mypy",
|
||||
|
|
|
@ -1,12 +0,0 @@
|
|||
#!/usr/bin/env bash
|
||||
set -e
|
||||
TAG="clients/python/v$(python setup.py --version)"
|
||||
read -r -p "Tag: $TAG -- tag and push (y/n)?" ACCEPT
|
||||
if [ "$ACCEPT" = "y" ]
|
||||
then
|
||||
echo "Tagging and pushing: $TAG..."
|
||||
git tag "$TAG"
|
||||
git push upstream "$TAG"
|
||||
else
|
||||
echo "noop"
|
||||
fi
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Amoy historical crawler events
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli historical-crawl --blockchain-type amoy --find-deployed-blocks --end 0 --tasks-journal --only-events
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=amoy-historical-crawl-events
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Runs events historical crawler on Amoy
|
||||
|
||||
[Timer]
|
||||
OnBootSec=60s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Amoy historical crawler transactions
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli historical-crawl --blockchain-type amoy --find-deployed-blocks --end 0 --tasks-journal --only-functions
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=amoy-historical-crawl-transactions
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Runs transactions historical crawler on Amoy
|
||||
|
||||
[Timer]
|
||||
OnBootSec=60s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -1,15 +1,11 @@
|
|||
[Unit]
|
||||
Description=Load trending Ethereum addresses to the database
|
||||
Description=Fill missing blocks at Amoy database
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
User=ubuntu
|
||||
Group=www-data
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler \
|
||||
--access-id "${NB_CONTROLLER_ACCESS_ID}" \
|
||||
trending
|
||||
CPUWeight=30
|
||||
SyslogIdentifier=ethereum-trending
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks missing --blockchain amoy -n
|
||||
CPUWeight=50
|
||||
SyslogIdentifier=amoy-missing
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Amoy database
|
||||
|
||||
[Timer]
|
||||
OnBootSec=120s
|
||||
OnUnitActiveSec=15m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Amoy moonworm crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli crawl -b amoy --confirmations 10 --min-blocks-batch 20
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=amoy-moonworm-crawler
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Amoy block with transactions synchronizer
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks synchronize --blockchain amoy -c 10 -j 2
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=amoy-synchronize
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Arbitrum Nova database
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks missing --blockchain arbitrum_nova -n
|
||||
CPUWeight=50
|
||||
SyslogIdentifier=arbitrum-nova-missing
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Arbitrum Nova database
|
||||
|
||||
[Timer]
|
||||
OnBootSec=120s
|
||||
OnUnitActiveSec=15m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Arbitrum Nova moonworm crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli crawl -b arbitrum_nova --confirmations 10 --min-blocks-batch 20
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=arbitrum-nova-moonworm-crawler
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Arbitrum Nova block with transactions synchronizer
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks synchronize --blockchain arbitrum_nova -c 10 -j 2
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=arbitrum-nova-synchronize
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Arbitrum One historical crawler events
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli historical-crawl --blockchain-type arbitrum_one --find-deployed-blocks --end 0 --tasks-journal --only-events
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=arbitrum-one-historical-crawl-events
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Runs events historical crawler on arbitrum one
|
||||
|
||||
[Timer]
|
||||
OnBootSec=60s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Arbitrum One historical crawler transactions
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli historical-crawl --blockchain-type arbitrum_one --find-deployed-blocks --end 0 --tasks-journal --only-functions
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=arbitrum-one-historical-crawl-transactions
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Runs transactions historical crawler on proofofplay apex
|
||||
|
||||
[Timer]
|
||||
OnBootSec=60s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Arbitrum One database
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks missing --blockchain arbitrum_one -n
|
||||
CPUWeight=50
|
||||
SyslogIdentifier=arbitrum-one-missing
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Arbitrum One database
|
||||
|
||||
[Timer]
|
||||
OnBootSec=120s
|
||||
OnUnitActiveSec=15m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Arbitrum One moonworm crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli crawl -b arbitrum_one --confirmations 10 --min-blocks-batch 20
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=arbitrum-one-moonworm-crawler
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Arbitrum One block with transactions synchronizer
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks synchronize --blockchain arbitrum_one -c 10 -j 2
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=arbitrum-one-synchronize
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Arbitrum Sepolia database
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks missing --blockchain arbitrum_sepolia -n
|
||||
CPUWeight=50
|
||||
SyslogIdentifier=arbitrum-sepolia-missing
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Arbitrum Sepolia database
|
||||
|
||||
[Timer]
|
||||
OnBootSec=120s
|
||||
OnUnitActiveSec=15m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Arbitrum Sepolia moonworm crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli crawl -b arbitrum_sepolia --confirmations 10 --min-blocks-batch 20
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=arbitrum-sepolia-moonworm-crawler
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Arbitrum Sepolia block with transactions synchronizer
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks synchronize --blockchain arbitrum_sepolia -c 10 -j 2
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=arbitrum-sepolia-synchronize
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Avalanche Fuji database
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks missing --blockchain avalanche_fuji -n
|
||||
CPUWeight=50
|
||||
SyslogIdentifier=avalanche-fuji-missing
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Avalanche Fuji database
|
||||
|
||||
[Timer]
|
||||
OnBootSec=120s
|
||||
OnUnitActiveSec=15m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Avalanche Fuji moonworm crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli crawl -b avalanche_fuji --confirmations 10 --min-blocks-batch 20
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=avalanche-fuji-moonworm-crawler
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Avalanche Fuji block with transactions synchronizer
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks synchronize --blockchain avalanche_fuji -c 10 -j 2
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=avalanche-fuji-synchronize
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Avalanche database
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks missing --blockchain avalanche -n
|
||||
CPUWeight=50
|
||||
SyslogIdentifier=avalanche-missing
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Avalanche database
|
||||
|
||||
[Timer]
|
||||
OnBootSec=120s
|
||||
OnUnitActiveSec=15m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Avalanche moonworm crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli crawl -b avalanche --confirmations 10 --min-blocks-batch 20
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=avalanche-moonworm-crawler
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Avalanche block with transactions synchronizer
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks synchronize --blockchain avalanche -c 10 -j 2
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=avalanche-synchronize
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Blast database
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks missing --blockchain blast -n
|
||||
CPUWeight=50
|
||||
SyslogIdentifier=blast-missing
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Blast database
|
||||
|
||||
[Timer]
|
||||
OnBootSec=120s
|
||||
OnUnitActiveSec=15m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Blast moonworm crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli crawl -b blast --confirmations 10 --min-blocks-batch 20
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=blast-moonworm-crawler
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Blast Sepolia database
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks missing --blockchain blast_sepolia -n
|
||||
CPUWeight=50
|
||||
SyslogIdentifier=blast-sepolia-missing
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Fill missing blocks at Blast Sepolia database
|
||||
|
||||
[Timer]
|
||||
OnBootSec=120s
|
||||
OnUnitActiveSec=15m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Blast Sepolia moonworm crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli crawl -b blast_sepolia --confirmations 10 --min-blocks-batch 20
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=blast-sepolia-moonworm-crawler
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Blast Sepolia block with transactions synchronizer
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks synchronize --blockchain blast_sepolia -c 10 -j 2
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=blast-sepolia-synchronize
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Blast block with transactions synchronizer
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks synchronize --blockchain blast -c 10 -j 2
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=blast-synchronize
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,467 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Deployment script - intended to run on Moonstream crawlers server
|
||||
|
||||
# Colors
|
||||
C_RESET='\033[0m'
|
||||
C_RED='\033[1;31m'
|
||||
C_GREEN='\033[1;32m'
|
||||
C_YELLOW='\033[1;33m'
|
||||
|
||||
# Logs
|
||||
PREFIX_INFO="${C_GREEN}[INFO]${C_RESET} [$(date +%d-%m\ %T)]"
|
||||
PREFIX_WARN="${C_YELLOW}[WARN]${C_RESET} [$(date +%d-%m\ %T)]"
|
||||
PREFIX_CRIT="${C_RED}[CRIT]${C_RESET} [$(date +%d-%m\ %T)]"
|
||||
|
||||
# Main
|
||||
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}"
|
||||
APP_DIR="${APP_DIR:-/home/ubuntu/moonstream}"
|
||||
APP_CRAWLERS_DIR="${APP_DIR}/crawlers"
|
||||
PYTHON_ENV_DIR="${PYTHON_ENV_DIR:-/home/ubuntu/moonstream-env}"
|
||||
PYTHON="${PYTHON_ENV_DIR}/bin/python"
|
||||
PIP="${PYTHON_ENV_DIR}/bin/pip"
|
||||
SECRETS_DIR="${SECRETS_DIR:-/home/ubuntu/moonstream-secrets}"
|
||||
PARAMETERS_ENV_PATH="${SECRETS_DIR}/app.env"
|
||||
SCRIPT_DIR="$(realpath $(dirname $0))"
|
||||
|
||||
# Service files
|
||||
MOONCRAWL_SERVICE_FILE="mooncrawl.service"
|
||||
LEADERBOARDS_WORKER_SERVICE_FILE="leaderboards-worker.service"
|
||||
LEADERBOARDS_WORKER_TIMER_FILE="leaderboards-worker.timer"
|
||||
|
||||
# Ethereum service files
|
||||
ETHEREUM_SYNCHRONIZE_SERVICE_FILE="ethereum-synchronize.service"
|
||||
ETHEREUM_MISSING_SERVICE_FILE="ethereum-missing.service"
|
||||
ETHEREUM_MISSING_TIMER_FILE="ethereum-missing.timer"
|
||||
ETHEREUM_ORANGE_DAO_REPORTS_TOKENONOMICS_SERVICE_FILE="ethereum-orange-dao-reports-tokenonomics.service"
|
||||
ETHEREUM_ORANGE_DAO_TOKENONOMICS_TIMER_FILE="ethereum-orange-dao-reports-tokenonomics.timer"
|
||||
|
||||
# Polygon service files
|
||||
POLYGON_SYNCHRONIZE_SERVICE="polygon-synchronize.service"
|
||||
POLYGON_MISSING_SERVICE_FILE="polygon-missing.service"
|
||||
POLYGON_MISSING_TIMER_FILE="polygon-missing.timer"
|
||||
POLYGON_CU_NFT_DASHBOARD_SERVICE_FILE="polygon-cu-nft-dashboard.service"
|
||||
POLYGON_CU_NFT_DASHBOARD_TIMER_FILE="polygon-cu-nft-dashboard.timer"
|
||||
|
||||
# Amoy
|
||||
AMOY_MISSING_SERVICE_FILE="amoy-missing.service"
|
||||
AMOY_MISSING_TIMER_FILE="amoy-missing.timer"
|
||||
AMOY_SYNCHRONIZE_SERVICE="amoy-synchronize.service"
|
||||
|
||||
# XDai service files
|
||||
XDAI_SYNCHRONIZE_SERVICE="xdai-synchronize.service"
|
||||
XDAI_MISSING_SERVICE_FILE="xdai-missing.service"
|
||||
XDAI_MISSING_TIMER_FILE="xdai-missing.timer"
|
||||
|
||||
# ZkSync Era
|
||||
ZKSYNC_ERA_SYNCHRONIZE_SERVICE="zksync-era-synchronize.service"
|
||||
ZKSYNC_ERA_MISSING_SERVICE_FILE="zksync-era-missing.service"
|
||||
ZKSYNC_ERA_MISSING_TIMER_FILE="zksync-era-missing.timer"
|
||||
|
||||
# ZkSync Era Sepolia
|
||||
ZKSYNC_ERA_SEPOLIA_SYNCHRONIZE_SERVICE="zksync-era-sepolia-synchronize.service"
|
||||
ZKSYNC_ERA_SEPOLIA_MISSING_SERVICE_FILE="zksync-era-sepolia-missing.service"
|
||||
ZKSYNC_ERA_SEPOLIA_MISSING_TIMER_FILE="zksync-era-sepolia-missing.timer"
|
||||
|
||||
# Arbitrum one
|
||||
ARBITRUM_ONE_SYNCHRONIZE_SERVICE="arbitrum-one-synchronize.service"
|
||||
ARBITRUM_ONE_MISSING_SERVICE_FILE="arbitrum-one-missing.service"
|
||||
ARBITRUM_ONE_MISSING_TIMER_FILE="arbitrum-one-missing.timer"
|
||||
|
||||
# Arbitrum Nova
|
||||
ARBITRUM_NOVA_SYNCHRONIZE_SERVICE="arbitrum-nova-synchronize.service"
|
||||
ARBITRUM_NOVA_MISSING_SERVICE_FILE="arbitrum-nova-missing.service"
|
||||
ARBITRUM_NOVA_MISSING_TIMER_FILE="arbitrum-nova-missing.timer"
|
||||
|
||||
# Arbitrum Sepolia
|
||||
ARBITRUM_SEPOLIA_SYNCHRONIZE_SERVICE="arbitrum-sepolia-synchronize.service"
|
||||
ARBITRUM_SEPOLIA_MISSING_SERVICE_FILE="arbitrum-sepolia-missing.service"
|
||||
ARBITRUM_SEPOLIA_MISSING_TIMER_FILE="arbitrum-sepolia-missing.timer"
|
||||
|
||||
# Xai
|
||||
XAI_SYNCHRONIZE_SERVICE="xai-synchronize.service"
|
||||
XAI_MISSING_SERVICE_FILE="xai-missing.service"
|
||||
XAI_MISSING_TIMER_FILE="xai-missing.timer"
|
||||
|
||||
# Xai sepolia
|
||||
XAI_SEPOLIA_SYNCHRONIZE_SERVICE="xai-sepolia-synchronize.service"
|
||||
XAI_SEPOLIA_MISSING_SERVICE_FILE="xai-sepolia-missing.service"
|
||||
XAI_SEPOLIA_MISSING_TIMER_FILE="xai-sepolia-missing.timer"
|
||||
|
||||
# Avalanche sepolia
|
||||
AVALANCHE_SYNCHRONIZE_SERVICE="avalanche-synchronize.service"
|
||||
AVALANCHE_MISSING_SERVICE_FILE="avalanche-missing.service"
|
||||
AVALANCHE_MISSING_TIMER_FILE="avalanche-missing.timer"
|
||||
|
||||
# Avalanche Fuji sepolia
|
||||
AVALANCHE_FUJI_SYNCHRONIZE_SERVICE="avalanche-fuji-synchronize.service"
|
||||
AVALANCHE_FUJI_MISSING_SERVICE_FILE="avalanche-fuji-missing.service"
|
||||
AVALANCHE_FUJI_MISSING_TIMER_FILE="avalanche-fuji-missing.timer"
|
||||
|
||||
# Blast
|
||||
BLAST_MISSING_SERVICE_FILE="blast-missing.service"
|
||||
BLAST_MISSING_TIMER_FILE="blast-missing.timer"
|
||||
BLAST_SYNCHRONIZE_SERVICE="blast-synchronize.service"
|
||||
|
||||
# Blast sepolia
|
||||
BLAST_SEPOLIA_MISSING_SERVICE_FILE="blast-sepolia-missing.service"
|
||||
BLAST_SEPOLIA_MISSING_TIMER_FILE="blast-sepolia-missing.timer"
|
||||
BLAST_SEPOLIA_SYNCHRONIZE_SERVICE="blast-sepolia-synchronize.service"
|
||||
|
||||
# ProofofPlay APEX
|
||||
PROOFOFPLAY_APEX_MISSING_SERVICE_FILE="proofofplay-apex-missing.service"
|
||||
PROOFOFPLAY_APEX_MISSING_TIMER_FILE="proofofplay-apex-missing.timer"
|
||||
PROOFOFPLAY_APEX_SYNCHRONIZE_SERVICE="proofofplay-apex-synchronize.service"
|
||||
|
||||
set -eu
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Upgrading Python pip and setuptools"
|
||||
"${PIP}" install --upgrade pip setuptools
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Installing Python dependencies"
|
||||
"${PIP}" install -e "${APP_CRAWLERS_DIR}/mooncrawl/"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Install checkenv"
|
||||
HOME=/home/ubuntu /usr/local/go/bin/go install github.com/bugout-dev/checkenv@latest
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Retrieving deployment parameters"
|
||||
if [ ! -d "${SECRETS_DIR}" ]; then
|
||||
mkdir -p "${SECRETS_DIR}"
|
||||
echo -e "${PREFIX_WARN} Created new secrets directory"
|
||||
fi
|
||||
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" CHECKENV_AWS_FETCH_LOOP_LIMIT=20 /home/ubuntu/go/bin/checkenv show aws_ssm+moonstream:true > "${PARAMETERS_ENV_PATH}"
|
||||
chmod 0640 "${PARAMETERS_ENV_PATH}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Add instance local IP to parameters"
|
||||
echo "AWS_LOCAL_IPV4=$(ec2metadata --local-ipv4)" >> "${PARAMETERS_ENV_PATH}"
|
||||
|
||||
echo
|
||||
echo
|
||||
if [ ! -d "/home/ubuntu/.config/systemd/user/" ]; then
|
||||
mkdir -p /home/ubuntu/.config/systemd/user/
|
||||
echo -e "${PREFIX_WARN} Created user systemd directory"
|
||||
fi
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Moonstream crawlers HTTP API server service definition with ${MOONCRAWL_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${MOONCRAWL_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${MOONCRAWL_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${MOONCRAWL_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${MOONCRAWL_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Leaderboards worker service and timer with: ${LEADERBOARDS_WORKER_SERVICE_FILE}, ${LEADERBOARDS_WORKER_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${LEADERBOARDS_WORKER_SERVICE_FILE}" "${SCRIPT_DIR}/${LEADERBOARDS_WORKER_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${LEADERBOARDS_WORKER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${LEADERBOARDS_WORKER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${LEADERBOARDS_WORKER_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${LEADERBOARDS_WORKER_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${LEADERBOARDS_WORKER_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Ethereum block with transactions syncronizer service definition with ${ETHEREUM_SYNCHRONIZE_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ETHEREUM_SYNCHRONIZE_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ETHEREUM_SYNCHRONIZE_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ETHEREUM_SYNCHRONIZE_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ETHEREUM_SYNCHRONIZE_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Ethereum missing service and timer with: ${ETHEREUM_MISSING_SERVICE_FILE}, ${ETHEREUM_MISSING_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ETHEREUM_MISSING_SERVICE_FILE}" "${SCRIPT_DIR}/${ETHEREUM_MISSING_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ETHEREUM_MISSING_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ETHEREUM_MISSING_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ETHEREUM_MISSING_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ETHEREUM_MISSING_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ETHEREUM_MISSING_TIMER_FILE}"
|
||||
|
||||
# Refresh the Ethereum Orange DAO reports tokenonomics service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Ethereum Orange DAO reports tokenonomics service and timer with: ${ETHEREUM_ORANGE_DAO_REPORTS_TOKENONOMICS_SERVICE_FILE}, ${ETHEREUM_ORANGE_DAO_TOKENONOMICS_TIMER_FILE}"
for unit in "${ETHEREUM_ORANGE_DAO_REPORTS_TOKENONOMICS_SERVICE_FILE}" "${ETHEREUM_ORANGE_DAO_TOKENONOMICS_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ETHEREUM_ORANGE_DAO_TOKENONOMICS_TIMER_FILE}"

# Refresh the Polygon block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Polygon block with transactions syncronizer service definition with ${POLYGON_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${POLYGON_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${POLYGON_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_SYNCHRONIZE_SERVICE}"

# Refresh the Polygon missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Polygon missing service and timer with: ${POLYGON_MISSING_SERVICE_FILE}, ${POLYGON_MISSING_TIMER_FILE}"
for unit in "${POLYGON_MISSING_SERVICE_FILE}" "${POLYGON_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_MISSING_TIMER_FILE}"

# Refresh the Polygon CU NFT dashboard service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Polygon CU reports tokenonomics service and timer with: ${POLYGON_CU_NFT_DASHBOARD_SERVICE_FILE}, ${POLYGON_CU_NFT_DASHBOARD_TIMER_FILE}"
for unit in "${POLYGON_CU_NFT_DASHBOARD_SERVICE_FILE}" "${POLYGON_CU_NFT_DASHBOARD_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_CU_NFT_DASHBOARD_TIMER_FILE}"
|
||||
|
||||
# Amoy
# Refresh the Amoy block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Amoy block with transactions syncronizer service definition with ${AMOY_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${AMOY_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${AMOY_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AMOY_SYNCHRONIZE_SERVICE}"

# Refresh the Amoy missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Amoy missing service and timer with: ${AMOY_MISSING_SERVICE_FILE}, ${AMOY_MISSING_TIMER_FILE}"
for unit in "${AMOY_MISSING_SERVICE_FILE}" "${AMOY_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AMOY_MISSING_TIMER_FILE}"

# Xdai
# Refresh the XDai block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing XDai block with transactions syncronizer service definition with ${XDAI_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${XDAI_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${XDAI_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XDAI_SYNCHRONIZE_SERVICE}"

# Refresh the XDai missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing XDai missing service and timer with: ${XDAI_MISSING_SERVICE_FILE}, ${XDAI_MISSING_TIMER_FILE}"
for unit in "${XDAI_MISSING_SERVICE_FILE}" "${XDAI_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XDAI_MISSING_TIMER_FILE}"

# Refresh the ZkSync Era block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era block with transactions syncronizer service definition with ${ZKSYNC_ERA_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${ZKSYNC_ERA_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_SYNCHRONIZE_SERVICE}"

# Refresh the ZkSync Era missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era missing service and timer with: ${ZKSYNC_ERA_MISSING_SERVICE_FILE}, ${ZKSYNC_ERA_MISSING_TIMER_FILE}"
for unit in "${ZKSYNC_ERA_MISSING_SERVICE_FILE}" "${ZKSYNC_ERA_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_MISSING_TIMER_FILE}"

# Refresh the ZkSync Era Sepolia block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era Sepolia block with transactions syncronizer service definition with ${ZKSYNC_ERA_SEPOLIA_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_SEPOLIA_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_SEPOLIA_SYNCHRONIZE_SERVICE}"

# Refresh the ZkSync Era Sepolia missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era Sepolia missing service and timer with: ${ZKSYNC_ERA_SEPOLIA_MISSING_SERVICE_FILE}, ${ZKSYNC_ERA_SEPOLIA_MISSING_TIMER_FILE}"
for unit in "${ZKSYNC_ERA_SEPOLIA_MISSING_SERVICE_FILE}" "${ZKSYNC_ERA_SEPOLIA_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_SEPOLIA_MISSING_TIMER_FILE}"
|
||||
|
||||
|
||||
# Arbitrum one
# Refresh the Arbitrum one block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Arbitrum one block with transactions syncronizer service definition with ${ARBITRUM_ONE_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${ARBITRUM_ONE_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_ONE_SYNCHRONIZE_SERVICE}"

# Refresh the Arbitrum one missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Arbitrum one missing service and timer with: ${ARBITRUM_ONE_MISSING_SERVICE_FILE}, ${ARBITRUM_ONE_MISSING_TIMER_FILE}"
for unit in "${ARBITRUM_ONE_MISSING_SERVICE_FILE}" "${ARBITRUM_ONE_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_ONE_MISSING_TIMER_FILE}"

# Refresh the Arbitrum Nova block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Arbitrum Nova block with transactions syncronizer service definition with ${ARBITRUM_NOVA_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${ARBITRUM_NOVA_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_NOVA_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_NOVA_SYNCHRONIZE_SERVICE}"

# Refresh the Arbitrum Nova missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Arbitrum Nova missing service and timer with: ${ARBITRUM_NOVA_MISSING_SERVICE_FILE}, ${ARBITRUM_NOVA_MISSING_TIMER_FILE}"
for unit in "${ARBITRUM_NOVA_MISSING_SERVICE_FILE}" "${ARBITRUM_NOVA_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_NOVA_MISSING_TIMER_FILE}"

# Refresh the Arbitrum Sepolia block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Arbitrum Sepolia block with transactions syncronizer service definition with ${ARBITRUM_SEPOLIA_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_SEPOLIA_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_SEPOLIA_SYNCHRONIZE_SERVICE}"

# Refresh the Arbitrum Sepolia missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Arbitrum Sepolia missing service and timer with: ${ARBITRUM_SEPOLIA_MISSING_SERVICE_FILE}, ${ARBITRUM_SEPOLIA_MISSING_TIMER_FILE}"
for unit in "${ARBITRUM_SEPOLIA_MISSING_SERVICE_FILE}" "${ARBITRUM_SEPOLIA_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_SEPOLIA_MISSING_TIMER_FILE}"

# Refresh the Xai block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Xai block with transactions syncronizer service definition with ${XAI_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${XAI_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${XAI_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_SYNCHRONIZE_SERVICE}"

# Refresh the Xai missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Xai missing service and timer with: ${XAI_MISSING_SERVICE_FILE}, ${XAI_MISSING_TIMER_FILE}"
for unit in "${XAI_MISSING_SERVICE_FILE}" "${XAI_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_MISSING_TIMER_FILE}"

# Refresh the Xai sepolia block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Xai sepolia block with transactions syncronizer service definition with ${XAI_SEPOLIA_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${XAI_SEPOLIA_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${XAI_SEPOLIA_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_SEPOLIA_SYNCHRONIZE_SERVICE}"

# Refresh the Xai sepolia missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Xai sepolia missing service and timer with: ${XAI_SEPOLIA_MISSING_SERVICE_FILE}, ${XAI_SEPOLIA_MISSING_TIMER_FILE}"
for unit in "${XAI_SEPOLIA_MISSING_SERVICE_FILE}" "${XAI_SEPOLIA_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_SEPOLIA_MISSING_TIMER_FILE}"
|
||||
|
||||
# Refresh the Avalanche block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Avalanche block with transactions syncronizer service definition with ${AVALANCHE_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${AVALANCHE_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${AVALANCHE_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AVALANCHE_SYNCHRONIZE_SERVICE}"

# Refresh the Avalanche missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Avalanche missing service and timer with: ${AVALANCHE_MISSING_SERVICE_FILE}, ${AVALANCHE_MISSING_TIMER_FILE}"
for unit in "${AVALANCHE_MISSING_SERVICE_FILE}" "${AVALANCHE_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AVALANCHE_MISSING_TIMER_FILE}"

# Refresh the Avalanche Fuji block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Avalanche Fuji block with transactions syncronizer service definition with ${AVALANCHE_FUJI_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${AVALANCHE_FUJI_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${AVALANCHE_FUJI_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AVALANCHE_FUJI_SYNCHRONIZE_SERVICE}"

# Refresh the Avalanche Fuji missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Avalanche Fuji missing service and timer with: ${AVALANCHE_FUJI_MISSING_SERVICE_FILE}, ${AVALANCHE_FUJI_MISSING_TIMER_FILE}"
for unit in "${AVALANCHE_FUJI_MISSING_SERVICE_FILE}" "${AVALANCHE_FUJI_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AVALANCHE_FUJI_MISSING_TIMER_FILE}"

# Blast
# Refresh the Blast block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Blast block with transactions syncronizer service definition with ${BLAST_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${BLAST_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${BLAST_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${BLAST_SYNCHRONIZE_SERVICE}"

# Refresh the Blast missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Blast missing service and timer with: ${BLAST_MISSING_SERVICE_FILE}, ${BLAST_MISSING_TIMER_FILE}"
for unit in "${BLAST_MISSING_SERVICE_FILE}" "${BLAST_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${BLAST_MISSING_TIMER_FILE}"

# Blast sepolia
# Refresh the Blast sepolia block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Blast sepolia block with transactions syncronizer service definition with ${BLAST_SEPOLIA_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${BLAST_SEPOLIA_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${BLAST_SEPOLIA_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${BLAST_SEPOLIA_SYNCHRONIZE_SERVICE}"

# Refresh the Blast sepolia missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Blast sepolia missing service and timer with: ${BLAST_SEPOLIA_MISSING_SERVICE_FILE}, ${BLAST_SEPOLIA_MISSING_TIMER_FILE}"
for unit in "${BLAST_SEPOLIA_MISSING_SERVICE_FILE}" "${BLAST_SEPOLIA_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${BLAST_SEPOLIA_MISSING_TIMER_FILE}"

# Proofofplay Apex
# Refresh the Proofofplay Apex block/transaction synchronizer service unit.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Proofofplay Apex block with transactions syncronizer service definition with ${PROOFOFPLAY_APEX_SYNCHRONIZE_SERVICE}"
src="${SCRIPT_DIR}/${PROOFOFPLAY_APEX_SYNCHRONIZE_SERVICE}"
chmod 644 "${src}"
cp "${src}" "/home/ubuntu/.config/systemd/user/${PROOFOFPLAY_APEX_SYNCHRONIZE_SERVICE}"
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${PROOFOFPLAY_APEX_SYNCHRONIZE_SERVICE}"

# Refresh the Proofofplay Apex missing-blocks service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Proofofplay Apex missing service and timer with: ${PROOFOFPLAY_APEX_MISSING_SERVICE_FILE}, ${PROOFOFPLAY_APEX_MISSING_TIMER_FILE}"
for unit in "${PROOFOFPLAY_APEX_MISSING_SERVICE_FILE}" "${PROOFOFPLAY_APEX_MISSING_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${PROOFOFPLAY_APEX_MISSING_TIMER_FILE}"
|
|
@ -0,0 +1,305 @@
|
|||
#!/usr/bin/env bash

# Deployment script - intended to run on Moonstream crawlers server

# Colors
C_RESET='\033[0m'
C_RED='\033[1;31m'
C_GREEN='\033[1;32m'
C_YELLOW='\033[1;33m'

# Logs
# Log prefixes; the timestamp is captured once at script start, not per message.
PREFIX_INFO="${C_GREEN}[INFO]${C_RESET} [$(date +%d-%m\ %T)]"
PREFIX_WARN="${C_YELLOW}[WARN]${C_RESET} [$(date +%d-%m\ %T)]"
PREFIX_CRIT="${C_RED}[CRIT]${C_RESET} [$(date +%d-%m\ %T)]"

# Main
# All locations are overridable from the environment; defaults target the
# standard Moonstream crawlers server layout under /home/ubuntu.
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}"
APP_DIR="${APP_DIR:-/home/ubuntu/moonstream}"
APP_CRAWLERS_DIR="${APP_DIR}/crawlers"
PYTHON_ENV_DIR="${PYTHON_ENV_DIR:-/home/ubuntu/moonstream-env}"
PYTHON="${PYTHON_ENV_DIR}/bin/python"
PIP="${PYTHON_ENV_DIR}/bin/pip"
SECRETS_DIR="${SECRETS_DIR:-/home/ubuntu/moonstream-secrets}"
PARAMETERS_ENV_PATH="${SECRETS_DIR}/app.env"
# Quote $0 and the inner command substitution: the original unquoted form
# was subject to word splitting and globbing if the script path contained
# spaces or glob characters.
SCRIPT_DIR="$(realpath "$(dirname "$0")")"
|
||||
|
||||
# Names of the systemd user unit files (service + timer per crawler) that
# this script installs into /home/ubuntu/.config/systemd/user/ below.
# Each chain gets a historical transactions crawler and a historical events
# crawler.

# Ethereum service files
ETHEREUM_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="ethereum-historical-crawl-transactions.service"
ETHEREUM_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="ethereum-historical-crawl-transactions.timer"
ETHEREUM_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="ethereum-historical-crawl-events.service"
ETHEREUM_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="ethereum-historical-crawl-events.timer"

# Polygon service files
POLYGON_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="polygon-historical-crawl-transactions.service"
POLYGON_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="polygon-historical-crawl-transactions.timer"
POLYGON_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="polygon-historical-crawl-events.service"
POLYGON_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="polygon-historical-crawl-events.timer"

# Amoy service files
AMOY_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="amoy-historical-crawl-transactions.service"
AMOY_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="amoy-historical-crawl-transactions.timer"
AMOY_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="amoy-historical-crawl-events.service"
AMOY_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="amoy-historical-crawl-events.timer"

# XDai service files
# NOTE(review): the "XDai_" prefix breaks the ALL_CAPS convention used by
# every other variable here; left as-is because references may exist beyond
# this file section.
XDai_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="xdai-historical-crawl-transactions.service"
XDai_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="xdai-historical-crawl-transactions.timer"
XDai_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="xdai-historical-crawl-events.service"
XDai_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="xdai-historical-crawl-events.timer"

# ZkSync Era
ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="zksync-era-historical-crawl-transactions.service"
ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="zksync-era-historical-crawl-transactions.timer"
ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="zksync-era-historical-crawl-events.service"
ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="zksync-era-historical-crawl-events.timer"

# ZkSync Era Sepolia
ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="zksync-era-sepolia-historical-crawl-transactions.service"
ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="zksync-era-sepolia-historical-crawl-transactions.timer"
ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="zksync-era-sepolia-historical-crawl-events.service"
ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="zksync-era-sepolia-historical-crawl-events.timer"

# Arbitrum one
ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="arbitrum-one-historical-crawl-transactions.service"
ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="arbitrum-one-historical-crawl-transactions.timer"
ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="arbitrum-one-historical-crawl-events.service"
ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="arbitrum-one-historical-crawl-events.timer"

# ProofofPlay APEX
PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="proofofplay-apex-historical-crawl-transactions.service"
PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="proofofplay-apex-historical-crawl-transactions.timer"
PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="proofofplay-apex-historical-crawl-events.service"
PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="proofofplay-apex-historical-crawl-events.timer"

# XAI
XAI_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="xai-historical-crawl-transactions.service"
XAI_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="xai-historical-crawl-transactions.timer"
XAI_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="xai-historical-crawl-events.service"
XAI_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="xai-historical-crawl-events.timer"

# XAI Sepolia
XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE="xai-sepolia-historical-crawl-transactions.service"
XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE="xai-sepolia-historical-crawl-transactions.timer"
XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE="xai-sepolia-historical-crawl-events.service"
XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE="xai-sepolia-historical-crawl-events.timer"
|
||||
|
||||
|
||||
# Abort on the first failing command or use of an unset variable.
set -eu

# Bring the Python environment up to date.
echo
echo
echo -e "${PREFIX_INFO} Upgrading Python pip and setuptools"
"${PIP}" install --upgrade pip setuptools

echo
echo
echo -e "${PREFIX_INFO} Installing Python dependencies"
"${PIP}" install -e "${APP_CRAWLERS_DIR}/mooncrawl/"

# Install the checkenv tool used to pull parameters from AWS SSM.
echo
echo
echo -e "${PREFIX_INFO} Install checkenv"
HOME=/home/ubuntu /usr/local/go/bin/go install github.com/bugout-dev/checkenv@latest

# Fetch deployment parameters into the secrets env file.
echo
echo
echo -e "${PREFIX_INFO} Retrieving deployment parameters"
if [ ! -d "${SECRETS_DIR}" ]; then
    mkdir -p "${SECRETS_DIR}"
    echo -e "${PREFIX_WARN} Created new secrets directory"
fi
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" CHECKENV_AWS_FETCH_LOOP_LIMIT=20 /home/ubuntu/go/bin/checkenv show aws_ssm+moonstream:true > "${PARAMETERS_ENV_PATH}"
chmod 0640 "${PARAMETERS_ENV_PATH}"
|
||||
|
||||
# Append the instance's private IP to the parameters file.
echo
echo
echo -e "${PREFIX_INFO} Add instance local IP to parameters"
local_ipv4="$(ec2metadata --local-ipv4)"
echo "AWS_LOCAL_IPV4=${local_ipv4}" >> "${PARAMETERS_ENV_PATH}"

# Make sure the per-user systemd unit directory exists before copying units.
echo
echo
if [ ! -d "/home/ubuntu/.config/systemd/user/" ]; then
    mkdir -p /home/ubuntu/.config/systemd/user/
    echo -e "${PREFIX_WARN} Created user systemd directory"
fi
|
||||
|
||||
# Refresh the Ethereum historical transactions crawler service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Ethereum historical transactions crawler service and timer with: ${ETHEREUM_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${ETHEREUM_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
for unit in "${ETHEREUM_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${ETHEREUM_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ETHEREUM_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"

# Refresh the Ethereum historical events crawler service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Ethereum historical events crawler service and timer with: ${ETHEREUM_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${ETHEREUM_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
for unit in "${ETHEREUM_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${ETHEREUM_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ETHEREUM_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"

# Refresh the Polygon historical transactions crawler service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Polygon historical transactions crawler service and timer with: ${POLYGON_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${POLYGON_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
for unit in "${POLYGON_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${POLYGON_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"

# Refresh the Polygon historical events crawler service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Polygon historical events crawler service and timer with: ${POLYGON_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${POLYGON_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
for unit in "${POLYGON_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${POLYGON_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"

# Refresh the Amoy historical transactions crawler service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Amoy historical transactions crawler service and timer with: ${AMOY_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${AMOY_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
for unit in "${AMOY_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${AMOY_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AMOY_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"

# Refresh the Amoy historical events crawler service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing Amoy historical events crawler service and timer with: ${AMOY_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${AMOY_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
for unit in "${AMOY_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${AMOY_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AMOY_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"

# Refresh the xDai historical transactions crawler service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing xDai historical transactions crawler service and timer with: ${XDai_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${XDai_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
for unit in "${XDai_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${XDai_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XDai_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"

# Refresh the xDai historical events crawler service/timer pair.
echo
echo
echo -e "${PREFIX_INFO} Replacing existing xDai historical events crawler service and timer with: ${XDai_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${XDai_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
for unit in "${XDai_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${XDai_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"; do
    chmod 644 "${SCRIPT_DIR}/${unit}"
    cp "${SCRIPT_DIR}/${unit}" "/home/ubuntu/.config/systemd/user/${unit}"
done
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XDai_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era historical transactions crawler service and timer with: ${ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${SCRIPT_DIR}/${ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era historical events crawler service and timer with: ${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${SCRIPT_DIR}/${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era Sepolia historical transactions crawler service and timer with: ${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era Sepolia historical events crawler service and timer with: ${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Proofofplay Apex historical transactions crawler service and timer with: ${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Proofofplay Apex historical events crawler service and timer with: ${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${PROOFOFPLAY_APEX_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Arbitrum one historical transactions crawler service and timer with: ${ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${SCRIPT_DIR}/${ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_ONE_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Arbitrum one historical events crawler service and timer with: ${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${SCRIPT_DIR}/${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_ONE_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing XAI historical transactions crawler service and timer with: ${XAI_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${XAI_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${XAI_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${SCRIPT_DIR}/${XAI_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing XAI historical events crawler service and timer with: ${XAI_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${XAI_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${XAI_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${SCRIPT_DIR}/${XAI_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing XAI Sepolia historical transactions crawler service and timer with: ${XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}, ${XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "${SCRIPT_DIR}/${XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_SEPOLIA_HISTORICAL_CRAWL_TRANSACTIONS_TIMER_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing XAI Sepolia historical events crawler service and timer with: ${XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}, ${XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "${SCRIPT_DIR}/${XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_SEPOLIA_HISTORICAL_CRAWL_EVENTS_TIMER_FILE}"
|
|
@ -0,0 +1,77 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Deployment script of monitoring services - intended to run on Moonstream crawlers server
|
||||
|
||||
# Colors
|
||||
C_RESET='\033[0m'
|
||||
C_RED='\033[1;31m'
|
||||
C_GREEN='\033[1;32m'
|
||||
C_YELLOW='\033[1;33m'
|
||||
|
||||
# Logs
|
||||
PREFIX_INFO="${C_GREEN}[INFO]${C_RESET} [$(date +%d-%m\ %T)]"
|
||||
PREFIX_WARN="${C_YELLOW}[WARN]${C_RESET} [$(date +%d-%m\ %T)]"
|
||||
PREFIX_CRIT="${C_RED}[CRIT]${C_RESET} [$(date +%d-%m\ %T)]"
|
||||
|
||||
# Main
|
||||
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}"
|
||||
SECRETS_DIR="${SECRETS_DIR:-/home/ubuntu/moonstream-secrets}"
|
||||
PARAMETERS_ENV_MONITORING_PATH="${SECRETS_DIR}/monitoring.env"
|
||||
SCRIPT_DIR="$(realpath $(dirname $0))"
|
||||
|
||||
# Service files
|
||||
MONITORING_CRAWLERS_SERVICE_FILE="monitoring-crawlers.service"
|
||||
|
||||
set -eu
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Install checkenv"
|
||||
HOME=/home/ubuntu /usr/local/go/bin/go install github.com/bugout-dev/checkenv@latest
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Copy monitoring binary from AWS S3"
|
||||
aws s3 cp s3://bugout-binaries/prod/monitoring/monitoring "/home/ubuntu/monitoring"
|
||||
chmod +x "/home/ubuntu/monitoring"
|
||||
chown ubuntu:ubuntu "/home/ubuntu/monitoring"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Retrieving monitoring deployment parameters"
|
||||
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" /home/ubuntu/go/bin/checkenv show aws_ssm+service:true,monitoring:true > "${PARAMETERS_ENV_MONITORING_PATH}"
|
||||
chmod 0640 "${PARAMETERS_ENV_MONITORING_PATH}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Add instance local IP to monitoring parameters"
|
||||
echo "AWS_LOCAL_IPV4=$(ec2metadata --local-ipv4)" >> "${PARAMETERS_ENV_MONITORING_PATH}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Add AWS default region to monitoring parameters"
|
||||
echo "AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION}" >> "${PARAMETERS_ENV_MONITORING_PATH}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Prepare monitoring configuration"
|
||||
if [ ! -d "/home/ubuntu/.monitoring" ]; then
|
||||
mkdir -p /home/ubuntu/.monitoring
|
||||
echo -e "${PREFIX_WARN} Created monitoring configuration directory"
|
||||
fi
|
||||
cp "${SCRIPT_DIR}/monitoring-crawlers-config.json" /home/ubuntu/.monitoring/monitoring-crawlers-config.json
|
||||
|
||||
echo
|
||||
echo
|
||||
if [ ! -d "/home/ubuntu/.config/systemd/user/" ]; then
|
||||
mkdir -p /home/ubuntu/.config/systemd/user/
|
||||
echo -e "${PREFIX_WARN} Created user systemd directory"
|
||||
fi
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing systemd crawlers monitoring service definition with ${MONITORING_CRAWLERS_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${MONITORING_CRAWLERS_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${MONITORING_CRAWLERS_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${MONITORING_CRAWLERS_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart "${MONITORING_CRAWLERS_SERVICE_FILE}"
|
|
@ -23,7 +23,7 @@ set -eu
|
|||
|
||||
if [ ! -d "$PYTHON_ENV_DIR" ]; then
|
||||
echo -e "${PREFIX_WARN} Dierectory with Python environment doesn't exist, generating..."
|
||||
sudo -u ubuntu python3.8 -m venv "${PYTHON_ENV_DIR}"
|
||||
python3.9 -m venv "${PYTHON_ENV_DIR}"
|
||||
fi
|
||||
|
||||
echo
|
||||
|
@ -34,12 +34,12 @@ echo -e "${PREFIX_INFO} Upgrading Python pip and setuptools"
|
|||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Installing Python dependencies"
|
||||
"${PIP}" install moonworm
|
||||
"${PIP}" install moonworm==0.2.4
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Moonworm watch Unicorns service definition with ${MOONWORM_WATCH_UNICORNS_MAINNET_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${MOONWORM_WATCH_UNICORNS_MAINNET_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${MOONWORM_WATCH_UNICORNS_MAINNET_SERVICE_FILE}" "/etc/systemd/system/${MOONWORM_WATCH_UNICORNS_MAINNET_SERVICE_FILE}"
|
||||
systemctl daemon-reload
|
||||
systemctl restart "${MOONWORM_WATCH_UNICORNS_MAINNET_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${MOONWORM_WATCH_UNICORNS_MAINNET_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${MOONWORM_WATCH_UNICORNS_MAINNET_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${MOONWORM_WATCH_UNICORNS_MAINNET_SERVICE_FILE}"
|
||||
|
|
|
@ -0,0 +1,211 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
# Deployment script - intended to run on Moonstream crawlers server
|
||||
|
||||
# Colors
|
||||
C_RESET='\033[0m'
|
||||
C_RED='\033[1;31m'
|
||||
C_GREEN='\033[1;32m'
|
||||
C_YELLOW='\033[1;33m'
|
||||
|
||||
# Logs
|
||||
PREFIX_INFO="${C_GREEN}[INFO]${C_RESET} [$(date +%d-%m\ %T)]"
|
||||
PREFIX_WARN="${C_YELLOW}[WARN]${C_RESET} [$(date +%d-%m\ %T)]"
|
||||
PREFIX_CRIT="${C_RED}[CRIT]${C_RESET} [$(date +%d-%m\ %T)]"
|
||||
|
||||
# Main
|
||||
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}"
|
||||
APP_DIR="${APP_DIR:-/home/ubuntu/moonstream}"
|
||||
APP_CRAWLERS_DIR="${APP_DIR}/crawlers"
|
||||
PYTHON_ENV_DIR="${PYTHON_ENV_DIR:-/home/ubuntu/moonstream-env}"
|
||||
PYTHON="${PYTHON_ENV_DIR}/bin/python"
|
||||
PIP="${PYTHON_ENV_DIR}/bin/pip"
|
||||
SECRETS_DIR="${SECRETS_DIR:-/home/ubuntu/moonstream-secrets}"
|
||||
PARAMETERS_ENV_PATH="${SECRETS_DIR}/app.env"
|
||||
SCRIPT_DIR="$(realpath $(dirname $0))"
|
||||
|
||||
# Service files
|
||||
ETHEREUM_MOONWORM_CRAWLER_SERVICE_FILE="ethereum-moonworm-crawler.service"
|
||||
POLYGON_MOONWORM_CRAWLER_SERVICE_FILE="polygon-moonworm-crawler.service"
|
||||
AMOY_MOONWORM_CRAWLER_SERVICE_FILE="amoy-moonworm-crawler.service"
|
||||
XDAI_MOONWORM_CRAWLER_SERVICE_FILE="xdai-moonworm-crawler.service"
|
||||
ZKSYNC_ERA_MOONWORM_CRAWLER_SERVICE_FILE="zksync-era-moonworm-crawler.service"
|
||||
ZKSYNC_ERA_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE="zksync-era-sepolia-moonworm-crawler.service"
|
||||
ARBITRUM_ONE_MOONWORM_CRAWLER_SERVICE_FILE="arbitrum-one-moonworm-crawler.service"
|
||||
ARBITRUM_NOVA_MOONWORM_CRAWLER_SERVICE_FILE="arbitrum-nova-moonworm-crawler.service"
|
||||
ARBITRUM_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE="arbitrum-sepolia-moonworm-crawler.service"
|
||||
XAI_MOONWORM_CRAWLER_SERVICE_FILE="xai-moonworm-crawler.service"
|
||||
XAI_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE="xai-sepolia-moonworm-crawler.service"
|
||||
AVALANCHE_MOONWORM_CRAWLER_SERVICE_FILE="avalanche-moonworm-crawler.service"
|
||||
AVALANCHE_FUJI_MOONWORM_CRAWLER_SERVICE_FILE="avalanche-fuji-moonworm-crawler.service"
|
||||
BLAST_MOONWORM_CRAWLER_SERVICE_FILE="blast-moonworm-crawler.service"
|
||||
BLAST_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE="blast-sepolia-moonworm-crawler.service"
|
||||
PROOFOFPLAY_APEX_MOONWORM_CRAWLER_SERVICE_FILE="proofofplay-apex-moonworm-crawler.service"
|
||||
|
||||
set -eu
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Upgrading Python pip and setuptools"
|
||||
"${PIP}" install --upgrade pip setuptools
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Installing Python dependencies"
|
||||
"${PIP}" install -e "${APP_CRAWLERS_DIR}/mooncrawl/"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Install checkenv"
|
||||
HOME=/home/ubuntu /usr/local/go/bin/go install github.com/bugout-dev/checkenv@latest
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Retrieving deployment parameters"
|
||||
if [ ! -d "${SECRETS_DIR}" ]; then
|
||||
mkdir -p "${SECRETS_DIR}"
|
||||
echo -e "${PREFIX_WARN} Created new secrets directory"
|
||||
fi
|
||||
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" CHECKENV_AWS_FETCH_LOOP_LIMIT=20 /home/ubuntu/go/bin/checkenv show aws_ssm+moonstream:true > "${PARAMETERS_ENV_PATH}"
|
||||
chmod 0640 "${PARAMETERS_ENV_PATH}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Add instance local IP to parameters"
|
||||
echo "AWS_LOCAL_IPV4=$(ec2metadata --local-ipv4)" >> "${PARAMETERS_ENV_PATH}"
|
||||
|
||||
echo
|
||||
echo
|
||||
if [ ! -d "/home/ubuntu/.config/systemd/user/" ]; then
|
||||
mkdir -p /home/ubuntu/.config/systemd/user/
|
||||
echo -e "${PREFIX_WARN} Created user systemd directory"
|
||||
fi
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Ethereum moonworm crawler service definition with ${ETHEREUM_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ETHEREUM_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ETHEREUM_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ETHEREUM_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ETHEREUM_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Polygon moonworm crawler service definition with ${POLYGON_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${POLYGON_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${POLYGON_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${POLYGON_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${POLYGON_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Amoy moonworm crawler service definition with ${AMOY_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${AMOY_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${AMOY_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${AMOY_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AMOY_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing XDai moonworm crawler service definition with ${XDAI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${XDAI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XDAI_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${XDAI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XDAI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era moonworm crawler service definition with ${ZKSYNC_ERA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ZKSYNC_ERA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing ZkSync Era Sepolia moonworm crawler service definition with ${ZKSYNC_ERA_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ZKSYNC_ERA_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ZKSYNC_ERA_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ZKSYNC_ERA_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Arbitrum One moonworm crawler service definition with ${ARBITRUM_ONE_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ARBITRUM_ONE_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ARBITRUM_ONE_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_ONE_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_ONE_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Arbitrum Nova moonworm crawler service definition with ${ARBITRUM_NOVA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ARBITRUM_NOVA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ARBITRUM_NOVA_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_NOVA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_NOVA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Arbitrum Sepolia moonworm crawler service definition with ${ARBITRUM_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${ARBITRUM_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${ARBITRUM_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${ARBITRUM_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Xai moonworm crawler service definition with ${XAI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${XAI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Xai sepolia moonworm crawler service definition with ${XAI_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${XAI_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${XAI_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${XAI_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${XAI_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Avalanche moonworm crawler service definition with ${AVALANCHE_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${AVALANCHE_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${AVALANCHE_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${AVALANCHE_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AVALANCHE_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Avalanche Fuji moonworm crawler service definition with ${AVALANCHE_FUJI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${AVALANCHE_FUJI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${AVALANCHE_FUJI_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${AVALANCHE_FUJI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${AVALANCHE_FUJI_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Blast moonworm crawler service definition with ${BLAST_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${BLAST_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${BLAST_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${BLAST_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${BLAST_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Blast sepolia moonworm crawler service definition with ${BLAST_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${BLAST_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${BLAST_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${BLAST_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${BLAST_SEPOLIA_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
|
||||
echo
|
||||
echo
|
||||
echo -e "${PREFIX_INFO} Replacing existing Proofofplay Apex moonworm crawler service definition with ${PROOFOFPLAY_APEX_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
chmod 644 "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
cp "${SCRIPT_DIR}/${PROOFOFPLAY_APEX_MOONWORM_CRAWLER_SERVICE_FILE}" "/home/ubuntu/.config/systemd/user/${PROOFOFPLAY_APEX_MOONWORM_CRAWLER_SERVICE_FILE}"
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
|
||||
XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${PROOFOFPLAY_APEX_MOONWORM_CRAWLER_SERVICE_FILE}"
|
|
@ -0,0 +1,158 @@
|
|||
#!/usr/bin/env bash

# Deployment script - intended to run on Moonstream crawlers server
#
# Installs/updates the mooncrawl Python package, refreshes deployment
# secrets from AWS SSM via checkenv, and (re)installs the user-level
# systemd service/timer unit pairs for the Ethereum, Polygon and
# ZkSync Era state and metadata crawlers.

# Colors
C_RESET='\033[0m'
C_RED='\033[1;31m'
C_GREEN='\033[1;32m'
C_YELLOW='\033[1;33m'

# Logs
PREFIX_INFO="${C_GREEN}[INFO]${C_RESET} [$(date +%d-%m\ %T)]"
PREFIX_WARN="${C_YELLOW}[WARN]${C_RESET} [$(date +%d-%m\ %T)]"
PREFIX_CRIT="${C_RED}[CRIT]${C_RESET} [$(date +%d-%m\ %T)]"

# Main
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}"
APP_DIR="${APP_DIR:-/home/ubuntu/moonstream}"
APP_CRAWLERS_DIR="${APP_DIR}/crawlers"
PYTHON_ENV_DIR="${PYTHON_ENV_DIR:-/home/ubuntu/moonstream-env}"
PYTHON="${PYTHON_ENV_DIR}/bin/python"
PIP="${PYTHON_ENV_DIR}/bin/pip"
SECRETS_DIR="${SECRETS_DIR:-/home/ubuntu/moonstream-secrets}"
PARAMETERS_ENV_PATH="${SECRETS_DIR}/app.env"
# Fix: quote $0 and the dirname output so the script path survives
# word-splitting/globbing (the original used: $(realpath $(dirname $0))).
SCRIPT_DIR="$(realpath "$(dirname "$0")")"
USER_SYSTEMD_DIR="/home/ubuntu/.config/systemd/user"

# Ethereum service files
ETHEREUM_STATE_SERVICE_FILE="ethereum-state.service"
ETHEREUM_STATE_TIMER_FILE="ethereum-state.timer"
ETHEREUM_STATE_CLEAN_SERVICE_FILE="ethereum-state-clean.service"
ETHEREUM_STATE_CLEAN_TIMER_FILE="ethereum-state-clean.timer"
ETHEREUM_METADATA_SERVICE_FILE="ethereum-metadata.service"
ETHEREUM_METADATA_TIMER_FILE="ethereum-metadata.timer"

# Polygon service files
POLYGON_STATE_SERVICE_FILE="polygon-state.service"
POLYGON_STATE_TIMER_FILE="polygon-state.timer"
POLYGON_STATE_CLEAN_SERVICE_FILE="polygon-state-clean.service"
POLYGON_STATE_CLEAN_TIMER_FILE="polygon-state-clean.timer"
POLYGON_METADATA_SERVICE_FILE="polygon-metadata.service"
POLYGON_METADATA_TIMER_FILE="polygon-metadata.timer"

# ZkSync Era
ZKSYNC_ERA_STATE_SERVICE_FILE="zksync-era-state.service"
ZKSYNC_ERA_STATE_TIMER_FILE="zksync-era-state.timer"
ZKSYNC_ERA_STATE_CLEAN_SERVICE_FILE="zksync-era-state-clean.service"
ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE="zksync-era-state-clean.timer"

set -eu

# deploy_service_timer DESCRIPTION SERVICE_FILE TIMER_FILE
#
# Copies one service/timer unit pair from SCRIPT_DIR into the user systemd
# directory, reloads the user daemon, and restarts the timer without
# blocking. Factored out of eight identical copy-pasted stanzas.
deploy_service_timer() {
  local description="$1"
  local service_file="$2"
  local timer_file="$3"
  echo
  echo
  echo -e "${PREFIX_INFO} Replacing existing ${description} service and timer with: ${service_file}, ${timer_file}"
  chmod 644 "${SCRIPT_DIR}/${service_file}" "${SCRIPT_DIR}/${timer_file}"
  cp "${SCRIPT_DIR}/${service_file}" "${USER_SYSTEMD_DIR}/${service_file}"
  cp "${SCRIPT_DIR}/${timer_file}" "${USER_SYSTEMD_DIR}/${timer_file}"
  XDG_RUNTIME_DIR="/run/user/1000" systemctl --user daemon-reload
  XDG_RUNTIME_DIR="/run/user/1000" systemctl --user restart --no-block "${timer_file}"
}

echo
echo
echo -e "${PREFIX_INFO} Upgrading Python pip and setuptools"
"${PIP}" install --upgrade pip setuptools

echo
echo
echo -e "${PREFIX_INFO} Installing Python dependencies"
"${PIP}" install -e "${APP_CRAWLERS_DIR}/mooncrawl/"

echo
echo
echo -e "${PREFIX_INFO} Install checkenv"
HOME=/home/ubuntu /usr/local/go/bin/go install github.com/bugout-dev/checkenv@latest

echo
echo
echo -e "${PREFIX_INFO} Retrieving deployment parameters"
if [ ! -d "${SECRETS_DIR}" ]; then
  mkdir -p "${SECRETS_DIR}"
  echo -e "${PREFIX_WARN} Created new secrets directory"
fi
AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION}" CHECKENV_AWS_FETCH_LOOP_LIMIT=20 /home/ubuntu/go/bin/checkenv show aws_ssm+moonstream:true > "${PARAMETERS_ENV_PATH}"
# Secrets file: owner read/write, group read only.
chmod 0640 "${PARAMETERS_ENV_PATH}"

echo
echo
echo -e "${PREFIX_INFO} Add instance local IP to parameters"
echo "AWS_LOCAL_IPV4=$(ec2metadata --local-ipv4)" >> "${PARAMETERS_ENV_PATH}"

echo
echo
if [ ! -d "${USER_SYSTEMD_DIR}/" ]; then
  mkdir -p "${USER_SYSTEMD_DIR}/"
  echo -e "${PREFIX_WARN} Created user systemd directory"
fi

deploy_service_timer "Ethereum state" "${ETHEREUM_STATE_SERVICE_FILE}" "${ETHEREUM_STATE_TIMER_FILE}"
deploy_service_timer "Ethereum state clean" "${ETHEREUM_STATE_CLEAN_SERVICE_FILE}" "${ETHEREUM_STATE_CLEAN_TIMER_FILE}"
deploy_service_timer "Ethereum metadata" "${ETHEREUM_METADATA_SERVICE_FILE}" "${ETHEREUM_METADATA_TIMER_FILE}"
deploy_service_timer "Polygon state" "${POLYGON_STATE_SERVICE_FILE}" "${POLYGON_STATE_TIMER_FILE}"
deploy_service_timer "Polygon state clean" "${POLYGON_STATE_CLEAN_SERVICE_FILE}" "${POLYGON_STATE_CLEAN_TIMER_FILE}"
deploy_service_timer "Polygon metadata" "${POLYGON_METADATA_SERVICE_FILE}" "${POLYGON_METADATA_TIMER_FILE}"
deploy_service_timer "ZkSync Era state" "${ZKSYNC_ERA_STATE_SERVICE_FILE}" "${ZKSYNC_ERA_STATE_TIMER_FILE}"
deploy_service_timer "ZkSync Era state clean" "${ZKSYNC_ERA_STATE_CLEAN_SERVICE_FILE}" "${ZKSYNC_ERA_STATE_CLEAN_TIMER_FILE}"
|
Plik diff jest za duży
Load Diff
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Ethereum historical crawler events
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli historical-crawl --blockchain-type ethereum --find-deployed-blocks --end 0 --tasks-journal --only-events
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=ethereum-historical-crawl-events
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Runs events historical crawler on ethereum
|
||||
|
||||
[Timer]
|
||||
OnBootSec=60s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Ethereum historical crawler transactions
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli historical-crawl --blockchain-type ethereum --find-deployed-blocks --end 0 --tasks-journal --only-functions
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=ethereum-historical-crawl-transactions
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Runs transactions historical crawler on ethereum
|
||||
|
||||
[Timer]
|
||||
OnBootSec=60s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Execute metadata crawler
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.metadata_crawler.cli crawl --blockchain ethereum
|
||||
CPUWeight=60
|
||||
SyslogIdentifier=ethereum-metadata
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Execute Ethereum metadata crawler each 60m
|
||||
|
||||
[Timer]
|
||||
OnBootSec=20s
|
||||
OnUnitActiveSec=60m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -4,12 +4,8 @@ After=network.target
|
|||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
User=ubuntu
|
||||
Group=www-data
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler \
|
||||
--access-id "${NB_CONTROLLER_ACCESS_ID}" \
|
||||
blocks missing --blockchain ethereum -n
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks missing --blockchain ethereum -n
|
||||
CPUWeight=50
|
||||
SyslogIdentifier=ethereum-missing
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
Description=Fill missing blocks at Ethereum database
|
||||
|
||||
[Timer]
|
||||
OnBootSec=10s
|
||||
OnBootSec=40s
|
||||
OnUnitActiveSec=15m
|
||||
|
||||
[Install]
|
||||
|
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Ethereum moonworm crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.moonworm_crawler.cli crawl -b ethereum
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=ethereum-moonworm-crawler
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Runs custom crawler for orange dao tokenomics
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.reports_crawler.cli orangedao --moonstream-token "${MOONSTREAM_ORANGE_DAO_QUERIES_DATA_ACCESS_TOKEN}" run_tokenomics_orange_dao
|
||||
CPUWeight=60
|
||||
SyslogIdentifier=ethereum-orange-dao-reports-tokenonomics
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Runs custom crawler for CU tokenomics
|
||||
|
||||
[Timer]
|
||||
OnBootSec=60s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -1,15 +1,11 @@
|
|||
[Unit]
|
||||
Description=Update XDai statistics dashboards
|
||||
Description=Execute state clean labels crawler
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
User=ubuntu
|
||||
Group=www-data
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.stats_worker.dashboard \
|
||||
--access-id "${NB_CONTROLLER_ACCESS_ID}" \
|
||||
generate --blockchain xdai
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli clean-state-labels --blockchain ethereum -N 10000
|
||||
CPUWeight=60
|
||||
SyslogIdentifier=xdai-statistics
|
||||
SyslogIdentifier=ethereum-state-clean
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Execute Ethereum state clean labels crawler each 25m
|
||||
|
||||
[Timer]
|
||||
OnBootSec=50s
|
||||
OnUnitActiveSec=25m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Execute state crawler
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.state_crawler.cli crawl-jobs --moonstream-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}" --blockchain ethereum --jobs-file /home/ubuntu/moonstream/crawlers/mooncrawl/mooncrawl/state_crawler/jobs/ethereum-jobs.json
|
||||
CPUWeight=60
|
||||
SyslogIdentifier=ethereum-state
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Execute Ethereum state crawler each 10m
|
||||
|
||||
[Timer]
|
||||
OnBootSec=15s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -7,13 +7,9 @@ After=network.target
|
|||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
User=ubuntu
|
||||
Group=www-data
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler \
|
||||
--access-id "${NB_CONTROLLER_ACCESS_ID}" \
|
||||
blocks synchronize --blockchain ethereum -c 6 -j 2
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.crawler blocks synchronize --blockchain ethereum -c 6 -j 2
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=ethereum-synchronize
|
||||
|
||||
|
|
|
@ -1,9 +0,0 @@
|
|||
[Unit]
|
||||
Description=Load trending Ethereum addresses to the database every 5 minutes
|
||||
|
||||
[Timer]
|
||||
OnBootSec=10s
|
||||
OnUnitActiveSec=5m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -1,21 +0,0 @@
|
|||
[Unit]
|
||||
Description=Ethereum txpool crawler
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
User=ubuntu
|
||||
Group=www-data
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/txpool
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonstream/crawlers/txpool/txpool \
|
||||
-blockchain ethereum \
|
||||
-access-id "${NB_CONTROLLER_ACCESS_ID}"
|
||||
CPUWeight=30
|
||||
SyslogIdentifier=ethereum-txpool
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Runs leaderboards generator worker
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.leaderboards_generator.cli leaderboards-generate --query-api-access-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}"
|
||||
CPUWeight=60
|
||||
SyslogIdentifier=leaderboards-worker
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Runs leaderboard update script every 10 minutes
|
||||
|
||||
[Timer]
|
||||
OnBootSec=60s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"notification_pagerduty": true,
|
||||
"notification_telegram": true,
|
||||
"notification_sendgrid": true,
|
||||
"notification_humbug": true,
|
||||
"silent": []
|
||||
}
|
|
@ -0,0 +1,17 @@
|
|||
[Unit]
|
||||
Description=Monitor crawlers systemd state
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
WorkingDirectory=/home/ubuntu/
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/monitoring.env
|
||||
ExecStart=/home/ubuntu/monitoring -plugin systemd -host "${AWS_LOCAL_IPV4}" -port 7171 -healthcheck -server -threshold 3 -config /home/ubuntu/.monitoring/monitoring-crawlers-config.json -service ethereum-moonworm-crawler.service -service amoy-moonworm-crawler.service -service polygon-moonworm-crawler.service -service zksync-era-moonworm-crawler.service -service zksync-era-sepolia-moonworm-crawler.service -service arbitrum-nova-moonworm-crawler.service -service arbitrum-sepolia-moonworm-crawler.service -service xai-moonworm-crawler.service -service xai-sepolia-moonworm-crawler.service -service avalanche-moonworm-crawler.service -service avalanche-fuji-moonworm-crawler.service -service blast-moonworm-crawler.service -service blast-sepolia-moonworm-crawler.service -service proofofplay-apex-moonworm-crawler.service -service arbitrum-one-moonworm-crawler.service
|
||||
CPUWeight=90
|
||||
SyslogIdentifier=monitoring-crawlers
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -5,8 +5,6 @@ StartLimitIntervalSec=300
|
|||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
User=ubuntu
|
||||
Group=www-data
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
Description=Moonworm CryptoUnicorns watch
|
||||
After=network.target
|
||||
StartLimitIntervalSec=300
|
||||
StartLimitBurst=3
|
||||
|
||||
[Service]
|
||||
User=ubuntu
|
||||
Group=www-data
|
||||
WorkingDirectory=/home/ubuntu
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
Restart=on-failure
|
||||
RestartSec=15s
|
||||
ExecStart=/home/ubuntu/moonworm-env/bin/python -m moonworm.cli watch-cu -w "${MOONSTREAM_POLYGON_WEB3_PROVIDER_URI}?access_id=${NB_CONTROLLER_ACCESS_ID}&data_source=blockchain" -c 0xdC0479CC5BbA033B3e7De9F178607150B3AbCe1f -d 21418707 --confirmations 60
|
||||
CPUWeight=70
|
||||
SyslogIdentifier=moonworm-unicorns-mainnet
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -1,102 +0,0 @@
|
|||
"""
|
||||
Collect secrets from AWS SSM Parameter Store and output as environment variable exports.
|
||||
"""
|
||||
import argparse
|
||||
from dataclasses import dataclass
|
||||
import sys
|
||||
from typing import Any, Dict, Iterable, List, Optional
|
||||
|
||||
import boto3
|
||||
|
||||
|
||||
@dataclass
class EnvironmentVariable:
    """A single environment variable: a (name, value) pair."""

    # Upper-cased final path component of the SSM parameter name,
    # as produced by parameter_to_env.
    name: str
    # Decrypted parameter value, taken verbatim from the SSM response.
    value: str
|
||||
|
||||
|
||||
def get_parameters(path: str) -> List[Dict[str, Any]]:
    """
    Retrieve parameters from AWS SSM Parameter Store. Decrypts any encrypted parameters.

    Relies on the appropriate environment variables to authenticate against AWS:
    https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-envvars.html

    :param path: SSM path to list (non-recursive).
    :return: Raw parameter objects as returned by the SSM API.
    """
    ssm = boto3.client("ssm")
    parameters: List[Dict[str, Any]] = []
    # Use the client's built-in paginator instead of a hand-rolled NextToken
    # loop. The original loop also mis-annotated next_token as
    # Optional[bool] even though the API returns it as a string.
    paginator = ssm.get_paginator("get_parameters_by_path")
    for page in paginator.paginate(Path=path, Recursive=False, WithDecryption=True):
        parameters.extend(page.get("Parameters", []))

    return parameters
|
||||
|
||||
|
||||
def parameter_to_env(parameter_object: Dict[str, Any]) -> EnvironmentVariable:
    """
    Transforms parameters returned by the AWS SSM API into EnvironmentVariables.

    The variable name is the final component of the parameter's path,
    upper-cased; the value is taken verbatim from the parameter object.

    :raises ValueError: if the parameter object lacks a "Name" or "Value" key.
    """
    raw_path = parameter_object.get("Name")
    if raw_path is None:
        raise ValueError('Did not find "Name" in parameter object')

    raw_value = parameter_object.get("Value")
    if raw_value is None:
        raise ValueError('Did not find "Value" in parameter object')

    env_name = raw_path.split("/")[-1].upper()
    return EnvironmentVariable(env_name, raw_value)
|
||||
|
||||
|
||||
def env_string(env_vars: Iterable[EnvironmentVariable], with_export: bool) -> str:
    """
    Produces a string which, when executed in a shell, exports the desired
    environment variables as specified by env_vars.

    Each variable is rendered as one `NAME="value"` line; when with_export
    is true, every line is prefixed with "export ".
    """
    if with_export:
        leader = "export "
    else:
        leader = ""
    return "\n".join(
        f'{leader}{variable.name}="{variable.value}"' for variable in env_vars
    )
|
||||
|
||||
|
||||
def extract_handler(args: argparse.Namespace) -> None:
    """
    Save environment variables to file.

    Fetches parameters from SSM at args.path, converts them to environment
    variables, and writes the rendered assignments to args.outfile
    (with `export ` prefixes when args.export is set).
    """
    variables = [parameter_to_env(item) for item in get_parameters(args.path)]
    rendered = env_string(variables, args.export)
    with args.outfile as ofp:
        print(rendered, file=ofp)
|
||||
|
||||
|
||||
def main() -> None:
    """Parse command-line arguments and dispatch to the selected handler."""
    parser = argparse.ArgumentParser(
        description="Materialize environment variables from AWS SSM Parameter Store"
    )
    # With no subcommand, fall back to printing usage.
    parser.set_defaults(func=lambda _: parser.print_help())
    subcommands = parser.add_subparsers(description="Parameters commands")

    parser_extract = subcommands.add_parser(
        "extract", description="Parameters extract commands"
    )
    parser_extract.add_argument(
        "-o", "--outfile", type=argparse.FileType("w"), default=sys.stdout
    )
    parser_extract.add_argument(
        "--export",
        action="store_true",
        help="Set to output environment strings with export statements",
    )
    parser_extract.add_argument(
        "-p",
        "--path",
        default=None,
        help="SSM path from which to pull environment variables (pull is NOT recursive)",
    )
    # Fix: the original set parser_extract's func default to print_help and
    # then immediately overrode it with extract_handler; the dead assignment
    # has been dropped.
    parser_extract.set_defaults(func=extract_handler)

    args = parser.parse_args()
    args.func(args)


if __name__ == "__main__":
    main()
|
|
@ -0,0 +1,11 @@
|
|||
[Unit]
|
||||
Description=Generate data for nft dashboard for cu community
|
||||
After=network.target
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
WorkingDirectory=/home/ubuntu/moonstream/crawlers/mooncrawl
|
||||
EnvironmentFile=/home/ubuntu/moonstream-secrets/app.env
|
||||
ExecStart=/home/ubuntu/moonstream-env/bin/python -m mooncrawl.reports_crawler.cli cu-reports --moonstream-token "${MOONSTREAM_PUBLIC_QUERIES_DATA_ACCESS_TOKEN}" generate-nft-dashboard
|
||||
CPUWeight=60
|
||||
SyslogIdentifier=polygon-cu-nft-dashboard
|
|
@ -0,0 +1,9 @@
|
|||
[Unit]
|
||||
Description=Generate data for nft dashboard for cu community
|
||||
|
||||
[Timer]
|
||||
OnBootSec=60s
|
||||
OnUnitActiveSec=10m
|
||||
|
||||
[Install]
|
||||
WantedBy=timers.target
|
Some files were not shown because too many files have changed in this diff Show More
Ładowanie…
Reference in New Issue