Compare commits
1514 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 17c6bf8d50 | |||
| ec98c8226e | |||
| 999d69365c | |||
| 7ba66997b4 | |||
| 03849e55f1 | |||
| 3faf29d761 | |||
| 60654579ae | |||
| b41839a5ee | |||
| de78151b2f | |||
| e569bb32b7 | |||
| 266004624d | |||
| b369361a43 | |||
| bab00db3ee | |||
| 6c8f83f42d | |||
| 40ffc2a3ba | |||
| 33329ba90b | |||
| 8f0ee9015d | |||
| fa012bebca | |||
| 9be95763e2 | |||
| 85fd1d081d | |||
| eac81778bd | |||
| fa27f79911 | |||
| 8e0ac790f1 | |||
| d992eee560 | |||
| 633abd19a9 | |||
| 3a3cd78cac | |||
| 27596aff20 | |||
| 57eea3a08a | |||
| e73dc9dc9b | |||
| d1935d31af | |||
| 93510e4b2e | |||
| e9127466f0 | |||
| 10046f1edb | |||
| fbb95920dc | |||
| b9a68aab04 | |||
| 9ce5058e85 | |||
| 53a3f9816a | |||
| d74c24339e | |||
| 7270de2937 | |||
| 808e172221 | |||
| 679d12b48f | |||
| 0ec810591e | |||
| c7a7e8431c | |||
| 934fb03ca2 | |||
| 4008b9aa25 | |||
| 8993a6dcb7 | |||
| 8730f0034d | |||
| c83606908f | |||
| 970cd568dc | |||
| 935800e334 | |||
| b5bc54b7f4 | |||
| 0301545df9 | |||
| e36b512908 | |||
| aa3c6e6209 | |||
| 93b69c4cbb | |||
| 7ffa2e891b | |||
| be99baf64c | |||
| 8592659432 | |||
| 1b6fac3313 | |||
| 01f35b603e | |||
| da90401b2d | |||
| 28cf7683a7 | |||
| d1ba2f53fa | |||
| 24a3610c4c | |||
| 7da653efc6 | |||
| 735a9dc1d2 | |||
| f2585c569c | |||
| 0fc46d5916 | |||
| 61ad3fbe45 | |||
| 12213fb31b | |||
| 7a361a330d | |||
| 34a136eba5 | |||
| fcd95f1a25 | |||
| a95e44cf42 | |||
| 040ab106b4 | |||
| 5f3fcc2b3e | |||
| 114aec73ed | |||
| 60bf09e813 | |||
| 347f62bd6d | |||
| 878f0e9bcf | |||
| 18db17fe0b | |||
| b53c908b55 | |||
| dc2fee3a9d | |||
| 387be40076 | |||
| 3153e8bf13 | |||
| 67c99436d1 | |||
| 431435f8bd | |||
| 57dc2023cd | |||
| d6b664d84a | |||
| 52d4c47e43 | |||
| 7f71ff9a2e | |||
| a82732a49c | |||
| a6ce0b7e67 | |||
| bab3200b6c | |||
| 89ed2003fa | |||
| a82b2b8cb8 | |||
| 6a47302192 | |||
| 0e0b2489e6 | |||
| 9ef4dad27c | |||
| e5a84e09a8 | |||
| 4bd62b3567 | |||
| 503cde6063 | |||
| 8757f1cda3 | |||
| ace2557a60 | |||
| 537d5c9133 | |||
| 00d26a684a | |||
| 1cfe51f894 | |||
| 43edca7093 | |||
| 839ed138ca | |||
| 797067ee38 | |||
| e45e05f337 | |||
| b8a9ecb253 | |||
| 0c464a9963 | |||
| 2f6cbe59eb | |||
| 33ecfce313 | |||
| 9229725300 | |||
| 89eca2ddf9 | |||
| a1b2d580a8 | |||
| c6cb645453 | |||
| 7e79308868 | |||
| da75aeecf2 | |||
| af39a73e8f | |||
| 53ce100859 | |||
| 364b4ddc32 | |||
| 06fda0554c | |||
| 0375f666d6 | |||
| aefb5ec6bd | |||
| 0a0e6f6301 | |||
| ab5dd13fcb | |||
| d67c54c6e7 | |||
| 4bb37c5ab3 | |||
| 2fbceb11e3 | |||
| 524353bfa1 | |||
| 822dad5352 | |||
| 5ac778ea53 | |||
| f3af19f03a | |||
| bd3d3881f5 | |||
| a17544bb4b | |||
| a101ff3cc9 | |||
| a5b9ee0c6c | |||
| 4a90917645 | |||
| d5d6201177 | |||
| 309f8fe2c5 | |||
| bf6b5dcb17 | |||
| 4ed005fb37 | |||
| 4fcb184286 | |||
| 2911c7b215 | |||
| ff668c223b | |||
| 52c41e29d8 | |||
| 38359e20e9 | |||
| 20bf272746 | |||
| 060d0dd95f | |||
| 72626e8dd0 | |||
| b68d93ec91 | |||
| 6e3d079c46 | |||
| 878b43c0a6 | |||
| 175fd8d0fb | |||
| dba8b6c475 | |||
| 32a0d895c4 | |||
| 4ae0410930 | |||
| 8bdd18649d | |||
| 8fcd926d43 | |||
| 68adab88bc | |||
| 82b313f62f | |||
| 4ce91d77d4 | |||
| 6350aa00d5 | |||
| 909665ab6c | |||
| 1dba58472b | |||
| 9c392e5791 | |||
| 63ebe382b6 | |||
| a2b8527d91 | |||
| 3867737fcc | |||
| 937953f2a2 | |||
| 96498c01bf | |||
| b92697b8c8 | |||
| ffe427564b | |||
| be8d92d7ae | |||
| 8a05ba2faf | |||
| c783ab7942 | |||
| 5e638bdf1d | |||
| 203d2014b0 | |||
| 3bfcfff1eb | |||
| e5080eaaf6 | |||
| a88aa4c8a0 | |||
| 6992031007 | |||
| c1a8249dc0 | |||
| 77283b3654 | |||
| f1e4aca9b8 | |||
| 963254973b | |||
| cad01e689e | |||
| c6282c9f5e | |||
| c8989237b0 | |||
| 516dd6f429 | |||
| 72a8ed5c16 | |||
| b4e6bac566 | |||
| cc59ccb9b5 | |||
| 1ddba5ebb1 | |||
| 582aa952e4 | |||
| 10dc5c0bd7 | |||
| 7be8b428e4 | |||
| 1d266c88c1 | |||
| 5caa9d8c7a | |||
| b0170b20d5 | |||
| f24a583e2e | |||
| 23819961e6 | |||
| a8819c907e | |||
| b5923137a7 | |||
| 78458760ec | |||
| 1286d0ea2a | |||
| 2fbcb9f918 | |||
| 5cdbc4eb53 | |||
| b4527fba8b | |||
| bcd51cf5cf | |||
| 8a9a3e8c0c | |||
| 986d12eab2 | |||
| 839abcbd28 | |||
| 544ac78a3b | |||
| 8d37e8fab5 | |||
| 2b30411c1b | |||
| baaa3bff5b | |||
| 0ce3420792 | |||
| 4db1a6f678 | |||
| f24104dc94 | |||
| ee9a355613 | |||
| 810a13cee0 | |||
| fe2041f22b | |||
| a0ac5c0674 | |||
| bcc5151116 | |||
| 8fd86d478c | |||
| 0b005c3e76 | |||
| 4a214b099e | |||
| eb27af7d90 | |||
| 0d8d7f3aeb | |||
| 80031c1d1a | |||
| bcea3c6bdf | |||
| bad50a8772 | |||
| bd3e42f83e | |||
| f927b0a911 | |||
| 8eae73eefb | |||
| 5d510915d2 | |||
| e2d4f917a0 | |||
| 2a3dbbdad3 | |||
| 7a6cffa74c | |||
| e929118349 | |||
| aae0d1ed74 | |||
| 8387cf667b | |||
| ffd424df12 | |||
| 0b32492fd6 | |||
| ade629ecf5 | |||
| 55cb4ca962 | |||
| efd6f59fca | |||
| a6ca30fdb1 | |||
| 3196b73a80 | |||
| 5a865cc5e1 | |||
| abbe80b1f0 | |||
| ac552be7e7 | |||
| cedbb3372e | |||
| 0420b777c9 | |||
| 83bf3023de | |||
| 0e777568fb | |||
| 75e9d5a621 | |||
| a2cdbc26bd | |||
| be9065354d | |||
| fa675f293d | |||
| b7d26d5ad7 | |||
| 21587493c0 | |||
| 4625dd39d0 | |||
| fd662721bb | |||
| 4a8c0d2e60 | |||
| c938cb231e | |||
| ba8c0016ac | |||
| b6e1bffd79 | |||
| 61351dabf1 | |||
| efa01cffc2 | |||
| bf156eaf7f | |||
| cb92b845e6 | |||
| b85deb229f | |||
| bff81eb6e3 | |||
| 617631063f | |||
| 1f8e6b698f | |||
| 09fe773987 | |||
| 273e2b7b32 | |||
| dc3cce0b8a | |||
| 2c0aa980e7 | |||
| 3ab0a00959 | |||
| 2ddf015a68 | |||
| b7eff027e7 | |||
| fde9539191 | |||
| 6945b9f9ed | |||
| 5100c8f0be | |||
| 1aba99f732 | |||
| 97ec0667a5 | |||
| c0935c848b | |||
| a2271a2ce8 | |||
| 83c013785f | |||
| e464a9fcdb | |||
| 59bf360937 | |||
| a33056039a | |||
| f1890332ae | |||
| b3a89d9c68 | |||
| a922196f20 | |||
| 89a2cb30e6 | |||
| 1bc452bc09 | |||
| ee2254281c | |||
| 59755a0b42 | |||
| d03c12ffb6 | |||
| 163b0f9edc | |||
| a6f9396760 | |||
| 84da2bdc7d | |||
| ed6dde2f0a | |||
| 3379251ccb | |||
| 7483a6a695 | |||
| d047c460ed | |||
| 81e057b900 | |||
| b6d1bd9ee2 | |||
| 08a1b5b81d | |||
| 7b95cfe833 | |||
| 6b90edf053 | |||
| f444bf39fb | |||
| 2ea47f03f4 | |||
| 74e24b7de3 | |||
| 5a35c1d1f8 | |||
| 5d06afbecc | |||
| 8514ec36d5 | |||
| 35ab261ee8 | |||
| 838e24bbed | |||
| 84604dfdc8 | |||
| d86ef13345 | |||
| b9c257a635 | |||
| 671397ba81 | |||
| c4c22f6733 | |||
| 03dccb638a | |||
| e3f9a19b84 | |||
| 262aa009c2 | |||
| 74e0630a41 | |||
| 29c3b267d9 | |||
| e1f3c93a1d | |||
| 259960cf45 | |||
| d395699dc4 | |||
| f490e4a1a4 | |||
| d352b0d932 | |||
| a9077b6c36 | |||
| 5f68eb453f | |||
| 5e0981e2a2 | |||
| b2d8e3ba27 | |||
| 5bf93c3dfd | |||
| b2c5bb6735 | |||
| 171672ee33 | |||
| aa5a35b15f | |||
| 82dd5e8683 | |||
| f5ac7bb4ee | |||
| e894ae28dc | |||
| d55a7ec5af | |||
| 0e165b57d0 | |||
| dfe7d3650f | |||
| b6951d64d4 | |||
| ee38d0d2b6 | |||
| ac27c60e0c | |||
| 6a8ae6d81a | |||
| 87c802fa90 | |||
| b08582224a | |||
| 66d35428fa | |||
| 344f4bcaa5 | |||
| ac65129ba6 | |||
| 322be2fe40 | |||
| 1097004245 | |||
| f4d0ce015d | |||
| adcff5c5c8 | |||
| 388801fd09 | |||
| cd98751667 | |||
| b0e2e97f09 | |||
| 239340a7a9 | |||
| 31d8d2d0d5 | |||
| 05ec6798ac | |||
| 5549f9d79f | |||
| 0fbf891c23 | |||
| 9ea99c92f9 | |||
| 8ebcff7390 | |||
| 659df00cc9 | |||
| 5451c297c2 | |||
| b09725726c | |||
| 4a440e3022 | |||
| 28ec1c3d67 | |||
| 347e8dcb86 | |||
| 67dcb87b81 | |||
| b849eec7ea | |||
| ebbd79ed7e | |||
| fe41df3e16 | |||
| 4a28a16639 | |||
| 59e58840c9 | |||
| 7e2a22c58c | |||
| 9b1de15373 | |||
| c84a2ef42b | |||
| 5527731e83 | |||
| 84db38c985 | |||
| a23866619d | |||
| 7545b2e4ef | |||
| 5448702a7d | |||
| 2408bcbeff | |||
| e707d91e7f | |||
| e2b6bc6502 | |||
| b805374a6c | |||
| 6e1a5b4348 | |||
| cadf6afb5f | |||
| 3c62fe2ca1 | |||
| 02139450c6 | |||
| 3ae72c8944 | |||
| a189348b36 | |||
| 8f494991e2 | |||
| f74d2c3ca1 | |||
| c0389fa4b1 | |||
| 9c557a0391 | |||
| 96878f24de | |||
| 083c4ddae9 | |||
| ba76c8b1db | |||
| 9bca15ae7e | |||
| ffedae0373 | |||
| dcab2e1f8f | |||
| 92f4282674 | |||
| 8baa056fab | |||
| 5011f4c137 | |||
| 1a031f16bd | |||
| 5db4c05544 | |||
| 0f94292ab7 | |||
| b701771dfb | |||
| 447ea18d95 | |||
| 756cc0d266 | |||
| 0297114faf | |||
| dddeafe6cd | |||
| 6f5b8f5575 | |||
| 9ba99d5ceb | |||
| 5306f8ba62 | |||
| ae10e4fee8 | |||
| c8f74d3c26 | |||
| a3c340f787 | |||
| 875298fe88 | |||
| ab47259534 | |||
| 60eb6b9bbf | |||
| 4735734404 | |||
| 4060e7ddcd | |||
| 730f40956f | |||
| e04b86218d | |||
| 12aedaf543 | |||
| bac55774f8 | |||
| 14c0d453e9 | |||
| b23fc6edc5 | |||
| c48aebcb0b | |||
| 97acdb0e2c | |||
| f969f262b8 | |||
| ae50a1abd8 | |||
| 553b65556a | |||
| 3ad95e1365 | |||
| 86ab67e70b | |||
| 3bde7a9cac | |||
| a4a9662c94 | |||
| 79a56c2d20 | |||
| d3662b8240 | |||
| dbc5db9727 | |||
| 5b5a63114c | |||
| a6912929a7 | |||
| 8d6976c770 | |||
| 9dccd21cee | |||
| cb9e5146bf | |||
| 882636de8f | |||
| 531f3282d9 | |||
| f88ca57d97 | |||
| b2c24a0438 | |||
| 37ce3183ca | |||
| 2c0bfb9904 | |||
| c41154a200 | |||
| 7f90391ecd | |||
| 1a7a2b13aa | |||
| 6c79b8a85e | |||
| 765b8fbef7 | |||
| 8e536d1d2f | |||
| 68315a3fb2 | |||
| 6fb5a7f971 | |||
| cc7ce44f47 | |||
| 53bfbbc5ef | |||
| 9601b88b41 | |||
| f22ddd0405 | |||
| 1a53d5338f | |||
| e7c33d7e10 | |||
| d52101d25b | |||
| 6f677b5638 | |||
| 4faa7fa8c0 | |||
| 20614acb86 | |||
| c393f6fd81 | |||
| 9ef6aaa406 | |||
| 43dce16fbd | |||
| 437f87013a | |||
| 380b41f695 | |||
| 2d5dca3fb5 | |||
| b6cfbee102 | |||
| 5681ff2283 | |||
| 332e64c9ab | |||
| beb6d9d066 | |||
| e56e83161b | |||
| e08f614d11 | |||
| 10e368c403 | |||
| 597aedc2af | |||
| 07e48aa071 | |||
| c5c78a2b84 | |||
| 9104e2f7c3 | |||
| 27fd1faa9c | |||
| 6203e3da75 | |||
| b6037d7525 | |||
| 51fe851c5a | |||
| c0e414bdc3 | |||
| 5842b6251d | |||
| bfecae7d61 | |||
| 4f9617aa2f | |||
| fd7607f5b7 | |||
| 64a8de7a32 | |||
| 4a9d6e0db6 | |||
| 4d718f9a12 | |||
| 2342a99405 | |||
| 4f6369fa27 | |||
| e8db3de122 | |||
| b658e28f2e | |||
| b919472f42 | |||
| d7d6888f63 | |||
| 54e77f72f4 | |||
| 3e0003095b | |||
| 095ab828b6 | |||
| 06345099eb | |||
| 5cabea8577 | |||
| 42bcdb8af8 | |||
| bfe2b88622 | |||
| 377683c4e3 | |||
| 457f123f69 | |||
| 4b87c74f41 | |||
| 522c5785a2 | |||
| 76c395d613 | |||
| 51811132a4 | |||
| ea231fb0cc | |||
| 9574ed4812 | |||
| af2299f417 | |||
| 945b482b00 | |||
| f9934095b3 | |||
| aa02d2e729 | |||
| ee76dddf2f | |||
| fc56c1406a | |||
| 7ee70b24ee | |||
| 3745231f51 | |||
| 353a3ea442 | |||
| 124d1b7078 | |||
| 42d111aac9 | |||
| 00ebc27069 | |||
| 4145944b1b | |||
| a89a4fbec5 | |||
| 0a7a2512d4 | |||
| 6fbde6389d | |||
| a656d45a6d | |||
| ab519020fc | |||
| 6c311c76e3 | |||
| 5172400803 | |||
| c253e655b1 | |||
| 0ecf9c1be1 | |||
| 05a7bbb4e3 | |||
| 7f8491a1c2 | |||
| 7f72e82ceb | |||
| af136f324d | |||
| 4f96f35d9a | |||
| 7b3c1f2b54 | |||
| 21b7b68f50 | |||
| 6f9a511874 | |||
| ad90f9c95e | |||
| da7549eeda | |||
| 92ed974e4b | |||
| 15371ec064 | |||
| 4bfaaa72ce | |||
| e59794f5e0 | |||
| 1f9f1ae166 | |||
| 9a9371301c | |||
| 9921618c12 | |||
| 33399b5e2a | |||
| da14410fc7 | |||
| 6f8c012394 | |||
| daf921accf | |||
| c67afa7e1e | |||
| 2e0f657585 | |||
| 7699e58bc3 | |||
| 4bbfbdb9e6 | |||
| f60bde7fd9 | |||
| 1ad3c5a5c8 | |||
| 747544bb58 | |||
| 0d1bd752a4 | |||
| 88d88ec8d3 | |||
| 670310de15 | |||
| df8cab4b07 | |||
| aee9bb9267 | |||
| d7c07fc65f | |||
| 3ff7ff05ab | |||
| bf2a7582fa | |||
| ef412b28ec | |||
| 2a92420bbe | |||
| ba89b2e994 | |||
| 0718d88f7a | |||
| a64df8a687 | |||
| f33020e2b8 | |||
| 0318b332bb | |||
| 09ae9d0ce3 | |||
| 6fe107601e | |||
| 69eabe4e85 | |||
| 0289bf5756 | |||
| 6f45325d9d | |||
| d5cf65f4cb | |||
| fb853a2bd3 | |||
| e0a586b311 | |||
| 55c8b8f1dd | |||
| 8594723a0d | |||
| 761af13270 | |||
| 7f756ce8ca | |||
| 21a8f3029e | |||
| 4f92fdced8 | |||
| 7360a9d2e1 | |||
| b081dcf6d5 | |||
| 2856587aca | |||
| ea318af65f | |||
| da62cc8f98 | |||
| 4a835d3f16 | |||
| 84102dd50e | |||
| 32f00afa8a | |||
| 3ae88f984a | |||
| 96237c7599 | |||
| f90faa4732 | |||
| a7fe9ee6d9 | |||
| 9eb7022336 | |||
| 0a79c5d945 | |||
| b384252c7c | |||
| 47f900ab76 | |||
| 50643698c2 | |||
| 8d5fb1ef0b | |||
| 354c07f2bf | |||
| 11f56bfd1c | |||
| a4a8bcdaa9 | |||
| 35fc57e8d1 | |||
| a410bf441c | |||
| 71ffa13167 | |||
| bcde604690 | |||
| 29d98796fb | |||
| de0fbd7188 | |||
| da8074d1e0 | |||
| b671133f88 | |||
| 6797dfa251 | |||
| b152cf9c36 | |||
| 0b8bea393e | |||
| 674801c8b2 | |||
| 6cd5821e5f | |||
| d4165ec2d0 | |||
| 82ecf0f5d1 | |||
| 9c56f148e4 | |||
| b68332afc0 | |||
| 22c2df11f8 | |||
| 978c20d72a | |||
| bacfe7218f | |||
| c73a1123d2 | |||
| 7dd61a06e2 | |||
| 5f54cfa6ed | |||
| ac6cd878af | |||
| 821647cef1 | |||
| 03301518f0 | |||
| d9a98e9eb2 | |||
| ef552af054 | |||
| 46edbbae74 | |||
| 31a9490210 | |||
| 21180816be | |||
| 736c71eefc | |||
| c75c5446f7 | |||
| 6422609150 | |||
| bf3204992e | |||
| 6d6fe9e1d6 | |||
| eaeedd5356 | |||
| 34d14846a1 | |||
| d367166e77 | |||
| dba1468e4d | |||
| e5af41b703 | |||
| 48d44487da | |||
| 1bd0adbc50 | |||
| 9b8c079d79 | |||
| efece7733f | |||
| 5779242f22 | |||
| 004a49c4e4 | |||
| 80f4f51b02 | |||
| f3c818a48f | |||
| 1e67c0090d | |||
| 0126d24242 | |||
| ccdb391ccc | |||
| 228f4a6db9 | |||
| 5d8314d13b | |||
| 303b4b826b | |||
| cee76ddd53 | |||
| a2c3f52ab4 | |||
| 9cbce4ff14 | |||
| 0d6a6fa797 | |||
| 16499fe23e | |||
| 31947c848a | |||
| 976a29b7d7 | |||
| 701f4853b5 | |||
| 9b6cacda0e | |||
| ddc63bfa91 | |||
| 931ea00e22 | |||
| 2cdcbb3784 | |||
| c2c1f3377a | |||
| c4359a3c81 | |||
| e86cdc6764 | |||
| b034fa5cf5 | |||
| 8c6bb7db26 | |||
| 94400aa808 | |||
| 23fdfc5a98 | |||
| 2f8f579430 | |||
| c392029a11 | |||
| bf5c49378b | |||
| 557caef8e5 | |||
| 067465ab35 | |||
| 3c26ebdaf2 | |||
| daf5aa316f | |||
| aa94cce2ad | |||
| f09533c742 | |||
| e88f40793f | |||
| 76bfc09aa5 | |||
| edfd8e285f | |||
| 441e4d45b1 | |||
| 313dacd956 | |||
| 9d2b757bc7 | |||
| c9802b78d0 | |||
| 6fcaf7fb5d | |||
| 9ca8ec4ce2 | |||
| 29e66327ee | |||
| a87904f2ff | |||
| 42d9d2372d | |||
| 68e0da1133 | |||
| cb34c43ef4 | |||
| 6042e7d337 | |||
| 31a767c944 | |||
| 87961bac58 | |||
| c7c1c45008 | |||
| fdab54a775 | |||
| ba03bf9d4f | |||
| 17fb3dcdb5 | |||
| f2b7d30a7f | |||
| 429b724cf2 | |||
| 2c4e7c4f96 | |||
| 7a111ab9b3 | |||
| 2f1b612e9e | |||
| a5b8a333d6 | |||
| 908ac4faea | |||
| 2a207fd613 | |||
| ee61b6d24b | |||
| 954a4330ee | |||
| 786a6c16a3 | |||
| 97c9269215 | |||
| 0cc0b57e33 | |||
| dad867a356 | |||
| ab5840dd54 | |||
| 5fd85d7052 | |||
| 544f99c09b | |||
| 6338d9f3f3 | |||
| 45643e8369 | |||
| c872cebb8f | |||
| 5fa4dd2884 | |||
| c039c70e3e | |||
| 434746aa99 | |||
| 2f61b224de | |||
| f2ea1367e2 | |||
| d287fa44df | |||
| b2eb98a66c | |||
| 732b123342 | |||
| f66d40f28b | |||
| 15766d0f86 | |||
| bf99e0ce2e | |||
| d03ae73285 | |||
| c39837faba | |||
| 28714b06b8 | |||
| 5cfd16e6a0 | |||
| 685b7456b6 | |||
| 21e8b9880d | |||
| 341c3ef6b9 | |||
| 1d2570c912 | |||
| 1e071d5ce5 | |||
| 2538638c9d | |||
| 3a28151b09 | |||
| 7a0fe04768 | |||
| cc95c38ab5 | |||
| 9707e8793b | |||
| 70d3aef8b3 | |||
| a8f2c87e38 | |||
| 148454d392 | |||
| 1075e35bca | |||
| 94b2ff2e21 | |||
| a20c8918f8 | |||
| f8193f7354 | |||
| 299b72eac3 | |||
| 4c71cab973 | |||
| 70ebfa8ee0 | |||
| 66e122f7e7 | |||
| 1a9a72adc0 | |||
| 5d86da626b | |||
| b29d172030 | |||
| e2af49a323 | |||
| 3e1b56a266 | |||
| 6fb964852f | |||
| be1e49e524 | |||
| 4925fe4857 | |||
| e8d865d0ab | |||
| 9a5cc4cf97 | |||
| f29047f723 | |||
| 44c4f17f32 | |||
| 32dcc50c94 | |||
| 9525363bc8 | |||
| 82f67bdb6c | |||
| 26bf8ceab9 | |||
| 20c0b4487c | |||
| 6d1003dcbd | |||
| 40e7c8fdbe | |||
| 20025333fa | |||
| f1c21a6fba | |||
| 6e9554d62d | |||
| d0a920b8d9 | |||
| 5b75ac1d1c | |||
| 62fd857b83 | |||
| a049cef651 | |||
| 7231bf0aad | |||
| e09c412139 | |||
| e63646c9a1 | |||
| 182fd7d229 | |||
| ce3650bc21 | |||
| 10b4bf929f | |||
| 3ccc05d4c5 | |||
| a1e6f930cb | |||
| edcb48f84f | |||
| f2eda02f26 | |||
| 289fac1e7e | |||
| 5e7c547670 | |||
| a7c34ca3b2 | |||
| b90b817e4c | |||
| f43fcde742 | |||
| 3743d63692 | |||
| d6407933f8 | |||
| c2429654c6 | |||
| 666552d0fb | |||
| 7d791000d9 | |||
| e1222778fe | |||
| a9c0c56e7c | |||
| fb8ee96b21 | |||
| d2620cdf1d | |||
| 6db5bb1cbf | |||
| e5211d1409 | |||
| 4ac7e29909 | |||
| 78d47c4035 | |||
| a2b8e8f7c7 | |||
| 8bbe3d3cb3 | |||
| cd763efb15 | |||
| 741b60485c | |||
| 355a52d100 | |||
| 873d5da2cd | |||
| ce6c6c1cc1 | |||
| cd47aaba94 | |||
| 52f2a75ec5 | |||
| f6ce5f91a2 | |||
| 642309520f | |||
| 2071ae9e54 | |||
| 93d767c9d6 | |||
| 4cd0e05996 | |||
| d3f554db92 | |||
| 808d6fb273 | |||
| 7e60713649 | |||
| 1be8c24235 | |||
| 7c98a52133 | |||
| 55e62f3831 | |||
| 3185a17b25 | |||
| d1f542efe9 | |||
| d2eecdef90 | |||
| 5af93abbb9 | |||
| 034115a515 | |||
| 85a3f5e578 | |||
| 66beb30d93 | |||
| 6b4c6ab942 | |||
| f683cbe2a2 | |||
| 3c43f72028 | |||
| 99dc9a08c0 | |||
| ac98586425 | |||
| 15af787950 | |||
| e932c2c473 | |||
| 884634a2d7 | |||
| 2f7ecdfae8 | |||
| cfe399e44f | |||
| 1e80c62701 | |||
| 4972dd05ee | |||
| beb4b914a0 | |||
| 1970ccb13e | |||
| 361933994f | |||
| 9b18209d24 | |||
| 2f140c168e | |||
| 2c0d545fe7 | |||
| f1f9c8f902 | |||
| cd4b272643 | |||
| 6ebf755e14 | |||
| e4d75613b7 | |||
| cb1229a8bd | |||
| 462e12dd6e | |||
| 636a0379aa | |||
| 13f2ade9f4 | |||
| 630c6b7342 | |||
| 6081781a5f | |||
| 228379a8a6 | |||
| e57d2e73a9 | |||
| 401e6fc25f | |||
| 502a4d15df | |||
| e1bcbf79b1 | |||
| e38465aaf3 | |||
| 4494bd97cf | |||
| e44bc62325 | |||
| 3bc5b1c945 | |||
| 03380eba45 | |||
| 527e82031e | |||
| 662188485e | |||
| 49a109ae85 | |||
| 0fbd1c3fca | |||
| c254bf6b48 | |||
| de63a47c64 | |||
| 7723f03915 | |||
| c154ceffcf | |||
| b183308da0 | |||
| 25cd89b8a7 | |||
| 0e4ce01776 | |||
| 9d2eee882a | |||
| a4a6ab8f3a | |||
| fd93d3740b | |||
| 7e390bd31d | |||
| e33bce7436 | |||
| 2283aba713 | |||
| 6c922ec9df | |||
| 72a4ef9fff | |||
| 4577fda4a9 | |||
| f97e769d4b | |||
| 121b880783 | |||
| fdae11f7cd | |||
| f3d38a6045 | |||
| 3a747b2b1d | |||
| dd50035a47 | |||
| 7abf0f3c76 | |||
| 3501c38deb | |||
| 8e64ba8032 | |||
| 138dbed251 | |||
| 8d400e9631 | |||
| 13cf7a7e2d | |||
| 0aeba98fb0 | |||
| 89197df6b0 | |||
| 0f4ef9d2f8 | |||
| b58199281b | |||
| 9ecff6794d | |||
| 478abf6d1b | |||
| 1a22b9233d | |||
| 78a35e5d1f | |||
| 5fa4be483b | |||
| 9345412f73 | |||
| dcafb14238 | |||
| 31c6bf3a64 | |||
| eb03cb5857 | |||
| 61d9a21636 | |||
| b5636af514 | |||
| c53ea02ff0 | |||
| 60344e3c30 | |||
| a28377d8f8 | |||
| 9c3d2ba3df | |||
| 8e00d3e04b | |||
| 96514d61e2 | |||
| 438c946bad | |||
| daa8cb1748 | |||
| 80e14568c6 | |||
| 6959499d37 | |||
| bee8097546 | |||
| 6a5a59f8b8 | |||
| 3302990837 | |||
| a19de21333 | |||
| bded9127f8 | |||
| 713dfd087e | |||
| 70fb3b3711 | |||
| d0ba21ac58 | |||
| 6cadc8bdca | |||
| 079bc6f93c | |||
| 18c827ab90 | |||
| 5894e4b55f | |||
| c7dd53b6eb | |||
| b0fce4f363 | |||
| 4756009d6b | |||
| 9826f3fd70 | |||
| 8c495a048e | |||
| 7cc43bc9ce | |||
| 38906db816 | |||
| 6aa7fa60b4 | |||
| 0ff493cd53 | |||
| c9eee95cbb | |||
| 4be9c72060 | |||
| 0f6da8e25f | |||
| 87fe5ec2e5 | |||
| a0eee3a95f | |||
| a7bb6181c7 | |||
| 0c00171d8f | |||
| 1df2aa72c0 | |||
| ff2ec0ad14 | |||
| 9939434cb3 | |||
| 8bfad892bc | |||
| 89eda187be | |||
| 6fb7fc7825 | |||
| 1e8c7b3512 | |||
| 7def74d2c3 | |||
| b062e0b2ca | |||
| 558f523e48 | |||
| d4d9749431 | |||
| f19fb0ef2a | |||
| 4f46e7e82f | |||
| a5299f0cae | |||
| b292f47d47 | |||
| 29bd1fab5c | |||
| 558412cfb4 | |||
| 985a2ab186 | |||
| 8455a67750 | |||
| 91fe244da8 | |||
| 9613cac11a | |||
| 5d8366015c | |||
| c5c688850d | |||
| 7b1ffbab12 | |||
| 060e2915f1 | |||
| 7080222fbc | |||
| 5e3a97272a | |||
| 2838d8eee2 | |||
| 504939a438 | |||
| 0ab4e31d5e | |||
| 0c6cd3b562 | |||
| de0d112630 | |||
| c1825d8ae0 | |||
| 2d2a8248c4 | |||
| 2bb4a134b2 | |||
| 8feabbc489 | |||
| ca88a0eaab | |||
| 6cccc16031 | |||
| d375105de4 | |||
| 4e8cb0568e | |||
| efbef12080 | |||
| 9c6155367b | |||
| 1366e89cf6 | |||
| 1a98039264 | |||
| 9ba01a2cbe | |||
| 6180860ac0 | |||
| c8f5408f27 | |||
| a776c83557 | |||
| c4e5369796 | |||
| 115c5dd71c | |||
| f140222dbc | |||
| 7a84c81a70 | |||
| b741c3e14d | |||
| e93e4cc115 | |||
| 424e53c78d | |||
| 2c42ec3559 | |||
| e513107f75 | |||
| dac52a879a | |||
| 85d2d0b95b | |||
| 4100263393 | |||
| 2d61532dd1 | |||
| ed89a27d47 | |||
| ea1af2da53 | |||
| 9cbb81f347 | |||
| 67a7d20f6c | |||
| 2626e044ca | |||
| b0ee388986 | |||
| df0644f85b | |||
| 9f20eda00d | |||
| ec8b7b7db9 | |||
| 7ea66dc02e | |||
| b4817546df | |||
| 18c7a5f84b | |||
| ee7dc1dd08 | |||
| 58da733531 | |||
| 8bf0e24bcd | |||
| 09783ee1a0 | |||
| dbadb55e7c | |||
| 50f0327f59 | |||
| 34c2475515 | |||
| b9b8bd9943 | |||
| b78837cbab | |||
| b6264da972 | |||
| c439c7c8a8 | |||
| e5b6135e44 | |||
| 61737d0288 | |||
| 2a17c5c133 | |||
| d536458280 | |||
| 55553eb6a6 | |||
| eeb74643d0 | |||
| 3d815b374e | |||
| 8150af5889 | |||
| 7d33354acb | |||
| 1739bc6000 | |||
| cd6bbc69a4 | |||
| 5a7c9fd090 | |||
| b786c88f52 | |||
| 0b97c2cecc | |||
| d4ed987857 | |||
| 38f64783ac | |||
| f6879ac094 | |||
| 421260a80a | |||
| 5b33b7ffcf | |||
| 9d0a4b4b88 | |||
| c256c96c45 | |||
| 57a45d5ea2 | |||
| a65f1e0776 | |||
| da7a687499 | |||
| 161bb809b9 | |||
| b8a5ada5dc | |||
| ef569aef18 | |||
| 123a4bf590 | |||
| f859e372c6 | |||
| a1cc2dbaff | |||
| 4c8da3b96a | |||
| 0f7e01e8a2 | |||
| 77423a813c | |||
| 9f78b7d9ba | |||
| ebc329fc5e | |||
| 0659b8993d | |||
| 66c9b40ead | |||
| 9fdadcc296 | |||
| 5d0d75ebb1 | |||
| 4c856ab403 | |||
| 427b60132a | |||
| 2bc0e18b9e | |||
| 4d61783e18 | |||
| ff1cd00c96 | |||
| ab9f16505e | |||
| a68cb7a986 | |||
| d83984f8df | |||
| 76f4613320 | |||
| 8932f46900 | |||
| 26be460041 | |||
| f9d4206bab | |||
| b654198e3f | |||
| bdd862e649 | |||
| e475e43fef | |||
| 961ca9270e | |||
| 38432a18c1 | |||
| 62c724b5c2 | |||
| a93099fd6e | |||
| b5395afe74 | |||
| a82231859d | |||
| 8e68230f4a | |||
| bbbc06583a | |||
| b254472dcd | |||
| c136ff102b | |||
| f02640be35 | |||
| a92cb52eb5 | |||
| b3ab8f6cef | |||
| 420e0246ed | |||
| 346e294b21 | |||
| 47fb1b407f | |||
| b4053047ff | |||
| 42d05fd8e4 | |||
| 396cf5c586 | |||
| c3e8bba822 | |||
| 522727cd8f | |||
| ef205bd622 | |||
| c74fff6da9 | |||
| 5802fe4fd3 | |||
| 28ade30d2b | |||
| 1b85ce0d78 | |||
| e40fe55eaf | |||
| d93cdbef41 | |||
| b25daf12fa | |||
| 41f59f0518 | |||
| 648e0ee567 | |||
| dd33c6ab5e | |||
| 6181e2968d | |||
| fe3ac88ec6 | |||
| b9cf98eee8 | |||
| 13c5529a41 | |||
| 84f1bee4f6 | |||
| f78515cc07 | |||
| b263d50b76 | |||
| 8cd1a0ce02 | |||
| fe53a6ca2c | |||
| 783910be50 | |||
| edd9bdcadc | |||
| 63193c5324 | |||
| 443a13afcf | |||
| 316a94a6cf | |||
| f00436b136 | |||
| 515dbb54fa | |||
| d06b8f7949 | |||
| 135ad2b73e | |||
| 8d4195a024 | |||
| aee17d2c7a | |||
| f9c8f37cec | |||
| e3535391dd | |||
| e81161ca7f | |||
| 6b02b75a87 | |||
| fdd783c19c | |||
| c2d84b8734 | |||
| 013ac85a70 | |||
| d17c8b8be7 | |||
| f81f8def1c | |||
| 6716bc68c9 | |||
| 0d55eb1da4 | |||
| c4decb0afb | |||
| 21fac37597 | |||
| d437c68403 | |||
| 59076e9eb0 | |||
| 874d49c6a5 | |||
| f9389de27b | |||
| db387ad28b | |||
| 75dba7193a | |||
| 0265e9d3ec | |||
| eb6e54a0f7 | |||
| 951720def9 | |||
| 500659be4a | |||
| f6f54da305 | |||
| d464a5fbd0 | |||
| 49b10a7d7e | |||
| dbd84c1c09 | |||
| e17dc142a5 | |||
| a82509bebb | |||
| e001d23457 | |||
| d0b28d7f59 | |||
| 39e0d9d9f3 | |||
| 57191fa222 | |||
| 9328e7a2f8 | |||
| 47c3f7320c | |||
| 5fc0f9fa3d | |||
| ee7d99bc4c | |||
| fd86281a7e | |||
| 947ece00f1 | |||
| 287133e35d | |||
| ca9ab6a6f2 | |||
| 41dcf6577a | |||
| a5d2df9626 | |||
| c6fefc5cb0 | |||
| f301feb537 | |||
| d58a893651 | |||
| 3cdad6fd77 | |||
| 96e14b2b5e | |||
| 0e6e85825a | |||
| af516242d3 | |||
| df58424dd7 | |||
| eddda2f005 | |||
| 820f8237d1 | |||
| 63358e6848 | |||
| 8c6299a7e7 | |||
| 07e8de57de | |||
| f3221ec315 | |||
| 7032f8e26b | |||
| 765e437d6c | |||
| fa0ac035ac | |||
| 26223ccc0a | |||
| a9e333b73a | |||
| eb3d09c989 | |||
| 8d96055ba1 | |||
| 8bed4fa2fa | |||
| 82adb92d06 | |||
| cb3c4e8d2c | |||
| a5985b362a | |||
| a67279db88 | |||
| 73ed495b72 | |||
| 3bed78f742 | |||
| b2737b4968 | |||
| a52f5da87a | |||
| 4f56915f15 | |||
| 9d7ca81508 | |||
| 00a75a556e | |||
| 7ee2f72b8e | |||
| b77d9aa80a | |||
| d3acb7d47d | |||
| 8b90b4c9ef | |||
| b96ddf7162 | |||
| 11011f3804 | |||
| 8bd83207dd | |||
| 0eb5118459 | |||
| 758d2dd9dc | |||
| c15d1b1e22 | |||
| 2066840a40 | |||
| 93079c5c8e | |||
| c435feeebc | |||
| c5f6db0b73 | |||
| d28e3e2ccc | |||
| 4809ec101d | |||
| a93b921103 | |||
| 527f6a5628 | |||
| a369758eda | |||
| f20067b323 | |||
| 1d7484ef4d | |||
| a9b0a55f20 | |||
| cbcd998803 | |||
| f9fb04bd60 | |||
| a86149b8d7 | |||
| f00b32075a | |||
| caf76abe3b | |||
| bf8c4ca6da | |||
| 1fbe41b725 | |||
| 612f2fba77 | |||
| 64cd2f49dc | |||
| ba22ed2733 | |||
| dc77d56573 | |||
| 74717ef48c | |||
| 250a3cbf58 | |||
| f0fcfe548e | |||
| 0700b1c446 | |||
| de3a1d23b6 | |||
| 96e3441556 | |||
| 27927c506e | |||
| bab8af4572 | |||
| 48c49fc73e | |||
| 7fb02f4788 | |||
| 40028744ba | |||
| 38b1cdbbad | |||
| bb9dd1754f | |||
| 9b1d75d47f | |||
| 2bd848fa97 | |||
| 3bf40572e2 | |||
| 87893363e5 | |||
| bc27a19534 | |||
| 5970e9c5a4 | |||
| a41fdac13b | |||
| 78ee0483d3 | |||
| e7230eb3c2 | |||
| 708b5925a7 | |||
| 62ec73c673 | |||
| c820ec91c6 | |||
| c42beaca66 | |||
| 2baad02a0c | |||
| 8dc23d2621 | |||
| 9d5cc85757 | |||
| 1dea676ef9 | |||
| db9d4e05b7 | |||
| 38d492e9da | |||
| 207ad95840 | |||
| bf62e20e46 | |||
| b0b886ac7c | |||
| e8f9bc666d | |||
| dc30065520 | |||
| 8eae22cc7e | |||
| 2c565e9401 | |||
| 7e69cf0e17 | |||
| dd48d7ffd6 | |||
| 9bf407be6c | |||
| d73dd9310c | |||
| e7681c7d6e | |||
| bb692cced9 | |||
| e094ed3d0d | |||
| 9047fbb3ee | |||
| ef5d8168f0 | |||
| 6a434c458d | |||
| 981f444609 | |||
| d2d5f003d8 | |||
| a900c23090 | |||
| 9b152628fa | |||
| 713cefc21a | |||
| 03c3467eea | |||
| d9ccdd41b2 | |||
| ef38aaf041 | |||
| aa3e22d814 | |||
| 4a99cb1166 | |||
| 17a5131ced | |||
| 2dab6bcaf0 | |||
| 20eda6a1d8 | |||
| 5d4a7a4155 | |||
| 082fdeebdd | |||
| bdb3c80ad7 | |||
| 182175254e | |||
| a42c5824af | |||
| 9914274d42 | |||
| a68b8781e7 | |||
| b8e7b9b7fd | |||
| 407b478637 | |||
| e8e840ec44 | |||
| 1cd4a31404 | |||
| d2d95a1f6b | |||
| c0ecfe2e18 | |||
| 6070d50a58 | |||
| c276cbac0b | |||
| 940f3901be | |||
| adc99e8871 | |||
| ab105e16e8 | |||
| 82081b9609 | |||
| c0b6398de2 | |||
| 5e9c0d9f11 | |||
| 1f52dda56d | |||
| dd57cacd3b | |||
| 16477a9f5a | |||
| 0407f270ad | |||
| 135e2ef77d | |||
| d59c619729 | |||
| d552a18c0b | |||
| d63e580cd3 | |||
| c6fd6295a0 | |||
| 65c3e8ee51 | |||
| 9aee938e30 | |||
| 9e586ae6ef | |||
| 8f58859693 | |||
| 3075814e81 | |||
| e62b1ccfff | |||
| ee1ee1e901 | |||
| 03a97f30a8 | |||
| 46fcfa88ad | |||
| db75826e59 | |||
| 53397d2609 | |||
| 9ca470ffb6 | |||
| f549243c10 | |||
| 45868e4bde | |||
| 35eaf781f3 | |||
| 07c5b30dc4 | |||
| 3144717b0b | |||
| 1dc340872c | |||
| 804059f87d | |||
| 196792810b | |||
| 3fed489258 | |||
| 5e6288ab9b | |||
| 44fdaa6c2b | |||
| aa7585563b | |||
| f7d40c6d70 | |||
| 1e51c5ce9e | |||
| be86a5d950 | |||
| aeaf45fa2b | |||
| 5d8649ffe5 | |||
| 1aa19ce707 | |||
| 964ef49a78 | |||
| d478f39800 | |||
| 56e1e51279 | |||
| f38381eaf0 | |||
| b7eb79d4f7 | |||
| 3573127bf1 | |||
| 480aaf0d0c | |||
| ad7ddf285c | |||
| 61d8d14fc2 | |||
| 486a2d98c2 | |||
| b94d09696e | |||
| bea7c28af2 | |||
| c2c4d52026 | |||
| 13cbf71c0f | |||
| aa3d7ab6b7 | |||
| a4afa057e3 | |||
| 2da6bba041 | |||
| 9d2253a4a2 | |||
| f4a88623af | |||
| 842e6cff43 | |||
| 8f44e57c72 | |||
| 98372d924d | |||
| 42caa77b3e | |||
| 1232a7c0ec | |||
| 4ab3c355c5 | |||
| 2c880568dd | |||
| 087f29d491 | |||
| 4e294699d3 | |||
| 7abaebe496 | |||
| 8ab0b78e6e | |||
| 079d20c086 | |||
| b95a3275ff | |||
| 684c424131 | |||
| 9b5140f0c2 | |||
| f6b5a1e580 | |||
| 0bb055b391 | |||
| e2549f0317 | |||
| 885b58a0ab | |||
| 0bd1a10753 | |||
| 948f967a16 | |||
| 08c6a7f884 | |||
| beb98813d9 | |||
| f50f3892f2 | |||
| 06140a9062 | |||
| f94b89381f | |||
| d46d988b4d | |||
| 248cffd323 | |||
| 92294e5b16 | |||
| 35721e7fa6 | |||
| c44fe26cdf | |||
| cd22818240 | |||
| a4c0e367a8 | |||
| 4f0b73c769 | |||
| 21a8f9622a | |||
| e1684ce8f1 | |||
| 9667f34388 | |||
| 8623773edc | |||
| 749f8aaec7 | |||
| ea48364d95 | |||
| 6fd0ed8711 | |||
| b91718cd7c | |||
| 53f8857795 | |||
| d60db93bf2 | |||
| 81dabdf097 | |||
| cf06bb9f49 | |||
| 96c144ca74 | |||
| 52a0031d16 | |||
| 00fd676adc | |||
| e6e8371742 | |||
| e18ce6a09e | |||
| d02d34cbaa | |||
| eb24d871d0 | |||
| acaeb2129e | |||
| 28e4e88794 | |||
| 653e3473b3 | |||
| 75454834f4 | |||
| 4303534396 | |||
| f6127af058 | |||
| 9e8ad7707c | |||
| f1accd7fe8 | |||
| 7919b37453 | |||
| 54d2b26acd | |||
| 01ed2d6086 | |||
| 9680fb6a68 | |||
| b4e91609d3 | |||
| 0cd6697fd4 | |||
| 12ad9fa483 | |||
| 06767ff39a | |||
| 2c85624583 | |||
| fc10e3e95d | |||
| a162781f89 | |||
| 632fac4558 |
1484 changed files with 188413 additions and 106530 deletions
|
|
@ -7,7 +7,7 @@ tmp_dir = "tmp"
|
|||
bin = "./tmp/nidus-sync"
|
||||
cmd = "go build -o ./tmp/nidus-sync ."
|
||||
delay = 1000
|
||||
exclude_dir = ["templates", "static", "tmp"]
|
||||
exclude_dir = ["templates", "static", "cmd", "tmp"]
|
||||
exclude_file = []
|
||||
exclude_regex = ["_test.go"]
|
||||
exclude_unchanged = false
|
||||
|
|
@ -25,7 +25,7 @@ tmp_dir = "tmp"
|
|||
rerun = false
|
||||
rerun_delay = 500
|
||||
send_interrupt = true
|
||||
stop_on_error = true
|
||||
stop_on_error = false
|
||||
|
||||
[color]
|
||||
app = ""
|
||||
|
|
|
|||
9
.forgejo/workflows/golint.yaml
Normal file
9
.forgejo/workflows/golint.yaml
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
on: [push]
|
||||
jobs:
|
||||
golint:
|
||||
runs-on: nixos
|
||||
steps:
|
||||
- name: checkout code
|
||||
uses: actions/checkout@v4
|
||||
- name: golangci-lint
|
||||
run: golangci-lint run
|
||||
28
.gitignore
vendored
28
.gitignore
vendored
|
|
@ -1,2 +1,26 @@
|
|||
nidus-sync
|
||||
tmp/
|
||||
.env
|
||||
.sass-cache/
|
||||
cmd/geocode-test/geocode-test
|
||||
cmd/passwordgen/passwordgen
|
||||
/db/jet/jet
|
||||
districts/
|
||||
flogo.log
|
||||
lob/cmd/letter-create/letter-create
|
||||
lob/cmd/letter-list/letter-list
|
||||
lob/cmd/address-create/address-create
|
||||
lob/cmd/address-list/address-list
|
||||
/nidus-sync
|
||||
/nidus-sync.log
|
||||
node_modules/
|
||||
postgrid/cmd/send-pdf/send-pdf
|
||||
result
|
||||
stadia/cmd/bulk-geocode/bulk-geocode
|
||||
stadia/cmd/geocode-autocomplete/geocode-autocomplete
|
||||
stadia/cmd/geocode-bygid/geocode-bygid
|
||||
stadia/cmd/reverse-geocode/reverse-geocode
|
||||
stadia/cmd/structured-geocode/structured-geocode
|
||||
stadia/cmd/tile-raster/tile-raster
|
||||
static/gen/
|
||||
temp/
|
||||
ts/gen
|
||||
vite/*/.vite/
|
||||
|
|
|
|||
3
.gitmodules
vendored
3
.gitmodules
vendored
|
|
@ -4,6 +4,3 @@
|
|||
[submodule "go-geojson2h3"]
|
||||
path = go-geojson2h3
|
||||
url = git@github.com:Gleipnir-Technology/go-geojson2h3.git
|
||||
[submodule "bob"]
|
||||
path = db/bob
|
||||
url = git@github.com:Gleipnir-Technology/bob.git
|
||||
|
|
|
|||
12
.prettierrc
Normal file
12
.prettierrc
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
{
|
||||
"plugins": ["/nix/store/6kfm5qrd2bckffxphb5ylvbg3sz1657r-prettier-plugin-go-template-0.0.15-unstable-2023-07-26/lib/node_modules/prettier-plugin-go-template/lib/index.js"],
|
||||
"useTabs": true,
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["*.html"],
|
||||
"options": {
|
||||
"parser": "go-template",
|
||||
},
|
||||
},
|
||||
],
|
||||
}
|
||||
173
AGENTS.md
Normal file
173
AGENTS.md
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
# AGENTS.md — Nidus Sync Codebase Guide
|
||||
|
||||
This file captures conventions, patterns, and gotchas for anyone working on this codebase. It was produced during a lint cleanup pass (May 2026) to document lessons learned.
|
||||
|
||||
## Project Overview
|
||||
|
||||
**Module:** `github.com/Gleipnir-Technology/nidus-sync`
|
||||
**Language:** Go 1.24
|
||||
**Build:** Nix (`flake.nix`) + standard Go toolchain
|
||||
**ORM:** Bob (legacy) + Jet (new, partial migration)
|
||||
**Frontend:** Vue SPA (Vite) replacing Go HTML templates
|
||||
|
||||
The app serves two hosts from a single binary:
|
||||
- **Sync** (`sync/`) — internal dashboard for mosquito control districts
|
||||
- **RMO** (`rmo/`) — public-facing "Report Mosquitoes Online" site
|
||||
|
||||
Both are migrating from Go `html/template` rendered pages to Vue SPAs served by `static.SinglePageApp()`.
|
||||
|
||||
## Build & Lint Commands
|
||||
|
||||
```bash
|
||||
# Build everything
|
||||
go build ./...
|
||||
|
||||
# Run linter
|
||||
golangci-lint run
|
||||
|
||||
# Build a specific package
|
||||
go build ./api/
|
||||
go build ./platform/
|
||||
```
|
||||
|
||||
## Lint Helpers (`lint/error.go`)
|
||||
|
||||
The `lint/` package provides helpers for common error-handling patterns. **Always use these instead of bare calls** to avoid errcheck lint failures:
|
||||
|
||||
| Helper | Use for | Example |
|
||||
|--------|---------|---------|
|
||||
| `lint.Fprintf(w, fmt, args...)` | `fmt.Fprintf` to writers where errors are non-critical | `lint.Fprintf(w, "ok")` |
|
||||
| `lint.Fprint(w, args...)` | `fmt.Fprint` to writers | `lint.Fprint(w, "User-agent: *\n")` |
|
||||
| `lint.Write(w, p []byte)` | `w.Write(p)` — HTTP response bodies | `lint.Write(w, body)` |
|
||||
| `lint.LogOnErr(f, msg)` | Deferred `Close()` calls | `defer lint.LogOnErr(file.Close, "close file")` |
|
||||
| `lint.LogOnErrCtx(f, ctx, msg)` | `txn.Commit(ctx)` or other ctx funcs | `lint.LogOnErrCtx(txn.Commit, ctx, "commit")` |
|
||||
| `lint.LogOnErrRollback(f, ctx, msg)` | Deferred `txn.Rollback(ctx)` | `defer lint.LogOnErrRollback(txn.Rollback, ctx, "rollback")` |
|
||||
|
||||
**Key rule:** `LogOnErrRollback` silently ignores `"sql: transaction has already been committed or rolled back"` errors, which occur when a deferred rollback fires after a successful commit. Always use it for deferred rollbacks.
|
||||
|
||||
### For DB transactions, use this pattern:
|
||||
|
||||
```go
|
||||
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
|
||||
if err != nil {
|
||||
return fmt.Errorf("begin: %w", err)
|
||||
}
|
||||
defer lint.LogOnErrRollback(txn.Rollback, ctx, "rollback")
|
||||
|
||||
// ... do work ...
|
||||
|
||||
if err := txn.Commit(ctx); err != nil {
|
||||
return fmt.Errorf("commit: %w", err)
|
||||
}
|
||||
return nil
|
||||
```
|
||||
|
||||
### For HTTP handlers that render HTML:
|
||||
|
||||
```go
|
||||
if err := renderShim(w, r, errRender(err)); err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
```
|
||||
|
||||
## Architecture Notes
|
||||
|
||||
### Two hosts, one binary
|
||||
|
||||
`main.go` creates two `gorilla/mux` routers and supports three modes via CLI flags:
|
||||
- `-sync` — serve the Sync dashboard
|
||||
- `-report` — serve the RMO public site
|
||||
- `-all` — serve both (default)
|
||||
|
||||
Each host has its own route registration in `sync/routes.go` and `rmo/routes.go`.
|
||||
|
||||
### RMO package — all handlers are dead
|
||||
|
||||
**All** route registrations in `rmo/routes.go` are commented out. The file now only serves the Vue SPA via `static.SinglePageApp("static/gen/rmo")`. During cleanup (May 2026), all handler files were deleted:
|
||||
|
||||
```
|
||||
rmo/compliance.go rmo/error.go rmo/nuisance.go rmo/report.go
|
||||
rmo/district.go rmo/image.go rmo/quick.go rmo/root.go
|
||||
rmo/email.go rmo/mailer.go rmo/notification.go rmo/scss.go
|
||||
rmo/mock.go rmo/status.go rmo/water.go
|
||||
```
|
||||
|
||||
Only `rmo/routes.go` remains. **Do not add new Go template handlers here** — the RMO host is pure Vue SPA now.
|
||||
|
||||
### Sync package — partially live
|
||||
|
||||
Many route registrations in `sync/routes.go` are active. Files deleted during cleanup were those with zero active registrations:
|
||||
|
||||
```
|
||||
sync/admin.go sync/download.go sync/operations.go sync/pool.go
|
||||
sync/cell.go sync/intelligence.go sync/parcel.go sync/radar.go
|
||||
sync/communication.go sync/messages.go sync/planning.go sync/review.go
|
||||
sync/dash.go sync/mock.go sync/notification.go sync/service-request.go
|
||||
sync/signin.go sync/sms.go sync/text.go sync/tile.go
|
||||
```
|
||||
|
||||
### api/ vs resource/ — two handler layers
|
||||
|
||||
The codebase has two HTTP handler patterns:
|
||||
|
||||
1. **`api/`** — route registration (`api/routes.go`) + `http.HandlerFunc` handlers. Handles signin, webhooks (Twilio, VoIP.ms), media uploads, configuration POSTs.
|
||||
|
||||
2. **`resource/`** — typed resource handlers with `List`, `Get`, `Create`, etc. methods. Each resource has a struct embedding `*router` for URI generation. This is the newer, preferred pattern.
|
||||
|
||||
The split is not clean — some `api/` files contain substantial business logic. New handlers should use the `resource/` pattern.
|
||||
|
||||
### DB access — Bob vs Jet
|
||||
|
||||
Two ORMs coexist:
|
||||
- **Bob** (`github.com/Gleipnir-Technology/bob`) — legacy, used by most queries. Models in `db/models/*.bob.go` (103 files).
|
||||
- **Jet** (`db/jet/`) — new, generated queries in `db/query/public/`, `db/query/publicreport/`, `db/query/arcgis/`. Only 3 schemas partially ported.
|
||||
|
||||
The `db.PGInstance` singleton holds both `BobDB` and `PGXPool`. Jet uses PGXPool directly; Bob uses BobDB.
|
||||
|
||||
### db/prepared.go & db/fieldseeker.go
|
||||
|
||||
`db/prepared.go` contains utility functions (`pointOrNull`, `lineOrNull`, `queryStoredProcedure`, etc.) that are **only** called from `db/fieldseeker.go`. That file is **entirely commented out** (`/* ... */`). The 9 unused-prepared-funcs lint warnings are expected — do not delete them unless you're also deleting or uncommenting fieldseeker.go.
|
||||
|
||||
## Lint Cleanup Context (May 2026)
|
||||
|
||||
### What was fixed
|
||||
|
||||
- **errcheck (36→0):** All unchecked error returns eliminated using `lint/` helpers or explicit checks.
|
||||
- **unused (50→9):** ~60 functions/types deleted across ~30 files. Remaining 9 are in `db/prepared.go` (see above).
|
||||
|
||||
### golangci-lint reporting cap
|
||||
|
||||
golangci-lint caps unused reports at **50 items**. During cleanup, each batch of deletions exposed previously hidden items. If you see 50 unused items, there are almost certainly more hidden behind the cap. Delete the visible ones, re-run lint, and repeat.
|
||||
|
||||
### What was NOT fixed (remaining lint categories)
|
||||
|
||||
- **govet (26):** printf format mismatches, copylocks, lostcancel — some are real bugs
|
||||
- **ineffassign (9):** dead assignments that may indicate logic errors
|
||||
- **staticcheck (29):** deprecated `io/ioutil`, redundant returns, error string conventions, comparison always-true bugs
|
||||
|
||||
### Deleted by file count
|
||||
|
||||
| Directory | Files deleted | Reason |
|
||||
|-----------|--------------|--------|
|
||||
| `rmo/` | 15 | All handlers unused — routes commented out |
|
||||
| `sync/` | 20 | Unregistered handlers |
|
||||
| `api/` | 2 | `compliance.go`, `debug.go` — unused handlers |
|
||||
| `platform/` | 3 | `text/db.go`, `dashboard.go`, `publicreport/address.go` |
|
||||
| Other | 1 | `tomtom/` (prior cleanup) |
|
||||
|
||||
## Commit Conventions
|
||||
|
||||
Commits during the cleanup followed a consistent pattern:
|
||||
|
||||
```
|
||||
lint: fix errcheck in api/api.go debug log writes
|
||||
lint: remove unused code from sync/ package
|
||||
```
|
||||
|
||||
Each commit fixes one category of issue in a small set of related files. Build verification (`go build ./...`) was performed before each commit.
|
||||
|
||||
## See Also
|
||||
|
||||
- `CLEANUP.md` — broader cleanup roadmap (Bob→Jet migration, html/ package removal, etc.)
|
||||
- `HISTORY.md` — project history and architectural decisions
|
||||
- `README.md` — administration and build-from-source instructions
|
||||
390
CLEANUP.md
Normal file
390
CLEANUP.md
Normal file
|
|
@ -0,0 +1,390 @@
|
|||
# nidus-sync — Cleanup Tasks
|
||||
|
||||
This file lists code, files, and patterns that are remnants of older architectural approaches. These should be removed to reduce complexity, maintenance burden, and confusion.
|
||||
|
||||
---
|
||||
|
||||
## 1. Bob → Jet Migration (Incomplete)
|
||||
|
||||
**Status:** Bob is still the primary ORM. Jet was introduced May 2026 but only covers 3 schemas partially.
|
||||
|
||||
### 1a. Port remaining schemas from Bob to Jet
|
||||
|
||||
Jet-based queries exist for:
|
||||
- `db/query/public/` — address, communication, communication_log_entry, compliance_report_request, feature, feature_pool, job, lead, signal, site
|
||||
- `db/query/publicreport/` — compliance, image, image_exif, nuisance, report, report_image, report_log, water
|
||||
- `db/query/arcgis/` — account, oauth, service_feature, service_map, user, user_privileges
|
||||
|
||||
Still using Bob directly (not yet ported to Jet queries):
|
||||
- `platform/report/notification.go` (13 bob references)
|
||||
- `platform/background/background.go` (8)
|
||||
- `platform/arcgis.go` (8)
|
||||
- `platform/text/send.go` (7)
|
||||
- `platform/report/some_report.go` (6)
|
||||
- `platform/site.go` (5)
|
||||
- `platform/csv/flyover.go` (7)
|
||||
- `platform/csv/pool.go` (5)
|
||||
- `platform/csv/csv.go` (4)
|
||||
- `platform/text/report.go` (4)
|
||||
- `platform/text/phone_number.go` (3)
|
||||
- `platform/publicreport/log.go` (3)
|
||||
- `platform/mailer.go` (3)
|
||||
- `platform/email/template.go` (2)
|
||||
- `db/connection.go` (4 — bob.Tx types)
|
||||
- `db/prepared.go` (2)
|
||||
- `resource/review_task.go` (2)
|
||||
- `rmo/status.go` (2)
|
||||
- `rmo/report.go` (1)
|
||||
- `rmo/mailer.go` (1)
|
||||
- Plus many api/* files
|
||||
|
||||
### 1b. Remove Bob-generated models after migration
|
||||
|
||||
Once all queries are ported to Jet, delete the 103 `.bob.go` files in `db/models/`:
|
||||
```
|
||||
db/models/*.bob.go
|
||||
```
|
||||
|
||||
### 1c. Remove Bob-specific helper files
|
||||
|
||||
These are Bob-specific and can be removed once Bob is fully replaced:
|
||||
- `db/dberrors/` — Bob error types (still referenced)
|
||||
- `db/dbinfo/` — Bob type info (still referenced)
|
||||
- `db/models/bob_loaders.bob.go`
|
||||
- `db/models/bob_where.bob.go`
|
||||
|
||||
### 1d. Remove Bob from go.mod and dependencies
|
||||
|
||||
After all Bob code is gone:
|
||||
- Remove `github.com/Gleipnir-Technology/bob` from `go.mod`
|
||||
- Run `go mod tidy`
|
||||
|
||||
### 1e. Remove Bob codegen scripts
|
||||
|
||||
- `db/bobgen.sh`
|
||||
- `db/bobgen.yaml`
|
||||
|
||||
### 1f. Regenerate Jet output
|
||||
|
||||
The `db/jet/main.go` generator outputs to `db/gen/` but no output is currently checked in. Run the generator and ensure generated code is usable:
|
||||
```bash
|
||||
cd db/jet && go run .
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Go HTML Templates → Vue SPA (Mostly Complete)
|
||||
|
||||
**Status:** Nearly all Go template routes are commented out in `sync/routes.go` and `rmo/routes.go`. Both hosts serve Vue SPAs via `static.SinglePageApp()`. Some Go template routes remain active.
|
||||
|
||||
### 2a. Remaining active Go template routes (sync)
|
||||
|
||||
These routes in `sync/routes.go` still render Go templates:
|
||||
- `/oauth/arcgis/begin` → `getArcgisOauthBegin` (redirect only — renders no template, but is still a Go handler)
|
||||
- `/oauth/arcgis/callback` → `getArcgisOauthCallback`
|
||||
- `/mailer/pool/random` → `getMailerPoolRandom`
|
||||
- `/mailer/mode-1` → `getMailer1` (generates PDF)
|
||||
- `/mailer/mode-2` → `getMailer2` (generates PDF)
|
||||
- `/mailer/mode-3/{code}` → `getMailer3` (generates PDF)
|
||||
- `/mailer/mode-1/preview` → `getMailer1Preview`
|
||||
- `/mailer/mode-2/preview` → `getMailer2Preview`
|
||||
- `/mailer/mode-3/{code}/preview` → `getMailer3Preview`
|
||||
- `/privacy` → `getPrivacy`
|
||||
|
||||
The mailer routes use `platform/pdf` which in turn uses headless Chrome (`chromedp`) to render HTML to PDF. This is legitimate server-side functionality, not just a template remnant. However, the PDF templates themselves may be candidates for migration to the Vue ecosystem.
|
||||
|
||||
### 2b. Remove all commented-out routes
|
||||
|
||||
Both `sync/routes.go` and `rmo/routes.go` have large blocks of commented-out route registrations. Remove these once migration is confirmed complete.
|
||||
|
||||
### 2c. Remove unused Go template files
|
||||
|
||||
Once all routes are ported or confirmed dead, remove the entire `html/template/` directory. The `html/` package (`html/embed.go`, `html/filesystem.go`, `html/func.go`, etc.) should also be removed once nothing references it.
|
||||
|
||||
### 2d. Reduce the html/ package surface
|
||||
|
||||
**Note:** The `html/` package is still actively imported by 40+ Go files. It provides:
|
||||
- Template rendering (`html/embed.go`, `html/filesystem.go`) — mostly for mailer PDFs and privacy page
|
||||
- `html.ContentConfig` — used extensively in sync/routes (mailer previews, admin pages)
|
||||
- `html.MakeGet`, `html.MakePost` — HTTP handler wrappers (used by active `sync/` routes)
|
||||
- `html.RespondError` — HTTP error responses
|
||||
- Form parsing, image upload handling, URL building
|
||||
|
||||
**Short-term:** Remove the template rendering portion once mailer PDFs and privacy page are migrated.
|
||||
**Long-term:** The full `html/` package can be removed only after all server-rendered pages are gone and handler wrappers are replaced with the `resource/` pattern.
|
||||
|
||||
---
|
||||
|
||||
## 3. esbuild (`build.js`) — Removed ✅
|
||||
|
||||
*(Completed 2026-05-09: `build.js` removed and `pkgs.esbuild` dropped from flake.nix devShell — Vite is the build tool)*
|
||||
|
||||
---
|
||||
|
||||
## 4. Legacy Static JavaScript Files
|
||||
|
||||
**Status:** `static/js/` contains 20 plain JavaScript files written as custom HTML elements and standalone scripts for the Go template era. These are referenced by old Go HTML templates but most of those templates are now unused.
|
||||
|
||||
### 4a. Files in static/js/
|
||||
|
||||
```
|
||||
address-display.js
|
||||
address-or-report-suggestion.js
|
||||
address-suggestion.js
|
||||
events.js
|
||||
geocode.js
|
||||
location.js
|
||||
map-admin.js
|
||||
map-aggregate.js
|
||||
map-arcgis-tile.js
|
||||
map-cell.js
|
||||
map-locator.js
|
||||
map-locator-ro.js
|
||||
map-multipoint.js
|
||||
map-proxied-arcgis-tile.js
|
||||
map-routing.js
|
||||
map-service-area.js
|
||||
photo-upload.js
|
||||
table-report.js
|
||||
table-site.js
|
||||
time-relative.js
|
||||
user-selector.js
|
||||
```
|
||||
|
||||
### 4b. Determine which are still used
|
||||
|
||||
The remaining active Go templates (mailer, oauth, privacy) may reference some of these. Check each active template for `<script src="/static/js/...">` references. Templates that are confirmed unused:
|
||||
- All templates in `html/template/sync/` (dashboard, cell, communication-root, district, intelligence, layout, operations-root, planning-root, radar, review, sudo, upload-*) — these are replaced by Vue SPAs
|
||||
- Most templates in `html/template/rmo/` — RMO routes are all commented out
|
||||
|
||||
### 4c. Migrate any still-needed functionality
|
||||
|
||||
The map-locator, address-suggestion, and photo-upload functionality has Vue equivalents in `ts/components/`. The remaining custom element patterns should be fully replaced by Vue components.
|
||||
|
||||
---
|
||||
|
||||
## 5. TomTom Integration — Removed ✅
|
||||
|
||||
*(Completed 2026-05-09: `tomtom/` directory removed — zero imports outside itself, Stadia Maps is now the geocoding/tile provider)*
|
||||
|
||||
---
|
||||
|
||||
## 6. Postgrid — Alternate Mail Provider
|
||||
|
||||
**Status:** `postgrid/` contains a single CLI tool (`cmd/send-pdf`) and a `postgrid` Go package reference in `main.go`. Lob is now the mail provider, with its own integration in `lob/`.
|
||||
|
||||
### 6a. Investigate and remove if unused
|
||||
|
||||
- Check if Postgrid is actually being used in production vs Lob
|
||||
- If Lob is the chosen provider, remove `postgrid/` entirely
|
||||
- Remove any Postgrid configuration references
|
||||
|
||||
---
|
||||
|
||||
## 7. Duplicate Architecture: `api/` vs `resource/`
|
||||
|
||||
**Status:** The `api/` package contains both route registration (`api/routes.go`) and handler functions (`api/signin.go`, `api/publicreport.go`, `api/compliance.go`, etc.). The `resource/` package provides typed resource handlers that expose `List`, `Get`, `Create`, etc. Some functionality exists in both layers.
|
||||
|
||||
### 7a. Consolidate handler functions
|
||||
|
||||
Functions in `api/` that directly handle business logic should be moved to `resource/`:
|
||||
- `api/signin.go` — `postSignin`, `postSignout`, `postSignup`
|
||||
- `api/compliance.go` — various compliance handlers
|
||||
- `api/publicreport.go` — `postPublicreportInvalid`, `postPublicreportSignal`, `postPublicreportMessage`
|
||||
- `api/sudo.go` — `postSudoEmail`, `postSudoSMS`, `postSudoSSE`
|
||||
- `api/configuration.go` — `postConfigurationIntegrationArcgis`
|
||||
- `api/review.go` — `postReviewPool`
|
||||
- `api/twilio.go`, `api/voipms.go` — webhook handlers
|
||||
- `api/audio.go`, `api/image.go` — media upload handlers
|
||||
- `api/tile.go`, `api/debug.go` — utilities
|
||||
|
||||
### 7b. Standardize on resource pattern
|
||||
|
||||
Either move everything to `resource/` or keep both but clearly define responsibilities:
|
||||
- `resource/` — domain resource CRUD + URI generation
|
||||
- `api/` — route registration + HTTP concerns only
|
||||
|
||||
Currently the split is unclear and some `api/` files do substantial business logic.
|
||||
|
||||
---
|
||||
|
||||
## 8. `arcgis-go` Submodule — Not Checked Out
|
||||
|
||||
**Status:** The `arcgis-go` submodule (referenced in `.gitmodules`) is not checked out (empty directory). The external `github.com/Gleipnir-Technology/arcgis-go` package is used via `go.mod` instead.
|
||||
|
||||
### 8a. Remove submodule
|
||||
|
||||
```bash
|
||||
git submodule deinit arcgis-go
|
||||
git rm arcgis-go
|
||||
```
|
||||
|
||||
Verify that all code references use the external package, not a local path.
|
||||
|
||||
---
|
||||
|
||||
## 9. `go-geojson2h3` Local Copy
|
||||
|
||||
**Status:** `go-geojson2h3/` is also a submodule. The external package `github.com/Gleipnir-Technology/go-geojson2h3/v2` is imported in `go.mod`. Only `h3utils/h3.go` references it.
|
||||
|
||||
### 9a. Consolidate
|
||||
|
||||
- If the local copy isn't needed, remove the submodule
|
||||
- If local modifications exist, merge upstream or maintain intentionally with documentation
|
||||
|
||||
---
|
||||
|
||||
## 10. Old Generated Files & Artifacts
|
||||
|
||||
### 10a. `query.go` at project root — Removed ✅
|
||||
|
||||
### 10b. `db/sql/` directory
|
||||
|
||||
Contains `.bob.go` and `.bob.sql` files — these are Bob-style named queries. Once Bob is removed, these can be cleaned up or migrated to Jet equivalents.
|
||||
|
||||
### 10c. `static/gen/main.js`
|
||||
|
||||
A leftover built artifact. The new build output goes to `static/gen/sync/` and `static/gen/rmo/` via Vite. Ensure `static/gen/` is in `.gitignore` and the stale `main.js` is removed.
|
||||
|
||||
### 10d. `static/css/placeholder`
|
||||
|
||||
Empty placeholder file. Remove.
|
||||
|
||||
---
|
||||
|
||||
## 11. Nix devShell Cleanup
|
||||
|
||||
**Status:** `flake.nix` devShell includes several tools from older workflows:
|
||||
|
||||
### 11a. Potentially unnecessary devShell packages
|
||||
|
||||
- ~~`pkgs.esbuild` — replaced by Vite~~ ✅ (already dropped 2026-05-09 together with `build.js`; see section 3)
|
||||
- `pkgs.dart-sass` — Vue/Vite uses the `sass` npm package; check if Go code invokes dart-sass directly
|
||||
- `pkgs.autoprefixer` — may not be needed with Vite's built-in PostCSS
|
||||
|
||||
---
|
||||
|
||||
## 12. Start Scripts — Consolidate
|
||||
|
||||
**Status:** Four start scripts exist:
|
||||
|
||||
| Script | Purpose |
|
||||
|--------|---------|
|
||||
| `start-air.sh` | Development with air (live reload) |
|
||||
| `start-flogo.sh` | Unknown (references `flogo`) |
|
||||
| `start-nidus-sync.sh` | Production-like direct run |
|
||||
| `start-nix-built.sh` | Run Nix-built output |
|
||||
|
||||
`start-flogo.sh` may be a remnant. Investigate and remove if unused.
|
||||
|
||||
---
|
||||
|
||||
## 13. Normalize Query Function Signatures to `db.Ex`
|
||||
|
||||
**Status:** The `db/query/` packages have inconsistent transaction parameter conventions. Some functions accept `db.Ex`, some accept `db.Tx` (concrete type), some accept `bob.Tx`, and some accept no transaction parameter at all (using the global `db.PGInstance` singleton). This blocks transaction-based testing and creates inconsistent patterns.
|
||||
|
||||
### 13a. Functions missing transaction parameter (use global pool)
|
||||
|
||||
These functions have no `txn` parameter and call `db.ExecuteOne`/`db.ExecuteMany`/`db.ExecuteNone` which use the global `PGInstance.PGXPool`:
|
||||
|
||||
**`db/query/public/communication.go`:**
|
||||
- `CommunicationFromID(ctx, comm_id int64)` → add `txn db.Ex`, switch to `ExecuteOneTx`
|
||||
- `CommunicationsFromOrganization(ctx, org_id int64)` → add `txn db.Ex`, switch to `ExecuteManyTx`
|
||||
|
||||
**`db/query/publicreport/report.go`:**
|
||||
- `ReportFromID(ctx, report_id int64)` → add `txn db.Ex`, switch to `ExecuteOneTx`
|
||||
- `ReportsFromIDs(ctx, report_ids []int64)` → add `txn db.Ex`, switch to `ExecuteManyTx`
|
||||
|
||||
**`db/query/arcgis/account.go`:**
|
||||
- `AccountFromID(ctx, org_id string)` → add `txn db.Ex`, switch to `ExecuteOneTx`
|
||||
|
||||
**`db/query/arcgis/oauth.go` (all 10 functions use global pool):**
|
||||
- `OAuthTokenInsert`, `OAuthTokenInvalidate`, `OAuthTokensValid`, `OAuthTokenFromID`, `OAuthTokenForUser`, `OAuthTokensForUser`, `OAuthTokenForUserExists`, `OAuthTokenUpdateAccessToken`, `OAuthTokenUpdateRefreshToken`, `OAuthTokenUpdateLicense` — add `txn db.Ex` to all, switch to `ExecuteOneTx`/`ExecuteManyTx`/`ExecuteNoneTx`
|
||||
|
||||
**`db/query/arcgis/service_feature.go`:**
|
||||
- `ServiceFeatureFromID(ctx, id string)` → add `txn db.Ex`
|
||||
- `ServiceFeatureFromURL(ctx, url string)` → add `txn db.Ex`
|
||||
|
||||
**`db/query/arcgis/service_map.go`:**
|
||||
- `ServiceMapFromID(ctx, id string)` → add `txn db.Ex`
|
||||
- `ServiceMapsFromAccountID(ctx, account_id string)` → add `txn db.Ex`
|
||||
|
||||
**Caller impact:** All callers will need to be updated to pass a `db.Ex`. Most platform callers either have a `db.Tx` from `db.BeginTxn()` already in scope (e.g., `communicationMark`, `SignalCreateFromPublicreport`) or can pass `db.PGInstance.PGXPool` (which implements `db.Ex`).
|
||||
|
||||
### 13b. Functions using `db.Tx` instead of `db.Ex`
|
||||
|
||||
These functions accept the concrete `db.Tx` type. While `db.Tx` implements `db.Ex`, using the concrete type prevents callers from passing other `db.Ex` implementors (e.g., `*pgxpool.Pool` or mock implementations). Change all to accept `db.Ex`:
|
||||
|
||||
**`db/query/public/communication.go`:**
|
||||
- `CommunicationInsert(ctx, txn db.Tx, m)` → `txn db.Ex`
|
||||
- `CommunicationSetStatus(ctx, txn db.Tx, ...)` → `txn db.Ex`
|
||||
|
||||
**`db/query/public/communication_log_entry.go`:**
|
||||
- `CommunicationLogEntryInsert(ctx, txn db.Tx, m)` → `txn db.Ex`
|
||||
|
||||
**`db/query/publicreport/compliance.go`:**
|
||||
- `ComplianceFromID(ctx, txn db.Tx, report_id)` → `txn db.Ex`
|
||||
|
||||
### 13c. Functions using `bob.Tx` instead of `db.Ex`
|
||||
|
||||
These are Bob-specific and need to be migrated to Jet's `db.Ex` pattern. This is part of the broader Bob→Jet migration (item 1):
|
||||
|
||||
**`db/query/arcgis/account.go`:**
|
||||
- `AccountInsert(ctx, txn bob.Tx, m)` → `txn db.Ex`, switch from `ExecuteOneTxBob` to `ExecuteOneTx`
|
||||
|
||||
**`db/query/arcgis/service_feature.go`:**
|
||||
- `ServiceFeatureInsert(ctx, txn bob.Tx, m)` → `txn db.Ex`, switch from `ExecuteOneTxBob` to `ExecuteOneTx`
|
||||
|
||||
**`db/query/arcgis/service_map.go`:**
|
||||
- `ServiceMapInsert(ctx, txn bob.Tx, m)` → `txn db.Ex`, switch from `ExecuteOneTxBob` to `ExecuteOneTx`
|
||||
|
||||
**`db/query/arcgis/user.go`:**
|
||||
- `UserInsert(ctx, txn bob.Tx, m)` → `txn db.Ex`, switch from `ExecuteOneTxBob` to `ExecuteOneTx`
|
||||
|
||||
**`db/query/arcgis/user_privileges.go`:**
|
||||
- `UserPrivilegesDeleteByUserID(ctx, txn bob.Tx, id)` → `txn db.Ex`
|
||||
- `UserPrivilegeInsert(ctx, txn bob.Tx, m)` → `txn db.Ex`
|
||||
|
||||
### 13d. Bug: `AddressFromID` ignores its transaction parameter
|
||||
|
||||
**`db/query/public/address.go` `AddressFromID`:** takes `txn db.Ex` but calls `db.ExecuteOne` (global pool) instead of `db.ExecuteOneTx`. This works when `txn` is the pool itself (callers pass `db.PGInstance.PGXPool`) but is a latent bug when called from within a transaction (caller in `platform/signal.go:85` and `platform/compliance.go:37`). Fix by switching to `db.ExecuteOneTx`.
|
||||
|
||||
Same bug in `AddressFromComplianceReportRequestID` (line 31 uses `ExecuteOne` instead of `ExecuteOneTx`).
|
||||
|
||||
### 13e. Migration strategy
|
||||
|
||||
1. Fix 13d first (one-character bugs — swap `ExecuteOne` → `ExecuteOneTx`)
|
||||
2. Convert 13b next (signature-compatible change — `db.Tx` → `db.Ex` is widening)
|
||||
3. Convert 13a next (add `txn db.Ex` parameter, update all callers)
|
||||
4. Convert 13c last (part of broader Bob→Jet migration, item 1)
|
||||
|
||||
After all conversions, every query function will have a consistent `(ctx context.Context, txn db.Ex, ...)` signature, enabling uniform transaction-based testing.
|
||||
|
||||
---
|
||||
|
||||
## Priority Summary
|
||||
|
||||
1. **High impact, low effort:**
|
||||
- ~~Remove `tomtom/` (unused, no imports)~~ ✅
|
||||
- ~~Remove `build.js` (dead, replaced by Vite)~~ ✅
|
||||
- Remove commented-out routes in `sync/routes.go` and `rmo/routes.go`
|
||||
- ~~Remove `query.go` commented-out code~~ ✅
|
||||
- Remove `static/gen/main.js` stale artifact
|
||||
- Remove `static/css/placeholder`
|
||||
- **Fix `AddressFromID`/`AddressFromComplianceReportRequestID` — swap `ExecuteOne` → `ExecuteOneTx`** (item 13d)
|
||||
- **Convert `db.Tx` → `db.Ex` in query functions** (item 13b)
|
||||
|
||||
2. **Medium impact, medium effort:**
|
||||
- Remove unused Go HTML templates (confirm which are still active first)
|
||||
- Remove unused `static/js/` files (verify against active templates)
|
||||
- Remove `arcgis-go` submodule
|
||||
- Clean up Nix devShell
|
||||
- **Add `txn db.Ex` to query functions missing it** (item 13a)
|
||||
|
||||
3. **High impact, high effort:**
|
||||
- Complete Bob → Jet migration across all schemas
|
||||
- Remove Bob-generated models, helpers, scripts
|
||||
- Remove Bob from go.mod
|
||||
- Consolidate `api/` and `resource/` handler patterns
|
||||
- Remove `html/` package (after all Go templates are gone)
|
||||
- **Convert `bob.Tx` → `db.Ex` in arcgis query functions** (item 13c)
|
||||
207
HISTORY.md
Normal file
207
HISTORY.md
Normal file
|
|
@ -0,0 +1,207 @@
|
|||
# nidus-sync — Project History
|
||||
|
||||
## Overview
|
||||
|
||||
nidus-sync is a dual-tenant mosquito abatement platform serving two domains:
|
||||
- **RMO** (`report.mosquitoes.online`) — Public-facing mosquito/water/nuisance reporting
|
||||
- **Sync** (`sync.nidus.cloud`) — Administrative dashboard for vector control districts
|
||||
|
||||
The project was started in November 2025 and has undergone several major architectural shifts across ~1655 commits spanning 6 months.
|
||||
|
||||
---
|
||||
|
||||
## Timeline
|
||||
|
||||
### Phase 1: Foundation (November 2025)
|
||||
|
||||
**Nov 3 – Nov 13: Project bootstrap**
|
||||
- Initial Go project with Nix build system (`flake.nix`, `default.nix`)
|
||||
- Basic `net/http` web serving with `gorilla/mux` routing
|
||||
- Go `html/template` server-side rendering
|
||||
- Bob ORM integration (`github.com/Gleipnir-Technology/bob`) for PostgreSQL — code-generated models via `bobgen`
|
||||
- ArcGIS OAuth integration for user authentication
|
||||
- ArcGIS Fieldseeker data synchronization (treatment areas, inspections, breeding sources, etc.)
|
||||
- MapBox GL JS integration for heatmap visualization
|
||||
- Dashboard with login, basic CRUD mocks
|
||||
|
||||
**Nov 13 – Nov 24: Logging & DB restructuring**
|
||||
- Migration from standard `log` to `zerolog` for structured, colorized output
|
||||
- Database logic moved into a separate `db/` subdirectory
|
||||
- Clean shutdown logic, token refresh loops
|
||||
|
||||
**Key characteristics:** Monolithic Go server, HTML templates, Bob ORM, MapBox maps, ArcGIS OAuth
|
||||
|
||||
---
|
||||
|
||||
### Phase 2: Fieldseeker & Schema Evolution (December 2025)
|
||||
|
||||
**Dec 2 – Dec 24: Fieldseeker schema v2**
|
||||
- Bob codegen updated to latest version
|
||||
- Fieldseeker schema captured on OAuth connect and stored locally
|
||||
- Dynamic SQL functions replacing hardcoded per-table sync logic
|
||||
- Old Fieldseeker tables removed, v2 generated tables used
|
||||
- Note/image audio support added
|
||||
- MMS file downloads from SMS webhooks
|
||||
|
||||
**Key characteristics:** Bob-generated fieldseeker models, prepared SQL functions, SMS/MMS debugging
|
||||
|
||||
---
|
||||
|
||||
### Phase 3: Architecture Maturation (January 2026)
|
||||
|
||||
**Jan 2 – Jan 8: Domain split & template system**
|
||||
- WIP pass-through models concept ("Checkpoint on initial idea for passing through models")
|
||||
- Massive reorganization: templates split into `rmo/` (public) and `sync/` (admin) subdirectories
|
||||
- `html/` package created with embedded template loading
|
||||
- Bob submodule removed, `arcgis-go` became external dependency
|
||||
- Public report domain support added
|
||||
- Version bumped 7 times in rapid iteration (v0.0.4 → v0.0.10)
|
||||
|
||||
**Jan 8 – Jan 31: Platform Layer emergence**
|
||||
- "Report platform layer" introduced (`a9b0a55f`) — initial abstraction between HTTP handlers and database
|
||||
- Address suggestion and map-locator components via custom HTML elements
|
||||
- SVG auto-transformation into Go templates
|
||||
- Report submission forms wired up (nuisance, water)
|
||||
- Email template system
|
||||
|
||||
**Key characteristics:** Two-domain architecture (RMO/Sync), `html/` template package, platform layer beginning, custom element web components
|
||||
|
||||
---
|
||||
|
||||
### Phase 4: Map Migration & Platform Expansion (February 2026)
|
||||
|
||||
**Feb 1 – Feb 28: Map provider transition**
|
||||
- MapBox → MapLibre GL (open-source fork) via `maplibre-gl`
|
||||
- Stadia Maps integration for tile serving and geocoding (Feb 12-14)
|
||||
- TomTom routing integration added (Feb 17)
|
||||
- Bulk geocoding via Stadia
|
||||
- Parcel image generation debugging
|
||||
|
||||
**Platform layer expansion:**
|
||||
- Emails moved to platform layer
|
||||
- Phone/SMS support
|
||||
- OAuth integration settings
|
||||
- Upload platform functions
|
||||
- QR code and image tile moved into platform
|
||||
- Admin map components
|
||||
|
||||
**Key characteristics:** MapLibre/Stadia replacing MapBox, TomTom added, platform layer expanding, heavy template iteration
|
||||
|
||||
---
|
||||
|
||||
### Phase 5: VueJS Revolution (March 2026) — 448 commits
|
||||
|
||||
**Mar 5 – Mar 12: Pre-Vue cleanup**
|
||||
- Stadia Maps client initialization
|
||||
- Signal database schema added
|
||||
- Review task/mailer schema rework
|
||||
- Generated Bob files pruned
|
||||
|
||||
**Mar 12: Massive platform layer rework** (`44c4f17f`)
|
||||
- User/organization handling restructured in platform layer
|
||||
- Signal creation moved inside platform
|
||||
|
||||
**Mar 18 – Mar 22: VueJS Migration** (the biggest architectural shift)
|
||||
- Mar 18: Auto-generated report IDs
|
||||
- Mar 21: **VueJS introduced** — begins with TypeScript bundle, then Vue SFC components, vue-router, Bootstrap/SCSS integration
|
||||
- Mar 21: Dashboard, Intelligence, sidebar all moved to Vue
|
||||
- Mar 22: **esbuild replaced by Vite** (`47f900ab`) — `vite/` directory with separate configs for `sync` and `rmo` SPAs
|
||||
- Mar 22: TypeScript checking clean across entire frontend
|
||||
- Mar 23: Public report card component, auth checks off API client
|
||||
- Mar 24-31: Communication page ripped into components, impersonation support, users page
|
||||
|
||||
**Key characteristics:** VueJS 3 + TypeScript + Vite frontend, Pinia stores, vue-router, SCSS, SPA architecture replacing server-rendered Go templates
|
||||
|
||||
---
|
||||
|
||||
### Phase 6: Compliance & Communication (April 2026) — 454 commits
|
||||
|
||||
**Apr 1 – Apr 9: RMO frontend & resources**
|
||||
- Resource layer expanded (user, avatar, district, nuisance, water, compliance resources)
|
||||
- RMO frontend checkpoint — Vue ports of public-facing pages
|
||||
- TS types migrated into API module
|
||||
- Old bundle paths removed, old SPA generation removed
|
||||
|
||||
**Apr 10 – Apr 17: Compliance workflow**
|
||||
- Compliance report creation, mailer flow
|
||||
- Site/pool review tasks
|
||||
- Stadia Maps cache, direct tile access
|
||||
- OAuth refresh in frontend
|
||||
- Image upload components
|
||||
|
||||
**Apr 17 – Apr 25: Communication system**
|
||||
- Background jobs reworked for shorter transactions
|
||||
- Lob (physical mail) integration — direct API client, address creation, letter events
|
||||
- QR code generation moved to API
|
||||
- Compliance report evidence, mailer views
|
||||
- Vue map system generalized (`cad01e68`)
|
||||
|
||||
**Apr 25 – Apr 30: Map & communication polish**
|
||||
- VueJS reimplementation of address/report suggestion
|
||||
- Communication workbench with map, list, detail views
|
||||
- Text message log, email/phone display
|
||||
- Compliance card detail display
|
||||
- SSE event system with status vs resource message distinction
|
||||
- Systemd socket activation for downtime-free deploys
|
||||
- Sentry error tracking for Vue frontend
|
||||
|
||||
**Key characteristics:** Compliance/mailer operational, communication system born, Lob integration, Sentry, generalized Vue map system
|
||||
|
||||
---
|
||||
|
||||
### Phase 7: Jet Migration & Cleanup (May 2026) — 46 commits so far
|
||||
|
||||
**May 1 – May 9: SQL generation transition**
|
||||
- **Jet (go-jet/jet) introduced** — type-safe SQL builder replacing Bob's query building
|
||||
- Custom Jet generator created with geometry/Box2D type support (`db/jet/main.go`)
|
||||
- `publicreport` schema ported to Jet
|
||||
- `arcgis` schema ported to Jet (compiles, not fully tested per commit message)
|
||||
- New `communication` table added
|
||||
- Communication marking workflow (invalid, pending-response, possible-issue, possible-resolved)
|
||||
- Linting: `golangci-lint` added to lefthook, per-file linting
|
||||
- Cleanup of legacy generated columns (latitude/longitude), string-based queries
|
||||
- Centralized error handler for Vue sync app
|
||||
|
||||
**Key characteristics:** Bob→Jet transition in progress, communication workflow, code quality improvements
|
||||
|
||||
---
|
||||
|
||||
## Architectural Patterns (by layer)
|
||||
|
||||
### Current architecture stack
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────┐
|
||||
│ Vue 3 SPA (TypeScript) │
|
||||
│ ts/ — shared components, composables, stores │
|
||||
│ vite/sync/ — admin SPA entry │
|
||||
│ vite/rmo/ — public SPA entry │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ Go HTTP Server (gorilla/mux) │
|
||||
│ api/routes.go — central route registration │
|
||||
│ resource/ — resource handlers (REST patterns) │
|
||||
│ sync/ — remaining Go template routes │
|
||||
│ rmo/ — remaining Go template routes │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ platform/ — business logic layer │
|
||||
│ (address, compliance, communication, district, │
|
||||
│ email, fieldseeker, mailer, publicreport, │
|
||||
│ review, signal, text, user, upload, etc.) │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ db/ — database access │
|
||||
│ db/models/ — Bob-generated models (103 files) │
|
||||
│ db/query/ — Jet-based query functions │
|
||||
│ db/prepared.go — prepared SQL functions │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ PostgreSQL │
|
||||
└─────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### Pattern: Platform Layer
|
||||
Introduced January 2026, the `platform/` package encapsulates business logic between HTTP handlers and the database. It grew from initial report handling to encompass users, organizations, emails, texts, compliance, communications, signals, geocoding, tiles, uploads, and more.
|
||||
|
||||
### Pattern: Resource Layer
|
||||
Added March–April 2026, `resource/` provides typed REST resource handlers with URI generation (via mux route naming). Resources are instantiated with a `resource.NewRouter()` and expose methods like `List`, `Get`, `Create`, `Update`, `Delete` that return domain types. This replaced ad-hoc handler functions in `api/`.
|
||||
|
||||
### Pattern: Dual SPA + API
|
||||
Since late March 2026, both domains serve Vue SPAs for most routes, with the Go server acting as an API backend. The `static.SinglePageApp()` handler serves the Vite-built output and falls back to `index.html` for client-side routing. Some Go template routes remain for mailer PDF generation, OAuth flows, and previews.
|
||||
76
README.md
76
README.md
|
|
@ -2,6 +2,25 @@
|
|||
|
||||
This is the software that powers [Nidus Cloud Sync](https://sync.nidus.cloud).
|
||||
|
||||
## Administration
|
||||
|
||||
### Password resets
|
||||
|
||||
If you need to manually reset a password you can do so with:
|
||||
|
||||
```
|
||||
$ nix-shell -p genpass
|
||||
$ genpass 12
|
||||
abc123abc123
|
||||
# this is from nidus, installed on deployment servers at the system layer
|
||||
$ passwordgen
|
||||
Please enter your password: abc123abc123
|
||||
Password: abc123abc123
|
||||
Hash: $2a$14$hdtoAtP7joczutY3bxaFqemBApH8xc5NbXLvDQqBfdzWV3jGSy4zi
|
||||
$ psql -d nidus-sync
|
||||
nidus-sync=> update "user" set password_hash='$2a$14$hdtoAtP7joczutY3bxaFqemBApH8xc5NbXLvDQqBfdzWV3jGSy4zi' where id=<something>;  -- note: "user" must be quoted, it is a reserved keyword in PostgreSQL
|
||||
```
|
||||
|
||||
## Building from source
|
||||
|
||||
First, you'll need [Nix](https://nix.dev).
|
||||
|
|
@ -13,6 +32,15 @@ nix develop
|
|||
go build .
|
||||
```
|
||||
|
||||
## Building Custom Theme
|
||||
|
||||
We're using a customized Bootstrap theme for this site. You'll need to build the SCSS into CSS:
|
||||
|
||||
```
|
||||
nix develop
|
||||
sass --style=compressed --trace "$SASS_SRC_DIR/custom.scss":"$CSS_OUTPUT_DIR/bootstrap.css"
|
||||
```
|
||||
|
||||
## Running
|
||||
|
||||
You'll need a number of environment variables for configuring things;
|
||||
|
|
@ -31,6 +59,18 @@ You'll need a number of environment variables for configuring things;
|
|||
> BASE_URL=https://sync.nidus.cloud ARCGIS_CLIENT_ID=foo ARCGIS_CLIENT_SECRET=bar POSTGRES_DSN='postgresql://?host=/var/run/postgresql&dbname=nidus-sync' ./nidus-sync
|
||||
```
|
||||
|
||||
### Districts
|
||||
|
||||
There's a table containing district information in the database, `import.district`. It was created with:
|
||||
|
||||
```
|
||||
psql
|
||||
CREATE SCHEMA import;
|
||||
shp2pgsql -s 3857 -c -D -I CA_districts.shp import.district | psql -d nidus-sync
|
||||
psql -d nidus-sync
|
||||
ALTER TABLE import.district ADD COLUMN geom_4326 geometry(MultiPolygon,4326) GENERATED ALWAYS AS (ST_Transform(geom, 4326)) STORED;
|
||||
```
|
||||
|
||||
## Hacking
|
||||
|
||||
### air
|
||||
|
|
@ -62,3 +102,39 @@ This uses [goose](https://github.com/pressly/goose). You can use the goose comma
|
|||
> GOOSE_DRIVER=postgres GOOSE_DBSTRING="dbname=nidus-sync sslmode=disable" goose down
|
||||
> GOOSE_DRIVER=postgres GOOSE_DBSTRING="dbname=nidus-sync sslmode=disable" goose up
|
||||
```
|
||||
|
||||
### svg icons
|
||||
|
||||
These icons are generated as part of the build system. You can generate them manually with:
|
||||
|
||||
```
|
||||
pnpm generate-icons
|
||||
```
|
||||
|
||||
This will produce an SCSS file at `ts/gen/custom-icons.scss`.
|
||||
|
||||
### typescript
|
||||
|
||||
In order to work on the TypeScript code you'll need to install the dependencies locally in your dev environment:
|
||||
|
||||
```
|
||||
nix develop
|
||||
pnpm install
|
||||
```
|
||||
|
||||
You can then generate the TypeScript with:
|
||||
|
||||
```
|
||||
pnpm watch
|
||||
|
||||
```
|
||||
|
||||
The only page that works right now is `https://sync.nidus.cloud/template-test`
|
||||
|
||||
### watchexec
|
||||
|
||||
For iterating on styles
|
||||
|
||||
```
|
||||
watchexec -e scss sass scss/custom.scss:static/gen/css/bootstrap.css
|
||||
```
|
||||
|
|
|
|||
656
TEST-PLAN.md
Normal file
656
TEST-PLAN.md
Normal file
|
|
@ -0,0 +1,656 @@
|
|||
# TEST-PLAN.md — Nidus Sync Database Query Layer Testing
|
||||
|
||||
## Status
|
||||
|
||||
The project currently has **zero tests**. `testify` is already in `go.mod` as an indirect dependency and needs to be promoted to direct.
|
||||
|
||||
## Overview
|
||||
|
||||
This plan covers **Tier 2 testing**: integration tests for the `db/query/` packages that execute real SQL against a throwaway Postgres database using the project's own migration system. All tests operate inside transactions that are rolled back, so they leave no trace.
|
||||
|
||||
The query layer is the ideal starting point because:
|
||||
|
||||
1. Every function is small and focused — a single INSERT, SELECT, or UPDATE.
|
||||
2. After the signature normalization (CLEANUP.md §13), **every** function will accept a `db.Ex` interface, making them all uniformly testable from a test transaction.
|
||||
3. These are the foundation that all platform-layer business logic calls. Bugs here cascade upward.
|
||||
|
||||
### Prerequisite: Normalize Query Function Signatures
|
||||
|
||||
Before writing tests, all query functions must be converted to accept `(ctx context.Context, txn db.Ex, ...)`. This is documented in detail at **[CLEANUP.md §13](CLEANUP.md#13-normalize-query-function-signatures-to-dbex)**. Summary of changes needed:
|
||||
|
||||
| Category | Count | What | Test-blocking? |
|
||||
|----------|-------|------|---------------|
|
||||
| 13d — Bugfix: txn ignored | 2 funcs | `AddressFromID`, `AddressFromComplianceReportRequestID` call `ExecuteOne` instead of `ExecuteOneTx` | Yes — data isolation broken |
|
||||
| 13b — `db.Tx` → `db.Ex` | 4 funcs | `CommunicationInsert`, `CommunicationSetStatus`, `CommunicationLogEntryInsert`, `ComplianceFromID` | Partial — works but can't pass mock |
|
||||
| 13a — Add `txn db.Ex` param | 19 funcs | Functions missing transaction parameter entirely | Yes — can't test in transactions |
|
||||
| 13c — `bob.Tx` → `db.Ex` | 6 funcs | ArcGIS package functions using Bob transactions | Yes — can't test without Bob |
|
||||
|
||||
**Order of operations:** Fix 13d → convert 13b → convert 13a → convert 13c. After all conversions, every function is testable with `dbtest.Txn()`.
|
||||
|
||||
---
|
||||
|
||||
## Architecture of the Query Layer
|
||||
|
||||
### Package structure
|
||||
|
||||
```
|
||||
db/query/
|
||||
├── public/ ← main "public" schema queries (Jet ORM)
|
||||
│ ├── address.go
|
||||
│ ├── communication.go
|
||||
│ ├── communication_log_entry.go
|
||||
│ ├── compliance_report_request.go
|
||||
│ ├── feature.go
|
||||
│ ├── feature_pool.go
|
||||
│ ├── job.go
|
||||
│ ├── lead.go
|
||||
│ ├── signal.go
|
||||
│ └── site.go
|
||||
├── publicreport/ ← "publicreport" schema queries (Jet ORM)
|
||||
│ ├── compliance.go
|
||||
│ ├── image.go
|
||||
│ ├── image_exif.go
|
||||
│ ├── nuisance.go
|
||||
│ ├── report.go
|
||||
│ ├── report_image.go
|
||||
│ ├── report_log.go
|
||||
│ └── water.go
|
||||
└── arcgis/ ← "arcgis" schema queries (Jet ORM)
|
||||
├── account.go
|
||||
└── ...
|
||||
```
|
||||
|
||||
### Query function patterns
|
||||
|
||||
These are the recurring function patterns in the query layer:
|
||||
|
||||
| Pattern | Signature | Example |
|
||||
|---------|-----------|---------|
|
||||
| **Insert (txn)** | `func XxxInsert(ctx, txn db.Ex, model) (model, error)` | `CommunicationInsert`, `LeadInsert`, `ReportInsert` |
|
||||
| **Insert (global)** | `func XxxInsert(ctx, model) (model, error)` | (would use `db.PGInstance` directly) |
|
||||
| **Select (txn)** | `func XxxFromYyy(ctx, txn db.Ex, ...) (model, error)` | `SiteFromAddressIDForOrg`, `FeaturesFromSiteID` |
|
||||
| **Select (global)** | `func XxxFromYyy(ctx, ...) (model, error)` | `CommunicationFromID`, `AddressFromID` |
|
||||
| **Update (txn)** | `func XxxSetYyy(ctx, txn db.Ex, ...) error` | `CommunicationSetStatus` |
|
||||
| **Bulk insert (txn)** | `func XxxInserts(ctx, txn db.Ex, []model) ([]model, error)` | `AddressInserts`, `ReportImagesInsert` |
|
||||
| **Bulk select (txn)** | `func XxxsFromYyys(ctx, txn db.Ex, []int64) ([]model, error)` | `AddressesFromIDs`, `FeaturePoolsFromFeatures` |
|
||||
|
||||
After the signature normalization in CLEANUP.md §13, **every** function accepts `txn db.Ex`. All tests use the same transaction-based pattern: begin → insert → query → verify → rollback.
|
||||
|
||||
### The `db.Ex` interface (from `db/tx.go`)
|
||||
|
||||
```go
|
||||
type Ex interface {
|
||||
Exec(ctx context.Context, sql string, arguments ...any) (commandTag pgconn.CommandTag, err error)
|
||||
Query(ctx context.Context, sql string, args ...any) (pgx.Rows, error)
|
||||
}
|
||||
```
|
||||
|
||||
`db.BeginTxn()` returns `db.Tx` which implements `Ex`. `*pgxpool.Pool` does NOT implement `Ex` directly (it has different method signatures), which is why `db.ExecuteOne` (global pool) and `db.ExecuteOneTx` (transaction) are separate functions.
|
||||
|
||||
### Data flow
|
||||
|
||||
```
|
||||
Query function constructs a Jet statement →
|
||||
calls db.ExecuteOneTx[T]() or db.ExecuteManyTx[T]() →
|
||||
statement.Sql() produces (query string, args) →
|
||||
txn.Query(ctx, query, args...) →
|
||||
pgx collects rows into typed struct
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Test Infrastructure
|
||||
|
||||
### Prerequisites
|
||||
|
||||
A running Postgres instance accessible via a connection string. The test framework will:
|
||||
|
||||
1. Connect using `TEST_POSTGRES_DSN` env var (default: skip tests if unset, so `go test ./...` works without DB)
|
||||
2. Run all migrations via goose (embedded in `db/migrations/*.sql`)
|
||||
3. Each test begins a transaction, runs the test, rolls back
|
||||
4. No test data persists
|
||||
|
||||
### Test helper: `db/dbtest/dbtest.go`
|
||||
|
||||
Create a `db/dbtest/` package providing:
|
||||
|
||||
```go
|
||||
package dbtest
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
)
|
||||
|
||||
var (
|
||||
pool *pgxpool.Pool
|
||||
once sync.Once
|
||||
)
|
||||
|
||||
// Setup ensures the test database is initialized (migrations run).
|
||||
// Called once per test binary via TestMain or per-package init.
|
||||
func Setup(t *testing.T) {
|
||||
t.Helper()
|
||||
dsn := os.Getenv("TEST_POSTGRES_DSN")
|
||||
if dsn == "" {
|
||||
t.Skip("TEST_POSTGRES_DSN not set, skipping DB tests")
|
||||
}
|
||||
once.Do(func() {
|
||||
ctx := context.Background()
|
||||
if err := db.InitializeDatabase(ctx, dsn); err != nil {
|
||||
t.Fatalf("initialize test database: %v", err)
|
||||
}
|
||||
pool = db.PGInstance.PGXPool
|
||||
})
|
||||
}
|
||||
|
||||
// Txn begins a new transaction on the test pool and returns
|
||||
// it along with a rollback cleanup function.
|
||||
func Txn(t *testing.T) (context.Context, db.Ex, func()) {
|
||||
t.Helper()
|
||||
ctx := context.Background()
|
||||
tx, err := pool.Begin(ctx)
|
||||
if err != nil {
|
||||
t.Fatalf("begin txn: %v", err)
|
||||
}
|
||||
return ctx, tx, func() {
|
||||
tx.Rollback(ctx)
|
||||
}
|
||||
}
|
||||
|
||||
// Pool returns the raw pgxpool for tests that need it.
|
||||
func Pool() *pgxpool.Pool {
|
||||
return pool
|
||||
}
|
||||
```
|
||||
|
||||
### Test file naming
|
||||
|
||||
All test files follow the standard Go convention: `<name>_test.go`, placed alongside the package being tested, declared as an external `_test` package (e.g. `package public_test`) so that only the exported API is exercised and circular dependencies are avoided.
|
||||
|
||||
Since the query functions are all exported and callable from outside the package, use:
|
||||
|
||||
```go
|
||||
package public_test // external test package
|
||||
```
|
||||
|
||||
This avoids circular dependency on `db/dbtest` and ensures we only test the public API.
|
||||
|
||||
### Dependencies to add to `go.mod`
|
||||
|
||||
Promote to direct (already indirect):
|
||||
```
|
||||
github.com/stretchr/testify v1.11.1
|
||||
```
|
||||
|
||||
Import in test files for assertions (both are packages within the `testify` module, so no additional `go.mod` entries are needed):
|
||||
```
|
||||
require "github.com/stretchr/testify/require"
|
||||
assert "github.com/stretchr/testify/assert"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 1: INSERT Functions (lowest risk, highest clarity)
|
||||
|
||||
These are the simplest: construct a model, insert, verify the returned model has an auto-generated ID.
|
||||
|
||||
### 1.1 `db/query/public/` insert functions
|
||||
|
||||
| File | Function | Model Dependencies | Notes |
|
||||
|------|----------|-------------------|-------|
|
||||
| `communication.go` | `CommunicationInsert` | `Communication` | Requires `organization_id` FK. Create an org in test setup. |
|
||||
| `communication_log_entry.go` | `CommunicationLogEntryInsert` | `CommunicationLogEntry` | Requires `communication_id` FK. Insert a communication first. |
|
||||
| `compliance_report_request.go` | `ComplianceReportRequestInsert` | `ComplianceReportRequest` | Requires `lead_id` FK (nullable). Test with nil. |
|
||||
| `lead.go` | `LeadInsert` | `Lead` | Requires `organization_id` and `site_id` FK. |
|
||||
| `signal.go` | `SignalInsert` | `Signal` | Requires `organization_id`, `location` (geom), FK to `site_id` (nullable). |
|
||||
| `job.go` | `JobInsert` | `Job` | Verify FK constraints documented. |
|
||||
|
||||
### 1.2 `db/query/publicreport/` insert functions
|
||||
|
||||
| File | Function | Model Dependencies |
|
||||
|------|----------|-------------------|
|
||||
| `compliance.go` | `ComplianceInsert` | `Compliance` |
|
||||
| `image.go` | `ImageInsert` | `Image` |
|
||||
| `image_exif.go` | `ImageExifInserts` | `ImageExif` (bulk) |
|
||||
| `nuisance.go` | `NuisanceInsert` | `Nuisance` |
|
||||
| `report.go` | `ReportInsert` | `Report` |
|
||||
| `report_image.go` | `ReportImageInsert`, `ReportImagesInsert` | `ReportImage` (single + bulk) |
|
||||
| `report_log.go` | `ReportLogInsert` | `ReportLog` |
|
||||
| `water.go` | `WaterInsert` | `Water` |
|
||||
|
||||
### 1.3 `db/query/arcgis/` insert functions
|
||||
|
||||
| File | Function | Model Dependencies |
|
||||
|------|----------|-------------------|
|
||||
| `account.go` | `AccountInsert` | `Account` |
|
||||
|
||||
### Example test: `db/query/public/communication_test.go`
|
||||
|
||||
```go
|
||||
package public_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/dbtest"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/public/model"
|
||||
query "github.com/Gleipnir-Technology/nidus-sync/db/query/public"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestCommunicationInsert(t *testing.T) {
|
||||
dbtest.Setup(t)
|
||||
ctx, txn, cleanup := dbtest.Txn(t)
|
||||
defer cleanup()
|
||||
|
||||
comm := model.Communication{
|
||||
Created: time.Now(),
|
||||
OrganizationID: 1,
|
||||
Status: model.Communicationstatus_New,
|
||||
}
|
||||
result, err := query.CommunicationInsert(ctx, txn, comm)
|
||||
|
||||
require.NoError(t, err)
|
||||
assert.NotZero(t, result.ID)
|
||||
assert.Equal(t, model.Communicationstatus_New, result.Status)
|
||||
assert.Equal(t, int32(1), result.OrganizationID)
|
||||
}
|
||||
```
|
||||
|
||||
### Test structure pattern
|
||||
|
||||
Every INSERT test follows this template:
|
||||
|
||||
1. **Arrange**: Create a model struct with required fields populated.
|
||||
2. **Act**: Call the Insert function with a test transaction.
|
||||
3. **Assert**:
|
||||
- No error returned.
|
||||
- `result.ID` is non-zero (auto-generated primary key).
|
||||
- Inserted values match input values.
|
||||
- Timestamp fields are set (where applicable).
|
||||
|
||||
---
|
||||
|
||||
## Phase 2: SELECT Functions
|
||||
|
||||
These require data to already exist in the table. Each SELECT test inserts a row in the same transaction, then queries it back. After the signature normalization (CLEANUP.md §13), **all** SELECT functions accept `txn db.Ex` and use `ExecuteOneTx`/`ExecuteManyTx`.
|
||||
|
||||
### 2.1 `db/query/public/` select functions
|
||||
|
||||
| File | Function | Strategy |
|
||||
|------|----------|----------|
|
||||
| `address.go` | `AddressFromComplianceReportRequestID` | Insert address → query by report request ID |
|
||||
| `address.go` | `AddressFromGID` | Insert address → query by GID |
|
||||
| `address.go` | `AddressFromID` | Insert address → query by ID |
|
||||
| `address.go` | `AddressesFromIDs` | Insert 2 addresses → query by IDs |
|
||||
| `communication.go` | `CommunicationFromID` | Insert communication → query by ID |
|
||||
| `communication.go` | `CommunicationsFromOrganization` | Insert 2 communications → query by org |
|
||||
| `feature.go` | `FeaturesFromSiteID` | Insert site → feature → query |
|
||||
| `feature.go` | `FeaturesFromSiteIDs` | Insert 2 sites + features → query |
|
||||
| `feature_pool.go` | `FeaturePoolsFromFeatures` | Insert feature → pool → query |
|
||||
| `site.go` | `SiteFromAddressIDForOrg` | Insert address + site → query |
|
||||
| `site.go` | `SiteFromIDForOrg` | Insert site → query |
|
||||
|
||||
### 2.2 `db/query/publicreport/` select functions
|
||||
|
||||
| File | Function | Strategy |
|
||||
|------|----------|----------|
|
||||
| `compliance.go` | `ComplianceFromID` | Insert compliance → query by ID |
|
||||
| `report.go` | `ReportFromPublicID` | Insert report → query by public ID |
|
||||
| `report.go` | `ReportFromPublicIDForOrg` | Insert report → query by public ID + org |
|
||||
| `report.go` | `ReportFromID` | Insert report → query by ID |
|
||||
| `report.go` | `ReportsFromIDs` | Insert 2 reports → query by IDs |
|
||||
| `report.go` | `ReportsFromIDsForOrg` | Insert 2 reports → query by IDs + org |
|
||||
| `report.go` | `ReportsUnreviewedForOrganization` | Insert reviewed + unreviewed → query unreviewed |
|
||||
|
||||
### 2.3 `db/query/arcgis/` select functions
|
||||
|
||||
| File | Function | Strategy |
|
||||
|------|----------|----------|
|
||||
| `account.go` | `AccountFromID` | Insert account → query by ID |
|
||||
| `oauth.go` | `OAuthTokenFromID` | Insert token → query by ID |
|
||||
| `oauth.go` | `OAuthTokenForUser` | Insert token → query by user |
|
||||
| `oauth.go` | `OAuthTokensForUser` | Insert tokens → query by user |
|
||||
| `oauth.go` | `OAuthTokensValid` | Insert valid + invalid → query valid |
|
||||
| `oauth.go` | `OAuthTokenForUserExists` | Insert token → verify exists |
|
||||
| `service_feature.go` | `ServiceFeatureFromID` | Insert → query by ID |
|
||||
| `service_feature.go` | `ServiceFeatureFromURL` | Insert → query by URL |
|
||||
| `service_map.go` | `ServiceMapFromID` | Insert → query by ID |
|
||||
| `service_map.go` | `ServiceMapsFromAccountID` | Insert maps → query by account |
|
||||
|
||||
### Example test: `db/query/public/address_test.go`
|
||||
|
||||
```go
|
||||
package public_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/dbtest"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/public/model"
|
||||
query "github.com/Gleipnir-Technology/nidus-sync/db/query/public"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/twpayne/go-geom"
|
||||
)
|
||||
|
||||
func TestAddressFromGID(t *testing.T) {
|
||||
dbtest.Setup(t)
|
||||
ctx, txn, cleanup := dbtest.Txn(t)
|
||||
defer cleanup()
|
||||
|
||||
// Insert test data
|
||||
addr := model.Address{
|
||||
Country: "US",
|
||||
Created: time.Now(),
|
||||
Location: geom.NewPoint(geom.XY).MustSetCoords(geom.Coord{-122.4, 37.8}),
|
||||
H3cell: "test",
|
||||
Locality: "San Francisco",
|
||||
PostalCode: "94102",
|
||||
Street: "Market St",
|
||||
Unit: "",
|
||||
Region: "CA",
|
||||
Number: "1234",
|
||||
Gid: "test-gid-001",
|
||||
}
|
||||
inserted, err := query.AddressInsert(ctx, txn, addr)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Query by GID
|
||||
result, err := query.AddressFromGID(ctx, txn, "test-gid-001")
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, result)
|
||||
assert.Equal(t, inserted.ID, result.ID)
|
||||
assert.Equal(t, "US", result.Country)
|
||||
assert.Equal(t, "San Francisco", result.Locality)
|
||||
}
|
||||
|
||||
func TestAddressesFromIDs(t *testing.T) {
|
||||
dbtest.Setup(t)
|
||||
ctx, txn, cleanup := dbtest.Txn(t)
|
||||
defer cleanup()
|
||||
|
||||
// Insert two addresses
|
||||
a1, _ := query.AddressInsert(ctx, txn, model.Address{
|
||||
Created: time.Now(), Location: geom.NewPoint(geom.XY).MustSetCoords(geom.Coord{0, 0}),
|
||||
H3cell: "a", Gid: "gid-a",
|
||||
})
|
||||
a2, _ := query.AddressInsert(ctx, txn, model.Address{
|
||||
Created: time.Now(), Location: geom.NewPoint(geom.XY).MustSetCoords(geom.Coord{0, 1}),
|
||||
H3cell: "b", Gid: "gid-b",
|
||||
})
|
||||
|
||||
// Query by IDs
|
||||
results, err := query.AddressesFromIDs(ctx, txn, []int64{int64(a1.ID), int64(a2.ID)})
|
||||
require.NoError(t, err)
|
||||
assert.Len(t, results, 2)
|
||||
|
||||
// Empty input returns empty output
|
||||
empty, err := query.AddressesFromIDs(ctx, txn, []int64{})
|
||||
require.NoError(t, err)
|
||||
assert.Empty(t, empty)
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 3: UPDATE Functions
|
||||
|
||||
Verify that updates modify rows correctly and respect predicates.
|
||||
|
||||
### 3.1 Update functions
|
||||
|
||||
| File | Function | Test Strategy |
|
||||
|------|----------|---------------|
|
||||
| `communication.go` | `CommunicationSetStatus` | Insert communication → update status → verify via SELECT |
|
||||
| `report.go` | `ReportUpdater.Execute` | Insert report → apply updater → verify |
|
||||
|
||||
### Example test: `db/query/public/communication_test.go`
|
||||
|
||||
```go
|
||||
func TestCommunicationSetStatus(t *testing.T) {
|
||||
dbtest.Setup(t)
|
||||
ctx, txn, cleanup := dbtest.Txn(t)
|
||||
defer cleanup()
|
||||
|
||||
// Insert
|
||||
comm, err := query.CommunicationInsert(ctx, txn, model.Communication{
|
||||
Created: time.Now(),
|
||||
OrganizationID: 1,
|
||||
Status: model.Communicationstatus_New,
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
// Update status
|
||||
err = query.CommunicationSetStatus(ctx, txn,
|
||||
int64(comm.OrganizationID), int64(comm.ID),
|
||||
model.Communicationstatus_Closed)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify the update via a SELECT in the same transaction
|
||||
// (CommunicationFromID accepts db.Ex after CLEANUP.md §13a conversion)
|
||||
updated, err := query.CommunicationFromID(ctx, txn, int64(comm.ID))
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, model.Communicationstatus_Closed, updated.Status)
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Phase 4: ArcGIS Query Package
|
||||
|
||||
After the `bob.Tx` → `db.Ex` conversion (CLEANUP.md §13c), the arcgis query functions use the same transaction pattern as the other packages.
|
||||
|
||||
### 4.1 INSERT functions
|
||||
|
||||
| File | Function | Notes |
|
||||
|------|----------|-------|
|
||||
| `account.go` | `AccountInsert` | After 13c: uses `ExecuteOneTx` |
|
||||
| `oauth.go` | `OAuthTokenInsert` | After 13a: accepts `txn db.Ex` |
|
||||
| `service_feature.go` | `ServiceFeatureInsert` | After 13c: uses `ExecuteOneTx` |
|
||||
| `service_map.go` | `ServiceMapInsert` | After 13c: uses `ExecuteOneTx` |
|
||||
| `user.go` | `UserInsert` | After 13c: uses `ExecuteOneTx` |
|
||||
| `user_privileges.go` | `UserPrivilegeInsert` | After 13c: uses `ExecuteOneTx` |
|
||||
|
||||
### 4.2 SELECT functions
|
||||
|
||||
| File | Function | Notes |
|
||||
|------|----------|-------|
|
||||
| `account.go` | `AccountFromID` | After 13a: accepts `txn db.Ex` |
|
||||
| `oauth.go` | `OAuthTokenFromID`, `OAuthTokenForUser`, `OAuthTokensForUser`, `OAuthTokensValid`, `OAuthTokenForUserExists` | After 13a |
|
||||
| `service_feature.go` | `ServiceFeatureFromID`, `ServiceFeatureFromURL` | After 13a |
|
||||
| `service_map.go` | `ServiceMapFromID`, `ServiceMapsFromAccountID` | After 13a |
|
||||
|
||||
### 4.3 UPDATE/DELETE functions
|
||||
|
||||
| File | Function | Notes |
|
||||
|------|----------|-------|
|
||||
| `oauth.go` | `OAuthTokenUpdateAccessToken`, `OAuthTokenUpdateRefreshToken`, `OAuthTokenUpdateLicense`, `OAuthTokenInvalidate` | After 13a |
|
||||
| `user_privileges.go` | `UserPrivilegesDeleteByUserID` | After 13c |
|
||||
|
||||
---
|
||||
|
||||
## Phase 5: Edge Cases and Error Handling
|
||||
|
||||
### 5.1 Empty bulk operations
|
||||
|
||||
Functions like `AddressesFromIDs` and `ReportImagesInsert` already handle empty input slices gracefully. Write tests confirming:
|
||||
- Empty input → non-nil empty slice, no error.
|
||||
- Nil input → handled gracefully (or skipped with `t.Skip` if the function panics).
|
||||
|
||||
### 5.2 Unique constraint violations
|
||||
|
||||
Insert two rows with the same unique key; verify the error message is readable.
|
||||
|
||||
### 5.3 Foreign key violations
|
||||
|
||||
Insert a row referencing a non-existent parent; verify the error. This validates that FK constraints are correctly defined in the schema.
|
||||
|
||||
### 5.4 Not found
|
||||
|
||||
Functions returning `(*model.Xxx, error)` should return `nil, nil` on not-found (pattern already used by `ReportFromPublicID` and `AddressFromGID`). Test this behavior.
|
||||
|
||||
### 5.5 NULL handling
|
||||
|
||||
Models with nullable fields (`*int32`, `*string`, `*time.Time`, `*geom.T`): test with nil and non-nil values to verify round-trip fidelity.
|
||||
|
||||
---
|
||||
|
||||
## Test Execution
|
||||
|
||||
### Local development
|
||||
|
||||
```bash
|
||||
# Set up a test database (one time)
|
||||
createdb nidus-sync-test
|
||||
|
||||
# Run the query-layer tests
|
||||
TEST_POSTGRES_DSN="postgresql://?host=/var/run/postgresql&dbname=nidus-sync-test" \
|
||||
go test ./db/query/... -v -count=1
|
||||
|
||||
# Run all tests (skips DB tests if no DSN set)
|
||||
go test ./... -v -count=1
|
||||
```
|
||||
|
||||
### CI (GitHub Actions example)
|
||||
|
||||
```yaml
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16
|
||||
env:
|
||||
POSTGRES_DB: nidus-sync-test
|
||||
POSTGRES_PASSWORD: password
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
steps:
|
||||
- name: Test query layer
|
||||
run: |
|
||||
go test ./db/query/... -v -count=1
|
||||
env:
|
||||
TEST_POSTGRES_DSN: "postgresql://postgres:password@localhost:5432/nidus-sync-test"
|
||||
```
|
||||
|
||||
### Using test databases in sequence vs parallel
|
||||
|
||||
- All Phase 1 INSERT tests can run in parallel (they use separate transactions on separate tables).
|
||||
- All SELECT tests for the same table should run sequentially to avoid ID conflicts.
|
||||
- Per-package `TestMain` can handle `db.InitializeDatabase` once, then run all tests.
|
||||
|
||||
**Recommended approach**: Run all tests sequentially within each package (Go's default), using `-count=1` to disable caching. Each test starts its own transaction, so there's no data leakage even running sequentially.
|
||||
|
||||
---
|
||||
|
||||
## File-by-File Implementation Order
|
||||
|
||||
### Step 1: Infrastructure (1 file)
|
||||
|
||||
| File | Purpose |
|
||||
|------|---------|
|
||||
| `db/dbtest/dbtest.go` | Test helper: pool setup, migration runner, transaction factory |
|
||||
|
||||
### Step 2: `go.mod` change (1 line)
|
||||
|
||||
Promote `github.com/stretchr/testify` to direct dependency.
|
||||
|
||||
### Step 3: INSERT tests (8 test files)
|
||||
|
||||
| Test File | Query File Tested | Functions Covered |
|
||||
|-----------|------------------|-------------------|
|
||||
| `db/query/public/communication_test.go` | `communication.go` + `communication_log_entry.go` | `CommunicationInsert`, `CommunicationLogEntryInsert`, `CommunicationSetStatus`, `CommunicationFromID`, `CommunicationsFromOrganization` |
|
||||
| `db/query/public/address_test.go` | `address.go` | `AddressInsert`, `AddressesFromIDs`, `AddressFromGID`, `AddressFromID`, `AddressFromComplianceReportRequestID` |
|
||||
| `db/query/public/site_test.go` | `site.go` | `SiteFromAddressIDForOrg`, `SiteFromIDForOrg` |
|
||||
| `db/query/public/lead_test.go` | `lead.go` | `LeadInsert` |
|
||||
| `db/query/public/signal_test.go` | `signal.go` | `SignalInsert` |
|
||||
| `db/query/public/compliance_report_request_test.go` | `compliance_report_request.go` | `ComplianceReportRequestInsert` |
|
||||
| `db/query/public/feature_test.go` | `feature.go` + `feature_pool.go` | `FeaturesFromSiteID`, `FeaturePoolsFromFeatures`, `FeaturesFromSiteIDs` |
|
||||
| `db/query/publicreport/report_test.go` | `report.go` | `ReportInsert`, `ReportFromPublicID`, `ReportFromPublicIDForOrg`, `ReportFromID`, `ReportsFromIDs`, `ReportsFromIDsForOrg`, `ReportsUnreviewedForOrganization` |
|
||||
|
||||
### Step 4: Remaining query packages (4 test files)
|
||||
|
||||
| Test File | Query File Tested | Functions Covered |
|
||||
|-----------|------------------|-------------------|
|
||||
| `db/query/publicreport/compliance_test.go` | `compliance.go` | `ComplianceInsert`, `ComplianceFromID` |
|
||||
| `db/query/publicreport/image_test.go` | `image.go` + `image_exif.go` + `report_image.go` | All image insert functions |
|
||||
| `db/query/publicreport/nuisance_water_test.go` | `nuisance.go` + `water.go` + `report_log.go` | `NuisanceInsert`, `WaterInsert`, `ReportLogInsert` |
|
||||
| `db/query/arcgis/arcgis_test.go` | `account.go` + `oauth.go` + `service_feature.go` + `service_map.go` + `user.go` | All arcgis query functions (after 13a + 13c conversions) |
|
||||
|
||||
---
|
||||
|
||||
## Model Foreign Key Dependency Graph
|
||||
|
||||
Understanding which inserts require which parent rows helps plan test setup:
|
||||
|
||||
```
|
||||
organization ─────────────────────────────────────────────┐
|
||||
│ │
|
||||
├── communication ── communication_log_entry │
|
||||
├── site ── feature ── feature_pool │
|
||||
│ │ │
|
||||
│ ├── signal (site_id, location) │
|
||||
│ └── lead (site_id) ── compliance_report_request │
|
||||
│ │
|
||||
└── publicreport.report ── report_log │
|
||||
├── report_image │
|
||||
├── compliance (report_id) │
|
||||
├── nuisance (report_id) │
|
||||
└── water (report_id) │
|
||||
```
|
||||
|
||||
For initial INSERT tests, we need at minimum a test `organization` row. The `dbtest.Setup` function can optionally seed this.
|
||||
|
||||
### Seeding approach
|
||||
|
||||
Option A — Seed in `dbtest.Setup()`: insert a minimal org row (id=1) during migration/setup so all tests have a valid FK target.
|
||||
Option B — Each test creates its own dependency rows within the transaction.
|
||||
|
||||
**Recommendation**: Option B for now (each test is self-contained). The overhead is low and tests remain independent. If organization-schema evolves and gets more columns, we can add a helper:
|
||||
|
||||
```go
|
||||
func SeedOrganization(ctx context.Context, txn db.Ex) (int32, error) {
|
||||
// Insert a minimal org row
|
||||
}
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## What Is NOT Covered (yet)
|
||||
|
||||
| Area | Reason |
|
||||
|------|--------|
|
||||
| `db/prepared.go` param builders | Scheduled for removal (per project owner) |
|
||||
| Platform layer (`platform/*.go`) | Separate plan — these call query functions; test them after query layer is solid |
|
||||
| HTTP handlers (`api/`, `resource/`) | Need HTTP test infrastructure (httptest) |
|
||||
| Bob ORM-generated models (`db/models/`) | Legacy ORM; query tests cover the Jet layer which is the migration target |
|
||||
| `db/fieldseeker.go` | Entirely commented out |
|
||||
| `db/connection.go` `Execute*` helpers | Covered transitively by query tests; direct tests would be lower priority |
|
||||
| Vue/TypeScript frontend | Separate test effort (Vitest) |
|
||||
|
||||
---
|
||||
|
||||
## Success Criteria
|
||||
|
||||
After all phases complete:
|
||||
|
||||
1. **Signature normalization (CLEANUP.md §13)**: every query function has `(ctx context.Context, txn db.Ex, ...)` signature. No function uses the global pool internally.
|
||||
2. **Every exported function in `db/query/public/`**, `db/query/publicreport/`, and `db/query/arcgis/` has at least one transaction-based test.
|
||||
3. **INSERT functions**: verify returned model has auto-generated ID and correct typed fields.
|
||||
4. **SELECT functions**: verify round-trip (insert → query → match) within the same transaction.
|
||||
5. **UPDATE functions**: verify modification takes effect, verifiable via SELECT in same transaction.
|
||||
6. **Edge cases**: empty input slices, not-found returns `nil`/error, FK/unique violations produce errors, NULL round-trips.
|
||||
7. **CI green**: tests pass in CI with a Postgres service container.
|
||||
319
api/api.go
319
api/api.go
|
|
@ -2,87 +2,76 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/queue"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/userfile"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/render"
|
||||
"github.com/google/uuid"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/resource"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/version"
|
||||
//"github.com/gorilla/mux"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func apiAudioPost(w http.ResponseWriter, r *http.Request, u *models.User) {
|
||||
id := chi.URLParam(r, "uuid")
|
||||
noteUUID, err := uuid.Parse(id)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to decode the uuid", http.StatusBadRequest)
|
||||
return
|
||||
/*
|
||||
type renderer struct {
|
||||
}
|
||||
func (ren *renderer) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
return nil
|
||||
}
|
||||
*/
|
||||
// In the best case scenario, the excellent github.com/pkg/errors package
|
||||
// helps reveal information on the error, setting it on Err, and in the Render()
|
||||
// method, using it to set the application-specific error code in AppCode.
|
||||
// ResponseErr is the JSON error envelope rendered to API clients.
// The Error and HTTPStatusCode fields are server-side only (tagged
// `json:"-"`); only StatusText, AppCode and ErrorText are serialized.
type ResponseErr struct {
	Error          error `json:"-"` // low-level runtime error
	HTTPStatusCode int   `json:"-"` // http response status code

	StatusText string `json:"status"`          // user-level status message
	AppCode    int64  `json:"code,omitempty"`  // application-specific error code
	ErrorText  string `json:"error,omitempty"` // application-level error message, for debugging
}
|
||||
|
||||
var payload NoteAudioPayload
|
||||
body, err := ioutil.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to read the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if err := json.Unmarshal(body, &payload); err != nil {
|
||||
debugSaveRequest(body, err, "Audio note POST JSON decode error")
|
||||
http.Error(w, "Failed to decode the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
setter := models.NoteAudioSetter{
|
||||
Created: omit.From(payload.Created),
|
||||
CreatorID: omit.From(u.ID),
|
||||
Deleted: omitnull.FromPtr(payload.Deleted),
|
||||
DeletorID: omitnull.FromPtr(payload.DeletorID),
|
||||
Duration: omit.From(payload.Duration),
|
||||
Transcription: omitnull.FromPtr(payload.Transcription),
|
||||
TranscriptionUserEdited: omit.From(payload.TranscriptionUserEdited),
|
||||
Version: omit.From(payload.Version),
|
||||
UUID: omit.From(noteUUID),
|
||||
}
|
||||
if err := db.NoteAudioCreate(context.Background(), u.R.Organization, u.ID, setter); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusAccepted)
|
||||
func (e *ResponseErr) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
http.Error(w, e.StatusText, e.HTTPStatusCode)
|
||||
return nil
|
||||
}
|
||||
|
||||
func apiAudioContentPost(w http.ResponseWriter, r *http.Request, u *models.User) {
|
||||
u_str := chi.URLParam(r, "uuid")
|
||||
audioUUID, err := uuid.Parse(u_str)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
|
||||
return
|
||||
func errRender(err error) *ResponseErr {
|
||||
log.Error().Err(err).Msg("Rendering error")
|
||||
return &ResponseErr{
|
||||
Error: err,
|
||||
HTTPStatusCode: 500,
|
||||
StatusText: "Error rendering response",
|
||||
ErrorText: err.Error(),
|
||||
}
|
||||
err = userfile.AudioFileContentWrite(audioUUID, r.Body)
|
||||
if err != nil {
|
||||
log.Printf("Failed to write content file: %v", err)
|
||||
http.Error(w, "failed to write content file", http.StatusInternalServerError)
|
||||
}
|
||||
|
||||
queue.EnqueueAudioJob(queue.AudioJob{AudioUUID: audioUUID})
|
||||
w.WriteHeader(http.StatusOK)
|
||||
// Renderable is the minimal contract for a response object that can
// write itself to an HTTP response.
// NOTE(review): appears to be a local stand-in for render.Renderer —
// confirm against the render-package migration in this changeset.
type Renderable interface {
	Render(http.ResponseWriter, *http.Request) error
}
|
||||
|
||||
func handleClientIos(w http.ResponseWriter, r *http.Request, u *models.User) {
|
||||
func renderShim(w http.ResponseWriter, r *http.Request, renderer Renderable) error {
|
||||
return renderer.Render(w, r)
|
||||
}
|
||||
// renderList is a placeholder replacement for render.RenderList.
// NOTE(review): it currently discards data and always returns nil —
// nothing is written to the response. Confirm this stub is intentional
// pending the render-package removal.
func renderList(w http.ResponseWriter, r *http.Request, data []Renderable) error {
	return nil
}
|
||||
func handleClientIos(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
var sinceStr string
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(fmt.Errorf("Failed to parse GET form: %w", err)))
|
||||
err = renderShim(w, r, errRender(fmt.Errorf("Failed to parse GET form: %w", err)))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
} else {
|
||||
sinceStr = r.FormValue("since")
|
||||
|
|
@ -94,14 +83,20 @@ func handleClientIos(w http.ResponseWriter, r *http.Request, u *models.User) {
|
|||
} else {
|
||||
since, err = parseTime(sinceStr)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(fmt.Errorf("Failed to parse 'since' value: %w", err)))
|
||||
err = renderShim(w, r, errRender(fmt.Errorf("Failed to parse 'since' value: %w", err)))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
csync, err := platform.ContentClientIos(r.Context(), u, since)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -115,68 +110,22 @@ func handleClientIos(w http.ResponseWriter, r *http.Request, u *models.User) {
|
|||
Fieldseeker: toResponseFieldseeker(csync.Fieldseeker),
|
||||
Since: since_used,
|
||||
}
|
||||
if err := render.Render(w, r, response); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
if err := renderShim(w, r, response); err != nil {
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func apiImagePost(w http.ResponseWriter, r *http.Request, u *models.User) {
|
||||
id := chi.URLParam(r, "uuid")
|
||||
noteUUID, err := uuid.Parse(id)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to decode the uuid", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var payload NoteImagePayload
|
||||
body, err := ioutil.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to read the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if err := json.Unmarshal(body, &payload); err != nil {
|
||||
debugSaveRequest(body, err, "Image note POST JSON decode error")
|
||||
http.Error(w, "Failed to decode the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
setter := models.NoteImageSetter{
|
||||
Created: omit.From(payload.Created),
|
||||
CreatorID: omit.From(u.ID),
|
||||
Deleted: omitnull.FromPtr(payload.Deleted),
|
||||
DeletorID: omitnull.FromPtr(payload.DeletorID),
|
||||
Version: omit.From(payload.Version),
|
||||
UUID: omit.From(noteUUID),
|
||||
}
|
||||
err = db.NoteImageCreate(context.Background(), u.R.Organization, u.ID, setter)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusAccepted)
|
||||
}
|
||||
|
||||
func apiImageContentPost(w http.ResponseWriter, r *http.Request, u *models.User) {
|
||||
u_str := chi.URLParam(r, "uuid")
|
||||
imageUUID, err := uuid.Parse(u_str)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to parse image UUID")
|
||||
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
|
||||
}
|
||||
err = userfile.ImageFileContentWrite(imageUUID, r.Body)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusOK)
|
||||
log.Printf("Saved image file %s\n", imageUUID)
|
||||
fmt.Fprintf(w, "PNG uploaded successfully")
|
||||
}
|
||||
|
||||
func apiMosquitoSource(w http.ResponseWriter, r *http.Request, u *models.User) {
|
||||
func apiMosquitoSource(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
bounds, err := parseBounds(r)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -185,23 +134,32 @@ func apiMosquitoSource(w http.ResponseWriter, r *http.Request, u *models.User) {
|
|||
query.Limit = 100
|
||||
sources, err := platform.MosquitoSourceQuery()
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
data := []render.Renderer{}
|
||||
data := []Renderable{}
|
||||
for _, s := range sources {
|
||||
data = append(data, NewResponseMosquitoSource(s))
|
||||
}
|
||||
if err := render.RenderList(w, r, data); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
if err := renderList(w, r, data); err != nil {
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func apiTrapData(w http.ResponseWriter, r *http.Request, u *models.User) {
|
||||
func apiTrapData(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
bounds, err := parseBounds(r)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -210,40 +168,22 @@ func apiTrapData(w http.ResponseWriter, r *http.Request, u *models.User) {
|
|||
query.Limit = 100
|
||||
trap_data, err := platform.TrapDataQuery()
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
data := []render.Renderer{}
|
||||
data := []Renderable{}
|
||||
for _, td := range trap_data {
|
||||
data = append(data, NewResponseTrapDatum(td))
|
||||
}
|
||||
if err := render.RenderList(w, r, data); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
}
|
||||
}
|
||||
|
||||
func apiServiceRequest(w http.ResponseWriter, r *http.Request, u *models.User) {
|
||||
bounds, err := parseBounds(r)
|
||||
if err := renderList(w, r, data); err != nil {
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
return
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
query := db.NewGeoQuery()
|
||||
query.Bounds = *bounds
|
||||
query.Limit = 100
|
||||
requests, err := platform.ServiceRequestQuery()
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
return
|
||||
}
|
||||
|
||||
data := []render.Renderer{}
|
||||
for _, sr := range requests {
|
||||
data = append(data, NewResponseServiceRequest(sr))
|
||||
}
|
||||
if err := render.RenderList(w, r, data); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -284,16 +224,6 @@ func parseBounds(r *http.Request) (*db.GeoBounds, error) {
|
|||
return &bounds, nil
|
||||
}
|
||||
|
||||
func errRender(err error) render.Renderer {
|
||||
log.Error().Err(err).Msg("Rendering error")
|
||||
return &ResponseErr{
|
||||
Error: err,
|
||||
HTTPStatusCode: 500,
|
||||
StatusText: "Error rendering response",
|
||||
ErrorText: err.Error(),
|
||||
}
|
||||
}
|
||||
|
||||
func webhookFieldseeker(w http.ResponseWriter, r *http.Request) {
|
||||
// Create or open the log file
|
||||
file, err := os.OpenFile("webhook/request.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
|
||||
|
|
@ -302,37 +232,68 @@ func webhookFieldseeker(w http.ResponseWriter, r *http.Request) {
|
|||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
defer lint.LogOnErr(file.Close, "close request log")
|
||||
|
||||
// Write timestamp
|
||||
timestamp := time.Now().Format("2006-01-02 15:04:05")
|
||||
fmt.Fprintf(file, "\n=== Request logged at %s ===\n", timestamp)
|
||||
_, err = fmt.Fprintf(file, "\n=== Request logged at %s ===\n", timestamp)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Write request line
|
||||
fmt.Fprintf(file, "%s %s %s\n", r.Method, r.RequestURI, r.Proto)
|
||||
_, err = fmt.Fprintf(file, "%s %s %s\n", r.Method, r.RequestURI, r.Proto)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Write all headers
|
||||
fmt.Fprintf(file, "\nHeaders:\n")
|
||||
_, err = fmt.Fprintf(file, "\nHeaders:\n")
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
for name, values := range r.Header {
|
||||
for _, value := range values {
|
||||
fmt.Fprintf(file, "%s: %s\n", name, value)
|
||||
lint.Fprintf(file, "%s: %s\n", name, value)
|
||||
}
|
||||
}
|
||||
|
||||
// Write body
|
||||
fmt.Fprintf(file, "\nBody:\n")
|
||||
_, err = fmt.Fprintf(file, "\nBody:\n")
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
log.Printf("Error reading request body: %v", err)
|
||||
fmt.Fprintf(file, "Error reading body: %v\n", err)
|
||||
_, err = fmt.Fprintf(file, "Error reading body: %v\n", err)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
file.Write(body)
|
||||
_, err = file.Write(body)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if len(body) == 0 {
|
||||
fmt.Fprintf(file, "(empty body)")
|
||||
lint.Fprintf(file, "(empty body)")
|
||||
}
|
||||
}
|
||||
|
||||
fmt.Fprintf(file, "\n=== End of request ===\n\n")
|
||||
lint.Fprintf(file, "\n=== End of request ===\n\n")
|
||||
|
||||
// Extract the crc_token value for the signature portion
|
||||
|
||||
|
|
@ -348,3 +309,27 @@ func parseTime(x string) (*time.Time, error) {
|
|||
created := time.UnixMilli(created_epoch)
|
||||
return &created, nil
|
||||
}
|
||||
|
||||
// about is the JSON payload served by the root endpoint, describing this
// deployment: environment name, frontend Sentry DSN, Tegola tile-server
// URLs and build version information.
type about struct {
	Environment string              `json:"environment"`
	SentryDSN   string              `json:"sentry_dsn"`
	Tegola      tegolaURLs          `json:"tegola"`
	Version     version.VersionInfo `json:"version"`
}
|
||||
// tegolaURLs holds the Tegola tile-server URL templates for the two map
// layers exposed to the frontend.
type tegolaURLs struct {
	Nidus string `json:"nidus"`
	RMO   string `json:"rmo"`
}
|
||||
|
||||
func getRoot(ctx context.Context, r *http.Request, q resource.QueryParams) (*about, *nhttp.ErrorWithStatus) {
|
||||
v := version.Get()
|
||||
return &about{
|
||||
Environment: config.Environment,
|
||||
SentryDSN: config.SentryDSNFrontend,
|
||||
Tegola: tegolaURLs{
|
||||
Nidus: config.MakeURLTegola("/maps/nidus/{z}/{x}/{y}?id={organization_id}"),
|
||||
RMO: config.MakeURLTegola("/maps/rmo/{z}/{x}/{y}"),
|
||||
},
|
||||
Version: v,
|
||||
}, nil
|
||||
}
|
||||
|
|
|
|||
96
api/audio.go
Normal file
96
api/audio.go
Normal file
|
|
@ -0,0 +1,96 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func apiAudioPost(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
vars := mux.Vars(r)
|
||||
id := vars["uuid"]
|
||||
noteUUID, err := uuid.Parse(id)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to decode the uuid", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var payload NoteAudioPayload
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to read the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if err := json.Unmarshal(body, &payload); err != nil {
|
||||
//debugSaveRequest(body, err, "Audio note POST JSON decode error")
|
||||
http.Error(w, "Failed to decode the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
setter := models.NoteAudioSetter{
|
||||
Created: omit.From(payload.Created),
|
||||
CreatorID: omit.From(int32(u.ID)),
|
||||
Deleted: omitnull.FromPtr(payload.Deleted),
|
||||
DeletorID: omitnull.FromPtr(payload.DeletorID),
|
||||
Duration: omit.From(payload.Duration),
|
||||
OrganizationID: omit.From(u.Organization.ID),
|
||||
Transcription: omitnull.FromPtr(payload.Transcription),
|
||||
TranscriptionUserEdited: omit.From(payload.TranscriptionUserEdited),
|
||||
Version: omit.From(payload.Version),
|
||||
UUID: omit.From(noteUUID),
|
||||
}
|
||||
if err := platform.NoteAudioCreate(ctx, u, setter); err != nil {
|
||||
if err := renderShim(w, r, errRender(err)); err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusAccepted)
|
||||
}
|
||||
|
||||
func apiAudioContentPost(w http.ResponseWriter, r *http.Request, user platform.User) {
|
||||
vars := mux.Vars(r)
|
||||
u_str := vars["uuid"]
|
||||
u, err := uuid.Parse(u_str)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
err = file.FileContentWrite(r.Body, file.CollectionAudioRaw, u)
|
||||
if err != nil {
|
||||
log.Printf("Failed to write content file: %v", err)
|
||||
http.Error(w, "failed to write content file", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
a, err := models.NoteAudios.Query(
|
||||
models.SelectWhere.NoteAudios.UUID.EQ(u),
|
||||
models.SelectWhere.NoteAudios.OrganizationID.EQ(user.Organization.ID),
|
||||
).One(ctx, db.PGInstance.BobDB)
|
||||
if err != nil {
|
||||
log.Printf("Failed to get note audio %s for org %d: %w", u_str, user.Organization.ID, err)
|
||||
http.Error(w, "failed to update database", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
err = background.NewAudioTranscode(ctx, db.PGInstance.BobDB, a.ID)
|
||||
if err != nil {
|
||||
log.Printf("Failed to transcode audio %s for org %d: %w", u_str, user.Organization.ID, err)
|
||||
http.Error(w, "failed to transcode audio", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
1
api/avatar.go
Normal file
1
api/avatar.go
Normal file
|
|
@ -0,0 +1 @@
|
|||
package api
|
||||
1
api/communication.go
Normal file
1
api/communication.go
Normal file
|
|
@ -0,0 +1 @@
|
|||
package api
|
||||
34
api/configuration.go
Normal file
34
api/configuration.go
Normal file
|
|
@ -0,0 +1,34 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// formArcgisConfiguration carries the ArcGIS integration settings posted
// from the configuration form. A nil MapService means the field was not
// submitted with the form.
type formArcgisConfiguration struct {
	MapService *string `schema:"map-service"`
}
|
||||
|
||||
func postConfigurationIntegrationArcgis(ctx context.Context, r *http.Request, u platform.User, f formArcgisConfiguration) (string, *nhttp.ErrorWithStatus) {
|
||||
if f.MapService != nil {
|
||||
_, err := psql.Update(
|
||||
um.Table("organization"),
|
||||
um.SetCol("arcgis_map_service_id").ToArg(f.MapService),
|
||||
um.Where(psql.Quote("id").EQ(psql.Arg(u.Organization.ID))),
|
||||
).Exec(ctx, db.PGInstance.BobDB)
|
||||
if err != nil {
|
||||
return "", nhttp.NewError("Failed to update map service config: %w", err)
|
||||
}
|
||||
log.Info().Str("map-service", *f.MapService).Int32("org-id", u.Organization.ID).Msg("changed map service")
|
||||
} else {
|
||||
log.Info().Msg("no map service")
|
||||
}
|
||||
return "/configuration/integration/arcgis", nil
|
||||
}
|
||||
19
api/debug.go
19
api/debug.go
|
|
@ -1,19 +0,0 @@
|
|||
package api
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func debugSaveRequest(body []byte, err error, message string) {
|
||||
// TODO(eliribble): avoid using a single static filename and instead securely generate
|
||||
// this value
|
||||
log.Error().Err(err).Msg(message)
|
||||
output, err := os.OpenFile("/tmp/request.body", os.O_RDWR|os.O_CREATE, 0666)
|
||||
if err != nil {
|
||||
log.Info().Msg("Failed to open temp request.bady")
|
||||
}
|
||||
defer output.Close()
|
||||
output.Write(body)
|
||||
log.Info().Msg("Wrote request to /tmp/request.body")
|
||||
}
|
||||
39
api/district.go
Normal file
39
api/district.go
Normal file
|
|
@ -0,0 +1,39 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func apiGetDistrictLogo(w http.ResponseWriter, r *http.Request) {
|
||||
vars := mux.Vars(r)
|
||||
slug := vars["slug"]
|
||||
ctx := r.Context()
|
||||
rows, err := models.Organizations.Query(
|
||||
models.SelectWhere.Organizations.Slug.EQ(slug),
|
||||
).All(ctx, db.PGInstance.BobDB)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to query", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
switch len(rows) {
|
||||
case 0:
|
||||
http.Error(w, "Organization not found", http.StatusNotFound)
|
||||
return
|
||||
case 1:
|
||||
org := rows[0]
|
||||
if org.LogoUUID.IsNull() {
|
||||
http.Error(w, "Logo not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
file.ImageFileToWriter(file.CollectionLogo, org.LogoUUID.MustGet(), w)
|
||||
return
|
||||
default:
|
||||
http.Error(w, "Too many organizations, this is a programmer error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
|
@ -1,25 +0,0 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/render"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
)
|
||||
|
||||
// AddRoutes registers the chi API routes. All responses default to JSON
// via the render middleware. Every endpoint except the FieldSeeker
// webhook is wrapped in auth.NewEnsureAuth and so requires an
// authenticated user.
func AddRoutes(r chi.Router) {
	// Authenticated endpoints
	r.Use(render.SetContentType(render.ContentTypeJSON))
	r.Method("GET", "/mosquito-source", auth.NewEnsureAuth(apiMosquitoSource))
	r.Method("GET", "/service-request", auth.NewEnsureAuth(apiServiceRequest))
	r.Method("GET", "/trap-data", auth.NewEnsureAuth(apiTrapData))
	r.Method("GET", "/client/ios", auth.NewEnsureAuth(handleClientIos))
	r.Method("POST", "/audio/{uuid}", auth.NewEnsureAuth(apiAudioPost))
	r.Method("POST", "/audio/{uuid}/content", auth.NewEnsureAuth(apiAudioContentPost))
	r.Method("POST", "/image/{uuid}", auth.NewEnsureAuth(apiImagePost))
	r.Method("POST", "/image/{uuid}/content", auth.NewEnsureAuth(apiImageContentPost))

	// Unauthenticated endpoints
	r.Get("/webhook/fieldseeker", webhookFieldseeker)
	r.Post("/webhook/fieldseeker", webhookFieldseeker)
}
|
||||
169
api/event.go
Normal file
169
api/event.go
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/event"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/version"
|
||||
"github.com/google/uuid"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// connectionsSSE is the set of currently connected SSE clients.
// NOTE(review): this map is written by streamEvents and iterated by the
// SetEventChannel goroutine with no synchronization — that is a data
// race; it should be guarded by a mutex.
var connectionsSSE map[*ConnectionSSE]bool = make(map[*ConnectionSSE]bool, 0)

// TYPE_STATUS is the "type" discriminator used for Status payloads.
// NOTE(review): Go naming convention would be typeStatus/TypeStatus, but
// renaming would touch every use site.
var TYPE_STATUS string = "status"

// ConnectionSSE is the per-client state for one open event stream.
type ConnectionSSE struct {
	chanEvent      chan platform.Event // events queued for delivery to this client
	id             uuid.UUID           // connection identifier, used in logs
	organizationID int32               // owning organization, used for event routing
	userID         int32               // owning user, used for event routing
}

// Message is the JSON body sent for a regular SSE event.
type Message struct {
	Resource string    `json:"resource"`
	Time     time.Time `json:"time"`
	Type     string    `json:"type"`
	URI      string    `json:"uri"`
}

// Status is the JSON body sent for connection/status SSE events; it
// carries the server's build information so clients can detect deploys.
type Status struct {
	BuildTime  time.Time `json:"build_time"`
	IsModified bool      `json:"is_modified"`
	Revision   string    `json:"revision"`
	Status     string    `json:"status"`
	Type       string    `json:"type"`
}
|
||||
|
||||
func (c *ConnectionSSE) SendEvent(w http.ResponseWriter, m platform.Event) error {
|
||||
if m.Type == event.EventTypeShutdown {
|
||||
v := version.Get()
|
||||
return send(w, Status{
|
||||
BuildTime: v.BuildTime,
|
||||
IsModified: v.IsModified,
|
||||
Revision: v.Revision,
|
||||
Status: m.Type.String(),
|
||||
Type: TYPE_STATUS,
|
||||
})
|
||||
}
|
||||
return send(w, Message{
|
||||
Resource: m.Resource,
|
||||
Time: m.Time,
|
||||
Type: m.Type.String(),
|
||||
URI: m.URI,
|
||||
})
|
||||
}
|
||||
func (c *ConnectionSSE) SendHeartbeat(w http.ResponseWriter, t time.Time) error {
|
||||
return send(w, platform.Event{
|
||||
Resource: "clock",
|
||||
Time: t,
|
||||
Type: platform.EventTypeHeartbeat,
|
||||
URI: "",
|
||||
})
|
||||
}
|
||||
// SetEventChannel starts a background goroutine that fans each envelope
// from chan_envelopes out to every connected SSE client whose
// organization (or user) matches. An envelope OrganizationID of 0 is
// treated as a broadcast to all connections.
//
// NOTE(review): connectionsSSE is iterated here while streamEvents adds
// and removes entries from other goroutines without a lock — data race.
// NOTE(review): the unbuffered send on conn.chanEvent blocks the entire
// fan-out loop if a single client stalls — confirm this is acceptable.
func SetEventChannel(chan_envelopes <-chan platform.Envelope) {
	go func() {
		for envelope := range chan_envelopes {
			for conn := range connectionsSSE {
				// Org match (or broadcast) takes precedence over a user match.
				if conn.organizationID == envelope.OrganizationID || envelope.OrganizationID == 0 {
					log.Debug().Int("type", int(envelope.Event.Type)).Int32("env-org", envelope.OrganizationID).Msg("pushed event to client")
					conn.chanEvent <- envelope.Event
				} else if conn.userID == envelope.UserID {
					log.Debug().Int("type", int(envelope.Event.Type)).Int32("env-user", envelope.UserID).Msg("pushed event to user")
					conn.chanEvent <- envelope.Event
				} else {
					log.Debug().Int("type", int(envelope.Event.Type)).Int32("env-org", envelope.OrganizationID).Int32("conn-org", conn.organizationID).Msg("skipped event, bad org")
				}

			}
		}
	}()
}
|
||||
|
||||
func send[T any](w http.ResponseWriter, msg T) error {
|
||||
jsonData, err := json.Marshal(msg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("marshaling json: %w", err)
|
||||
}
|
||||
// Write in SSE format: "data: <json>\n\n"
|
||||
_, err = fmt.Fprintf(w, "data: %s\n\n", jsonData)
|
||||
if err != nil {
|
||||
return fmt.Errorf("writing SSE message: %w", err)
|
||||
}
|
||||
|
||||
w.(http.Flusher).Flush()
|
||||
return nil
|
||||
}
|
||||
func streamEvents(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
// Set headers for SSE
|
||||
w.Header().Set("Content-Type", "text/event-stream")
|
||||
w.Header().Set("Cache-Control", "no-cache")
|
||||
w.Header().Set("Connection", "keep-alive")
|
||||
w.Header().Set("Access-Control-Allow-Origin", "*")
|
||||
|
||||
uid, err := uuid.NewUUID()
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to create uuid")
|
||||
http.Error(w, "failed to create uuid", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
connection := ConnectionSSE{
|
||||
chanEvent: make(chan platform.Event),
|
||||
id: uid,
|
||||
organizationID: u.Organization.ID,
|
||||
userID: int32(u.ID),
|
||||
}
|
||||
connectionsSSE[&connection] = true
|
||||
log.Debug().Int32("org", u.Organization.ID).Int("user", u.ID).Str("id", uid.String()).Msg("connected SSE client")
|
||||
|
||||
// Send an initial connected event
|
||||
v := version.Get()
|
||||
status := Status{
|
||||
BuildTime: v.BuildTime,
|
||||
IsModified: v.IsModified,
|
||||
Revision: v.Revision,
|
||||
Status: "connected",
|
||||
Type: TYPE_STATUS,
|
||||
}
|
||||
body, err := json.Marshal(status)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal connect status")
|
||||
http.Error(w, "failed to marshal connect status", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
lint.Fprintf(w, "data: %s\n\n", body)
|
||||
w.(http.Flusher).Flush()
|
||||
|
||||
// Keep the connection open with a ticker sending periodic events
|
||||
ticker := time.NewTicker(5 * time.Second)
|
||||
defer ticker.Stop()
|
||||
|
||||
// Use a channel to detect when the client disconnects
|
||||
done := r.Context().Done()
|
||||
|
||||
// Keep connection open until client disconnects
|
||||
for {
|
||||
select {
|
||||
case <-done:
|
||||
log.Debug().Int32("org", u.Organization.ID).Int("user", u.ID).Str("id", uid.String()).Msg("Client closed connection")
|
||||
delete(connectionsSSE, &connection)
|
||||
return
|
||||
case t := <-ticker.C:
|
||||
// Send a heartbeat message
|
||||
err = connection.SendHeartbeat(w, t)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to send heartbeat")
|
||||
}
|
||||
case e := <-connection.chanEvent:
|
||||
err = connection.SendEvent(w, e)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to send heartbeat")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
413
api/handler.go
Normal file
413
api/handler.go
Normal file
|
|
@ -0,0 +1,413 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/resource"
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/schema"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// ErrorAPI is the JSON error envelope every API error response uses.
type ErrorAPI struct {
	Message string `json:"message"`
}

// decoder maps url.Values (query strings and form posts) onto request
// structs using their `schema` tags; shared by all handlers in this
// package.
var decoder = schema.NewDecoder()

// Handler signatures adapted by the wrapper constructors below. The
// "Get" variants receive decoded query parameters, "Post"/"Put" variants
// receive a decoded request body, and "Authenticated" variants
// additionally receive the logged-in platform.User.
type handlerBase func(context.Context, http.ResponseWriter, *http.Request) *nhttp.ErrorWithStatus
type handlerBaseAuthenticated func(context.Context, http.ResponseWriter, *http.Request, platform.User) *nhttp.ErrorWithStatus
type handlerFunctionDelete func(context.Context, *http.Request, platform.User) *nhttp.ErrorWithStatus
type handlerFunctionGet[T any] func(context.Context, *http.Request, resource.QueryParams) (*T, *nhttp.ErrorWithStatus)
type handlerFunctionGetAuthenticated[T any] func(context.Context, *http.Request, platform.User, resource.QueryParams) (T, *nhttp.ErrorWithStatus)
type handlerFunctionGetImage func(context.Context, *http.Request, platform.User) (file.Collection, uuid.UUID, *nhttp.ErrorWithStatus)
type handlerFunctionGetSlice[T any] func(context.Context, *http.Request, resource.QueryParams) ([]*T, *nhttp.ErrorWithStatus)
type handlerFunctionGetSliceAuthenticated[T any] func(context.Context, *http.Request, platform.User, resource.QueryParams) ([]T, *nhttp.ErrorWithStatus)
type handlerFunctionPost[RequestType any, ResponseType any] func(context.Context, *http.Request, RequestType) (ResponseType, *nhttp.ErrorWithStatus)
type handlerFunctionPostAuthenticated[RequestType any, ResponseType any] func(context.Context, *http.Request, platform.User, RequestType) (ResponseType, *nhttp.ErrorWithStatus)
type handlerFunctionPostFormMultipart[RequestType any, ResponseType any] func(context.Context, *http.Request, RequestType) (*ResponseType, *nhttp.ErrorWithStatus)
type handlerFunctionPutAuthenticated[RequestType any] func(context.Context, *http.Request, platform.User, RequestType) (string, *nhttp.ErrorWithStatus)
|
||||
|
||||
func authenticatedHandlerBasic(f handlerBaseAuthenticated) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
e := f(ctx, w, r, u)
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
return
|
||||
})
|
||||
}
|
||||
func authenticatedHandlerDelete(f handlerFunctionDelete) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
e := f(ctx, r, u)
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
http.Error(w, "", http.StatusNoContent)
|
||||
return
|
||||
})
|
||||
}
|
||||
|
||||
func authenticatedHandlerGetImage(f handlerFunctionGetImage) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
collection, uid, e := f(ctx, r, u)
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
file.ImageFileToWriter(collection, uid, w)
|
||||
})
|
||||
}
|
||||
|
||||
func authenticatedHandlerJSON[T any](f handlerFunctionGetAuthenticated[T]) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
var body []byte
|
||||
var params resource.QueryParams
|
||||
err := decoder.Decode(¶ms, r.URL.Query())
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("failed to decode query: %w", err))
|
||||
return
|
||||
}
|
||||
resp, e := f(ctx, r, u, params)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
//log.Info().Str("template", template).Err(e).Msg("handler done")
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
body, err = json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
_, err = w.Write(body)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to write json: %w", err))
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func authenticatedHandlerJSONSlice[T any](f handlerFunctionGetSliceAuthenticated[T]) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
var body []byte
|
||||
var params resource.QueryParams
|
||||
err := decoder.Decode(¶ms, r.URL.Query())
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("failed to decode query: %w", err))
|
||||
return
|
||||
}
|
||||
resp, e := f(ctx, r, u, params)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
//log.Info().Str("template", template).Err(e).Msg("handler done")
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
if resp == nil {
|
||||
body, err = json.Marshal([]struct{}{})
|
||||
} else {
|
||||
body, err = json.Marshal(resp)
|
||||
}
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
_, err = w.Write(body)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to write json: %w", err))
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
func authenticatedHandlerJSONPost[RequestType any, ResponseType any](f handlerFunctionPostAuthenticated[RequestType, ResponseType]) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
req, e := parseRequest[RequestType](r)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
resp, e := f(ctx, r, u, *req)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
body, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
_, err = w.Write(body)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to write json: %w", err))
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func authenticatedHandlerJSONPut[RequestType any](f handlerFunctionPutAuthenticated[RequestType]) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
req, e := parseRequest[RequestType](r)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
path, e := f(ctx, r, u, *req)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
if path == "" {
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
return
|
||||
}
|
||||
w.Header().Set("Location", path)
|
||||
http.Redirect(w, r, path, http.StatusCreated)
|
||||
})
|
||||
}
|
||||
func authenticatedHandlerPostMultipart[ResponseType any](f handlerFunctionPostAuthenticated[[]file.Upload, ResponseType], collection file.Collection) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
err := r.ParseMultipartForm(32 << 10) // 32 MB buffer
|
||||
if err != nil {
|
||||
respondError(w, http.StatusBadRequest, "Failed to parse form: %w ", err)
|
||||
return
|
||||
}
|
||||
uploads, err := file.SaveFileUploads(r, collection)
|
||||
if err != nil {
|
||||
respondError(w, http.StatusInternalServerError, "failed to save uploads: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
/*
|
||||
err = decoder.Decode(&content, r.PostForm)
|
||||
if err != nil {
|
||||
respondError(w, http.StatusBadRequest, "Failed to decode form: %w", err)
|
||||
return
|
||||
}
|
||||
*/
|
||||
ctx := r.Context()
|
||||
resp, e := f(ctx, r, u, uploads)
|
||||
if e != nil {
|
||||
http.Error(w, e.Error(), e.Status)
|
||||
return
|
||||
}
|
||||
body, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal json")
|
||||
http.Error(w, "{\"message\": \"failed to marshal json\"}", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
lint.Write(w, body)
|
||||
})
|
||||
}
|
||||
func handlerBasic(f handlerBase) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
e := f(ctx, w, r)
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
func handlerJSON[T any](f handlerFunctionGet[T]) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
var body []byte
|
||||
var params resource.QueryParams
|
||||
err := decoder.Decode(¶ms, r.URL.Query())
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("failed to decode query: %w", err))
|
||||
return
|
||||
}
|
||||
resp, e := f(ctx, r, params)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
//log.Info().Str("template", template).Err(e).Msg("handler done")
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
body, err = json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
lint.Write(w, body)
|
||||
}
|
||||
}
|
||||
func handlerJSONSlice[T any](f handlerFunctionGetSlice[T]) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
var body []byte
|
||||
var params resource.QueryParams
|
||||
err := decoder.Decode(¶ms, r.URL.Query())
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("failed to decode query: %w", err))
|
||||
return
|
||||
}
|
||||
resp, e := f(ctx, r, params)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
//log.Info().Str("template", template).Err(e).Msg("handler done")
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
body, err = json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
lint.Write(w, body)
|
||||
}
|
||||
}
|
||||
|
||||
func handlerJSONPost[RequestType any, ResponseType any](f handlerFunctionPost[RequestType, ResponseType]) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
req, e := parseRequest[RequestType](r)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
resp, e := f(ctx, r, *req)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
body, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
lint.Write(w, body)
|
||||
}
|
||||
}
|
||||
|
||||
func handlerJSONPut[RequestType any, ResponseType any](f handlerFunctionPost[RequestType, ResponseType]) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
req, e := parseRequest[RequestType](r)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
resp, e := f(ctx, r, *req)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
body, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
lint.Write(w, body)
|
||||
}
|
||||
}
|
||||
// handlerFormPost adapts an unauthenticated multipart-form POST handler:
// it parses the multipart form, decodes the form values into a
// RequestType via the shared schema decoder, invokes f, and renders f's
// result as JSON.
func handlerFormPost[RequestType any, ResponseType any](f handlerFunctionPostFormMultipart[RequestType, ResponseType]) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		err := r.ParseMultipartForm(32 << 12) // 128 KiB in-memory buffer (32<<12 bytes, not 128 MB); larger parts spill to temp files
		if err != nil {
			respondErrorStatus(w, nhttp.NewBadRequest("bad form: %w", err))
			return
		}
		var req RequestType
		err = decoder.Decode(&req, r.PostForm)
		if err != nil {
			respondErrorStatus(w, nhttp.NewBadRequest("decode form: %w", err))
			return
		}
		ctx := r.Context()
		resp, e := f(ctx, r, req)
		if e != nil {
			serializeError(w, e)
			return
		}
		body, err := json.Marshal(resp)
		if err != nil {
			respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
			return
		}
		lint.Write(w, body)
	}
}
|
||||
func parseRequest[RequestType any](r *http.Request) (*RequestType, *nhttp.ErrorWithStatus) {
|
||||
var err error
|
||||
var req RequestType
|
||||
content_type := r.Header.Get("Content-Type")
|
||||
switch content_type {
|
||||
case "application/json":
|
||||
body, e := io.ReadAll(r.Body)
|
||||
if e != nil {
|
||||
return nil, nhttp.NewError("Failed to read body: %w", err)
|
||||
}
|
||||
err = json.Unmarshal(body, &req)
|
||||
case "application/x-www-form-urlencoded":
|
||||
e := r.ParseForm()
|
||||
if err != nil {
|
||||
return nil, nhttp.NewBadRequest("parsing form: %w", e)
|
||||
}
|
||||
err = decoder.Decode(&req, r.PostForm)
|
||||
default:
|
||||
return nil, nhttp.NewBadRequest("unrecognized content type '%s'", content_type)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, nhttp.NewErrorStatus(http.StatusBadRequest, "Failed to decode request: %w", err)
|
||||
}
|
||||
return &req, nil
|
||||
}
|
||||
func serializeError(w http.ResponseWriter, e *nhttp.ErrorWithStatus) {
|
||||
log.Warn().Int("status", e.Status).Err(e).Str("user message", e.Message).Msg("Responding with an error from api")
|
||||
body, err := json.Marshal(ErrorAPI{Message: e.Error()})
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal error")
|
||||
http.Error(w, "{\"message\": \"boom. I can't even tell you what went wrong\"}", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
http.Error(w, string(body), e.Status)
|
||||
return
|
||||
}
|
||||
func respondError(w http.ResponseWriter, status int, format string, args ...any) {
|
||||
outer_err := fmt.Errorf(format, args...)
|
||||
body, err := json.Marshal(ErrorAPI{
|
||||
Message: outer_err.Error(),
|
||||
})
|
||||
if err != nil {
|
||||
http.Error(w, "{\"message\": \"failed to marshal json\"}", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
http.Error(w, string(body), status)
|
||||
}
|
||||
func respondErrorStatus(w http.ResponseWriter, e *nhttp.ErrorWithStatus) {
|
||||
log.Warn().Int("status", e.Status).Err(e).Str("user message", e.Message).Msg("Responding with an error from api")
|
||||
body, err := json.Marshal(ErrorAPI{Message: e.Error()})
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal error")
|
||||
http.Error(w, "{\"message\": \"boom. I can't even tell you what went wrong\"}", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
http.Error(w, string(body), e.Status)
|
||||
}
|
||||
89
api/image.go
Normal file
89
api/image.go
Normal file
|
|
@ -0,0 +1,89 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// apiImagePost creates the database record for a note image identified by
// the {uuid} path variable, from a JSON NoteImagePayload body. The image
// bytes themselves are uploaded separately via apiImageContentPost.
// Responds 202 Accepted on success.
func apiImagePost(w http.ResponseWriter, r *http.Request, u platform.User) {
	vars := mux.Vars(r)
	id := vars["uuid"]
	noteUUID, err := uuid.Parse(id)
	if err != nil {
		http.Error(w, "Failed to decode the uuid", http.StatusBadRequest)
		return
	}

	var payload NoteImagePayload
	// NOTE(review): ioutil.ReadAll is deprecated in favor of io.ReadAll;
	// switching requires adjusting this file's imports.
	body, err := ioutil.ReadAll(r.Body)
	if err != nil {
		http.Error(w, "Failed to read the payload", http.StatusBadRequest)
		return
	}
	if err := json.Unmarshal(body, &payload); err != nil {
		//debugSaveRequest(body, err, "Image note POST JSON decode error")
		http.Error(w, "Failed to decode the payload", http.StatusBadRequest)
		return
	}
	ctx := r.Context()
	// Creator/organization come from the authenticated user, never from
	// the client payload.
	setter := models.NoteImageSetter{
		Created:        omit.From(payload.Created),
		CreatorID:      omit.From(int32(u.ID)),
		Deleted:        omitnull.FromPtr(payload.Deleted),
		DeletorID:      omitnull.FromPtr(payload.DeletorID),
		OrganizationID: omit.From(u.Organization.ID),
		Version:        omit.From(payload.Version),
		UUID:           omit.From(noteUUID),
	}
	err = platform.NoteImageCreate(ctx, u, setter)
	if err != nil {
		if err := renderShim(w, r, errRender(err)); err != nil {
			http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
		}
		return
	}
	w.WriteHeader(http.StatusAccepted)
}
|
||||
|
||||
func apiImageContentGet(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
vars := mux.Vars(r)
|
||||
u_str := vars["uuid"]
|
||||
imageUUID, err := uuid.Parse(u_str)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to parse image UUID")
|
||||
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
|
||||
}
|
||||
file.ImageFileToWriter(file.CollectionPublicImage, imageUUID, w)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
func apiImageContentPost(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
vars := mux.Vars(r)
|
||||
u_str := vars["uuid"]
|
||||
imageUUID, err := uuid.Parse(u_str)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to parse image UUID")
|
||||
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
|
||||
}
|
||||
err = file.ImageFileFromReader(file.CollectionImageRaw, imageUUID, r.Body)
|
||||
if err != nil {
|
||||
if err := renderShim(w, r, errRender(err)); err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusOK)
|
||||
log.Printf("Saved image file %s\n", imageUUID)
|
||||
lint.Fprintf(w, "PNG uploaded successfully")
|
||||
}
|
||||
1
api/lead.go
Normal file
1
api/lead.go
Normal file
|
|
@ -0,0 +1 @@
|
|||
package api
|
||||
46
api/publicreport.go
Normal file
46
api/publicreport.go
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
)
|
||||
|
||||
// formPublicreportSignal is the request body for endpoints that act on a
// single public report by its identifier.
type formPublicreportSignal struct {
	ReportID string `json:"reportID"`
}
|
||||
|
||||
func postPublicreportSignal(ctx context.Context, r *http.Request, user platform.User, req formPublicreportSignal) (string, *nhttp.ErrorWithStatus) {
|
||||
signal_id, err := platform.SignalCreateFromPublicreport(ctx, user, req.ReportID)
|
||||
if err != nil {
|
||||
return "", nhttp.NewError("create signal: %w", err)
|
||||
}
|
||||
return fmt.Sprintf("/signal/%d", *signal_id), nil
|
||||
}
|
||||
|
||||
func postPublicreportInvalid(ctx context.Context, r *http.Request, user platform.User, req formPublicreportSignal) (string, *nhttp.ErrorWithStatus) {
|
||||
err := platform.PublicReportInvalid(ctx, user, req.ReportID)
|
||||
if err != nil {
|
||||
return "", nhttp.NewError("create signal: %w", err)
|
||||
}
|
||||
return fmt.Sprintf("/publicreport/%s", req.ReportID), nil
|
||||
}
|
||||
|
||||
// formPublicreportMessage is the request body for posting a message on a
// public report.
type formPublicreportMessage struct {
	Message  string `json:"message"`
	ReportID string `json:"reportID"`
}
|
||||
|
||||
func postPublicreportMessage(ctx context.Context, r *http.Request, user platform.User, req formPublicreportMessage) (string, *nhttp.ErrorWithStatus) {
|
||||
msg_id, err := platform.PublicReportMessageCreate(ctx, user, req.ReportID, req.Message)
|
||||
if err != nil {
|
||||
return "", nhttp.NewError("failed to create message: %s", err)
|
||||
}
|
||||
if msg_id == nil {
|
||||
return "", nhttp.NewError("nil message id")
|
||||
}
|
||||
return fmt.Sprintf("/message/%d", *msg_id), nil
|
||||
}
|
||||
29
api/review.go
Normal file
29
api/review.go
Normal file
|
|
@ -0,0 +1,29 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
)
|
||||
|
||||
// createReviewPool is the request body for recording a pool review
// decision against a review task.
type createReviewPool struct {
	Status  string               `json:"status"`
	TaskID  int32                `json:"task_id"`
	Updates *platform.PoolUpdate `json:"updates"` // optional field changes to apply alongside the status
}
|
||||
|
||||
func postReviewPool(ctx context.Context, r *http.Request, user platform.User, req createReviewPool) (string, *nhttp.ErrorWithStatus) {
|
||||
id, err := platform.ReviewPoolCreate(ctx, user, req.TaskID, req.Status, req.Updates)
|
||||
|
||||
if err != nil {
|
||||
if errors.As(err, &platform.ErrorNotFound{}) {
|
||||
return "", nhttp.NewErrorStatus(http.StatusNotFound, "review task %d not found", req.TaskID)
|
||||
}
|
||||
return "", nhttp.NewError("failed to set review: %w", err)
|
||||
}
|
||||
return fmt.Sprintf("/review/%d", id), nil
|
||||
}
|
||||
173
api/routes.go
Normal file
173
api/routes.go
Normal file
|
|
@ -0,0 +1,173 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/resource"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func AddRoutesRMO(r *mux.Router) {
|
||||
router := resource.NewRouter(r)
|
||||
|
||||
compliance_request := resource.ComplianceRequest(router)
|
||||
district := resource.District(router)
|
||||
geocode := resource.Geocode(router)
|
||||
nuisance := resource.Nuisance(router)
|
||||
pr_compliance := resource.PublicReportCompliance(router)
|
||||
publicreport := resource.Publicreport(router)
|
||||
publicreport_notification := resource.PublicreportNotification(router)
|
||||
qrcode := resource.QRCode(router)
|
||||
water := resource.Water(router)
|
||||
|
||||
r.HandleFunc("", handlerJSON(getRoot))
|
||||
r.HandleFunc("/compliance-request/image/pool/{public_id}", compliance_request.ImagePoolGet).Methods("GET").Name("compliance-request.image.pool.ByIDGet")
|
||||
r.Handle("/district", handlerJSONSlice(district.List)).Methods("GET")
|
||||
r.Handle("/district/{id}", handlerJSON(district.GetByID)).Methods("GET").Name("district.ByIDGet")
|
||||
r.HandleFunc("/district/{slug}/logo", apiGetDistrictLogo).Methods("GET").Name("district.logo.BySlug")
|
||||
r.Handle("/geocode/by-gid/{id:.*}", handlerJSON(geocode.ByGID)).Methods("GET")
|
||||
r.Handle("/geocode/reverse", handlerJSONPost(geocode.Reverse)).Methods("POST")
|
||||
r.Handle("/geocode/reverse/closest", handlerJSONPost(geocode.ReverseClosest)).Methods("POST")
|
||||
r.Handle("/geocode/suggestion", handlerJSONSlice(geocode.SuggestionList)).Methods("GET")
|
||||
|
||||
r.Handle("/publicreport-notification", handlerJSONPost(publicreport_notification.Create)).Methods("POST")
|
||||
r.Handle("/qr-code/mailer/{code}", handlerBasic(qrcode.Mailer)).Methods("GET")
|
||||
r.Handle("/qr-code/marketing", handlerBasic(qrcode.Marketing)).Methods("GET")
|
||||
r.Handle("/qr-code/report/{code}", handlerBasic(qrcode.Report)).Methods("GET")
|
||||
r.HandleFunc("/rmo/compliance", handlerJSONPost(pr_compliance.Create)).Methods("POST")
|
||||
r.HandleFunc("/rmo/nuisance", handlerFormPost(nuisance.Create)).Methods("POST")
|
||||
r.Handle("/rmo/publicreport/{id}", handlerBasic(publicreport.ByIDPublic)).Methods("GET").Name("publicreport.ByIDGetPublic")
|
||||
r.Handle("/rmo/publicreport/compliance/{id}/image", handlerFormPost(publicreport.ImageCreate)).Methods("POST")
|
||||
r.Handle("/rmo/publicreport/compliance/{id}", handlerJSON(pr_compliance.ByIDPublic)).Methods("GET").Name("publicreport.compliance.ByIDGetPublic")
|
||||
r.Handle("/rmo/publicreport/compliance/{id}", handlerJSONPut(pr_compliance.Update)).Methods("PUT")
|
||||
r.Handle("/rmo/publicreport/nuisance/{id}", handlerJSON(nuisance.ByIDPublic)).Methods("GET").Name("publicreport.nuisance.ByIDGetPublic")
|
||||
r.Handle("/rmo/publicreport/water/{id}", handlerJSON(water.ByIDPublic)).Methods("GET").Name("publicreport.water.ByIDGetPublic")
|
||||
r.Handle("/rmo/publicreport/{id}", handlerBasic(publicreport.ByIDPublic)).Methods("GET").Name("publicreport.ByIDGetPublicPublic")
|
||||
r.HandleFunc("/rmo/water", handlerFormPost(water.Create)).Methods("POST")
|
||||
}
|
||||
func AddRoutesSync(r *mux.Router) {
|
||||
router := resource.NewRouter(r)
|
||||
|
||||
compliance_request := resource.ComplianceRequest(router)
|
||||
district := resource.District(router)
|
||||
geocode := resource.Geocode(router)
|
||||
lob_hook := resource.LobHook(router)
|
||||
nuisance := resource.Nuisance(router)
|
||||
pr_compliance := resource.PublicReportCompliance(router)
|
||||
publicreport := resource.Publicreport(router)
|
||||
publicreport_notification := resource.PublicreportNotification(router)
|
||||
qrcode := resource.QRCode(router)
|
||||
service_request := resource.ServiceRequest(router)
|
||||
water := resource.Water(router)
|
||||
|
||||
//r.Use(render.SetContentType(render.ContentTypeJSON))
|
||||
// Unauthenticated endpoints
|
||||
r.HandleFunc("", handlerJSON(getRoot))
|
||||
r.HandleFunc("/compliance-request/image/pool/{public_id}", compliance_request.ImagePoolGet).Methods("GET").Name("compliance-request.image.pool.ByIDGet")
|
||||
r.Handle("/district", handlerJSONSlice(district.List)).Methods("GET")
|
||||
r.Handle("/district/{id}", handlerJSON(district.GetByID)).Methods("GET").Name("district.ByIDGet")
|
||||
r.HandleFunc("/district/{slug}/logo", apiGetDistrictLogo).Methods("GET").Name("district.logo.BySlug")
|
||||
r.Handle("/geocode/by-gid/{id:.*}", handlerJSON(geocode.ByGID)).Methods("GET")
|
||||
r.Handle("/geocode/reverse", handlerJSONPost(geocode.Reverse)).Methods("POST")
|
||||
r.Handle("/geocode/reverse/closest", handlerJSONPost(geocode.ReverseClosest)).Methods("POST")
|
||||
r.Handle("/geocode/suggestion", handlerJSONSlice(geocode.SuggestionList)).Methods("GET")
|
||||
r.Handle("/lob/event", handlerBasic(lob_hook.Event)).Methods("POST")
|
||||
|
||||
r.Handle("/publicreport-notification", handlerJSONPost(publicreport_notification.Create)).Methods("POST")
|
||||
r.Handle("/qr-code/mailer/{code}", handlerBasic(qrcode.Mailer)).Methods("GET")
|
||||
r.Handle("/qr-code/marketing", handlerBasic(qrcode.Marketing)).Methods("GET")
|
||||
r.Handle("/qr-code/report/{code}", handlerBasic(qrcode.Report)).Methods("GET")
|
||||
r.HandleFunc("/signin", handlerJSONPost(postSignin))
|
||||
r.Handle("/signout", authenticatedHandlerBasic(postSignout))
|
||||
r.HandleFunc("/signup", handlerJSONPost(postSignup))
|
||||
r.HandleFunc("/twilio/call", twilioCallPost).Methods("POST")
|
||||
r.HandleFunc("/twilio/call/status", twilioCallStatusPost).Methods("POST")
|
||||
r.HandleFunc("/twilio/message", twilioMessagePost).Methods("POST")
|
||||
r.HandleFunc("/twilio/text", twilioTextPost).Methods("POST")
|
||||
r.HandleFunc("/twilio/text/status", twilioTextStatusPost).Methods("POST")
|
||||
r.HandleFunc("/voipms/text", voipmsTextGet).Methods("GET")
|
||||
r.HandleFunc("/voipms/text", voipmsTextPost).Methods("POST")
|
||||
r.HandleFunc("/webhook/fieldseeker", webhookFieldseeker).Methods("GET")
|
||||
r.HandleFunc("/webhook/fieldseeker", webhookFieldseeker).Methods("POST")
|
||||
|
||||
// Authenticated endpoints
|
||||
r.Handle("/audio/{uuid}", auth.NewEnsureAuth(apiAudioPost)).Methods("POST")
|
||||
r.Handle("/audio/{uuid}/content", auth.NewEnsureAuth(apiAudioContentPost)).Methods("POST")
|
||||
avatar := resource.Avatar(router)
|
||||
r.Handle("/avatar/{uuid}", authenticatedHandlerGetImage(avatar.ByUUIDGet)).Methods("GET").Name("avatar.ByUUIDGet")
|
||||
r.Handle("/avatar", authenticatedHandlerPostMultipart(avatar.Create, file.CollectionAvatar)).Methods("POST")
|
||||
r.Handle("/client/ios", auth.NewEnsureAuth(handleClientIos)).Methods("GET")
|
||||
communication := resource.Communication(router)
|
||||
r.Handle("/communication", authenticatedHandlerJSONSlice(communication.List)).Methods("GET")
|
||||
r.Handle("/communication/{id}", authenticatedHandlerJSON(communication.Get)).Methods("GET").Name("communication.ByIDGet")
|
||||
r.Handle("/communication/{id}/mark/invalid", authenticatedHandlerJSONPost(communication.MarkInvalid)).Methods("POST").Name("communication.MarkInvalid")
|
||||
r.Handle("/communication/{id}/mark/pending-response", authenticatedHandlerJSONPost(communication.MarkPendingResponse)).Methods("POST").Name("communication.MarkPendingResponse")
|
||||
r.Handle("/communication/{id}/mark/possible-issue", authenticatedHandlerJSONPost(communication.MarkPossibleIssue)).Methods("POST").Name("communication.MarkPossibleIssue")
|
||||
r.Handle("/communication/{id}/mark/possible-resolved", authenticatedHandlerJSONPost(communication.MarkPossibleResolved)).Methods("POST").Name("communication.MarkPossibleResolved")
|
||||
r.Handle("/compliance-request/mailer", authenticatedHandlerJSONPost(compliance_request.CreateMailer)).Methods("POST")
|
||||
//r.HandleFunc("/compliance-request/image/pool/{public_id}", getComplianceRequestImagePool).Methods("GET")
|
||||
r.Handle("/configuration/integration/arcgis", authenticatedHandlerJSONPost(postConfigurationIntegrationArcgis)).Methods("POST")
|
||||
email := resource.Email(router)
|
||||
r.Handle("/email/{id}", authenticatedHandlerJSON(email.Get)).Methods("GET").Name("email.ByIDGet")
|
||||
r.Handle("/events", auth.NewEnsureAuth(streamEvents)).Methods("GET")
|
||||
r.Handle("/image/{uuid}", auth.NewEnsureAuth(apiImagePost)).Methods("POST")
|
||||
r.Handle("/image/{uuid}/content", auth.NewEnsureAuth(apiImageContentGet)).Methods("GET")
|
||||
r.Handle("/image/{uuid}/content", auth.NewEnsureAuth(apiImageContentPost)).Methods("POST")
|
||||
impersonation := resource.Impersonation(router)
|
||||
r.Handle("/impersonation", authenticatedHandlerJSONPost(impersonation.Create)).Methods("POST")
|
||||
r.Handle("/impersonation", authenticatedHandlerDelete(impersonation.Delete)).Methods("DELETE")
|
||||
lead := resource.Lead(r)
|
||||
r.Handle("/leads", authenticatedHandlerJSON(lead.List)).Methods("GET")
|
||||
r.Handle("/leads", authenticatedHandlerJSONPost(lead.Create)).Methods("POST")
|
||||
|
||||
mailer := resource.Mailer(router)
|
||||
r.Handle("/mailer", authenticatedHandlerJSONSlice(mailer.List)).Methods("GET")
|
||||
r.Handle("/mailer/{id}", authenticatedHandlerJSONPost(mailer.ByIDGet)).Methods("GET").Name("mailer.ByIDGet")
|
||||
r.Handle("/mosquito-source", auth.NewEnsureAuth(apiMosquitoSource)).Methods("GET")
|
||||
|
||||
r.Handle("/publicreport/invalid", authenticatedHandlerJSONPost(postPublicreportInvalid)).Methods("POST")
|
||||
r.Handle("/publicreport/signal", authenticatedHandlerJSONPost(postPublicreportSignal)).Methods("POST")
|
||||
r.Handle("/publicreport/message", authenticatedHandlerJSONPost(postPublicreportMessage)).Methods("POST")
|
||||
r.Handle("/publicreport/{id}", authenticatedHandlerBasic(publicreport.ByID)).Methods("GET").Name("publicreport.ByIDGet")
|
||||
r.Handle("/publicreport/compliance/{id}", authenticatedHandlerJSON(pr_compliance.ByID)).Methods("GET").Name("publicreport.compliance.ByIDGet")
|
||||
r.Handle("/publicreport/nuisance/{id}", authenticatedHandlerJSON(nuisance.ByID)).Methods("GET").Name("publicreport.nuisance.ByIDGet")
|
||||
r.Handle("/publicreport/water/{id}", authenticatedHandlerJSON(water.ByID)).Methods("GET").Name("publicreport.water.ByIDGet")
|
||||
|
||||
r.Handle("/publicreport-notification", handlerJSONPost(publicreport_notification.Create)).Methods("POST")
|
||||
r.Handle("/review/pool", authenticatedHandlerJSONPost(postReviewPool)).Methods("POST")
|
||||
review_task := resource.ReviewTask(r)
|
||||
r.Handle("/review-task", authenticatedHandlerJSON(review_task.List)).Methods("GET")
|
||||
r.Handle("/service-request", authenticatedHandlerJSONSlice(service_request.List)).Methods("GET")
|
||||
session := resource.Session(router)
|
||||
r.Handle("/session", authenticatedHandlerJSON(session.Get)).Methods("GET").Name("session.get")
|
||||
signal := resource.Signal(r)
|
||||
r.Handle("/signal", authenticatedHandlerJSON(signal.List)).Methods("GET")
|
||||
site := resource.Site(router)
|
||||
r.Handle("/site", authenticatedHandlerJSONSlice(site.List)).Methods("GET")
|
||||
r.Handle("/site/{id}", authenticatedHandlerJSON(site.ByIDGet)).Methods("GET").Name("site.ByIDGet")
|
||||
sync := resource.Sync(r)
|
||||
r.Handle("/sync", authenticatedHandlerJSONSlice(sync.List)).Methods("GET")
|
||||
r.Handle("/sudo/email", authenticatedHandlerJSONPost(postSudoEmail)).Methods("POST")
|
||||
r.Handle("/sudo/sms", authenticatedHandlerJSONPost(postSudoSMS)).Methods("POST")
|
||||
r.Handle("/sudo/sse", authenticatedHandlerJSONPost(postSudoSSE)).Methods("POST")
|
||||
text := resource.Text(router)
|
||||
r.Handle("/text/{id}", authenticatedHandlerJSON(text.Get)).Methods("GET").Name("text.ByIDGet")
|
||||
r.Handle("/trap-data", auth.NewEnsureAuth(apiTrapData)).Methods("GET")
|
||||
r.Handle("/tile/{z}/{y}/{x}", auth.NewEnsureAuth(getTile)).Methods("GET")
|
||||
upload := resource.Upload(r)
|
||||
r.Handle("/upload/pool/custom", authenticatedHandlerPostMultipart(upload.PoolCustomCreate, file.CollectionCSV)).Methods("POST")
|
||||
r.Handle("/upload/pool/flyover", authenticatedHandlerPostMultipart(upload.PoolFlyoverCreate, file.CollectionCSV)).Methods("POST")
|
||||
r.Handle("/upload", authenticatedHandlerJSON(upload.List)).Methods("GET")
|
||||
r.Handle("/upload/{id}", authenticatedHandlerJSON(upload.ByIDGet)).Methods("GET")
|
||||
r.Handle("/upload/{id}/commit", authenticatedHandlerJSONPost(upload.Commit)).Methods("POST")
|
||||
r.Handle("/upload/{id}/discard", authenticatedHandlerJSONPost(upload.Discard)).Methods("POST")
|
||||
|
||||
user := resource.User(router)
|
||||
r.Handle("/user/self", authenticatedHandlerJSON(user.SelfGet)).Methods("GET")
|
||||
r.Handle("/user/suggestion", authenticatedHandlerJSON(user.SuggestionGet)).Methods("GET")
|
||||
r.Handle("/user", authenticatedHandlerJSONSlice(user.List)).Methods("GET")
|
||||
r.Handle("/user/{id}", authenticatedHandlerJSON(user.ByIDGet)).Methods("GET").Name("user.ByIDGet")
|
||||
r.Handle("/user/{id}", authenticatedHandlerJSONPut(user.ByIDPut)).Methods("PUT")
|
||||
|
||||
// Unauthenticated endpoints
|
||||
}
|
||||
1
api/signal.go
Normal file
1
api/signal.go
Normal file
|
|
@ -0,0 +1 @@
|
|||
package api
|
||||
46
api/signin.go
Normal file
46
api/signin.go
Normal file
|
|
@ -0,0 +1,46 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
type reqSignin struct {
|
||||
Password string `schema:"password"`
|
||||
Username string `schema:"username"`
|
||||
}
|
||||
|
||||
func postSignin(ctx context.Context, r *http.Request, req reqSignin) (string, *nhttp.ErrorWithStatus) {
|
||||
if req.Password == "" {
|
||||
return "", nhttp.NewBadRequest("Empty password")
|
||||
}
|
||||
if req.Username == "" {
|
||||
return "", nhttp.NewBadRequest("Empty username")
|
||||
}
|
||||
log.Info().Str("username", req.Username).Msg("API Signin")
|
||||
_, err := auth.SigninUser(r, req.Username, req.Password)
|
||||
if err != nil {
|
||||
if errors.Is(err, auth.InvalidCredentials{}) {
|
||||
return "", nhttp.NewUnauthorized("invalid credentials")
|
||||
}
|
||||
if errors.Is(err, auth.InvalidUsername{}) {
|
||||
return "", nhttp.NewUnauthorized("invalid credentials")
|
||||
}
|
||||
if errors.Is(err, platform.NoUserError{}) {
|
||||
return "", nhttp.NewUnauthorized("invalid credentials")
|
||||
}
|
||||
log.Error().Err(err).Str("username", req.Username).Msg("Login server error")
|
||||
return "", nhttp.NewError("login server error")
|
||||
}
|
||||
return "/", nil
|
||||
}
|
||||
func postSignout(ctx context.Context, w http.ResponseWriter, r *http.Request, u platform.User) *nhttp.ErrorWithStatus {
|
||||
auth.SignoutUser(r, u)
|
||||
return nil
|
||||
}
|
||||
37
api/signup.go
Normal file
37
api/signup.go
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
type reqSignup struct {
|
||||
Username string `json:"username"`
|
||||
Name string `json:"name"`
|
||||
Password string `json:"password"`
|
||||
Terms bool `json:"terms"`
|
||||
}
|
||||
|
||||
func postSignup(ctx context.Context, r *http.Request, signup reqSignup) (string, *nhttp.ErrorWithStatus) {
|
||||
|
||||
log.Info().Str("username", signup.Username).Str("name", signup.Name).Str("password", strings.Repeat("*", len(signup.Password))).Msg("Signup")
|
||||
|
||||
if !signup.Terms {
|
||||
log.Warn().Msg("Terms not agreed")
|
||||
return "", nhttp.NewErrorStatus(http.StatusBadRequest, "You must agree to the terms to register")
|
||||
}
|
||||
|
||||
user, err := auth.SignupUser(r.Context(), signup.Username, signup.Name, signup.Password)
|
||||
if err != nil {
|
||||
return "", nhttp.NewError("Failed to signup user", err)
|
||||
}
|
||||
|
||||
auth.AddUserSession(ctx, user)
|
||||
|
||||
return "/", nil
|
||||
}
|
||||
84
api/sudo.go
Normal file
84
api/sudo.go
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/comms/email"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/comms/text"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
type FormEmail struct {
|
||||
Body string `schema:"emailBody"`
|
||||
From string `schema:"emailFrom"`
|
||||
Subject string `schema:"emailSubject"`
|
||||
To string `schema:"emailTo"`
|
||||
}
|
||||
|
||||
func postSudoEmail(ctx context.Context, r *http.Request, u platform.User, e FormEmail) (string, *nhttp.ErrorWithStatus) {
|
||||
if !u.HasRoot() {
|
||||
return "", &nhttp.ErrorWithStatus{
|
||||
Message: "You must have sudo powers to do this",
|
||||
Status: http.StatusForbidden,
|
||||
}
|
||||
}
|
||||
request := email.Request{
|
||||
From: e.From,
|
||||
HTML: fmt.Sprintf("<html><p>%s</p></html>", e.Body),
|
||||
Sender: e.From,
|
||||
Subject: e.Subject,
|
||||
To: e.To,
|
||||
Text: e.Body,
|
||||
}
|
||||
resp, err := email.Send(ctx, request)
|
||||
if err != nil {
|
||||
log.Warn().Err(err).Msg("Failed to send email")
|
||||
} else {
|
||||
log.Info().Str("id", resp.ID).Str("to", e.To).Msg("Sent Email")
|
||||
}
|
||||
return "/sudo", nil
|
||||
}
|
||||
|
||||
type FormSMS struct {
|
||||
Message string `schema:"smsMessage"`
|
||||
Phone string `schema:"smsPhone"`
|
||||
}
|
||||
|
||||
func postSudoSMS(ctx context.Context, r *http.Request, u platform.User, sms FormSMS) (string, *nhttp.ErrorWithStatus) {
|
||||
if !u.HasRoot() {
|
||||
return "", &nhttp.ErrorWithStatus{
|
||||
Message: "You must have sudo powers to do this",
|
||||
Status: http.StatusForbidden,
|
||||
}
|
||||
}
|
||||
id, err := text.SendText(ctx, config.VoipMSNumber, sms.Phone, sms.Message)
|
||||
if err != nil {
|
||||
log.Warn().Err(err).Msg("Failed to send SMS")
|
||||
} else {
|
||||
log.Info().Str("id", id).Msg("Sent SMS")
|
||||
}
|
||||
return "/sudo", nil
|
||||
}
|
||||
|
||||
type FormSSE struct {
|
||||
OrganizationID int32 `schema:"organizationID"`
|
||||
Resource string `schema:"resource"`
|
||||
Type string `schema:"type"`
|
||||
URIPath string `schema:"uriPath"`
|
||||
}
|
||||
|
||||
func postSudoSSE(ctx context.Context, r *http.Request, u platform.User, sse FormSSE) (string, *nhttp.ErrorWithStatus) {
|
||||
if !u.HasRoot() {
|
||||
return "", &nhttp.ErrorWithStatus{
|
||||
Message: "You must have sudo powers to do this",
|
||||
Status: http.StatusForbidden,
|
||||
}
|
||||
}
|
||||
platform.SudoEvent(sse.OrganizationID, sse.Resource, sse.Type, sse.URIPath)
|
||||
return "/sudo", nil
|
||||
}
|
||||
38
api/tile.go
Normal file
38
api/tile.go
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/gorilla/mux"
|
||||
//"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func getTile(w http.ResponseWriter, r *http.Request, user platform.User) {
|
||||
vars := mux.Vars(r)
|
||||
x_str := vars["x"]
|
||||
y_str := vars["y"]
|
||||
z_str := vars["z"]
|
||||
|
||||
x, err := strconv.Atoi(x_str)
|
||||
if err != nil {
|
||||
http.Error(w, "can't parse x as an integer", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
y, err := strconv.Atoi(y_str)
|
||||
if err != nil {
|
||||
http.Error(w, "can't parse x as an integer", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
z, err := strconv.Atoi(z_str)
|
||||
if err != nil {
|
||||
http.Error(w, "can't parse x as an integer", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
err = platform.GetTile(r.Context(), w, user.Organization, true, uint(z), uint(y), uint(x))
|
||||
if err != nil {
|
||||
http.Error(w, "failed to do tile", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
}
|
||||
159
api/twilio.go
Normal file
159
api/twilio.go
Normal file
|
|
@ -0,0 +1,159 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/text"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/twilio/twilio-go/twiml"
|
||||
)
|
||||
|
||||
// Translate from Twilio's representation of a RCS message sender to our concept of a phone number
|
||||
// From: rcs:dev_report_mosquitoes_online_dosrvwxm_agent
|
||||
// To: +16235525879
|
||||
func getDst(to string) (string, error) {
|
||||
|
||||
if to == config.TwilioRCSSenderRMO {
|
||||
return config.PhoneNumberReportStr, nil
|
||||
}
|
||||
/*
|
||||
phone, err := models.FindCommsPhone(ctx, db.PGInstance.BobDB, to)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Failed to search for dest phone %s: %w", to, err)
|
||||
}
|
||||
return phone.E164, nil
|
||||
*/
|
||||
return "", fmt.Errorf("Cannot match phone number to '%s'", to)
|
||||
}
|
||||
|
||||
func splitPhoneSource(s string) (string, string) {
|
||||
parts := strings.Split(s, ":")
|
||||
switch len(parts) {
|
||||
case 0:
|
||||
return "this isn't", "possible"
|
||||
case 1:
|
||||
return "", s
|
||||
case 2:
|
||||
return parts[0], parts[1]
|
||||
default:
|
||||
log.Warn().Str("s", s).Msg("Got an incomprehensible number of parts of a phone number")
|
||||
return parts[0], parts[1]
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func twilioMessagePost(w http.ResponseWriter, r *http.Request) {
|
||||
message_sid := r.PostFormValue("MessageSid")
|
||||
log.Info().Str("sid", message_sid).Msg("Twilio Message POST")
|
||||
lint.Fprintf(w, "")
|
||||
}
|
||||
func twilioCallPost(w http.ResponseWriter, r *http.Request) {
|
||||
called := r.PostFormValue("Called")
|
||||
tostate := r.PostFormValue("ToState")
|
||||
callercountry := r.PostFormValue("CallerCountry")
|
||||
direction := r.PostFormValue("Direction")
|
||||
callerstate := r.PostFormValue("CallerState")
|
||||
tozip := r.PostFormValue("ToZip")
|
||||
callsid := r.PostFormValue("CallSid")
|
||||
to := r.PostFormValue("To")
|
||||
callerzip := r.PostFormValue("CallerZip")
|
||||
tocountry := r.PostFormValue("ToCountry")
|
||||
stirverstat := r.PostFormValue("StirVerstat")
|
||||
//calltoken := r.PostFormValue("CallToken")
|
||||
calledzip := r.PostFormValue("CalledZip")
|
||||
apiversion := r.PostFormValue("ApiVersion")
|
||||
calledcity := r.PostFormValue("CalledCity")
|
||||
callstatus := r.PostFormValue("CallStatus")
|
||||
from := r.PostFormValue("From")
|
||||
accountsid := r.PostFormValue("AccountSid")
|
||||
calledcountry := r.PostFormValue("CalledCountry")
|
||||
callercity := r.PostFormValue("CallerCity")
|
||||
tocity := r.PostFormValue("ToCity")
|
||||
fromcountry := r.PostFormValue("FromCountry")
|
||||
caller := r.PostFormValue("Caller")
|
||||
fromcity := r.PostFormValue("FromCity")
|
||||
calledstate := r.PostFormValue("CalledState")
|
||||
fromzip := r.PostFormValue("FromZip")
|
||||
fromstate := r.PostFormValue("FromState")
|
||||
log.Info().Str("called", called).Str("tostate", tostate).Str("callercountry", callercountry).Str("direction", direction).Str("callerstate", callerstate).Str("tozip", tozip).Str("callsid", callsid).Str("to", to).Str("callerzip", callerzip).Str("tocountry", tocountry).Str("stirverstat", stirverstat).Str("calledzip", calledzip).Str("apiversion", apiversion).Str("calledcity", calledcity).Str("callstatus", callstatus).Str("from", from).Str("accountsid", accountsid).Str("calledcountry", calledcountry).Str("callercity", callercity).Str("tocity", tocity).Str("fromcountry", fromcountry).Str("caller", caller).Str("fromcity", fromcity).Str("calledstate", calledstate).Str("fromzip", fromzip).Str("fromstate", fromstate).Msg("Incoming phone call")
|
||||
|
||||
say := &twiml.VoiceSay{
|
||||
Message: "Thanks for calling Report Mosquitoes Online. I'll forward you to our tech support lead, Eli",
|
||||
}
|
||||
call := &twiml.VoiceDial{
|
||||
Number: config.PhoneNumberSupportStr,
|
||||
}
|
||||
twimlResult, err := twiml.Voice([]twiml.Element{say, call})
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to produce TWIML")
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
}
|
||||
w.Header().Set("Content-Type", "text/xml")
|
||||
lint.Fprintf(w, "%s", twimlResult)
|
||||
}
|
||||
|
||||
func twilioCallStatusPost(w http.ResponseWriter, r *http.Request) {
|
||||
call_sid := r.PostFormValue("CallSid")
|
||||
account_sid := r.PostFormValue("AccountSid")
|
||||
from := r.PostFormValue("From")
|
||||
to := r.PostFormValue("To")
|
||||
call_status := r.PostFormValue("CallStatus")
|
||||
api_version := r.PostFormValue("ApiVersion")
|
||||
direction := r.PostFormValue("Direction")
|
||||
forwarded_from := r.PostFormValue("ForwardedFrom")
|
||||
caller_name := r.PostFormValue("CallerName")
|
||||
parent_call_sid := r.PostFormValue("ParentCallSid")
|
||||
log.Info().Str("call_sid", call_sid).Str("account_sid", account_sid).Str("from", from).Str("to", to).Str("call_status", call_status).Str("api_version", api_version).Str("direction", direction).Str("forwarded_from", forwarded_from).Str("caller_name", caller_name).Str("parent_call_sid", parent_call_sid)
|
||||
lint.Fprintf(w, "")
|
||||
}
|
||||
func twilioTextPost(w http.ResponseWriter, r *http.Request) {
|
||||
message_sid := r.PostFormValue("MessageSid")
|
||||
account_sid := r.PostFormValue("AccountSid")
|
||||
messaging_service_sid := r.PostFormValue("MessagingServiceSid")
|
||||
from := r.PostFormValue("From")
|
||||
to_ := r.PostFormValue("To")
|
||||
body := r.PostFormValue("Body")
|
||||
num_media := r.PostFormValue("NumMedia")
|
||||
num_segments := r.PostFormValue("NumSegments")
|
||||
media_content_type0 := r.PostFormValue("MediaContentType0")
|
||||
media_url0 := r.PostFormValue("MediaUrl0")
|
||||
from_city := r.PostFormValue("FromCity")
|
||||
from_state := r.PostFormValue("FromState")
|
||||
from_zip := r.PostFormValue("FromZip")
|
||||
from_country := r.PostFormValue("FromCountry")
|
||||
to_city := r.PostFormValue("ToCity")
|
||||
to_state := r.PostFormValue("ToState")
|
||||
to_zip := r.PostFormValue("ToZip")
|
||||
to_country := r.PostFormValue("ToCountry")
|
||||
type_, src := splitPhoneSource(from)
|
||||
log.Info().Str("message_sid", message_sid).Str("account_sid", account_sid).Str("messaging_service_sid", messaging_service_sid).Str("from", from).Str("to_", to_).Str("body", body).Str("num_media", num_media).Str("num_segments", num_segments).Str("media_content_type0", media_content_type0).Str("media_url0", media_url0).Str("from_city", from_city).Str("from_state", from_state).Str("from_zip", from_zip).Str("from_country", from_country).Str("to_city", to_city).Str("to_state", to_state).Str("to_zip", to_zip).Str("to_country", to_country).Str("type_", type_).Msg("got text")
|
||||
|
||||
twiml, _ := twiml.Messages([]twiml.Element{})
|
||||
|
||||
dst, err := getDst(to_)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Str("to", to_).Msg("Failed to get dst")
|
||||
return
|
||||
}
|
||||
|
||||
go func() {
|
||||
err := text.HandleTextMessage(context.Background(), src, dst, body)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to handle Twilio incoming text")
|
||||
}
|
||||
}()
|
||||
w.Header().Set("Content-Type", "text/xml")
|
||||
lint.Fprintf(w, "%s", twiml)
|
||||
}
|
||||
func twilioTextStatusPost(w http.ResponseWriter, r *http.Request) {
|
||||
message_sid := r.PostFormValue("MessageSid")
|
||||
message_status := r.PostFormValue("MessageStatus")
|
||||
log.Info().Str("sid", message_sid).Str("status", message_status).Msg("Updated message status")
|
||||
text.UpdateMessageStatus(message_sid, message_status)
|
||||
lint.Fprintf(w, "")
|
||||
}
|
||||
112
api/types.go
112
api/types.go
|
|
@ -5,17 +5,16 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/h3utils"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
|
||||
"github.com/aarondl/opt/null"
|
||||
"github.com/go-chi/render"
|
||||
//"github.com/gorilla/mux"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
type H3Cell uint64
|
||||
|
||||
type hasCreated interface {
|
||||
getCreated() string
|
||||
}
|
||||
|
||||
type Bounds struct {
|
||||
East float64
|
||||
North float64
|
||||
|
|
@ -32,13 +31,6 @@ func NewBounds() Bounds {
|
|||
}
|
||||
}
|
||||
|
||||
/* not sure if used
|
||||
type Location struct {
|
||||
Latitude float64
|
||||
Longitude float64
|
||||
}
|
||||
*/
|
||||
|
||||
type NoteImagePayload struct {
|
||||
UUID string `json:"uuid"`
|
||||
Cell H3Cell `json:"cell"`
|
||||
|
|
@ -62,6 +54,13 @@ type NoteAudioPayload struct {
|
|||
Version int32 `json:"version"`
|
||||
}
|
||||
|
||||
type ResponseDistrict struct {
|
||||
Agency string `json:"agency"`
|
||||
Manager string `json:"manager"`
|
||||
Phone string `json:"phone"`
|
||||
Website string `json:"website"`
|
||||
}
|
||||
|
||||
type ResponseMosquitoSource struct {
|
||||
Access string `json:"access"`
|
||||
Active *bool `json:"active"`
|
||||
|
|
@ -89,11 +88,10 @@ type NoteAudioBreadcrumbPayload struct {
|
|||
|
||||
type ResponseFieldseeker struct {
|
||||
MosquitoSources []ResponseMosquitoSource `json:"sources"`
|
||||
ServiceRequests []ResponseServiceRequest `json:"requests"`
|
||||
ServiceRequests []types.ServiceRequest `json:"requests"`
|
||||
TrapData []ResponseTrapData `json:"traps"`
|
||||
}
|
||||
|
||||
// ResponseErr renderer type for handling all sorts of errors.
|
||||
type ResponseClientIos struct {
|
||||
Fieldseeker ResponseFieldseeker `json:"fieldseeker"`
|
||||
Since time.Time `json:"since"`
|
||||
|
|
@ -103,23 +101,6 @@ func (i ResponseClientIos) Render(w http.ResponseWriter, r *http.Request) error
|
|||
return nil
|
||||
}
|
||||
|
||||
// In the best case scenario, the excellent github.com/pkg/errors package
|
||||
// helps reveal information on the error, setting it on Err, and in the Render()
|
||||
// method, using it to set the application-specific error code in AppCode.
|
||||
type ResponseErr struct {
|
||||
Error error `json:"-"` // low-level runtime error
|
||||
HTTPStatusCode int `json:"-"` // http response status code
|
||||
|
||||
StatusText string `json:"status"` // user-level status message
|
||||
AppCode int64 `json:"code,omitempty"` // application-specific error code
|
||||
ErrorText string `json:"error,omitempty"` // application-level error message, for debugging
|
||||
}
|
||||
|
||||
func (e *ResponseErr) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
render.Status(r, e.HTTPStatusCode)
|
||||
return nil
|
||||
}
|
||||
|
||||
type ResponseMosquitoInspection struct {
|
||||
ActionTaken string `json:"action_taken"`
|
||||
Comments string `json:"comments"`
|
||||
|
|
@ -154,19 +135,28 @@ func NewResponseMosquitoInspections(inspections models.FieldseekerMosquitoinspec
|
|||
return results
|
||||
}
|
||||
|
||||
func (rd ResponseDistrict) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (rtd ResponseMosquitoSource) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewResponseMosquitoSource(ms platform.MosquitoSource) ResponseMosquitoSource {
|
||||
pl := ms.PointLocation
|
||||
h3cell, err := h3utils.ToCell(pl.H3cell.GetOr("0"))
|
||||
if err != nil {
|
||||
log.Warn().Err(err).Msg("Failed to convert h3 cell")
|
||||
h3cell = 0
|
||||
}
|
||||
return ResponseMosquitoSource{
|
||||
Active: toBool16(pl.Active),
|
||||
Access: pl.Accessdesc.GetOr(""),
|
||||
Comments: pl.Comments.GetOr(""),
|
||||
Created: formatTime(pl.Creationdate),
|
||||
Description: pl.Description.GetOr(""),
|
||||
//H3Cell: pl.H3Cell,
|
||||
H3Cell: int64(h3cell),
|
||||
ID: pl.Globalid.String(),
|
||||
LastInspectionDate: formatTime(pl.Lastinspectdate),
|
||||
Habitat: pl.Habitat.GetOr(""),
|
||||
|
|
@ -241,48 +231,10 @@ func (rtd ResponseNote) Render(w http.ResponseWriter, r *http.Request) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
type ResponseServiceRequest struct {
|
||||
Address string `json:"address"`
|
||||
AssignedTechnician string `json:"assigned_technician"`
|
||||
City string `json:"city"`
|
||||
Created string `json:"created"`
|
||||
H3Cell int64 `json:"h3cell"`
|
||||
HasDog *bool `json:"has_dog"`
|
||||
HasSpanishSpeaker *bool `json:"has_spanish_speaker"`
|
||||
ID string `json:"id"`
|
||||
Priority string `json:"priority"`
|
||||
RecordedDate string `json:"recorded_date"`
|
||||
Source string `json:"source"`
|
||||
Status string `json:"status"`
|
||||
Target string `json:"target"`
|
||||
Zip string `json:"zip"`
|
||||
}
|
||||
|
||||
func (srr ResponseServiceRequest) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewResponseServiceRequest(sr *models.FieldseekerServicerequest) ResponseServiceRequest {
|
||||
return ResponseServiceRequest{
|
||||
Address: sr.Reqaddr1.GetOr(""),
|
||||
AssignedTechnician: sr.Assignedtech.GetOr(""),
|
||||
City: sr.Reqcity.GetOr(""),
|
||||
Created: formatTime(sr.Creationdate),
|
||||
//H3Cell: sr.H3Cell,
|
||||
HasDog: toBool(sr.Dog),
|
||||
HasSpanishSpeaker: toBool(sr.Spanish),
|
||||
ID: sr.Globalid.String(),
|
||||
Priority: sr.Priority.GetOr(""),
|
||||
Status: sr.Status.GetOr(""),
|
||||
Source: sr.Source.GetOr(""),
|
||||
Target: sr.Reqtarget.GetOr(""),
|
||||
Zip: sr.Reqzip.GetOr(""),
|
||||
}
|
||||
}
|
||||
func NewResponseServiceRequests(requests models.FieldseekerServicerequestSlice) []ResponseServiceRequest {
|
||||
results := make([]ResponseServiceRequest, 0)
|
||||
func NewResponseServiceRequests(requests models.FieldseekerServicerequestSlice) []types.ServiceRequest {
|
||||
results := make([]types.ServiceRequest, 0)
|
||||
for _, i := range requests {
|
||||
results = append(results, NewResponseServiceRequest(i))
|
||||
results = append(results, types.ServiceRequestFromModel(i))
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
|
@ -344,16 +296,4 @@ func toBool16(t null.Val[int16]) *bool {
|
|||
}
|
||||
return &b
|
||||
}
|
||||
func toBool(t null.Val[int32]) *bool {
|
||||
if t.IsNull() {
|
||||
return nil
|
||||
}
|
||||
val := t.MustGet()
|
||||
var b bool
|
||||
if val == 0 {
|
||||
b = false
|
||||
} else {
|
||||
b = true
|
||||
}
|
||||
return &b
|
||||
}
|
||||
|
||||
|
|
|
|||
1
api/upload.go
Normal file
1
api/upload.go
Normal file
|
|
@ -0,0 +1 @@
|
|||
package api
|
||||
1
api/user.go
Normal file
1
api/user.go
Normal file
|
|
@ -0,0 +1 @@
|
|||
package api
|
||||
105
api/voipms.go
Normal file
105
api/voipms.go
Normal file
|
|
@ -0,0 +1,105 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
|
||||
//"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/text"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
/*
|
||||
{
|
||||
"data": {
|
||||
"id": 101252305,
|
||||
"event_type": "message.received",
|
||||
"record_type": "event",
|
||||
"payload": {
|
||||
"id": 101252305,
|
||||
"record_type": "message",
|
||||
"from": {
|
||||
"phone_number": "+18016984649"
|
||||
},
|
||||
"to": [
|
||||
{
|
||||
"phone_number": "+15593720139",
|
||||
"status": "webhook_delivered"
|
||||
}
|
||||
],
|
||||
"text": "test3",
|
||||
"received_at": "2026-01-29T20:16:23.000000+00:00",
|
||||
"type": "SMS",
|
||||
"media": []
|
||||
}
|
||||
}
|
||||
}
|
||||
*/
|
||||
type VoipMSStatusPhoneFrom struct {
|
||||
PhoneNumber string `json:"phone_number"`
|
||||
}
|
||||
type VoipMSStatusPhoneTo struct {
|
||||
PhoneNumber string `json:"phone_number"`
|
||||
Status string `json:"status"`
|
||||
}
|
||||
type VoipMSStatusPayload struct {
|
||||
ID int `json:"id"`
|
||||
RecordType string `json:"record_type"`
|
||||
From VoipMSStatusPhoneFrom `json:"from"`
|
||||
To []VoipMSStatusPhoneTo `json:"to"`
|
||||
Text string `json:"text"`
|
||||
ReceivedAt string `json:"received_at"`
|
||||
Type string `json:"type"`
|
||||
//Media []something
|
||||
}
|
||||
type VoipMSStatusUpdate struct {
|
||||
ID int `json:"id"`
|
||||
EventType string `json:"event_type"`
|
||||
RecordType string `json:"record_type"`
|
||||
Payload VoipMSStatusPayload `json:"payload"`
|
||||
}
|
||||
type VoipMSTextPostBody struct {
|
||||
Data VoipMSStatusUpdate `json:"data"`
|
||||
}
|
||||
|
||||
func voipmsTextGet(w http.ResponseWriter, r *http.Request) {
|
||||
query := r.URL.Query()
|
||||
name := query.Get("to")
|
||||
age := query.Get("from")
|
||||
message := query.Get("message")
|
||||
files := query.Get("files")
|
||||
id := query.Get("id")
|
||||
date := query.Get("date")
|
||||
log.Info().Str("name", name).Str("age", age).Str("message", message).Str("files", files).Str("id", id).Str("date", date).Msg("Incoming text message")
|
||||
}
|
||||
func voipmsTextPost(w http.ResponseWriter, r *http.Request) {
|
||||
body, err := ioutil.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
http.Error(w, "failed to read", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
//debugSaveRequest(r)
|
||||
var b VoipMSTextPostBody
|
||||
err = json.Unmarshal(body, &b)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
to := "unknown"
|
||||
if len(b.Data.Payload.To) > 0 {
|
||||
to = b.Data.Payload.To[0].PhoneNumber
|
||||
}
|
||||
log.Info().Int("ID", b.Data.ID).Str("event_type", b.Data.EventType).Str("record_type", b.Data.RecordType).Str("from", b.Data.Payload.From.PhoneNumber).Str("to", to).Str("content", b.Data.Payload.Text).Msg("Text status")
|
||||
|
||||
// Convert phone numbers from Voip.ms into E164 format for consistency
|
||||
go func() {
|
||||
err := text.HandleTextMessage(context.Background(), b.Data.Payload.From.PhoneNumber, to, b.Data.Payload.Text)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to handle VoIP.ms incoming text")
|
||||
}
|
||||
}()
|
||||
lint.Fprintf(w, "ok")
|
||||
}
|
||||
|
|
@ -1 +1 @@
|
|||
Subproject commit af786fabcc08ed506a23718a71aa0dd52ce047ac
|
||||
Subproject commit 63cc8b573739294ea98f7e39d2baec3cd70dfd7f
|
||||
244
auth/auth.go
244
auth/auth.go
|
|
@ -6,27 +6,17 @@ import (
|
|||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/sql"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/debug"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/stephenafamo/bob/dialect/psql"
|
||||
"github.com/stephenafamo/bob/dialect/psql/sm"
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
)
|
||||
|
||||
type NoCredentialsError struct{}
|
||||
type InactiveUser struct{}
|
||||
|
||||
func (e NoCredentialsError) Error() string { return "No credentials were present in the request" }
|
||||
|
||||
type NoUserError struct{}
|
||||
|
||||
func (e NoUserError) Error() string { return "That user does not exist" }
|
||||
func (e InactiveUser) Error() string { return "That user is not active" }
|
||||
|
||||
type InvalidCredentials struct{}
|
||||
|
||||
|
|
@ -36,30 +26,74 @@ type InvalidUsername struct{}
|
|||
|
||||
func (e InvalidUsername) Error() string { return "That username doesn't exist" }
|
||||
|
||||
type AuthenticatedHandler func(http.ResponseWriter, *http.Request, *models.User)
|
||||
type NoCredentialsError struct{}
|
||||
|
||||
func (e NoCredentialsError) Error() string { return "No credentials were present in the request" }
|
||||
|
||||
type AuthenticatedHandler func(http.ResponseWriter, *http.Request, platform.User)
|
||||
type EnsureAuth struct {
|
||||
handler AuthenticatedHandler
|
||||
}
|
||||
|
||||
func AddUserSession(r *http.Request, user *models.User) {
|
||||
id := strconv.Itoa(int(user.ID))
|
||||
sessionManager.Put(r.Context(), "user_id", id)
|
||||
sessionManager.Put(r.Context(), "username", user.Username)
|
||||
log.Info().Str("username", user.Username).Str("user_id", id).Msg("Created new user session")
|
||||
func AddUserSession(ctx context.Context, user *platform.User) {
|
||||
id_str := strconv.Itoa(int(user.ID))
|
||||
sessionManager.Put(ctx, "user_id", id_str)
|
||||
sessionManager.Put(ctx, "username", user.Username)
|
||||
log.Debug().Str("id", id_str).Str("username", user.Username).Msg("added user session")
|
||||
}
|
||||
func ImpersonateEnd(ctx context.Context) {
|
||||
sessionManager.Put(ctx, "impersonated_user_id", "")
|
||||
}
|
||||
func ImpersonateUser(ctx context.Context, target_user_id int) {
|
||||
target_user_id_str := strconv.Itoa(int(target_user_id))
|
||||
sessionManager.Put(ctx, "impersonated_user_id", target_user_id_str)
|
||||
}
|
||||
func ImpersonatedUser(ctx context.Context) *int32 {
|
||||
i_str := sessionManager.GetString(ctx, "impersonated_user_id")
|
||||
if i_str == "" {
|
||||
return nil
|
||||
}
|
||||
i, err := strconv.Atoi(i_str)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Str("impersonated_user_id", i_str).Msg("failed to parse impersonated_user_id")
|
||||
return nil
|
||||
}
|
||||
result := int32(i)
|
||||
return &result
|
||||
}
|
||||
func ImpersonatorID(ctx context.Context) *int32 {
|
||||
user_id_str := sessionManager.GetString(ctx, "user_id")
|
||||
user_id, err := strconv.Atoi(user_id_str)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Str("user_id", user_id_str).Msg("failed to parse user_id")
|
||||
return nil
|
||||
}
|
||||
result := int32(user_id)
|
||||
return &result
|
||||
|
||||
func GetAuthenticatedUser(r *http.Request) (*models.User, error) {
|
||||
//user_id := sessionManager.GetInt(r.Context(), "user_id")
|
||||
user_id_str := sessionManager.GetString(r.Context(), "user_id")
|
||||
}
|
||||
func GetAuthenticatedUser(r *http.Request) (*platform.User, error) {
|
||||
ctx := r.Context()
|
||||
user_id_str := sessionManager.GetString(ctx, "user_id")
|
||||
impersonated_user_id_str := sessionManager.GetString(ctx, "impersonated_user_id")
|
||||
if impersonated_user_id_str != "" {
|
||||
user_id_str = impersonated_user_id_str
|
||||
}
|
||||
if user_id_str != "" {
|
||||
user_id, err := strconv.Atoi(user_id_str)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to convert user_id to int: %w", err)
|
||||
}
|
||||
username := sessionManager.GetString(r.Context(), "username")
|
||||
log.Info().Int("user_id", user_id).Str("username", username).Msg("Current session info")
|
||||
username := sessionManager.GetString(ctx, "username")
|
||||
if user_id > 0 && username != "" {
|
||||
return findUser(r.Context(), user_id)
|
||||
user, err := platform.UserByID(ctx, int32(user_id))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("user by ID: %w", err)
|
||||
}
|
||||
if !user.IsActive {
|
||||
return nil, fmt.Errorf("user is inactive")
|
||||
}
|
||||
return user, nil
|
||||
}
|
||||
}
|
||||
// If we can't get the user from the session try to get from auth headers
|
||||
|
|
@ -67,11 +101,11 @@ func GetAuthenticatedUser(r *http.Request) (*models.User, error) {
|
|||
if !ok {
|
||||
return nil, &NoCredentialsError{}
|
||||
}
|
||||
user, err := validateUser(r.Context(), username, password)
|
||||
user, err := validateUser(ctx, username, password)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
AddUserSession(r, user)
|
||||
AddUserSession(ctx, user)
|
||||
return user, nil
|
||||
}
|
||||
|
||||
|
|
@ -81,39 +115,44 @@ func NewEnsureAuth(handlerToWrap AuthenticatedHandler) *EnsureAuth {
|
|||
|
||||
func (ea *EnsureAuth) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
// If this is an API request respond with a more machine-readable error state
|
||||
accept := r.Header.Values("Accept")
|
||||
accept := r.Header.Get("Accept")
|
||||
/*
|
||||
offers := []string{"application/json", "text/html"}
|
||||
|
||||
content_type := NegotiateContent(accept, offers)
|
||||
*/
|
||||
user, err := GetAuthenticatedUser(r)
|
||||
if err != nil || user == nil {
|
||||
var msg []byte
|
||||
// Don't send authentication headers for browsers because it forces the authentication popup
|
||||
requested_with := r.Header.Get("X-Requested-With")
|
||||
//log.Debug().Str("x-requested-with", requested_with).Send()
|
||||
if !strings.HasPrefix(requested_with, "nidus-web") && accept != "text/event-stream" {
|
||||
w.Header().Set("WWW-Authenticate", `Basic realm="Nidus Sync"`)
|
||||
// Separate return codes for different authentication failures
|
||||
if _, ok := err.(*NoCredentialsError); ok {
|
||||
fmt.Println("No credentials present and no session")
|
||||
log.Info().Msg("No credentials present and no session")
|
||||
w.Header().Set("WWW-Authenticate-Error", "no-credentials")
|
||||
msg = []byte("Please provide credentials.\n")
|
||||
} else if _, ok := err.(*NoUserError); ok {
|
||||
} else if _, ok := err.(*platform.NoUserError); ok {
|
||||
w.Header().Set("WWW-Authenticate-Error", "invalid-credentials")
|
||||
msg = []byte("Invalid credentials provided.\n")
|
||||
} else if _, ok := err.(*InvalidCredentials); ok {
|
||||
w.Header().Set("WWW-Authenticate-Error", "invalid-credentials")
|
||||
msg = []byte("Invalid credentials provided.\n")
|
||||
}
|
||||
|
||||
if content_type == "text/html" {
|
||||
http.Redirect(w, r, "/signin?next="+r.URL.Path, http.StatusSeeOther)
|
||||
return
|
||||
}
|
||||
w.Header().Set("WWW-Authenticate", `Basic realm="Nidus Sync"`)
|
||||
|
||||
w.WriteHeader(401)
|
||||
w.Write(msg)
|
||||
_, err = w.Write(msg)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to write response")
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
ea.handler(w, r, user)
|
||||
ea.handler(w, r, *user)
|
||||
}
|
||||
func SigninUser(r *http.Request, username string, password string) (*models.User, error) {
|
||||
func SigninUser(r *http.Request, username string, password string) (*platform.User, error) {
|
||||
user, err := validateUser(r.Context(), username, password)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -121,105 +160,84 @@ func SigninUser(r *http.Request, username string, password string) (*models.User
|
|||
if user == nil {
|
||||
return nil, errors.New("No matching user")
|
||||
}
|
||||
AddUserSession(r, user)
|
||||
AddUserSession(r.Context(), user)
|
||||
return user, nil
|
||||
}
|
||||
|
||||
func SignupUser(ctx context.Context, username string, name string, password string) (*models.User, error) {
|
||||
passwordHash, err := hashPassword(password)
|
||||
func SignoutUser(r *http.Request, user platform.User) {
|
||||
sessionManager.Put(r.Context(), "user_id", "")
|
||||
sessionManager.Put(r.Context(), "username", "")
|
||||
err := sessionManager.Destroy(r.Context())
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to destroy session for user on signout")
|
||||
}
|
||||
log.Info().Str("username", user.Username).Int("user_id", (user.ID)).Msg("Ended user session")
|
||||
}
|
||||
|
||||
func SignupUser(ctx context.Context, username string, name string, password string) (*platform.User, error) {
|
||||
password_hash, err := HashPassword(password)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Cannot signup user, failed to create hashed password: %w", err)
|
||||
}
|
||||
o_setter := models.OrganizationSetter{
|
||||
Name: omitnull.From(fmt.Sprintf("%s's organization", username)),
|
||||
ArcgisID: omitnull.From(""),
|
||||
ArcgisName: omitnull.From(""),
|
||||
FieldseekerURL: omitnull.From(""),
|
||||
}
|
||||
o, err := models.Organizations.Insert(&o_setter).One(ctx, db.PGInstance.BobDB)
|
||||
u, err := platform.CreateUser(ctx, username, name, password_hash)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to create organization: %w", err)
|
||||
return nil, fmt.Errorf("create user: %s", err)
|
||||
}
|
||||
log.Info().Int32("id", o.ID).Msg("Created organization")
|
||||
u_setter := models.UserSetter{
|
||||
DisplayName: omit.From(name),
|
||||
OrganizationID: omit.From(o.ID),
|
||||
PasswordHash: omit.From(passwordHash),
|
||||
PasswordHashType: omit.From(enums.HashtypeBcrypt14),
|
||||
Username: omit.From(username),
|
||||
}
|
||||
u, err := models.Users.Insert(&u_setter).One(ctx, db.PGInstance.BobDB)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to create user: %w", err)
|
||||
}
|
||||
log.Info().Int32("id", u.ID).Str("username", u.Username).Msg("Created user")
|
||||
|
||||
return u, nil
|
||||
}
|
||||
|
||||
// Helper function to translate strings into solid error types for operating on
|
||||
func findUser(ctx context.Context, user_id int) (*models.User, error) {
|
||||
//user, err := models.FindUser(ctx, db.PGInstance.BobDB, int32(user_id))
|
||||
user, err := models.Users.Query(
|
||||
models.Preload.User.Organization(),
|
||||
sm.Where(models.Users.Columns.ID.EQ(psql.Arg(user_id))),
|
||||
).One(ctx, db.PGInstance.BobDB)
|
||||
if err != nil {
|
||||
if err.Error() == "No such user" || err.Error() == "sql: no rows in result set" {
|
||||
return nil, &NoUserError{}
|
||||
} else {
|
||||
debug.LogErrorTypeInfo(err)
|
||||
log.Error().Err(err).Msg("Unrecognized error. This should be updated in the findUser code")
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
log.Info().Int32("user_id", user.ID).Int32("org_id", user.OrganizationID).Msg("Found user")
|
||||
return user, err
|
||||
}
|
||||
|
||||
func hashPassword(password string) (string, error) {
|
||||
func HashPassword(password string) (string, error) {
|
||||
bytes, err := bcrypt.GenerateFromPassword([]byte(password), 14)
|
||||
return string(bytes), err
|
||||
}
|
||||
|
||||
func redact(s string) string {
|
||||
if len(s) <= 4 {
|
||||
return s
|
||||
}
|
||||
|
||||
first_two := s[:2]
|
||||
last_two := s[len(s)-2:]
|
||||
middle_length := len(s) - 4
|
||||
|
||||
return first_two + strings.Repeat("*", middle_length) + last_two
|
||||
}
|
||||
|
||||
func validatePassword(password, hash string) bool {
|
||||
err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password))
|
||||
if err != nil {
|
||||
log.Debug().Err(err).Str("password", password).Str("hash", hash).Msg("!validate password")
|
||||
}
|
||||
return err == nil
|
||||
}
|
||||
|
||||
func validateUser(ctx context.Context, username string, password string) (*models.User, error) {
|
||||
passwordHash, err := hashPassword(password)
|
||||
func validateUser(ctx context.Context, username string, password string) (*platform.User, error) {
|
||||
log.Info().Str("username", username).Msg("begin validateUser. Hashing...")
|
||||
start := time.Now()
|
||||
passwordHash, err := HashPassword(password)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to hash password: %w", err)
|
||||
}
|
||||
log.Info().Str("username", username).Str("password", password).Str("hash", passwordHash).Msg("Validating user")
|
||||
result, err := sql.UserByUsername(username).All(ctx, db.PGInstance.BobDB)
|
||||
t1 := time.Now()
|
||||
elapsed := t1.Sub(start)
|
||||
log.Info().Int64("elapsed ms", elapsed.Milliseconds()).Msg("calculated hash")
|
||||
t2 := time.Now()
|
||||
user, err := platform.UserByUsername(ctx, username)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to query for user: %w", err)
|
||||
}
|
||||
switch len(result) {
|
||||
case 0:
|
||||
elapsed = t2.Sub(t1)
|
||||
log.Info().Int64("elapsed ms", elapsed.Milliseconds()).Str("username", username).Msg("queried user")
|
||||
if user == nil {
|
||||
log.Info().Str("username", username).Str("password", redact(password)).Msg("Invalid username")
|
||||
return nil, InvalidUsername{}
|
||||
case 1:
|
||||
row := result[0]
|
||||
if !validatePassword(password, row.PasswordHash) {
|
||||
}
|
||||
if !user.IsActive {
|
||||
return nil, InactiveUser{}
|
||||
}
|
||||
if !validatePassword(password, user.PasswordHash) {
|
||||
log.Info().Str("username", username).Str("password", redact(password)).Str("hash", passwordHash).Msg("Invalid password for user")
|
||||
return nil, InvalidCredentials{}
|
||||
}
|
||||
user := models.User{
|
||||
ID: row.ID,
|
||||
ArcgisAccessToken: row.ArcgisAccessToken,
|
||||
ArcgisLicense: row.ArcgisLicense,
|
||||
ArcgisRefreshToken: row.ArcgisRefreshToken,
|
||||
ArcgisRefreshTokenExpires: row.ArcgisRefreshTokenExpires,
|
||||
ArcgisRole: row.ArcgisRole,
|
||||
DisplayName: row.DisplayName,
|
||||
Email: row.Email,
|
||||
OrganizationID: row.OrganizationID,
|
||||
Username: row.Username,
|
||||
}
|
||||
return &user, nil
|
||||
default:
|
||||
return nil, errors.New("More than one matching row, this should be impossible.")
|
||||
|
||||
}
|
||||
return user, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -3,9 +3,9 @@ package auth
|
|||
import (
|
||||
"time"
|
||||
|
||||
"github.com/alexedwards/scs/v2"
|
||||
"github.com/alexedwards/scs/pgxstore"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/alexedwards/scs/pgxstore"
|
||||
"github.com/alexedwards/scs/v2"
|
||||
)
|
||||
|
||||
var sessionManager *scs.SessionManager
|
||||
|
|
|
|||
1581
background/arcgis.go
1581
background/arcgis.go
File diff suppressed because it is too large
Load diff
35
cmd/passwordgen/main.go
Normal file
35
cmd/passwordgen/main.go
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
)
|
||||
|
||||
func main() {
|
||||
var password string
|
||||
scanValue("Please enter your password : ", &password)
|
||||
|
||||
hash, err := auth.HashPassword(password)
|
||||
if err != nil {
|
||||
fmt.Printf("Failed to hash password: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
fmt.Println("Password:", password)
|
||||
fmt.Println("Hash: ", hash)
|
||||
|
||||
}
|
||||
|
||||
func scanValue(message string, result *string) {
|
||||
fmt.Print("%s", message)
|
||||
scanner := bufio.NewScanner(os.Stdin)
|
||||
if ok := scanner.Scan(); !ok {
|
||||
log.Fatal(errors.New("Failed to scan input"))
|
||||
}
|
||||
*result = scanner.Text()
|
||||
}
|
||||
|
|
@ -1,113 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
fslayer "github.com/Gleipnir-Technology/arcgis-go/fieldseeker/layer"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/alexedwards/scs/v2"
|
||||
"github.com/google/uuid"
|
||||
"github.com/rs/zerolog"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
var sessionManager *scs.SessionManager
|
||||
|
||||
var BaseURL, ClientID, ClientSecret, Environment, FieldseekerSchemaDirectory, MapboxToken string
|
||||
|
||||
func main() {
|
||||
zerolog.TimeFieldFormat = zerolog.TimeFormatUnix
|
||||
log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
|
||||
|
||||
ClientID = os.Getenv("ARCGIS_CLIENT_ID")
|
||||
if ClientID == "" {
|
||||
log.Error().Msg("You must specify a non-empty ARCGIS_CLIENT_ID")
|
||||
os.Exit(1)
|
||||
}
|
||||
ClientSecret = os.Getenv("ARCGIS_CLIENT_SECRET")
|
||||
if ClientSecret == "" {
|
||||
log.Error().Msg("You must specify a non-empty ARCGIS_CLIENT_SECRET")
|
||||
os.Exit(1)
|
||||
}
|
||||
BaseURL = os.Getenv("BASE_URL")
|
||||
if BaseURL == "" {
|
||||
log.Error().Msg("You must specify a non-empty BASE_URL")
|
||||
os.Exit(1)
|
||||
}
|
||||
bind := os.Getenv("BIND")
|
||||
if bind == "" {
|
||||
bind = ":9001"
|
||||
}
|
||||
Environment = os.Getenv("ENVIRONMENT")
|
||||
if Environment == "" {
|
||||
log.Error().Msg("You must specify a non-empty ENVIRONMENT")
|
||||
os.Exit(1)
|
||||
}
|
||||
if !(Environment == "PRODUCTION" || Environment == "DEVELOPMENT") {
|
||||
log.Error().Str("ENVIRONMENT", Environment).Msg("ENVIRONMENT should be either DEVELOPMENT or PRODUCTION")
|
||||
os.Exit(2)
|
||||
}
|
||||
MapboxToken = os.Getenv("MAPBOX_TOKEN")
|
||||
if MapboxToken == "" {
|
||||
log.Error().Msg("You must specify a non-empty MAPBOX_TOKEN")
|
||||
os.Exit(1)
|
||||
}
|
||||
pg_dsn := os.Getenv("POSTGRES_DSN")
|
||||
if pg_dsn == "" {
|
||||
log.Error().Msg("You must specify a non-empty POSTGRES_DSN")
|
||||
os.Exit(1)
|
||||
}
|
||||
FieldseekerSchemaDirectory = os.Getenv("FIELDSEEKER_SCHEMA_DIRECTORY")
|
||||
if FieldseekerSchemaDirectory == "" {
|
||||
log.Error().Msg("You must specify a non-empty FIELDSEEKER_SCHEMA_DIRECTORY")
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
log.Info().Msg("Starting...")
|
||||
err := db.InitializeDatabase(context.TODO(), pg_dsn)
|
||||
if err != nil {
|
||||
log.Error().Str("err", err.Error()).Msg("Failed to connect to database")
|
||||
os.Exit(2)
|
||||
}
|
||||
ctx := context.Background()
|
||||
row := fslayer.RodentLocation{
|
||||
ObjectID: 1,
|
||||
LocationName: "some location",
|
||||
Zone: "",
|
||||
Zone2: "",
|
||||
//Habitat: fslayer.RodentLocationRodentLocationHabitatCommercial,
|
||||
//Priority: fslayer.RodentLocationLocationPriority1None,
|
||||
//Usetype: fslayer.RodentLocationLocationUseType1Residential,
|
||||
//Active: fslayer.RodentLocationNotInUITF1True,
|
||||
Description: "",
|
||||
Accessdesc: "",
|
||||
Comments: "",
|
||||
//Symbology: fslayer.RodentLocationRodentLocationSymbologyActionrequired,
|
||||
ExternalID: "",
|
||||
Nextactiondatescheduled: time.Now(),
|
||||
Locationnumber: 1,
|
||||
LastInspectionDate: time.Now(),
|
||||
LastInspectionSpecies: "",
|
||||
LastInspectionAction: "",
|
||||
LastInspectionConditions: "",
|
||||
LastInspectionRodentEvidence: "",
|
||||
GlobalID: uuid.New(),
|
||||
CreatedUser: "",
|
||||
CreatedDate: time.Now(),
|
||||
LastEditedUser: "",
|
||||
LastEditedDate: time.Now(),
|
||||
CreationDate: time.Now(),
|
||||
Creator: "",
|
||||
EditDate: time.Now(),
|
||||
Editor: "",
|
||||
Jurisdiction: "",
|
||||
}
|
||||
err = db.TestPreparedQuery(ctx, &row)
|
||||
if err != nil {
|
||||
log.Error().Str("err", err.Error()).Msg("Failed to run prepared query")
|
||||
os.Exit(3)
|
||||
}
|
||||
log.Info().Msg("Complete.")
|
||||
}
|
||||
54
cmd/test-jet/main.go
Normal file
54
cmd/test-jet/main.go
Normal file
|
|
@ -0,0 +1,54 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/query/public"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
)
|
||||
|
||||
func main() {
|
||||
err := config.Parse()
|
||||
if err != nil {
|
||||
log.Printf("failed on config: %v", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
ctx := context.TODO()
|
||||
err = db.InitializeDatabase(ctx, config.PGDSN)
|
||||
if err != nil {
|
||||
log.Printf("failed on db: %v", err)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
txn, err := db.BeginTxn(ctx)
|
||||
if err != nil {
|
||||
log.Printf("failed on txn: %v", err)
|
||||
os.Exit(3)
|
||||
}
|
||||
defer lint.LogOnErrRollback(txn.Rollback, ctx, "rollback")
|
||||
log.Printf("doing address")
|
||||
gid := "openaddresses:address:us/ca/tulare-addresses-county:0dc28458fd03e3fa"
|
||||
address, err := public.AddressFromGID(ctx, txn, gid)
|
||||
if err != nil {
|
||||
log.Printf("failed on query: %v", err)
|
||||
os.Exit(4)
|
||||
}
|
||||
//log.Printf("address %d lat %f lng %f", address.ID, *address.LocationLatitude, *address.LocationLongitude)
|
||||
log.Printf("Address id %d location %s", address.ID, address.Location)
|
||||
lint.LogOnErrCtx(txn.Commit, ctx, "commit")
|
||||
|
||||
/*
|
||||
log.Printf("doing comm")
|
||||
id := int64(1)
|
||||
comm, err := public.CommunicationFromID(ctx, id)
|
||||
if err != nil {
|
||||
log.Printf("failed on query: %v", err)
|
||||
os.Exit(4)
|
||||
}
|
||||
log.Printf("communication %d", comm.ID)
|
||||
*/
|
||||
}
|
||||
102
comms/email/email.go
Normal file
102
comms/email/email.go
Normal file
|
|
@ -0,0 +1,102 @@
|
|||
package email
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/rs/zerolog/log"
|
||||
"resty.dev/v3"
|
||||
)
|
||||
|
||||
type attachmentRequest struct {
|
||||
Filename string `json:"filename"`
|
||||
Content string `json:"content"`
|
||||
}
|
||||
|
||||
type Request struct {
|
||||
From string `json:"from"`
|
||||
To string `json:"to"`
|
||||
CC []string `json:"cc,omitempty"`
|
||||
BCC []string `json:"bcc,omitempty"`
|
||||
Subject string `json:"subject"`
|
||||
Text string `json:"text"`
|
||||
HTML string `json:"html,omitempty"`
|
||||
Attachments []attachmentRequest `json:"attachments,omitempty"`
|
||||
Sender string `json:"sender"`
|
||||
ReplyTo string `json:"replyTo,omitempty"`
|
||||
InReplyTo string `json:"inReplyTo,omitempty"`
|
||||
References []string `json:"references,omitempty"`
|
||||
}
|
||||
|
||||
type emailEnvelope struct {
|
||||
From string `json:"from"`
|
||||
To []string `json:"to"`
|
||||
}
|
||||
|
||||
type emailResponseError struct {
|
||||
StatusCode int `json:"statusCode"`
|
||||
Error string `json:"error"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
type emailResponse struct {
|
||||
IsRedacted bool `json:"is_redacted"`
|
||||
CreatedAt string `json:"created_at"`
|
||||
HardBounces []string `json:"hard_bounces"`
|
||||
SoftBounces []string `json:"soft_bounces"`
|
||||
IsBounce bool `json:"is_bounce"`
|
||||
Alias string `json:"alias"`
|
||||
Domain string `json:"domain"`
|
||||
User string `json:"user"`
|
||||
Status string `json:"status"`
|
||||
IsLocked bool `json:"is_locked"`
|
||||
Envelope emailEnvelope `json:"envelope"`
|
||||
RequireTLS bool `json:"requireTLS"`
|
||||
MessageID string `json:"messageId"`
|
||||
Headers map[string]string `json:"headers"`
|
||||
Date string `json:"date"`
|
||||
Subject string `json:"subject"`
|
||||
Accepted []string `json:"accepted"`
|
||||
Deliveries []string `json:"deliveries"`
|
||||
RejectedErrors []string `json:"rejectedErrors"`
|
||||
ID string `json:"id"`
|
||||
Object string `json:"object"`
|
||||
UpdatedAt string `json:"updated_at"`
|
||||
Link string `json:"link"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
var FORWARDEMAIL_EMAIL_POST_API = "https://api.forwardemail.net/v1/emails"
|
||||
|
||||
func Send(ctx context.Context, email Request) (result emailResponse, err error) {
|
||||
client := resty.New()
|
||||
|
||||
var err_resp emailResponseError
|
||||
r, err := client.R().
|
||||
SetBasicAuth(config.ForwardEmailAPIToken, "").
|
||||
SetBody(email).
|
||||
SetContext(ctx).
|
||||
SetError(&err_resp).
|
||||
SetHeader("Content-Type", "application/json").
|
||||
SetResult(&result).
|
||||
Post(FORWARDEMAIL_EMAIL_POST_API)
|
||||
|
||||
if err != nil {
|
||||
return result, fmt.Errorf("Failed to marshal email request: %w", err)
|
||||
}
|
||||
|
||||
if r.IsError() {
|
||||
log.Error().
|
||||
Int("status", err_resp.StatusCode).
|
||||
Str("error", err_resp.Error).
|
||||
Str("msg", err_resp.Message).
|
||||
Str("email.from", email.From).
|
||||
Str("email.sender", email.Sender).
|
||||
Str("email.subject", email.Subject).
|
||||
Str("email.to", email.To).
|
||||
Str("email.text", email.Text).
|
||||
Msg("Email send error")
|
||||
return result, fmt.Errorf("Error response %d from email service: %s (%s)", err_resp.StatusCode, err_resp.Message, err_resp.Error)
|
||||
}
|
||||
return result, nil
|
||||
}
|
||||
62
comms/email/websocket.go
Normal file
62
comms/email/websocket.go
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
package email
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"time"
|
||||
|
||||
"github.com/gorilla/websocket"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// FORWARDEMAIL_WS_API is the ForwardEmail websocket endpoint used to receive
// inbound email notifications.
var FORWARDEMAIL_WS_API = "wss://api.forwardemail.net/v1/ws"
|
||||
|
||||
func StartWebsocket(ctx context.Context, api_token string) {
|
||||
|
||||
var conn *websocket.Conn
|
||||
for {
|
||||
err := ensureConnected(conn, api_token)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Bailing on email websocket")
|
||||
return
|
||||
}
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
return
|
||||
default:
|
||||
// Read message
|
||||
message_type, message, err := conn.ReadMessage()
|
||||
if err != nil {
|
||||
if !websocket.IsCloseError(err, websocket.CloseNormalClosure) {
|
||||
conn = nil
|
||||
}
|
||||
log.Error().Err(err).Msg("Error reading message")
|
||||
}
|
||||
|
||||
// Process and log the message
|
||||
log.Info().Int("message_type", message_type).Bytes("message", message).Msg("Got email notification")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func ensureConnected(conn *websocket.Conn, api_token string) error {
|
||||
if conn != nil {
|
||||
return nil
|
||||
}
|
||||
url := FORWARDEMAIL_WS_API + "?token=" + api_token
|
||||
for {
|
||||
new_conn, _, err := websocket.DefaultDialer.Dial(url, nil)
|
||||
if err == nil {
|
||||
log.Info().Msg("Connected to mail websocket")
|
||||
*conn = *new_conn
|
||||
return nil
|
||||
}
|
||||
if errors.Is(err, websocket.ErrBadHandshake) {
|
||||
return fmt.Errorf("Bad handshake connecting to email websocket, bailing.")
|
||||
}
|
||||
log.Error().Err(err).Str("url", url).Msg("Error connecting to WebSocket")
|
||||
time.Sleep(3 * time.Second)
|
||||
|
||||
}
|
||||
}
|
||||
18
comms/text/text.go
Normal file
18
comms/text/text.go
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
package text
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
)
|
||||
|
||||
func SendText(ctx context.Context, source string, destination string, message string) (string, error) {
|
||||
switch config.TextProvider {
|
||||
case "voipms":
|
||||
return sendTextVoipms(ctx, destination, message)
|
||||
case "twilio":
|
||||
return sendTextTwilio(ctx, source, destination, message)
|
||||
}
|
||||
return "", fmt.Errorf("Unsupported provider '%s'", config.TextProvider)
|
||||
}
|
||||
33
comms/text/twilio.go
Normal file
33
comms/text/twilio.go
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
package text
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/twilio/twilio-go"
|
||||
twilioApi "github.com/twilio/twilio-go/rest/api/v2010"
|
||||
)
|
||||
|
||||
// sendTextTwilio delivers message to destination through Twilio's
// Messaging Service (config.TwilioMessagingServiceSID selects the sending
// number, so source is only used for logging here). It returns the Twilio
// message SID, or "" when Twilio reports success without one.
//
// NOTE(review): ctx is accepted but never used by this implementation —
// confirm whether cancellation should be threaded into the Twilio call.
// The client presumably picks up credentials from the TWILIO_* environment
// variables via twilio.NewRestClient() — verify against deployment config.
func sendTextTwilio(ctx context.Context, source string, destination string, message string) (string, error) {
	client := twilio.NewRestClient()

	params := &twilioApi.CreateMessageParams{}
	params.SetMessagingServiceSid(config.TwilioMessagingServiceSID)

	params.SetBody(message)
	params.SetTo(destination)
	resp, err := client.Api.CreateMessage(params)

	if err != nil {
		return "", fmt.Errorf("Failed to create message to %s: %w", destination, err)
	}
	if resp.Sid == nil {
		// No error but no SID: unexpected; log and report an empty id
		// rather than failing the send.
		log.Warn().Str("src", source).Str("dst", destination).Msg("Text message sid is nil")
		return "", nil
	}
	log.Info().Str("src", source).Str("dst", destination).Str("message", message).Str("sid", *resp.Sid).Msg("Created text message")
	return *resp.Sid, nil
}
|
||||
|
||||
88
comms/text/voipms.go
Normal file
88
comms/text/voipms.go
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
package text
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
// VOIP_MS_API is the base URL of the voip.ms REST API.
var VOIP_MS_API = "https://voip.ms/api/v1/rest.php"

// VoipMSResponse is the JSON payload voip.ms returns for SMS/MMS send
// calls.
type VoipMSResponse struct {
	// MMS appears to be the id of the created MMS message — it is what
	// sendTextVoipms returns as the message id. Confirm against the
	// voip.ms API docs.
	MMS int `json:"mms"`
	// Message: human-readable detail accompanying Status — presumably;
	// not consumed by this package.
	Message string `json:"message"`
	// Status is the API status string; sendTextVoipms checks for
	// "ip_not_enabled".
	Status string `json:"status"`
	// SMS presumably mirrors MMS for plain SMS sends — confirm.
	SMS int `json:"sms"`
}
|
||||
|
||||
func sendTextVoipms(ctx context.Context, to string, content string, media ...string) (string, error) {
|
||||
if len(content) > 2048 {
|
||||
return "", errors.New("Message content is more than 160 characters")
|
||||
}
|
||||
params := url.Values{}
|
||||
params.Add("api_password", config.VoipMSPassword)
|
||||
params.Add("api_username", config.VoipMSUsername)
|
||||
params.Add("method", "sendMMS")
|
||||
params.Add("did", config.VoipMSNumber)
|
||||
params.Add("dst", to)
|
||||
params.Add("message", content)
|
||||
/*
|
||||
for i, med := range media {
|
||||
// These should be one of:
|
||||
// 1. A full URL that the service cat GET
|
||||
// 2. A base64-encoded image starting with "data:image/png;base64,iVBORw0KGgoAAAANSUh..."
|
||||
params.Add(fmt.Sprintf("media%d", i+1), med)
|
||||
}
|
||||
params.Add(fmt.Sprintf("media%d", len(media)+1), "")
|
||||
*/
|
||||
|
||||
response, err := makeVoipMSRequest(params)
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("Failed to send MMS: %w", err)
|
||||
}
|
||||
if response.Status == "ip_not_enabled" {
|
||||
return "", fmt.Errorf("Failed to send SMS: the IP address of the server is not enabled with voip.ms. You'll need to enable this server's IP with them.")
|
||||
}
|
||||
log.Info().Str("status", response.Status).Int("mms", response.MMS).Msg("Sent MMS message")
|
||||
return strconv.Itoa(response.MMS), nil
|
||||
}
|
||||
|
||||
func makeVoipMSRequest(params url.Values) (VoipMSResponse, error) {
|
||||
result := VoipMSResponse{}
|
||||
// Construct the URL with query parameters
|
||||
full_url := VOIP_MS_API + "?" + params.Encode()
|
||||
|
||||
// Make the HTTP request
|
||||
log.Debug().Str("full_url", full_url).Msg("Sending command to VoIP.ms")
|
||||
resp, err := http.Get(full_url)
|
||||
if err != nil {
|
||||
log.Warn().Err(err).Str("url", full_url).Msg("Failed to make request to Voip.MS")
|
||||
return result, fmt.Errorf("Error making request: %w", err)
|
||||
}
|
||||
defer lint.LogOnErr(resp.Body.Close, "failed closing response body")
|
||||
|
||||
// Read the response body
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
log.Warn().Err(err).Str("url", full_url).Msg("Failed to read Voip.MS response body")
|
||||
return result, fmt.Errorf("Failed to read response: %w", err)
|
||||
}
|
||||
log.Info().Str("response", string(body)).Msg("Response from Voip.MS")
|
||||
|
||||
// Parse the JSON response
|
||||
var response VoipMSResponse
|
||||
err = json.Unmarshal(body, &response)
|
||||
if err != nil {
|
||||
return result, fmt.Errorf("Failed to unmarshal JSON response: %w", err)
|
||||
}
|
||||
return response, nil
|
||||
}
|
||||
237
config/config.go
237
config/config.go
|
|
@ -4,45 +4,76 @@ import (
|
|||
"fmt"
|
||||
"net/url"
|
||||
"os"
|
||||
"strconv"
|
||||
|
||||
"github.com/nyaruka/phonenumbers"
|
||||
//"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
var Bind, ClientID, ClientSecret, Environment, FieldseekerSchemaDirectory, MapboxToken, PGDSN, URLReport, URLSync, FilesDirectoryPublic, FilesDirectoryUser string
|
||||
|
||||
// BuildArcGISAuthURL constructs the ArcGIS OAuth2 authorization URL for
// the given client id, with the redirect URI and a token expiration that
// depends on the environment (2 weeks in production, 20 minutes
// otherwise).
//
// NOTE(review): the PKCE code_challenge parameters are commented out
// below, so despite the original "with PKCE" comment this URL does not
// currently carry a PKCE challenge — confirm whether that is deliberate.
func BuildArcGISAuthURL(clientID string) string {
	baseURL := "https://www.arcgis.com/sharing/rest/oauth2/authorize/"

	params := url.Values{}
	params.Add("client_id", clientID)
	params.Add("redirect_uri", RedirectURL())
	params.Add("response_type", "code")
	//params.Add("code_challenge", generateCodeChallenge(codeVerifier))
	//params.Add("code_challenge_method", "S256")

	// See https://developers.arcgis.com/rest/users-groups-and-items/token/
	// expiration is defined in minutes
	var expiration int
	if IsProductionEnvironment() {
		// 2 weeks is the maximum allowed
		expiration = 20160
	} else {
		expiration = 20
	}
	params.Add("expiration", strconv.Itoa(expiration))

	return baseURL + "?" + params.Encode()
}
|
||||
// Package-level configuration, populated from environment variables by
// Parse(). Values are required (Parse returns an error when they are
// missing) unless Parse documents a default.
var (
	Bind                       string
	ClientID                   string
	ClientSecret               string
	DomainRMO                  string
	DomainNidus                string
	DomainTegola               string
	Environment                string
	FilesDirectory             string
	FieldseekerSchemaDirectory string
	ForwardEmailAPIToken       string
	ForwardEmailRMOAddress     string
	ForwardEmailRMOPassword    string
	ForwardEmailRMOUsername    string
	ForwardEmailNidusAddress   string
	ForwardEmailNidusPassword  string
	ForwardEmailNidusUsername  string
	LobAPIKey                  string
	PGDSN                      string
	PhoneNumberReport          phonenumbers.PhoneNumber
	PhoneNumberReportStr       string
	PhoneNumberSupport         phonenumbers.PhoneNumber
	PhoneNumberSupportStr      string
	SentryDSN                  string
	SentryDSNFrontend          string
	StadiaMapsAPIKey           string
	TextProvider               string
	TwilioAuthToken            string
	TwilioAccountSID           string
	TwilioMessagingServiceSID  string
	TwilioRCSSenderRMO         string
	VoipMSNumber               string
	VoipMSPassword             string
	VoipMSUsername             string
)
|
||||
|
||||
// IsProductionEnvironment reports whether the process runs with
// ENVIRONMENT=PRODUCTION (Parse only accepts PRODUCTION or DEVELOPMENT).
func IsProductionEnvironment() bool {
	return Environment == "PRODUCTION"
}
|
||||
|
||||
func MakeURLSync(path string) string {
|
||||
return fmt.Sprintf("https://%s%s", URLSync, path)
|
||||
// makeURL builds an absolute https URL on domain. path is a fmt format
// string; each value in args is query-escaped before being substituted
// into it, so callers may safely interpolate user-supplied values into
// patterns such as "/user/%s".
func makeURL(domain, path string, args ...string) string {
	// Pre-size: one escaped entry per argument.
	escaped := make([]any, 0, len(args))
	for _, a := range args {
		escaped = append(escaped, url.QueryEscape(a))
	}
	pattern := "https://" + domain + path
	return fmt.Sprintf(pattern, escaped...)
}
|
||||
|
||||
func Parse() error {
|
||||
// MakeURLNidus builds an absolute URL on the Nidus domain; see makeURL
// for the path/args semantics.
func MakeURLNidus(path string, args ...string) string {
	return makeURL(DomainNidus, path, args...)
}

// MakeURLReport builds an absolute URL on the RMO (report) domain.
func MakeURLReport(path string, args ...string) string {
	return makeURL(DomainRMO, path, args...)
}

// MakeURLTegola builds an absolute URL on the Tegola tile-server domain.
func MakeURLTegola(path string, args ...string) string {
	//log.Debug().Str("path", path).Strs("args", args).Str("domain", DomainTegola).Msg("Making tegola url")
	return makeURL(DomainTegola, path, args...)
}
|
||||
|
||||
func Parse() (err error) {
|
||||
Bind = os.Getenv("BIND")
|
||||
if Bind == "" {
|
||||
Bind = ":9001"
|
||||
}
|
||||
ClientID = os.Getenv("ARCGIS_CLIENT_ID")
|
||||
if ClientID == "" {
|
||||
return fmt.Errorf("You must specify a non-empty ARCGIS_CLIENT_ID")
|
||||
|
|
@ -51,48 +82,142 @@ func Parse() error {
|
|||
if ClientSecret == "" {
|
||||
return fmt.Errorf("You must specify a non-empty ARCGIS_CLIENT_SECRET")
|
||||
}
|
||||
URLReport = os.Getenv("URL_REPORT")
|
||||
if URLReport == "" {
|
||||
return fmt.Errorf("You must specify a non-empty URL_REPORT")
|
||||
DomainNidus = os.Getenv("DOMAIN_NIDUS")
|
||||
if DomainNidus == "" {
|
||||
return fmt.Errorf("You must specify a non-empty DOMAIN_NIDUS")
|
||||
}
|
||||
URLSync = os.Getenv("URL_SYNC")
|
||||
if URLSync == "" {
|
||||
return fmt.Errorf("You must specify a non-empty URL_SYNC")
|
||||
DomainRMO = os.Getenv("DOMAIN_RMO")
|
||||
if DomainRMO == "" {
|
||||
return fmt.Errorf("You must specify a non-empty DOMAIN_RMO")
|
||||
}
|
||||
Bind = os.Getenv("BIND")
|
||||
if Bind == "" {
|
||||
Bind = ":9001"
|
||||
DomainTegola = os.Getenv("DOMAIN_TEGOLA")
|
||||
if DomainTegola == "" {
|
||||
return fmt.Errorf("You must specify a non-empty DOMAIN_TEGOLA")
|
||||
}
|
||||
Environment = os.Getenv("ENVIRONMENT")
|
||||
if Environment == "" {
|
||||
return fmt.Errorf("You must specify a non-empty ENVIRONMENT")
|
||||
}
|
||||
if !(Environment == "PRODUCTION" || Environment == "DEVELOPMENT") {
|
||||
if Environment != "PRODUCTION" && Environment != "DEVELOPMENT" {
|
||||
return fmt.Errorf("ENVIRONMENT should be either DEVELOPMENT or PRODUCTION")
|
||||
}
|
||||
MapboxToken = os.Getenv("MAPBOX_TOKEN")
|
||||
if MapboxToken == "" {
|
||||
return fmt.Errorf("You must specify a non-empty MAPBOX_TOKEN")
|
||||
}
|
||||
PGDSN = os.Getenv("POSTGRES_DSN")
|
||||
if PGDSN == "" {
|
||||
return fmt.Errorf("You must specify a non-empty POSTGRES_DSN")
|
||||
}
|
||||
FieldseekerSchemaDirectory = os.Getenv("FIELDSEEKER_SCHEMA_DIRECTORY")
|
||||
if FieldseekerSchemaDirectory == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FIELDSEEKER_SCHEMA_DIRECTORY")
|
||||
}
|
||||
FilesDirectoryPublic = os.Getenv("FILES_DIRECTORY_PUBLIC")
|
||||
if FilesDirectoryPublic == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FILES_DIRECTORY_PUBLIC")
|
||||
FilesDirectory = os.Getenv("FILES_DIRECTORY")
|
||||
if FilesDirectory == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FILES_DIRECTORY")
|
||||
}
|
||||
FilesDirectoryUser = os.Getenv("FILES_DIRECTORY_USER")
|
||||
if FilesDirectoryUser == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FILES_DIRECTORY_USER")
|
||||
ForwardEmailAPIToken = os.Getenv("FORWARDEMAIL_API_TOKEN")
|
||||
if ForwardEmailAPIToken == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FORWARDEMAIL_API_TOKEN")
|
||||
}
|
||||
ForwardEmailRMOAddress = os.Getenv("FORWARDEMAIL_RMO_ADDRESS")
|
||||
if ForwardEmailRMOAddress == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FORWARDEMAIL_RMO_ADDRESS")
|
||||
}
|
||||
ForwardEmailRMOUsername = os.Getenv("FORWARDEMAIL_RMO_USERNAME")
|
||||
if ForwardEmailRMOUsername == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FORWARDEMAIL_RMO_USERNAME")
|
||||
}
|
||||
ForwardEmailRMOPassword = os.Getenv("FORWARDEMAIL_RMO_PASSWORD")
|
||||
if ForwardEmailRMOPassword == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FORWARDEMAIL_RMO_PASSWORD")
|
||||
}
|
||||
ForwardEmailNidusAddress = os.Getenv("FORWARDEMAIL_NIDUS_ADDRESS")
|
||||
if ForwardEmailNidusAddress == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FORWARDEMAIL_NIDUS_ADDRESS")
|
||||
}
|
||||
ForwardEmailNidusUsername = os.Getenv("FORWARDEMAIL_NIDUS_USERNAME")
|
||||
if ForwardEmailNidusUsername == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FORWARDEMAIL_NIDUS_USERNAME")
|
||||
}
|
||||
ForwardEmailNidusPassword = os.Getenv("FORWARDEMAIL_NIDUS_PASSWORD")
|
||||
if ForwardEmailNidusPassword == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FORWARDEMAIL_NIDUS_PASSWORD")
|
||||
}
|
||||
LobAPIKey = os.Getenv("LOB_API_KEY")
|
||||
if LobAPIKey == "" {
|
||||
return fmt.Errorf("You must specify a non-empty LOB_API_KEY")
|
||||
}
|
||||
PGDSN = os.Getenv("POSTGRES_DSN")
|
||||
if PGDSN == "" {
|
||||
return fmt.Errorf("You must specify a non-empty POSTGRES_DSN")
|
||||
}
|
||||
PhoneNumberReportStr = os.Getenv("PHONE_NUMBER_RMO")
|
||||
if PhoneNumberReportStr == "" {
|
||||
return fmt.Errorf("You must specify a non-empty PHONE_NUMBER_RMO")
|
||||
}
|
||||
p, err := phonenumbers.Parse(PhoneNumberReportStr, "US")
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to parse '%s' as a valid phone number: %w", PhoneNumberReportStr, err)
|
||||
}
|
||||
PhoneNumberReport = *p
|
||||
|
||||
PhoneNumberSupportStr = os.Getenv("PHONE_NUMBER_SUPPORT")
|
||||
if PhoneNumberSupportStr == "" {
|
||||
return fmt.Errorf("You must specify a non-empty PHONE_NUMBER_SUPPORT")
|
||||
}
|
||||
p, err = phonenumbers.Parse(PhoneNumberSupportStr, "US")
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to parse '%s' as a valid phone number: %w", PhoneNumberSupportStr, err)
|
||||
}
|
||||
PhoneNumberSupport = *p
|
||||
|
||||
SentryDSN = os.Getenv("SENTRY_DSN")
|
||||
if SentryDSN == "" {
|
||||
return fmt.Errorf("You must specify a non-empty SENTRY_DSN")
|
||||
}
|
||||
SentryDSNFrontend = os.Getenv("SENTRY_DSN_FRONTEND")
|
||||
if SentryDSNFrontend == "" {
|
||||
return fmt.Errorf("You must specify a non-empty SENTRY_DSN_FRONTEND")
|
||||
}
|
||||
StadiaMapsAPIKey = os.Getenv("STADIA_MAPS_API_KEY")
|
||||
if StadiaMapsAPIKey == "" {
|
||||
return fmt.Errorf("You must specify a non-empty STADIA_MAPS_API_KEY")
|
||||
}
|
||||
TextProvider = os.Getenv("TEXT_PROVIDER")
|
||||
switch TextProvider {
|
||||
case "":
|
||||
return fmt.Errorf("You must specify a non-empty TEXT_PROVIDER")
|
||||
case "twilio":
|
||||
case "voipms":
|
||||
break
|
||||
default:
|
||||
return fmt.Errorf("Unrecognized text provider '%s'", TextProvider)
|
||||
}
|
||||
TwilioAccountSID = os.Getenv("TWILIO_ACCOUNT_SID")
|
||||
if TwilioAccountSID == "" {
|
||||
return fmt.Errorf("You must specify a non-empty TWILIO_ACCOUNT_SID")
|
||||
}
|
||||
TwilioAuthToken = os.Getenv("TWILIO_AUTH_TOKEN")
|
||||
if TwilioAuthToken == "" {
|
||||
return fmt.Errorf("You must specify a non-empty TWILIO_AUTH_TOKEN")
|
||||
}
|
||||
TwilioMessagingServiceSID = os.Getenv("TWILIO_MESSAGING_SERVICE_SID")
|
||||
if TwilioMessagingServiceSID == "" {
|
||||
return fmt.Errorf("You must specify a non-empty TWILIO_MESSAGING_SERVICE_SID")
|
||||
}
|
||||
TwilioRCSSenderRMO = os.Getenv("TWILIO_RCS_SENDER_RMO")
|
||||
if TwilioRCSSenderRMO == "" {
|
||||
return fmt.Errorf("You must specify a non-empty TWILIO_RCS_SENDER_RMO")
|
||||
}
|
||||
VoipMSNumber = os.Getenv("VOIPMS_NUMBER")
|
||||
if VoipMSNumber == "" {
|
||||
return fmt.Errorf("You must specify a non-empty VOIPMS_NUMBER")
|
||||
}
|
||||
VoipMSPassword = os.Getenv("VOIPMS_PASSWORD")
|
||||
if VoipMSPassword == "" {
|
||||
return fmt.Errorf("You must specify a non-empty VOIPMS_PASSWORD")
|
||||
}
|
||||
VoipMSUsername = os.Getenv("VOIPMS_USERNAME")
|
||||
if VoipMSPassword == "" {
|
||||
return fmt.Errorf("You must specify a non-empty VOIPMS_USERNAME")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func RedirectURL() string {
|
||||
return MakeURLSync("/arcgis/oauth/callback")
|
||||
func ArcGISOauthRedirectURL() string {
|
||||
return MakeURLNidus("/oauth/arcgis/callback")
|
||||
}
|
||||
|
|
|
|||
1
db/bob
1
db/bob
|
|
@ -1 +0,0 @@
|
|||
Subproject commit d277a066d6bac5336e49615495ce2c74e736a7fd
|
||||
|
|
@ -1,2 +1,3 @@
|
|||
#!/run/current-system/sw/bin/bash
|
||||
PSQL_DSN="postgresql://?host=/var/run/postgresql&sslmode=disable&dbname=nidus-sync" bob/bobgen-psql
|
||||
PSQL_DSN="postgresql://?host=/var/run/postgresql&sslmode=disable&dbname=nidus-sync" /tmp/bobgen-psql
|
||||
#PSQL_DSN="postgresql://?host=/var/run/postgresql&sslmode=disable&dbname=nidus-sync" bob/gen/bobgen-psql/bobgen-psql
|
||||
|
|
|
|||
|
|
@ -1,5 +1,16 @@
|
|||
aliases:
|
||||
arcgis.user_:
|
||||
up_plural: "ArcgisUsers"
|
||||
up_singular: "ArcgisUser"
|
||||
down_plural: "arcgisusers"
|
||||
down_singular: "arcgisuser"
|
||||
organization:
|
||||
relationships:
|
||||
publicreport.pool.pool_organization_id_fkey: "PublicreportPool"
|
||||
fieldseeker.pool.pool_organization_id_fkey: "FieldseekerPool"
|
||||
user_:
|
||||
relationships:
|
||||
fileupload.pool.pool_creator_id_fkey: "FileuploadPool"
|
||||
up_plural: "Users"
|
||||
up_singular: "User"
|
||||
down_plural: "users"
|
||||
|
|
@ -7,10 +18,39 @@ aliases:
|
|||
no_tests: true
|
||||
psql:
|
||||
schemas:
|
||||
- "comms"
|
||||
- "fieldseeker"
|
||||
- "fileupload"
|
||||
- "lob"
|
||||
- "public"
|
||||
- "publicreport"
|
||||
- "fieldseeker"
|
||||
- "tile"
|
||||
shared_schema: "public"
|
||||
queries:
|
||||
- ./sql
|
||||
uuid_pkg: google
|
||||
plugins_preset: "all"
|
||||
plugins:
|
||||
counts:
|
||||
disabled: true
|
||||
dbinfo:
|
||||
destination: "dbinfo"
|
||||
disabled: false
|
||||
pkgname: "dbinfo"
|
||||
enums:
|
||||
destination: "enums"
|
||||
disabled: false
|
||||
pkgname: "enums"
|
||||
factory:
|
||||
disabled: true
|
||||
pkgname: "factory"
|
||||
destination: "factory"
|
||||
joins:
|
||||
disabled: true
|
||||
loaders:
|
||||
disabled: false
|
||||
models:
|
||||
destination: "models"
|
||||
disabled: false
|
||||
pkgname: "models"
|
||||
where:
|
||||
disabled: false
|
||||
|
|
|
|||
165
db/connection.go
165
db/connection.go
|
|
@ -7,38 +7,149 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"sync"
|
||||
|
||||
//"github.com/georgysavva/scany/v2/pgxscan"
|
||||
//"github.com/jackc/pgx/v5"
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/jet/postgres"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
"github.com/jackc/pgx/v5/stdlib"
|
||||
_ "github.com/jackc/pgx/v5/stdlib"
|
||||
"github.com/pressly/goose/v3"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/stephenafamo/bob"
|
||||
"github.com/stephenafamo/scan"
|
||||
pgxgeom "github.com/twpayne/pgx-geom"
|
||||
)
|
||||
|
||||
var ErrNoRows = pgx.ErrNoRows
|
||||
|
||||
//go:embed migrations/*.sql
|
||||
var embedMigrations embed.FS
|
||||
|
||||
type postgres struct {
|
||||
type pginstance struct {
|
||||
BobDB bob.DB
|
||||
PGXPool *pgxpool.Pool
|
||||
}
|
||||
|
||||
var (
|
||||
PGInstance *postgres
|
||||
pgOnce sync.Once
|
||||
PGInstance *pginstance
|
||||
)
|
||||
|
||||
func ExecuteNone(ctx context.Context, stmt postgres.Statement) error {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
_, err := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
return err
|
||||
}
|
||||
func ExecuteNoneTx(ctx context.Context, txn Ex, stmt postgres.Statement) error {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
r, err := txn.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("query: %w", err)
|
||||
}
|
||||
r.Close()
|
||||
return nil
|
||||
}
|
||||
// ExecuteNoneTxBob runs stmt on a bob transaction, discarding any rows
// the statement produces. The rows are closed via lint.LogOnErr, so a
// close failure is logged rather than returned to the caller.
//
// NOTE(review): unlike the pgx variants, any deferred row error is not
// surfaced here — confirm whether bob.Tx/QueryContext callers rely on
// that.
func ExecuteNoneTxBob(ctx context.Context, txn bob.Tx, stmt postgres.Statement) error {
	query, args := stmt.Sql()

	r, err := txn.QueryContext(ctx, query, args...)
	if err != nil {
		return fmt.Errorf("query: %w", err)
	}
	defer lint.LogOnErr(r.Close, "close rows")
	return nil
}
|
||||
func ExecuteOne[T any](ctx context.Context, stmt postgres.Statement) (T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
var result T
|
||||
row, err := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return result, fmt.Errorf("execute query: %w", err)
|
||||
}
|
||||
var collected *T
|
||||
collected, err = pgx.CollectOneRow(row, pgx.RowToAddrOfStructByPos[T])
|
||||
if err != nil || collected == nil {
|
||||
return result, fmt.Errorf("collect row: %w", err)
|
||||
}
|
||||
return *collected, nil
|
||||
}
|
||||
func ExecuteOneTx[T any](ctx context.Context, txn Ex, stmt postgres.Statement) (T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
//result, err := scan.One(ctx, txn, scan.StructMapper[T](), query, args...)
|
||||
row, err := txn.Query(ctx, query, args...)
|
||||
var result T
|
||||
if err != nil {
|
||||
return result, fmt.Errorf("txn query: %w", err)
|
||||
}
|
||||
var collected *T
|
||||
collected, err = pgx.CollectOneRow(row, pgx.RowToAddrOfStructByPos[T])
|
||||
if err != nil || collected == nil {
|
||||
return result, fmt.Errorf("collect row: %w", err)
|
||||
}
|
||||
return *collected, nil
|
||||
}
|
||||
// ExecuteOneTxBob runs stmt on a bob transaction and scans a single row
// into T using scan.StructMapper (field-name based mapping), unlike the
// pgx variants which map by column position.
func ExecuteOneTxBob[T any](ctx context.Context, txn bob.Tx, stmt postgres.Statement) (T, error) {
	query, args := stmt.Sql()

	return scan.One(ctx, txn, scan.StructMapper[T](), query, args...)
}
|
||||
func ExecuteMany[T any](ctx context.Context, stmt postgres.Statement) ([]T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
rows, err := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("execute query: %w", err)
|
||||
}
|
||||
collected, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByPos[T])
|
||||
if err != nil {
|
||||
return []T{}, fmt.Errorf("collect rows: %w", err)
|
||||
}
|
||||
results := make([]T, len(collected))
|
||||
for i, c := range collected {
|
||||
if c == nil {
|
||||
return results, fmt.Errorf("null collected")
|
||||
}
|
||||
results[i] = *c
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
func ExecuteManyTx[T any](ctx context.Context, txn Ex, stmt postgres.Statement) ([]T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
rows, err := txn.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("execute query: %w", err)
|
||||
}
|
||||
collected, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByPos[T])
|
||||
if err != nil {
|
||||
return []T{}, fmt.Errorf("collect rows: %w", err)
|
||||
}
|
||||
results := make([]T, len(collected))
|
||||
for i, c := range collected {
|
||||
if c == nil {
|
||||
return results, fmt.Errorf("null collected")
|
||||
}
|
||||
results[i] = *c
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
func doMigrations(connection_string string) error {
|
||||
log.Info().Str("dsn", connection_string).Msg("Connecting to database")
|
||||
log.Debug().Str("dsn", connection_string).Msg("Connecting to database")
|
||||
db, err := sql.Open("pgx", connection_string)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to open database connection: %w", err)
|
||||
}
|
||||
defer db.Close()
|
||||
defer func() {
|
||||
err := db.Close()
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to close database connection")
|
||||
}
|
||||
}()
|
||||
row := db.QueryRowContext(context.Background(), "SELECT version()")
|
||||
var val string
|
||||
if err := row.Scan(&val); err != nil {
|
||||
|
|
@ -76,7 +187,7 @@ func doMigrations(connection_string string) error {
|
|||
}
|
||||
|
||||
func InitializeDatabase(ctx context.Context, uri string) error {
|
||||
log.Info().Str("dsn", uri).Msg("Connecting to database")
|
||||
log.Debug().Str("dsn", uri).Msg("Initializing database")
|
||||
needs, err := needsMigrations(uri)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to determine if migrations are needed: %w", err)
|
||||
|
|
@ -92,18 +203,26 @@ func InitializeDatabase(ctx context.Context, uri string) error {
|
|||
return fmt.Errorf("Failed to handle migrations: %w", err)
|
||||
}
|
||||
} else {
|
||||
log.Info().Msg("No database migrations necessary")
|
||||
log.Debug().Msg("No database migrations necessary")
|
||||
}
|
||||
|
||||
pgOnce.Do(func() {
|
||||
db, e := pgxpool.New(ctx, uri)
|
||||
bobDB := bob.NewDB(stdlib.OpenDBFromPool(db))
|
||||
PGInstance = &postgres{bobDB, db}
|
||||
err = e
|
||||
})
|
||||
config, err := pgxpool.ParseConfig(uri)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to create connection pool: %w", err)
|
||||
return fmt.Errorf("parse config: %w", err)
|
||||
}
|
||||
config.AfterConnect = func(ctx2 context.Context, conn *pgx.Conn) error {
|
||||
err2 := pgxgeom.Register(ctx, conn)
|
||||
if err2 != nil {
|
||||
return fmt.Errorf("pgxgeom register: %w", err2)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
db, err := pgxpool.NewWithConfig(ctx, config)
|
||||
if err != nil {
|
||||
return fmt.Errorf("new pool: %w", err)
|
||||
}
|
||||
bobDB := bob.NewDB(stdlib.OpenDBFromPool(db))
|
||||
PGInstance = &pginstance{bobDB, db}
|
||||
|
||||
var current string
|
||||
query := `SELECT current_database()`
|
||||
|
|
@ -111,11 +230,6 @@ func InitializeDatabase(ctx context.Context, uri string) error {
|
|||
if err != nil {
|
||||
return fmt.Errorf("Failed to get database current: %w", err)
|
||||
}
|
||||
log.Info().Str("database", current).Msg("Connected to database")
|
||||
err = prepareStatements(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to initialize prepared statements: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
@ -124,7 +238,12 @@ func needsMigrations(connection_string string) (*bool, error) {
|
|||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to open database connection: %w", err)
|
||||
}
|
||||
defer db.Close()
|
||||
defer func() {
|
||||
err := db.Close()
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to close database connection")
|
||||
}
|
||||
}()
|
||||
row := db.QueryRowContext(context.Background(), "SELECT version()")
|
||||
var val string
|
||||
if err := row.Scan(&val); err != nil {
|
||||
|
|
|
|||
26
db/dberrors/address.bob.go
Normal file
26
db/dberrors/address.bob.go
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var AddressErrors = &addressErrors{
|
||||
ErrUniqueAddressPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "address",
|
||||
columns: []string{"id"},
|
||||
s: "address_pkey",
|
||||
},
|
||||
|
||||
ErrUniqueAddressGidUnique: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "address",
|
||||
columns: []string{"gid"},
|
||||
s: "address_gid_unique",
|
||||
},
|
||||
}
|
||||
|
||||
type addressErrors struct {
|
||||
ErrUniqueAddressPkey *UniqueConstraintError
|
||||
|
||||
ErrUniqueAddressGidUnique *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
|
|||
17
db/dberrors/comms.email_contact.bob.go
Normal file
17
db/dberrors/comms.email_contact.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommsEmailContactErrors = &commsEmailContactErrors{
|
||||
ErrUniqueEmailPkey: &UniqueConstraintError{
|
||||
schema: "comms",
|
||||
table: "email_contact",
|
||||
columns: []string{"address"},
|
||||
s: "email_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type commsEmailContactErrors struct {
|
||||
ErrUniqueEmailPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/comms.email_log.bob.go
Normal file
17
db/dberrors/comms.email_log.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommsEmailLogErrors = &commsEmailLogErrors{
|
||||
ErrUniqueEmailLogPkey: &UniqueConstraintError{
|
||||
schema: "comms",
|
||||
table: "email_log",
|
||||
columns: []string{"id"},
|
||||
s: "email_log_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type commsEmailLogErrors struct {
|
||||
ErrUniqueEmailLogPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/comms.email_template.bob.go
Normal file
17
db/dberrors/comms.email_template.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommsEmailTemplateErrors = &commsEmailTemplateErrors{
|
||||
ErrUniqueEmailTemplatePkey: &UniqueConstraintError{
|
||||
schema: "comms",
|
||||
table: "email_template",
|
||||
columns: []string{"id"},
|
||||
s: "email_template_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type commsEmailTemplateErrors struct {
|
||||
ErrUniqueEmailTemplatePkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/comms.mailer.bob.go
Normal file
17
db/dberrors/comms.mailer.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommsMailerErrors = &commsMailerErrors{
|
||||
ErrUniqueMailerPkey: &UniqueConstraintError{
|
||||
schema: "comms",
|
||||
table: "mailer",
|
||||
columns: []string{"id"},
|
||||
s: "mailer_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type commsMailerErrors struct {
|
||||
ErrUniqueMailerPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/comms.phone.bob.go
Normal file
17
db/dberrors/comms.phone.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommsPhoneErrors = &commsPhoneErrors{
|
||||
ErrUniquePhonePkey: &UniqueConstraintError{
|
||||
schema: "comms",
|
||||
table: "phone",
|
||||
columns: []string{"e164"},
|
||||
s: "phone_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type commsPhoneErrors struct {
|
||||
ErrUniquePhonePkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/comms.text_job.bob.go
Normal file
17
db/dberrors/comms.text_job.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommsTextJobErrors = &commsTextJobErrors{
|
||||
ErrUniqueTextJobPkey: &UniqueConstraintError{
|
||||
schema: "comms",
|
||||
table: "text_job",
|
||||
columns: []string{"id"},
|
||||
s: "text_job_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type commsTextJobErrors struct {
|
||||
ErrUniqueTextJobPkey *UniqueConstraintError
|
||||
}
|
||||
26
db/dberrors/comms.text_log.bob.go
Normal file
26
db/dberrors/comms.text_log.bob.go
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommsTextLogErrors = &commsTextLogErrors{
|
||||
ErrUniqueTextLogPkey: &UniqueConstraintError{
|
||||
schema: "comms",
|
||||
table: "text_log",
|
||||
columns: []string{"id"},
|
||||
s: "text_log_pkey",
|
||||
},
|
||||
|
||||
ErrUniqueTextLogTwilioSidKey: &UniqueConstraintError{
|
||||
schema: "comms",
|
||||
table: "text_log",
|
||||
columns: []string{"twilio_sid"},
|
||||
s: "text_log_twilio_sid_key",
|
||||
},
|
||||
}
|
||||
|
||||
type commsTextLogErrors struct {
|
||||
ErrUniqueTextLogPkey *UniqueConstraintError
|
||||
|
||||
ErrUniqueTextLogTwilioSidKey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/communication.bob.go
Normal file
17
db/dberrors/communication.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommunicationErrors = &communicationErrors{
|
||||
ErrUniqueCommunicationPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "communication",
|
||||
columns: []string{"id"},
|
||||
s: "communication_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type communicationErrors struct {
|
||||
ErrUniqueCommunicationPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/communication_log_entry.bob.go
Normal file
17
db/dberrors/communication_log_entry.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommunicationLogEntryErrors = &communicationLogEntryErrors{
|
||||
ErrUniqueCommunicationLogEntryPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "communication_log_entry",
|
||||
columns: []string{"id"},
|
||||
s: "communication_log_entry_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type communicationLogEntryErrors struct {
|
||||
ErrUniqueCommunicationLogEntryPkey *UniqueConstraintError
|
||||
}
|
||||
26
db/dberrors/compliance_report_request.bob.go
Normal file
26
db/dberrors/compliance_report_request.bob.go
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ComplianceReportRequestErrors = &complianceReportRequestErrors{
|
||||
ErrUniqueComplianceReportRequestPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "compliance_report_request",
|
||||
columns: []string{"id"},
|
||||
s: "compliance_report_request_pkey",
|
||||
},
|
||||
|
||||
ErrUniqueComplianceReportRequestPublicIdKey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "compliance_report_request",
|
||||
columns: []string{"public_id"},
|
||||
s: "compliance_report_request_public_id_key",
|
||||
},
|
||||
}
|
||||
|
||||
type complianceReportRequestErrors struct {
|
||||
ErrUniqueComplianceReportRequestPkey *UniqueConstraintError
|
||||
|
||||
ErrUniqueComplianceReportRequestPublicIdKey *UniqueConstraintError
|
||||
}
|
||||
26
db/dberrors/compliance_report_request_mailer.bob.go
Normal file
26
db/dberrors/compliance_report_request_mailer.bob.go
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ComplianceReportRequestMailerErrors = &complianceReportRequestMailerErrors{
|
||||
ErrUniqueComplianceReportRequestMailerPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "compliance_report_request_mailer",
|
||||
columns: []string{"id"},
|
||||
s: "compliance_report_request_mailer_pkey",
|
||||
},
|
||||
|
||||
ErrUniqueComplianceReportRequestMaiComplianceReportRequestId_Key: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "compliance_report_request_mailer",
|
||||
columns: []string{"compliance_report_request_id", "mailer_id"},
|
||||
s: "compliance_report_request_mai_compliance_report_request_id__key",
|
||||
},
|
||||
}
|
||||
|
||||
type complianceReportRequestMailerErrors struct {
|
||||
ErrUniqueComplianceReportRequestMailerPkey *UniqueConstraintError
|
||||
|
||||
ErrUniqueComplianceReportRequestMaiComplianceReportRequestId_Key *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/district_subscription_email.bob.go
Normal file
17
db/dberrors/district_subscription_email.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var DistrictSubscriptionEmailErrors = &districtSubscriptionEmailErrors{
|
||||
ErrUniqueDistrictSubscriptionEmailPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "district_subscription_email",
|
||||
columns: []string{"organization_id", "email_contact_address"},
|
||||
s: "district_subscription_email_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type districtSubscriptionEmailErrors struct {
|
||||
ErrUniqueDistrictSubscriptionEmailPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/district_subscription_phone.bob.go
Normal file
17
db/dberrors/district_subscription_phone.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var DistrictSubscriptionPhoneErrors = &districtSubscriptionPhoneErrors{
|
||||
ErrUniqueDistrictSubscriptionPhonePkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "district_subscription_phone",
|
||||
columns: []string{"organization_id", "phone_e164"},
|
||||
s: "district_subscription_phone_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type districtSubscriptionPhoneErrors struct {
|
||||
ErrUniqueDistrictSubscriptionPhonePkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/feature.bob.go
Normal file
17
db/dberrors/feature.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var FeatureErrors = &featureErrors{
|
||||
ErrUniqueFeaturePkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "feature",
|
||||
columns: []string{"id"},
|
||||
s: "feature_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type featureErrors struct {
|
||||
ErrUniqueFeaturePkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/feature_pool.bob.go
Normal file
17
db/dberrors/feature_pool.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var FeaturePoolErrors = &featurePoolErrors{
|
||||
ErrUniqueFeaturePoolPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "feature_pool",
|
||||
columns: []string{"feature_id"},
|
||||
s: "feature_pool_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type featurePoolErrors struct {
|
||||
ErrUniqueFeaturePoolPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerContainerrelateErrors = &fieldseekerContainerrelateErrors{
|
|||
ErrUniqueContainerrelatePkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "containerrelate",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "containerrelate_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerFieldscoutinglogErrors = &fieldseekerFieldscoutinglogErrors{
|
|||
ErrUniqueFieldscoutinglogPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "fieldscoutinglog",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "fieldscoutinglog_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerHabitatrelateErrors = &fieldseekerHabitatrelateErrors{
|
|||
ErrUniqueHabitatrelatePkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "habitatrelate",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "habitatrelate_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerInspectionsampleErrors = &fieldseekerInspectionsampleErrors{
|
|||
ErrUniqueInspectionsamplePkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "inspectionsample",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "inspectionsample_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerInspectionsampledetailErrors = &fieldseekerInspectionsampledetail
|
|||
ErrUniqueInspectionsampledetailPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "inspectionsampledetail",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "inspectionsampledetail_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerLinelocationErrors = &fieldseekerLinelocationErrors{
|
|||
ErrUniqueLinelocationPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "linelocation",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "linelocation_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerLocationtrackingErrors = &fieldseekerLocationtrackingErrors{
|
|||
ErrUniqueLocationtrackingPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "locationtracking",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "locationtracking_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerMosquitoinspectionErrors = &fieldseekerMosquitoinspectionErrors{
|
|||
ErrUniqueMosquitoinspectionPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "mosquitoinspection",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "mosquitoinspection_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerPointlocationErrors = &fieldseekerPointlocationErrors{
|
|||
ErrUniquePointlocationPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "pointlocation",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "pointlocation_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerPolygonlocationErrors = &fieldseekerPolygonlocationErrors{
|
|||
ErrUniquePolygonlocationPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "polygonlocation",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "polygonlocation_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerPoolErrors = &fieldseekerPoolErrors{
|
|||
ErrUniquePoolPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "pool",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "pool_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerPooldetailErrors = &fieldseekerPooldetailErrors{
|
|||
ErrUniquePooldetailPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "pooldetail",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "pooldetail_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerProposedtreatmentareaErrors = &fieldseekerProposedtreatmentareaEr
|
|||
ErrUniqueProposedtreatmentareaPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "proposedtreatmentarea",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "proposedtreatmentarea_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerQamosquitoinspectionErrors = &fieldseekerQamosquitoinspectionErro
|
|||
ErrUniqueQamosquitoinspectionPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "qamosquitoinspection",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "qamosquitoinspection_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerRodentlocationErrors = &fieldseekerRodentlocationErrors{
|
|||
ErrUniqueRodentlocationPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "rodentlocation",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "rodentlocation_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerSamplecollectionErrors = &fieldseekerSamplecollectionErrors{
|
|||
ErrUniqueSamplecollectionPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "samplecollection",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "samplecollection_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerSamplelocationErrors = &fieldseekerSamplelocationErrors{
|
|||
ErrUniqueSamplelocationPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "samplelocation",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "samplelocation_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerServicerequestErrors = &fieldseekerServicerequestErrors{
|
|||
ErrUniqueServicerequestPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "servicerequest",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "servicerequest_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerSpeciesabundanceErrors = &fieldseekerSpeciesabundanceErrors{
|
|||
ErrUniqueSpeciesabundancePkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "speciesabundance",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "speciesabundance_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerStormdrainErrors = &fieldseekerStormdrainErrors{
|
|||
ErrUniqueStormdrainPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "stormdrain",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "stormdrain_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerTimecardErrors = &fieldseekerTimecardErrors{
|
|||
ErrUniqueTimecardPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "timecard",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "timecard_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerTrapdatumErrors = &fieldseekerTrapdatumErrors{
|
|||
ErrUniqueTrapdataPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "trapdata",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "trapdata_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerTraplocationErrors = &fieldseekerTraplocationErrors{
|
|||
ErrUniqueTraplocationPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "traplocation",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "traplocation_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerTreatmentErrors = &fieldseekerTreatmentErrors{
|
|||
ErrUniqueTreatmentPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "treatment",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "treatment_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerTreatmentareaErrors = &fieldseekerTreatmentareaErrors{
|
|||
ErrUniqueTreatmentareaPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "treatmentarea",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "treatmentarea_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerZoneErrors = &fieldseekerZoneErrors{
|
|||
ErrUniqueZonesPkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "zones",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "zones_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
@ -7,7 +7,7 @@ var FieldseekerZones2Errors = &fieldseekerZones2Errors{
|
|||
ErrUniqueZones2Pkey: &UniqueConstraintError{
|
||||
schema: "fieldseeker",
|
||||
table: "zones2",
|
||||
columns: []string{"objectid", "version"},
|
||||
columns: []string{"globalid", "version"},
|
||||
s: "zones2_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Code generated by BobGen psql v0.0.4-0.20260105020634-53e08d840e47+dirty. DO NOT EDIT.
|
||||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
|
|
|||
17
db/dberrors/fileupload.csv.bob.go
Normal file
17
db/dberrors/fileupload.csv.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var FileuploadCSVErrors = &fileuploadCSVErrors{
|
||||
ErrUniqueCsvPkey: &UniqueConstraintError{
|
||||
schema: "fileupload",
|
||||
table: "csv",
|
||||
columns: []string{"file_id"},
|
||||
s: "csv_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type fileuploadCSVErrors struct {
|
||||
ErrUniqueCsvPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/fileupload.error_csv.bob.go
Normal file
17
db/dberrors/fileupload.error_csv.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var FileuploadErrorCSVErrors = &fileuploadErrorCSVErrors{
|
||||
ErrUniqueErrorCsvPkey: &UniqueConstraintError{
|
||||
schema: "fileupload",
|
||||
table: "error_csv",
|
||||
columns: []string{"id"},
|
||||
s: "error_csv_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type fileuploadErrorCSVErrors struct {
|
||||
ErrUniqueErrorCsvPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/fileupload.error_file.bob.go
Normal file
17
db/dberrors/fileupload.error_file.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var FileuploadErrorFileErrors = &fileuploadErrorFileErrors{
|
||||
ErrUniqueErrorFilePkey: &UniqueConstraintError{
|
||||
schema: "fileupload",
|
||||
table: "error_file",
|
||||
columns: []string{"id"},
|
||||
s: "error_file_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type fileuploadErrorFileErrors struct {
|
||||
ErrUniqueErrorFilePkey *UniqueConstraintError
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue