Compare commits
722 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 970cd568dc | |||
| 935800e334 | |||
| b5bc54b7f4 | |||
| 0301545df9 | |||
| e36b512908 | |||
| aa3c6e6209 | |||
| 93b69c4cbb | |||
| 7ffa2e891b | |||
| be99baf64c | |||
| 8592659432 | |||
| 1b6fac3313 | |||
| 01f35b603e | |||
| da90401b2d | |||
| 28cf7683a7 | |||
| d1ba2f53fa | |||
| 24a3610c4c | |||
| 7da653efc6 | |||
| 735a9dc1d2 | |||
| f2585c569c | |||
| 0fc46d5916 | |||
| 61ad3fbe45 | |||
| 12213fb31b | |||
| 7a361a330d | |||
| 34a136eba5 | |||
| fcd95f1a25 | |||
| a95e44cf42 | |||
| 040ab106b4 | |||
| 5f3fcc2b3e | |||
| 114aec73ed | |||
| 60bf09e813 | |||
| 347f62bd6d | |||
| 878f0e9bcf | |||
| 18db17fe0b | |||
| b53c908b55 | |||
| dc2fee3a9d | |||
| 387be40076 | |||
| 3153e8bf13 | |||
| 67c99436d1 | |||
| 431435f8bd | |||
| 57dc2023cd | |||
| d6b664d84a | |||
| 52d4c47e43 | |||
| 7f71ff9a2e | |||
| a82732a49c | |||
| a6ce0b7e67 | |||
| bab3200b6c | |||
| 89ed2003fa | |||
| a82b2b8cb8 | |||
| 6a47302192 | |||
| 0e0b2489e6 | |||
| 9ef4dad27c | |||
| e5a84e09a8 | |||
| 4bd62b3567 | |||
| 503cde6063 | |||
| 8757f1cda3 | |||
| ace2557a60 | |||
| 537d5c9133 | |||
| 00d26a684a | |||
| 1cfe51f894 | |||
| 43edca7093 | |||
| 839ed138ca | |||
| 797067ee38 | |||
| e45e05f337 | |||
| b8a9ecb253 | |||
| 0c464a9963 | |||
| 2f6cbe59eb | |||
| 33ecfce313 | |||
| 9229725300 | |||
| 89eca2ddf9 | |||
| a1b2d580a8 | |||
| c6cb645453 | |||
| 7e79308868 | |||
| da75aeecf2 | |||
| af39a73e8f | |||
| 53ce100859 | |||
| 364b4ddc32 | |||
| 06fda0554c | |||
| 0375f666d6 | |||
| aefb5ec6bd | |||
| 0a0e6f6301 | |||
| ab5dd13fcb | |||
| d67c54c6e7 | |||
| 4bb37c5ab3 | |||
| 2fbceb11e3 | |||
| 524353bfa1 | |||
| 822dad5352 | |||
| 5ac778ea53 | |||
| f3af19f03a | |||
| bd3d3881f5 | |||
| a17544bb4b | |||
| a101ff3cc9 | |||
| a5b9ee0c6c | |||
| 4a90917645 | |||
| d5d6201177 | |||
| 309f8fe2c5 | |||
| bf6b5dcb17 | |||
| 4ed005fb37 | |||
| 4fcb184286 | |||
| 2911c7b215 | |||
| ff668c223b | |||
| 52c41e29d8 | |||
| 38359e20e9 | |||
| 20bf272746 | |||
| 060d0dd95f | |||
| 72626e8dd0 | |||
| b68d93ec91 | |||
| 6e3d079c46 | |||
| 878b43c0a6 | |||
| 175fd8d0fb | |||
| dba8b6c475 | |||
| 32a0d895c4 | |||
| 4ae0410930 | |||
| 8bdd18649d | |||
| 8fcd926d43 | |||
| 68adab88bc | |||
| 82b313f62f | |||
| 4ce91d77d4 | |||
| 6350aa00d5 | |||
| 909665ab6c | |||
| 1dba58472b | |||
| 9c392e5791 | |||
| 63ebe382b6 | |||
| a2b8527d91 | |||
| 3867737fcc | |||
| 937953f2a2 | |||
| 96498c01bf | |||
| b92697b8c8 | |||
| ffe427564b | |||
| be8d92d7ae | |||
| 8a05ba2faf | |||
| c783ab7942 | |||
| 5e638bdf1d | |||
| 203d2014b0 | |||
| 3bfcfff1eb | |||
| e5080eaaf6 | |||
| a88aa4c8a0 | |||
| 6992031007 | |||
| c1a8249dc0 | |||
| 77283b3654 | |||
| f1e4aca9b8 | |||
| 963254973b | |||
| cad01e689e | |||
| c6282c9f5e | |||
| c8989237b0 | |||
| 516dd6f429 | |||
| 72a8ed5c16 | |||
| b4e6bac566 | |||
| cc59ccb9b5 | |||
| 1ddba5ebb1 | |||
| 582aa952e4 | |||
| 10dc5c0bd7 | |||
| 7be8b428e4 | |||
| 1d266c88c1 | |||
| 5caa9d8c7a | |||
| b0170b20d5 | |||
| f24a583e2e | |||
| 23819961e6 | |||
| a8819c907e | |||
| b5923137a7 | |||
| 78458760ec | |||
| 1286d0ea2a | |||
| 2fbcb9f918 | |||
| 5cdbc4eb53 | |||
| b4527fba8b | |||
| bcd51cf5cf | |||
| 8a9a3e8c0c | |||
| 986d12eab2 | |||
| 839abcbd28 | |||
| 544ac78a3b | |||
| 8d37e8fab5 | |||
| 2b30411c1b | |||
| baaa3bff5b | |||
| 0ce3420792 | |||
| 4db1a6f678 | |||
| f24104dc94 | |||
| ee9a355613 | |||
| 810a13cee0 | |||
| fe2041f22b | |||
| a0ac5c0674 | |||
| bcc5151116 | |||
| 8fd86d478c | |||
| 0b005c3e76 | |||
| 4a214b099e | |||
| eb27af7d90 | |||
| 0d8d7f3aeb | |||
| 80031c1d1a | |||
| bcea3c6bdf | |||
| bad50a8772 | |||
| bd3e42f83e | |||
| f927b0a911 | |||
| 8eae73eefb | |||
| 5d510915d2 | |||
| e2d4f917a0 | |||
| 2a3dbbdad3 | |||
| 7a6cffa74c | |||
| e929118349 | |||
| aae0d1ed74 | |||
| 8387cf667b | |||
| ffd424df12 | |||
| 0b32492fd6 | |||
| ade629ecf5 | |||
| 55cb4ca962 | |||
| efd6f59fca | |||
| a6ca30fdb1 | |||
| 3196b73a80 | |||
| 5a865cc5e1 | |||
| abbe80b1f0 | |||
| ac552be7e7 | |||
| cedbb3372e | |||
| 0420b777c9 | |||
| 83bf3023de | |||
| 0e777568fb | |||
| 75e9d5a621 | |||
| a2cdbc26bd | |||
| be9065354d | |||
| fa675f293d | |||
| b7d26d5ad7 | |||
| 21587493c0 | |||
| 4625dd39d0 | |||
| fd662721bb | |||
| 4a8c0d2e60 | |||
| c938cb231e | |||
| ba8c0016ac | |||
| b6e1bffd79 | |||
| 61351dabf1 | |||
| efa01cffc2 | |||
| bf156eaf7f | |||
| cb92b845e6 | |||
| b85deb229f | |||
| bff81eb6e3 | |||
| 617631063f | |||
| 1f8e6b698f | |||
| 09fe773987 | |||
| 273e2b7b32 | |||
| dc3cce0b8a | |||
| 2c0aa980e7 | |||
| 3ab0a00959 | |||
| 2ddf015a68 | |||
| b7eff027e7 | |||
| fde9539191 | |||
| 6945b9f9ed | |||
| 5100c8f0be | |||
| 1aba99f732 | |||
| 97ec0667a5 | |||
| c0935c848b | |||
| a2271a2ce8 | |||
| 83c013785f | |||
| e464a9fcdb | |||
| 59bf360937 | |||
| a33056039a | |||
| f1890332ae | |||
| b3a89d9c68 | |||
| a922196f20 | |||
| 89a2cb30e6 | |||
| 1bc452bc09 | |||
| ee2254281c | |||
| 59755a0b42 | |||
| d03c12ffb6 | |||
| 163b0f9edc | |||
| a6f9396760 | |||
| 84da2bdc7d | |||
| ed6dde2f0a | |||
| 3379251ccb | |||
| 7483a6a695 | |||
| d047c460ed | |||
| 81e057b900 | |||
| b6d1bd9ee2 | |||
| 08a1b5b81d | |||
| 7b95cfe833 | |||
| 6b90edf053 | |||
| f444bf39fb | |||
| 2ea47f03f4 | |||
| 74e24b7de3 | |||
| 5a35c1d1f8 | |||
| 5d06afbecc | |||
| 8514ec36d5 | |||
| 35ab261ee8 | |||
| 838e24bbed | |||
| 84604dfdc8 | |||
| d86ef13345 | |||
| b9c257a635 | |||
| 671397ba81 | |||
| c4c22f6733 | |||
| 03dccb638a | |||
| e3f9a19b84 | |||
| 262aa009c2 | |||
| 74e0630a41 | |||
| 29c3b267d9 | |||
| e1f3c93a1d | |||
| 259960cf45 | |||
| d395699dc4 | |||
| f490e4a1a4 | |||
| d352b0d932 | |||
| a9077b6c36 | |||
| 5f68eb453f | |||
| 5e0981e2a2 | |||
| b2d8e3ba27 | |||
| 5bf93c3dfd | |||
| b2c5bb6735 | |||
| 171672ee33 | |||
| aa5a35b15f | |||
| 82dd5e8683 | |||
| f5ac7bb4ee | |||
| e894ae28dc | |||
| d55a7ec5af | |||
| 0e165b57d0 | |||
| dfe7d3650f | |||
| b6951d64d4 | |||
| ee38d0d2b6 | |||
| ac27c60e0c | |||
| 6a8ae6d81a | |||
| 87c802fa90 | |||
| b08582224a | |||
| 66d35428fa | |||
| 344f4bcaa5 | |||
| ac65129ba6 | |||
| 322be2fe40 | |||
| 1097004245 | |||
| f4d0ce015d | |||
| adcff5c5c8 | |||
| 388801fd09 | |||
| cd98751667 | |||
| b0e2e97f09 | |||
| 239340a7a9 | |||
| 31d8d2d0d5 | |||
| 05ec6798ac | |||
| 5549f9d79f | |||
| 0fbf891c23 | |||
| 9ea99c92f9 | |||
| 8ebcff7390 | |||
| 659df00cc9 | |||
| 5451c297c2 | |||
| b09725726c | |||
| 4a440e3022 | |||
| 28ec1c3d67 | |||
| 347e8dcb86 | |||
| 67dcb87b81 | |||
| b849eec7ea | |||
| ebbd79ed7e | |||
| fe41df3e16 | |||
| 4a28a16639 | |||
| 59e58840c9 | |||
| 7e2a22c58c | |||
| 9b1de15373 | |||
| c84a2ef42b | |||
| 5527731e83 | |||
| 84db38c985 | |||
| a23866619d | |||
| 7545b2e4ef | |||
| 5448702a7d | |||
| 2408bcbeff | |||
| e707d91e7f | |||
| e2b6bc6502 | |||
| b805374a6c | |||
| 6e1a5b4348 | |||
| cadf6afb5f | |||
| 3c62fe2ca1 | |||
| 02139450c6 | |||
| 3ae72c8944 | |||
| a189348b36 | |||
| 8f494991e2 | |||
| f74d2c3ca1 | |||
| c0389fa4b1 | |||
| 9c557a0391 | |||
| 96878f24de | |||
| 083c4ddae9 | |||
| ba76c8b1db | |||
| 9bca15ae7e | |||
| ffedae0373 | |||
| dcab2e1f8f | |||
| 92f4282674 | |||
| 8baa056fab | |||
| 5011f4c137 | |||
| 1a031f16bd | |||
| 5db4c05544 | |||
| 0f94292ab7 | |||
| b701771dfb | |||
| 447ea18d95 | |||
| 756cc0d266 | |||
| 0297114faf | |||
| dddeafe6cd | |||
| 6f5b8f5575 | |||
| 9ba99d5ceb | |||
| 5306f8ba62 | |||
| ae10e4fee8 | |||
| c8f74d3c26 | |||
| a3c340f787 | |||
| 875298fe88 | |||
| ab47259534 | |||
| 60eb6b9bbf | |||
| 4735734404 | |||
| 4060e7ddcd | |||
| 730f40956f | |||
| e04b86218d | |||
| 12aedaf543 | |||
| bac55774f8 | |||
| 14c0d453e9 | |||
| b23fc6edc5 | |||
| c48aebcb0b | |||
| 97acdb0e2c | |||
| f969f262b8 | |||
| ae50a1abd8 | |||
| 553b65556a | |||
| 3ad95e1365 | |||
| 86ab67e70b | |||
| 3bde7a9cac | |||
| a4a9662c94 | |||
| 79a56c2d20 | |||
| d3662b8240 | |||
| dbc5db9727 | |||
| 5b5a63114c | |||
| a6912929a7 | |||
| 8d6976c770 | |||
| 9dccd21cee | |||
| cb9e5146bf | |||
| 882636de8f | |||
| 531f3282d9 | |||
| f88ca57d97 | |||
| b2c24a0438 | |||
| 37ce3183ca | |||
| 2c0bfb9904 | |||
| c41154a200 | |||
| 7f90391ecd | |||
| 1a7a2b13aa | |||
| 6c79b8a85e | |||
| 765b8fbef7 | |||
| 8e536d1d2f | |||
| 68315a3fb2 | |||
| 6fb5a7f971 | |||
| cc7ce44f47 | |||
| 53bfbbc5ef | |||
| 9601b88b41 | |||
| f22ddd0405 | |||
| 1a53d5338f | |||
| e7c33d7e10 | |||
| d52101d25b | |||
| 6f677b5638 | |||
| 4faa7fa8c0 | |||
| 20614acb86 | |||
| c393f6fd81 | |||
| 9ef6aaa406 | |||
| 43dce16fbd | |||
| 437f87013a | |||
| 380b41f695 | |||
| 2d5dca3fb5 | |||
| b6cfbee102 | |||
| 5681ff2283 | |||
| 332e64c9ab | |||
| beb6d9d066 | |||
| e56e83161b | |||
| e08f614d11 | |||
| 10e368c403 | |||
| 597aedc2af | |||
| 07e48aa071 | |||
| c5c78a2b84 | |||
| 9104e2f7c3 | |||
| 27fd1faa9c | |||
| 6203e3da75 | |||
| b6037d7525 | |||
| 51fe851c5a | |||
| c0e414bdc3 | |||
| 5842b6251d | |||
| bfecae7d61 | |||
| 4f9617aa2f | |||
| fd7607f5b7 | |||
| 64a8de7a32 | |||
| 4a9d6e0db6 | |||
| 4d718f9a12 | |||
| 2342a99405 | |||
| 4f6369fa27 | |||
| e8db3de122 | |||
| b658e28f2e | |||
| b919472f42 | |||
| d7d6888f63 | |||
| 54e77f72f4 | |||
| 3e0003095b | |||
| 095ab828b6 | |||
| 06345099eb | |||
| 5cabea8577 | |||
| 42bcdb8af8 | |||
| bfe2b88622 | |||
| 377683c4e3 | |||
| 457f123f69 | |||
| 4b87c74f41 | |||
| 522c5785a2 | |||
| 76c395d613 | |||
| 51811132a4 | |||
| ea231fb0cc | |||
| 9574ed4812 | |||
| af2299f417 | |||
| 945b482b00 | |||
| f9934095b3 | |||
| aa02d2e729 | |||
| ee76dddf2f | |||
| fc56c1406a | |||
| 7ee70b24ee | |||
| 3745231f51 | |||
| 353a3ea442 | |||
| 124d1b7078 | |||
| 42d111aac9 | |||
| 00ebc27069 | |||
| 4145944b1b | |||
| a89a4fbec5 | |||
| 0a7a2512d4 | |||
| 6fbde6389d | |||
| a656d45a6d | |||
| ab519020fc | |||
| 6c311c76e3 | |||
| 5172400803 | |||
| c253e655b1 | |||
| 0ecf9c1be1 | |||
| 05a7bbb4e3 | |||
| 7f8491a1c2 | |||
| 7f72e82ceb | |||
| af136f324d | |||
| 4f96f35d9a | |||
| 7b3c1f2b54 | |||
| 21b7b68f50 | |||
| 6f9a511874 | |||
| ad90f9c95e | |||
| da7549eeda | |||
| 92ed974e4b | |||
| 15371ec064 | |||
| 4bfaaa72ce | |||
| e59794f5e0 | |||
| 1f9f1ae166 | |||
| 9a9371301c | |||
| 9921618c12 | |||
| 33399b5e2a | |||
| da14410fc7 | |||
| 6f8c012394 | |||
| daf921accf | |||
| c67afa7e1e | |||
| 2e0f657585 | |||
| 7699e58bc3 | |||
| 4bbfbdb9e6 | |||
| f60bde7fd9 | |||
| 1ad3c5a5c8 | |||
| 747544bb58 | |||
| 0d1bd752a4 | |||
| 88d88ec8d3 | |||
| 670310de15 | |||
| df8cab4b07 | |||
| aee9bb9267 | |||
| d7c07fc65f | |||
| 3ff7ff05ab | |||
| bf2a7582fa | |||
| ef412b28ec | |||
| 2a92420bbe | |||
| ba89b2e994 | |||
| 0718d88f7a | |||
| a64df8a687 | |||
| f33020e2b8 | |||
| 0318b332bb | |||
| 09ae9d0ce3 | |||
| 6fe107601e | |||
| 69eabe4e85 | |||
| 0289bf5756 | |||
| 6f45325d9d | |||
| d5cf65f4cb | |||
| fb853a2bd3 | |||
| e0a586b311 | |||
| 55c8b8f1dd | |||
| 8594723a0d | |||
| 761af13270 | |||
| 7f756ce8ca | |||
| 21a8f3029e | |||
| 4f92fdced8 | |||
| 7360a9d2e1 | |||
| b081dcf6d5 | |||
| 2856587aca | |||
| ea318af65f | |||
| da62cc8f98 | |||
| 4a835d3f16 | |||
| 84102dd50e | |||
| 32f00afa8a | |||
| 3ae88f984a | |||
| 96237c7599 | |||
| f90faa4732 | |||
| a7fe9ee6d9 | |||
| 9eb7022336 | |||
| 0a79c5d945 | |||
| b384252c7c | |||
| 47f900ab76 | |||
| 50643698c2 | |||
| 8d5fb1ef0b | |||
| 354c07f2bf | |||
| 11f56bfd1c | |||
| a4a8bcdaa9 | |||
| 35fc57e8d1 | |||
| a410bf441c | |||
| 71ffa13167 | |||
| bcde604690 | |||
| 29d98796fb | |||
| de0fbd7188 | |||
| da8074d1e0 | |||
| b671133f88 | |||
| 6797dfa251 | |||
| b152cf9c36 | |||
| 0b8bea393e | |||
| 674801c8b2 | |||
| 6cd5821e5f | |||
| d4165ec2d0 | |||
| 82ecf0f5d1 | |||
| 9c56f148e4 | |||
| b68332afc0 | |||
| 22c2df11f8 | |||
| 978c20d72a | |||
| bacfe7218f | |||
| c73a1123d2 | |||
| 7dd61a06e2 | |||
| 5f54cfa6ed | |||
| ac6cd878af | |||
| 821647cef1 | |||
| 03301518f0 | |||
| d9a98e9eb2 | |||
| ef552af054 | |||
| 46edbbae74 | |||
| 31a9490210 | |||
| 21180816be | |||
| 736c71eefc | |||
| c75c5446f7 | |||
| 6422609150 | |||
| bf3204992e | |||
| 6d6fe9e1d6 | |||
| eaeedd5356 | |||
| 34d14846a1 | |||
| d367166e77 | |||
| dba1468e4d | |||
| e5af41b703 | |||
| 48d44487da | |||
| 1bd0adbc50 | |||
| 9b8c079d79 | |||
| efece7733f | |||
| 5779242f22 | |||
| 004a49c4e4 | |||
| 80f4f51b02 | |||
| f3c818a48f | |||
| 1e67c0090d | |||
| 0126d24242 | |||
| ccdb391ccc | |||
| 228f4a6db9 | |||
| 5d8314d13b | |||
| 303b4b826b | |||
| cee76ddd53 | |||
| a2c3f52ab4 | |||
| 9cbce4ff14 | |||
| 0d6a6fa797 | |||
| 16499fe23e | |||
| 31947c848a | |||
| 976a29b7d7 | |||
| 701f4853b5 | |||
| 9b6cacda0e | |||
| ddc63bfa91 | |||
| 931ea00e22 | |||
| 2cdcbb3784 | |||
| c2c1f3377a | |||
| c4359a3c81 | |||
| e86cdc6764 | |||
| b034fa5cf5 | |||
| 8c6bb7db26 | |||
| 94400aa808 | |||
| 23fdfc5a98 | |||
| 2f8f579430 | |||
| c392029a11 | |||
| bf5c49378b | |||
| 557caef8e5 | |||
| 067465ab35 | |||
| 3c26ebdaf2 | |||
| daf5aa316f | |||
| aa94cce2ad | |||
| f09533c742 | |||
| e88f40793f | |||
| 76bfc09aa5 | |||
| edfd8e285f | |||
| 441e4d45b1 | |||
| 313dacd956 | |||
| 9d2b757bc7 | |||
| c9802b78d0 | |||
| 6fcaf7fb5d | |||
| 9ca8ec4ce2 | |||
| 29e66327ee | |||
| a87904f2ff | |||
| 42d9d2372d | |||
| 68e0da1133 | |||
| cb34c43ef4 | |||
| 6042e7d337 | |||
| 31a767c944 | |||
| 87961bac58 | |||
| c7c1c45008 | |||
| fdab54a775 | |||
| ba03bf9d4f | |||
| 17fb3dcdb5 | |||
| f2b7d30a7f | |||
| 429b724cf2 | |||
| 2c4e7c4f96 | |||
| 7a111ab9b3 | |||
| 2f1b612e9e | |||
| a5b8a333d6 | |||
| 908ac4faea | |||
| 2a207fd613 | |||
| ee61b6d24b | |||
| 954a4330ee | |||
| 786a6c16a3 | |||
| 97c9269215 | |||
| 0cc0b57e33 | |||
| dad867a356 | |||
| ab5840dd54 | |||
| 5fd85d7052 | |||
| 544f99c09b | |||
| 6338d9f3f3 | |||
| 45643e8369 | |||
| c872cebb8f | |||
| 5fa4dd2884 | |||
| c039c70e3e | |||
| 434746aa99 | |||
| 2f61b224de | |||
| f2ea1367e2 | |||
| d287fa44df | |||
| b2eb98a66c | |||
| 732b123342 | |||
| f66d40f28b |
1001 changed files with 69493 additions and 35751 deletions
22
.gitignore
vendored
22
.gitignore
vendored
|
|
@ -1,14 +1,26 @@
|
|||
.env
|
||||
.sass-cache/
|
||||
cmd/geocode-test/geocode-test
|
||||
cmd/passwordgen/passwordgen
|
||||
/db/jet/jet
|
||||
districts/
|
||||
flogo.log
|
||||
html/static/css/bootstrap.css
|
||||
html/static/css/bootstrap.css.map
|
||||
nidus-sync
|
||||
nidus-sync.log
|
||||
lob/cmd/letter-create/letter-create
|
||||
lob/cmd/letter-list/letter-list
|
||||
lob/cmd/address-create/address-create
|
||||
lob/cmd/address-list/address-list
|
||||
/nidus-sync
|
||||
/nidus-sync.log
|
||||
node_modules/
|
||||
postgrid/cmd/send-pdf/send-pdf
|
||||
result
|
||||
stadia/cmd/bulk-geocode/bulk-geocode
|
||||
stadia/cmd/geocode-autocomplete/geocode-autocomplete
|
||||
stadia/cmd/geocode-bygid/geocode-bygid
|
||||
stadia/cmd/reverse-geocode/reverse-geocode
|
||||
stadia/cmd/structured-geocode/structured-geocode
|
||||
tmp/
|
||||
stadia/cmd/tile-raster/tile-raster
|
||||
static/gen/
|
||||
temp/
|
||||
ts/gen
|
||||
vite/*/.vite/
|
||||
|
|
|
|||
|
|
@ -1,17 +1,11 @@
|
|||
{
|
||||
"plugins": ["/nix/store/6kfm5qrd2bckffxphb5ylvbg3sz1657r-prettier-plugin-go-template-0.0.15-unstable-2023-07-26/lib/node_modules/prettier-plugin-go-template/lib/index.js"],
|
||||
"useTabs": true,
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["*.html"],
|
||||
"options": {
|
||||
"parser": "go-template",
|
||||
"useTabs": true,
|
||||
},
|
||||
},
|
||||
{
|
||||
"files": ["*.js"],
|
||||
"options": {
|
||||
"useTabs": true,
|
||||
},
|
||||
},
|
||||
],
|
||||
|
|
|
|||
303
CLEANUP.md
Normal file
303
CLEANUP.md
Normal file
|
|
@ -0,0 +1,303 @@
|
|||
# nidus-sync — Cleanup Tasks
|
||||
|
||||
This file lists code, files, and patterns that are remnants of older architectural approaches. These should be removed to reduce complexity, maintenance burden, and confusion.
|
||||
|
||||
---
|
||||
|
||||
## 1. Bob → Jet Migration (Incomplete)
|
||||
|
||||
**Status:** Bob is still the primary ORM. Jet was introduced May 2026 but only covers 3 schemas partially.
|
||||
|
||||
### 1a. Port remaining schemas from Bob to Jet
|
||||
|
||||
Jet-based queries exist for:
|
||||
- `db/query/public/` — address, communication, communication_log_entry, compliance_report_request, feature, feature_pool, job, lead, signal, site
|
||||
- `db/query/publicreport/` — compliance, image, image_exif, nuisance, report, report_image, report_log, water
|
||||
- `db/query/arcgis/` — account, oauth, service_feature, service_map, user, user_privileges
|
||||
|
||||
Still using Bob directly (not yet ported to Jet queries):
|
||||
- `platform/report/notification.go` (13 bob references)
|
||||
- `platform/background/background.go` (8)
|
||||
- `platform/arcgis.go` (8)
|
||||
- `platform/text/send.go` (7)
|
||||
- `platform/report/some_report.go` (6)
|
||||
- `platform/site.go` (5)
|
||||
- `platform/csv/flyover.go` (7)
|
||||
- `platform/csv/pool.go` (5)
|
||||
- `platform/csv/csv.go` (4)
|
||||
- `platform/text/report.go` (4)
|
||||
- `platform/text/phone_number.go` (3)
|
||||
- `platform/publicreport/log.go` (3)
|
||||
- `platform/mailer.go` (3)
|
||||
- `platform/email/template.go` (2)
|
||||
- `db/connection.go` (4 — bob.Tx types)
|
||||
- `db/prepared.go` (2)
|
||||
- `resource/review_task.go` (2)
|
||||
- `rmo/status.go` (2)
|
||||
- `rmo/report.go` (1)
|
||||
- `rmo/mailer.go` (1)
|
||||
- Plus many api/* files
|
||||
|
||||
### 1b. Remove Bob-generated models after migration
|
||||
|
||||
Once all queries are ported to Jet, delete the 103 `.bob.go` files in `db/models/`:
|
||||
```
|
||||
db/models/*.bob.go
|
||||
```
|
||||
|
||||
### 1c. Remove Bob-specific helper files
|
||||
|
||||
These are Bob-specific and can be removed once Bob is fully replaced:
|
||||
- `db/dberrors/` — Bob error types (still referenced)
|
||||
- `db/dbinfo/` — Bob type info (still referenced)
|
||||
- `db/models/bob_loaders.bob.go`
|
||||
- `db/models/bob_where.bob.go`
|
||||
|
||||
### 1d. Remove Bob from go.mod and dependencies
|
||||
|
||||
After all Bob code is gone:
|
||||
- Remove `github.com/Gleipnir-Technology/bob` from `go.mod`
|
||||
- Run `go mod tidy`
|
||||
|
||||
### 1e. Remove Bob codegen scripts
|
||||
|
||||
- `db/bobgen.sh`
|
||||
- `db/bobgen.yaml`
|
||||
|
||||
### 1f. Regenerate Jet output
|
||||
|
||||
The `db/jet/main.go` generator outputs to `db/gen/` but no output is currently checked in. Run the generator and ensure generated code is usable:
|
||||
```bash
|
||||
cd db/jet && go run .
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 2. Go HTML Templates → Vue SPA (Mostly Complete)
|
||||
|
||||
**Status:** Nearly all Go template routes are commented out in `sync/routes.go` and `rmo/routes.go`. Both hosts serve Vue SPAs via `static.SinglePageApp()`. Some Go template routes remain active.
|
||||
|
||||
### 2a. Remaining active Go template routes (sync)
|
||||
|
||||
These routes in `sync/routes.go` still render Go templates:
|
||||
- `/oauth/arcgis/begin` → `getArcgisOauthBegin` (redirect, no template but in Go)
|
||||
- `/oauth/arcgis/callback` → `getArcgisOauthCallback`
|
||||
- `/mailer/pool/random` → `getMailerPoolRandom`
|
||||
- `/mailer/mode-1` → `getMailer1` (generates PDF)
|
||||
- `/mailer/mode-2` → `getMailer2` (generates PDF)
|
||||
- `/mailer/mode-3/{code}` → `getMailer3` (generates PDF)
|
||||
- `/mailer/mode-1/preview` → `getMailer1Preview`
|
||||
- `/mailer/mode-2/preview` → `getMailer2Preview`
|
||||
- `/mailer/mode-3/{code}/preview` → `getMailer3Preview`
|
||||
- `/privacy` → `getPrivacy`
|
||||
|
||||
The mailer routes use `platform/pdf` which in turn uses headless Chrome (`chromedp`) to render HTML to PDF. This is legitimate server-side functionality, not just a template remnant. However, the PDF templates themselves may be candidates for migration to the Vue ecosystem.
|
||||
|
||||
### 2b. Remove all commented-out routes
|
||||
|
||||
Both `sync/routes.go` and `rmo/routes.go` have large blocks of commented-out route registrations. Remove these once migration is confirmed complete.
|
||||
|
||||
### 2c. Remove unused Go template files
|
||||
|
||||
Once all routes are ported or confirmed dead, remove the entire `html/template/` directory. The `html/` package (`html/embed.go`, `html/filesystem.go`, `html/func.go`, etc.) should also be removed once nothing references it.
|
||||
|
||||
### 2d. Reduce the html/ package surface
|
||||
|
||||
**Note:** The `html/` package is still actively imported by 40+ Go files. It provides:
|
||||
- Template rendering (`html/embed.go`, `html/filesystem.go`) — mostly for mailer PDFs and privacy page
|
||||
- `html.ContentConfig` — used extensively in sync/routes (mailer previews, admin pages)
|
||||
- `html.MakeGet`, `html.MakePost` — HTTP handler wrappers (used by active `sync/` routes)
|
||||
- `html.RespondError` — HTTP error responses
|
||||
- Form parsing, image upload handling, URL building
|
||||
|
||||
**Short-term:** Remove the template rendering portion once mailer PDFs and privacy page are migrated.
|
||||
**Long-term:** The full `html/` package can be removed only after all server-rendered pages are gone and handler wrappers are replaced with the `resource/` pattern.
|
||||
|
||||
---
|
||||
|
||||
## 3. esbuild (`build.js`) — Removed ✅
|
||||
|
||||
*(Completed 2026-05-09: `build.js` removed and `pkgs.esbuild` dropped from flake.nix devShell — Vite is the build tool)*
|
||||
|
||||
---
|
||||
|
||||
## 4. Legacy Static JavaScript Files
|
||||
|
||||
**Status:** `static/js/` contains 20 plain JavaScript files written as custom HTML elements and standalone scripts for the Go template era. These are referenced by old Go HTML templates but most of those templates are now unused.
|
||||
|
||||
### 4a. Files in static/js/
|
||||
|
||||
```
|
||||
address-display.js
|
||||
address-or-report-suggestion.js
|
||||
address-suggestion.js
|
||||
events.js
|
||||
geocode.js
|
||||
location.js
|
||||
map-admin.js
|
||||
map-aggregate.js
|
||||
map-arcgis-tile.js
|
||||
map-cell.js
|
||||
map-locator.js
|
||||
map-locator-ro.js
|
||||
map-multipoint.js
|
||||
map-proxied-arcgis-tile.js
|
||||
map-routing.js
|
||||
map-service-area.js
|
||||
photo-upload.js
|
||||
table-report.js
|
||||
table-site.js
|
||||
time-relative.js
|
||||
user-selector.js
|
||||
```
|
||||
|
||||
### 4b. Determine which are still used
|
||||
|
||||
The remaining active Go templates (mailer, oauth, privacy) may reference some of these. Check each active template for `<script src="/static/js/...">` references. Templates that are confirmed unused:
|
||||
- All templates in `html/template/sync/` (dashboard, cell, communication-root, district, intelligence, layout, operations-root, planning-root, radar, review, sudo, upload-*) — these are replaced by Vue SPAs
|
||||
- Most templates in `html/template/rmo/` — RMO routes are all commented out
|
||||
|
||||
### 4c. Migrate any still-needed functionality
|
||||
|
||||
The map-locator, address-suggestion, and photo-upload functionality has Vue equivalents in `ts/components/`. The remaining custom element patterns should be fully replaced by Vue components.
|
||||
|
||||
---
|
||||
|
||||
## 5. TomTom Integration — Removed ✅
|
||||
|
||||
*(Completed 2026-05-09: `tomtom/` directory removed — zero imports outside itself, Stadia Maps is now the geocoding/tile provider)*
|
||||
|
||||
---
|
||||
|
||||
## 6. Postgrid — Alternate Mail Provider
|
||||
|
||||
**Status:** `postgrid/` contains a single CLI tool (`cmd/send-pdf`) and a `postgrid` Go package reference in `main.go`. Lob is now the mail provider, with its own integration in `lob/`.
|
||||
|
||||
### 6a. Investigate and remove if unused
|
||||
|
||||
- Check if Postgrid is actually being used in production vs Lob
|
||||
- If Lob is the chosen provider, remove `postgrid/` entirely
|
||||
- Remove any Postgrid configuration references
|
||||
|
||||
---
|
||||
|
||||
## 7. Duplicate Architecture: `api/` vs `resource/`
|
||||
|
||||
**Status:** The `api/` package contains both route registration (`api/routes.go`) and handler functions (`api/signin.go`, `api/publicreport.go`, `api/compliance.go`, etc.). The `resource/` package provides typed resource handlers that expose `List`, `Get`, `Create`, etc. Some functionality exists in both layers.
|
||||
|
||||
### 7a. Consolidate handler functions
|
||||
|
||||
Functions in `api/` that directly handle business logic should be moved to `resource/`:
|
||||
- `api/signin.go` — `postSignin`, `postSignout`, `postSignup`
|
||||
- `api/compliance.go` — various compliance handlers
|
||||
- `api/publicreport.go` — `postPublicreportInvalid`, `postPublicreportSignal`, `postPublicreportMessage`
|
||||
- `api/sudo.go` — `postSudoEmail`, `postSudoSMS`, `postSudoSSE`
|
||||
- `api/configuration.go` — `postConfigurationIntegrationArcgis`
|
||||
- `api/review.go` — `postReviewPool`
|
||||
- `api/twilio.go`, `api/voipms.go` — webhook handlers
|
||||
- `api/audio.go`, `api/image.go` — media upload handlers
|
||||
- `api/tile.go`, `api/debug.go` — utilities
|
||||
|
||||
### 7b. Standardize on resource pattern
|
||||
|
||||
Either move everything to `resource/` or keep both but clearly define responsibilities:
|
||||
- `resource/` — domain resource CRUD + URI generation
|
||||
- `api/` — route registration + HTTP concerns only
|
||||
|
||||
Currently the split is unclear and some `api/` files do substantial business logic.
|
||||
|
||||
---
|
||||
|
||||
## 8. `arcgis-go` Submodule — Not Checked Out
|
||||
|
||||
**Status:** The `arcgis-go` submodule (referenced in `.gitmodules`) is not checked out (empty directory). The external `github.com/Gleipnir-Technology/arcgis-go` package is used via `go.mod` instead.
|
||||
|
||||
### 8a. Remove submodule
|
||||
|
||||
```bash
|
||||
git submodule deinit arcgis-go
|
||||
git rm arcgis-go
|
||||
```
|
||||
|
||||
Verify that all code references use the external package, not a local path.
|
||||
|
||||
---
|
||||
|
||||
## 9. `go-geojson2h3` Local Copy
|
||||
|
||||
**Status:** `go-geojson2h3/` is also a submodule. The external package `github.com/Gleipnir-Technology/go-geojson2h3/v2` is imported in `go.mod`. Only `h3utils/h3.go` references it.
|
||||
|
||||
### 9a. Consolidate
|
||||
|
||||
- If the local copy isn't needed, remove the submodule
|
||||
- If local modifications exist, merge upstream or maintain intentionally with documentation
|
||||
|
||||
---
|
||||
|
||||
## 10. Old Generated Files & Artifacts
|
||||
|
||||
### 10a. `query.go` at project root — Removed ✅
|
||||
|
||||
### 10b. `db/sql/` directory
|
||||
|
||||
Contains `.bob.go` and `.bob.sql` files — these are Bob-style named queries. Once Bob is removed, these can be cleaned up or migrated to Jet equivalents.
|
||||
|
||||
### 10c. `static/gen/main.js`
|
||||
|
||||
A leftover build artifact. The new build output goes to `static/gen/sync/` and `static/gen/rmo/` via Vite. Ensure `static/gen/` is in `.gitignore` and the stale `main.js` is removed.
|
||||
|
||||
### 10d. `static/css/placeholder`
|
||||
|
||||
Empty placeholder file. Remove.
|
||||
|
||||
---
|
||||
|
||||
## 11. Nix devShell Cleanup
|
||||
|
||||
**Status:** `flake.nix` devShell includes several tools from older workflows:
|
||||
|
||||
### 11a. Potentially unnecessary devShell packages
|
||||
|
||||
- `pkgs.esbuild` — already removed along with `build.js` on 2026-05-09 (see section 3) ✅
|
||||
- `pkgs.dart-sass` — Vue/Vite uses the `sass` npm package; check if Go code invokes dart-sass directly
|
||||
- `pkgs.autoprefixer` — may not be needed with Vite's built-in PostCSS
|
||||
|
||||
---
|
||||
|
||||
## 12. Start Scripts — Consolidate
|
||||
|
||||
**Status:** Four start scripts exist:
|
||||
|
||||
| Script | Purpose |
|
||||
|--------|---------|
|
||||
| `start-air.sh` | Development with air (live reload) |
|
||||
| `start-flogo.sh` | Unknown (references `flogo`) |
|
||||
| `start-nidus-sync.sh` | Production-like direct run |
|
||||
| `start-nix-built.sh` | Run Nix-built output |
|
||||
|
||||
`start-flogo.sh` may be a remnant. Investigate and remove if unused.
|
||||
|
||||
---
|
||||
|
||||
## Priority Summary
|
||||
|
||||
1. **High impact, low effort:**
|
||||
- ~~Remove `tomtom/` (unused, no imports)~~ ✅
|
||||
- ~~Remove `build.js` (dead, replaced by Vite)~~ ✅
|
||||
- Remove commented-out routes in `sync/routes.go` and `rmo/routes.go`
|
||||
- ~~Remove `query.go` commented-out code~~ ✅
|
||||
- Remove `static/gen/main.js` stale artifact
|
||||
- Remove `static/css/placeholder`
|
||||
|
||||
2. **Medium impact, medium effort:**
|
||||
- Remove unused Go HTML templates (confirm which are still active first)
|
||||
- Remove unused `static/js/` files (verify against active templates)
|
||||
- Remove `arcgis-go` submodule
|
||||
- Clean up Nix devShell
|
||||
|
||||
3. **High impact, high effort:**
|
||||
- Complete Bob → Jet migration across all schemas
|
||||
- Remove Bob-generated models, helpers, scripts
|
||||
- Remove Bob from go.mod
|
||||
- Consolidate `api/` and `resource/` handler patterns
|
||||
- Remove `html/` package (after all Go templates are gone)
|
||||
207
HISTORY.md
Normal file
207
HISTORY.md
Normal file
|
|
@ -0,0 +1,207 @@
|
|||
# nidus-sync — Project History
|
||||
|
||||
## Overview
|
||||
|
||||
nidus-sync is a dual-tenant mosquito abatement platform serving two domains:
|
||||
- **RMO** (`report.mosquitoes.online`) — Public-facing mosquito/water/nuisance reporting
|
||||
- **Sync** (`sync.nidus.cloud`) — Administrative dashboard for vector control districts
|
||||
|
||||
The project was started in November 2025 and has undergone several major architectural shifts across ~1655 commits spanning 6 months.
|
||||
|
||||
---
|
||||
|
||||
## Timeline
|
||||
|
||||
### Phase 1: Foundation (November 2025)
|
||||
|
||||
**Nov 3 – Nov 13: Project bootstrap**
|
||||
- Initial Go project with Nix build system (`flake.nix`, `default.nix`)
|
||||
- Basic `net/http` web serving with `gorilla/mux` routing
|
||||
- Go `html/template` server-side rendering
|
||||
- Bob ORM integration (`github.com/Gleipnir-Technology/bob`) for PostgreSQL — code-generated models via `bobgen`
|
||||
- ArcGIS OAuth integration for user authentication
|
||||
- ArcGIS Fieldseeker data synchronization (treatment areas, inspections, breeding sources, etc.)
|
||||
- MapBox GL JS integration for heatmap visualization
|
||||
- Dashboard with login, basic CRUD mocks
|
||||
|
||||
**Nov 13 – Nov 24: Logging & DB restructuring**
|
||||
- Migration from standard `log` to `zerolog` for structured, colorized output
|
||||
- Database logic moved into a separate `db/` subdirectory
|
||||
- Clean shutdown logic, token refresh loops
|
||||
|
||||
**Key characteristics:** Monolithic Go server, HTML templates, Bob ORM, MapBox maps, ArcGIS OAuth
|
||||
|
||||
---
|
||||
|
||||
### Phase 2: Fieldseeker & Schema Evolution (December 2025)
|
||||
|
||||
**Dec 2 – Dec 24: Fieldseeker schema v2**
|
||||
- Bob codegen updated to latest version
|
||||
- Fieldseeker schema captured on OAuth connect and stored locally
|
||||
- Dynamic SQL functions replacing hardcoded per-table sync logic
|
||||
- Old Fieldseeker tables removed, v2 generated tables used
|
||||
- Note/image audio support added
|
||||
- MMS file downloads from SMS webhooks
|
||||
|
||||
**Key characteristics:** Bob-generated fieldseeker models, prepared SQL functions, SMS/MMS debugging
|
||||
|
||||
---
|
||||
|
||||
### Phase 3: Architecture Maturation (January 2026)
|
||||
|
||||
**Jan 2 – Jan 8: Domain split & template system**
|
||||
- WIP pass-through models concept ("Checkpoint on initial idea for passing through models")
|
||||
- Massive reorganization: templates split into `rmo/` (public) and `sync/` (admin) subdirectories
|
||||
- `html/` package created with embedded template loading
|
||||
- Bob submodule removed, `arcgis-go` became external dependency
|
||||
- Public report domain support added
|
||||
- Version bumped 7 times in rapid iteration (v0.0.4 → v0.0.10)
|
||||
|
||||
**Jan 8 – Jan 31: Platform Layer emergence**
|
||||
- "Report platform layer" introduced (`a9b0a55f`) — initial abstraction between HTTP handlers and database
|
||||
- Address suggestion and map-locator components via custom HTML elements
|
||||
- SVG auto-transformation into Go templates
|
||||
- Report submission forms wired up (nuisance, water)
|
||||
- Email template system
|
||||
|
||||
**Key characteristics:** Two-domain architecture (RMO/Sync), `html/` template package, platform layer beginning, custom element web components
|
||||
|
||||
---
|
||||
|
||||
### Phase 4: Map Migration & Platform Expansion (February 2026)
|
||||
|
||||
**Feb 1 – Feb 28: Map provider transition**
|
||||
- MapBox → MapLibre GL (open-source fork) via `maplibre-gl`
|
||||
- Stadia Maps integration for tile serving and geocoding (Feb 12-14)
|
||||
- TomTom routing integration added (Feb 17)
|
||||
- Bulk geocoding via Stadia
|
||||
- Parcel image generation debugging
|
||||
|
||||
**Platform layer expansion:**
|
||||
- Emails moved to platform layer
|
||||
- Phone/SMS support
|
||||
- OAuth integration settings
|
||||
- Upload platform functions
|
||||
- QR code and image tile moved into platform
|
||||
- Admin map components
|
||||
|
||||
**Key characteristics:** MapLibre/Stadia replacing MapBox, TomTom added, platform layer expanding, heavy template iteration
|
||||
|
||||
---
|
||||
|
||||
### Phase 5: VueJS Revolution (March 2026) — 448 commits
|
||||
|
||||
**Mar 5 – Mar 12: Pre-Vue cleanup**
|
||||
- Stadia Maps client initialization
|
||||
- Signal database schema added
|
||||
- Review task/mailer schema rework
|
||||
- Generated Bob files pruned
|
||||
|
||||
**Mar 12: Massive platform layer rework** (`44c4f17f`)
|
||||
- User/organization handling restructured in platform layer
|
||||
- Signal creation moved inside platform
|
||||
|
||||
**Mar 18 – Mar 22: VueJS Migration** (the biggest architectural shift)
|
||||
- Mar 18: Auto-generated report IDs
|
||||
- Mar 21: **VueJS introduced** — begins with TypeScript bundle, then Vue SFC components, vue-router, Bootstrap/SCSS integration
|
||||
- Mar 21: Dashboard, Intelligence, sidebar all moved to Vue
|
||||
- Mar 22: **esbuild replaced by Vite** (`47f900ab`) — `vite/` directory with separate configs for `sync` and `rmo` SPAs
|
||||
- Mar 22: TypeScript checking clean across entire frontend
|
||||
- Mar 23: Public report card component, auth checks off API client
|
||||
- Mar 24-31: Communication page ripped into components, impersonation support, users page
|
||||
|
||||
**Key characteristics:** VueJS 3 + TypeScript + Vite frontend, Pinia stores, vue-router, SCSS, SPA architecture replacing server-rendered Go templates
|
||||
|
||||
---
|
||||
|
||||
### Phase 6: Compliance & Communication (April 2026) — 454 commits
|
||||
|
||||
**Apr 1 – Apr 9: RMO frontend & resources**
|
||||
- Resource layer expanded (user, avatar, district, nuisance, water, compliance resources)
|
||||
- RMO frontend checkpoint — Vue ports of public-facing pages
|
||||
- TS types migrated into API module
|
||||
- Old bundle paths removed, old SPA generation removed
|
||||
|
||||
**Apr 10 – Apr 17: Compliance workflow**
|
||||
- Compliance report creation, mailer flow
|
||||
- Site/pool review tasks
|
||||
- Stadia Maps cache, direct tile access
|
||||
- OAuth refresh in frontend
|
||||
- Image upload components
|
||||
|
||||
**Apr 17 – Apr 25: Communication system**
|
||||
- Background jobs reworked for shorter transactions
|
||||
- Lob (physical mail) integration — direct API client, address creation, letter events
|
||||
- QR code generation moved to API
|
||||
- Compliance report evidence, mailer views
|
||||
- Vue map system generalized (`cad01e68`)
|
||||
|
||||
**Apr 25 – Apr 30: Map & communication polish**
|
||||
- VueJS reimplementation of address/report suggestion
|
||||
- Communication workbench with map, list, detail views
|
||||
- Text message log, email/phone display
|
||||
- Compliance card detail display
|
||||
- SSE event system with status vs resource message distinction
|
||||
- Systemd socket activation for downtime-free deploys
|
||||
- Sentry error tracking for Vue frontend
|
||||
|
||||
**Key characteristics:** Compliance/mailer operational, communication system born, Lob integration, Sentry, generalized Vue map system
|
||||
|
||||
---
|
||||
|
||||
### Phase 7: Jet Migration & Cleanup (May 2026) — 46 commits so far
|
||||
|
||||
**May 1 – May 9: SQL generation transition**
|
||||
- **Jet (go-jet/jet) introduced** — type-safe SQL builder replacing Bob's query building
|
||||
- Custom Jet generator created with geometry/Box2D type support (`db/jet/main.go`)
|
||||
- `publicreport` schema ported to Jet
|
||||
- `arcgis` schema ported to Jet (compiles, not fully tested per commit message)
|
||||
- New `communication` table added
|
||||
- Communication marking workflow (invalid, pending-response, possible-issue, possible-resolved)
|
||||
- Linting: `golangci-lint` added to lefthook, per-file linting
|
||||
- Cleanup of legacy generated columns (latitude/longitude), string-based queries
|
||||
- Centralized error handler for Vue sync app
|
||||
|
||||
**Key characteristics:** Bob→Jet transition in progress, communication workflow, code quality improvements
|
||||
|
||||
---
|
||||
|
||||
## Architectural Patterns (by layer)
|
||||
|
||||
### Current architecture stack
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────┐
|
||||
│ Vue 3 SPA (TypeScript) │
|
||||
│ ts/ — shared components, composables, stores │
|
||||
│ vite/sync/ — admin SPA entry │
|
||||
│ vite/rmo/ — public SPA entry │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ Go HTTP Server (gorilla/mux) │
|
||||
│ api/routes.go — central route registration │
|
||||
│ resource/ — resource handlers (REST patterns) │
|
||||
│ sync/ — remaining Go template routes │
|
||||
│ rmo/ — remaining Go template routes │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ platform/ — business logic layer │
|
||||
│ (address, compliance, communication, district, │
|
||||
│ email, fieldseeker, mailer, publicreport, │
|
||||
│ review, signal, text, user, upload, etc.) │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ db/ — database access │
|
||||
│ db/models/ — Bob-generated models (103 files) │
|
||||
│ db/query/ — Jet-based query functions │
|
||||
│ db/prepared.go — prepared SQL functions │
|
||||
├─────────────────────────────────────────────────┤
|
||||
│ PostgreSQL │
|
||||
└─────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### Pattern: Platform Layer
|
||||
Introduced January 2026, the `platform/` package encapsulates business logic between HTTP handlers and the database. It grew from initial report handling to encompass users, organizations, emails, texts, compliance, communications, signals, geocoding, tiles, uploads, and more.
|
||||
|
||||
### Pattern: Resource Layer
|
||||
Added March–April 2026, `resource/` provides typed REST resource handlers with URI generation (via mux route naming). Resources are instantiated with a `resource.NewRouter()` and expose methods like `List`, `Get`, `Create`, `Update`, `Delete` that return domain types. This replaced ad-hoc handler functions in `api/`.
|
||||
|
||||
### Pattern: Dual SPA + API
|
||||
Since late March 2026, both domains serve Vue SPAs for most routes, with the Go server acting as an API backend. The `static.SinglePageApp()` handler serves the Vite-built output and falls back to `index.html` for client-side routing. Some Go template routes remain for mailer PDF generation, OAuth flows, and previews.
|
||||
51
README.md
51
README.md
|
|
@ -2,6 +2,25 @@
|
|||
|
||||
This is the software that powers [Nidus Cloud Sync](https://sync.nidus.cloud).
|
||||
|
||||
## Administration
|
||||
|
||||
### Password resets
|
||||
|
||||
If you need to manually reset a password you can do so with:
|
||||
|
||||
```
|
||||
$ nix-shell -p genpass
|
||||
$ genpass 12
|
||||
abc123abc123
|
||||
# this is from nidus, installed on deployment servers at the system layer
|
||||
$ passwordgen
|
||||
Please enter your password: abc123abc123
|
||||
Password: abc123abc123
|
||||
Hash: $2a$14$hdtoAtP7joczutY3bxaFqemBApH8xc5NbXLvDQqBfdzWV3jGSy4zi
|
||||
$ psql -d nidus-sync
|
||||
nidus-sync=> update "user" set password_hash='$2a$14$hdtoAtP7joczutY3bxaFqemBApH8xc5NbXLvDQqBfdzWV3jGSy4zi' where id=<something>;
|
||||
```
|
||||
|
||||
## Building from source
|
||||
|
||||
First, you'll need [Nix](https://nix.dev).
|
||||
|
|
@ -48,7 +67,7 @@ There's a table containing district information in the database, `import.distric
|
|||
psql
|
||||
CREATE SCHEMA import;
|
||||
shp2pgsql -s 3857 -c -D -I CA_districts.shp import.district | psql -d nidus-sync
|
||||
psql
|
||||
psql -d nidus-sync
|
||||
ALTER TABLE import.district ADD COLUMN geom_4326 geometry(MultiPolygon,4326) GENERATED ALWAYS AS (ST_Transform(geom, 4326)) STORED;
|
||||
```
|
||||
|
||||
|
|
@ -84,10 +103,38 @@ This uses [goose](https://github.com/pressly/goose). You can use the goose comma
|
|||
> GOOSE_DRIVER=postgres GOOSE_DBSTRING="dbname=nidus-sync sslmode=disable" goose up
|
||||
```
|
||||
|
||||
### svg icons
|
||||
|
||||
These icons are generated as part of the build system. You can generate them manually with:
|
||||
|
||||
```
|
||||
pnpm generate-icons
|
||||
```
|
||||
|
||||
This will produce an scss file at `ts/gen/custom-icons.scss`
|
||||
|
||||
### typescript
|
||||
|
||||
In order to work on the TypeScript code you'll need to install the dependencies locally in your dev environment:
|
||||
|
||||
```
|
||||
nix develop
|
||||
pnpm install
|
||||
```
|
||||
|
||||
You can then generate the TypeScript with:
|
||||
|
||||
```
|
||||
pnpm watch
|
||||
|
||||
```
|
||||
|
||||
The only page that works right now is `https://sync.nidus.cloud/template-test`
|
||||
|
||||
### watchexec
|
||||
|
||||
For iterating on styles
|
||||
|
||||
```
|
||||
watchexec -e *.scss sass scss/custom.scss:html/static/css/bootstrap.css
|
||||
watchexec -e scss sass scss/custom.scss:static/gen/css/bootstrap.css
|
||||
```
|
||||
|
|
|
|||
271
api/api.go
271
api/api.go
|
|
@ -1,7 +1,7 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"context"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
|
@ -9,84 +9,70 @@ import (
|
|||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/render"
|
||||
"github.com/google/uuid"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/resource"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/version"
|
||||
//"github.com/gorilla/mux"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func apiAudioPost(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
id := chi.URLParam(r, "uuid")
|
||||
noteUUID, err := uuid.Parse(id)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to decode the uuid", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
/*
|
||||
type renderer struct {
|
||||
}
|
||||
func (ren *renderer) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
return nil
|
||||
}
|
||||
*/
|
||||
// In the best case scenario, the excellent github.com/pkg/errors package
|
||||
// helps reveal information on the error, setting it on Err, and in the Render()
|
||||
// method, using it to set the application-specific error code in AppCode.
|
||||
type ResponseErr struct {
|
||||
Error error `json:"-"` // low-level runtime error
|
||||
HTTPStatusCode int `json:"-"` // http response status code
|
||||
|
||||
var payload NoteAudioPayload
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to read the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if err := json.Unmarshal(body, &payload); err != nil {
|
||||
//debugSaveRequest(body, err, "Audio note POST JSON decode error")
|
||||
http.Error(w, "Failed to decode the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
setter := models.NoteAudioSetter{
|
||||
Created: omit.From(payload.Created),
|
||||
CreatorID: omit.From(int32(u.ID)),
|
||||
Deleted: omitnull.FromPtr(payload.Deleted),
|
||||
DeletorID: omitnull.FromPtr(payload.DeletorID),
|
||||
Duration: omit.From(payload.Duration),
|
||||
OrganizationID: omit.From(u.Organization.ID()),
|
||||
Transcription: omitnull.FromPtr(payload.Transcription),
|
||||
TranscriptionUserEdited: omit.From(payload.TranscriptionUserEdited),
|
||||
Version: omit.From(payload.Version),
|
||||
UUID: omit.From(noteUUID),
|
||||
}
|
||||
if err := platform.NoteAudioCreate(ctx, u, setter); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusAccepted)
|
||||
StatusText string `json:"status"` // user-level status message
|
||||
AppCode int64 `json:"code,omitempty"` // application-specific error code
|
||||
ErrorText string `json:"error,omitempty"` // application-level error message, for debugging
|
||||
}
|
||||
|
||||
func apiAudioContentPost(w http.ResponseWriter, r *http.Request, user platform.User) {
|
||||
u_str := chi.URLParam(r, "uuid")
|
||||
u, err := uuid.Parse(u_str)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
err = file.FileContentWrite(r.Body, file.CollectionAudioRaw, u)
|
||||
if err != nil {
|
||||
log.Printf("Failed to write content file: %v", err)
|
||||
http.Error(w, "failed to write content file", http.StatusInternalServerError)
|
||||
}
|
||||
ctx := r.Context()
|
||||
a, err := models.NoteAudios.Query(
|
||||
models.SelectWhere.NoteAudios.UUID.EQ(u),
|
||||
models.SelectWhere.NoteAudios.OrganizationID.EQ(user.Organization.ID()),
|
||||
).One(ctx, db.PGInstance.BobDB)
|
||||
background.NewAudioTranscode(ctx, db.PGInstance.BobDB, a.ID)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
func (e *ResponseErr) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
http.Error(w, e.StatusText, e.HTTPStatusCode)
|
||||
return nil
|
||||
}
|
||||
|
||||
func errRender(err error) *ResponseErr {
|
||||
log.Error().Err(err).Msg("Rendering error")
|
||||
return &ResponseErr{
|
||||
Error: err,
|
||||
HTTPStatusCode: 500,
|
||||
StatusText: "Error rendering response",
|
||||
ErrorText: err.Error(),
|
||||
}
|
||||
}
|
||||
|
||||
type Renderable interface {
|
||||
Render(http.ResponseWriter, *http.Request) error
|
||||
}
|
||||
|
||||
func renderShim(w http.ResponseWriter, r *http.Request, renderer Renderable) error {
|
||||
return renderer.Render(w, r)
|
||||
}
|
||||
func renderList(w http.ResponseWriter, r *http.Request, data []Renderable) error {
|
||||
return nil
|
||||
}
|
||||
func handleClientIos(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
var sinceStr string
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(fmt.Errorf("Failed to parse GET form: %w", err)))
|
||||
err = renderShim(w, r, errRender(fmt.Errorf("Failed to parse GET form: %w", err)))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
} else {
|
||||
sinceStr = r.FormValue("since")
|
||||
|
|
@ -98,14 +84,20 @@ func handleClientIos(w http.ResponseWriter, r *http.Request, u platform.User) {
|
|||
} else {
|
||||
since, err = parseTime(sinceStr)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(fmt.Errorf("Failed to parse 'since' value: %w", err)))
|
||||
err = renderShim(w, r, errRender(fmt.Errorf("Failed to parse 'since' value: %w", err)))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
csync, err := platform.ContentClientIos(r.Context(), u, since)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -119,8 +111,11 @@ func handleClientIos(w http.ResponseWriter, r *http.Request, u platform.User) {
|
|||
Fieldseeker: toResponseFieldseeker(csync.Fieldseeker),
|
||||
Since: since_used,
|
||||
}
|
||||
if err := render.Render(w, r, response); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
if err := renderShim(w, r, response); err != nil {
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
|
|
@ -128,7 +123,10 @@ func handleClientIos(w http.ResponseWriter, r *http.Request, u platform.User) {
|
|||
func apiMosquitoSource(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
bounds, err := parseBounds(r)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -137,23 +135,32 @@ func apiMosquitoSource(w http.ResponseWriter, r *http.Request, u platform.User)
|
|||
query.Limit = 100
|
||||
sources, err := platform.MosquitoSourceQuery()
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
data := []render.Renderer{}
|
||||
data := []Renderable{}
|
||||
for _, s := range sources {
|
||||
data = append(data, NewResponseMosquitoSource(s))
|
||||
}
|
||||
if err := render.RenderList(w, r, data); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
if err := renderList(w, r, data); err != nil {
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func apiTrapData(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
bounds, err := parseBounds(r)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -162,23 +169,32 @@ func apiTrapData(w http.ResponseWriter, r *http.Request, u platform.User) {
|
|||
query.Limit = 100
|
||||
trap_data, err := platform.TrapDataQuery()
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
data := []render.Renderer{}
|
||||
data := []Renderable{}
|
||||
for _, td := range trap_data {
|
||||
data = append(data, NewResponseTrapDatum(td))
|
||||
}
|
||||
if err := render.RenderList(w, r, data); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
if err := renderList(w, r, data); err != nil {
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func apiServiceRequest(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
bounds, err := parseBounds(r)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
query := db.NewGeoQuery()
|
||||
|
|
@ -186,16 +202,22 @@ func apiServiceRequest(w http.ResponseWriter, r *http.Request, u platform.User)
|
|||
query.Limit = 100
|
||||
requests, err := platform.ServiceRequestQuery()
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
data := []render.Renderer{}
|
||||
data := []Renderable{}
|
||||
for _, sr := range requests {
|
||||
data = append(data, NewResponseServiceRequest(sr))
|
||||
data = append(data, types.ServiceRequestFromModel(sr))
|
||||
}
|
||||
if err := render.RenderList(w, r, data); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
if err := renderList(w, r, data); err != nil {
|
||||
err = renderShim(w, r, errRender(err))
|
||||
if err != nil {
|
||||
http.Error(w, fmt.Sprintf("render shim: %v", err), http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -236,16 +258,6 @@ func parseBounds(r *http.Request) (*db.GeoBounds, error) {
|
|||
return &bounds, nil
|
||||
}
|
||||
|
||||
func errRender(err error) render.Renderer {
|
||||
log.Error().Err(err).Msg("Rendering error")
|
||||
return &ResponseErr{
|
||||
Error: err,
|
||||
HTTPStatusCode: 500,
|
||||
StatusText: "Error rendering response",
|
||||
ErrorText: err.Error(),
|
||||
}
|
||||
}
|
||||
|
||||
func webhookFieldseeker(w http.ResponseWriter, r *http.Request) {
|
||||
// Create or open the log file
|
||||
file, err := os.OpenFile("webhook/request.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0644)
|
||||
|
|
@ -254,17 +266,32 @@ func webhookFieldseeker(w http.ResponseWriter, r *http.Request) {
|
|||
http.Error(w, "Internal Server Error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
defer lint.LogOnErr(file.Close, "close request log")
|
||||
|
||||
// Write timestamp
|
||||
timestamp := time.Now().Format("2006-01-02 15:04:05")
|
||||
fmt.Fprintf(file, "\n=== Request logged at %s ===\n", timestamp)
|
||||
_, err = fmt.Fprintf(file, "\n=== Request logged at %s ===\n", timestamp)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Write request line
|
||||
fmt.Fprintf(file, "%s %s %s\n", r.Method, r.RequestURI, r.Proto)
|
||||
_, err = fmt.Fprintf(file, "%s %s %s\n", r.Method, r.RequestURI, r.Proto)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
// Write all headers
|
||||
fmt.Fprintf(file, "\nHeaders:\n")
|
||||
_, err = fmt.Fprintf(file, "\nHeaders:\n")
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
for name, values := range r.Header {
|
||||
for _, value := range values {
|
||||
fmt.Fprintf(file, "%s: %s\n", name, value)
|
||||
|
|
@ -272,13 +299,29 @@ func webhookFieldseeker(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
// Write body
|
||||
fmt.Fprintf(file, "\nBody:\n")
|
||||
_, err = fmt.Fprintf(file, "\nBody:\n")
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
log.Printf("Error reading request body: %v", err)
|
||||
fmt.Fprintf(file, "Error reading body: %v\n", err)
|
||||
_, err = fmt.Fprintf(file, "Error reading body: %v\n", err)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
} else {
|
||||
file.Write(body)
|
||||
_, err = file.Write(body)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("writing response")
|
||||
http.Error(w, "Internal server error", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
if len(body) == 0 {
|
||||
fmt.Fprintf(file, "(empty body)")
|
||||
}
|
||||
|
|
@ -300,3 +343,27 @@ func parseTime(x string) (*time.Time, error) {
|
|||
created := time.UnixMilli(created_epoch)
|
||||
return &created, nil
|
||||
}
|
||||
|
||||
type about struct {
|
||||
Environment string `json:"environment"`
|
||||
SentryDSN string `json:"sentry_dsn"`
|
||||
Tegola tegolaURLs `json:"tegola"`
|
||||
Version version.VersionInfo `json:"version"`
|
||||
}
|
||||
type tegolaURLs struct {
|
||||
Nidus string `json:"nidus"`
|
||||
RMO string `json:"rmo"`
|
||||
}
|
||||
|
||||
func getRoot(ctx context.Context, r *http.Request, q resource.QueryParams) (*about, *nhttp.ErrorWithStatus) {
|
||||
v := version.Get()
|
||||
return &about{
|
||||
Environment: config.Environment,
|
||||
SentryDSN: config.SentryDSNFrontend,
|
||||
Tegola: tegolaURLs{
|
||||
Nidus: config.MakeURLTegola("/maps/nidus/{z}/{x}/{y}?id={organization_id}"),
|
||||
RMO: config.MakeURLTegola("/maps/rmo/{z}/{x}/{y}"),
|
||||
},
|
||||
Version: v,
|
||||
}, nil
|
||||
}
|
||||
|
|
|
|||
93
api/audio.go
Normal file
93
api/audio.go
Normal file
|
|
@ -0,0 +1,93 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/background"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func apiAudioPost(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
vars := mux.Vars(r)
|
||||
id := vars["uuid"]
|
||||
noteUUID, err := uuid.Parse(id)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to decode the uuid", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var payload NoteAudioPayload
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to read the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
if err := json.Unmarshal(body, &payload); err != nil {
|
||||
//debugSaveRequest(body, err, "Audio note POST JSON decode error")
|
||||
http.Error(w, "Failed to decode the payload", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
setter := models.NoteAudioSetter{
|
||||
Created: omit.From(payload.Created),
|
||||
CreatorID: omit.From(int32(u.ID)),
|
||||
Deleted: omitnull.FromPtr(payload.Deleted),
|
||||
DeletorID: omitnull.FromPtr(payload.DeletorID),
|
||||
Duration: omit.From(payload.Duration),
|
||||
OrganizationID: omit.From(u.Organization.ID),
|
||||
Transcription: omitnull.FromPtr(payload.Transcription),
|
||||
TranscriptionUserEdited: omit.From(payload.TranscriptionUserEdited),
|
||||
Version: omit.From(payload.Version),
|
||||
UUID: omit.From(noteUUID),
|
||||
}
|
||||
if err := platform.NoteAudioCreate(ctx, u, setter); err != nil {
|
||||
renderShim(w, r, errRender(err))
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusAccepted)
|
||||
}
|
||||
|
||||
func apiAudioContentPost(w http.ResponseWriter, r *http.Request, user platform.User) {
|
||||
vars := mux.Vars(r)
|
||||
u_str := vars["uuid"]
|
||||
u, err := uuid.Parse(u_str)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
err = file.FileContentWrite(r.Body, file.CollectionAudioRaw, u)
|
||||
if err != nil {
|
||||
log.Printf("Failed to write content file: %v", err)
|
||||
http.Error(w, "failed to write content file", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
a, err := models.NoteAudios.Query(
|
||||
models.SelectWhere.NoteAudios.UUID.EQ(u),
|
||||
models.SelectWhere.NoteAudios.OrganizationID.EQ(user.Organization.ID),
|
||||
).One(ctx, db.PGInstance.BobDB)
|
||||
if err != nil {
|
||||
log.Printf("Failed to get note audio %s for org %d: %w", u_str, user.Organization.ID, err)
|
||||
http.Error(w, "failed to update database", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
err = background.NewAudioTranscode(ctx, db.PGInstance.BobDB, a.ID)
|
||||
if err != nil {
|
||||
log.Printf("Failed to transcode audio %s for org %d: %w", u_str, user.Organization.ID, err)
|
||||
http.Error(w, "failed to transcode audio", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
1
api/avatar.go
Normal file
1
api/avatar.go
Normal file
|
|
@ -0,0 +1 @@
|
|||
package api
|
||||
|
|
@ -1,67 +1 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"slices"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/publicreport"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
|
||||
"github.com/google/uuid"
|
||||
//"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
type communication struct {
|
||||
Created time.Time `json:"created"`
|
||||
ID string `json:"id"`
|
||||
PublicReport types.PublicReport `json:"public_report"`
|
||||
Type string `json:"type"`
|
||||
}
|
||||
type contentListCommunication struct {
|
||||
Communications []communication `json:"communications"`
|
||||
}
|
||||
|
||||
func listCommunication(ctx context.Context, r *http.Request, user platform.User, query queryParams) (*contentListCommunication, *nhttp.ErrorWithStatus) {
|
||||
reports, err := publicreport.ReportsForOrganization(ctx, user.Organization.ID())
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("nuisance report query: %w", err)
|
||||
}
|
||||
comms := make([]communication, len(reports))
|
||||
for i, report := range reports {
|
||||
comms[i] = communication{
|
||||
Created: report.Created,
|
||||
ID: report.PublicID,
|
||||
PublicReport: report,
|
||||
Type: "publicreport." + string(report.Type),
|
||||
}
|
||||
}
|
||||
_by_created := func(a, b communication) int {
|
||||
if a.Created == b.Created {
|
||||
return 0
|
||||
} else if a.Created.Before(b.Created) {
|
||||
return 1
|
||||
} else {
|
||||
return -1
|
||||
}
|
||||
}
|
||||
slices.SortFunc(comms, _by_created)
|
||||
return &contentListCommunication{
|
||||
Communications: comms,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func toImageURLs(m map[string][]uuid.UUID, id string) []string {
|
||||
uuids, ok := m[id]
|
||||
if !ok {
|
||||
return []string{}
|
||||
}
|
||||
urls := make([]string, len(uuids))
|
||||
for i, u := range uuids {
|
||||
urls[i] = config.MakeURLNidus("/api/image/%s/content", u.String())
|
||||
}
|
||||
return urls
|
||||
}
|
||||
|
|
|
|||
|
|
@ -13,14 +13,15 @@ import (
|
|||
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/paulmach/orb/geojson"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/stephenafamo/scan"
|
||||
)
|
||||
|
||||
func getComplianceRequestImagePool(w http.ResponseWriter, r *http.Request) {
|
||||
code := chi.URLParam(r, "public_id")
|
||||
vars := mux.Vars(r)
|
||||
code := vars["public_id"]
|
||||
if code == "" {
|
||||
http.Error(w, "empty public_id", http.StatusBadRequest)
|
||||
return
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
package sync
|
||||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
|
@ -7,7 +7,8 @@ import (
|
|||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/gen/nidus-sync/arcgis/model"
|
||||
queryarcgis "github.com/Gleipnir-Technology/nidus-sync/db/query/arcgis"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/html"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
|
|
@ -25,9 +26,9 @@ type contentSettingOrganization struct {
|
|||
}
|
||||
|
||||
type contentSettingIntegration struct {
|
||||
ArcGISAccount *models.ArcgisAccount
|
||||
ArcGISOAuth *models.ArcgisOauthToken
|
||||
ServiceMaps []*models.ArcgisServiceMap
|
||||
ArcGISAccount *model.Account
|
||||
ArcGISOAuth *model.OAuthToken
|
||||
ServiceMaps []model.ServiceMap
|
||||
}
|
||||
|
||||
func getConfigurationOrganization(ctx context.Context, r *http.Request, u platform.User) (*html.Response[contentSettingOrganization], *nhttp.ErrorWithStatus) {
|
||||
|
|
@ -82,23 +83,21 @@ func getConfigurationIntegrationArcgis(ctx context.Context, r *http.Request, u p
|
|||
if err != nil {
|
||||
return nil, nhttp.NewError("Failed to get oauth: %w", err)
|
||||
}
|
||||
var account *models.ArcgisAccount
|
||||
var service_maps []*models.ArcgisServiceMap
|
||||
var account model.Account
|
||||
var service_maps []model.ServiceMap
|
||||
account_id := u.Organization.ArcgisAccountID()
|
||||
if account_id != "" {
|
||||
account, err = models.FindArcgisAccount(ctx, db.PGInstance.BobDB, account_id)
|
||||
account, err = queryarcgis.AccountFromID(ctx, account_id)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("Failed to get arcgis: %w", err)
|
||||
}
|
||||
service_maps, err = models.ArcgisServiceMaps.Query(
|
||||
models.SelectWhere.ArcgisServiceMaps.AccountID.EQ(account.ID),
|
||||
).All(ctx, db.PGInstance.BobDB)
|
||||
service_maps, err = queryarcgis.ServiceMapsFromAccountID(ctx, account.ID)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("Failed to get map services: %w", err)
|
||||
}
|
||||
}
|
||||
data := contentSettingIntegration{
|
||||
ArcGISAccount: account,
|
||||
ArcGISAccount: &account,
|
||||
ArcGISOAuth: oauth,
|
||||
ServiceMaps: service_maps,
|
||||
}
|
||||
|
|
@ -133,12 +132,12 @@ func postConfigurationIntegrationArcgis(ctx context.Context, r *http.Request, u
|
|||
_, err := psql.Update(
|
||||
um.Table("organization"),
|
||||
um.SetCol("arcgis_map_service_id").ToArg(f.MapService),
|
||||
um.Where(psql.Quote("id").EQ(psql.Arg(u.Organization.ID()))),
|
||||
um.Where(psql.Quote("id").EQ(psql.Arg(u.Organization.ID))),
|
||||
).Exec(ctx, db.PGInstance.BobDB)
|
||||
if err != nil {
|
||||
return "", nhttp.NewError("Failed to update map service config: %w", err)
|
||||
}
|
||||
log.Info().Str("map-service", *f.MapService).Int32("org-id", u.Organization.ID()).Msg("changed map service")
|
||||
log.Info().Str("map-service", *f.MapService).Int32("org-id", u.Organization.ID).Msg("changed map service")
|
||||
} else {
|
||||
log.Info().Msg("no map service")
|
||||
}
|
||||
|
|
@ -5,6 +5,7 @@ import (
|
|||
"net/http"
|
||||
"os"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
|
|
@ -14,7 +15,7 @@ func debugSaveRequest(r *http.Request) {
|
|||
log.Error().Err(err).Msg("failed to create temp file for debugSaveRequest")
|
||||
return
|
||||
}
|
||||
defer tmpFile.Close()
|
||||
defer lint.LogOnErr(tmpFile.Close, "close temp file")
|
||||
|
||||
_, err = io.Copy(tmpFile, r.Body)
|
||||
if err != nil {
|
||||
|
|
|
|||
|
|
@ -9,15 +9,14 @@ import (
|
|||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/render"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func apiGetDistrict(w http.ResponseWriter, r *http.Request) {
|
||||
var latStr, lngStr string
|
||||
err := r.ParseForm()
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(fmt.Errorf("Failed to parse GET form: %w", err)))
|
||||
renderShim(w, r, errRender(fmt.Errorf("Failed to parse GET form: %w", err)))
|
||||
return
|
||||
} else {
|
||||
latStr = r.FormValue("lat")
|
||||
|
|
@ -25,17 +24,17 @@ func apiGetDistrict(w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
lat, err := strconv.ParseFloat(latStr, 64)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(fmt.Errorf("Failed to parse lat as float: %w", err)))
|
||||
renderShim(w, r, errRender(fmt.Errorf("Failed to parse lat as float: %w", err)))
|
||||
return
|
||||
}
|
||||
lng, err := strconv.ParseFloat(lngStr, 64)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(fmt.Errorf("Failed to parse lng as float: %w", err)))
|
||||
renderShim(w, r, errRender(fmt.Errorf("Failed to parse lng as float: %w", err)))
|
||||
return
|
||||
}
|
||||
org, err := platform.DistrictForLocation(r.Context(), lng, lat)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(fmt.Errorf("Failed to get district: %w", err)))
|
||||
renderShim(w, r, errRender(fmt.Errorf("Failed to get district: %w", err)))
|
||||
return
|
||||
}
|
||||
if org == nil {
|
||||
|
|
@ -48,13 +47,14 @@ func apiGetDistrict(w http.ResponseWriter, r *http.Request) {
|
|||
Phone: org.OfficePhone.GetOr(""),
|
||||
Website: org.Website.GetOr(""),
|
||||
}
|
||||
if err := render.Render(w, r, d); err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
if err := renderShim(w, r, d); err != nil {
|
||||
renderShim(w, r, errRender(err))
|
||||
}
|
||||
}
|
||||
|
||||
func apiGetDistrictLogo(w http.ResponseWriter, r *http.Request) {
|
||||
slug := chi.URLParam(r, "slug")
|
||||
vars := mux.Vars(r)
|
||||
slug := vars["slug"]
|
||||
ctx := r.Context()
|
||||
rows, err := models.Organizations.Query(
|
||||
models.SelectWhere.Organizations.Slug.EQ(slug),
|
||||
|
|
@ -73,7 +73,7 @@ func apiGetDistrictLogo(w http.ResponseWriter, r *http.Request) {
|
|||
http.Error(w, "Logo not found", http.StatusNotFound)
|
||||
return
|
||||
}
|
||||
file.ImageFileContentWriteLogo(w, org.LogoUUID.MustGet())
|
||||
file.ImageFileToWriter(file.CollectionLogo, org.LogoUUID.MustGet(), w)
|
||||
return
|
||||
default:
|
||||
http.Error(w, "Too many organizations, this is a programmer error", http.StatusInternalServerError)
|
||||
|
|
|
|||
58
api/event.go
58
api/event.go
|
|
@ -7,17 +7,20 @@ import (
|
|||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/event"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/version"
|
||||
"github.com/google/uuid"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
var connectionsSSE map[*ConnectionSSE]bool = make(map[*ConnectionSSE]bool, 0)
|
||||
var TYPE_STATUS string = "status"
|
||||
|
||||
type ConnectionSSE struct {
|
||||
chanEvent chan platform.Event
|
||||
id uuid.UUID
|
||||
organizationID int32
|
||||
userID int
|
||||
userID int32
|
||||
}
|
||||
|
||||
type Message struct {
|
||||
|
|
@ -27,7 +30,25 @@ type Message struct {
|
|||
URI string `json:"uri"`
|
||||
}
|
||||
|
||||
type Status struct {
|
||||
BuildTime time.Time `json:"build_time"`
|
||||
IsModified bool `json:"is_modified"`
|
||||
Revision string `json:"revision"`
|
||||
Status string `json:"status"`
|
||||
Type string `json:"type"`
|
||||
}
|
||||
|
||||
func (c *ConnectionSSE) SendEvent(w http.ResponseWriter, m platform.Event) error {
|
||||
if m.Type == event.EventTypeShutdown {
|
||||
v := version.Get()
|
||||
return send(w, Status{
|
||||
BuildTime: v.BuildTime,
|
||||
IsModified: v.IsModified,
|
||||
Revision: v.Revision,
|
||||
Status: m.Type.String(),
|
||||
Type: TYPE_STATUS,
|
||||
})
|
||||
}
|
||||
return send(w, Message{
|
||||
Resource: m.Resource,
|
||||
Time: m.Time,
|
||||
|
|
@ -46,10 +67,13 @@ func (c *ConnectionSSE) SendHeartbeat(w http.ResponseWriter, t time.Time) error
|
|||
func SetEventChannel(chan_envelopes <-chan platform.Envelope) {
|
||||
go func() {
|
||||
for envelope := range chan_envelopes {
|
||||
for conn, _ := range connectionsSSE {
|
||||
if conn.organizationID == envelope.OrganizationID {
|
||||
for conn := range connectionsSSE {
|
||||
if conn.organizationID == envelope.OrganizationID || envelope.OrganizationID == 0 {
|
||||
log.Debug().Int("type", int(envelope.Event.Type)).Int32("env-org", envelope.OrganizationID).Msg("pushed event to client")
|
||||
conn.chanEvent <- envelope.Event
|
||||
} else if conn.userID == envelope.UserID {
|
||||
log.Debug().Int("type", int(envelope.Event.Type)).Int32("env-user", envelope.UserID).Msg("pushed event to user")
|
||||
conn.chanEvent <- envelope.Event
|
||||
} else {
|
||||
log.Debug().Int("type", int(envelope.Event.Type)).Int32("env-org", envelope.OrganizationID).Int32("conn-org", conn.organizationID).Msg("skipped event, bad org")
|
||||
}
|
||||
|
|
@ -58,6 +82,7 @@ func SetEventChannel(chan_envelopes <-chan platform.Envelope) {
|
|||
}
|
||||
}()
|
||||
}
|
||||
|
||||
func send[T any](w http.ResponseWriter, msg T) error {
|
||||
jsonData, err := json.Marshal(msg)
|
||||
if err != nil {
|
||||
|
|
@ -82,18 +107,35 @@ func streamEvents(w http.ResponseWriter, r *http.Request, u platform.User) {
|
|||
uid, err := uuid.NewUUID()
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to create uuid")
|
||||
http.Error(w, "failed to create uuid", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
connection := ConnectionSSE{
|
||||
chanEvent: make(chan platform.Event),
|
||||
id: uid,
|
||||
organizationID: u.Organization.ID(),
|
||||
userID: u.ID,
|
||||
organizationID: u.Organization.ID,
|
||||
userID: int32(u.ID),
|
||||
}
|
||||
connectionsSSE[&connection] = true
|
||||
log.Debug().Int32("org", u.Organization.ID()).Int("user", u.ID).Str("id", uid.String()).Msg("connected SSE client")
|
||||
log.Debug().Int32("org", u.Organization.ID).Int("user", u.ID).Str("id", uid.String()).Msg("connected SSE client")
|
||||
|
||||
// Send an initial connected event
|
||||
fmt.Fprintf(w, "event: connected\ndata: {\"status\": \"connected\", \"time\": \"%s\"}\n\n", time.Now().Format(time.RFC3339))
|
||||
v := version.Get()
|
||||
status := Status{
|
||||
BuildTime: v.BuildTime,
|
||||
IsModified: v.IsModified,
|
||||
Revision: v.Revision,
|
||||
Status: "connected",
|
||||
Type: TYPE_STATUS,
|
||||
}
|
||||
body, err := json.Marshal(status)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal connect status")
|
||||
http.Error(w, "failed to marshal connect status", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
fmt.Fprintf(w, "data: %s\n\n", body)
|
||||
w.(http.Flusher).Flush()
|
||||
|
||||
// Keep the connection open with a ticker sending periodic events
|
||||
|
|
@ -107,7 +149,7 @@ func streamEvents(w http.ResponseWriter, r *http.Request, u platform.User) {
|
|||
for {
|
||||
select {
|
||||
case <-done:
|
||||
log.Debug().Int32("org", u.Organization.ID()).Int("user", u.ID).Str("id", uid.String()).Msg("Client closed connection")
|
||||
log.Debug().Int32("org", u.Organization.ID).Int("user", u.ID).Str("id", uid.String()).Msg("Client closed connection")
|
||||
delete(connectionsSSE, &connection)
|
||||
return
|
||||
case t := <-ticker.C:
|
||||
|
|
|
|||
394
api/handler.go
394
api/handler.go
|
|
@ -8,99 +8,387 @@ import (
|
|||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/html"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/resource"
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/schema"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
var decoder = schema.NewDecoder()
|
||||
|
||||
type handlerFunctionGet[T any] func(context.Context, *http.Request, platform.User, queryParams) (*T, *nhttp.ErrorWithStatus)
|
||||
type wrappedHandler func(http.ResponseWriter, *http.Request)
|
||||
type contentAuthenticated[T any] struct {
|
||||
C T
|
||||
Config html.ContentConfig
|
||||
User platform.User
|
||||
}
|
||||
|
||||
type ErrorAPI struct {
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
func authenticatedHandlerJSON[T any](f handlerFunctionGet[T]) http.Handler {
|
||||
var decoder = schema.NewDecoder()
|
||||
|
||||
type handlerBase func(context.Context, http.ResponseWriter, *http.Request) *nhttp.ErrorWithStatus
|
||||
type handlerBaseAuthenticated func(context.Context, http.ResponseWriter, *http.Request, platform.User) *nhttp.ErrorWithStatus
|
||||
type handlerFunctionDelete func(context.Context, *http.Request, platform.User) *nhttp.ErrorWithStatus
|
||||
type handlerFunctionGet[T any] func(context.Context, *http.Request, resource.QueryParams) (*T, *nhttp.ErrorWithStatus)
|
||||
type handlerFunctionGetAuthenticated[T any] func(context.Context, *http.Request, platform.User, resource.QueryParams) (*T, *nhttp.ErrorWithStatus)
|
||||
type handlerFunctionGetImage func(context.Context, *http.Request, platform.User) (file.Collection, uuid.UUID, *nhttp.ErrorWithStatus)
|
||||
type handlerFunctionGetSlice[T any] func(context.Context, *http.Request, resource.QueryParams) ([]*T, *nhttp.ErrorWithStatus)
|
||||
type handlerFunctionGetSliceAuthenticated[T any] func(context.Context, *http.Request, platform.User, resource.QueryParams) ([]T, *nhttp.ErrorWithStatus)
|
||||
type handlerFunctionPost[RequestType any, ResponseType any] func(context.Context, *http.Request, RequestType) (ResponseType, *nhttp.ErrorWithStatus)
|
||||
type handlerFunctionPostAuthenticated[RequestType any, ResponseType any] func(context.Context, *http.Request, platform.User, RequestType) (ResponseType, *nhttp.ErrorWithStatus)
|
||||
type handlerFunctionPostFormMultipart[RequestType any, ResponseType any] func(context.Context, *http.Request, RequestType) (*ResponseType, *nhttp.ErrorWithStatus)
|
||||
type handlerFunctionPutAuthenticated[RequestType any] func(context.Context, *http.Request, platform.User, RequestType) (string, *nhttp.ErrorWithStatus)
|
||||
|
||||
func authenticatedHandlerBasic(f handlerBaseAuthenticated) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
e := f(ctx, w, r, u)
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
return
|
||||
})
|
||||
}
|
||||
func authenticatedHandlerDelete(f handlerFunctionDelete) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
e := f(ctx, r, u)
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
http.Error(w, "", http.StatusNoContent)
|
||||
return
|
||||
})
|
||||
}
|
||||
|
||||
func authenticatedHandlerGetImage(f handlerFunctionGetImage) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
collection, uid, e := f(ctx, r, u)
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
file.ImageFileToWriter(collection, uid, w)
|
||||
})
|
||||
}
|
||||
|
||||
func authenticatedHandlerJSON[T any](f handlerFunctionGetAuthenticated[T]) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
var body []byte
|
||||
var params queryParams
|
||||
var params resource.QueryParams
|
||||
err := decoder.Decode(¶ms, r.URL.Query())
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("decode query failure")
|
||||
http.Error(w, "failed to decode query", http.StatusInternalServerError)
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("failed to decode query: %w", err))
|
||||
return
|
||||
}
|
||||
resp, e := f(ctx, r, u, params)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
//log.Info().Str("template", template).Err(e).Msg("handler done")
|
||||
if e != nil {
|
||||
log.Warn().Int("status", e.Status).Err(e).Str("user message", e.Message).Msg("Responding with an error from api")
|
||||
body, err = json.Marshal(ErrorAPI{Message: e.Error()})
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal error")
|
||||
http.Error(w, "{\"message\": \"boom. I can't even tell you what went wrong\"}", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
http.Error(w, string(body), e.Status)
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
body, err = json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
_, err = w.Write(body)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to write json: %w", err))
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func authenticatedHandlerJSONSlice[T any](f handlerFunctionGetSliceAuthenticated[T]) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
ctx := r.Context()
|
||||
var body []byte
|
||||
var params resource.QueryParams
|
||||
err := decoder.Decode(¶ms, r.URL.Query())
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("failed to decode query: %w", err))
|
||||
return
|
||||
}
|
||||
resp, e := f(ctx, r, u, params)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
//log.Info().Str("template", template).Err(e).Msg("handler done")
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
if resp == nil {
|
||||
body, err = json.Marshal([]struct{}{})
|
||||
} else {
|
||||
body, err = json.Marshal(resp)
|
||||
}
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
_, err = w.Write(body)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to write json: %w", err))
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
func authenticatedHandlerJSONPost[RequestType any, ResponseType any](f handlerFunctionPostAuthenticated[RequestType, ResponseType]) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
req, e := parseRequest[RequestType](r)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
resp, e := f(ctx, r, u, *req)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
body, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
_, err = w.Write(body)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to write json: %w", err))
|
||||
return
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
func authenticatedHandlerJSONPut[RequestType any](f handlerFunctionPutAuthenticated[RequestType]) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
req, e := parseRequest[RequestType](r)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
path, e := f(ctx, r, u, *req)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
if path == "" {
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
return
|
||||
}
|
||||
w.Header().Set("Location", path)
|
||||
http.Redirect(w, r, path, http.StatusCreated)
|
||||
})
|
||||
}
|
||||
func authenticatedHandlerPostMultipart[ResponseType any](f handlerFunctionPostAuthenticated[[]file.Upload, ResponseType], collection file.Collection) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
err := r.ParseMultipartForm(32 << 10) // 32 MB buffer
|
||||
if err != nil {
|
||||
respondError(w, http.StatusBadRequest, "Failed to parse form: %w ", err)
|
||||
return
|
||||
}
|
||||
uploads, err := file.SaveFileUploads(r, collection)
|
||||
if err != nil {
|
||||
respondError(w, http.StatusInternalServerError, "failed to save uploads: %w", err)
|
||||
return
|
||||
}
|
||||
|
||||
/*
|
||||
err = decoder.Decode(&content, r.PostForm)
|
||||
if err != nil {
|
||||
respondError(w, http.StatusBadRequest, "Failed to decode form: %w", err)
|
||||
return
|
||||
}
|
||||
*/
|
||||
ctx := r.Context()
|
||||
resp, e := f(ctx, r, u, uploads)
|
||||
if e != nil {
|
||||
http.Error(w, e.Error(), e.Status)
|
||||
return
|
||||
}
|
||||
body, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal json")
|
||||
http.Error(w, "{\"message\": \"failed to marshal json\"}", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
w.Write(body)
|
||||
})
|
||||
}
|
||||
|
||||
type handlerFunctionPost[ReqType any, ResponseType any] func(context.Context, *http.Request, platform.User, ReqType) (ResponseType, *nhttp.ErrorWithStatus)
|
||||
|
||||
func authenticatedHandlerJSONPost[ReqType any, ResponseType any](f handlerFunctionPost[ReqType, ResponseType]) http.Handler {
|
||||
return auth.NewEnsureAuth(func(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
var req ReqType
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
respondError(w, http.StatusInternalServerError, "Failed to read body: %w", err)
|
||||
func handlerBasic(f handlerBase) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
e := f(ctx, w, r)
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
err = json.Unmarshal(body, &req)
|
||||
}
|
||||
}
|
||||
func handlerJSON[T any](f handlerFunctionGet[T]) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
var body []byte
|
||||
var params resource.QueryParams
|
||||
err := decoder.Decode(¶ms, r.URL.Query())
|
||||
if err != nil {
|
||||
respondError(w, http.StatusBadRequest, "Failed to decode request: %w", err)
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("failed to decode query: %w", err))
|
||||
return
|
||||
}
|
||||
resp, e := f(ctx, r, params)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
//log.Info().Str("template", template).Err(e).Msg("handler done")
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
body, err = json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
w.Write(body)
|
||||
}
|
||||
}
|
||||
func handlerJSONSlice[T any](f handlerFunctionGetSlice[T]) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
var body []byte
|
||||
var params resource.QueryParams
|
||||
err := decoder.Decode(¶ms, r.URL.Query())
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("failed to decode query: %w", err))
|
||||
return
|
||||
}
|
||||
resp, e := f(ctx, r, params)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
//log.Info().Str("template", template).Err(e).Msg("handler done")
|
||||
if e != nil {
|
||||
respondErrorStatus(w, e)
|
||||
return
|
||||
}
|
||||
body, err = json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
w.Write(body)
|
||||
}
|
||||
}
|
||||
|
||||
func handlerJSONPost[RequestType any, ResponseType any](f handlerFunctionPost[RequestType, ResponseType]) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
req, e := parseRequest[RequestType](r)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
response, e := f(ctx, r, u, req)
|
||||
resp, e := f(ctx, r, *req)
|
||||
if e != nil {
|
||||
log.Warn().Int("status", e.Status).Err(e).Str("user message", e.Message).Msg("Responding with an error from api")
|
||||
body, err = json.Marshal(ErrorAPI{Message: e.Error()})
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal error")
|
||||
http.Error(w, "{\"message\": \"boom. I can't even tell you what went wrong\"}", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
http.Error(w, string(body), e.Status)
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
resp_body, err := json.Marshal(response)
|
||||
body, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondError(w, http.StatusInternalServerError, "Failed to marshal json response: %w", err)
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
w.Write(resp_body)
|
||||
})
|
||||
w.Write(body)
|
||||
}
|
||||
}
|
||||
|
||||
func handlerJSONPut[RequestType any, ResponseType any](f handlerFunctionPost[RequestType, ResponseType]) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
req, e := parseRequest[RequestType](r)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
resp, e := f(ctx, r, *req)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
body, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
w.Write(body)
|
||||
}
|
||||
}
|
||||
func handlerFormPost[RequestType any, ResponseType any](f handlerFunctionPostFormMultipart[RequestType, ResponseType]) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
err := r.ParseMultipartForm(32 << 12) // 128 MB buffer
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("bad form: %w", err))
|
||||
return
|
||||
}
|
||||
var req RequestType
|
||||
err = decoder.Decode(&req, r.PostForm)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewBadRequest("decode form: %w", err))
|
||||
return
|
||||
}
|
||||
ctx := r.Context()
|
||||
resp, e := f(ctx, r, req)
|
||||
if e != nil {
|
||||
serializeError(w, e)
|
||||
return
|
||||
}
|
||||
body, err := json.Marshal(resp)
|
||||
if err != nil {
|
||||
respondErrorStatus(w, nhttp.NewError("failed to marshal json: %w", err))
|
||||
return
|
||||
}
|
||||
w.Write(body)
|
||||
}
|
||||
}
|
||||
func parseRequest[RequestType any](r *http.Request) (*RequestType, *nhttp.ErrorWithStatus) {
|
||||
var err error
|
||||
var req RequestType
|
||||
content_type := r.Header.Get("Content-Type")
|
||||
switch content_type {
|
||||
case "application/json":
|
||||
body, e := io.ReadAll(r.Body)
|
||||
if e != nil {
|
||||
return nil, nhttp.NewError("Failed to read body: %w", err)
|
||||
}
|
||||
err = json.Unmarshal(body, &req)
|
||||
case "application/x-www-form-urlencoded":
|
||||
e := r.ParseForm()
|
||||
if err != nil {
|
||||
return nil, nhttp.NewBadRequest("parsing form: %w", e)
|
||||
}
|
||||
err = decoder.Decode(&req, r.PostForm)
|
||||
default:
|
||||
return nil, nhttp.NewBadRequest("unrecognized content type '%s'", content_type)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, nhttp.NewErrorStatus(http.StatusBadRequest, "Failed to decode request: %w", err)
|
||||
}
|
||||
return &req, nil
|
||||
}
|
||||
func serializeError(w http.ResponseWriter, e *nhttp.ErrorWithStatus) {
|
||||
log.Warn().Int("status", e.Status).Err(e).Str("user message", e.Message).Msg("Responding with an error from api")
|
||||
body, err := json.Marshal(ErrorAPI{Message: e.Error()})
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal error")
|
||||
http.Error(w, "{\"message\": \"boom. I can't even tell you what went wrong\"}", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
http.Error(w, string(body), e.Status)
|
||||
return
|
||||
}
|
||||
func respondError(w http.ResponseWriter, status int, format string, args ...any) {
|
||||
outer_err := fmt.Errorf(format, args...)
|
||||
body, err := json.Marshal(ErrorAPI{
|
||||
|
|
@ -112,3 +400,13 @@ func respondError(w http.ResponseWriter, status int, format string, args ...any)
|
|||
}
|
||||
http.Error(w, string(body), status)
|
||||
}
|
||||
func respondErrorStatus(w http.ResponseWriter, e *nhttp.ErrorWithStatus) {
|
||||
log.Warn().Int("status", e.Status).Err(e).Str("user message", e.Message).Msg("Responding with an error from api")
|
||||
body, err := json.Marshal(ErrorAPI{Message: e.Error()})
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to marshal error")
|
||||
http.Error(w, "{\"message\": \"boom. I can't even tell you what went wrong\"}", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
http.Error(w, string(body), e.Status)
|
||||
}
|
||||
|
|
|
|||
33
api/image.go
33
api/image.go
|
|
@ -11,14 +11,14 @@ import (
|
|||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/render"
|
||||
"github.com/google/uuid"
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func apiImagePost(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
id := chi.URLParam(r, "uuid")
|
||||
vars := mux.Vars(r)
|
||||
id := vars["uuid"]
|
||||
noteUUID, err := uuid.Parse(id)
|
||||
if err != nil {
|
||||
http.Error(w, "Failed to decode the uuid", http.StatusBadRequest)
|
||||
|
|
@ -38,41 +38,44 @@ func apiImagePost(w http.ResponseWriter, r *http.Request, u platform.User) {
|
|||
}
|
||||
ctx := r.Context()
|
||||
setter := models.NoteImageSetter{
|
||||
Created: omit.From(payload.Created),
|
||||
CreatorID: omit.From(int32(u.ID)),
|
||||
Deleted: omitnull.FromPtr(payload.Deleted),
|
||||
DeletorID: omitnull.FromPtr(payload.DeletorID),
|
||||
Version: omit.From(payload.Version),
|
||||
UUID: omit.From(noteUUID),
|
||||
Created: omit.From(payload.Created),
|
||||
CreatorID: omit.From(int32(u.ID)),
|
||||
Deleted: omitnull.FromPtr(payload.Deleted),
|
||||
DeletorID: omitnull.FromPtr(payload.DeletorID),
|
||||
OrganizationID: omit.From(u.Organization.ID),
|
||||
Version: omit.From(payload.Version),
|
||||
UUID: omit.From(noteUUID),
|
||||
}
|
||||
err = platform.NoteImageCreate(ctx, u, setter)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
renderShim(w, r, errRender(err))
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusAccepted)
|
||||
}
|
||||
|
||||
func apiImageContentGet(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
u_str := chi.URLParam(r, "uuid")
|
||||
vars := mux.Vars(r)
|
||||
u_str := vars["uuid"]
|
||||
imageUUID, err := uuid.Parse(u_str)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to parse image UUID")
|
||||
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
|
||||
}
|
||||
file.PublicImageFileToResponse(w, imageUUID)
|
||||
file.ImageFileToWriter(file.CollectionPublicImage, imageUUID, w)
|
||||
w.WriteHeader(http.StatusOK)
|
||||
}
|
||||
func apiImageContentPost(w http.ResponseWriter, r *http.Request, u platform.User) {
|
||||
u_str := chi.URLParam(r, "uuid")
|
||||
vars := mux.Vars(r)
|
||||
u_str := vars["uuid"]
|
||||
imageUUID, err := uuid.Parse(u_str)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("Failed to parse image UUID")
|
||||
http.Error(w, "Failed to parse image UUID", http.StatusBadRequest)
|
||||
}
|
||||
err = file.ImageFileContentWrite(imageUUID, r.Body)
|
||||
err = file.ImageFileFromReader(file.CollectionImageRaw, imageUUID, r.Body)
|
||||
if err != nil {
|
||||
render.Render(w, r, errRender(err))
|
||||
renderShim(w, r, errRender(err))
|
||||
return
|
||||
}
|
||||
w.WriteHeader(http.StatusOK)
|
||||
|
|
|
|||
54
api/lead.go
54
api/lead.go
|
|
@ -1,55 +1 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
)
|
||||
|
||||
type createLead struct {
|
||||
PoolLocations map[int]platform.Location `json:"pool_locations"`
|
||||
SignalIDs []int `json:"signal_ids"`
|
||||
}
|
||||
type createdLead struct {
|
||||
ID int32 `json:"id"`
|
||||
}
|
||||
type contentListLead struct {
|
||||
Leads []lead `json:"leads"`
|
||||
}
|
||||
type lead struct {
|
||||
ID int32 `json:"id"`
|
||||
}
|
||||
|
||||
func listLead(ctx context.Context, r *http.Request, user platform.User, query queryParams) (*contentListLead, *nhttp.ErrorWithStatus) {
|
||||
return &contentListLead{
|
||||
Leads: make([]lead, 0),
|
||||
}, nil
|
||||
}
|
||||
func postLeads(ctx context.Context, r *http.Request, user platform.User, req createLead) (*createdLead, *nhttp.ErrorWithStatus) {
|
||||
if len(req.SignalIDs) == 0 {
|
||||
return nil, nhttp.NewErrorStatus(http.StatusBadRequest, "can't make a lead with no signals")
|
||||
}
|
||||
if len(req.SignalIDs) > 1 {
|
||||
return nil, nhttp.NewErrorStatus(http.StatusBadRequest, "can't make a lead with multiple signals yet")
|
||||
}
|
||||
signal_id := req.SignalIDs[0]
|
||||
var pool_location *platform.Location
|
||||
l, ok := req.PoolLocations[signal_id]
|
||||
if ok {
|
||||
pool_location = &l
|
||||
}
|
||||
site_id, err := platform.SiteFromSignal(ctx, user, int32(signal_id))
|
||||
if err != nil || site_id == nil {
|
||||
return nil, nhttp.NewError("site from signal: %w", err)
|
||||
}
|
||||
lead_id, err := platform.LeadCreate(ctx, user, int32(signal_id), *site_id, pool_location)
|
||||
if err != nil || lead_id == nil {
|
||||
return nil, nhttp.NewError("lead create: %w", err)
|
||||
}
|
||||
|
||||
return &createdLead{
|
||||
ID: *lead_id,
|
||||
}, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -2,62 +2,49 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strconv"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
)
|
||||
|
||||
type formPublicreportLead struct {
|
||||
type formPublicreportSignal struct {
|
||||
ReportID string `json:"reportID"`
|
||||
}
|
||||
|
||||
func postPublicreportLead(ctx context.Context, r *http.Request, user platform.User, req formPublicreportLead) (*createdLead, *nhttp.ErrorWithStatus) {
|
||||
lead_id, err := platform.LeadCreateFromPublicreport(ctx, user, req.ReportID)
|
||||
func postPublicreportSignal(ctx context.Context, r *http.Request, user platform.User, req formPublicreportSignal) (string, *nhttp.ErrorWithStatus) {
|
||||
signal_id, err := platform.SignalCreateFromPublicreport(ctx, user, req.ReportID)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("create lead: %w", err)
|
||||
return "", nhttp.NewError("create signal: %w", err)
|
||||
}
|
||||
return &createdLead{
|
||||
ID: *lead_id,
|
||||
}, nil
|
||||
return fmt.Sprintf("/signal/%d", *signal_id), nil
|
||||
}
|
||||
|
||||
type formPublicreportInvalid struct {
|
||||
ReportID string `json:"reportID"`
|
||||
}
|
||||
type createdReport struct {
|
||||
URI string `json:"uri"`
|
||||
}
|
||||
|
||||
func postPublicreportInvalid(ctx context.Context, r *http.Request, user platform.User, req formPublicreportLead) (*createdReport, *nhttp.ErrorWithStatus) {
|
||||
err := platform.PublicreportInvalid(ctx, user, req.ReportID)
|
||||
func postPublicreportInvalid(ctx context.Context, r *http.Request, user platform.User, req formPublicreportSignal) (string, *nhttp.ErrorWithStatus) {
|
||||
err := platform.PublicReportInvalid(ctx, user, req.ReportID)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("create lead: %w", err)
|
||||
return "", nhttp.NewError("create signal: %w", err)
|
||||
}
|
||||
return &createdReport{
|
||||
URI: config.MakeURLNidus("/publicreport/%s", req.ReportID),
|
||||
}, nil
|
||||
return fmt.Sprintf("/publicreport/%s", req.ReportID), nil
|
||||
}
|
||||
|
||||
type formPublicreportMessage struct {
|
||||
Message string `json:"message"`
|
||||
ReportID string `json:"reportID"`
|
||||
}
|
||||
type createdMessage struct {
|
||||
URI string `json:"uri"`
|
||||
}
|
||||
|
||||
func postPublicreportMessage(ctx context.Context, r *http.Request, user platform.User, req formPublicreportMessage) (*createdMessage, *nhttp.ErrorWithStatus) {
|
||||
func postPublicreportMessage(ctx context.Context, r *http.Request, user platform.User, req formPublicreportMessage) (string, *nhttp.ErrorWithStatus) {
|
||||
msg_id, err := platform.PublicReportMessageCreate(ctx, user, req.ReportID, req.Message)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("failed to create message: %s", err)
|
||||
return "", nhttp.NewError("failed to create message: %s", err)
|
||||
}
|
||||
if msg_id == nil {
|
||||
return nil, nhttp.NewError("nil message id")
|
||||
return "", nhttp.NewError("nil message id")
|
||||
}
|
||||
return &createdMessage{
|
||||
URI: config.MakeURLNidus("/message/%s", strconv.Itoa(int(*msg_id))),
|
||||
}, nil
|
||||
return fmt.Sprintf("/message/%d", *msg_id), nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,25 +0,0 @@
|
|||
package api
|
||||
|
||||
type queryParams struct {
|
||||
Limit *int `schema:"limit"`
|
||||
Sort *string `schema:"sort"`
|
||||
Type *string `schema:"type"`
|
||||
}
|
||||
|
||||
func (qp queryParams) SortOrDefault(default_name string, ascending bool) (string, bool) {
|
||||
if qp.Sort == nil {
|
||||
return default_name, ascending
|
||||
}
|
||||
s := *qp.Sort
|
||||
if s == "" {
|
||||
return default_name, ascending
|
||||
}
|
||||
a := true
|
||||
if s[0] == '-' {
|
||||
a = false
|
||||
}
|
||||
if s[0] == '+' || s[0] == '-' {
|
||||
s = s[1:]
|
||||
}
|
||||
return s, a
|
||||
}
|
||||
147
api/review.go
147
api/review.go
|
|
@ -2,147 +2,28 @@ package api
|
|||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/um"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/enums"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/rs/zerolog/log"
|
||||
/*
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/geom"
|
||||
"github.com/aarondl/opt/omit"
|
||||
"github.com/aarondl/opt/omitnull"
|
||||
"github.com/stephenafamo/scan"
|
||||
*/)
|
||||
)
|
||||
|
||||
type reviewPoolUpdate struct {
|
||||
Condition *string `json:"condition"`
|
||||
Latitude *float32 `json:"latitude"`
|
||||
Longitude *float32 `json:"longitude"`
|
||||
}
|
||||
type createReviewPool struct {
|
||||
Status string `json:"status"`
|
||||
TaskID int32 `json:"task_id"`
|
||||
Updates *reviewPoolUpdate `json:"updates"`
|
||||
Status string `json:"status"`
|
||||
TaskID int32 `json:"task_id"`
|
||||
Updates *platform.PoolUpdate `json:"updates"`
|
||||
}
|
||||
type createdReviewPool struct{}
|
||||
|
||||
func postReviewPool(ctx context.Context, r *http.Request, user platform.User, req createReviewPool) (*createdReviewPool, *nhttp.ErrorWithStatus) {
|
||||
txn, err := db.PGInstance.BobDB.BeginTx(ctx, nil)
|
||||
func postReviewPool(ctx context.Context, r *http.Request, user platform.User, req createReviewPool) (string, *nhttp.ErrorWithStatus) {
|
||||
id, err := platform.ReviewPoolCreate(ctx, user, req.TaskID, req.Status, req.Updates)
|
||||
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("start txn: %w", err)
|
||||
if errors.As(err, &platform.ErrorNotFound{}) {
|
||||
return "", nhttp.NewErrorStatus(http.StatusNotFound, "review task %d not found", req.TaskID)
|
||||
}
|
||||
return "", nhttp.NewError("failed to set review: %w", err)
|
||||
}
|
||||
defer txn.Rollback(ctx)
|
||||
review_task, err := models.ReviewTasks.Query(
|
||||
models.SelectWhere.ReviewTasks.ID.EQ(req.TaskID),
|
||||
models.SelectWhere.ReviewTasks.OrganizationID.EQ(user.Organization.ID()),
|
||||
).One(ctx, txn)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewErrorStatus(http.StatusNotFound, "review task %d not found", req.TaskID)
|
||||
}
|
||||
var resolution enums.Reviewtaskresolutiontype
|
||||
err = resolution.Scan(req.Status)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewErrorStatus(http.StatusNotFound, "status '%s' is not recognized", req.Status)
|
||||
}
|
||||
review_task.Update(ctx, txn, &models.ReviewTaskSetter{
|
||||
Resolution: omitnull.From(resolution),
|
||||
Reviewed: omitnull.From(time.Now()),
|
||||
ReviewerID: omitnull.From(int32(user.ID)),
|
||||
})
|
||||
review_task_pool, err := models.ReviewTaskPools.Query(
|
||||
models.SelectWhere.ReviewTaskPools.ReviewTaskID.EQ(review_task.ID),
|
||||
).One(ctx, txn)
|
||||
var e *nhttp.ErrorWithStatus
|
||||
switch req.Status {
|
||||
case "discarded":
|
||||
e = discardReviewPool(ctx, txn, user, req, review_task_pool)
|
||||
case "committed":
|
||||
e = commitReviewPool(ctx, txn, user, req, review_task_pool)
|
||||
default:
|
||||
return nil, nhttp.NewErrorStatus(http.StatusBadRequest, "unrecognized status %s", req.Status)
|
||||
}
|
||||
if e != nil {
|
||||
return nil, e
|
||||
}
|
||||
txn.Commit(ctx)
|
||||
log.Info().Int32("id", review_task.ID).Str("status", req.Status).Msg("committed")
|
||||
return &createdReviewPool{}, e
|
||||
}
|
||||
func discardReviewPool(ctx context.Context, txn bob.Tx, user platform.User, req createReviewPool, review_task_pool *models.ReviewTaskPool) *nhttp.ErrorWithStatus {
|
||||
return nil
|
||||
}
|
||||
func commitReviewPool(ctx context.Context, txn bob.Tx, user platform.User, req createReviewPool, review_task_pool *models.ReviewTaskPool) *nhttp.ErrorWithStatus {
|
||||
if req.Updates == nil {
|
||||
return nil
|
||||
}
|
||||
up := *req.Updates
|
||||
feature_pool, err := models.FindFeaturePool(ctx, txn, review_task_pool.FeaturePoolID)
|
||||
if err != nil {
|
||||
return nhttp.NewError("find feature pool: %w", err)
|
||||
}
|
||||
if up.Condition != nil {
|
||||
var condition enums.Poolconditiontype
|
||||
err := condition.Scan(*up.Condition)
|
||||
if err != nil {
|
||||
return nhttp.NewErrorStatus(http.StatusBadRequest, "unrecognized condition %s", up.Condition)
|
||||
}
|
||||
err = review_task_pool.Update(ctx, txn, &models.ReviewTaskPoolSetter{
|
||||
Condition: omitnull.From(condition),
|
||||
})
|
||||
if err != nil {
|
||||
return nhttp.NewError("update rewiew task: %w", err)
|
||||
}
|
||||
err = feature_pool.Update(ctx, txn, &models.FeaturePoolSetter{
|
||||
Condition: omit.From(condition),
|
||||
})
|
||||
if err != nil {
|
||||
return nhttp.NewError("update feature_pool: %w", err)
|
||||
}
|
||||
}
|
||||
if up.Latitude != nil || up.Longitude != nil {
|
||||
if up.Latitude == nil || up.Longitude == nil {
|
||||
return nhttp.NewErrorStatus(http.StatusBadRequest, "you have to specify lat and lng together")
|
||||
}
|
||||
_, err = psql.Update(
|
||||
um.Table("review_task_pool"),
|
||||
um.SetCol("location").To(
|
||||
psql.F("ST_SetSRID",
|
||||
psql.F("ST_MakePoint",
|
||||
psql.Arg(*up.Longitude),
|
||||
psql.Arg(*up.Latitude),
|
||||
), psql.Arg(4326),
|
||||
),
|
||||
),
|
||||
um.Where(psql.Quote("review_task_pool", "review_task_id").EQ(psql.Arg(review_task_pool.ReviewTaskID))),
|
||||
).Exec(ctx, txn)
|
||||
if err != nil {
|
||||
return nhttp.NewError("save task: %w", err)
|
||||
}
|
||||
_, err = psql.Update(
|
||||
um.Table("feature"),
|
||||
um.SetCol("location").To(
|
||||
psql.F("ST_SetSRID",
|
||||
psql.F("ST_MakePoint",
|
||||
psql.Arg(*up.Longitude),
|
||||
psql.Arg(*up.Latitude),
|
||||
), psql.Arg(4326),
|
||||
),
|
||||
),
|
||||
um.Where(psql.Quote("feature", "id").EQ(psql.Arg(review_task_pool.FeaturePoolID))),
|
||||
).Exec(ctx, txn)
|
||||
if err != nil {
|
||||
return nhttp.NewError("save feature: %w", err)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return fmt.Sprintf("/review/%d", id), nil
|
||||
}
|
||||
|
|
|
|||
198
api/routes.go
198
api/routes.go
|
|
@ -1,49 +1,169 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/go-chi/render"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/file"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/resource"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
func AddRoutes(r chi.Router) {
|
||||
func AddRoutesRMO(r *mux.Router) {
|
||||
router := resource.NewRouter(r)
|
||||
|
||||
compliance_request := resource.ComplianceRequest(router)
|
||||
district := resource.District(router)
|
||||
geocode := resource.Geocode(router)
|
||||
nuisance := resource.Nuisance(router)
|
||||
pr_compliance := resource.PublicReportCompliance(router)
|
||||
publicreport := resource.Publicreport(router)
|
||||
publicreport_notification := resource.PublicreportNotification(router)
|
||||
qrcode := resource.QRCode(router)
|
||||
water := resource.Water(router)
|
||||
|
||||
r.HandleFunc("", handlerJSON(getRoot))
|
||||
r.HandleFunc("/compliance-request/image/pool/{public_id}", compliance_request.ImagePoolGet).Methods("GET").Name("compliance-request.image.pool.ByIDGet")
|
||||
r.Handle("/district", handlerJSONSlice(district.List)).Methods("GET")
|
||||
r.Handle("/district/{id}", handlerJSON(district.GetByID)).Methods("GET").Name("district.ByIDGet")
|
||||
r.HandleFunc("/district/{slug}/logo", apiGetDistrictLogo).Methods("GET").Name("district.logo.BySlug")
|
||||
r.Handle("/geocode/by-gid/{id:.*}", handlerJSON(geocode.ByGID)).Methods("GET")
|
||||
r.Handle("/geocode/reverse", handlerJSONPost(geocode.Reverse)).Methods("POST")
|
||||
r.Handle("/geocode/reverse/closest", handlerJSONPost(geocode.ReverseClosest)).Methods("POST")
|
||||
r.Handle("/geocode/suggestion", handlerJSONSlice(geocode.SuggestionList)).Methods("GET")
|
||||
|
||||
r.Handle("/publicreport-notification", handlerJSONPost(publicreport_notification.Create)).Methods("POST")
|
||||
r.Handle("/qr-code/mailer/{code}", handlerBasic(qrcode.Mailer)).Methods("GET")
|
||||
r.Handle("/qr-code/marketing", handlerBasic(qrcode.Marketing)).Methods("GET")
|
||||
r.Handle("/qr-code/report/{code}", handlerBasic(qrcode.Report)).Methods("GET")
|
||||
r.HandleFunc("/rmo/compliance", handlerJSONPost(pr_compliance.Create)).Methods("POST")
|
||||
r.HandleFunc("/rmo/nuisance", handlerFormPost(nuisance.Create)).Methods("POST")
|
||||
r.Handle("/rmo/publicreport/{id}", handlerBasic(publicreport.ByIDPublic)).Methods("GET").Name("publicreport.ByIDGetPublic")
|
||||
r.Handle("/rmo/publicreport/compliance/{id}/image", handlerFormPost(publicreport.ImageCreate)).Methods("POST")
|
||||
r.Handle("/rmo/publicreport/compliance/{id}", handlerJSON(pr_compliance.ByIDPublic)).Methods("GET").Name("publicreport.compliance.ByIDGetPublic")
|
||||
r.Handle("/rmo/publicreport/compliance/{id}", handlerJSONPut(pr_compliance.Update)).Methods("PUT")
|
||||
r.Handle("/rmo/publicreport/nuisance/{id}", handlerJSON(nuisance.ByIDPublic)).Methods("GET").Name("publicreport.nuisance.ByIDGetPublic")
|
||||
r.Handle("/rmo/publicreport/water/{id}", handlerJSON(water.ByIDPublic)).Methods("GET").Name("publicreport.water.ByIDGetPublic")
|
||||
r.Handle("/rmo/publicreport/{id}", handlerBasic(publicreport.ByIDPublic)).Methods("GET").Name("publicreport.ByIDGetPublicPublic")
|
||||
r.HandleFunc("/rmo/water", handlerFormPost(water.Create)).Methods("POST")
|
||||
}
|
||||
func AddRoutesSync(r *mux.Router) {
|
||||
router := resource.NewRouter(r)
|
||||
|
||||
compliance_request := resource.ComplianceRequest(router)
|
||||
district := resource.District(router)
|
||||
geocode := resource.Geocode(router)
|
||||
lob_hook := resource.LobHook(router)
|
||||
nuisance := resource.Nuisance(router)
|
||||
pr_compliance := resource.PublicReportCompliance(router)
|
||||
publicreport := resource.Publicreport(router)
|
||||
publicreport_notification := resource.PublicreportNotification(router)
|
||||
qrcode := resource.QRCode(router)
|
||||
service_request := resource.ServiceRequest(router)
|
||||
water := resource.Water(router)
|
||||
|
||||
//r.Use(render.SetContentType(render.ContentTypeJSON))
|
||||
// Unauthenticated endpoints
|
||||
r.HandleFunc("", handlerJSON(getRoot))
|
||||
r.HandleFunc("/compliance-request/image/pool/{public_id}", compliance_request.ImagePoolGet).Methods("GET").Name("compliance-request.image.pool.ByIDGet")
|
||||
r.Handle("/district", handlerJSONSlice(district.List)).Methods("GET")
|
||||
r.Handle("/district/{id}", handlerJSON(district.GetByID)).Methods("GET").Name("district.ByIDGet")
|
||||
r.HandleFunc("/district/{slug}/logo", apiGetDistrictLogo).Methods("GET").Name("district.logo.BySlug")
|
||||
r.Handle("/geocode/by-gid/{id:.*}", handlerJSON(geocode.ByGID)).Methods("GET")
|
||||
r.Handle("/geocode/reverse", handlerJSONPost(geocode.Reverse)).Methods("POST")
|
||||
r.Handle("/geocode/reverse/closest", handlerJSONPost(geocode.ReverseClosest)).Methods("POST")
|
||||
r.Handle("/geocode/suggestion", handlerJSONSlice(geocode.SuggestionList)).Methods("GET")
|
||||
r.Handle("/lob/event", handlerBasic(lob_hook.Event)).Methods("POST")
|
||||
|
||||
r.Handle("/publicreport-notification", handlerJSONPost(publicreport_notification.Create)).Methods("POST")
|
||||
r.Handle("/qr-code/mailer/{code}", handlerBasic(qrcode.Mailer)).Methods("GET")
|
||||
r.Handle("/qr-code/marketing", handlerBasic(qrcode.Marketing)).Methods("GET")
|
||||
r.Handle("/qr-code/report/{code}", handlerBasic(qrcode.Report)).Methods("GET")
|
||||
r.HandleFunc("/signin", handlerJSONPost(postSignin))
|
||||
r.Handle("/signout", authenticatedHandlerBasic(postSignout))
|
||||
r.HandleFunc("/signup", handlerJSONPost(postSignup))
|
||||
r.HandleFunc("/twilio/call", twilioCallPost).Methods("POST")
|
||||
r.HandleFunc("/twilio/call/status", twilioCallStatusPost).Methods("POST")
|
||||
r.HandleFunc("/twilio/message", twilioMessagePost).Methods("POST")
|
||||
r.HandleFunc("/twilio/text", twilioTextPost).Methods("POST")
|
||||
r.HandleFunc("/twilio/text/status", twilioTextStatusPost).Methods("POST")
|
||||
r.HandleFunc("/voipms/text", voipmsTextGet).Methods("GET")
|
||||
r.HandleFunc("/voipms/text", voipmsTextPost).Methods("POST")
|
||||
r.HandleFunc("/webhook/fieldseeker", webhookFieldseeker).Methods("GET")
|
||||
r.HandleFunc("/webhook/fieldseeker", webhookFieldseeker).Methods("POST")
|
||||
|
||||
// Authenticated endpoints
|
||||
r.Use(render.SetContentType(render.ContentTypeJSON))
|
||||
r.Method("POST", "/audio/{uuid}", auth.NewEnsureAuth(apiAudioPost))
|
||||
r.Method("POST", "/audio/{uuid}/content", auth.NewEnsureAuth(apiAudioContentPost))
|
||||
r.Method("GET", "/client/ios", auth.NewEnsureAuth(handleClientIos))
|
||||
r.Method("GET", "/communication", authenticatedHandlerJSON(listCommunication))
|
||||
r.Method("GET", "/events", auth.NewEnsureAuth(streamEvents))
|
||||
r.Method("POST", "/image/{uuid}", auth.NewEnsureAuth(apiImagePost))
|
||||
r.Method("GET", "/image/{uuid}/content", auth.NewEnsureAuth(apiImageContentGet))
|
||||
r.Method("POST", "/image/{uuid}/content", auth.NewEnsureAuth(apiImageContentPost))
|
||||
r.Method("GET", "/leads", authenticatedHandlerJSON(listLead))
|
||||
r.Method("POST", "/leads", authenticatedHandlerJSONPost(postLeads))
|
||||
r.Method("GET", "/mosquito-source", auth.NewEnsureAuth(apiMosquitoSource))
|
||||
r.Method("POST", "/publicreport/invalid", authenticatedHandlerJSONPost(postPublicreportInvalid))
|
||||
r.Method("POST", "/publicreport/lead", authenticatedHandlerJSONPost(postPublicreportLead))
|
||||
r.Method("POST", "/publicreport/message", authenticatedHandlerJSONPost(postPublicreportMessage))
|
||||
r.Method("POST", "/review/pool", authenticatedHandlerJSONPost(postReviewPool))
|
||||
r.Method("GET", "/review-task/pool", authenticatedHandlerJSON(listReviewTaskPool))
|
||||
r.Method("GET", "/service-request", auth.NewEnsureAuth(apiServiceRequest))
|
||||
r.Method("GET", "/signal", authenticatedHandlerJSON(listSignal))
|
||||
r.Method("GET", "/trap-data", auth.NewEnsureAuth(apiTrapData))
|
||||
r.Method("GET", "/tile/{z}/{y}/{x}", auth.NewEnsureAuth(getTile))
|
||||
r.Method("GET", "/user", authenticatedHandlerJSON(getUser))
|
||||
r.Handle("/audio/{uuid}", auth.NewEnsureAuth(apiAudioPost)).Methods("POST")
|
||||
r.Handle("/audio/{uuid}/content", auth.NewEnsureAuth(apiAudioContentPost)).Methods("POST")
|
||||
avatar := resource.Avatar(router)
|
||||
r.Handle("/avatar/{uuid}", authenticatedHandlerGetImage(avatar.ByUUIDGet)).Methods("GET").Name("avatar.ByUUIDGet")
|
||||
r.Handle("/avatar", authenticatedHandlerPostMultipart(avatar.Create, file.CollectionAvatar)).Methods("POST")
|
||||
r.Handle("/client/ios", auth.NewEnsureAuth(handleClientIos)).Methods("GET")
|
||||
communication := resource.Communication(router)
|
||||
r.Handle("/communication", authenticatedHandlerJSONSlice(communication.List)).Methods("GET")
|
||||
r.Handle("/communication/{id}", authenticatedHandlerJSON(communication.Get)).Methods("GET").Name("communication.ByIDGet")
|
||||
r.Handle("/communication/{id}/mark/invalid", authenticatedHandlerJSONPost(communication.MarkInvalid)).Methods("POST").Name("communication.MarkInvalid")
|
||||
r.Handle("/communication/{id}/mark/pending-response", authenticatedHandlerJSONPost(communication.MarkPendingResponse)).Methods("POST").Name("communication.MarkPendingResponse")
|
||||
r.Handle("/communication/{id}/mark/possible-issue", authenticatedHandlerJSONPost(communication.MarkPossibleIssue)).Methods("POST").Name("communication.MarkPossibleIssue")
|
||||
r.Handle("/communication/{id}/mark/possible-resolved", authenticatedHandlerJSONPost(communication.MarkPossibleResolved)).Methods("POST").Name("communication.MarkPossibleResolved")
|
||||
r.Handle("/compliance-request/mailer", authenticatedHandlerJSONPost(compliance_request.CreateMailer)).Methods("POST")
|
||||
//r.HandleFunc("/compliance-request/image/pool/{public_id}", getComplianceRequestImagePool).Methods("GET")
|
||||
r.Handle("/configuration/integration/arcgis", authenticatedHandlerJSONPost(postConfigurationIntegrationArcgis)).Methods("POST")
|
||||
r.Handle("/events", auth.NewEnsureAuth(streamEvents)).Methods("GET")
|
||||
r.Handle("/image/{uuid}", auth.NewEnsureAuth(apiImagePost)).Methods("POST")
|
||||
r.Handle("/image/{uuid}/content", auth.NewEnsureAuth(apiImageContentGet)).Methods("GET")
|
||||
r.Handle("/image/{uuid}/content", auth.NewEnsureAuth(apiImageContentPost)).Methods("POST")
|
||||
impersonation := resource.Impersonation(router)
|
||||
r.Handle("/impersonation", authenticatedHandlerJSONPost(impersonation.Create)).Methods("POST")
|
||||
r.Handle("/impersonation", authenticatedHandlerDelete(impersonation.Delete)).Methods("DELETE")
|
||||
lead := resource.Lead(r)
|
||||
r.Handle("/leads", authenticatedHandlerJSON(lead.List)).Methods("GET")
|
||||
r.Handle("/leads", authenticatedHandlerJSONPost(lead.Create)).Methods("POST")
|
||||
|
||||
mailer := resource.Mailer(router)
|
||||
r.Handle("/mailer", authenticatedHandlerJSONSlice(mailer.List)).Methods("GET")
|
||||
r.Handle("/mailer/{id}", authenticatedHandlerJSONPost(mailer.ByIDGet)).Methods("GET").Name("mailer.ByIDGet")
|
||||
r.Handle("/mosquito-source", auth.NewEnsureAuth(apiMosquitoSource)).Methods("GET")
|
||||
|
||||
r.Handle("/publicreport/invalid", authenticatedHandlerJSONPost(postPublicreportInvalid)).Methods("POST")
|
||||
r.Handle("/publicreport/signal", authenticatedHandlerJSONPost(postPublicreportSignal)).Methods("POST")
|
||||
r.Handle("/publicreport/message", authenticatedHandlerJSONPost(postPublicreportMessage)).Methods("POST")
|
||||
r.Handle("/publicreport/{id}", authenticatedHandlerBasic(publicreport.ByID)).Methods("GET").Name("publicreport.ByIDGet")
|
||||
r.Handle("/publicreport/compliance/{id}", authenticatedHandlerJSON(pr_compliance.ByID)).Methods("GET").Name("publicreport.compliance.ByIDGet")
|
||||
r.Handle("/publicreport/nuisance/{id}", authenticatedHandlerJSON(nuisance.ByID)).Methods("GET").Name("publicreport.nuisance.ByIDGet")
|
||||
r.Handle("/publicreport/water/{id}", authenticatedHandlerJSON(water.ByID)).Methods("GET").Name("publicreport.water.ByIDGet")
|
||||
|
||||
r.Handle("/publicreport-notification", handlerJSONPost(publicreport_notification.Create)).Methods("POST")
|
||||
r.Handle("/review/pool", authenticatedHandlerJSONPost(postReviewPool)).Methods("POST")
|
||||
review_task := resource.ReviewTask(r)
|
||||
r.Handle("/review-task", authenticatedHandlerJSON(review_task.List)).Methods("GET")
|
||||
r.Handle("/service-request", authenticatedHandlerJSONSlice(service_request.List)).Methods("GET")
|
||||
session := resource.Session(router)
|
||||
r.Handle("/session", authenticatedHandlerJSON(session.Get)).Methods("GET").Name("session.get")
|
||||
signal := resource.Signal(r)
|
||||
r.Handle("/signal", authenticatedHandlerJSON(signal.List)).Methods("GET")
|
||||
site := resource.Site(router)
|
||||
r.Handle("/site", authenticatedHandlerJSONSlice(site.List)).Methods("GET")
|
||||
r.Handle("/site/{id}", authenticatedHandlerJSON(site.ByIDGet)).Methods("GET").Name("site.ByIDGet")
|
||||
sync := resource.Sync(r)
|
||||
r.Handle("/sync", authenticatedHandlerJSONSlice(sync.List)).Methods("GET")
|
||||
r.Handle("/sudo/email", authenticatedHandlerJSONPost(postSudoEmail)).Methods("POST")
|
||||
r.Handle("/sudo/sms", authenticatedHandlerJSONPost(postSudoSMS)).Methods("POST")
|
||||
r.Handle("/sudo/sse", authenticatedHandlerJSONPost(postSudoSSE)).Methods("POST")
|
||||
r.Handle("/trap-data", auth.NewEnsureAuth(apiTrapData)).Methods("GET")
|
||||
r.Handle("/tile/{z}/{y}/{x}", auth.NewEnsureAuth(getTile)).Methods("GET")
|
||||
upload := resource.Upload(r)
|
||||
r.Handle("/upload/pool/custom", authenticatedHandlerPostMultipart(upload.PoolCustomCreate, file.CollectionCSV)).Methods("POST")
|
||||
r.Handle("/upload/pool/flyover", authenticatedHandlerPostMultipart(upload.PoolFlyoverCreate, file.CollectionCSV)).Methods("POST")
|
||||
r.Handle("/upload", authenticatedHandlerJSON(upload.List)).Methods("GET")
|
||||
r.Handle("/upload/{id}", authenticatedHandlerJSON(upload.ByIDGet)).Methods("GET")
|
||||
r.Handle("/upload/{id}/commit", authenticatedHandlerJSONPost(upload.Commit)).Methods("POST")
|
||||
r.Handle("/upload/{id}/discard", authenticatedHandlerJSONPost(upload.Discard)).Methods("POST")
|
||||
|
||||
user := resource.User(router)
|
||||
r.Handle("/user/self", authenticatedHandlerJSON(user.SelfGet)).Methods("GET")
|
||||
r.Handle("/user/suggestion", authenticatedHandlerJSON(user.SuggestionGet)).Methods("GET")
|
||||
r.Handle("/user", authenticatedHandlerJSONSlice(user.List)).Methods("GET")
|
||||
r.Handle("/user/{id}", authenticatedHandlerJSON(user.ByIDGet)).Methods("GET").Name("user.ByIDGet")
|
||||
r.Handle("/user/{id}", authenticatedHandlerJSONPut(user.ByIDPut)).Methods("PUT")
|
||||
|
||||
// Unauthenticated endpoints
|
||||
r.Get("/district", apiGetDistrict)
|
||||
r.Get("/district/{slug}/logo", apiGetDistrictLogo)
|
||||
r.Get("/compliance-request/image/pool/{public_id}", getComplianceRequestImagePool)
|
||||
r.Post("/signin", postSignin)
|
||||
r.Post("/twilio/call", twilioCallPost)
|
||||
r.Post("/twilio/call/status", twilioCallStatusPost)
|
||||
r.Post("/twilio/message", twilioMessagePost)
|
||||
r.Post("/twilio/text", twilioTextPost)
|
||||
r.Post("/twilio/text/status", twilioTextStatusPost)
|
||||
r.Get("/voipms/text", voipmsTextGet)
|
||||
r.Post("/voipms/text", voipmsTextPost)
|
||||
r.Get("/webhook/fieldseeker", webhookFieldseeker)
|
||||
r.Post("/webhook/fieldseeker", webhookFieldseeker)
|
||||
}
|
||||
|
|
|
|||
143
api/signal.go
143
api/signal.go
|
|
@ -1,144 +1 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql"
|
||||
"github.com/Gleipnir-Technology/bob/dialect/psql/sm"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
|
||||
//"github.com/aarondl/opt/null"
|
||||
"github.com/stephenafamo/scan"
|
||||
)
|
||||
|
||||
type signal struct {
|
||||
Address types.Address `json:"address"`
|
||||
Addressed *time.Time `json:"addressed"`
|
||||
Addressor *platform.User `json:"addressor"`
|
||||
Created time.Time `json:"created"`
|
||||
Creator platform.User `json:"creator"`
|
||||
ID int32 `json:"id"`
|
||||
Location types.Location `json:"location"`
|
||||
Species string `json:"species"`
|
||||
Title string `json:"title"`
|
||||
Type string `json:"type"`
|
||||
}
|
||||
type contentListSignal struct {
|
||||
Signals []signal `json:"signals"`
|
||||
}
|
||||
|
||||
func listSignal(ctx context.Context, r *http.Request, user platform.User, query queryParams) (*contentListSignal, *nhttp.ErrorWithStatus) {
|
||||
type _Row struct {
|
||||
Address types.Address `db:"address"`
|
||||
Addressed *time.Time `db:"addressed"`
|
||||
Addressor *int32 `db:"addressor"`
|
||||
Created time.Time `db:"created"`
|
||||
Creator int32 `db:"creator_id"`
|
||||
ID int32 `db:"id"`
|
||||
Latitude float64 `db:"latitude"`
|
||||
Longitude float64 `db:"longitude"`
|
||||
Location types.Location `db:"location"`
|
||||
Species *string `db:"species"`
|
||||
Title string `db:"title"`
|
||||
Type string `db:"type"`
|
||||
}
|
||||
limit := 20
|
||||
if query.Limit != nil {
|
||||
limit = *query.Limit
|
||||
}
|
||||
rows, err := bob.All(ctx, db.PGInstance.BobDB, psql.Select(
|
||||
sm.Columns(
|
||||
"signal.addressed AS addressed",
|
||||
"signal.addressor AS addressor",
|
||||
"signal.created AS created",
|
||||
"signal.creator AS creator_id",
|
||||
"signal.id AS id",
|
||||
"signal.species AS species",
|
||||
"signal.title AS title",
|
||||
"signal.type_ AS type",
|
||||
"address.country AS \"address.country\"",
|
||||
"address.locality AS \"address.locality\"",
|
||||
"address.number_ AS \"address.number\"",
|
||||
"address.postal_code AS \"address.postal_code\"",
|
||||
"address.region AS \"address.region\"",
|
||||
"address.street AS \"address.street\"",
|
||||
"address.unit AS \"address.unit\"",
|
||||
"ST_Y(address.geom) AS latitude",
|
||||
"ST_X(address.geom) AS longitude",
|
||||
),
|
||||
sm.From("signal"),
|
||||
sm.InnerJoin("signal_pool").OnEQ(
|
||||
psql.Quote("signal", "id"),
|
||||
psql.Quote("signal_pool", "signal_id"),
|
||||
),
|
||||
sm.InnerJoin("pool").OnEQ(
|
||||
psql.Quote("signal_pool", "pool_id"),
|
||||
psql.Quote("pool", "id"),
|
||||
),
|
||||
sm.InnerJoin("site").On(
|
||||
psql.Quote("pool", "site_id").EQ(psql.Quote("site", "id")),
|
||||
),
|
||||
sm.InnerJoin("address").OnEQ(
|
||||
psql.Quote("site", "address_id"),
|
||||
psql.Quote("address", "id"),
|
||||
),
|
||||
sm.Where(psql.Quote("signal", "organization_id").EQ(psql.Arg(user.Organization.ID()))),
|
||||
sm.Where(psql.Quote("signal", "addressed").IsNull()),
|
||||
sm.Limit(limit),
|
||||
), scan.StructMapper[_Row]())
|
||||
|
||||
/*
|
||||
rows, err := models.Signals.Query(
|
||||
models.SelectWhere.Signals.OrganizationID.EQ(org.ID),
|
||||
sm.OrderBy("created").Desc(),
|
||||
).All(ctx, db.PGInstance.BobDB)
|
||||
*/
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("failed to get signals: %w", err)
|
||||
}
|
||||
users_by_id, err := platform.UsersByOrg(ctx, user.Organization)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("users by id: %w", err)
|
||||
}
|
||||
signals := make([]signal, len(rows))
|
||||
for i, row := range rows {
|
||||
var species string = ""
|
||||
if row.Species != nil {
|
||||
species = *row.Species
|
||||
}
|
||||
signals[i] = signal{
|
||||
Address: row.Address,
|
||||
Addressed: row.Addressed,
|
||||
Addressor: userOrNil(users_by_id, row.Addressor),
|
||||
Created: row.Created,
|
||||
Creator: *users_by_id[row.Creator],
|
||||
ID: row.ID,
|
||||
Location: types.Location{
|
||||
Latitude: row.Latitude,
|
||||
Longitude: row.Longitude,
|
||||
},
|
||||
Species: species,
|
||||
Title: row.Title,
|
||||
Type: row.Type,
|
||||
}
|
||||
}
|
||||
return &contentListSignal{
|
||||
Signals: signals,
|
||||
}, nil
|
||||
}
|
||||
|
||||
func userOrNil(usersByID map[int32]*platform.User, id *int32) *platform.User {
|
||||
if id == nil {
|
||||
return nil
|
||||
}
|
||||
u, ok := usersByID[*id]
|
||||
if !ok {
|
||||
return nil
|
||||
}
|
||||
return u
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,46 +1,46 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
"github.com/go-chi/render"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func postSignin(w http.ResponseWriter, r *http.Request) {
|
||||
if err := r.ParseForm(); err != nil {
|
||||
render.Render(w, r, errRender(fmt.Errorf("Failed to parse POST form: %w", err)))
|
||||
return
|
||||
}
|
||||
type reqSignin struct {
|
||||
Password string `schema:"password"`
|
||||
Username string `schema:"username"`
|
||||
}
|
||||
|
||||
username := r.FormValue("username")
|
||||
password := r.FormValue("password")
|
||||
|
||||
if password == "" || username == "" {
|
||||
w.Header().Set("WWW-Authenticate-Error", "no-credentials")
|
||||
http.Error(w, "invalid-credentials", http.StatusUnauthorized)
|
||||
return
|
||||
func postSignin(ctx context.Context, r *http.Request, req reqSignin) (string, *nhttp.ErrorWithStatus) {
|
||||
if req.Password == "" {
|
||||
return "", nhttp.NewBadRequest("Empty password")
|
||||
}
|
||||
log.Info().Str("username", username).Msg("API Signin")
|
||||
_, err := auth.SigninUser(r, username, password)
|
||||
if req.Username == "" {
|
||||
return "", nhttp.NewBadRequest("Empty username")
|
||||
}
|
||||
log.Info().Str("username", req.Username).Msg("API Signin")
|
||||
_, err := auth.SigninUser(r, req.Username, req.Password)
|
||||
if err != nil {
|
||||
if errors.Is(err, auth.InvalidCredentials{}) {
|
||||
w.Header().Set("WWW-Authenticate-Error", "invalid-credentials")
|
||||
http.Error(w, "invalid-credentials", http.StatusUnauthorized)
|
||||
return
|
||||
return "", nhttp.NewUnauthorized("invalid credentials")
|
||||
}
|
||||
if errors.Is(err, auth.InvalidUsername{}) {
|
||||
w.Header().Set("WWW-Authenticate-Error", "invalid-credentials")
|
||||
http.Error(w, "invalid-credentials", http.StatusUnauthorized)
|
||||
return
|
||||
return "", nhttp.NewUnauthorized("invalid credentials")
|
||||
}
|
||||
log.Error().Err(err).Str("username", username).Msg("Login server error")
|
||||
http.Error(w, "signin-server-error", http.StatusInternalServerError)
|
||||
return
|
||||
if errors.Is(err, platform.NoUserError{}) {
|
||||
return "", nhttp.NewUnauthorized("invalid credentials")
|
||||
}
|
||||
log.Error().Err(err).Str("username", req.Username).Msg("Login server error")
|
||||
return "", nhttp.NewError("login server error")
|
||||
}
|
||||
|
||||
http.Error(w, "", http.StatusAccepted)
|
||||
return "/", nil
|
||||
}
|
||||
func postSignout(ctx context.Context, w http.ResponseWriter, r *http.Request, u platform.User) *nhttp.ErrorWithStatus {
|
||||
auth.SignoutUser(r, u)
|
||||
return nil
|
||||
}
|
||||
|
|
|
|||
37
api/signup.go
Normal file
37
api/signup.go
Normal file
|
|
@ -0,0 +1,37 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/auth"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
type reqSignup struct {
|
||||
Username string `json:"username"`
|
||||
Name string `json:"name"`
|
||||
Password string `json:"password"`
|
||||
Terms bool `json:"terms"`
|
||||
}
|
||||
|
||||
func postSignup(ctx context.Context, r *http.Request, signup reqSignup) (string, *nhttp.ErrorWithStatus) {
|
||||
|
||||
log.Info().Str("username", signup.Username).Str("name", signup.Name).Str("password", strings.Repeat("*", len(signup.Password))).Msg("Signup")
|
||||
|
||||
if !signup.Terms {
|
||||
log.Warn().Msg("Terms not agreed")
|
||||
return "", nhttp.NewErrorStatus(http.StatusBadRequest, "You must agree to the terms to register")
|
||||
}
|
||||
|
||||
user, err := auth.SignupUser(r.Context(), signup.Username, signup.Name, signup.Password)
|
||||
if err != nil {
|
||||
return "", nhttp.NewError("Failed to signup user", err)
|
||||
}
|
||||
|
||||
auth.AddUserSession(ctx, user)
|
||||
|
||||
return "/", nil
|
||||
}
|
||||
104
api/sudo.go
Normal file
104
api/sudo.go
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/comms/email"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/comms/text"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/html"
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
type contentSudo struct {
|
||||
ForwardEmailRMOAddress string
|
||||
ForwardEmailNidusAddress string
|
||||
}
|
||||
|
||||
func getSudo(ctx context.Context, r *http.Request, user platform.User) (*html.Response[contentSudo], *nhttp.ErrorWithStatus) {
|
||||
if !user.HasRoot() {
|
||||
return nil, &nhttp.ErrorWithStatus{
|
||||
Message: "You have to be a root user to access this",
|
||||
Status: http.StatusForbidden,
|
||||
}
|
||||
}
|
||||
content := contentSudo{
|
||||
ForwardEmailRMOAddress: config.ForwardEmailRMOAddress,
|
||||
ForwardEmailNidusAddress: config.ForwardEmailNidusAddress,
|
||||
}
|
||||
return html.NewResponse("sync/sudo.html", content), nil
|
||||
}
|
||||
|
||||
type FormEmail struct {
|
||||
Body string `schema:"emailBody"`
|
||||
From string `schema:"emailFrom"`
|
||||
Subject string `schema:"emailSubject"`
|
||||
To string `schema:"emailTo"`
|
||||
}
|
||||
|
||||
func postSudoEmail(ctx context.Context, r *http.Request, u platform.User, e FormEmail) (string, *nhttp.ErrorWithStatus) {
|
||||
if !u.HasRoot() {
|
||||
return "", &nhttp.ErrorWithStatus{
|
||||
Message: "You must have sudo powers to do this",
|
||||
Status: http.StatusForbidden,
|
||||
}
|
||||
}
|
||||
request := email.Request{
|
||||
From: e.From,
|
||||
HTML: fmt.Sprintf("<html><p>%s</p></html>", e.Body),
|
||||
Sender: e.From,
|
||||
Subject: e.Subject,
|
||||
To: e.To,
|
||||
Text: e.Body,
|
||||
}
|
||||
resp, err := email.Send(ctx, request)
|
||||
if err != nil {
|
||||
log.Warn().Err(err).Msg("Failed to send email")
|
||||
} else {
|
||||
log.Info().Str("id", resp.ID).Str("to", e.To).Msg("Sent Email")
|
||||
}
|
||||
return "/sudo", nil
|
||||
}
|
||||
|
||||
type FormSMS struct {
|
||||
Message string `schema:"smsMessage"`
|
||||
Phone string `schema:"smsPhone"`
|
||||
}
|
||||
|
||||
func postSudoSMS(ctx context.Context, r *http.Request, u platform.User, sms FormSMS) (string, *nhttp.ErrorWithStatus) {
|
||||
if !u.HasRoot() {
|
||||
return "", &nhttp.ErrorWithStatus{
|
||||
Message: "You must have sudo powers to do this",
|
||||
Status: http.StatusForbidden,
|
||||
}
|
||||
}
|
||||
id, err := text.SendText(ctx, config.VoipMSNumber, sms.Phone, sms.Message)
|
||||
if err != nil {
|
||||
log.Warn().Err(err).Msg("Failed to send SMS")
|
||||
} else {
|
||||
log.Info().Str("id", id).Msg("Sent SMS")
|
||||
}
|
||||
return "/sudo", nil
|
||||
}
|
||||
|
||||
type FormSSE struct {
|
||||
OrganizationID int32 `schema:"organizationID"`
|
||||
Resource string `schema:"resource"`
|
||||
Type string `schema:"type"`
|
||||
URIPath string `schema:"uriPath"`
|
||||
}
|
||||
|
||||
func postSudoSSE(ctx context.Context, r *http.Request, u platform.User, sse FormSSE) (string, *nhttp.ErrorWithStatus) {
|
||||
if !u.HasRoot() {
|
||||
return "", &nhttp.ErrorWithStatus{
|
||||
Message: "You must have sudo powers to do this",
|
||||
Status: http.StatusForbidden,
|
||||
}
|
||||
}
|
||||
platform.SudoEvent(sse.OrganizationID, sse.Resource, sse.Type, sse.URIPath)
|
||||
return "/sudo", nil
|
||||
}
|
||||
14
api/tile.go
14
api/tile.go
|
|
@ -5,14 +5,15 @@ import (
|
|||
"strconv"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/go-chi/chi/v5"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/gorilla/mux"
|
||||
//"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
func getTile(w http.ResponseWriter, r *http.Request, user platform.User) {
|
||||
x_str := chi.URLParam(r, "x")
|
||||
y_str := chi.URLParam(r, "y")
|
||||
z_str := chi.URLParam(r, "z")
|
||||
vars := mux.Vars(r)
|
||||
x_str := vars["x"]
|
||||
y_str := vars["y"]
|
||||
z_str := vars["z"]
|
||||
|
||||
x, err := strconv.Atoi(x_str)
|
||||
if err != nil {
|
||||
|
|
@ -29,9 +30,8 @@ func getTile(w http.ResponseWriter, r *http.Request, user platform.User) {
|
|||
http.Error(w, "can't parse x as an integer", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
err = platform.GetTile(r.Context(), w, user.Organization, uint(z), uint(y), uint(x))
|
||||
err = platform.GetTile(r.Context(), w, user.Organization, true, uint(z), uint(y), uint(x))
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to do tile")
|
||||
http.Error(w, "failed to do tile", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
|
|
|||
67
api/types.go
67
api/types.go
|
|
@ -7,8 +7,9 @@ import (
|
|||
"github.com/Gleipnir-Technology/nidus-sync/db/models"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/h3utils"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform/types"
|
||||
"github.com/aarondl/opt/null"
|
||||
"github.com/go-chi/render"
|
||||
//"github.com/gorilla/mux"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
|
|
@ -91,11 +92,10 @@ type NoteAudioBreadcrumbPayload struct {
|
|||
|
||||
type ResponseFieldseeker struct {
|
||||
MosquitoSources []ResponseMosquitoSource `json:"sources"`
|
||||
ServiceRequests []ResponseServiceRequest `json:"requests"`
|
||||
ServiceRequests []types.ServiceRequest `json:"requests"`
|
||||
TrapData []ResponseTrapData `json:"traps"`
|
||||
}
|
||||
|
||||
// ResponseErr renderer type for handling all sorts of errors.
|
||||
type ResponseClientIos struct {
|
||||
Fieldseeker ResponseFieldseeker `json:"fieldseeker"`
|
||||
Since time.Time `json:"since"`
|
||||
|
|
@ -105,23 +105,6 @@ func (i ResponseClientIos) Render(w http.ResponseWriter, r *http.Request) error
|
|||
return nil
|
||||
}
|
||||
|
||||
// In the best case scenario, the excellent github.com/pkg/errors package
|
||||
// helps reveal information on the error, setting it on Err, and in the Render()
|
||||
// method, using it to set the application-specific error code in AppCode.
|
||||
type ResponseErr struct {
|
||||
Error error `json:"-"` // low-level runtime error
|
||||
HTTPStatusCode int `json:"-"` // http response status code
|
||||
|
||||
StatusText string `json:"status"` // user-level status message
|
||||
AppCode int64 `json:"code,omitempty"` // application-specific error code
|
||||
ErrorText string `json:"error,omitempty"` // application-level error message, for debugging
|
||||
}
|
||||
|
||||
func (e *ResponseErr) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
render.Status(r, e.HTTPStatusCode)
|
||||
return nil
|
||||
}
|
||||
|
||||
type ResponseMosquitoInspection struct {
|
||||
ActionTaken string `json:"action_taken"`
|
||||
Comments string `json:"comments"`
|
||||
|
|
@ -252,48 +235,10 @@ func (rtd ResponseNote) Render(w http.ResponseWriter, r *http.Request) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
type ResponseServiceRequest struct {
|
||||
Address string `json:"address"`
|
||||
AssignedTechnician string `json:"assigned_technician"`
|
||||
City string `json:"city"`
|
||||
Created string `json:"created"`
|
||||
H3Cell int64 `json:"h3cell"`
|
||||
HasDog *bool `json:"has_dog"`
|
||||
HasSpanishSpeaker *bool `json:"has_spanish_speaker"`
|
||||
ID string `json:"id"`
|
||||
Priority string `json:"priority"`
|
||||
RecordedDate string `json:"recorded_date"`
|
||||
Source string `json:"source"`
|
||||
Status string `json:"status"`
|
||||
Target string `json:"target"`
|
||||
Zip string `json:"zip"`
|
||||
}
|
||||
|
||||
func (srr ResponseServiceRequest) Render(w http.ResponseWriter, r *http.Request) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func NewResponseServiceRequest(sr *models.FieldseekerServicerequest) ResponseServiceRequest {
|
||||
return ResponseServiceRequest{
|
||||
Address: sr.Reqaddr1.GetOr(""),
|
||||
AssignedTechnician: sr.Assignedtech.GetOr(""),
|
||||
City: sr.Reqcity.GetOr(""),
|
||||
Created: formatTime(sr.Creationdate),
|
||||
//H3Cell: sr.H3Cell,
|
||||
HasDog: toBool(sr.Dog),
|
||||
HasSpanishSpeaker: toBool(sr.Spanish),
|
||||
ID: sr.Globalid.String(),
|
||||
Priority: sr.Priority.GetOr(""),
|
||||
Status: sr.Status.GetOr(""),
|
||||
Source: sr.Source.GetOr(""),
|
||||
Target: sr.Reqtarget.GetOr(""),
|
||||
Zip: sr.Reqzip.GetOr(""),
|
||||
}
|
||||
}
|
||||
func NewResponseServiceRequests(requests models.FieldseekerServicerequestSlice) []ResponseServiceRequest {
|
||||
results := make([]ResponseServiceRequest, 0)
|
||||
func NewResponseServiceRequests(requests models.FieldseekerServicerequestSlice) []types.ServiceRequest {
|
||||
results := make([]types.ServiceRequest, 0)
|
||||
for _, i := range requests {
|
||||
results = append(results, NewResponseServiceRequest(i))
|
||||
results = append(results, types.ServiceRequestFromModel(i))
|
||||
}
|
||||
return results
|
||||
}
|
||||
|
|
|
|||
1
api/upload.go
Normal file
1
api/upload.go
Normal file
|
|
@ -0,0 +1 @@
|
|||
package api
|
||||
17
api/user.go
17
api/user.go
|
|
@ -1,18 +1 @@
|
|||
package api
|
||||
|
||||
import (
|
||||
"context"
|
||||
"net/http"
|
||||
|
||||
nhttp "github.com/Gleipnir-Technology/nidus-sync/http"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/platform"
|
||||
)
|
||||
|
||||
func getUser(ctx context.Context, r *http.Request, user platform.User, query queryParams) (*platform.User, *nhttp.ErrorWithStatus) {
|
||||
counts, err := platform.NotificationCountsForUser(ctx, user)
|
||||
if err != nil {
|
||||
return nil, nhttp.NewError("get notifications: %w", err)
|
||||
}
|
||||
user.NotificationCounts = *counts
|
||||
return &user, nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
Subproject commit f5ec5c75c10bf711aa31ff0df56b445fbc2e208e
|
||||
Subproject commit 63cc8b573739294ea98f7e39d2baec3cd70dfd7f
|
||||
118
auth/auth.go
118
auth/auth.go
|
|
@ -13,9 +13,9 @@ import (
|
|||
"golang.org/x/crypto/bcrypt"
|
||||
)
|
||||
|
||||
type NoCredentialsError struct{}
|
||||
type InactiveUser struct{}
|
||||
|
||||
func (e NoCredentialsError) Error() string { return "No credentials were present in the request" }
|
||||
func (e InactiveUser) Error() string { return "That user is not active" }
|
||||
|
||||
type InvalidCredentials struct{}
|
||||
|
||||
|
|
@ -25,21 +25,59 @@ type InvalidUsername struct{}
|
|||
|
||||
func (e InvalidUsername) Error() string { return "That username doesn't exist" }
|
||||
|
||||
type NoCredentialsError struct{}
|
||||
|
||||
func (e NoCredentialsError) Error() string { return "No credentials were present in the request" }
|
||||
|
||||
type AuthenticatedHandler func(http.ResponseWriter, *http.Request, platform.User)
|
||||
type EnsureAuth struct {
|
||||
handler AuthenticatedHandler
|
||||
}
|
||||
|
||||
func AddUserSession(r *http.Request, user *platform.User) {
|
||||
id := strconv.Itoa(int(user.ID))
|
||||
sessionManager.Put(r.Context(), "user_id", id)
|
||||
sessionManager.Put(r.Context(), "username", user.Username)
|
||||
log.Debug().Str("id", id).Str("username", user.Username).Msg("added user session")
|
||||
func AddUserSession(ctx context.Context, user *platform.User) {
|
||||
id_str := strconv.Itoa(int(user.ID))
|
||||
sessionManager.Put(ctx, "user_id", id_str)
|
||||
sessionManager.Put(ctx, "username", user.Username)
|
||||
log.Debug().Str("id", id_str).Str("username", user.Username).Msg("added user session")
|
||||
}
|
||||
func ImpersonateEnd(ctx context.Context) {
|
||||
sessionManager.Put(ctx, "impersonated_user_id", "")
|
||||
}
|
||||
func ImpersonateUser(ctx context.Context, target_user_id int) {
|
||||
target_user_id_str := strconv.Itoa(int(target_user_id))
|
||||
sessionManager.Put(ctx, "impersonated_user_id", target_user_id_str)
|
||||
}
|
||||
func ImpersonatedUser(ctx context.Context) *int32 {
|
||||
i_str := sessionManager.GetString(ctx, "impersonated_user_id")
|
||||
if i_str == "" {
|
||||
return nil
|
||||
}
|
||||
i, err := strconv.Atoi(i_str)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Str("impersonated_user_id", i_str).Msg("failed to parse impersonated_user_id")
|
||||
return nil
|
||||
}
|
||||
result := int32(i)
|
||||
return &result
|
||||
}
|
||||
func ImpersonatorID(ctx context.Context) *int32 {
|
||||
user_id_str := sessionManager.GetString(ctx, "user_id")
|
||||
user_id, err := strconv.Atoi(user_id_str)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Str("user_id", user_id_str).Msg("failed to parse user_id")
|
||||
return nil
|
||||
}
|
||||
result := int32(user_id)
|
||||
return &result
|
||||
|
||||
}
|
||||
func GetAuthenticatedUser(r *http.Request) (*platform.User, error) {
|
||||
ctx := r.Context()
|
||||
user_id_str := sessionManager.GetString(ctx, "user_id")
|
||||
impersonated_user_id_str := sessionManager.GetString(ctx, "impersonated_user_id")
|
||||
if impersonated_user_id_str != "" {
|
||||
user_id_str = impersonated_user_id_str
|
||||
}
|
||||
if user_id_str != "" {
|
||||
user_id, err := strconv.Atoi(user_id_str)
|
||||
if err != nil {
|
||||
|
|
@ -47,7 +85,14 @@ func GetAuthenticatedUser(r *http.Request) (*platform.User, error) {
|
|||
}
|
||||
username := sessionManager.GetString(ctx, "username")
|
||||
if user_id > 0 && username != "" {
|
||||
return platform.UserByID(ctx, int32(user_id))
|
||||
user, err := platform.UserByID(ctx, int32(user_id))
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("user by ID: %w", err)
|
||||
}
|
||||
if !user.IsActive {
|
||||
return nil, fmt.Errorf("user is inactive")
|
||||
}
|
||||
return user, nil
|
||||
}
|
||||
}
|
||||
// If we can't get the user from the session try to get from auth headers
|
||||
|
|
@ -59,7 +104,7 @@ func GetAuthenticatedUser(r *http.Request) (*platform.User, error) {
|
|||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
AddUserSession(r, user)
|
||||
AddUserSession(ctx, user)
|
||||
return user, nil
|
||||
}
|
||||
|
||||
|
|
@ -69,33 +114,39 @@ func NewEnsureAuth(handlerToWrap AuthenticatedHandler) *EnsureAuth {
|
|||
|
||||
func (ea *EnsureAuth) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
// If this is an API request respond with a more machine-readable error state
|
||||
accept := r.Header.Values("Accept")
|
||||
offers := []string{"application/json", "text/html"}
|
||||
accept := r.Header.Get("Accept")
|
||||
/*
|
||||
offers := []string{"application/json", "text/html"}
|
||||
|
||||
content_type := NegotiateContent(accept, offers)
|
||||
content_type := NegotiateContent(accept, offers)
|
||||
*/
|
||||
user, err := GetAuthenticatedUser(r)
|
||||
if err != nil || user == nil {
|
||||
var msg []byte
|
||||
// Separate return codes for different authentication failures
|
||||
if _, ok := err.(*NoCredentialsError); ok {
|
||||
log.Info().Msg("No credentials present and no session")
|
||||
w.Header().Set("WWW-Authenticate-Error", "no-credentials")
|
||||
msg = []byte("Please provide credentials.\n")
|
||||
} else if _, ok := err.(*platform.NoUserError); ok {
|
||||
w.Header().Set("WWW-Authenticate-Error", "invalid-credentials")
|
||||
msg = []byte("Invalid credentials provided.\n")
|
||||
} else if _, ok := err.(*InvalidCredentials); ok {
|
||||
w.Header().Set("WWW-Authenticate-Error", "invalid-credentials")
|
||||
msg = []byte("Invalid credentials provided.\n")
|
||||
// Don't send authentication headers for browsers because it forces the authentication popup
|
||||
requested_with := r.Header.Get("X-Requested-With")
|
||||
//log.Debug().Str("x-requested-with", requested_with).Send()
|
||||
if !strings.HasPrefix(requested_with, "nidus-web") && accept != "text/event-stream" {
|
||||
w.Header().Set("WWW-Authenticate", `Basic realm="Nidus Sync"`)
|
||||
// Separate return codes for different authentication failures
|
||||
if _, ok := err.(*NoCredentialsError); ok {
|
||||
log.Info().Msg("No credentials present and no session")
|
||||
w.Header().Set("WWW-Authenticate-Error", "no-credentials")
|
||||
msg = []byte("Please provide credentials.\n")
|
||||
} else if _, ok := err.(*platform.NoUserError); ok {
|
||||
w.Header().Set("WWW-Authenticate-Error", "invalid-credentials")
|
||||
msg = []byte("Invalid credentials provided.\n")
|
||||
} else if _, ok := err.(*InvalidCredentials); ok {
|
||||
w.Header().Set("WWW-Authenticate-Error", "invalid-credentials")
|
||||
msg = []byte("Invalid credentials provided.\n")
|
||||
}
|
||||
}
|
||||
|
||||
if content_type == "text/html" {
|
||||
http.Redirect(w, r, "/signin?next="+r.URL.Path, http.StatusSeeOther)
|
||||
return
|
||||
}
|
||||
w.Header().Set("WWW-Authenticate", `Basic realm="Nidus Sync"`)
|
||||
w.WriteHeader(401)
|
||||
w.Write(msg)
|
||||
_, err = w.Write(msg)
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to write response")
|
||||
}
|
||||
return
|
||||
}
|
||||
ea.handler(w, r, *user)
|
||||
|
|
@ -108,13 +159,17 @@ func SigninUser(r *http.Request, username string, password string) (*platform.Us
|
|||
if user == nil {
|
||||
return nil, errors.New("No matching user")
|
||||
}
|
||||
AddUserSession(r, user)
|
||||
AddUserSession(r.Context(), user)
|
||||
return user, nil
|
||||
}
|
||||
|
||||
func SignoutUser(r *http.Request, user platform.User) {
|
||||
sessionManager.Put(r.Context(), "user_id", "")
|
||||
sessionManager.Put(r.Context(), "username", "")
|
||||
err := sessionManager.Destroy(r.Context())
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to destroy session for user on signout")
|
||||
}
|
||||
log.Info().Str("username", user.Username).Int("user_id", (user.ID)).Msg("Ended user session")
|
||||
}
|
||||
|
||||
|
|
@ -168,6 +223,9 @@ func validateUser(ctx context.Context, username string, password string) (*platf
|
|||
log.Info().Str("username", username).Str("password", redact(password)).Msg("Invalid username")
|
||||
return nil, InvalidUsername{}
|
||||
}
|
||||
if !user.IsActive {
|
||||
return nil, InactiveUser{}
|
||||
}
|
||||
if !validatePassword(password, user.PasswordHash) {
|
||||
log.Info().Str("username", username).Str("password", redact(password)).Str("hash", passwordHash).Msg("Invalid password for user")
|
||||
return nil, InvalidCredentials{}
|
||||
|
|
|
|||
|
|
@ -3,9 +3,9 @@ package auth
|
|||
import (
|
||||
"time"
|
||||
|
||||
"github.com/alexedwards/scs/v2"
|
||||
"github.com/alexedwards/scs/pgxstore"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/alexedwards/scs/pgxstore"
|
||||
"github.com/alexedwards/scs/v2"
|
||||
)
|
||||
|
||||
var sessionManager *scs.SessionManager
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ func main() {
|
|||
}
|
||||
|
||||
func scanValue(message string, result *string) {
|
||||
fmt.Printf(message)
|
||||
fmt.Print("%s", message)
|
||||
scanner := bufio.NewScanner(os.Stdin)
|
||||
if ok := scanner.Scan(); !ok {
|
||||
log.Fatal(errors.New("Failed to scan input"))
|
||||
|
|
|
|||
53
cmd/test-jet/main.go
Normal file
53
cmd/test-jet/main.go
Normal file
|
|
@ -0,0 +1,53 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/db/query/public"
|
||||
)
|
||||
|
||||
func main() {
|
||||
err := config.Parse()
|
||||
if err != nil {
|
||||
log.Printf("failed on config: %v", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
ctx := context.TODO()
|
||||
err = db.InitializeDatabase(ctx, config.PGDSN)
|
||||
if err != nil {
|
||||
log.Printf("failed on db: %v", err)
|
||||
os.Exit(2)
|
||||
}
|
||||
|
||||
txn, err := db.BeginTxn(ctx)
|
||||
if err != nil {
|
||||
log.Printf("failed on txn: %v", err)
|
||||
os.Exit(3)
|
||||
}
|
||||
defer txn.Rollback(ctx)
|
||||
log.Printf("doing address")
|
||||
gid := "openaddresses:address:us/ca/tulare-addresses-county:0dc28458fd03e3fa"
|
||||
address, err := public.AddressFromGID(ctx, txn, gid)
|
||||
if err != nil {
|
||||
log.Printf("failed on query: %v", err)
|
||||
os.Exit(4)
|
||||
}
|
||||
//log.Printf("address %d lat %f lng %f", address.ID, *address.LocationLatitude, *address.LocationLongitude)
|
||||
log.Printf("Address id %d location %s", address.ID, address.Location)
|
||||
txn.Commit(ctx)
|
||||
|
||||
/*
|
||||
log.Printf("doing comm")
|
||||
id := int64(1)
|
||||
comm, err := public.CommunicationFromID(ctx, id)
|
||||
if err != nil {
|
||||
log.Printf("failed on query: %v", err)
|
||||
os.Exit(4)
|
||||
}
|
||||
log.Printf("communication %d", comm.ID)
|
||||
*/
|
||||
}
|
||||
|
|
@ -5,12 +5,13 @@ import (
|
|||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"io"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
|
||||
"github.com/Gleipnir-Technology/nidus-sync/config"
|
||||
"github.com/Gleipnir-Technology/nidus-sync/lint"
|
||||
"github.com/rs/zerolog/log"
|
||||
)
|
||||
|
||||
|
|
@ -87,10 +88,10 @@ func makeVoipMSRequest(params url.Values) (VoipMSResponse, error) {
|
|||
log.Warn().Err(err).Str("url", full_url).Msg("Failed to make request to Voip.MS")
|
||||
return result, fmt.Errorf("Error making request: %w", err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
defer lint.LogOnErr(resp.Body.Close, "failed closing response body")
|
||||
|
||||
// Read the response body
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
body, err := io.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
log.Warn().Err(err).Str("url", full_url).Msg("Failed to read Voip.MS response body")
|
||||
return result, fmt.Errorf("Failed to read response: %w", err)
|
||||
|
|
|
|||
|
|
@ -26,12 +26,14 @@ var (
|
|||
ForwardEmailNidusAddress string
|
||||
ForwardEmailNidusPassword string
|
||||
ForwardEmailNidusUsername string
|
||||
LobAPIKey string
|
||||
PGDSN string
|
||||
PhoneNumberReport phonenumbers.PhoneNumber
|
||||
PhoneNumberReportStr string
|
||||
PhoneNumberSupport phonenumbers.PhoneNumber
|
||||
PhoneNumberSupportStr string
|
||||
SentryDSN string
|
||||
SentryDSNFrontend string
|
||||
StadiaMapsAPIKey string
|
||||
TextProvider string
|
||||
TwilioAuthToken string
|
||||
|
|
@ -96,7 +98,7 @@ func Parse() (err error) {
|
|||
if Environment == "" {
|
||||
return fmt.Errorf("You must specify a non-empty ENVIRONMENT")
|
||||
}
|
||||
if !(Environment == "PRODUCTION" || Environment == "DEVELOPMENT") {
|
||||
if Environment != "PRODUCTION" && Environment != "DEVELOPMENT" {
|
||||
return fmt.Errorf("ENVIRONMENT should be either DEVELOPMENT or PRODUCTION")
|
||||
}
|
||||
FieldseekerSchemaDirectory = os.Getenv("FIELDSEEKER_SCHEMA_DIRECTORY")
|
||||
|
|
@ -135,6 +137,10 @@ func Parse() (err error) {
|
|||
if ForwardEmailNidusPassword == "" {
|
||||
return fmt.Errorf("You must specify a non-empty FORWARDEMAIL_NIDUS_PASSWORD")
|
||||
}
|
||||
LobAPIKey = os.Getenv("LOB_API_KEY")
|
||||
if LobAPIKey == "" {
|
||||
return fmt.Errorf("You must specify a non-empty LOB_API_KEY")
|
||||
}
|
||||
PGDSN = os.Getenv("POSTGRES_DSN")
|
||||
if PGDSN == "" {
|
||||
return fmt.Errorf("You must specify a non-empty POSTGRES_DSN")
|
||||
|
|
@ -163,6 +169,10 @@ func Parse() (err error) {
|
|||
if SentryDSN == "" {
|
||||
return fmt.Errorf("You must specify a non-empty SENTRY_DSN")
|
||||
}
|
||||
SentryDSNFrontend = os.Getenv("SENTRY_DSN_FRONTEND")
|
||||
if SentryDSNFrontend == "" {
|
||||
return fmt.Errorf("You must specify a non-empty SENTRY_DSN_FRONTEND")
|
||||
}
|
||||
StadiaMapsAPIKey = os.Getenv("STADIA_MAPS_API_KEY")
|
||||
if StadiaMapsAPIKey == "" {
|
||||
return fmt.Errorf("You must specify a non-empty STADIA_MAPS_API_KEY")
|
||||
|
|
@ -209,5 +219,5 @@ func Parse() (err error) {
|
|||
}
|
||||
|
||||
func ArcGISOauthRedirectURL() string {
|
||||
return MakeURLNidus("/arcgis/oauth/callback")
|
||||
return MakeURLNidus("/oauth/arcgis/callback")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -18,10 +18,10 @@ aliases:
|
|||
no_tests: true
|
||||
psql:
|
||||
schemas:
|
||||
- "arcgis"
|
||||
- "comms"
|
||||
- "fieldseeker"
|
||||
- "fileupload"
|
||||
- "lob"
|
||||
- "public"
|
||||
- "publicreport"
|
||||
- "tile"
|
||||
|
|
|
|||
155
db/connection.go
155
db/connection.go
|
|
@ -7,38 +7,148 @@ import (
|
|||
"errors"
|
||||
"fmt"
|
||||
"io/fs"
|
||||
"sync"
|
||||
|
||||
//"github.com/georgysavva/scany/v2/pgxscan"
|
||||
//"github.com/jackc/pgx/v5"
|
||||
"github.com/Gleipnir-Technology/bob"
|
||||
"github.com/Gleipnir-Technology/jet/postgres"
|
||||
"github.com/jackc/pgx/v5"
|
||||
"github.com/jackc/pgx/v5/pgxpool"
|
||||
"github.com/jackc/pgx/v5/stdlib"
|
||||
_ "github.com/jackc/pgx/v5/stdlib"
|
||||
"github.com/pressly/goose/v3"
|
||||
"github.com/rs/zerolog/log"
|
||||
"github.com/stephenafamo/scan"
|
||||
pgxgeom "github.com/twpayne/pgx-geom"
|
||||
)
|
||||
|
||||
var ErrNoRows = pgx.ErrNoRows
|
||||
|
||||
//go:embed migrations/*.sql
|
||||
var embedMigrations embed.FS
|
||||
|
||||
type postgres struct {
|
||||
type pginstance struct {
|
||||
BobDB bob.DB
|
||||
PGXPool *pgxpool.Pool
|
||||
}
|
||||
|
||||
var (
|
||||
PGInstance *postgres
|
||||
pgOnce sync.Once
|
||||
PGInstance *pginstance
|
||||
)
|
||||
|
||||
func ExecuteNone(ctx context.Context, stmt postgres.Statement) error {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
_, err := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
return err
|
||||
}
|
||||
func ExecuteNoneTx(ctx context.Context, txn Ex, stmt postgres.Statement) error {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
r, err := txn.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("query: %w", err)
|
||||
}
|
||||
r.Close()
|
||||
return nil
|
||||
}
|
||||
func ExecuteNoneTxBob(ctx context.Context, txn bob.Tx, stmt postgres.Statement) error {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
r, err := txn.QueryContext(ctx, query, args...)
|
||||
if err != nil {
|
||||
return fmt.Errorf("query: %w", err)
|
||||
}
|
||||
r.Close()
|
||||
return nil
|
||||
}
|
||||
func ExecuteOne[T any](ctx context.Context, stmt postgres.Statement) (T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
var result T
|
||||
row, err := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return result, fmt.Errorf("execute query: %w", err)
|
||||
}
|
||||
var collected *T
|
||||
collected, err = pgx.CollectOneRow(row, pgx.RowToAddrOfStructByPos[T])
|
||||
if err != nil || collected == nil {
|
||||
return result, fmt.Errorf("collect row: %w", err)
|
||||
}
|
||||
return *collected, nil
|
||||
}
|
||||
func ExecuteOneTx[T any](ctx context.Context, txn Ex, stmt postgres.Statement) (T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
//result, err := scan.One(ctx, txn, scan.StructMapper[T](), query, args...)
|
||||
row, err := txn.Query(ctx, query, args...)
|
||||
var result T
|
||||
if err != nil {
|
||||
return result, fmt.Errorf("txn query: %w", err)
|
||||
}
|
||||
var collected *T
|
||||
collected, err = pgx.CollectOneRow(row, pgx.RowToAddrOfStructByPos[T])
|
||||
if err != nil || collected == nil {
|
||||
return result, fmt.Errorf("collect row: %w", err)
|
||||
}
|
||||
return *collected, nil
|
||||
}
|
||||
func ExecuteOneTxBob[T any](ctx context.Context, txn bob.Tx, stmt postgres.Statement) (T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
return scan.One(ctx, txn, scan.StructMapper[T](), query, args...)
|
||||
}
|
||||
func ExecuteMany[T any](ctx context.Context, stmt postgres.Statement) ([]T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
rows, err := PGInstance.PGXPool.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("execute query: %w", err)
|
||||
}
|
||||
collected, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByPos[T])
|
||||
if err != nil {
|
||||
return []T{}, fmt.Errorf("collect rows: %w", err)
|
||||
}
|
||||
results := make([]T, len(collected))
|
||||
for i, c := range collected {
|
||||
if c == nil {
|
||||
return results, fmt.Errorf("null collected")
|
||||
}
|
||||
results[i] = *c
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
func ExecuteManyTx[T any](ctx context.Context, txn Ex, stmt postgres.Statement) ([]T, error) {
|
||||
query, args := stmt.Sql()
|
||||
|
||||
rows, err := txn.Query(ctx, query, args...)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("execute query: %w", err)
|
||||
}
|
||||
collected, err := pgx.CollectRows(rows, pgx.RowToAddrOfStructByPos[T])
|
||||
if err != nil {
|
||||
return []T{}, fmt.Errorf("collect rows: %w", err)
|
||||
}
|
||||
results := make([]T, len(collected))
|
||||
for i, c := range collected {
|
||||
if c == nil {
|
||||
return results, fmt.Errorf("null collected")
|
||||
}
|
||||
results[i] = *c
|
||||
}
|
||||
return results, nil
|
||||
}
|
||||
func doMigrations(connection_string string) error {
|
||||
log.Debug().Str("dsn", connection_string).Msg("Connecting to database")
|
||||
db, err := sql.Open("pgx", connection_string)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to open database connection: %w", err)
|
||||
}
|
||||
defer db.Close()
|
||||
defer func() {
|
||||
err := db.Close()
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to close database connection")
|
||||
}
|
||||
}()
|
||||
row := db.QueryRowContext(context.Background(), "SELECT version()")
|
||||
var val string
|
||||
if err := row.Scan(&val); err != nil {
|
||||
|
|
@ -95,15 +205,23 @@ func InitializeDatabase(ctx context.Context, uri string) error {
|
|||
log.Debug().Msg("No database migrations necessary")
|
||||
}
|
||||
|
||||
pgOnce.Do(func() {
|
||||
db, e := pgxpool.New(ctx, uri)
|
||||
bobDB := bob.NewDB(stdlib.OpenDBFromPool(db))
|
||||
PGInstance = &postgres{bobDB, db}
|
||||
err = e
|
||||
})
|
||||
config, err := pgxpool.ParseConfig(uri)
|
||||
if err != nil {
|
||||
return fmt.Errorf("unable to create connection pool: %w", err)
|
||||
return fmt.Errorf("parse config: %w", err)
|
||||
}
|
||||
config.AfterConnect = func(ctx2 context.Context, conn *pgx.Conn) error {
|
||||
err2 := pgxgeom.Register(ctx, conn)
|
||||
if err2 != nil {
|
||||
return fmt.Errorf("pgxgeom register: %w", err2)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
db, err := pgxpool.NewWithConfig(ctx, config)
|
||||
if err != nil {
|
||||
return fmt.Errorf("new pool: %w", err)
|
||||
}
|
||||
bobDB := bob.NewDB(stdlib.OpenDBFromPool(db))
|
||||
PGInstance = &pginstance{bobDB, db}
|
||||
|
||||
var current string
|
||||
query := `SELECT current_database()`
|
||||
|
|
@ -111,10 +229,6 @@ func InitializeDatabase(ctx context.Context, uri string) error {
|
|||
if err != nil {
|
||||
return fmt.Errorf("Failed to get database current: %w", err)
|
||||
}
|
||||
err = prepareStatements(ctx)
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to initialize prepared statements: %w", err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
|
|
@ -123,7 +237,12 @@ func needsMigrations(connection_string string) (*bool, error) {
|
|||
if err != nil {
|
||||
return nil, fmt.Errorf("Failed to open database connection: %w", err)
|
||||
}
|
||||
defer db.Close()
|
||||
defer func() {
|
||||
err := db.Close()
|
||||
if err != nil {
|
||||
log.Error().Err(err).Msg("failed to close database connection")
|
||||
}
|
||||
}()
|
||||
row := db.QueryRowContext(context.Background(), "SELECT version()")
|
||||
var val string
|
||||
if err := row.Scan(&val); err != nil {
|
||||
|
|
|
|||
|
|
@ -10,8 +10,17 @@ var AddressErrors = &addressErrors{
|
|||
columns: []string{"id"},
|
||||
s: "address_pkey",
|
||||
},
|
||||
|
||||
ErrUniqueAddressGidUnique: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "address",
|
||||
columns: []string{"gid"},
|
||||
s: "address_gid_unique",
|
||||
},
|
||||
}
|
||||
|
||||
type addressErrors struct {
|
||||
ErrUniqueAddressPkey *UniqueConstraintError
|
||||
|
||||
ErrUniqueAddressGidUnique *UniqueConstraintError
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisAccountErrors = &arcgisAccountErrors{
|
||||
ErrUniqueAccountPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "account",
|
||||
columns: []string{"id"},
|
||||
s: "account_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisAccountErrors struct {
|
||||
ErrUniqueAccountPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisAddressMappingErrors = &arcgisAddressMappingErrors{
|
||||
ErrUniqueAddressMappingPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "address_mapping",
|
||||
columns: []string{"organization_id", "destination"},
|
||||
s: "address_mapping_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisAddressMappingErrors struct {
|
||||
ErrUniqueAddressMappingPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisLayerErrors = &arcgisLayerErrors{
|
||||
ErrUniqueLayerPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "layer",
|
||||
columns: []string{"feature_service_item_id", "index_"},
|
||||
s: "layer_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisLayerErrors struct {
|
||||
ErrUniqueLayerPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisLayerFieldErrors = &arcgisLayerFieldErrors{
|
||||
ErrUniqueLayerFieldPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "layer_field",
|
||||
columns: []string{"layer_feature_service_item_id", "layer_index", "name"},
|
||||
s: "layer_field_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisLayerFieldErrors struct {
|
||||
ErrUniqueLayerFieldPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisOauthTokenErrors = &arcgisOauthTokenErrors{
|
||||
ErrUniqueOauthTokenPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "oauth_token",
|
||||
columns: []string{"id"},
|
||||
s: "oauth_token_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisOauthTokenErrors struct {
|
||||
ErrUniqueOauthTokenPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisParcelMappingErrors = &arcgisParcelMappingErrors{
|
||||
ErrUniqueParcelMappingPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "parcel_mapping",
|
||||
columns: []string{"organization_id", "destination"},
|
||||
s: "parcel_mapping_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisParcelMappingErrors struct {
|
||||
ErrUniqueParcelMappingPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisServiceFeatureErrors = &arcgisServiceFeatureErrors{
|
||||
ErrUniqueFeatureServicePkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "service_feature",
|
||||
columns: []string{"item_id"},
|
||||
s: "feature_service_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureErrors struct {
|
||||
ErrUniqueFeatureServicePkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisServiceMapErrors = &arcgisServiceMapErrors{
|
||||
ErrUniqueServiceMapPkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "service_map",
|
||||
columns: []string{"arcgis_id"},
|
||||
s: "service_map_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisServiceMapErrors struct {
|
||||
ErrUniqueServiceMapPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisUserErrors = &arcgisuserErrors{
|
||||
ErrUniqueUser_Pkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "user_",
|
||||
columns: []string{"id"},
|
||||
s: "user__pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisuserErrors struct {
|
||||
ErrUniqueUser_Pkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -1,17 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var ArcgisUserPrivilegeErrors = &arcgisUserPrivilegeErrors{
|
||||
ErrUniqueUserPrivilegePkey: &UniqueConstraintError{
|
||||
schema: "arcgis",
|
||||
table: "user_privilege",
|
||||
columns: []string{"user_id", "privilege"},
|
||||
s: "user_privilege_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeErrors struct {
|
||||
ErrUniqueUserPrivilegePkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/communication.bob.go
Normal file
17
db/dberrors/communication.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommunicationErrors = &communicationErrors{
|
||||
ErrUniqueCommunicationPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "communication",
|
||||
columns: []string{"id"},
|
||||
s: "communication_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type communicationErrors struct {
|
||||
ErrUniqueCommunicationPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/communication_log_entry.bob.go
Normal file
17
db/dberrors/communication_log_entry.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var CommunicationLogEntryErrors = &communicationLogEntryErrors{
|
||||
ErrUniqueCommunicationLogEntryPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "communication_log_entry",
|
||||
columns: []string{"id"},
|
||||
s: "communication_log_entry_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type communicationLogEntryErrors struct {
|
||||
ErrUniqueCommunicationLogEntryPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -4,6 +4,13 @@
|
|||
package dberrors
|
||||
|
||||
var ComplianceReportRequestMailerErrors = &complianceReportRequestMailerErrors{
|
||||
ErrUniqueComplianceReportRequestMailerPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "compliance_report_request_mailer",
|
||||
columns: []string{"id"},
|
||||
s: "compliance_report_request_mailer_pkey",
|
||||
},
|
||||
|
||||
ErrUniqueComplianceReportRequestMaiComplianceReportRequestId_Key: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "compliance_report_request_mailer",
|
||||
|
|
@ -13,5 +20,7 @@ var ComplianceReportRequestMailerErrors = &complianceReportRequestMailerErrors{
|
|||
}
|
||||
|
||||
type complianceReportRequestMailerErrors struct {
|
||||
ErrUniqueComplianceReportRequestMailerPkey *UniqueConstraintError
|
||||
|
||||
ErrUniqueComplianceReportRequestMaiComplianceReportRequestId_Key *UniqueConstraintError
|
||||
}
|
||||
|
|
|
|||
17
db/dberrors/lob.event.bob.go
Normal file
17
db/dberrors/lob.event.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var LobEventErrors = &lobEventErrors{
|
||||
ErrUniqueEventPkey: &UniqueConstraintError{
|
||||
schema: "lob",
|
||||
table: "event",
|
||||
columns: []string{"id"},
|
||||
s: "event_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type lobEventErrors struct {
|
||||
ErrUniqueEventPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/log_impersonation.bob.go
Normal file
17
db/dberrors/log_impersonation.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var LogImpersonationErrors = &logImpersonationErrors{
|
||||
ErrUniqueLogImpersonationPkey: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "log_impersonation",
|
||||
columns: []string{"id"},
|
||||
s: "log_impersonation_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type logImpersonationErrors struct {
|
||||
ErrUniqueLogImpersonationPkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -10,8 +10,17 @@ var NoteImageErrors = ¬eImageErrors{
|
|||
columns: []string{"version", "uuid"},
|
||||
s: "note_image_pkey",
|
||||
},
|
||||
|
||||
ErrUniqueNoteImageIdUnique: &UniqueConstraintError{
|
||||
schema: "",
|
||||
table: "note_image",
|
||||
columns: []string{"id"},
|
||||
s: "note_image_id_unique",
|
||||
},
|
||||
}
|
||||
|
||||
type noteImageErrors struct {
|
||||
ErrUniqueNoteImagePkey *UniqueConstraintError
|
||||
|
||||
ErrUniqueNoteImageIdUnique *UniqueConstraintError
|
||||
}
|
||||
|
|
|
|||
17
db/dberrors/publicreport.client.bob.go
Normal file
17
db/dberrors/publicreport.client.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var PublicreportClientErrors = &publicreportClientErrors{
|
||||
ErrUniqueClientPkey: &UniqueConstraintError{
|
||||
schema: "publicreport",
|
||||
table: "client",
|
||||
columns: []string{"uuid"},
|
||||
s: "client_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type publicreportClientErrors struct {
|
||||
ErrUniqueClientPkey *UniqueConstraintError
|
||||
}
|
||||
17
db/dberrors/publicreport.compliance.bob.go
Normal file
17
db/dberrors/publicreport.compliance.bob.go
Normal file
|
|
@ -0,0 +1,17 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var PublicreportComplianceErrors = &publicreportComplianceErrors{
|
||||
ErrUniqueCompliancePkey: &UniqueConstraintError{
|
||||
schema: "publicreport",
|
||||
table: "compliance",
|
||||
columns: []string{"report_id"},
|
||||
s: "compliance_pkey",
|
||||
},
|
||||
}
|
||||
|
||||
type publicreportComplianceErrors struct {
|
||||
ErrUniqueCompliancePkey *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -7,7 +7,7 @@ var TileCachedImageErrors = &tileCachedImageErrors{
|
|||
ErrUniqueCachedImagePkey: &UniqueConstraintError{
|
||||
schema: "tile",
|
||||
table: "cached_image",
|
||||
columns: []string{"arcgis_id", "x", "y", "z"},
|
||||
columns: []string{"service_id", "x", "y", "z"},
|
||||
s: "cached_image_pkey",
|
||||
},
|
||||
}
|
||||
|
|
|
|||
26
db/dberrors/tile.service.bob.go
Normal file
26
db/dberrors/tile.service.bob.go
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dberrors
|
||||
|
||||
var TileServiceErrors = &tileServiceErrors{
|
||||
ErrUniqueServicePkey: &UniqueConstraintError{
|
||||
schema: "tile",
|
||||
table: "service",
|
||||
columns: []string{"id"},
|
||||
s: "service_pkey",
|
||||
},
|
||||
|
||||
ErrUniqueServiceNameUnique: &UniqueConstraintError{
|
||||
schema: "tile",
|
||||
table: "service",
|
||||
columns: []string{"name"},
|
||||
s: "service_name_unique",
|
||||
},
|
||||
}
|
||||
|
||||
type tileServiceErrors struct {
|
||||
ErrUniqueServicePkey *UniqueConstraintError
|
||||
|
||||
ErrUniqueServiceNameUnique *UniqueConstraintError
|
||||
}
|
||||
|
|
@ -17,7 +17,7 @@ var Addresses = Table[
|
|||
Columns: addressColumns{
|
||||
Country: column{
|
||||
Name: "country",
|
||||
DBType: "public.countrytype",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
|
|
@ -114,6 +114,15 @@ var Addresses = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Gid: column{
|
||||
Name: "gid",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: addressIndexes{
|
||||
AddressPkey: index{
|
||||
|
|
@ -133,6 +142,23 @@ var Addresses = Table[
|
|||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
AddressGidUnique: index{
|
||||
Type: "btree",
|
||||
Name: "address_gid_unique",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "gid",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
IdxAddressGeom: index{
|
||||
Type: "gist",
|
||||
Name: "idx_address_geom",
|
||||
|
|
@ -157,6 +183,14 @@ var Addresses = Table[
|
|||
Comment: "",
|
||||
},
|
||||
|
||||
Uniques: addressUniques{
|
||||
AddressGidUnique: constraint{
|
||||
Name: "address_gid_unique",
|
||||
Columns: []string{"gid"},
|
||||
Comment: "",
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
|
|
@ -172,22 +206,24 @@ type addressColumns struct {
|
|||
Unit column
|
||||
Region column
|
||||
Number column
|
||||
Gid column
|
||||
}
|
||||
|
||||
func (c addressColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Country, c.Created, c.Location, c.H3cell, c.ID, c.Locality, c.PostalCode, c.Street, c.Unit, c.Region, c.Number,
|
||||
c.Country, c.Created, c.Location, c.H3cell, c.ID, c.Locality, c.PostalCode, c.Street, c.Unit, c.Region, c.Number, c.Gid,
|
||||
}
|
||||
}
|
||||
|
||||
type addressIndexes struct {
|
||||
AddressPkey index
|
||||
IdxAddressGeom index
|
||||
AddressPkey index
|
||||
AddressGidUnique index
|
||||
IdxAddressGeom index
|
||||
}
|
||||
|
||||
func (i addressIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.AddressPkey, i.IdxAddressGeom,
|
||||
i.AddressPkey, i.AddressGidUnique, i.IdxAddressGeom,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -197,10 +233,14 @@ func (f addressForeignKeys) AsSlice() []foreignKey {
|
|||
return []foreignKey{}
|
||||
}
|
||||
|
||||
type addressUniques struct{}
|
||||
type addressUniques struct {
|
||||
AddressGidUnique constraint
|
||||
}
|
||||
|
||||
func (u addressUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
return []constraint{
|
||||
u.AddressGidUnique,
|
||||
}
|
||||
}
|
||||
|
||||
type addressChecks struct{}
|
||||
|
|
|
|||
|
|
@ -1,177 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisAccounts = Table[
|
||||
arcgisAccountColumns,
|
||||
arcgisAccountIndexes,
|
||||
arcgisAccountForeignKeys,
|
||||
arcgisAccountUniques,
|
||||
arcgisAccountChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "account",
|
||||
Columns: arcgisAccountColumns{
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Name: column{
|
||||
Name: "name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
OrganizationID: column{
|
||||
Name: "organization_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLFeatures: column{
|
||||
Name: "url_features",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLInsights: column{
|
||||
Name: "url_insights",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLGeometry: column{
|
||||
Name: "url_geometry",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLNotebooks: column{
|
||||
Name: "url_notebooks",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URLTiles: column{
|
||||
Name: "url_tiles",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisAccountIndexes{
|
||||
AccountPkey: index{
|
||||
Type: "btree",
|
||||
Name: "account_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "account_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisAccountForeignKeys{
|
||||
ArcgisAccountAccountOrganizationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.account.account_organization_id_fkey",
|
||||
Columns: []string{"organization_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "organization",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisAccountColumns struct {
|
||||
ID column
|
||||
Name column
|
||||
OrganizationID column
|
||||
URLFeatures column
|
||||
URLInsights column
|
||||
URLGeometry column
|
||||
URLNotebooks column
|
||||
URLTiles column
|
||||
}
|
||||
|
||||
func (c arcgisAccountColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.ID, c.Name, c.OrganizationID, c.URLFeatures, c.URLInsights, c.URLGeometry, c.URLNotebooks, c.URLTiles,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAccountIndexes struct {
|
||||
AccountPkey index
|
||||
}
|
||||
|
||||
func (i arcgisAccountIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.AccountPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAccountForeignKeys struct {
|
||||
ArcgisAccountAccountOrganizationIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisAccountForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisAccountAccountOrganizationIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAccountUniques struct{}
|
||||
|
||||
func (u arcgisAccountUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisAccountChecks struct{}
|
||||
|
||||
func (c arcgisAccountChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,162 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisAddressMappings = Table[
|
||||
arcgisAddressMappingColumns,
|
||||
arcgisAddressMappingIndexes,
|
||||
arcgisAddressMappingForeignKeys,
|
||||
arcgisAddressMappingUniques,
|
||||
arcgisAddressMappingChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "address_mapping",
|
||||
Columns: arcgisAddressMappingColumns{
|
||||
Destination: column{
|
||||
Name: "destination",
|
||||
DBType: "arcgis.mappingdestinationaddress",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerFeatureServiceItemID: column{
|
||||
Name: "layer_feature_service_item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerIndex: column{
|
||||
Name: "layer_index",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerFieldName: column{
|
||||
Name: "layer_field_name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
OrganizationID: column{
|
||||
Name: "organization_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisAddressMappingIndexes{
|
||||
AddressMappingPkey: index{
|
||||
Type: "btree",
|
||||
Name: "address_mapping_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "organization_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "destination",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "address_mapping_pkey",
|
||||
Columns: []string{"organization_id", "destination"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisAddressMappingForeignKeys{
|
||||
ArcgisAddressMappingAddressMappingLayerFeatureServiceItemIDLayerIndexFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.address_mapping.address_mapping_layer_feature_service_item_id_layer_index__fkey",
|
||||
Columns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.layer_field",
|
||||
ForeignColumns: []string{"layer_feature_service_item_id", "layer_index", "name"},
|
||||
},
|
||||
ArcgisAddressMappingAddressMappingOrganizationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.address_mapping.address_mapping_organization_id_fkey",
|
||||
Columns: []string{"organization_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "organization",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisAddressMappingColumns struct {
|
||||
Destination column
|
||||
LayerFeatureServiceItemID column
|
||||
LayerIndex column
|
||||
LayerFieldName column
|
||||
OrganizationID column
|
||||
}
|
||||
|
||||
func (c arcgisAddressMappingColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Destination, c.LayerFeatureServiceItemID, c.LayerIndex, c.LayerFieldName, c.OrganizationID,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAddressMappingIndexes struct {
|
||||
AddressMappingPkey index
|
||||
}
|
||||
|
||||
func (i arcgisAddressMappingIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.AddressMappingPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAddressMappingForeignKeys struct {
|
||||
ArcgisAddressMappingAddressMappingLayerFeatureServiceItemIDLayerIndexFkey foreignKey
|
||||
ArcgisAddressMappingAddressMappingOrganizationIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisAddressMappingForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisAddressMappingAddressMappingLayerFeatureServiceItemIDLayerIndexFkey, f.ArcgisAddressMappingAddressMappingOrganizationIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisAddressMappingUniques struct{}
|
||||
|
||||
func (u arcgisAddressMappingUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisAddressMappingChecks struct{}
|
||||
|
||||
func (c arcgisAddressMappingChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,132 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisLayers = Table[
|
||||
arcgisLayerColumns,
|
||||
arcgisLayerIndexes,
|
||||
arcgisLayerForeignKeys,
|
||||
arcgisLayerUniques,
|
||||
arcgisLayerChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "layer",
|
||||
Columns: arcgisLayerColumns{
|
||||
Extent: column{
|
||||
Name: "extent",
|
||||
DBType: "box2d",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
FeatureServiceItemID: column{
|
||||
Name: "feature_service_item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Index: column{
|
||||
Name: "index_",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisLayerIndexes{
|
||||
LayerPkey: index{
|
||||
Type: "btree",
|
||||
Name: "layer_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "feature_service_item_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "index_",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "layer_pkey",
|
||||
Columns: []string{"feature_service_item_id", "index_"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisLayerForeignKeys{
|
||||
ArcgisLayerLayerFeatureServiceItemIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.layer.layer_feature_service_item_id_fkey",
|
||||
Columns: []string{"feature_service_item_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.service_feature",
|
||||
ForeignColumns: []string{"item_id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisLayerColumns struct {
|
||||
Extent column
|
||||
FeatureServiceItemID column
|
||||
Index column
|
||||
}
|
||||
|
||||
func (c arcgisLayerColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Extent, c.FeatureServiceItemID, c.Index,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerIndexes struct {
|
||||
LayerPkey index
|
||||
}
|
||||
|
||||
func (i arcgisLayerIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.LayerPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerForeignKeys struct {
|
||||
ArcgisLayerLayerFeatureServiceItemIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisLayerForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisLayerLayerFeatureServiceItemIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerUniques struct{}
|
||||
|
||||
func (u arcgisLayerUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisLayerChecks struct{}
|
||||
|
||||
func (c arcgisLayerChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,147 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisLayerFields = Table[
|
||||
arcgisLayerFieldColumns,
|
||||
arcgisLayerFieldIndexes,
|
||||
arcgisLayerFieldForeignKeys,
|
||||
arcgisLayerFieldUniques,
|
||||
arcgisLayerFieldChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "layer_field",
|
||||
Columns: arcgisLayerFieldColumns{
|
||||
LayerFeatureServiceItemID: column{
|
||||
Name: "layer_feature_service_item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerIndex: column{
|
||||
Name: "layer_index",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Name: column{
|
||||
Name: "name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Type: column{
|
||||
Name: "type_",
|
||||
DBType: "arcgis.fieldtype",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisLayerFieldIndexes{
|
||||
LayerFieldPkey: index{
|
||||
Type: "btree",
|
||||
Name: "layer_field_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "layer_feature_service_item_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "layer_index",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "name",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "layer_field_pkey",
|
||||
Columns: []string{"layer_feature_service_item_id", "layer_index", "name"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisLayerFieldForeignKeys{
|
||||
ArcgisLayerFieldLayerFieldLayerFeatureServiceItemIDLayerIndexFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.layer_field.layer_field_layer_feature_service_item_id_layer_index_fkey",
|
||||
Columns: []string{"layer_feature_service_item_id", "layer_index"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.layer",
|
||||
ForeignColumns: []string{"feature_service_item_id", "index_"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisLayerFieldColumns struct {
|
||||
LayerFeatureServiceItemID column
|
||||
LayerIndex column
|
||||
Name column
|
||||
Type column
|
||||
}
|
||||
|
||||
func (c arcgisLayerFieldColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.LayerFeatureServiceItemID, c.LayerIndex, c.Name, c.Type,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerFieldIndexes struct {
|
||||
LayerFieldPkey index
|
||||
}
|
||||
|
||||
func (i arcgisLayerFieldIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.LayerFieldPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerFieldForeignKeys struct {
|
||||
ArcgisLayerFieldLayerFieldLayerFeatureServiceItemIDLayerIndexFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisLayerFieldForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisLayerFieldLayerFieldLayerFeatureServiceItemIDLayerIndexFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisLayerFieldUniques struct{}
|
||||
|
||||
func (u arcgisLayerFieldUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisLayerFieldChecks struct{}
|
||||
|
||||
func (c arcgisLayerFieldChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,227 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisOauthTokens = Table[
|
||||
arcgisOauthTokenColumns,
|
||||
arcgisOauthTokenIndexes,
|
||||
arcgisOauthTokenForeignKeys,
|
||||
arcgisOauthTokenUniques,
|
||||
arcgisOauthTokenChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "oauth_token",
|
||||
Columns: arcgisOauthTokenColumns{
|
||||
AccessToken: column{
|
||||
Name: "access_token",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AccessTokenExpires: column{
|
||||
Name: "access_token_expires",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ArcgisAccountID: column{
|
||||
Name: "arcgis_account_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ArcgisID: column{
|
||||
Name: "arcgis_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ArcgisLicenseTypeID: column{
|
||||
Name: "arcgis_license_type_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Created: column{
|
||||
Name: "created",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "integer",
|
||||
Default: "nextval('arcgis.oauth_token_id_seq'::regclass)",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
InvalidatedAt: column{
|
||||
Name: "invalidated_at",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
RefreshToken: column{
|
||||
Name: "refresh_token",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
RefreshTokenExpires: column{
|
||||
Name: "refresh_token_expires",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
UserID: column{
|
||||
Name: "user_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Username: column{
|
||||
Name: "username",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisOauthTokenIndexes{
|
||||
OauthTokenPkey: index{
|
||||
Type: "btree",
|
||||
Name: "oauth_token_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "oauth_token_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisOauthTokenForeignKeys{
|
||||
ArcgisOauthTokenOauthTokenArcgisAccountIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.oauth_token.oauth_token_arcgis_account_id_fkey",
|
||||
Columns: []string{"arcgis_account_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.account",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
ArcgisOauthTokenOauthTokenUserIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.oauth_token.oauth_token_user_id_fkey",
|
||||
Columns: []string{"user_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisOauthTokenColumns struct {
|
||||
AccessToken column
|
||||
AccessTokenExpires column
|
||||
ArcgisAccountID column
|
||||
ArcgisID column
|
||||
ArcgisLicenseTypeID column
|
||||
Created column
|
||||
ID column
|
||||
InvalidatedAt column
|
||||
RefreshToken column
|
||||
RefreshTokenExpires column
|
||||
UserID column
|
||||
Username column
|
||||
}
|
||||
|
||||
func (c arcgisOauthTokenColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.AccessToken, c.AccessTokenExpires, c.ArcgisAccountID, c.ArcgisID, c.ArcgisLicenseTypeID, c.Created, c.ID, c.InvalidatedAt, c.RefreshToken, c.RefreshTokenExpires, c.UserID, c.Username,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenIndexes struct {
|
||||
OauthTokenPkey index
|
||||
}
|
||||
|
||||
func (i arcgisOauthTokenIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.OauthTokenPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenForeignKeys struct {
|
||||
ArcgisOauthTokenOauthTokenArcgisAccountIDFkey foreignKey
|
||||
ArcgisOauthTokenOauthTokenUserIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisOauthTokenForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisOauthTokenOauthTokenArcgisAccountIDFkey, f.ArcgisOauthTokenOauthTokenUserIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenUniques struct{}
|
||||
|
||||
func (u arcgisOauthTokenUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisOauthTokenChecks struct{}
|
||||
|
||||
func (c arcgisOauthTokenChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,162 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisParcelMappings = Table[
|
||||
arcgisParcelMappingColumns,
|
||||
arcgisParcelMappingIndexes,
|
||||
arcgisParcelMappingForeignKeys,
|
||||
arcgisParcelMappingUniques,
|
||||
arcgisParcelMappingChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "parcel_mapping",
|
||||
Columns: arcgisParcelMappingColumns{
|
||||
Destination: column{
|
||||
Name: "destination",
|
||||
DBType: "arcgis.mappingdestinationparcel",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerFeatureServiceItemID: column{
|
||||
Name: "layer_feature_service_item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerIndex: column{
|
||||
Name: "layer_index",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LayerFieldName: column{
|
||||
Name: "layer_field_name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
OrganizationID: column{
|
||||
Name: "organization_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisParcelMappingIndexes{
|
||||
ParcelMappingPkey: index{
|
||||
Type: "btree",
|
||||
Name: "parcel_mapping_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "organization_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "destination",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "parcel_mapping_pkey",
|
||||
Columns: []string{"organization_id", "destination"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisParcelMappingForeignKeys{
|
||||
ArcgisParcelMappingParcelMappingLayerFeatureServiceItemIDLayerIndexLFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.parcel_mapping.parcel_mapping_layer_feature_service_item_id_layer_index_l_fkey",
|
||||
Columns: []string{"layer_feature_service_item_id", "layer_index", "layer_field_name"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.layer_field",
|
||||
ForeignColumns: []string{"layer_feature_service_item_id", "layer_index", "name"},
|
||||
},
|
||||
ArcgisParcelMappingParcelMappingOrganizationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.parcel_mapping.parcel_mapping_organization_id_fkey",
|
||||
Columns: []string{"organization_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "organization",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisParcelMappingColumns struct {
|
||||
Destination column
|
||||
LayerFeatureServiceItemID column
|
||||
LayerIndex column
|
||||
LayerFieldName column
|
||||
OrganizationID column
|
||||
}
|
||||
|
||||
func (c arcgisParcelMappingColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Destination, c.LayerFeatureServiceItemID, c.LayerIndex, c.LayerFieldName, c.OrganizationID,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisParcelMappingIndexes struct {
|
||||
ParcelMappingPkey index
|
||||
}
|
||||
|
||||
func (i arcgisParcelMappingIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.ParcelMappingPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisParcelMappingForeignKeys struct {
|
||||
ArcgisParcelMappingParcelMappingLayerFeatureServiceItemIDLayerIndexLFkey foreignKey
|
||||
ArcgisParcelMappingParcelMappingOrganizationIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisParcelMappingForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisParcelMappingParcelMappingLayerFeatureServiceItemIDLayerIndexLFkey, f.ArcgisParcelMappingParcelMappingOrganizationIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisParcelMappingUniques struct{}
|
||||
|
||||
func (u arcgisParcelMappingUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisParcelMappingChecks struct{}
|
||||
|
||||
func (c arcgisParcelMappingChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,147 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisServiceFeatures = Table[
|
||||
arcgisServiceFeatureColumns,
|
||||
arcgisServiceFeatureIndexes,
|
||||
arcgisServiceFeatureForeignKeys,
|
||||
arcgisServiceFeatureUniques,
|
||||
arcgisServiceFeatureChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "service_feature",
|
||||
Columns: arcgisServiceFeatureColumns{
|
||||
Extent: column{
|
||||
Name: "extent",
|
||||
DBType: "box2d",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ItemID: column{
|
||||
Name: "item_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
SpatialReference: column{
|
||||
Name: "spatial_reference",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URL: column{
|
||||
Name: "url",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AccountID: column{
|
||||
Name: "account_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisServiceFeatureIndexes{
|
||||
FeatureServicePkey: index{
|
||||
Type: "btree",
|
||||
Name: "feature_service_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "item_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "feature_service_pkey",
|
||||
Columns: []string{"item_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisServiceFeatureForeignKeys{
|
||||
ArcgisServiceFeatureServiceFeatureAccountIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.service_feature.service_feature_account_id_fkey",
|
||||
Columns: []string{"account_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.account",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureColumns struct {
|
||||
Extent column
|
||||
ItemID column
|
||||
SpatialReference column
|
||||
URL column
|
||||
AccountID column
|
||||
}
|
||||
|
||||
func (c arcgisServiceFeatureColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Extent, c.ItemID, c.SpatialReference, c.URL, c.AccountID,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureIndexes struct {
|
||||
FeatureServicePkey index
|
||||
}
|
||||
|
||||
func (i arcgisServiceFeatureIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.FeatureServicePkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureForeignKeys struct {
|
||||
ArcgisServiceFeatureServiceFeatureAccountIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisServiceFeatureForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisServiceFeatureServiceFeatureAccountIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureUniques struct{}
|
||||
|
||||
func (u arcgisServiceFeatureUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisServiceFeatureChecks struct{}
|
||||
|
||||
func (c arcgisServiceFeatureChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,147 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisServiceMaps = Table[
|
||||
arcgisServiceMapColumns,
|
||||
arcgisServiceMapIndexes,
|
||||
arcgisServiceMapForeignKeys,
|
||||
arcgisServiceMapUniques,
|
||||
arcgisServiceMapChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "service_map",
|
||||
Columns: arcgisServiceMapColumns{
|
||||
AccountID: column{
|
||||
Name: "account_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ArcgisID: column{
|
||||
Name: "arcgis_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Name: column{
|
||||
Name: "name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Title: column{
|
||||
Name: "title",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
URL: column{
|
||||
Name: "url",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisServiceMapIndexes{
|
||||
ServiceMapPkey: index{
|
||||
Type: "btree",
|
||||
Name: "service_map_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "arcgis_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "service_map_pkey",
|
||||
Columns: []string{"arcgis_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisServiceMapForeignKeys{
|
||||
ArcgisServiceMapServiceMapAccountIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.service_map.service_map_account_id_fkey",
|
||||
Columns: []string{"account_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.account",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisServiceMapColumns struct {
|
||||
AccountID column
|
||||
ArcgisID column
|
||||
Name column
|
||||
Title column
|
||||
URL column
|
||||
}
|
||||
|
||||
func (c arcgisServiceMapColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.AccountID, c.ArcgisID, c.Name, c.Title, c.URL,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceMapIndexes struct {
|
||||
ServiceMapPkey index
|
||||
}
|
||||
|
||||
func (i arcgisServiceMapIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.ServiceMapPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceMapForeignKeys struct {
|
||||
ArcgisServiceMapServiceMapAccountIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisServiceMapForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisServiceMapServiceMapAccountIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisServiceMapUniques struct{}
|
||||
|
||||
func (u arcgisServiceMapUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisServiceMapChecks struct{}
|
||||
|
||||
func (c arcgisServiceMapChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,237 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisUsers = Table[
|
||||
arcgisuserColumns,
|
||||
arcgisuserIndexes,
|
||||
arcgisuserForeignKeys,
|
||||
arcgisuserUniques,
|
||||
arcgisuserChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "user_",
|
||||
Columns: arcgisuserColumns{
|
||||
Access: column{
|
||||
Name: "access",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Created: column{
|
||||
Name: "created",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Email: column{
|
||||
Name: "email",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
FullName: column{
|
||||
Name: "full_name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Level: column{
|
||||
Name: "level",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
OrgID: column{
|
||||
Name: "org_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
PublicUserID: column{
|
||||
Name: "public_user_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Region: column{
|
||||
Name: "region",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Role: column{
|
||||
Name: "role",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
RoleID: column{
|
||||
Name: "role_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Username: column{
|
||||
Name: "username",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
UserLicenseTypeID: column{
|
||||
Name: "user_license_type_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
UserType: column{
|
||||
Name: "user_type",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisuserIndexes{
|
||||
UserPkey: index{
|
||||
Type: "btree",
|
||||
Name: "user__pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "user__pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisuserForeignKeys{
|
||||
ArcgisUserUserPublicUserIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.user_.user__public_user_id_fkey",
|
||||
Columns: []string{"public_user_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisuserColumns struct {
|
||||
Access column
|
||||
Created column
|
||||
Email column
|
||||
FullName column
|
||||
ID column
|
||||
Level column
|
||||
OrgID column
|
||||
PublicUserID column
|
||||
Region column
|
||||
Role column
|
||||
RoleID column
|
||||
Username column
|
||||
UserLicenseTypeID column
|
||||
UserType column
|
||||
}
|
||||
|
||||
func (c arcgisuserColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Access, c.Created, c.Email, c.FullName, c.ID, c.Level, c.OrgID, c.PublicUserID, c.Region, c.Role, c.RoleID, c.Username, c.UserLicenseTypeID, c.UserType,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisuserIndexes struct {
|
||||
UserPkey index
|
||||
}
|
||||
|
||||
func (i arcgisuserIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.UserPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisuserForeignKeys struct {
|
||||
ArcgisUserUserPublicUserIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisuserForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisUserUserPublicUserIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisuserUniques struct{}
|
||||
|
||||
func (u arcgisuserUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisuserChecks struct{}
|
||||
|
||||
func (c arcgisuserChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -1,122 +0,0 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var ArcgisUserPrivileges = Table[
|
||||
arcgisUserPrivilegeColumns,
|
||||
arcgisUserPrivilegeIndexes,
|
||||
arcgisUserPrivilegeForeignKeys,
|
||||
arcgisUserPrivilegeUniques,
|
||||
arcgisUserPrivilegeChecks,
|
||||
]{
|
||||
Schema: "arcgis",
|
||||
Name: "user_privilege",
|
||||
Columns: arcgisUserPrivilegeColumns{
|
||||
UserID: column{
|
||||
Name: "user_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Privilege: column{
|
||||
Name: "privilege",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: arcgisUserPrivilegeIndexes{
|
||||
UserPrivilegePkey: index{
|
||||
Type: "btree",
|
||||
Name: "user_privilege_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "user_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
{
|
||||
Name: "privilege",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false, false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "user_privilege_pkey",
|
||||
Columns: []string{"user_id", "privilege"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: arcgisUserPrivilegeForeignKeys{
|
||||
ArcgisUserPrivilegeUserPrivilegeUserIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "arcgis.user_privilege.user_privilege_user_id_fkey",
|
||||
Columns: []string{"user_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeColumns struct {
|
||||
UserID column
|
||||
Privilege column
|
||||
}
|
||||
|
||||
func (c arcgisUserPrivilegeColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.UserID, c.Privilege,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeIndexes struct {
|
||||
UserPrivilegePkey index
|
||||
}
|
||||
|
||||
func (i arcgisUserPrivilegeIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.UserPrivilegePkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeForeignKeys struct {
|
||||
ArcgisUserPrivilegeUserPrivilegeUserIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f arcgisUserPrivilegeForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.ArcgisUserPrivilegeUserPrivilegeUserIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeUniques struct{}
|
||||
|
||||
func (u arcgisUserPrivilegeUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type arcgisUserPrivilegeChecks struct{}
|
||||
|
||||
func (c arcgisUserPrivilegeChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -60,6 +60,15 @@ var CommsMailers = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ExternalID: column{
|
||||
Name: "external_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: commsMailerIndexes{
|
||||
MailerPkey: index{
|
||||
|
|
@ -101,16 +110,17 @@ var CommsMailers = Table[
|
|||
}
|
||||
|
||||
type commsMailerColumns struct {
|
||||
AddressID column
|
||||
Created column
|
||||
ID column
|
||||
Recipient column
|
||||
UUID column
|
||||
AddressID column
|
||||
Created column
|
||||
ID column
|
||||
Recipient column
|
||||
UUID column
|
||||
ExternalID column
|
||||
}
|
||||
|
||||
func (c commsMailerColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.AddressID, c.Created, c.ID, c.Recipient, c.UUID,
|
||||
c.AddressID, c.Created, c.ID, c.Recipient, c.UUID, c.ExternalID,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -42,6 +42,15 @@ var CommsPhones = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
CanSMS: column{
|
||||
Name: "can_sms",
|
||||
DBType: "boolean",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: commsPhoneIndexes{
|
||||
PhonePkey: index{
|
||||
|
|
@ -75,11 +84,12 @@ type commsPhoneColumns struct {
|
|||
E164 column
|
||||
IsSubscribed column
|
||||
Status column
|
||||
CanSMS column
|
||||
}
|
||||
|
||||
func (c commsPhoneColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.E164, c.IsSubscribed, c.Status,
|
||||
c.E164, c.IsSubscribed, c.Status, c.CanSMS,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
237
db/dbinfo/communication.bob.go
Normal file
237
db/dbinfo/communication.bob.go
Normal file
|
|
@ -0,0 +1,237 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var Communications = Table[
|
||||
communicationColumns,
|
||||
communicationIndexes,
|
||||
communicationForeignKeys,
|
||||
communicationUniques,
|
||||
communicationChecks,
|
||||
]{
|
||||
Schema: "",
|
||||
Name: "communication",
|
||||
Columns: communicationColumns{
|
||||
Created: column{
|
||||
Name: "created",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "integer",
|
||||
Default: "nextval('communication_id_seq'::regclass)",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
OrganizationID: column{
|
||||
Name: "organization_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ResponseEmailLogID: column{
|
||||
Name: "response_email_log_id",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ResponseTextLogID: column{
|
||||
Name: "response_text_log_id",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
SourceEmailLogID: column{
|
||||
Name: "source_email_log_id",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
SourceReportID: column{
|
||||
Name: "source_report_id",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
SourceTextLogID: column{
|
||||
Name: "source_text_log_id",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Status: column{
|
||||
Name: "status",
|
||||
DBType: "public.communicationstatus",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: communicationIndexes{
|
||||
CommunicationPkey: index{
|
||||
Type: "btree",
|
||||
Name: "communication_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "communication_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: communicationForeignKeys{
|
||||
CommunicationCommunicationOrganizationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "communication.communication_organization_id_fkey",
|
||||
Columns: []string{"organization_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "organization",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
CommunicationCommunicationResponseEmailLogIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "communication.communication_response_email_log_id_fkey",
|
||||
Columns: []string{"response_email_log_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "comms.email_log",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
CommunicationCommunicationResponseTextLogIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "communication.communication_response_text_log_id_fkey",
|
||||
Columns: []string{"response_text_log_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "comms.text_log",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
CommunicationCommunicationSourceEmailLogIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "communication.communication_source_email_log_id_fkey",
|
||||
Columns: []string{"source_email_log_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "comms.email_log",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
CommunicationCommunicationSourceReportIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "communication.communication_source_report_id_fkey",
|
||||
Columns: []string{"source_report_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "publicreport.report",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
CommunicationCommunicationSourceTextLogIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "communication.communication_source_text_log_id_fkey",
|
||||
Columns: []string{"source_text_log_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "comms.text_log",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type communicationColumns struct {
|
||||
Created column
|
||||
ID column
|
||||
OrganizationID column
|
||||
ResponseEmailLogID column
|
||||
ResponseTextLogID column
|
||||
SourceEmailLogID column
|
||||
SourceReportID column
|
||||
SourceTextLogID column
|
||||
Status column
|
||||
}
|
||||
|
||||
func (c communicationColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Created, c.ID, c.OrganizationID, c.ResponseEmailLogID, c.ResponseTextLogID, c.SourceEmailLogID, c.SourceReportID, c.SourceTextLogID, c.Status,
|
||||
}
|
||||
}
|
||||
|
||||
type communicationIndexes struct {
|
||||
CommunicationPkey index
|
||||
}
|
||||
|
||||
func (i communicationIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.CommunicationPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type communicationForeignKeys struct {
|
||||
CommunicationCommunicationOrganizationIDFkey foreignKey
|
||||
CommunicationCommunicationResponseEmailLogIDFkey foreignKey
|
||||
CommunicationCommunicationResponseTextLogIDFkey foreignKey
|
||||
CommunicationCommunicationSourceEmailLogIDFkey foreignKey
|
||||
CommunicationCommunicationSourceReportIDFkey foreignKey
|
||||
CommunicationCommunicationSourceTextLogIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f communicationForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.CommunicationCommunicationOrganizationIDFkey, f.CommunicationCommunicationResponseEmailLogIDFkey, f.CommunicationCommunicationResponseTextLogIDFkey, f.CommunicationCommunicationSourceEmailLogIDFkey, f.CommunicationCommunicationSourceReportIDFkey, f.CommunicationCommunicationSourceTextLogIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type communicationUniques struct{}
|
||||
|
||||
func (u communicationUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type communicationChecks struct{}
|
||||
|
||||
func (c communicationChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
157
db/dbinfo/communication_log_entry.bob.go
Normal file
157
db/dbinfo/communication_log_entry.bob.go
Normal file
|
|
@ -0,0 +1,157 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var CommunicationLogEntries = Table[
|
||||
communicationLogEntryColumns,
|
||||
communicationLogEntryIndexes,
|
||||
communicationLogEntryForeignKeys,
|
||||
communicationLogEntryUniques,
|
||||
communicationLogEntryChecks,
|
||||
]{
|
||||
Schema: "",
|
||||
Name: "communication_log_entry",
|
||||
Columns: communicationLogEntryColumns{
|
||||
CommunicationID: column{
|
||||
Name: "communication_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Created: column{
|
||||
Name: "created",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "integer",
|
||||
Default: "nextval('communication_log_entry_id_seq'::regclass)",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Type: column{
|
||||
Name: "type_",
|
||||
DBType: "public.communicationlogentry",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
User: column{
|
||||
Name: "user_",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: communicationLogEntryIndexes{
|
||||
CommunicationLogEntryPkey: index{
|
||||
Type: "btree",
|
||||
Name: "communication_log_entry_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "communication_log_entry_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: communicationLogEntryForeignKeys{
|
||||
CommunicationLogEntryCommunicationLogEntryCommunicationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "communication_log_entry.communication_log_entry_communication_id_fkey",
|
||||
Columns: []string{"communication_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "communication",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
CommunicationLogEntryCommunicationLogEntryUserFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "communication_log_entry.communication_log_entry_user__fkey",
|
||||
Columns: []string{"user_"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type communicationLogEntryColumns struct {
|
||||
CommunicationID column
|
||||
Created column
|
||||
ID column
|
||||
Type column
|
||||
User column
|
||||
}
|
||||
|
||||
func (c communicationLogEntryColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.CommunicationID, c.Created, c.ID, c.Type, c.User,
|
||||
}
|
||||
}
|
||||
|
||||
type communicationLogEntryIndexes struct {
|
||||
CommunicationLogEntryPkey index
|
||||
}
|
||||
|
||||
func (i communicationLogEntryIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.CommunicationLogEntryPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type communicationLogEntryForeignKeys struct {
|
||||
CommunicationLogEntryCommunicationLogEntryCommunicationIDFkey foreignKey
|
||||
CommunicationLogEntryCommunicationLogEntryUserFkey foreignKey
|
||||
}
|
||||
|
||||
func (f communicationLogEntryForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.CommunicationLogEntryCommunicationLogEntryCommunicationIDFkey, f.CommunicationLogEntryCommunicationLogEntryUserFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type communicationLogEntryUniques struct{}
|
||||
|
||||
func (u communicationLogEntryUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type communicationLogEntryChecks struct{}
|
||||
|
||||
func (c communicationLogEntryChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -33,8 +33,34 @@ var ComplianceReportRequestMailers = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "integer",
|
||||
Default: "nextval('compliance_report_request_mailer_id_seq'::regclass)",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: complianceReportRequestMailerIndexes{
|
||||
ComplianceReportRequestMailerPkey: index{
|
||||
Type: "btree",
|
||||
Name: "compliance_report_request_mailer_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
ComplianceReportRequestMaiComplianceReportRequestIDKey: index{
|
||||
Type: "btree",
|
||||
Name: "compliance_report_request_mai_compliance_report_request_id__key",
|
||||
|
|
@ -58,7 +84,11 @@ var ComplianceReportRequestMailers = Table[
|
|||
Include: []string{},
|
||||
},
|
||||
},
|
||||
|
||||
PrimaryKey: &constraint{
|
||||
Name: "compliance_report_request_mailer_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: complianceReportRequestMailerForeignKeys{
|
||||
ComplianceReportRequestMailerComplianceReportRequestMaiComplianceReportRequestIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
|
|
@ -93,21 +123,23 @@ var ComplianceReportRequestMailers = Table[
|
|||
type complianceReportRequestMailerColumns struct {
|
||||
ComplianceReportRequestID column
|
||||
MailerID column
|
||||
ID column
|
||||
}
|
||||
|
||||
func (c complianceReportRequestMailerColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.ComplianceReportRequestID, c.MailerID,
|
||||
c.ComplianceReportRequestID, c.MailerID, c.ID,
|
||||
}
|
||||
}
|
||||
|
||||
type complianceReportRequestMailerIndexes struct {
|
||||
ComplianceReportRequestMailerPkey index
|
||||
ComplianceReportRequestMaiComplianceReportRequestIDKey index
|
||||
}
|
||||
|
||||
func (i complianceReportRequestMailerIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.ComplianceReportRequestMaiComplianceReportRequestIDKey,
|
||||
i.ComplianceReportRequestMailerPkey, i.ComplianceReportRequestMaiComplianceReportRequestIDKey,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -114,6 +114,15 @@ var FileuploadFiles = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Error: column{
|
||||
Name: "error",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: fileuploadFileIndexes{
|
||||
FilePkey: index{
|
||||
|
|
@ -184,11 +193,12 @@ type fileuploadFileColumns struct {
|
|||
SizeBytes column
|
||||
FileUUID column
|
||||
Committer column
|
||||
Error column
|
||||
}
|
||||
|
||||
func (c fileuploadFileColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.ID, c.ContentType, c.Created, c.CreatorID, c.Deleted, c.Name, c.OrganizationID, c.Status, c.SizeBytes, c.FileUUID, c.Committer,
|
||||
c.ID, c.ContentType, c.Created, c.CreatorID, c.Deleted, c.Name, c.OrganizationID, c.Status, c.SizeBytes, c.FileUUID, c.Committer, c.Error,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -222,6 +222,15 @@ var FileuploadPools = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AddressID: column{
|
||||
Name: "address_id",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: fileuploadPoolIndexes{
|
||||
PoolPkey: index{
|
||||
|
|
@ -248,6 +257,15 @@ var FileuploadPools = Table[
|
|||
Comment: "",
|
||||
},
|
||||
ForeignKeys: fileuploadPoolForeignKeys{
|
||||
FileuploadPoolPoolAddressIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "fileupload.pool.pool_address_id_fkey",
|
||||
Columns: []string{"address_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "address",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
FileuploadPoolPoolCreatorIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "fileupload.pool.pool_creator_id_fkey",
|
||||
|
|
@ -313,11 +331,12 @@ type fileuploadPoolColumns struct {
|
|||
AddressLocality column
|
||||
AddressRegion column
|
||||
Condition column
|
||||
AddressID column
|
||||
}
|
||||
|
||||
func (c fileuploadPoolColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.AddressPostalCode, c.AddressStreet, c.Committed, c.Created, c.CreatorID, c.CSVFile, c.Deleted, c.Geom, c.H3cell, c.ID, c.IsInDistrict, c.IsNew, c.Notes, c.PropertyOwnerName, c.PropertyOwnerPhoneE164, c.ResidentOwned, c.ResidentPhoneE164, c.LineNumber, c.Tags, c.AddressNumber, c.AddressLocality, c.AddressRegion, c.Condition,
|
||||
c.AddressPostalCode, c.AddressStreet, c.Committed, c.Created, c.CreatorID, c.CSVFile, c.Deleted, c.Geom, c.H3cell, c.ID, c.IsInDistrict, c.IsNew, c.Notes, c.PropertyOwnerName, c.PropertyOwnerPhoneE164, c.ResidentOwned, c.ResidentPhoneE164, c.LineNumber, c.Tags, c.AddressNumber, c.AddressLocality, c.AddressRegion, c.Condition, c.AddressID,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -332,6 +351,7 @@ func (i fileuploadPoolIndexes) AsSlice() []index {
|
|||
}
|
||||
|
||||
type fileuploadPoolForeignKeys struct {
|
||||
FileuploadPoolPoolAddressIDFkey foreignKey
|
||||
FileuploadPoolPoolCreatorIDFkey foreignKey
|
||||
FileuploadPoolPoolCSVFileFkey foreignKey
|
||||
FileuploadPoolPoolPropertyOwnerPhoneE164Fkey foreignKey
|
||||
|
|
@ -340,7 +360,7 @@ type fileuploadPoolForeignKeys struct {
|
|||
|
||||
func (f fileuploadPoolForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.FileuploadPoolPoolCreatorIDFkey, f.FileuploadPoolPoolCSVFileFkey, f.FileuploadPoolPoolPropertyOwnerPhoneE164Fkey, f.FileuploadPoolPoolResidentPhoneE164Fkey,
|
||||
f.FileuploadPoolPoolAddressIDFkey, f.FileuploadPoolPoolCreatorIDFkey, f.FileuploadPoolPoolCSVFileFkey, f.FileuploadPoolPoolPropertyOwnerPhoneE164Fkey, f.FileuploadPoolPoolResidentPhoneE164Fkey,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
122
db/dbinfo/lob.event.bob.go
Normal file
122
db/dbinfo/lob.event.bob.go
Normal file
|
|
@ -0,0 +1,122 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var LobEvents = Table[
|
||||
lobEventColumns,
|
||||
lobEventIndexes,
|
||||
lobEventForeignKeys,
|
||||
lobEventUniques,
|
||||
lobEventChecks,
|
||||
]{
|
||||
Schema: "lob",
|
||||
Name: "event",
|
||||
Columns: lobEventColumns{
|
||||
Created: column{
|
||||
Name: "created",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Body: column{
|
||||
Name: "body",
|
||||
DBType: "jsonb",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Type: column{
|
||||
Name: "type_",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: lobEventIndexes{
|
||||
EventPkey: index{
|
||||
Type: "btree",
|
||||
Name: "event_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "event_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type lobEventColumns struct {
|
||||
Created column
|
||||
Body column
|
||||
ID column
|
||||
Type column
|
||||
}
|
||||
|
||||
func (c lobEventColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Created, c.Body, c.ID, c.Type,
|
||||
}
|
||||
}
|
||||
|
||||
type lobEventIndexes struct {
|
||||
EventPkey index
|
||||
}
|
||||
|
||||
func (i lobEventIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.EventPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type lobEventForeignKeys struct{}
|
||||
|
||||
func (f lobEventForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{}
|
||||
}
|
||||
|
||||
type lobEventUniques struct{}
|
||||
|
||||
func (u lobEventUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type lobEventChecks struct{}
|
||||
|
||||
func (c lobEventChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
157
db/dbinfo/log_impersonation.bob.go
Normal file
157
db/dbinfo/log_impersonation.bob.go
Normal file
|
|
@ -0,0 +1,157 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var LogImpersonations = Table[
|
||||
logImpersonationColumns,
|
||||
logImpersonationIndexes,
|
||||
logImpersonationForeignKeys,
|
||||
logImpersonationUniques,
|
||||
logImpersonationChecks,
|
||||
]{
|
||||
Schema: "",
|
||||
Name: "log_impersonation",
|
||||
Columns: logImpersonationColumns{
|
||||
BeginAt: column{
|
||||
Name: "begin_at",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
EndAt: column{
|
||||
Name: "end_at",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "integer",
|
||||
Default: "nextval('log_impersonation_id_seq'::regclass)",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ImpersonatorID: column{
|
||||
Name: "impersonator_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
TargetID: column{
|
||||
Name: "target_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: logImpersonationIndexes{
|
||||
LogImpersonationPkey: index{
|
||||
Type: "btree",
|
||||
Name: "log_impersonation_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "log_impersonation_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: logImpersonationForeignKeys{
|
||||
LogImpersonationLogImpersonationImpersonatorIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "log_impersonation.log_impersonation_impersonator_id_fkey",
|
||||
Columns: []string{"impersonator_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
LogImpersonationLogImpersonationTargetIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "log_impersonation.log_impersonation_target_id_fkey",
|
||||
Columns: []string{"target_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type logImpersonationColumns struct {
|
||||
BeginAt column
|
||||
EndAt column
|
||||
ID column
|
||||
ImpersonatorID column
|
||||
TargetID column
|
||||
}
|
||||
|
||||
func (c logImpersonationColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.BeginAt, c.EndAt, c.ID, c.ImpersonatorID, c.TargetID,
|
||||
}
|
||||
}
|
||||
|
||||
type logImpersonationIndexes struct {
|
||||
LogImpersonationPkey index
|
||||
}
|
||||
|
||||
func (i logImpersonationIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.LogImpersonationPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type logImpersonationForeignKeys struct {
|
||||
LogImpersonationLogImpersonationImpersonatorIDFkey foreignKey
|
||||
LogImpersonationLogImpersonationTargetIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f logImpersonationForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.LogImpersonationLogImpersonationImpersonatorIDFkey, f.LogImpersonationLogImpersonationTargetIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type logImpersonationUniques struct{}
|
||||
|
||||
func (u logImpersonationUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type logImpersonationChecks struct{}
|
||||
|
||||
func (c logImpersonationChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -78,6 +78,15 @@ var NoteImages = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "integer",
|
||||
Default: "IDENTITY",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: true,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: noteImageIndexes{
|
||||
NoteImagePkey: index{
|
||||
|
|
@ -102,6 +111,23 @@ var NoteImages = Table[
|
|||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
NoteImageIDUnique: index{
|
||||
Type: "btree",
|
||||
Name: "note_image_id_unique",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "note_image_pkey",
|
||||
|
|
@ -137,6 +163,13 @@ var NoteImages = Table[
|
|||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
Uniques: noteImageUniques{
|
||||
NoteImageIDUnique: constraint{
|
||||
Name: "note_image_id_unique",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
|
@ -149,21 +182,23 @@ type noteImageColumns struct {
|
|||
OrganizationID column
|
||||
Version column
|
||||
UUID column
|
||||
ID column
|
||||
}
|
||||
|
||||
func (c noteImageColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Created, c.CreatorID, c.Deleted, c.DeletorID, c.OrganizationID, c.Version, c.UUID,
|
||||
c.Created, c.CreatorID, c.Deleted, c.DeletorID, c.OrganizationID, c.Version, c.UUID, c.ID,
|
||||
}
|
||||
}
|
||||
|
||||
type noteImageIndexes struct {
|
||||
NoteImagePkey index
|
||||
NoteImagePkey index
|
||||
NoteImageIDUnique index
|
||||
}
|
||||
|
||||
func (i noteImageIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.NoteImagePkey,
|
||||
i.NoteImagePkey, i.NoteImageIDUnique,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -179,10 +214,14 @@ func (f noteImageForeignKeys) AsSlice() []foreignKey {
|
|||
}
|
||||
}
|
||||
|
||||
type noteImageUniques struct{}
|
||||
type noteImageUniques struct {
|
||||
NoteImageIDUnique constraint
|
||||
}
|
||||
|
||||
func (u noteImageUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
return []constraint{
|
||||
u.NoteImageIDUnique,
|
||||
}
|
||||
}
|
||||
|
||||
type noteImageChecks struct{}
|
||||
|
|
|
|||
|
|
@ -321,6 +321,15 @@ var Organizations = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LobAddressID: column{
|
||||
Name: "lob_address_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: organizationIndexes{
|
||||
OrganizationPkey: index{
|
||||
|
|
@ -486,11 +495,12 @@ type organizationColumns struct {
|
|||
FieldseekerServiceFeatureItemID column
|
||||
ArcgisMapServiceID column
|
||||
IsCatchall column
|
||||
LobAddressID column
|
||||
}
|
||||
|
||||
func (c organizationColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.ID, c.Name, c.ImportDistrictGid, c.Website, c.LogoUUID, c.Slug, c.GeneralManagerName, c.MailingAddressCity, c.MailingAddressPostalCode, c.MailingAddressStreet, c.OfficeAddressCity, c.OfficeAddressPostalCode, c.OfficeAddressStreet, c.ServiceAreaGeometry, c.ServiceAreaSquareMeters, c.ServiceAreaCentroid, c.ServiceAreaExtent, c.OfficeFax, c.OfficePhone, c.ServiceAreaXmin, c.ServiceAreaYmin, c.ServiceAreaXmax, c.ServiceAreaYmax, c.ServiceAreaCentroidGeojson, c.ServiceAreaCentroidX, c.ServiceAreaCentroidY, c.MailingAddressCountry, c.MailingAddressState, c.OfficeAddressCountry, c.OfficeAddressState, c.ArcgisAccountID, c.FieldseekerServiceFeatureItemID, c.ArcgisMapServiceID, c.IsCatchall,
|
||||
c.ID, c.Name, c.ImportDistrictGid, c.Website, c.LogoUUID, c.Slug, c.GeneralManagerName, c.MailingAddressCity, c.MailingAddressPostalCode, c.MailingAddressStreet, c.OfficeAddressCity, c.OfficeAddressPostalCode, c.OfficeAddressStreet, c.ServiceAreaGeometry, c.ServiceAreaSquareMeters, c.ServiceAreaCentroid, c.ServiceAreaExtent, c.OfficeFax, c.OfficePhone, c.ServiceAreaXmin, c.ServiceAreaYmin, c.ServiceAreaXmax, c.ServiceAreaYmax, c.ServiceAreaCentroidGeojson, c.ServiceAreaCentroidX, c.ServiceAreaCentroidY, c.MailingAddressCountry, c.MailingAddressState, c.OfficeAddressCountry, c.OfficeAddressState, c.ArcgisAccountID, c.FieldseekerServiceFeatureItemID, c.ArcgisMapServiceID, c.IsCatchall, c.LobAddressID,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
112
db/dbinfo/publicreport.client.bob.go
Normal file
112
db/dbinfo/publicreport.client.bob.go
Normal file
|
|
@ -0,0 +1,112 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var PublicreportClients = Table[
|
||||
publicreportClientColumns,
|
||||
publicreportClientIndexes,
|
||||
publicreportClientForeignKeys,
|
||||
publicreportClientUniques,
|
||||
publicreportClientChecks,
|
||||
]{
|
||||
Schema: "publicreport",
|
||||
Name: "client",
|
||||
Columns: publicreportClientColumns{
|
||||
Created: column{
|
||||
Name: "created",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
UserAgent: column{
|
||||
Name: "user_agent",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
UUID: column{
|
||||
Name: "uuid",
|
||||
DBType: "uuid",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: publicreportClientIndexes{
|
||||
ClientPkey: index{
|
||||
Type: "btree",
|
||||
Name: "client_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "uuid",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "client_pkey",
|
||||
Columns: []string{"uuid"},
|
||||
Comment: "",
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type publicreportClientColumns struct {
|
||||
Created column
|
||||
UserAgent column
|
||||
UUID column
|
||||
}
|
||||
|
||||
func (c publicreportClientColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Created, c.UserAgent, c.UUID,
|
||||
}
|
||||
}
|
||||
|
||||
type publicreportClientIndexes struct {
|
||||
ClientPkey index
|
||||
}
|
||||
|
||||
func (i publicreportClientIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.ClientPkey,
|
||||
}
|
||||
}
|
||||
|
||||
type publicreportClientForeignKeys struct{}
|
||||
|
||||
func (f publicreportClientForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{}
|
||||
}
|
||||
|
||||
type publicreportClientUniques struct{}
|
||||
|
||||
func (u publicreportClientUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type publicreportClientChecks struct{}
|
||||
|
||||
func (c publicreportClientChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
197
db/dbinfo/publicreport.compliance.bob.go
Normal file
197
db/dbinfo/publicreport.compliance.bob.go
Normal file
|
|
@ -0,0 +1,197 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var PublicreportCompliances = Table[
|
||||
publicreportComplianceColumns,
|
||||
publicreportComplianceIndexes,
|
||||
publicreportComplianceForeignKeys,
|
||||
publicreportComplianceUniques,
|
||||
publicreportComplianceChecks,
|
||||
]{
|
||||
Schema: "publicreport",
|
||||
Name: "compliance",
|
||||
Columns: publicreportComplianceColumns{
|
||||
AccessInstructions: column{
|
||||
Name: "access_instructions",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AvailabilityNotes: column{
|
||||
Name: "availability_notes",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Comments: column{
|
||||
Name: "comments",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
GateCode: column{
|
||||
Name: "gate_code",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
HasDog: column{
|
||||
Name: "has_dog",
|
||||
DBType: "boolean",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
PermissionType: column{
|
||||
Name: "permission_type",
|
||||
DBType: "publicreport.permissionaccess",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ReportID: column{
|
||||
Name: "report_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ReportPhoneCanText: column{
|
||||
Name: "report_phone_can_text",
|
||||
DBType: "boolean",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
WantsScheduled: column{
|
||||
Name: "wants_scheduled",
|
||||
DBType: "boolean",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Submitted: column{
|
||||
Name: "submitted",
|
||||
DBType: "timestamp without time zone",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: publicreportComplianceIndexes{
|
||||
CompliancePkey: index{
|
||||
Type: "btree",
|
||||
Name: "compliance_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "report_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "compliance_pkey",
|
||||
Columns: []string{"report_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: publicreportComplianceForeignKeys{
|
||||
PublicreportComplianceComplianceReportIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "publicreport.compliance.compliance_report_id_fkey",
|
||||
Columns: []string{"report_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "publicreport.report",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type publicreportComplianceColumns struct {
|
||||
AccessInstructions column
|
||||
AvailabilityNotes column
|
||||
Comments column
|
||||
GateCode column
|
||||
HasDog column
|
||||
PermissionType column
|
||||
ReportID column
|
||||
ReportPhoneCanText column
|
||||
WantsScheduled column
|
||||
Submitted column
|
||||
}
|
||||
|
||||
func (c publicreportComplianceColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.AccessInstructions, c.AvailabilityNotes, c.Comments, c.GateCode, c.HasDog, c.PermissionType, c.ReportID, c.ReportPhoneCanText, c.WantsScheduled, c.Submitted,
|
||||
}
|
||||
}
|
||||
|
||||
type publicreportComplianceIndexes struct {
|
||||
CompliancePkey index
|
||||
}
|
||||
|
||||
func (i publicreportComplianceIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.CompliancePkey,
|
||||
}
|
||||
}
|
||||
|
||||
type publicreportComplianceForeignKeys struct {
|
||||
PublicreportComplianceComplianceReportIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f publicreportComplianceForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.PublicreportComplianceComplianceReportIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type publicreportComplianceUniques struct{}
|
||||
|
||||
func (u publicreportComplianceUniques) AsSlice() []constraint {
|
||||
return []constraint{}
|
||||
}
|
||||
|
||||
type publicreportComplianceChecks struct{}
|
||||
|
||||
func (c publicreportComplianceChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -24,60 +24,6 @@ var PublicreportReports = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AddressNumber: column{
|
||||
Name: "address_number",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AddressStreet: column{
|
||||
Name: "address_street",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AddressLocality: column{
|
||||
Name: "address_locality",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AddressRegion: column{
|
||||
Name: "address_region",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AddressPostalCode: column{
|
||||
Name: "address_postal_code",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AddressCountry: column{
|
||||
Name: "address_country",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
AddressID: column{
|
||||
Name: "address_id",
|
||||
DBType: "integer",
|
||||
|
|
@ -240,22 +186,31 @@ var PublicreportReports = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LocationLatitude: column{
|
||||
Name: "location_latitude",
|
||||
DBType: "double precision",
|
||||
Default: "GENERATED",
|
||||
AddressGid: column{
|
||||
Name: "address_gid",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: true,
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
LocationLongitude: column{
|
||||
Name: "location_longitude",
|
||||
DBType: "double precision",
|
||||
Default: "GENERATED",
|
||||
ClientUUID: column{
|
||||
Name: "client_uuid",
|
||||
DBType: "uuid",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ReporterPhoneCanSMS: column{
|
||||
Name: "reporter_phone_can_sms",
|
||||
DBType: "boolean",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
|
|
@ -310,6 +265,15 @@ var PublicreportReports = Table[
|
|||
ForeignTable: "address",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
PublicreportReportReportClientUUIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "publicreport.report.report_client_uuid_fkey",
|
||||
Columns: []string{"client_uuid"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "publicreport.client",
|
||||
ForeignColumns: []string{"uuid"},
|
||||
},
|
||||
PublicreportReportReportOrganizationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "publicreport.report.report_organization_id_fkey",
|
||||
|
|
@ -342,12 +306,6 @@ var PublicreportReports = Table[
|
|||
|
||||
type publicreportReportColumns struct {
|
||||
AddressRaw column
|
||||
AddressNumber column
|
||||
AddressStreet column
|
||||
AddressLocality column
|
||||
AddressRegion column
|
||||
AddressPostalCode column
|
||||
AddressCountry column
|
||||
AddressID column
|
||||
Created column
|
||||
Location column
|
||||
|
|
@ -366,13 +324,14 @@ type publicreportReportColumns struct {
|
|||
Reviewed column
|
||||
ReviewerID column
|
||||
Status column
|
||||
LocationLatitude column
|
||||
LocationLongitude column
|
||||
AddressGid column
|
||||
ClientUUID column
|
||||
ReporterPhoneCanSMS column
|
||||
}
|
||||
|
||||
func (c publicreportReportColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.AddressRaw, c.AddressNumber, c.AddressStreet, c.AddressLocality, c.AddressRegion, c.AddressPostalCode, c.AddressCountry, c.AddressID, c.Created, c.Location, c.H3cell, c.ID, c.LatlngAccuracyType, c.LatlngAccuracyValue, c.MapZoom, c.OrganizationID, c.PublicID, c.ReporterName, c.ReporterEmail, c.ReporterPhone, c.ReporterContactConsent, c.ReportType, c.Reviewed, c.ReviewerID, c.Status, c.LocationLatitude, c.LocationLongitude,
|
||||
c.AddressRaw, c.AddressID, c.Created, c.Location, c.H3cell, c.ID, c.LatlngAccuracyType, c.LatlngAccuracyValue, c.MapZoom, c.OrganizationID, c.PublicID, c.ReporterName, c.ReporterEmail, c.ReporterPhone, c.ReporterContactConsent, c.ReportType, c.Reviewed, c.ReviewerID, c.Status, c.AddressGid, c.ClientUUID, c.ReporterPhoneCanSMS,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -389,13 +348,14 @@ func (i publicreportReportIndexes) AsSlice() []index {
|
|||
|
||||
type publicreportReportForeignKeys struct {
|
||||
PublicreportReportReportAddressIDFkey foreignKey
|
||||
PublicreportReportReportClientUUIDFkey foreignKey
|
||||
PublicreportReportReportOrganizationIDFkey foreignKey
|
||||
PublicreportReportReportReviewerIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f publicreportReportForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.PublicreportReportReportAddressIDFkey, f.PublicreportReportReportOrganizationIDFkey, f.PublicreportReportReportReviewerIDFkey,
|
||||
f.PublicreportReportReportAddressIDFkey, f.PublicreportReportReportClientUUIDFkey, f.PublicreportReportReportOrganizationIDFkey, f.PublicreportReportReportReviewerIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -168,6 +168,15 @@ var PublicreportWaters = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Duration: column{
|
||||
Name: "duration",
|
||||
DBType: "publicreport.nuisancedurationtype",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: publicreportWaterIndexes{
|
||||
WaterPkey: index{
|
||||
|
|
@ -226,11 +235,12 @@ type publicreportWaterColumns struct {
|
|||
OwnerName column
|
||||
OwnerPhone column
|
||||
ReportID column
|
||||
Duration column
|
||||
}
|
||||
|
||||
func (c publicreportWaterColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.AccessComments, c.AccessGate, c.AccessFence, c.AccessLocked, c.AccessDog, c.AccessOther, c.Comments, c.IsReporterConfidential, c.IsReporterOwner, c.HasAdult, c.HasBackyardPermission, c.HasLarvae, c.HasPupae, c.OwnerEmail, c.OwnerName, c.OwnerPhone, c.ReportID,
|
||||
c.AccessComments, c.AccessGate, c.AccessFence, c.AccessLocked, c.AccessDog, c.AccessOther, c.Comments, c.IsReporterConfidential, c.IsReporterOwner, c.HasAdult, c.HasBackyardPermission, c.HasLarvae, c.HasPupae, c.OwnerEmail, c.OwnerName, c.OwnerPhone, c.ReportID, c.Duration,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -78,15 +78,6 @@ var Signals = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Title: column{
|
||||
Name: "title",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Type: column{
|
||||
Name: "type_",
|
||||
DBType: "public.signaltype",
|
||||
|
|
@ -96,6 +87,42 @@ var Signals = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
SiteID: column{
|
||||
Name: "site_id",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Location: column{
|
||||
Name: "location",
|
||||
DBType: "geometry",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
FeaturePoolFeatureID: column{
|
||||
Name: "feature_pool_feature_id",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ReportID: column{
|
||||
Name: "report_id",
|
||||
DBType: "integer",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: signalIndexes{
|
||||
SignalPkey: index{
|
||||
|
|
@ -115,6 +142,23 @@ var Signals = Table[
|
|||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
IdxSignalLocation: index{
|
||||
Type: "gist",
|
||||
Name: "idx_signal_location",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "location",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: false,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "signal_pkey",
|
||||
|
|
@ -140,6 +184,15 @@ var Signals = Table[
|
|||
ForeignTable: "user_",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
SignalSignalFeaturePoolFeatureIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "signal.signal_feature_pool_feature_id_fkey",
|
||||
Columns: []string{"feature_pool_feature_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "feature_pool",
|
||||
ForeignColumns: []string{"feature_id"},
|
||||
},
|
||||
SignalSignalOrganizationIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "signal.signal_organization_id_fkey",
|
||||
|
|
@ -149,48 +202,83 @@ var Signals = Table[
|
|||
ForeignTable: "organization",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
SignalSignalReportIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "signal.signal_report_id_fkey",
|
||||
Columns: []string{"report_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "publicreport.report",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
SignalSignalSiteIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "signal.signal_site_id_fkey",
|
||||
Columns: []string{"site_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "site",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
Checks: signalChecks{
|
||||
CheckExclusiveReference: check{
|
||||
constraint: constraint{
|
||||
Name: "check_exclusive_reference",
|
||||
Columns: []string{"feature_pool_feature_id", "report_id"},
|
||||
Comment: "",
|
||||
},
|
||||
Expression: "((feature_pool_feature_id IS NULL) OR (report_id IS NULL))",
|
||||
},
|
||||
},
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type signalColumns struct {
|
||||
Addressed column
|
||||
Addressor column
|
||||
Created column
|
||||
Creator column
|
||||
ID column
|
||||
OrganizationID column
|
||||
Species column
|
||||
Title column
|
||||
Type column
|
||||
Addressed column
|
||||
Addressor column
|
||||
Created column
|
||||
Creator column
|
||||
ID column
|
||||
OrganizationID column
|
||||
Species column
|
||||
Type column
|
||||
SiteID column
|
||||
Location column
|
||||
FeaturePoolFeatureID column
|
||||
ReportID column
|
||||
}
|
||||
|
||||
func (c signalColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.Addressed, c.Addressor, c.Created, c.Creator, c.ID, c.OrganizationID, c.Species, c.Title, c.Type,
|
||||
c.Addressed, c.Addressor, c.Created, c.Creator, c.ID, c.OrganizationID, c.Species, c.Type, c.SiteID, c.Location, c.FeaturePoolFeatureID, c.ReportID,
|
||||
}
|
||||
}
|
||||
|
||||
type signalIndexes struct {
|
||||
SignalPkey index
|
||||
SignalPkey index
|
||||
IdxSignalLocation index
|
||||
}
|
||||
|
||||
func (i signalIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.SignalPkey,
|
||||
i.SignalPkey, i.IdxSignalLocation,
|
||||
}
|
||||
}
|
||||
|
||||
type signalForeignKeys struct {
|
||||
SignalSignalAddressorFkey foreignKey
|
||||
SignalSignalCreatorFkey foreignKey
|
||||
SignalSignalOrganizationIDFkey foreignKey
|
||||
SignalSignalAddressorFkey foreignKey
|
||||
SignalSignalCreatorFkey foreignKey
|
||||
SignalSignalFeaturePoolFeatureIDFkey foreignKey
|
||||
SignalSignalOrganizationIDFkey foreignKey
|
||||
SignalSignalReportIDFkey foreignKey
|
||||
SignalSignalSiteIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f signalForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.SignalSignalAddressorFkey, f.SignalSignalCreatorFkey, f.SignalSignalOrganizationIDFkey,
|
||||
f.SignalSignalAddressorFkey, f.SignalSignalCreatorFkey, f.SignalSignalFeaturePoolFeatureIDFkey, f.SignalSignalOrganizationIDFkey, f.SignalSignalReportIDFkey, f.SignalSignalSiteIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -200,8 +288,12 @@ func (u signalUniques) AsSlice() []constraint {
|
|||
return []constraint{}
|
||||
}
|
||||
|
||||
type signalChecks struct{}
|
||||
type signalChecks struct {
|
||||
CheckExclusiveReference check
|
||||
}
|
||||
|
||||
func (c signalChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
return []check{
|
||||
c.CheckExclusiveReference,
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -15,15 +15,6 @@ var TileCachedImages = Table[
|
|||
Schema: "tile",
|
||||
Name: "cached_image",
|
||||
Columns: tileCachedImageColumns{
|
||||
ArcgisID: column{
|
||||
Name: "arcgis_id",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
X: column{
|
||||
Name: "x",
|
||||
DBType: "integer",
|
||||
|
|
@ -60,6 +51,15 @@ var TileCachedImages = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ServiceID: column{
|
||||
Name: "service_id",
|
||||
DBType: "integer",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: tileCachedImageIndexes{
|
||||
CachedImagePkey: index{
|
||||
|
|
@ -67,7 +67,7 @@ var TileCachedImages = Table[
|
|||
Name: "cached_image_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "arcgis_id",
|
||||
Name: "service_id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
|
|
@ -97,18 +97,18 @@ var TileCachedImages = Table[
|
|||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "cached_image_pkey",
|
||||
Columns: []string{"arcgis_id", "x", "y", "z"},
|
||||
Columns: []string{"service_id", "x", "y", "z"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: tileCachedImageForeignKeys{
|
||||
TileCachedImageCachedImageArcgisIDFkey: foreignKey{
|
||||
TileCachedImageCachedImageServiceIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "tile.cached_image.cached_image_arcgis_id_fkey",
|
||||
Columns: []string{"arcgis_id"},
|
||||
Name: "tile.cached_image.cached_image_service_id_fkey",
|
||||
Columns: []string{"service_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.service_map",
|
||||
ForeignColumns: []string{"arcgis_id"},
|
||||
ForeignTable: "tile.service",
|
||||
ForeignColumns: []string{"id"},
|
||||
},
|
||||
},
|
||||
|
||||
|
|
@ -116,16 +116,16 @@ var TileCachedImages = Table[
|
|||
}
|
||||
|
||||
type tileCachedImageColumns struct {
|
||||
ArcgisID column
|
||||
X column
|
||||
Y column
|
||||
Z column
|
||||
IsEmpty column
|
||||
X column
|
||||
Y column
|
||||
Z column
|
||||
IsEmpty column
|
||||
ServiceID column
|
||||
}
|
||||
|
||||
func (c tileCachedImageColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.ArcgisID, c.X, c.Y, c.Z, c.IsEmpty,
|
||||
c.X, c.Y, c.Z, c.IsEmpty, c.ServiceID,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -140,12 +140,12 @@ func (i tileCachedImageIndexes) AsSlice() []index {
|
|||
}
|
||||
|
||||
type tileCachedImageForeignKeys struct {
|
||||
TileCachedImageCachedImageArcgisIDFkey foreignKey
|
||||
TileCachedImageCachedImageServiceIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f tileCachedImageForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.TileCachedImageCachedImageArcgisIDFkey,
|
||||
f.TileCachedImageCachedImageServiceIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
156
db/dbinfo/tile.service.bob.go
Normal file
156
db/dbinfo/tile.service.bob.go
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
// Code generated by BobGen psql v0.42.5. DO NOT EDIT.
|
||||
// This file is meant to be re-generated in place and/or deleted at any time.
|
||||
|
||||
package dbinfo
|
||||
|
||||
import "github.com/aarondl/opt/null"
|
||||
|
||||
var TileServices = Table[
|
||||
tileServiceColumns,
|
||||
tileServiceIndexes,
|
||||
tileServiceForeignKeys,
|
||||
tileServiceUniques,
|
||||
tileServiceChecks,
|
||||
]{
|
||||
Schema: "tile",
|
||||
Name: "service",
|
||||
Columns: tileServiceColumns{
|
||||
ID: column{
|
||||
Name: "id",
|
||||
DBType: "integer",
|
||||
Default: "nextval('tile.service_id_seq'::regclass)",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Name: column{
|
||||
Name: "name",
|
||||
DBType: "text",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
ArcgisID: column{
|
||||
Name: "arcgis_id",
|
||||
DBType: "text",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: tileServiceIndexes{
|
||||
ServicePkey: index{
|
||||
Type: "btree",
|
||||
Name: "service_pkey",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "id",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
ServiceNameUnique: index{
|
||||
Type: "btree",
|
||||
Name: "service_name_unique",
|
||||
Columns: []indexColumn{
|
||||
{
|
||||
Name: "name",
|
||||
Desc: null.FromCond(false, true),
|
||||
IsExpression: false,
|
||||
},
|
||||
},
|
||||
Unique: true,
|
||||
Comment: "",
|
||||
NullsFirst: []bool{false},
|
||||
NullsDistinct: false,
|
||||
Where: "",
|
||||
Include: []string{},
|
||||
},
|
||||
},
|
||||
PrimaryKey: &constraint{
|
||||
Name: "service_pkey",
|
||||
Columns: []string{"id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignKeys: tileServiceForeignKeys{
|
||||
TileServiceServiceArcgisIDFkey: foreignKey{
|
||||
constraint: constraint{
|
||||
Name: "tile.service.service_arcgis_id_fkey",
|
||||
Columns: []string{"arcgis_id"},
|
||||
Comment: "",
|
||||
},
|
||||
ForeignTable: "arcgis.service_map",
|
||||
ForeignColumns: []string{"arcgis_id"},
|
||||
},
|
||||
},
|
||||
Uniques: tileServiceUniques{
|
||||
ServiceNameUnique: constraint{
|
||||
Name: "service_name_unique",
|
||||
Columns: []string{"name"},
|
||||
Comment: "",
|
||||
},
|
||||
},
|
||||
|
||||
Comment: "",
|
||||
}
|
||||
|
||||
type tileServiceColumns struct {
|
||||
ID column
|
||||
Name column
|
||||
ArcgisID column
|
||||
}
|
||||
|
||||
func (c tileServiceColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.ID, c.Name, c.ArcgisID,
|
||||
}
|
||||
}
|
||||
|
||||
type tileServiceIndexes struct {
|
||||
ServicePkey index
|
||||
ServiceNameUnique index
|
||||
}
|
||||
|
||||
func (i tileServiceIndexes) AsSlice() []index {
|
||||
return []index{
|
||||
i.ServicePkey, i.ServiceNameUnique,
|
||||
}
|
||||
}
|
||||
|
||||
type tileServiceForeignKeys struct {
|
||||
TileServiceServiceArcgisIDFkey foreignKey
|
||||
}
|
||||
|
||||
func (f tileServiceForeignKeys) AsSlice() []foreignKey {
|
||||
return []foreignKey{
|
||||
f.TileServiceServiceArcgisIDFkey,
|
||||
}
|
||||
}
|
||||
|
||||
type tileServiceUniques struct {
|
||||
ServiceNameUnique constraint
|
||||
}
|
||||
|
||||
func (u tileServiceUniques) AsSlice() []constraint {
|
||||
return []constraint{
|
||||
u.ServiceNameUnique,
|
||||
}
|
||||
}
|
||||
|
||||
type tileServiceChecks struct{}
|
||||
|
||||
func (c tileServiceChecks) AsSlice() []check {
|
||||
return []check{}
|
||||
}
|
||||
|
|
@ -132,6 +132,42 @@ var Users = Table[
|
|||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
Avatar: column{
|
||||
Name: "avatar",
|
||||
DBType: "uuid",
|
||||
Default: "NULL",
|
||||
Comment: "",
|
||||
Nullable: true,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
IsActive: column{
|
||||
Name: "is_active",
|
||||
DBType: "boolean",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
IsDronePilot: column{
|
||||
Name: "is_drone_pilot",
|
||||
DBType: "boolean",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
IsWarrant: column{
|
||||
Name: "is_warrant",
|
||||
DBType: "boolean",
|
||||
Default: "",
|
||||
Comment: "",
|
||||
Nullable: false,
|
||||
Generated: false,
|
||||
AutoIncr: false,
|
||||
},
|
||||
},
|
||||
Indexes: userIndexes{
|
||||
UserPkey: index{
|
||||
|
|
@ -210,11 +246,15 @@ type userColumns struct {
|
|||
PasswordHashType column
|
||||
PasswordHash column
|
||||
Role column
|
||||
Avatar column
|
||||
IsActive column
|
||||
IsDronePilot column
|
||||
IsWarrant column
|
||||
}
|
||||
|
||||
func (c userColumns) AsSlice() []column {
|
||||
return []column{
|
||||
c.ID, c.ArcgisAccessToken, c.ArcgisLicense, c.ArcgisRefreshToken, c.ArcgisRefreshTokenExpires, c.ArcgisRole, c.DisplayName, c.Email, c.OrganizationID, c.Username, c.PasswordHashType, c.PasswordHash, c.Role,
|
||||
c.ID, c.ArcgisAccessToken, c.ArcgisLicense, c.ArcgisRefreshToken, c.ArcgisRefreshTokenExpires, c.ArcgisRole, c.DisplayName, c.Email, c.OrganizationID, c.Username, c.PasswordHashType, c.PasswordHash, c.Role, c.Avatar, c.IsActive, c.IsDronePilot, c.IsWarrant,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -8,270 +8,6 @@ import (
|
|||
"fmt"
|
||||
)
|
||||
|
||||
// Enum values for ArcgisFieldtype
|
||||
const (
|
||||
ArcgisFieldtypeEsrifieldtypesmallinteger ArcgisFieldtype = "esriFieldTypeSmallInteger"
|
||||
ArcgisFieldtypeEsrifieldtypeinteger ArcgisFieldtype = "esriFieldTypeInteger"
|
||||
ArcgisFieldtypeEsrifieldtypesingle ArcgisFieldtype = "esriFieldTypeSingle"
|
||||
ArcgisFieldtypeEsrifieldtypedouble ArcgisFieldtype = "esriFieldTypeDouble"
|
||||
ArcgisFieldtypeEsrifieldtypestring ArcgisFieldtype = "esriFieldTypeString"
|
||||
ArcgisFieldtypeEsrifieldtypedate ArcgisFieldtype = "esriFieldTypeDate"
|
||||
ArcgisFieldtypeEsrifieldtypeoid ArcgisFieldtype = "esriFieldTypeOID"
|
||||
ArcgisFieldtypeEsrifieldtypegeometry ArcgisFieldtype = "esriFieldTypeGeometry"
|
||||
ArcgisFieldtypeEsrifieldtypeblob ArcgisFieldtype = "esriFieldTypeBlob"
|
||||
ArcgisFieldtypeEsrifieldtyperaster ArcgisFieldtype = "esriFieldTypeRaster"
|
||||
ArcgisFieldtypeEsrifieldtypeguid ArcgisFieldtype = "esriFieldTypeGUID"
|
||||
ArcgisFieldtypeEsrifieldtypeglobalid ArcgisFieldtype = "esriFieldTypeGlobalID"
|
||||
ArcgisFieldtypeEsrifieldtypexml ArcgisFieldtype = "esriFieldTypeXML"
|
||||
ArcgisFieldtypeEsrifieldtypebiginteger ArcgisFieldtype = "esriFieldTypeBigInteger"
|
||||
)
|
||||
|
||||
func AllArcgisFieldtype() []ArcgisFieldtype {
|
||||
return []ArcgisFieldtype{
|
||||
ArcgisFieldtypeEsrifieldtypesmallinteger,
|
||||
ArcgisFieldtypeEsrifieldtypeinteger,
|
||||
ArcgisFieldtypeEsrifieldtypesingle,
|
||||
ArcgisFieldtypeEsrifieldtypedouble,
|
||||
ArcgisFieldtypeEsrifieldtypestring,
|
||||
ArcgisFieldtypeEsrifieldtypedate,
|
||||
ArcgisFieldtypeEsrifieldtypeoid,
|
||||
ArcgisFieldtypeEsrifieldtypegeometry,
|
||||
ArcgisFieldtypeEsrifieldtypeblob,
|
||||
ArcgisFieldtypeEsrifieldtyperaster,
|
||||
ArcgisFieldtypeEsrifieldtypeguid,
|
||||
ArcgisFieldtypeEsrifieldtypeglobalid,
|
||||
ArcgisFieldtypeEsrifieldtypexml,
|
||||
ArcgisFieldtypeEsrifieldtypebiginteger,
|
||||
}
|
||||
}
|
||||
|
||||
type ArcgisFieldtype string
|
||||
|
||||
func (e ArcgisFieldtype) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e ArcgisFieldtype) Valid() bool {
|
||||
switch e {
|
||||
case ArcgisFieldtypeEsrifieldtypesmallinteger,
|
||||
ArcgisFieldtypeEsrifieldtypeinteger,
|
||||
ArcgisFieldtypeEsrifieldtypesingle,
|
||||
ArcgisFieldtypeEsrifieldtypedouble,
|
||||
ArcgisFieldtypeEsrifieldtypestring,
|
||||
ArcgisFieldtypeEsrifieldtypedate,
|
||||
ArcgisFieldtypeEsrifieldtypeoid,
|
||||
ArcgisFieldtypeEsrifieldtypegeometry,
|
||||
ArcgisFieldtypeEsrifieldtypeblob,
|
||||
ArcgisFieldtypeEsrifieldtyperaster,
|
||||
ArcgisFieldtypeEsrifieldtypeguid,
|
||||
ArcgisFieldtypeEsrifieldtypeglobalid,
|
||||
ArcgisFieldtypeEsrifieldtypexml,
|
||||
ArcgisFieldtypeEsrifieldtypebiginteger:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e ArcgisFieldtype) All() []ArcgisFieldtype {
|
||||
return AllArcgisFieldtype()
|
||||
}
|
||||
|
||||
func (e ArcgisFieldtype) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisFieldtype) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e ArcgisFieldtype) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisFieldtype) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e ArcgisFieldtype) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisFieldtype) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = ArcgisFieldtype(x)
|
||||
case []byte:
|
||||
*e = ArcgisFieldtype(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into ArcgisFieldtype")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid ArcgisFieldtype value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enum values for ArcgisMappingdestinationaddress
|
||||
const (
|
||||
ArcgisMappingdestinationaddressCountry ArcgisMappingdestinationaddress = "country"
|
||||
ArcgisMappingdestinationaddressLocality ArcgisMappingdestinationaddress = "locality"
|
||||
ArcgisMappingdestinationaddressPostalCode ArcgisMappingdestinationaddress = "postal_code"
|
||||
ArcgisMappingdestinationaddressStreet ArcgisMappingdestinationaddress = "street"
|
||||
ArcgisMappingdestinationaddressUnit ArcgisMappingdestinationaddress = "unit"
|
||||
)
|
||||
|
||||
func AllArcgisMappingdestinationaddress() []ArcgisMappingdestinationaddress {
|
||||
return []ArcgisMappingdestinationaddress{
|
||||
ArcgisMappingdestinationaddressCountry,
|
||||
ArcgisMappingdestinationaddressLocality,
|
||||
ArcgisMappingdestinationaddressPostalCode,
|
||||
ArcgisMappingdestinationaddressStreet,
|
||||
ArcgisMappingdestinationaddressUnit,
|
||||
}
|
||||
}
|
||||
|
||||
type ArcgisMappingdestinationaddress string
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) Valid() bool {
|
||||
switch e {
|
||||
case ArcgisMappingdestinationaddressCountry,
|
||||
ArcgisMappingdestinationaddressLocality,
|
||||
ArcgisMappingdestinationaddressPostalCode,
|
||||
ArcgisMappingdestinationaddressStreet,
|
||||
ArcgisMappingdestinationaddressUnit:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e ArcgisMappingdestinationaddress) All() []ArcgisMappingdestinationaddress {
|
||||
return AllArcgisMappingdestinationaddress()
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationaddress) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationaddress) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationaddress) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationaddress) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = ArcgisMappingdestinationaddress(x)
|
||||
case []byte:
|
||||
*e = ArcgisMappingdestinationaddress(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into ArcgisMappingdestinationaddress")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid ArcgisMappingdestinationaddress value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enum values for ArcgisMappingdestinationparcel
|
||||
const (
|
||||
ArcgisMappingdestinationparcelApn ArcgisMappingdestinationparcel = "apn"
|
||||
ArcgisMappingdestinationparcelDescription ArcgisMappingdestinationparcel = "description"
|
||||
)
|
||||
|
||||
func AllArcgisMappingdestinationparcel() []ArcgisMappingdestinationparcel {
|
||||
return []ArcgisMappingdestinationparcel{
|
||||
ArcgisMappingdestinationparcelApn,
|
||||
ArcgisMappingdestinationparcelDescription,
|
||||
}
|
||||
}
|
||||
|
||||
type ArcgisMappingdestinationparcel string
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) Valid() bool {
|
||||
switch e {
|
||||
case ArcgisMappingdestinationparcelApn,
|
||||
ArcgisMappingdestinationparcelDescription:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e ArcgisMappingdestinationparcel) All() []ArcgisMappingdestinationparcel {
|
||||
return AllArcgisMappingdestinationparcel()
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationparcel) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationparcel) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e ArcgisMappingdestinationparcel) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *ArcgisMappingdestinationparcel) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = ArcgisMappingdestinationparcel(x)
|
||||
case []byte:
|
||||
*e = ArcgisMappingdestinationparcel(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into ArcgisMappingdestinationparcel")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid ArcgisMappingdestinationparcel value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enum values for Arcgislicensetype
|
||||
const (
|
||||
ArcgislicensetypeAdvancedut Arcgislicensetype = "advancedUT"
|
||||
|
|
@ -846,26 +582,44 @@ func (e *CommsTextorigin) Scan(value any) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// Enum values for Countrytype
|
||||
// Enum values for Communicationlogentry
|
||||
const (
|
||||
CountrytypeUsa Countrytype = "usa"
|
||||
CommunicationlogentryCreated Communicationlogentry = "created"
|
||||
CommunicationlogentryStatusU2eclosed Communicationlogentry = "status.closed"
|
||||
CommunicationlogentryStatusU2einvalidated Communicationlogentry = "status.invalidated"
|
||||
CommunicationlogentryStatusU2eopened Communicationlogentry = "status.opened"
|
||||
CommunicationlogentryStatusU2epending Communicationlogentry = "status.pending"
|
||||
CommunicationlogentryStatusU2epossibleIssue Communicationlogentry = "status.possible-issue"
|
||||
CommunicationlogentryStatusU2epossibleResolved Communicationlogentry = "status.possible-resolved"
|
||||
)
|
||||
|
||||
func AllCountrytype() []Countrytype {
|
||||
return []Countrytype{
|
||||
CountrytypeUsa,
|
||||
func AllCommunicationlogentry() []Communicationlogentry {
|
||||
return []Communicationlogentry{
|
||||
CommunicationlogentryCreated,
|
||||
CommunicationlogentryStatusU2eclosed,
|
||||
CommunicationlogentryStatusU2einvalidated,
|
||||
CommunicationlogentryStatusU2eopened,
|
||||
CommunicationlogentryStatusU2epending,
|
||||
CommunicationlogentryStatusU2epossibleIssue,
|
||||
CommunicationlogentryStatusU2epossibleResolved,
|
||||
}
|
||||
}
|
||||
|
||||
type Countrytype string
|
||||
type Communicationlogentry string
|
||||
|
||||
func (e Countrytype) String() string {
|
||||
func (e Communicationlogentry) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e Countrytype) Valid() bool {
|
||||
func (e Communicationlogentry) Valid() bool {
|
||||
switch e {
|
||||
case CountrytypeUsa:
|
||||
case CommunicationlogentryCreated,
|
||||
CommunicationlogentryStatusU2eclosed,
|
||||
CommunicationlogentryStatusU2einvalidated,
|
||||
CommunicationlogentryStatusU2eopened,
|
||||
CommunicationlogentryStatusU2epending,
|
||||
CommunicationlogentryStatusU2epossibleIssue,
|
||||
CommunicationlogentryStatusU2epossibleResolved:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
|
|
@ -873,44 +627,226 @@ func (e Countrytype) Valid() bool {
|
|||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e Countrytype) All() []Countrytype {
|
||||
return AllCountrytype()
|
||||
func (e Communicationlogentry) All() []Communicationlogentry {
|
||||
return AllCommunicationlogentry()
|
||||
}
|
||||
|
||||
func (e Countrytype) MarshalText() ([]byte, error) {
|
||||
func (e Communicationlogentry) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *Countrytype) UnmarshalText(text []byte) error {
|
||||
func (e *Communicationlogentry) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e Countrytype) MarshalBinary() ([]byte, error) {
|
||||
func (e Communicationlogentry) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *Countrytype) UnmarshalBinary(data []byte) error {
|
||||
func (e *Communicationlogentry) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e Countrytype) Value() (driver.Value, error) {
|
||||
func (e Communicationlogentry) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *Countrytype) Scan(value any) error {
|
||||
func (e *Communicationlogentry) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = Countrytype(x)
|
||||
*e = Communicationlogentry(x)
|
||||
case []byte:
|
||||
*e = Countrytype(x)
|
||||
*e = Communicationlogentry(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into Countrytype")
|
||||
return fmt.Errorf("cannot nil into Communicationlogentry")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid Countrytype value: %s", *e)
|
||||
return fmt.Errorf("invalid Communicationlogentry value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enum values for Communicationstatus
|
||||
const (
|
||||
CommunicationstatusClosed Communicationstatus = "closed"
|
||||
CommunicationstatusInvalid Communicationstatus = "invalid"
|
||||
CommunicationstatusNew Communicationstatus = "new"
|
||||
CommunicationstatusOpened Communicationstatus = "opened"
|
||||
CommunicationstatusPending Communicationstatus = "pending"
|
||||
CommunicationstatusPossibleIssue Communicationstatus = "possible-issue"
|
||||
CommunicationstatusPossibleResolved Communicationstatus = "possible-resolved"
|
||||
CommunicationstatusResolved Communicationstatus = "resolved"
|
||||
)
|
||||
|
||||
func AllCommunicationstatus() []Communicationstatus {
|
||||
return []Communicationstatus{
|
||||
CommunicationstatusClosed,
|
||||
CommunicationstatusInvalid,
|
||||
CommunicationstatusNew,
|
||||
CommunicationstatusOpened,
|
||||
CommunicationstatusPending,
|
||||
CommunicationstatusPossibleIssue,
|
||||
CommunicationstatusPossibleResolved,
|
||||
CommunicationstatusResolved,
|
||||
}
|
||||
}
|
||||
|
||||
type Communicationstatus string
|
||||
|
||||
func (e Communicationstatus) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e Communicationstatus) Valid() bool {
|
||||
switch e {
|
||||
case CommunicationstatusClosed,
|
||||
CommunicationstatusInvalid,
|
||||
CommunicationstatusNew,
|
||||
CommunicationstatusOpened,
|
||||
CommunicationstatusPending,
|
||||
CommunicationstatusPossibleIssue,
|
||||
CommunicationstatusPossibleResolved,
|
||||
CommunicationstatusResolved:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e Communicationstatus) All() []Communicationstatus {
|
||||
return AllCommunicationstatus()
|
||||
}
|
||||
|
||||
func (e Communicationstatus) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *Communicationstatus) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e Communicationstatus) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *Communicationstatus) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e Communicationstatus) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *Communicationstatus) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = Communicationstatus(x)
|
||||
case []byte:
|
||||
*e = Communicationstatus(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into Communicationstatus")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid Communicationstatus value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enum values for Communicationstatustype
|
||||
const (
|
||||
CommunicationstatustypeClosed Communicationstatustype = "closed"
|
||||
CommunicationstatustypeInvalid Communicationstatustype = "invalid"
|
||||
CommunicationstatustypeNew Communicationstatustype = "new"
|
||||
CommunicationstatustypeOpened Communicationstatustype = "opened"
|
||||
CommunicationstatustypePending Communicationstatustype = "pending"
|
||||
CommunicationstatustypePossibleIssue Communicationstatustype = "possible-issue"
|
||||
CommunicationstatustypePossibleResolved Communicationstatustype = "possible-resolved"
|
||||
CommunicationstatustypeResolved Communicationstatustype = "resolved"
|
||||
)
|
||||
|
||||
func AllCommunicationstatustype() []Communicationstatustype {
|
||||
return []Communicationstatustype{
|
||||
CommunicationstatustypeClosed,
|
||||
CommunicationstatustypeInvalid,
|
||||
CommunicationstatustypeNew,
|
||||
CommunicationstatustypeOpened,
|
||||
CommunicationstatustypePending,
|
||||
CommunicationstatustypePossibleIssue,
|
||||
CommunicationstatustypePossibleResolved,
|
||||
CommunicationstatustypeResolved,
|
||||
}
|
||||
}
|
||||
|
||||
type Communicationstatustype string
|
||||
|
||||
func (e Communicationstatustype) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e Communicationstatustype) Valid() bool {
|
||||
switch e {
|
||||
case CommunicationstatustypeClosed,
|
||||
CommunicationstatustypeInvalid,
|
||||
CommunicationstatustypeNew,
|
||||
CommunicationstatustypeOpened,
|
||||
CommunicationstatustypePending,
|
||||
CommunicationstatustypePossibleIssue,
|
||||
CommunicationstatustypePossibleResolved,
|
||||
CommunicationstatustypeResolved:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e Communicationstatustype) All() []Communicationstatustype {
|
||||
return AllCommunicationstatustype()
|
||||
}
|
||||
|
||||
func (e Communicationstatustype) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *Communicationstatustype) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e Communicationstatustype) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *Communicationstatustype) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e Communicationstatustype) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *Communicationstatustype) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = Communicationstatustype(x)
|
||||
case []byte:
|
||||
*e = Communicationstatustype(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into Communicationstatustype")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid Communicationstatustype value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
|
|
@ -1304,6 +1240,7 @@ const (
|
|||
JobtypeEmailSend Jobtype = "email-send"
|
||||
JobtypeTextRespond Jobtype = "text-respond"
|
||||
JobtypeTextSend Jobtype = "text-send"
|
||||
JobtypeComplianceMailerSend Jobtype = "compliance-mailer-send"
|
||||
)
|
||||
|
||||
func AllJobtype() []Jobtype {
|
||||
|
|
@ -1315,6 +1252,7 @@ func AllJobtype() []Jobtype {
|
|||
JobtypeEmailSend,
|
||||
JobtypeTextRespond,
|
||||
JobtypeTextSend,
|
||||
JobtypeComplianceMailerSend,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -1332,7 +1270,8 @@ func (e Jobtype) Valid() bool {
|
|||
JobtypeLabelStudioAudioCreate,
|
||||
JobtypeEmailSend,
|
||||
JobtypeTextRespond,
|
||||
JobtypeTextSend:
|
||||
JobtypeTextSend,
|
||||
JobtypeComplianceMailerSend:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
|
|
@ -1881,6 +1820,85 @@ func (e *PublicreportNuisancedurationtype) Scan(value any) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// Enum values for PublicreportPermissionaccess
|
||||
const (
|
||||
PublicreportPermissionaccessDenied PublicreportPermissionaccess = "denied"
|
||||
PublicreportPermissionaccessGranted PublicreportPermissionaccess = "granted"
|
||||
PublicreportPermissionaccessUnselected PublicreportPermissionaccess = "unselected"
|
||||
PublicreportPermissionaccessWithOwner PublicreportPermissionaccess = "with-owner"
|
||||
)
|
||||
|
||||
func AllPublicreportPermissionaccess() []PublicreportPermissionaccess {
|
||||
return []PublicreportPermissionaccess{
|
||||
PublicreportPermissionaccessDenied,
|
||||
PublicreportPermissionaccessGranted,
|
||||
PublicreportPermissionaccessUnselected,
|
||||
PublicreportPermissionaccessWithOwner,
|
||||
}
|
||||
}
|
||||
|
||||
type PublicreportPermissionaccess string
|
||||
|
||||
func (e PublicreportPermissionaccess) String() string {
|
||||
return string(e)
|
||||
}
|
||||
|
||||
func (e PublicreportPermissionaccess) Valid() bool {
|
||||
switch e {
|
||||
case PublicreportPermissionaccessDenied,
|
||||
PublicreportPermissionaccessGranted,
|
||||
PublicreportPermissionaccessUnselected,
|
||||
PublicreportPermissionaccessWithOwner:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
// useful when testing in other packages
|
||||
func (e PublicreportPermissionaccess) All() []PublicreportPermissionaccess {
|
||||
return AllPublicreportPermissionaccess()
|
||||
}
|
||||
|
||||
func (e PublicreportPermissionaccess) MarshalText() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *PublicreportPermissionaccess) UnmarshalText(text []byte) error {
|
||||
return e.Scan(text)
|
||||
}
|
||||
|
||||
func (e PublicreportPermissionaccess) MarshalBinary() ([]byte, error) {
|
||||
return []byte(e), nil
|
||||
}
|
||||
|
||||
func (e *PublicreportPermissionaccess) UnmarshalBinary(data []byte) error {
|
||||
return e.Scan(data)
|
||||
}
|
||||
|
||||
func (e PublicreportPermissionaccess) Value() (driver.Value, error) {
|
||||
return string(e), nil
|
||||
}
|
||||
|
||||
func (e *PublicreportPermissionaccess) Scan(value any) error {
|
||||
switch x := value.(type) {
|
||||
case string:
|
||||
*e = PublicreportPermissionaccess(x)
|
||||
case []byte:
|
||||
*e = PublicreportPermissionaccess(x)
|
||||
case nil:
|
||||
return fmt.Errorf("cannot nil into PublicreportPermissionaccess")
|
||||
default:
|
||||
return fmt.Errorf("cannot scan type %T: %v", value, value)
|
||||
}
|
||||
|
||||
if !e.Valid() {
|
||||
return fmt.Errorf("invalid PublicreportPermissionaccess value: %s", *e)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enum values for PublicreportPoolsourceduration
|
||||
const (
|
||||
PublicreportPoolsourcedurationNone PublicreportPoolsourceduration = "none"
|
||||
|
|
@ -2138,14 +2156,16 @@ func (e *PublicreportReportstatustype) Scan(value any) error {
|
|||
|
||||
// Enum values for PublicreportReporttype
|
||||
const (
|
||||
PublicreportReporttypeNuisance PublicreportReporttype = "nuisance"
|
||||
PublicreportReporttypeWater PublicreportReporttype = "water"
|
||||
PublicreportReporttypeNuisance PublicreportReporttype = "nuisance"
|
||||
PublicreportReporttypeWater PublicreportReporttype = "water"
|
||||
PublicreportReporttypeCompliance PublicreportReporttype = "compliance"
|
||||
)
|
||||
|
||||
func AllPublicreportReporttype() []PublicreportReporttype {
|
||||
return []PublicreportReporttype{
|
||||
PublicreportReporttypeNuisance,
|
||||
PublicreportReporttypeWater,
|
||||
PublicreportReporttypeCompliance,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -2158,7 +2178,8 @@ func (e PublicreportReporttype) String() string {
|
|||
func (e PublicreportReporttype) Valid() bool {
|
||||
switch e {
|
||||
case PublicreportReporttypeNuisance,
|
||||
PublicreportReporttypeWater:
|
||||
PublicreportReporttypeWater,
|
||||
PublicreportReporttypeCompliance:
|
||||
return true
|
||||
default:
|
||||
return false
|
||||
|
|
|
|||
|
|
@ -58,7 +58,7 @@ func SaveOrUpdateContainerRelate(ctx context.Context, org *models.Organization,
|
|||
}
|
||||
return []SqlParam{
|
||||
Uint("p_objectid", row.ObjectID),
|
||||
Int32("p_organization_id", org.ID),
|
||||
Int32("p_organization_id", org.ID()),
|
||||
UUID("p_globalid", row.GlobalID),
|
||||
String("p_created_user", row.CreatedUser),
|
||||
Timestamp("p_created_date", row.CreatedDate),
|
||||
|
|
|
|||
42
db/gen/nidus-sync/arcgis/enum/fieldtype.go
Normal file
42
db/gen/nidus-sync/arcgis/enum/fieldtype.go
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
//
|
||||
// Code generated by go-jet DO NOT EDIT.
|
||||
//
|
||||
// WARNING: Changes to this file may cause incorrect behavior
|
||||
// and will be lost if the code is regenerated
|
||||
//
|
||||
|
||||
package enum
|
||||
|
||||
import "github.com/Gleipnir-Technology/jet/postgres"
|
||||
|
||||
var Fieldtype = &struct {
|
||||
EsriFieldTypeSmallInteger postgres.StringExpression
|
||||
EsriFieldTypeInteger postgres.StringExpression
|
||||
EsriFieldTypeSingle postgres.StringExpression
|
||||
EsriFieldTypeDouble postgres.StringExpression
|
||||
EsriFieldTypeString postgres.StringExpression
|
||||
EsriFieldTypeDate postgres.StringExpression
|
||||
EsriFieldTypeOID postgres.StringExpression
|
||||
EsriFieldTypeGeometry postgres.StringExpression
|
||||
EsriFieldTypeBlob postgres.StringExpression
|
||||
EsriFieldTypeRaster postgres.StringExpression
|
||||
EsriFieldTypeGUID postgres.StringExpression
|
||||
EsriFieldTypeGlobalID postgres.StringExpression
|
||||
EsriFieldTypeXML postgres.StringExpression
|
||||
EsriFieldTypeBigInteger postgres.StringExpression
|
||||
}{
|
||||
EsriFieldTypeSmallInteger: postgres.NewEnumValue("esriFieldTypeSmallInteger"),
|
||||
EsriFieldTypeInteger: postgres.NewEnumValue("esriFieldTypeInteger"),
|
||||
EsriFieldTypeSingle: postgres.NewEnumValue("esriFieldTypeSingle"),
|
||||
EsriFieldTypeDouble: postgres.NewEnumValue("esriFieldTypeDouble"),
|
||||
EsriFieldTypeString: postgres.NewEnumValue("esriFieldTypeString"),
|
||||
EsriFieldTypeDate: postgres.NewEnumValue("esriFieldTypeDate"),
|
||||
EsriFieldTypeOID: postgres.NewEnumValue("esriFieldTypeOID"),
|
||||
EsriFieldTypeGeometry: postgres.NewEnumValue("esriFieldTypeGeometry"),
|
||||
EsriFieldTypeBlob: postgres.NewEnumValue("esriFieldTypeBlob"),
|
||||
EsriFieldTypeRaster: postgres.NewEnumValue("esriFieldTypeRaster"),
|
||||
EsriFieldTypeGUID: postgres.NewEnumValue("esriFieldTypeGUID"),
|
||||
EsriFieldTypeGlobalID: postgres.NewEnumValue("esriFieldTypeGlobalID"),
|
||||
EsriFieldTypeXML: postgres.NewEnumValue("esriFieldTypeXML"),
|
||||
EsriFieldTypeBigInteger: postgres.NewEnumValue("esriFieldTypeBigInteger"),
|
||||
}
|
||||
24
db/gen/nidus-sync/arcgis/enum/mappingdestinationaddress.go
Normal file
24
db/gen/nidus-sync/arcgis/enum/mappingdestinationaddress.go
Normal file
|
|
@ -0,0 +1,24 @@
|
|||
//
|
||||
// Code generated by go-jet DO NOT EDIT.
|
||||
//
|
||||
// WARNING: Changes to this file may cause incorrect behavior
|
||||
// and will be lost if the code is regenerated
|
||||
//
|
||||
|
||||
package enum
|
||||
|
||||
import "github.com/Gleipnir-Technology/jet/postgres"
|
||||
|
||||
var Mappingdestinationaddress = &struct {
|
||||
Country postgres.StringExpression
|
||||
Locality postgres.StringExpression
|
||||
PostalCode postgres.StringExpression
|
||||
Street postgres.StringExpression
|
||||
Unit postgres.StringExpression
|
||||
}{
|
||||
Country: postgres.NewEnumValue("country"),
|
||||
Locality: postgres.NewEnumValue("locality"),
|
||||
PostalCode: postgres.NewEnumValue("postal_code"),
|
||||
Street: postgres.NewEnumValue("street"),
|
||||
Unit: postgres.NewEnumValue("unit"),
|
||||
}
|
||||
18
db/gen/nidus-sync/arcgis/enum/mappingdestinationparcel.go
Normal file
18
db/gen/nidus-sync/arcgis/enum/mappingdestinationparcel.go
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
//
|
||||
// Code generated by go-jet DO NOT EDIT.
|
||||
//
|
||||
// WARNING: Changes to this file may cause incorrect behavior
|
||||
// and will be lost if the code is regenerated
|
||||
//
|
||||
|
||||
package enum
|
||||
|
||||
import "github.com/Gleipnir-Technology/jet/postgres"
|
||||
|
||||
var Mappingdestinationparcel = &struct {
|
||||
Apn postgres.StringExpression
|
||||
Description postgres.StringExpression
|
||||
}{
|
||||
Apn: postgres.NewEnumValue("apn"),
|
||||
Description: postgres.NewEnumValue("description"),
|
||||
}
|
||||
19
db/gen/nidus-sync/arcgis/model/account.go
Normal file
19
db/gen/nidus-sync/arcgis/model/account.go
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
//
|
||||
// Code generated by go-jet DO NOT EDIT.
|
||||
//
|
||||
// WARNING: Changes to this file may cause incorrect behavior
|
||||
// and will be lost if the code is regenerated
|
||||
//
|
||||
|
||||
package model
|
||||
|
||||
type Account struct {
|
||||
ID string `sql:"primary_key"`
|
||||
Name string
|
||||
OrganizationID int32
|
||||
URLFeatures *string
|
||||
URLInsights *string
|
||||
URLGeometry *string
|
||||
URLNotebooks *string
|
||||
URLTiles *string
|
||||
}
|
||||
16
db/gen/nidus-sync/arcgis/model/address_mapping.go
Normal file
16
db/gen/nidus-sync/arcgis/model/address_mapping.go
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
//
|
||||
// Code generated by go-jet DO NOT EDIT.
|
||||
//
|
||||
// WARNING: Changes to this file may cause incorrect behavior
|
||||
// and will be lost if the code is regenerated
|
||||
//
|
||||
|
||||
package model
|
||||
|
||||
type AddressMapping struct {
|
||||
Destination Mappingdestinationaddress `sql:"primary_key"`
|
||||
LayerFeatureServiceItemID string
|
||||
LayerIndex int32
|
||||
LayerFieldName string
|
||||
OrganizationID int32 `sql:"primary_key"`
|
||||
}
|
||||
97
db/gen/nidus-sync/arcgis/model/fieldtype.go
Normal file
97
db/gen/nidus-sync/arcgis/model/fieldtype.go
Normal file
|
|
@ -0,0 +1,97 @@
|
|||
//
|
||||
// Code generated by go-jet DO NOT EDIT.
|
||||
//
|
||||
// WARNING: Changes to this file may cause incorrect behavior
|
||||
// and will be lost if the code is regenerated
|
||||
//
|
||||
|
||||
package model
|
||||
|
||||
import "errors"
|
||||
|
||||
type Fieldtype string
|
||||
|
||||
const (
|
||||
Fieldtype_EsriFieldTypeSmallInteger Fieldtype = "esriFieldTypeSmallInteger"
|
||||
Fieldtype_EsriFieldTypeInteger Fieldtype = "esriFieldTypeInteger"
|
||||
Fieldtype_EsriFieldTypeSingle Fieldtype = "esriFieldTypeSingle"
|
||||
Fieldtype_EsriFieldTypeDouble Fieldtype = "esriFieldTypeDouble"
|
||||
Fieldtype_EsriFieldTypeString Fieldtype = "esriFieldTypeString"
|
||||
Fieldtype_EsriFieldTypeDate Fieldtype = "esriFieldTypeDate"
|
||||
Fieldtype_EsriFieldTypeOID Fieldtype = "esriFieldTypeOID"
|
||||
Fieldtype_EsriFieldTypeGeometry Fieldtype = "esriFieldTypeGeometry"
|
||||
Fieldtype_EsriFieldTypeBlob Fieldtype = "esriFieldTypeBlob"
|
||||
Fieldtype_EsriFieldTypeRaster Fieldtype = "esriFieldTypeRaster"
|
||||
Fieldtype_EsriFieldTypeGUID Fieldtype = "esriFieldTypeGUID"
|
||||
Fieldtype_EsriFieldTypeGlobalID Fieldtype = "esriFieldTypeGlobalID"
|
||||
Fieldtype_EsriFieldTypeXML Fieldtype = "esriFieldTypeXML"
|
||||
Fieldtype_EsriFieldTypeBigInteger Fieldtype = "esriFieldTypeBigInteger"
|
||||
)
|
||||
|
||||
var FieldtypeAllValues = []Fieldtype{
|
||||
Fieldtype_EsriFieldTypeSmallInteger,
|
||||
Fieldtype_EsriFieldTypeInteger,
|
||||
Fieldtype_EsriFieldTypeSingle,
|
||||
Fieldtype_EsriFieldTypeDouble,
|
||||
Fieldtype_EsriFieldTypeString,
|
||||
Fieldtype_EsriFieldTypeDate,
|
||||
Fieldtype_EsriFieldTypeOID,
|
||||
Fieldtype_EsriFieldTypeGeometry,
|
||||
Fieldtype_EsriFieldTypeBlob,
|
||||
Fieldtype_EsriFieldTypeRaster,
|
||||
Fieldtype_EsriFieldTypeGUID,
|
||||
Fieldtype_EsriFieldTypeGlobalID,
|
||||
Fieldtype_EsriFieldTypeXML,
|
||||
Fieldtype_EsriFieldTypeBigInteger,
|
||||
}
|
||||
|
||||
func (e *Fieldtype) Scan(value interface{}) error {
|
||||
var enumValue string
|
||||
switch val := value.(type) {
|
||||
case string:
|
||||
enumValue = val
|
||||
case []byte:
|
||||
enumValue = string(val)
|
||||
default:
|
||||
return errors.New("jet: Invalid scan value for AllTypesEnum enum. Enum value has to be of type string or []byte")
|
||||
}
|
||||
|
||||
switch enumValue {
|
||||
case "esriFieldTypeSmallInteger":
|
||||
*e = Fieldtype_EsriFieldTypeSmallInteger
|
||||
case "esriFieldTypeInteger":
|
||||
*e = Fieldtype_EsriFieldTypeInteger
|
||||
case "esriFieldTypeSingle":
|
||||
*e = Fieldtype_EsriFieldTypeSingle
|
||||
case "esriFieldTypeDouble":
|
||||
*e = Fieldtype_EsriFieldTypeDouble
|
||||
case "esriFieldTypeString":
|
||||
*e = Fieldtype_EsriFieldTypeString
|
||||
case "esriFieldTypeDate":
|
||||
*e = Fieldtype_EsriFieldTypeDate
|
||||
case "esriFieldTypeOID":
|
||||
*e = Fieldtype_EsriFieldTypeOID
|
||||
case "esriFieldTypeGeometry":
|
||||
*e = Fieldtype_EsriFieldTypeGeometry
|
||||
case "esriFieldTypeBlob":
|
||||
*e = Fieldtype_EsriFieldTypeBlob
|
||||
case "esriFieldTypeRaster":
|
||||
*e = Fieldtype_EsriFieldTypeRaster
|
||||
case "esriFieldTypeGUID":
|
||||
*e = Fieldtype_EsriFieldTypeGUID
|
||||
case "esriFieldTypeGlobalID":
|
||||
*e = Fieldtype_EsriFieldTypeGlobalID
|
||||
case "esriFieldTypeXML":
|
||||
*e = Fieldtype_EsriFieldTypeXML
|
||||
case "esriFieldTypeBigInteger":
|
||||
*e = Fieldtype_EsriFieldTypeBigInteger
|
||||
default:
|
||||
return errors.New("jet: Invalid scan value '" + enumValue + "' for Fieldtype enum")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e Fieldtype) String() string {
|
||||
return string(e)
|
||||
}
|
||||
18
db/gen/nidus-sync/arcgis/model/layer.go
Normal file
18
db/gen/nidus-sync/arcgis/model/layer.go
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
//
|
||||
// Code generated by go-jet DO NOT EDIT.
|
||||
//
|
||||
// WARNING: Changes to this file may cause incorrect behavior
|
||||
// and will be lost if the code is regenerated
|
||||
//
|
||||
|
||||
package model
|
||||
|
||||
import (
|
||||
"github.com/twpayne/go-geom"
|
||||
)
|
||||
|
||||
type Layer struct {
|
||||
Extent geom.Bounds
|
||||
FeatureServiceItemID string `sql:"primary_key"`
|
||||
Index int32 `sql:"primary_key"`
|
||||
}
|
||||
15
db/gen/nidus-sync/arcgis/model/layer_field.go
Normal file
15
db/gen/nidus-sync/arcgis/model/layer_field.go
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
//
|
||||
// Code generated by go-jet DO NOT EDIT.
|
||||
//
|
||||
// WARNING: Changes to this file may cause incorrect behavior
|
||||
// and will be lost if the code is regenerated
|
||||
//
|
||||
|
||||
package model
|
||||
|
||||
type LayerField struct {
|
||||
LayerFeatureServiceItemID string `sql:"primary_key"`
|
||||
LayerIndex int32 `sql:"primary_key"`
|
||||
Name string `sql:"primary_key"`
|
||||
Type Fieldtype
|
||||
}
|
||||
61
db/gen/nidus-sync/arcgis/model/mappingdestinationaddress.go
Normal file
61
db/gen/nidus-sync/arcgis/model/mappingdestinationaddress.go
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
//
|
||||
// Code generated by go-jet DO NOT EDIT.
|
||||
//
|
||||
// WARNING: Changes to this file may cause incorrect behavior
|
||||
// and will be lost if the code is regenerated
|
||||
//
|
||||
|
||||
package model
|
||||
|
||||
import "errors"
|
||||
|
||||
type Mappingdestinationaddress string
|
||||
|
||||
const (
|
||||
Mappingdestinationaddress_Country Mappingdestinationaddress = "country"
|
||||
Mappingdestinationaddress_Locality Mappingdestinationaddress = "locality"
|
||||
Mappingdestinationaddress_PostalCode Mappingdestinationaddress = "postal_code"
|
||||
Mappingdestinationaddress_Street Mappingdestinationaddress = "street"
|
||||
Mappingdestinationaddress_Unit Mappingdestinationaddress = "unit"
|
||||
)
|
||||
|
||||
var MappingdestinationaddressAllValues = []Mappingdestinationaddress{
|
||||
Mappingdestinationaddress_Country,
|
||||
Mappingdestinationaddress_Locality,
|
||||
Mappingdestinationaddress_PostalCode,
|
||||
Mappingdestinationaddress_Street,
|
||||
Mappingdestinationaddress_Unit,
|
||||
}
|
||||
|
||||
func (e *Mappingdestinationaddress) Scan(value interface{}) error {
|
||||
var enumValue string
|
||||
switch val := value.(type) {
|
||||
case string:
|
||||
enumValue = val
|
||||
case []byte:
|
||||
enumValue = string(val)
|
||||
default:
|
||||
return errors.New("jet: Invalid scan value for AllTypesEnum enum. Enum value has to be of type string or []byte")
|
||||
}
|
||||
|
||||
switch enumValue {
|
||||
case "country":
|
||||
*e = Mappingdestinationaddress_Country
|
||||
case "locality":
|
||||
*e = Mappingdestinationaddress_Locality
|
||||
case "postal_code":
|
||||
*e = Mappingdestinationaddress_PostalCode
|
||||
case "street":
|
||||
*e = Mappingdestinationaddress_Street
|
||||
case "unit":
|
||||
*e = Mappingdestinationaddress_Unit
|
||||
default:
|
||||
return errors.New("jet: Invalid scan value '" + enumValue + "' for Mappingdestinationaddress enum")
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (e Mappingdestinationaddress) String() string {
|
||||
return string(e)
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Add a link
Reference in a new issue