Compare commits


115 Commits

Author SHA1 Message Date
CI
375a9ef65b chore: release version v1.84.8 2025-11-12 12:42:08 +00:00
Dmitry Popov
9bf90ab752 fix(core): added cleanup jobs for old system signatures & chain passages 2025-11-12 13:41:33 +01:00
CI
90c3481151 chore: [skip ci] 2025-11-12 10:57:58 +00:00
CI
e36b08a7e5 chore: release version v1.84.7 2025-11-12 10:57:58 +00:00
Dmitry Popov
e1f79170c3 Merge pull request #540 from guarzo/guarzo/apifun
fix: api and search fixes
2025-11-12 14:54:33 +04:00
Guarzo
68b5455e91 bug fix 2025-11-12 07:25:49 +00:00
Guarzo
f28e75c7f4 pr updates 2025-11-12 07:16:21 +00:00
Guarzo
6091adb28e fix: api and structure search fixes 2025-11-12 07:07:39 +00:00
CI
d4657b335f chore: [skip ci] 2025-11-12 00:13:07 +00:00
CI
7fee850902 chore: release version v1.84.6 2025-11-12 00:13:07 +00:00
Dmitry Popov
648c168a66 fix(core): Added map slug uniqness checking while using API 2025-11-12 01:12:13 +01:00
CI
f5c4b2c407 chore: [skip ci] 2025-11-11 12:52:39 +00:00
CI
b592223d52 chore: release version v1.84.5 2025-11-11 12:52:39 +00:00
Dmitry Popov
5cf118c6ee Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-11 13:52:11 +01:00
Dmitry Popov
b25013c652 fix(core): Added tracking for map & character event handling errors 2025-11-11 13:52:07 +01:00
CI
cf43861b11 chore: [skip ci] 2025-11-11 12:27:54 +00:00
CI
b5fe8f8878 chore: release version v1.84.4 2025-11-11 12:27:54 +00:00
Dmitry Popov
5e5068c7de fix(core): fixed issue with updating system signatures 2025-11-11 13:27:17 +01:00
CI
624b51edfb chore: [skip ci] 2025-11-11 09:52:29 +00:00
CI
a72f8e60c4 chore: release version v1.84.3 2025-11-11 09:52:29 +00:00
Dmitry Popov
dec8ae50c9 Merge branch 'develop' 2025-11-11 10:51:55 +01:00
Dmitry Popov
0332d36a8e fix(core): fixed linked signature time status update
2025-11-11 10:51:43 +01:00
CI
8444c7f82d chore: [skip ci] 2025-11-10 16:57:53 +00:00
CI
ec3fc7447e chore: release version v1.84.2 2025-11-10 16:57:53 +00:00
Dmitry Popov
20ec2800c9 Merge pull request #538 from wanderer-industries/develop
Develop
2025-11-10 20:56:53 +04:00
Dmitry Popov
6fbf43e860 fix(api): fixed api for get/update map systems
2025-11-10 17:23:44 +01:00
Dmitry Popov
697da38020 Merge pull request #537 from guarzo/guarzo/apisystemperf
fix: add indexes for map/system
2025-11-09 01:48:01 +04:00
Guarzo
4bc65b43d2 fix: add index for map/systems api 2025-11-08 14:30:19 +00:00
Dmitry Popov
910ec97fd1 chore: refactored map server processes
2025-11-06 09:23:19 +01:00
Dmitry Popov
40ed58ee8c Merge pull request #536 from wanderer-industries/refactor-map-servers
Refactor map servers
2025-11-06 03:03:57 +04:00
Dmitry Popov
c18d241c77 Merge branch 'develop' into refactor-map-servers 2025-11-06 00:01:32 +01:00
Dmitry Popov
8b42908a5c chore: refactored map server processes 2025-11-06 00:01:04 +01:00
Dmitry Popov
6d32505a59 chore: added map cached rtree implementation 2025-11-04 23:40:37 +01:00
Dmitry Popov
fe8a34c77d chore: refactored map state usage 2025-11-04 22:40:04 +01:00
CI
d12cafcca8 chore: [skip ci] 2025-11-01 20:01:52 +00:00
CI
38a9c76ff0 chore: release version v1.84.1 2025-11-01 20:01:52 +00:00
Dmitry Popov
d6c30b4a53 fix(Core): Fixed connection time status update issue 2025-11-01 21:01:18 +01:00
CI
53a81daaf5 chore: [skip ci] 2025-10-29 14:30:52 +00:00
CI
92081c99e3 chore: release version v1.84.0 2025-10-29 14:30:52 +00:00
Dmitry Popov
d78020d2f5 Merge pull request #535 from wanderer-industries/esi-rate-limits
feat(Core): ESI API rate limits support
2025-10-29 18:30:18 +04:00
Dmitry Popov
fb1a9b440d feat(Core): ESI API rate limits support
fixes #534
2025-10-29 15:29:12 +01:00
CI
0141ac46e3 chore: [skip ci] 2025-10-29 09:24:54 +00:00
CI
d2bf6a8f86 chore: release version v1.83.4 2025-10-29 09:24:54 +00:00
Dmitry Popov
1844e4c757 fix(Core): Fixed page reloads 2025-10-29 10:23:54 +01:00
CI
d407efe805 chore: [skip ci] 2025-10-27 23:52:49 +00:00
CI
021e04d87a chore: release version v1.83.3 2025-10-27 23:52:49 +00:00
Dmitry Popov
7844c9db34 fix(Core): Fixed old map API for systems & added small QOL improvements 2025-10-28 00:52:04 +01:00
CI
355beb8394 chore: [skip ci] 2025-10-22 16:09:15 +00:00
CI
d82eeba792 chore: release version v1.83.2 2025-10-22 16:09:15 +00:00
Dmitry Popov
0396b05e58 fix(Connections): Set new connection time status based on to/from system class 2025-10-22 18:08:38 +02:00
CI
9494a9eb37 chore: [skip ci] 2025-10-21 14:13:39 +00:00
CI
8238f84ac7 chore: release version v1.83.1 2025-10-21 14:13:39 +00:00
Dmitry Popov
1cf19b2a50 fix(Kills): Fixed zkb links (added following '/'). 2025-10-21 16:13:08 +02:00
CI
e8543fd2f8 chore: [skip ci] 2025-10-21 07:45:45 +00:00
CI
c7f360e1fa chore: release version v1.83.0 2025-10-21 07:45:45 +00:00
Dmitry Popov
a2b83f7f0c Merge pull request #531 from wanderer-industries/copy-past-roles
Copy past roles
2025-10-21 11:45:13 +04:00
CI
ae5689a403 chore: [skip ci] 2025-10-21 06:52:44 +00:00
CI
c46af1d286 chore: release version v1.82.3 2025-10-21 06:52:44 +00:00
Aleksei Chichenkov
d17ba2168c Merge pull request #533 from wanderer-industries/fix-db
fix(Map): Fix system static info - add source region for U319 from Null-sec
2025-10-21 09:52:17 +03:00
DanSylvest
80c14716eb fix(Map): Fix system static info - add source region for U319 from Null-sec 2025-10-21 09:50:10 +03:00
CI
8541fcd29b chore: [skip ci] 2025-10-21 06:41:33 +00:00
CI
65d6acd7fb chore: release version v1.82.2 2025-10-21 06:41:33 +00:00
Aleksei Chichenkov
8b5f83d6b2 Merge pull request #532 from wanderer-industries/fix-db
fix(Map): Fix system static info - for J012635 add D382; for J015092 …
2025-10-21 09:41:08 +03:00
DanSylvest
5e18891f4b fix(Map): Fix system static info - for J012635 add D382; for J015092 - changed from J244, Z060 to N110, J244; for J000487 removed C008 2025-10-21 09:38:47 +03:00
DanSylvest
74e0b85748 fix(Map): Copy-Paste restriction: support from FE side - fixed problem with incorrect disabling copy and paste buttons 2025-10-21 09:20:41 +03:00
DanSylvest
81d3495b65 fix(Map): Copy-Paste restriction: support from FE side - removed unnecessary constant 2025-10-20 12:51:20 +03:00
CI
d1959ca09f chore: [skip ci] 2025-10-20 09:33:16 +00:00
CI
ec7a5ecf10 chore: release version v1.82.1 2025-10-20 09:33:16 +00:00
DanSylvest
70b9ec99ba Merge remote-tracking branch 'origin/copy-past-roles' into copy-past-roles 2025-10-20 12:32:41 +03:00
Dmitry Popov
7147d79166 Merge branch 'main' into copy-past-roles 2025-10-20 11:33:45 +02:00
Dmitry Popov
1dad9316bd fix(Core): Fixed 'viewer' map access & characters tracking 2025-10-20 11:32:32 +02:00
DanSylvest
872f7dcf48 fix(Map): Copy-Paste restriction: support from FE side 2025-10-20 12:32:07 +03:00
Dmitry Popov
02b450325e fix(Core): Added Eve data downloaded files cleanup logic 2025-10-19 12:37:31 +02:00
Dmitry Popov
136bc4cbb9 feat(Core): Added map roles settings for copy/paste 2025-10-19 12:03:16 +02:00
Dmitry Popov
dab49df9aa Merge branch 'main' into copy-past-roles 2025-10-16 16:01:41 +02:00
Dmitry Popov
6286087f3e feat(Core): Added map roles settings for copy/paste 2025-10-16 16:01:12 +02:00
CI
4ce7160f79 chore: [skip ci] 2025-10-15 19:59:55 +00:00
CI
2913bf19b0 chore: release version v1.82.0 2025-10-15 19:59:55 +00:00
Dmitry Popov
7bd6be6fd0 Merge pull request #528 from wanderer-industries/copy-past-systems-with-connections
Copy past systems with connections
2025-10-15 23:56:31 +04:00
Dmitry Popov
705daa286b Merge branch 'main' into copy-past-systems-with-connections 2025-10-15 21:56:09 +02:00
Dmitry Popov
614d06be66 feat(Core): Added an ability to copy/paste selected map area between maps 2025-10-15 21:55:56 +02:00
CI
dec3e9a7ce chore: [skip ci] 2025-10-15 19:08:54 +00:00
CI
0017ac3373 chore: release version v1.81.15 2025-10-15 19:08:54 +00:00
DanSylvest
ae34744578 Merge remote-tracking branch 'origin/main' 2025-10-15 22:06:53 +03:00
DanSylvest
76885058ef fix(Map): Fixed problem with commit - for correct restore deprecated data - change config key 2025-10-15 22:06:31 +03:00
CI
fccb007036 chore: [skip ci] 2025-10-15 18:56:52 +00:00
CI
a9f8901bd5 chore: release version v1.81.14 2025-10-15 18:56:52 +00:00
DanSylvest
8ae968b5be fix(Map): Fixed problem with commit - for correct restore deprecated data 2025-10-15 21:54:57 +03:00
Dmitry Popov
beffd45e4f Merge branch 'main' into copy-past-systems-with-connections 2025-10-15 16:30:57 +02:00
CI
4488d81e8d chore: [skip ci] 2025-10-15 12:30:56 +00:00
CI
618cc8c5f1 chore: release version v1.81.13 2025-10-15 12:30:56 +00:00
Dmitry Popov
3fb22a877e fix(Core): Fixed system select after tab switch 2025-10-15 14:30:19 +02:00
Dmitry Popov
8759409b82 Merge branch 'main' into copy-past-systems-with-connections 2025-10-15 12:51:25 +02:00
CI
245647ae6a chore: [skip ci] 2025-10-15 10:33:07 +00:00
CI
eb7d33ea07 chore: release version v1.81.12 2025-10-15 10:33:07 +00:00
Dmitry Popov
3575b16def fix(Core): Fixed map events buffering on tab switch 2025-10-15 12:32:29 +02:00
CI
a6fb680be8 chore: [skip ci] 2025-10-15 09:59:48 +00:00
CI
9e17df5544 chore: release version v1.81.11 2025-10-15 09:59:48 +00:00
Dmitry Popov
683fde7be4 fix(Signatures): Fixed EOL indication for un-splashed and signatures list 2025-10-15 11:59:14 +02:00
DanSylvest
ee68ce92a2 fix(Map): Add ability to copy and past systems (UI part) 2025-10-14 14:34:47 +03:00
CI
8b4e38d795 chore: [skip ci] 2025-10-13 22:46:48 +00:00
CI
4995202627 chore: release version v1.81.10 2025-10-13 22:46:48 +00:00
Dmitry Popov
986b997a6a fix(Signatures): Rework for lazy signatures deletion
- if lazy delete enabled, linked connection deleted after signature only
now
- added sort by signature name for info column
- show signature temporary name if set on link signature to system &
signatures widget
2025-10-14 00:46:04 +02:00
CI
9a957af759 chore: [skip ci] 2025-10-12 21:37:58 +00:00
CI
c5a0a96016 chore: release version v1.81.9 2025-10-12 21:37:58 +00:00
Dmitry Popov
8715a6c0ac fix(Signatures): Fixed issue with wrong linked signatures deletions 2025-10-12 23:37:22 +02:00
CI
c9810095aa chore: [skip ci] 2025-10-11 16:12:20 +00:00
CI
69eb888469 chore: release version v1.81.8 2025-10-11 16:12:20 +00:00
DanSylvest
748347df9a fix(Map): Fix problem with restoring settings on widgets 2025-10-11 19:10:27 +03:00
CI
aa4d49027c chore: [skip ci] 2025-10-10 19:12:44 +00:00
CI
a9d7387e40 chore: release version v1.81.7 2025-10-10 19:12:44 +00:00
DanSylvest
dc4d260c9b fix(Map): Fixed problem with rendering dropdown classes in signatures 2025-10-10 22:10:54 +03:00
CI
dc430491bf chore: [skip ci] 2025-10-10 06:47:34 +00:00
CI
42cd261ea7 chore: release version v1.81.6 2025-10-10 06:47:34 +00:00
Aleksei Chichenkov
35af4fdc09 Merge pull request #520 from wanderer-industries/migrations
Migrations
2025-10-10 09:47:08 +03:00
148 changed files with 5436 additions and 133425 deletions

View File

@@ -2,6 +2,292 @@
<!-- changelog -->
## [v1.84.8](https://github.com/wanderer-industries/wanderer/compare/v1.84.7...v1.84.8) (2025-11-12)
### Bug Fixes:
* core: added cleanup jobs for old system signatures & chain passages
## [v1.84.7](https://github.com/wanderer-industries/wanderer/compare/v1.84.6...v1.84.7) (2025-11-12)
### Bug Fixes:
* api and structure search fixes
## [v1.84.6](https://github.com/wanderer-industries/wanderer/compare/v1.84.5...v1.84.6) (2025-11-12)
### Bug Fixes:
* core: Added map slug uniqness checking while using API
## [v1.84.5](https://github.com/wanderer-industries/wanderer/compare/v1.84.4...v1.84.5) (2025-11-11)
### Bug Fixes:
* core: Added tracking for map & character event handling errors
## [v1.84.4](https://github.com/wanderer-industries/wanderer/compare/v1.84.3...v1.84.4) (2025-11-11)
### Bug Fixes:
* core: fixed issue with updating system signatures
## [v1.84.3](https://github.com/wanderer-industries/wanderer/compare/v1.84.2...v1.84.3) (2025-11-11)
### Bug Fixes:
* core: fixed linked signature time status update
## [v1.84.2](https://github.com/wanderer-industries/wanderer/compare/v1.84.1...v1.84.2) (2025-11-10)
### Bug Fixes:
* api: fixed api for get/update map systems
* add index for map/systems api
## [v1.84.1](https://github.com/wanderer-industries/wanderer/compare/v1.84.0...v1.84.1) (2025-11-01)
### Bug Fixes:
* Core: Fixed connection time status update issue
## [v1.84.0](https://github.com/wanderer-industries/wanderer/compare/v1.83.4...v1.84.0) (2025-10-29)
### Features:
* Core: ESI API rate limits support
## [v1.83.4](https://github.com/wanderer-industries/wanderer/compare/v1.83.3...v1.83.4) (2025-10-29)
### Bug Fixes:
* Core: Fixed page reloads
## [v1.83.3](https://github.com/wanderer-industries/wanderer/compare/v1.83.2...v1.83.3) (2025-10-27)
### Bug Fixes:
* Core: Fixed old map API for systems & added small QOL improvements
## [v1.83.2](https://github.com/wanderer-industries/wanderer/compare/v1.83.1...v1.83.2) (2025-10-22)
### Bug Fixes:
* Connections: Set new connection time status based on to/from system class
## [v1.83.1](https://github.com/wanderer-industries/wanderer/compare/v1.83.0...v1.83.1) (2025-10-21)
### Bug Fixes:
* Kills: Fixed zkb links (added following '/').
## [v1.83.0](https://github.com/wanderer-industries/wanderer/compare/v1.82.3...v1.83.0) (2025-10-21)
### Features:
* Core: Added map roles settings for copy/paste
* Core: Added map roles settings for copy/paste
### Bug Fixes:
* Map: Copy-Paste restriction: support from FE side - fixed problem with incorrect disabling copy and paste buttons
* Map: Copy-Paste restriction: support from FE side - removed unnecessary constant
* Map: Copy-Paste restriction: support from FE side
* Core: Added Eve data downloaded files cleanup logic
## [v1.82.3](https://github.com/wanderer-industries/wanderer/compare/v1.82.2...v1.82.3) (2025-10-21)
### Bug Fixes:
* Map: Fix system static info - add source region for U319 from Null-sec
## [v1.82.2](https://github.com/wanderer-industries/wanderer/compare/v1.82.1...v1.82.2) (2025-10-21)
### Bug Fixes:
* Map: Fix system static info - for J012635 add D382; for J015092 - changed from J244, Z060 to N110, J244; for J000487 removed C008
## [v1.82.1](https://github.com/wanderer-industries/wanderer/compare/v1.82.0...v1.82.1) (2025-10-20)
### Bug Fixes:
* Core: Fixed 'viewer' map access & characters tracking
## [v1.82.0](https://github.com/wanderer-industries/wanderer/compare/v1.81.15...v1.82.0) (2025-10-15)
### Features:
* Core: Added an ability to copy/paste selected map area between maps
### Bug Fixes:
* Map: Add ability to copy and past systems (UI part)
## [v1.81.15](https://github.com/wanderer-industries/wanderer/compare/v1.81.14...v1.81.15) (2025-10-15)
### Bug Fixes:
* Map: Fixed problem with commit - for correct restore deprecated data - change config key
## [v1.81.14](https://github.com/wanderer-industries/wanderer/compare/v1.81.13...v1.81.14) (2025-10-15)
### Bug Fixes:
* Map: Fixed problem with commit - for correct restore deprecated data
## [v1.81.13](https://github.com/wanderer-industries/wanderer/compare/v1.81.12...v1.81.13) (2025-10-15)
### Bug Fixes:
* Core: Fixed system select after tab switch
## [v1.81.12](https://github.com/wanderer-industries/wanderer/compare/v1.81.11...v1.81.12) (2025-10-15)
### Bug Fixes:
* Core: Fixed map events buffering on tab switch
## [v1.81.11](https://github.com/wanderer-industries/wanderer/compare/v1.81.10...v1.81.11) (2025-10-15)
### Bug Fixes:
* Signatures: Fixed EOL indication for un-splashed and signatures list
## [v1.81.10](https://github.com/wanderer-industries/wanderer/compare/v1.81.9...v1.81.10) (2025-10-13)
### Bug Fixes:
* Signatures: Rework for lazy signatures deletion
## [v1.81.9](https://github.com/wanderer-industries/wanderer/compare/v1.81.8...v1.81.9) (2025-10-12)
### Bug Fixes:
* Signatures: Fixed issue with wrong linked signatures deletions
## [v1.81.8](https://github.com/wanderer-industries/wanderer/compare/v1.81.7...v1.81.8) (2025-10-11)
### Bug Fixes:
* Map: Fix problem with restoring settings on widgets
## [v1.81.7](https://github.com/wanderer-industries/wanderer/compare/v1.81.6...v1.81.7) (2025-10-10)
### Bug Fixes:
* Map: Fixed problem with rendering dropdown classes in signatures
## [v1.81.6](https://github.com/wanderer-industries/wanderer/compare/v1.81.5...v1.81.6) (2025-10-10)
### Bug Fixes:
* Map: Fixed problem with a lot unnecessary loads zkb data on resize map
* Map: Added ability to see focused element
* Map: Removed unnecessary vertical scroller in Character Tracking dialog. Main always first in list of tracking characters, following next after main, another characters sorting by name
* Map: Added Search tool for systems what on the map
* Map: Added migration mechanism
* Map: Remove settings some default values if migration from very old settings system
* Map: MIGRATION: support from old store settings import
* Map: Add common migration mechanism. ATTENTION! This is a non-reversible stored map settings commit — it means we do not guarantee that settings will work if you check out back. We’ve tried to migrate old settings, but it may not work well or may NOT work at all.
* Map: Add front-end migrations for local store settings
## [v1.81.5](https://github.com/wanderer-industries/wanderer/compare/v1.81.4...v1.81.5) (2025-10-09)

View File

@@ -9,6 +9,7 @@ import { useMapperHandlers } from './useMapperHandlers';
import { MapRootContent } from '@/hooks/Mapper/components/mapRootContent/MapRootContent.tsx';
import { MapRootProvider } from '@/hooks/Mapper/mapRootProvider';
import './common-styles/main.scss';
import { ToastProvider } from '@/hooks/Mapper/ToastProvider.tsx';
const ErrorFallback = () => {
return <div className="!z-100 absolute w-screen h-screen bg-transparent"></div>;
@@ -39,13 +40,15 @@ export default function MapRoot({ hooks }) {
return (
<PrimeReactProvider>
<MapRootProvider fwdRef={providerRef} outCommand={handleCommand}>
<ErrorBoundary FallbackComponent={ErrorFallback} onError={logError}>
<ReactFlowProvider>
<MapRootContent />
</ReactFlowProvider>
</ErrorBoundary>
</MapRootProvider>
<ToastProvider>
<MapRootProvider fwdRef={providerRef} outCommand={handleCommand}>
<ErrorBoundary FallbackComponent={ErrorFallback} onError={logError}>
<ReactFlowProvider>
<MapRootContent />
</ReactFlowProvider>
</ErrorBoundary>
</MapRootProvider>
</ToastProvider>
</PrimeReactProvider>
);
}

View File

@@ -0,0 +1,31 @@
import React, { createContext, useContext, useRef } from 'react';
import { Toast } from 'primereact/toast';
import type { ToastMessage } from 'primereact/toast';
interface ToastContextValue {
toastRef: React.RefObject<Toast>;
show: (message: ToastMessage | ToastMessage[]) => void;
}
const ToastContext = createContext<ToastContextValue | null>(null);
export const ToastProvider: React.FC<{ children: React.ReactNode }> = ({ children }) => {
const toastRef = useRef<Toast>(null);
const show = (message: ToastMessage | ToastMessage[]) => {
toastRef.current?.show(message);
};
return (
<ToastContext.Provider value={{ toastRef, show }}>
<Toast ref={toastRef} position="top-right" />
{children}
</ToastContext.Provider>
);
};
export const useToast = (): ToastContextValue => {
const context = useContext(ToastContext);
if (!context) throw new Error('useToast must be used within a ToastProvider');
return context;
};
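For context, any component rendered under this provider can raise a toast through the hook. A minimal usage sketch (the component name is made up for illustration; the message shape mirrors the "Copied to clipboard" toast shown further below):

import React from 'react';
import { useToast } from '@/hooks/Mapper/ToastProvider.tsx';

// Hypothetical example component, not part of this change set.
export const CopyNotifier: React.FC = () => {
  const { show } = useToast();
  return (
    <button onClick={() => show({ severity: 'success', summary: 'Copied to clipboard', life: 3000 })}>
      Copy
    </button>
  );
};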

View File

@@ -118,7 +118,11 @@ export const useContextMenuSystemItems = ({
});
if (isShowPingBtn) {
return <WdMenuItem icon={iconClasses}>{!hasPing ? 'Ping: RALLY' : 'Cancel: RALLY'}</WdMenuItem>;
return (
<WdMenuItem icon={iconClasses} className="!ml-[-2px]">
{!hasPing ? 'Ping: RALLY' : 'Cancel: RALLY'}
</WdMenuItem>
);
}
return (
@@ -126,7 +130,7 @@ export const useContextMenuSystemItems = ({
infoTitle="Locked. Ping can be set only for one system."
infoClass="pi-lock text-stone-500 mr-[12px]"
>
<WdMenuItem disabled icon={iconClasses}>
<WdMenuItem disabled icon={iconClasses} className="!ml-[-2px]">
{!hasPing ? 'Ping: RALLY' : 'Cancel: RALLY'}
</WdMenuItem>
</MenuItemWithInfo>

View File

@@ -2,25 +2,60 @@ import React, { RefObject, useMemo } from 'react';
import { ContextMenu } from 'primereact/contextmenu';
import { PrimeIcons } from 'primereact/api';
import { MenuItem } from 'primereact/menuitem';
import { checkPermissions } from '@/hooks/Mapper/components/map/helpers';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { MenuItemWithInfo, WdMenuItem } from '@/hooks/Mapper/components/ui-kit';
import clsx from 'clsx';
export interface ContextMenuSystemMultipleProps {
contextMenuRef: RefObject<ContextMenu>;
onDeleteSystems(): void;
onCopySystems(): void;
}
export const ContextMenuSystemMultiple: React.FC<ContextMenuSystemMultipleProps> = ({
contextMenuRef,
onDeleteSystems,
onCopySystems,
}) => {
const {
data: { options, userPermissions },
} = useMapRootState();
const items: MenuItem[] = useMemo(() => {
const allowCopy = checkPermissions(userPermissions, options.allowed_copy_for);
return [
{
label: 'Delete',
icon: PrimeIcons.TRASH,
icon: clsx(PrimeIcons.TRASH, 'text-red-400'),
command: onDeleteSystems,
},
{ separator: true },
{
label: 'Copy',
icon: PrimeIcons.COPY,
command: onCopySystems,
disabled: !allowCopy,
template: () => {
if (allowCopy) {
return <WdMenuItem icon="pi pi-copy">Copy</WdMenuItem>;
}
return (
<MenuItemWithInfo
infoTitle="Action is blocked because you dont have permission to Copy."
infoClass={clsx(PrimeIcons.QUESTION_CIRCLE, 'text-stone-500 mr-[12px]')}
tooltipWrapperClassName="flex"
>
<WdMenuItem disabled icon="pi pi-copy">
Copy
</WdMenuItem>
</MenuItemWithInfo>
);
},
},
];
}, [onDeleteSystems]);
}, [onCopySystems, onDeleteSystems, options, userPermissions]);
return (
<>

View File

@@ -6,27 +6,34 @@ import { ctxManager } from '@/hooks/Mapper/utils/contextManager.ts';
import { NodeSelectionMouseHandler } from '@/hooks/Mapper/components/contexts/types.ts';
import { useDeleteSystems } from '@/hooks/Mapper/components/contexts/hooks';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { encodeJsonToUriBase64 } from '@/hooks/Mapper/utils';
import { useToast } from '@/hooks/Mapper/ToastProvider.tsx';
export const useContextMenuSystemMultipleHandlers = () => {
const {
data: { pings },
data: { pings, connections },
} = useMapRootState();
const { show } = useToast();
const contextMenuRef = useRef<ContextMenu | null>(null);
const [systems, setSystems] = useState<Node<SolarSystemRawType>[]>();
const { deleteSystems } = useDeleteSystems();
const ping = useMemo(() => (pings.length === 1 ? pings[0] : undefined), [pings]);
const refVars = useRef({ systems, ping, connections, deleteSystems });
refVars.current = { systems, ping, connections, deleteSystems };
const handleSystemMultipleContext: NodeSelectionMouseHandler = (ev, systems_) => {
const handleSystemMultipleContext = useCallback<NodeSelectionMouseHandler>((ev, systems_) => {
setSystems(systems_);
ev.preventDefault();
ctxManager.next('ctxSysMult', contextMenuRef.current);
contextMenuRef.current?.show(ev);
};
}, []);
const onDeleteSystems = useCallback(() => {
const { systems, ping, deleteSystems } = refVars.current;
if (!systems) {
return;
}
@@ -41,11 +48,34 @@ export const useContextMenuSystemMultipleHandlers = () => {
}
deleteSystems(sysToDel);
}, [deleteSystems, systems, ping]);
}, []);
const onCopySystems = useCallback(async () => {
const { systems, connections } = refVars.current;
if (!systems) {
return;
}
const connectionToCopy = connections.filter(
c => systems.filter(s => [c.target, c.source].includes(s.id)).length == 2,
);
await navigator.clipboard.writeText(
encodeJsonToUriBase64({ systems: systems.map(x => x.data), connections: connectionToCopy }),
);
show({
severity: 'success',
summary: 'Copied to clipboard',
detail: `Successfully copied to clipboard - [${systems.length}] systems and [${connectionToCopy.length}] connections`,
life: 3000,
});
}, [show]);
return {
handleSystemMultipleContext,
contextMenuRef,
onDeleteSystems,
onCopySystems,
};
};

View File

@@ -1,10 +1,10 @@
import { useCallback, useRef } from 'react';
import { LayoutEventBlocker, TooltipPosition, WdImageSize, WdImgButton } from '@/hooks/Mapper/components/ui-kit';
import { ANOIK_ICON, DOTLAN_ICON, ZKB_ICON } from '@/hooks/Mapper/icons';
import { useCallback, useRef } from 'react';
import classes from './FastSystemActions.module.scss';
import clsx from 'clsx';
import { PrimeIcons } from 'primereact/api';
import classes from './FastSystemActions.module.scss';
export interface FastSystemActionsProps {
systemId: string;
@@ -27,7 +27,7 @@ export const FastSystemActions = ({
ref.current = { systemId, systemName, regionName, isWH };
const handleOpenZKB = useCallback(
() => window.open(`https://zkillboard.com/system/${ref.current.systemId}`, '_blank'),
() => window.open(`https://zkillboard.com/system/${ref.current.systemId}/`, '_blank'),
[],
);

View File

@@ -8,6 +8,4 @@ export type WaypointSetContextHandlerProps = {
destination: string;
};
export type WaypointSetContextHandler = (props: WaypointSetContextHandlerProps) => void;
export type NodeSelectionMouseHandler =
| ((event: React.MouseEvent<Element, MouseEvent>, nodes: Node[]) => void)
| undefined;
export type NodeSelectionMouseHandler = (event: React.MouseEvent<Element, MouseEvent>, nodes: Node[]) => void;

View File

@@ -120,7 +120,7 @@ const MapComp = ({
useMapHandlers(refn, onSelectionChange);
useUpdateNodes(nodes);
const { handleRootContext, ...rootCtxProps } = useContextMenuRootHandlers({ onAddSystem });
const { handleRootContext, ...rootCtxProps } = useContextMenuRootHandlers({ onAddSystem, onCommand });
const { handleConnectionContext, ...connectionCtxProps } = useContextMenuConnectionHandlers();
const { update } = useMapState();
const { variant, gap, size, color } = useBackgroundVars(theme);

View File

@@ -2,22 +2,70 @@ import React, { RefObject, useMemo } from 'react';
import { ContextMenu } from 'primereact/contextmenu';
import { PrimeIcons } from 'primereact/api';
import { MenuItem } from 'primereact/menuitem';
import { PasteSystemsAndConnections } from '@/hooks/Mapper/components/map/components';
import { useMapState } from '@/hooks/Mapper/components/map/MapProvider.tsx';
import { checkPermissions } from '@/hooks/Mapper/components/map/helpers';
import { MenuItemWithInfo, WdMenuItem } from '@/hooks/Mapper/components/ui-kit';
import clsx from 'clsx';
export interface ContextMenuRootProps {
contextMenuRef: RefObject<ContextMenu>;
pasteSystemsAndConnections: PasteSystemsAndConnections | undefined;
onAddSystem(): void;
onPasteSystemsAnsConnections(): void;
}
export const ContextMenuRoot: React.FC<ContextMenuRootProps> = ({ contextMenuRef, onAddSystem }) => {
export const ContextMenuRoot: React.FC<ContextMenuRootProps> = ({
contextMenuRef,
onAddSystem,
onPasteSystemsAnsConnections,
pasteSystemsAndConnections,
}) => {
const {
data: { options, userPermissions },
} = useMapState();
const items: MenuItem[] = useMemo(() => {
const allowPaste = checkPermissions(userPermissions, options.allowed_paste_for);
return [
{
label: 'Add System',
icon: PrimeIcons.PLUS,
command: onAddSystem,
},
...(pasteSystemsAndConnections != null
? [
{
icon: 'pi pi-clipboard',
disabled: !allowPaste,
command: onPasteSystemsAnsConnections,
template: () => {
if (allowPaste) {
return (
<WdMenuItem icon="pi pi-clipboard">
Paste
</WdMenuItem>
);
}
return (
<MenuItemWithInfo
infoTitle="Action is blocked because you dont have permission to Paste."
infoClass={clsx(PrimeIcons.QUESTION_CIRCLE, 'text-stone-500 mr-[12px]')}
tooltipWrapperClassName="flex"
>
<WdMenuItem disabled icon="pi pi-clipboard">
Paste
</WdMenuItem>
</MenuItemWithInfo>
);
},
},
]
: []),
];
}, [onAddSystem]);
}, [userPermissions, options, onAddSystem, pasteSystemsAndConnections, onPasteSystemsAnsConnections]);
return (
<>

View File

@@ -1,36 +1,76 @@
import { useReactFlow, XYPosition } from 'reactflow';
import React, { useCallback, useRef, useState } from 'react';
import { ContextMenu } from 'primereact/contextmenu';
import { ctxManager } from '@/hooks/Mapper/utils/contextManager.ts';
import { OnMapAddSystemCallback } from '@/hooks/Mapper/components/map/map.types.ts';
import { recenterSystemsByBounds } from '@/hooks/Mapper/helpers/recenterSystems.ts';
import { OutCommand, OutCommandHandler, SolarSystemConnection, SolarSystemRawType } from '@/hooks/Mapper/types';
import { decodeUriBase64ToJson } from '@/hooks/Mapper/utils';
import { ctxManager } from '@/hooks/Mapper/utils/contextManager.ts';
import { ContextMenu } from 'primereact/contextmenu';
import React, { useCallback, useRef, useState } from 'react';
import { useReactFlow, XYPosition } from 'reactflow';
export type PasteSystemsAndConnections = {
systems: SolarSystemRawType[];
connections: SolarSystemConnection[];
};
type UseContextMenuRootHandlers = {
onAddSystem?: OnMapAddSystemCallback;
onCommand?: OutCommandHandler;
};
export const useContextMenuRootHandlers = ({ onAddSystem }: UseContextMenuRootHandlers = {}) => {
export const useContextMenuRootHandlers = ({ onAddSystem, onCommand }: UseContextMenuRootHandlers = {}) => {
const rf = useReactFlow();
const contextMenuRef = useRef<ContextMenu | null>(null);
const [position, setPosition] = useState<XYPosition | null>(null);
const [pasteSystemsAndConnections, setPasteSystemsAndConnections] = useState<PasteSystemsAndConnections>();
const handleRootContext = (e: React.MouseEvent<HTMLDivElement>) => {
const handleRootContext = async (e: React.MouseEvent<HTMLDivElement>) => {
setPosition(rf.project({ x: e.clientX, y: e.clientY }));
e.preventDefault();
ctxManager.next('ctxRoot', contextMenuRef.current);
contextMenuRef.current?.show(e);
try {
const text = await navigator.clipboard.readText();
const result = decodeUriBase64ToJson(text);
setPasteSystemsAndConnections(result as PasteSystemsAndConnections);
} catch (err) {
setPasteSystemsAndConnections(undefined);
// do nothing
}
};
const ref = useRef({ onAddSystem, position });
ref.current = { onAddSystem, position };
const ref = useRef({ onAddSystem, position, pasteSystemsAndConnections, onCommand });
ref.current = { onAddSystem, position, pasteSystemsAndConnections, onCommand };
const onAddSystemCallback = useCallback(() => {
ref.current.onAddSystem?.({ coordinates: position });
}, [position]);
const onPasteSystemsAnsConnections = useCallback(async () => {
const { pasteSystemsAndConnections, onCommand, position } = ref.current;
if (!position || !onCommand || !pasteSystemsAndConnections) {
return;
}
const { systems } = recenterSystemsByBounds(pasteSystemsAndConnections.systems);
await onCommand({
type: OutCommand.manualPasteSystemsAndConnections,
data: {
systems: systems.map(({ position: srcPos, ...rest }) => ({
position: { x: Math.round(srcPos.x + position.x), y: Math.round(srcPos.y + position.y) },
...rest,
})),
connections: pasteSystemsAndConnections.connections,
},
});
}, []);
return {
handleRootContext,
pasteSystemsAndConnections,
contextMenuRef,
onAddSystem: onAddSystemCallback,
onPasteSystemsAnsConnections,
};
};
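The clipboard payload format is defined by encodeJsonToUriBase64 / decodeUriBase64ToJson from '@/hooks/Mapper/utils', which are not included in this compare. A minimal sketch of what such a pair could look like, assuming it simply round-trips JSON through URI encoding and base64 (the real helpers may differ):

// Hypothetical sketch only; the actual implementations live in '@/hooks/Mapper/utils'
// and are not shown in this diff.
export const encodeJsonToUriBase64 = (value: unknown): string =>
  btoa(encodeURIComponent(JSON.stringify(value)));

export const decodeUriBase64ToJson = (text: string): unknown =>
  // Throws on anything that was not produced by the encoder, which the
  // handleRootContext try/catch above treats as "nothing to paste".
  JSON.parse(decodeURIComponent(atob(text)));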

View File

@@ -1,15 +1,16 @@
import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper';
import { InfoDrawer } from '@/hooks/Mapper/components/ui-kit';
import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper';
import classes from './UnsplashedSignature.module.scss';
import { SystemSignature } from '@/hooks/Mapper/types/signatures';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { WORMHOLE_CLASS_STYLES, WORMHOLES_ADDITIONAL_INFO } from '@/hooks/Mapper/components/map/constants.ts';
import { useMemo } from 'react';
import clsx from 'clsx';
import { renderInfoColumn } from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/renders';
import { K162_TYPES_MAP } from '@/hooks/Mapper/constants.ts';
import { parseSignatureCustomInfo } from '@/hooks/Mapper/helpers/parseSignatureCustomInfo.ts';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { TimeStatus } from '@/hooks/Mapper/types';
import { SystemSignature } from '@/hooks/Mapper/types/signatures';
import clsx from 'clsx';
import { useMemo } from 'react';
import classes from './UnsplashedSignature.module.scss';
interface UnsplashedSignatureProps {
signature: SystemSignature;
@@ -35,7 +36,7 @@ export const UnsplashedSignature = ({ signature }: UnsplashedSignatureProps) =>
}, [customInfo]);
const isEOL = useMemo(() => {
return customInfo?.isEOL;
return customInfo?.time_status === TimeStatus._1h;
}, [customInfo]);
const whClassStyle = useMemo(() => {

View File

@@ -0,0 +1,5 @@
import { UserPermission, UserPermissions } from '@/hooks/Mapper/types';
export const checkPermissions = (permissions: Partial<UserPermissions>, targetPermission: UserPermission) => {
return targetPermission != null && permissions[targetPermission];
};
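Both new context menus consume this helper the same way, with userPermissions and options (including the new allowed_copy_for / allowed_paste_for map settings) taken from the map root state:

// As used in ContextMenuSystemMultiple and ContextMenuRoot above:
const allowCopy = checkPermissions(userPermissions, options.allowed_copy_for);
const allowPaste = checkPermissions(userPermissions, options.allowed_paste_for);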

View File

@@ -4,3 +4,4 @@ export * from './getSystemClassStyles';
export * from './getShapeClass';
export * from './getBackgroundClass';
export * from './prepareUnsplashedChunks';
export * from './checkPermissions';

View File

@@ -38,6 +38,8 @@ export const useMapInit = () => {
user_characters,
present_characters,
hubs,
options,
user_permissions,
}: CommandInit) => {
const { update } = ref.current;
@@ -63,6 +65,14 @@ export const useMapInit = () => {
updateData.hubs = hubs;
}
if (options) {
updateData.options = options;
}
if (options) {
updateData.userPermissions = user_permissions;
}
if (systems) {
updateData.systems = systems;
}

View File

@@ -9,11 +9,12 @@ import {
} from '@/hooks/Mapper/components/map/constants.ts';
import { SystemSignaturesContent } from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/SystemSignaturesContent';
import { K162_TYPES_MAP } from '@/hooks/Mapper/constants.ts';
import { SETTINGS_KEYS, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';
import { parseSignatureCustomInfo } from '@/hooks/Mapper/helpers/parseSignatureCustomInfo';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { CommandLinkSignatureToSystem, SignatureGroup, SystemSignature } from '@/hooks/Mapper/types';
import { OutCommand } from '@/hooks/Mapper/types/mapHandlers.ts';
import { SETTINGS_KEYS, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';
import { useSystemSignaturesData } from '../../widgets/SystemSignatures/hooks/useSystemSignaturesData';
const K162_SIGNATURE_TYPE = WORMHOLES_ADDITIONAL_INFO_BY_SHORT_NAME['K162'].shortName;
@@ -135,6 +136,11 @@ export const SystemLinkSignatureDialog = ({ data, setVisible }: SystemLinkSignat
[data, setVisible],
);
const { signatures } = useSystemSignaturesData({
systemId: `${data.solar_system_source}`,
settings: LINK_SIGNTATURE_SETTINGS,
});
useEffect(() => {
if (!targetSystemDynamicInfo) {
handleHide();
@@ -152,10 +158,12 @@ export const SystemLinkSignatureDialog = ({ data, setVisible }: SystemLinkSignat
>
<SystemSignaturesContent
systemId={`${data.solar_system_source}`}
hideLinkedSignatures
signatures={signatures}
hasUnsupportedLanguage={false}
settings={LINK_SIGNTATURE_SETTINGS}
hideLinkedSignatures
selectable
onSelect={handleSelect}
selectable={true}
filterSignature={filterSignature}
/>
</Dialog>

View File

@@ -1,12 +1,12 @@
import { Widget } from '@/hooks/Mapper/components/mapInterface/components';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { SystemSettingsDialog } from '@/hooks/Mapper/components/mapInterface/components/SystemSettingsDialog/SystemSettingsDialog.tsx';
import { LayoutEventBlocker, SystemView, TooltipPosition, WdImgButton } from '@/hooks/Mapper/components/ui-kit';
import { SystemInfoContent } from './SystemInfoContent';
import { ANOIK_ICON, DOTLAN_ICON, ZKB_ICON } from '@/hooks/Mapper/icons';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { getSystemStaticInfo } from '@/hooks/Mapper/mapRootProvider/hooks/useLoadSystemStatic';
import { PrimeIcons } from 'primereact/api';
import { useCallback, useState } from 'react';
import { SystemSettingsDialog } from '@/hooks/Mapper/components/mapInterface/components/SystemSettingsDialog/SystemSettingsDialog.tsx';
import { ANOIK_ICON, DOTLAN_ICON, ZKB_ICON } from '@/hooks/Mapper/icons';
import { getSystemStaticInfo } from '@/hooks/Mapper/mapRootProvider/hooks/useLoadSystemStatic';
import { SystemInfoContent } from './SystemInfoContent';
export const SystemInfo = () => {
const [visible, setVisible] = useState(false);
@@ -48,7 +48,7 @@ export const SystemInfo = () => {
</div>
<LayoutEventBlocker className="flex gap-1 items-center">
<a href={`https://zkillboard.com/system/${systemId}`} rel="noreferrer" target="_blank">
<a href={`https://zkillboard.com/system/${systemId}/`} rel="noreferrer" target="_blank">
<img src={ZKB_ICON} width="14" height="14" className="external-icon" />
</a>
<a href={`http://anoik.is/systems/${solarSystemName}`} rel="noreferrer" target="_blank">

View File

@@ -1,123 +1,16 @@
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { Widget } from '@/hooks/Mapper/components/mapInterface/components';
import { SETTINGS_KEYS, SIGNATURE_WINDOW_ID, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';
import { useHotkey } from '@/hooks/Mapper/hooks/useHotkey';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { useCallback, useMemo, useState } from 'react';
import { useSignatureUndo } from './hooks/useSignatureUndo';
import { useSystemSignaturesData } from './hooks/useSystemSignaturesData';
import { SystemSignaturesHeader } from './SystemSignatureHeader';
import { SystemSignaturesContent } from './SystemSignaturesContent';
import { SystemSignatureSettingsDialog } from './SystemSignatureSettingsDialog';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { SystemSignaturesHeader } from './SystemSignatureHeader';
import { useHotkey } from '@/hooks/Mapper/hooks/useHotkey';
import { getDeletionTimeoutMs } from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants.ts';
import { OutCommand, OutCommandHandler } from '@/hooks/Mapper/types/mapHandlers';
import { ExtendedSystemSignature } from '@/hooks/Mapper/types';
import { SETTINGS_KEYS, SIGNATURE_WINDOW_ID, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';
/**
* Custom hook for managing pending signature deletions and undo countdown.
*/
function useSignatureUndo(
systemId: string | undefined,
settings: SignatureSettingsType,
outCommand: OutCommandHandler,
) {
const [countdown, setCountdown] = useState<number>(0);
const [pendingIds, setPendingIds] = useState<Set<string>>(new Set());
const [deletedSignatures, setDeletedSignatures] = useState<ExtendedSystemSignature[]>([]);
const intervalRef = useRef<number | null>(null);
const addDeleted = useCallback((signatures: ExtendedSystemSignature[]) => {
const newIds = signatures.map(sig => sig.eve_id);
setPendingIds(prev => {
const next = new Set(prev);
newIds.forEach(id => next.add(id));
return next;
});
setDeletedSignatures(prev => [...prev, ...signatures]);
}, []);
// Clear deleted signatures when system changes
useEffect(() => {
if (systemId) {
setDeletedSignatures([]);
setPendingIds(new Set());
setCountdown(0);
if (intervalRef.current != null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
}
}, [systemId]);
// kick off or clear countdown whenever pendingIds changes
useEffect(() => {
// clear any existing timer
if (intervalRef.current != null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
if (pendingIds.size === 0) {
setCountdown(0);
setDeletedSignatures([]);
return;
}
// determine timeout from settings
const timeoutMs = getDeletionTimeoutMs(settings);
// Ensure a minimum of 1 second for immediate deletion so the UI shows
const effectiveTimeoutMs = timeoutMs === 0 ? 1000 : timeoutMs;
setCountdown(Math.ceil(effectiveTimeoutMs / 1000));
// start new interval
intervalRef.current = window.setInterval(() => {
setCountdown(prev => {
if (prev <= 1) {
clearInterval(intervalRef.current!);
intervalRef.current = null;
setPendingIds(new Set());
setDeletedSignatures([]);
return 0;
}
return prev - 1;
});
}, 1000);
return () => {
if (intervalRef.current != null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
};
}, [pendingIds, settings]);
// undo handler
const handleUndo = useCallback(async () => {
if (!systemId || pendingIds.size === 0) return;
await outCommand({
type: OutCommand.undoDeleteSignatures,
data: { system_id: systemId, eve_ids: Array.from(pendingIds) },
});
setPendingIds(new Set());
setDeletedSignatures([]);
setCountdown(0);
if (intervalRef.current != null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
}, [systemId, pendingIds, outCommand]);
return {
pendingIds,
countdown,
deletedSignatures,
addDeleted,
handleUndo,
};
}
export const SystemSignatures = () => {
const [visible, setVisible] = useState(false);
const [sigCount, setSigCount] = useState(0);
const [showSettings, setShowSettings] = useState(false);
const {
data: { selectedSystems },
@@ -127,31 +20,6 @@ export const SystemSignatures = () => {
const [systemId] = selectedSystems;
const isSystemSelected = useMemo(() => selectedSystems.length === 1, [selectedSystems.length]);
const { pendingIds, countdown, deletedSignatures, addDeleted, handleUndo } = useSignatureUndo(
systemId,
settingsSignatures,
outCommand,
);
useHotkey(true, ['z', 'Z'], (event: KeyboardEvent) => {
if (pendingIds.size > 0 && countdown > 0) {
event.preventDefault();
event.stopPropagation();
handleUndo();
}
});
const handleCountChange = useCallback((count: number) => {
setSigCount(count);
}, []);
const handleSettingsSave = useCallback(
(newSettings: SignatureSettingsType) => {
settingsSignaturesUpdate(newSettings);
setVisible(false);
},
[settingsSignaturesUpdate],
);
const handleLazyDeleteToggle = useCallback(
(value: boolean) => {
@@ -163,7 +31,42 @@ export const SystemSignatures = () => {
[settingsSignaturesUpdate],
);
const openSettings = useCallback(() => setVisible(true), []);
const {
signatures,
selectedSignatures,
setSelectedSignatures,
handleDeleteSelected,
handleSelectAll,
handlePaste,
hasUnsupportedLanguage,
} = useSystemSignaturesData({
systemId,
settings: settingsSignatures,
onLazyDeleteChange: handleLazyDeleteToggle,
});
const sigCount = useMemo(() => signatures.length, [signatures]);
const deletedSignatures = useMemo(() => signatures.filter(s => s.deleted), [signatures]);
const { countdown, handleUndo } = useSignatureUndo(systemId, settingsSignatures, deletedSignatures, outCommand);
useHotkey(true, ['z', 'Z'], (event: KeyboardEvent) => {
if (deletedSignatures.length > 0 && countdown > 0) {
event.preventDefault();
event.stopPropagation();
handleUndo();
}
});
const handleSettingsSave = useCallback(
(newSettings: SignatureSettingsType) => {
settingsSignaturesUpdate(newSettings);
setShowSettings(false);
},
[settingsSignaturesUpdate],
);
const openSettings = useCallback(() => setShowSettings(true), []);
return (
<Widget
@@ -171,7 +74,7 @@ export const SystemSignatures = () => {
<SystemSignaturesHeader
sigCount={sigCount}
lazyDeleteValue={settingsSignatures[SETTINGS_KEYS.LAZY_DELETE_SIGNATURES] as boolean}
pendingCount={pendingIds.size}
pendingCount={deletedSignatures.length}
undoCountdown={countdown}
onLazyDeleteChange={handleLazyDeleteToggle}
onUndoClick={handleUndo}
@@ -187,18 +90,21 @@ export const SystemSignatures = () => {
) : (
<SystemSignaturesContent
systemId={systemId}
signatures={signatures}
selectedSignatures={selectedSignatures}
onSelectSignatures={setSelectedSignatures}
onDeleteSelected={handleDeleteSelected}
onSelectAll={handleSelectAll}
onPaste={handlePaste}
hasUnsupportedLanguage={hasUnsupportedLanguage}
settings={settingsSignatures}
deletedSignatures={deletedSignatures}
onLazyDeleteChange={handleLazyDeleteToggle}
onCountChange={handleCountChange}
onSignatureDeleted={addDeleted}
/>
)}
{visible && (
{showSettings && (
<SystemSignatureSettingsDialog
settings={settingsSignatures}
onCancel={() => setVisible(false)}
onCancel={() => setShowSettings(false)}
onSave={handleSettingsSave}
/>
)}

View File

@@ -33,34 +33,39 @@ import { useClipboard, useHotkey } from '@/hooks/Mapper/hooks';
import useMaxWidth from '@/hooks/Mapper/hooks/useMaxWidth';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { getSignatureRowClass } from '../helpers/rowStyles';
import { useSystemSignaturesData } from '../hooks/useSystemSignaturesData';
const renderColIcon = (sig: SystemSignature) => renderIcon(sig);
interface SystemSignaturesContentProps {
systemId: string;
signatures: ExtendedSystemSignature[];
selectedSignatures?: ExtendedSystemSignature[];
onSelectSignatures?: (s: ExtendedSystemSignature[]) => void;
onDeleteSelected?: () => Promise<void>;
onSelectAll?: () => void;
onPaste?: (clipboardString: string) => void;
settings: SignatureSettingsType;
hideLinkedSignatures?: boolean;
hasUnsupportedLanguage?: boolean;
selectable?: boolean;
onSelect?: (signature: SystemSignature) => void;
onLazyDeleteChange?: (value: boolean) => void;
onCountChange?: (count: number) => void;
filterSignature?: (signature: SystemSignature) => boolean;
onSignatureDeleted?: (deletedSignatures: ExtendedSystemSignature[]) => void;
deletedSignatures?: ExtendedSystemSignature[];
}
export const SystemSignaturesContent = ({
systemId,
signatures,
selectedSignatures,
onSelectSignatures,
onDeleteSelected,
onSelectAll,
onPaste,
settings,
hideLinkedSignatures,
hasUnsupportedLanguage,
selectable,
onSelect,
onLazyDeleteChange,
onCountChange,
filterSignature,
onSignatureDeleted,
deletedSignatures = [],
}: SystemSignaturesContentProps) => {
const [selectedSignatureForDialog, setSelectedSignatureForDialog] = useState<SystemSignature | null>(null);
const [showSignatureSettings, setShowSignatureSettings] = useState(false);
@@ -79,32 +84,18 @@ export const SystemSignaturesContent = ({
const { clipboardContent, setClipboardContent } = useClipboard();
const {
signatures,
selectedSignatures,
setSelectedSignatures,
handleDeleteSelected,
handleSelectAll,
handlePaste,
hasUnsupportedLanguage,
} = useSystemSignaturesData({
systemId,
settings,
onCountChange,
onLazyDeleteChange,
onSignatureDeleted,
});
const deletedSignatures = useMemo(() => signatures.filter(s => s.deleted), [signatures]);
useEffect(() => {
if (selectable) return;
if (!clipboardContent?.text) return;
handlePaste(clipboardContent.text);
onPaste?.(clipboardContent.text);
setClipboardContent(null);
}, [selectable, clipboardContent, handlePaste, setClipboardContent]);
}, [selectable, clipboardContent, onPaste, setClipboardContent]);
useHotkey(true, ['a'], handleSelectAll);
useHotkey(true, ['a'], () => onSelectAll?.());
useHotkey(false, ['Backspace', 'Delete'], (event: KeyboardEvent) => {
const targetWindow = (event.target as HTMLHtmlElement)?.closest(`[data-window-id="${SIGNATURE_WINDOW_ID}"]`);
@@ -117,7 +108,7 @@ export const SystemSignaturesContent = ({
event.stopPropagation();
// Delete key should always immediately delete, never show pending deletions
handleDeleteSelected();
onDeleteSelected?.();
});
const handleResize = useCallback(() => {
@@ -152,9 +143,9 @@ export const SystemSignaturesContent = ({
selectable
? onSelect?.(selectableSignatures[0])
: setSelectedSignatures(selectableSignatures as ExtendedSystemSignature[]);
: onSelectSignatures?.(selectableSignatures as ExtendedSystemSignature[]);
},
[onSelect, selectable, setSelectedSignatures, deletedSignatures],
[onSelect, selectable, onSelectSignatures, deletedSignatures],
);
const {
@@ -177,9 +168,6 @@ export const SystemSignaturesContent = ({
);
const filteredSignatures = useMemo<ExtendedSystemSignature[]>(() => {
// Get the set of deleted signature IDs for quick lookup
const deletedIds = new Set(deletedSignatures.map(sig => sig.eve_id));
// Common filter function
const shouldShowSignature = (sig: ExtendedSystemSignature): boolean => {
if (filterSignature && !filterSignature(sig)) {
@@ -213,24 +201,8 @@ export const SystemSignaturesContent = ({
return settings[sig.kind] as boolean;
};
// Filter active signatures, excluding any that are in the deleted list
const activeSignatures = signatures.filter(sig => {
// Skip if this signature is in the deleted list
if (deletedIds.has(sig.eve_id)) {
return false;
}
return shouldShowSignature(sig);
});
// Add deleted signatures with pending deletion flag, applying the same filters
const deletedWithPendingFlag = deletedSignatures.filter(shouldShowSignature).map(sig => ({
...sig,
pendingDeletion: true,
}));
return [...activeSignatures, ...deletedWithPendingFlag];
}, [signatures, hideLinkedSignatures, settings, filterSignature, deletedSignatures]);
return signatures.filter(sig => shouldShowSignature(sig));
}, [signatures, hideLinkedSignatures, settings, filterSignature]);
const onRowMouseEnter = useCallback((e: DataTableRowMouseEvent) => {
setHoveredSignature(e.data as SystemSignature);
@@ -253,20 +225,18 @@ export const SystemSignaturesContent = ({
return getSignatureRowClass(
rowData as ExtendedSystemSignature,
refVars.current.selectedSignatures,
refVars.current.selectedSignatures || [],
refVars.current.settings[SETTINGS_KEYS.COLOR_BY_TYPE] as boolean,
);
}, []);
const handleSortSettings = useCallback(
(e: DataTableStateEvent) =>
refVars.current.settingsSignaturesUpdate({
...refVars.current.settingsSignatures,
[SETTINGS_KEYS.SORT_FIELD]: e.sortField,
[SETTINGS_KEYS.SORT_ORDER]: e.sortOrder,
}),
[],
);
const handleSortSettings = useCallback((e: DataTableStateEvent) => {
refVars.current.settingsSignaturesUpdate({
...refVars.current.settingsSignatures,
[SETTINGS_KEYS.SORT_FIELD]: e.sortField,
[SETTINGS_KEYS.SORT_ORDER]: e.sortOrder,
});
}, []);
return (
<div ref={tableRef} className="h-full">
@@ -287,7 +257,7 @@ export const SystemSignaturesContent = ({
value={filteredSignatures}
size="small"
selectionMode="multiple"
selection={selectedSignatures}
selection={selectedSignatures || []}
metaKeySelection
onSelectionChange={handleSelectSignatures}
dataKey="eve_id"
@@ -336,6 +306,8 @@ export const SystemSignaturesContent = ({
style={{ maxWidth: nameColumnWidth }}
hidden={isCompact || isMedium}
body={renderInfoColumn}
sortable
sortField="name"
/>
{showDescriptionColumn && (
<Column

View File

@@ -1,5 +1,5 @@
import clsx from 'clsx';
import { ExtendedSystemSignature, SignatureGroup } from '@/hooks/Mapper/types';
import clsx from 'clsx';
import { getRowBackgroundColor } from './getRowBackgroundColor';
import classes from './rowStyles.module.scss';
@@ -20,7 +20,7 @@ export function getSignatureRowClass(
return clsx([...baseCls, 'bg-violet-400/40 hover:bg-violet-300/40']);
}
if (row.pendingDeletion) {
if (row.deleted) {
return clsx([...baseCls, 'bg-red-400/40 hover:bg-red-400/50']);
}

View File

@@ -1,24 +1,20 @@
import { ExtendedSystemSignature } from '@/hooks/Mapper/types';
import { SignatureSettingsType } from '@/hooks/Mapper/constants/signatures.ts';
import { ExtendedSystemSignature } from '@/hooks/Mapper/types';
export interface UseSystemSignaturesDataProps {
systemId: string;
settings: SignatureSettingsType;
hideLinkedSignatures?: boolean;
onCountChange?: (count: number) => void;
onPendingChange?: (
pending: React.MutableRefObject<Record<string, ExtendedSystemSignature>>,
undo: () => void,
) => void;
onLazyDeleteChange?: (value: boolean) => void;
deletionTiming?: number;
}
export interface UseFetchingParams {
systemId: string;
settings: SignatureSettingsType;
signaturesRef: React.MutableRefObject<ExtendedSystemSignature[]>;
setSignatures: React.Dispatch<React.SetStateAction<ExtendedSystemSignature[]>>;
pendingDeletionMapRef: React.MutableRefObject<Record<string, ExtendedSystemSignature>>;
}
export interface UsePendingDeletionParams {

View File

@@ -1,42 +0,0 @@
import { useCallback, useRef } from 'react';
import { OutCommand } from '@/hooks/Mapper/types/mapHandlers';
import { prepareUpdatePayload } from '../helpers';
import { UsePendingDeletionParams } from './types';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { ExtendedSystemSignature } from '@/hooks/Mapper/types';
export function usePendingDeletions({
systemId,
setSignatures,
onPendingChange,
}: Omit<UsePendingDeletionParams, 'deletionTiming'>) {
const { outCommand } = useMapRootState();
const pendingDeletionMapRef = useRef<Record<string, ExtendedSystemSignature>>({});
const processRemovedSignatures = useCallback(
async (
removed: ExtendedSystemSignature[],
added: ExtendedSystemSignature[],
updated: ExtendedSystemSignature[],
) => {
if (!removed.length) return;
await outCommand({
type: OutCommand.updateSignatures,
data: prepareUpdatePayload(systemId, added, updated, removed),
});
},
[systemId, outCommand],
);
const clearPendingDeletions = useCallback(() => {
pendingDeletionMapRef.current = {};
setSignatures(prev => prev.map(x => (x.pendingDeletion ? { ...x, pendingDeletion: false } : x)));
onPendingChange?.(pendingDeletionMapRef, clearPendingDeletions);
}, []);
return {
pendingDeletionMapRef,
processRemovedSignatures,
clearPendingDeletions,
};
}

View File

@@ -1,21 +1,27 @@
import { useCallback } from 'react';
import { SETTINGS_KEYS } from '@/hooks/Mapper/constants/signatures';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { ExtendedSystemSignature, SystemSignature } from '@/hooks/Mapper/types';
import { OutCommand } from '@/hooks/Mapper/types/mapHandlers';
import { prepareUpdatePayload, getActualSigs, mergeLocalPending } from '../helpers';
import { useCallback, useMemo } from 'react';
import { getDeletionTimeoutMs } from '../constants';
import { getActualSigs, prepareUpdatePayload } from '../helpers';
import { UseFetchingParams } from './types';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
export const useSignatureFetching = ({
systemId,
signaturesRef,
setSignatures,
pendingDeletionMapRef,
}: UseFetchingParams) => {
export const useSignatureFetching = ({ systemId, settings, signaturesRef, setSignatures }: UseFetchingParams) => {
const {
data: { characters },
outCommand,
} = useMapRootState();
const deleteTimeout = useMemo(() => {
const lazyDelete = settings[SETTINGS_KEYS.LAZY_DELETE_SIGNATURES] as boolean;
if (!lazyDelete) {
return 0;
}
return getDeletionTimeoutMs(settings);
}, [settings]);
const handleGetSignatures = useCallback(async () => {
if (!systemId) {
setSignatures([]);
@@ -32,24 +38,23 @@ export const useSignatureFetching = ({
character_name: characters.find(c => c.eve_id === s.character_eve_id)?.name,
})) as ExtendedSystemSignature[];
setSignatures(() => mergeLocalPending(pendingDeletionMapRef, extended));
setSignatures(() => extended);
}, [characters, systemId, outCommand]);
const handleUpdateSignatures = useCallback(
async (newList: ExtendedSystemSignature[], updateOnly: boolean, skipUpdateUntouched?: boolean) => {
const { added, updated, removed } = getActualSigs(
signaturesRef.current,
newList,
updateOnly,
skipUpdateUntouched,
);
const actualSigs = getActualSigs(signaturesRef.current, newList, updateOnly, skipUpdateUntouched);
await outCommand({
type: OutCommand.updateSignatures,
data: prepareUpdatePayload(systemId, added, updated, removed),
});
const { added, updated, removed } = actualSigs;
if (updated.length !== 0 || added.length !== 0 || removed.length !== 0) {
await outCommand({
type: OutCommand.updateSignatures,
data: { ...prepareUpdatePayload(systemId, added, updated, removed), deleteTimeout },
});
}
},
[systemId, outCommand, signaturesRef],
[systemId, deleteTimeout, outCommand, signaturesRef],
);
return {

View File

@@ -0,0 +1,89 @@
import { SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';
import { ExtendedSystemSignature, OutCommandHandler } from '@/hooks/Mapper/types';
import { OutCommand } from '@/hooks/Mapper/types/mapHandlers';
import { useCallback, useEffect, useRef, useState } from 'react';
import { getDeletionTimeoutMs } from '../constants';
/**
* Custom hook for managing pending signature deletions and undo countdown.
*/
export function useSignatureUndo(
systemId: string | undefined,
settings: SignatureSettingsType,
deletedSignatures: ExtendedSystemSignature[],
outCommand: OutCommandHandler,
) {
const [countdown, setCountdown] = useState<number>(0);
const intervalRef = useRef<number | null>(null);
  // Reset the countdown and clear any running timer when the system changes
useEffect(() => {
if (systemId) {
setCountdown(0);
if (intervalRef.current != null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
}
}, [systemId]);
  // kick off or clear the countdown whenever deletedSignatures changes
useEffect(() => {
// clear any existing timer
if (intervalRef.current != null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
if (deletedSignatures.length === 0) {
setCountdown(0);
return;
}
// determine timeout from settings
const timeoutMs = getDeletionTimeoutMs(settings);
    // Ensure a minimum of 1 second for immediate deletions so the UI still shows a countdown
const effectiveTimeoutMs = timeoutMs === 0 ? 1000 : timeoutMs;
setCountdown(Math.ceil(effectiveTimeoutMs / 1000));
// start new interval
intervalRef.current = window.setInterval(() => {
setCountdown(prev => {
if (prev <= 1) {
clearInterval(intervalRef.current!);
intervalRef.current = null;
return 0;
}
return prev - 1;
});
}, 1000);
return () => {
if (intervalRef.current != null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
};
}, [deletedSignatures, settings]);
// undo handler
const handleUndo = useCallback(async () => {
if (!systemId || deletedSignatures.length === 0) return;
await outCommand({
type: OutCommand.undoDeleteSignatures,
data: { system_id: systemId, eve_ids: deletedSignatures.map(s => s.eve_id) },
});
setCountdown(0);
if (intervalRef.current != null) {
clearInterval(intervalRef.current);
intervalRef.current = null;
}
}, [systemId, deletedSignatures, outCommand]);
return {
countdown,
handleUndo,
};
}
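
A minimal usage sketch for the new hook, assuming a parent widget that receives deleted signatures via useSystemSignaturesData's onSignatureDeleted callback; SignatureUndoBar and the relative import path are hypothetical:

import { useState } from 'react';
import { SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { ExtendedSystemSignature } from '@/hooks/Mapper/types';
import { useSignatureUndo } from './useSignatureUndo';

// Hypothetical consumer: shows an undo button while the deletion countdown is running.
export const SignatureUndoBar = ({ systemId, settings }: { systemId: string; settings: SignatureSettingsType }) => {
  const { outCommand } = useMapRootState();
  // In practice this would be populated from useSystemSignaturesData's onSignatureDeleted callback.
  const [deleted] = useState<ExtendedSystemSignature[]>([]);
  const { countdown, handleUndo } = useSignatureUndo(systemId, settings, deleted, outCommand);
  if (countdown === 0) {
    return null;
  }
  return <button onClick={handleUndo}>Undo delete ({countdown}s)</button>;
};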

View File

@@ -1,44 +1,29 @@
import { useMapEventListener } from '@/hooks/Mapper/events';
import { parseSignatures } from '@/hooks/Mapper/helpers';
import { Commands, ExtendedSystemSignature, SignatureKind } from '@/hooks/Mapper/types';
import { OutCommand } from '@/hooks/Mapper/types/mapHandlers';
import { useCallback, useEffect, useState } from 'react';
import useRefState from 'react-usestateref';
import { getDeletionTimeoutMs } from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants.ts';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { getActualSigs } from '../helpers';
import { UseSystemSignaturesDataProps } from './types';
import { usePendingDeletions } from './usePendingDeletions';
import { useSignatureFetching } from './useSignatureFetching';
import { SETTINGS_KEYS } from '@/hooks/Mapper/constants/signatures.ts';
import { UseSystemSignaturesDataProps } from './types';
import { useSignatureFetching } from './useSignatureFetching';
export const useSystemSignaturesData = ({
systemId,
settings,
onCountChange,
onPendingChange,
onLazyDeleteChange,
onSignatureDeleted,
}: Omit<UseSystemSignaturesDataProps, 'deletionTiming'> & {
onSignatureDeleted?: (deletedSignatures: ExtendedSystemSignature[]) => void;
}) => {
const { outCommand } = useMapRootState();
const [signatures, setSignatures, signaturesRef] = useRefState<ExtendedSystemSignature[]>([]);
const [selectedSignatures, setSelectedSignatures] = useState<ExtendedSystemSignature[]>([]);
const [hasUnsupportedLanguage, setHasUnsupportedLanguage] = useState<boolean>(false);
const { pendingDeletionMapRef, processRemovedSignatures, clearPendingDeletions } = usePendingDeletions({
systemId,
setSignatures,
onPendingChange,
});
const { handleGetSignatures, handleUpdateSignatures } = useSignatureFetching({
systemId,
settings,
signaturesRef,
setSignatures,
pendingDeletionMapRef,
});
const handlePaste = useCallback(
@@ -67,40 +52,14 @@ export const useSystemSignaturesData = ({
setHasUnsupportedLanguage(false);
}
const currentNonPending = lazyDeleteValue
? signaturesRef.current.filter(sig => !sig.pendingDeletion)
: signaturesRef.current.filter(sig => !sig.pendingDeletion || !sig.pendingAddition);
const { added, updated, removed } = getActualSigs(currentNonPending, incomingSignatures, !lazyDeleteValue, false);
if (removed.length > 0) {
await processRemovedSignatures(removed, added, updated);
// Show pending deletions if lazy deletion is enabled
// The deletion timing controls how long the countdown lasts, not whether lazy delete is active
if (onSignatureDeleted && lazyDeleteValue) {
onSignatureDeleted(removed);
}
}
if (updated.length !== 0 || added.length !== 0) {
await outCommand({
type: OutCommand.updateSignatures,
data: {
system_id: systemId,
added,
updated,
removed: [],
},
});
}
await handleUpdateSignatures(incomingSignatures, !lazyDeleteValue, false);
const keepLazy = settings[SETTINGS_KEYS.KEEP_LAZY_DELETE] as boolean;
if (lazyDeleteValue && !keepLazy) {
onLazyDeleteChange?.(false);
}
},
[settings, signaturesRef, processRemovedSignatures, outCommand, systemId, onLazyDeleteChange, onSignatureDeleted],
[settings, handleUpdateSignatures, onLazyDeleteChange],
);
const handleDeleteSelected = useCallback(async () => {
@@ -109,23 +68,15 @@ export const useSystemSignaturesData = ({
const selectedIds = selectedSignatures.map(s => s.eve_id);
const finalList = signatures.filter(s => !selectedIds.includes(s.eve_id));
// IMPORTANT: Send deletion to server BEFORE updating local state
// Otherwise signaturesRef.current will be updated and getActualSigs won't detect removals
await handleUpdateSignatures(finalList, false, true);
// Update local state after server call
setSignatures(finalList);
setSelectedSignatures([]);
}, [handleUpdateSignatures, selectedSignatures, signatures, setSignatures]);
await handleUpdateSignatures(finalList, false, true);
}, [handleUpdateSignatures, selectedSignatures, signatures]);
const handleSelectAll = useCallback(() => {
setSelectedSignatures(signatures);
}, [signatures]);
const undoPending = useCallback(() => {
clearPendingDeletions();
}, [clearPendingDeletions]);
useMapEventListener(event => {
if (event.name === Commands.signaturesUpdated && String(event.data) === String(systemId)) {
handleGetSignatures();
@@ -136,18 +87,13 @@ export const useSystemSignaturesData = ({
useEffect(() => {
if (!systemId) {
setSignatures([]);
undoPending();
return;
}
handleGetSignatures();
}, [systemId]);
useEffect(() => {
onCountChange?.(signatures.length);
}, [signatures]);
return {
signatures: signatures.filter(sig => !sig.deleted),
signatures,
selectedSignatures,
setSelectedSignatures,
handleDeleteSelected,

View File

@@ -1,14 +1,14 @@
import { PrimeIcons } from 'primereact/api';
import { SignatureGroup, SystemSignature } from '@/hooks/Mapper/types';
import { SystemViewStandalone, TooltipPosition, WHClassView } from '@/hooks/Mapper/components/ui-kit';
import { SignatureGroup, SystemSignature, TimeStatus } from '@/hooks/Mapper/types';
import { PrimeIcons } from 'primereact/api';
import { renderK162Type } from '@/hooks/Mapper/components/mapRootContent/components/SignatureSettings/components/SignatureK162TypeSelect';
import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper';
import clsx from 'clsx';
import { renderName } from './renderName.tsx';
import { K162_TYPES_MAP } from '@/hooks/Mapper/constants.ts';
import { parseSignatureCustomInfo } from '@/hooks/Mapper/helpers/parseSignatureCustomInfo.ts';
import clsx from 'clsx';
import { renderName } from './renderName.tsx';
export const renderInfoColumn = (row: SystemSignature) => {
if (!row.group || row.group === SignatureGroup.Wormhole) {
@@ -18,7 +18,9 @@ export const renderInfoColumn = (row: SystemSignature) => {
return (
<div className="flex justify-start items-center gap-[4px]">
{customInfo.isEOL && (
{row.temporary_name && <span className={clsx('text-[12px]')}>{row.temporary_name}</span>}
{customInfo.time_status === TimeStatus._1h && (
<WdTooltipWrapper offset={5} position={TooltipPosition.top} content="Signature marked as EOL">
<div className="pi pi-clock text-fuchsia-400 text-[11px] mr-[2px]"></div>
</WdTooltipWrapper>

View File

@@ -30,9 +30,6 @@ export const SystemStructuresDialog: React.FC<StructuresEditDialogProps> = ({
const { outCommand } = useMapRootState();
const [prevQuery, setPrevQuery] = useState('');
const [prevResults, setPrevResults] = useState<{ label: string; value: string }[]>([]);
useEffect(() => {
if (structure) {
setEditData(structure);
@@ -46,34 +43,24 @@ export const SystemStructuresDialog: React.FC<StructuresEditDialogProps> = ({
// Searching corporation owners via auto-complete
const searchOwners = useCallback(
async (e: { query: string }) => {
const newQuery = e.query.trim();
if (!newQuery) {
const query = e.query.trim();
if (!query) {
setOwnerSuggestions([]);
return;
}
      // If the user typed more text and we already have a partial match in prevResults
if (newQuery.startsWith(prevQuery) && prevResults.length > 0) {
const filtered = prevResults.filter(item => item.label.toLowerCase().includes(newQuery.toLowerCase()));
setOwnerSuggestions(filtered);
return;
}
try {
// TODO fix it
const { results = [] } = await outCommand({
type: OutCommand.getCorporationNames,
data: { search: newQuery },
data: { search: query },
});
setOwnerSuggestions(results);
setPrevQuery(newQuery);
setPrevResults(results);
} catch (err) {
console.error('Failed to fetch owners:', err);
setOwnerSuggestions([]);
}
},
[prevQuery, prevResults, outCommand],
[outCommand],
);
const handleChange = (field: keyof StructureItem, val: string | Date) => {
@@ -122,7 +109,6 @@ export const SystemStructuresDialog: React.FC<StructuresEditDialogProps> = ({
// fetch corporation ticker if we have an ownerId
if (editData.ownerId) {
try {
// TODO fix it
const { ticker } = await outCommand({
type: OutCommand.getCorporationTicker,
data: { corp_id: editData.ownerId },

View File

@@ -64,7 +64,7 @@ export const ImportExport = () => {
      // INFO: WE DO NOT SUPPORT MIGRATIONS FOR OLD FILES AND Clipboard
const parsed = parseMapUserSettings(text);
if (applySettings(applyMigrations(parsed))) {
if (applySettings(applyMigrations(parsed) || createDefaultStoredSettings())) {
toast.current?.show({
severity: 'success',
summary: 'Import',

View File

@@ -35,7 +35,7 @@ export const ServerSettings = () => {
try {
      //INFO: INSTEAD OF CHECKING WE WILL TRY TO APPLY THE MIGRATION
applySettings(applyMigrations(JSON.parse(res.default_settings)));
applySettings(applyMigrations(JSON.parse(res.default_settings)) || createDefaultStoredSettings());
callToastSuccess(toast.current, 'Settings synchronized successfully');
} catch (error) {
applySettings(createDefaultStoredSettings());

View File

@@ -1,15 +1,15 @@
import { Dialog } from 'primereact/dialog';
import { useCallback, useEffect } from 'react';
import { OutCommand, SignatureGroup, SystemSignature, TimeStatus } from '@/hooks/Mapper/types';
import { Controller, FormProvider, useForm } from 'react-hook-form';
import {
SignatureGroupContent,
SignatureGroupSelect,
} from '@/hooks/Mapper/components/mapRootContent/components/SignatureSettings/components';
import { InputText } from 'primereact/inputtext';
import { SystemsSettingsProvider } from '@/hooks/Mapper/components/mapRootContent/components/SignatureSettings/Provider.tsx';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { WdButton } from '@/hooks/Mapper/components/ui-kit';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { OutCommand, SignatureGroup, SystemSignature, TimeStatus } from '@/hooks/Mapper/types';
import { Dialog } from 'primereact/dialog';
import { InputText } from 'primereact/inputtext';
import { useCallback, useEffect } from 'react';
import { Controller, FormProvider, useForm } from 'react-hook-form';
type SystemSignaturePrepared = Omit<SystemSignature, 'linked_system'> & {
linked_system: string;
@@ -119,6 +119,7 @@ export const SignatureSettings = ({ systemId, show, onHide, signatureData }: Map
added: [],
updated: [out],
removed: [],
deleteTimeout: 0,
},
});

View File

@@ -1,36 +1,36 @@
import { Map, MAP_ROOT_ID } from '@/hooks/Mapper/components/map/Map.tsx';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { CommandSelectSystems, OutCommand, OutCommandHandler, SolarSystemConnection } from '@/hooks/Mapper/types';
import { MapRootData, useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { OnMapAddSystemCallback, OnMapSelectionChange } from '@/hooks/Mapper/components/map/map.types.ts';
import isEqual from 'lodash.isequal';
import { ContextMenuSystem, useContextMenuSystemHandlers } from '@/hooks/Mapper/components/contexts';
import { Map, MAP_ROOT_ID } from '@/hooks/Mapper/components/map/Map.tsx';
import { OnMapAddSystemCallback, OnMapSelectionChange } from '@/hooks/Mapper/components/map/map.types.ts';
import {
SystemCustomLabelDialog,
SystemLinkSignatureDialog,
SystemSettingsDialog,
} from '@/hooks/Mapper/components/mapInterface/components';
import { Connections } from '@/hooks/Mapper/components/mapRootContent/components/Connections';
import { ContextMenuSystemMultiple, useContextMenuSystemMultipleHandlers } from '../contexts/ContextMenuSystemMultiple';
import { getSystemById } from '@/hooks/Mapper/helpers';
import { MapRootData, useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { CommandSelectSystems, OutCommand, OutCommandHandler, SolarSystemConnection } from '@/hooks/Mapper/types';
import { Commands } from '@/hooks/Mapper/types/mapHandlers.ts';
import isEqual from 'lodash.isequal';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { Node, useReactFlow, Viewport, XYPosition } from 'reactflow';
import { ContextMenuSystemMultiple, useContextMenuSystemMultipleHandlers } from '../contexts/ContextMenuSystemMultiple';
import { useCommandsSystems } from '@/hooks/Mapper/mapRootProvider/hooks/api';
import { emitMapEvent, useMapEventListener } from '@/hooks/Mapper/events';
import { useCommandsSystems } from '@/hooks/Mapper/mapRootProvider/hooks/api';
import { useDeleteSystems } from '@/hooks/Mapper/components/contexts/hooks';
import { useCommonMapEventProcessor } from '@/hooks/Mapper/components/mapWrapper/hooks/useCommonMapEventProcessor.ts';
import {
AddSystemDialog,
SearchOnSubmitCallback,
} from '@/hooks/Mapper/components/mapInterface/components/AddSystemDialog';
import { useHotkey } from '../../hooks/useHotkey';
import { PingType } from '@/hooks/Mapper/types/ping.ts';
import { SystemPingDialog } from '@/hooks/Mapper/components/mapInterface/components/SystemPingDialog';
import { MiniMapPlacement } from '@/hooks/Mapper/mapRootProvider/types.ts';
import { useCommonMapEventProcessor } from '@/hooks/Mapper/components/mapWrapper/hooks/useCommonMapEventProcessor.ts';
import { MINIMAP_PLACEMENT_MAP } from '@/hooks/Mapper/constants.ts';
import { MiniMapPlacement } from '@/hooks/Mapper/mapRootProvider/types.ts';
import { PingType } from '@/hooks/Mapper/types/ping.ts';
import type { PanelPosition } from '@reactflow/core';
import { useHotkey } from '../../hooks/useHotkey';
import { MINI_MAP_PLACEMENT_OFFSETS } from './constants.ts';
// TODO: INFO - this component is needed for abstract work with the Map instance
@@ -106,7 +106,7 @@ export const MapWrapper = () => {
runCommand({
name: Commands.selectSystems,
data: { systems: selectedSystems } as CommandSelectSystems,
data: { systems: selectedSystems, delay: 200 } as CommandSelectSystems,
});
}
});

View File

@@ -4,8 +4,17 @@ import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrap
import { TooltipPosition } from '@/hooks/Mapper/components/ui-kit/WdTooltip';
import clsx from 'clsx';
type MenuItemWithInfoProps = { infoTitle: ReactNode; infoClass?: string } & WithChildren;
export const MenuItemWithInfo = ({ children, infoClass, infoTitle }: MenuItemWithInfoProps) => {
type MenuItemWithInfoProps = {
infoTitle: ReactNode;
infoClass?: string;
tooltipWrapperClassName?: string;
} & WithChildren;
export const MenuItemWithInfo = ({
children,
infoClass,
infoTitle,
tooltipWrapperClassName,
}: MenuItemWithInfoProps) => {
return (
<div className="flex justify-between w-full h-full items-center">
{children}
@@ -13,6 +22,7 @@ export const MenuItemWithInfo = ({ children, infoClass, infoTitle }: MenuItemWit
content={infoTitle}
position={TooltipPosition.top}
className="!opacity-100 !pointer-events-auto"
wrapperClassName={tooltipWrapperClassName}
>
<div className={clsx('pi text-orange-400', infoClass)} />
</WdTooltipWrapper>

View File

@@ -45,40 +45,42 @@ export const WHClassView = ({
const whClass = useMemo(() => WORMHOLES_ADDITIONAL_INFO[whData.dest], [whData.dest]);
const whClassStyle = WORMHOLE_CLASS_STYLES[whClass?.wormholeClassID] ?? '';
return (
<div className={clsx(classes.WHClassViewRoot, className)}>
{!hideTooltip && (
<WdTooltipWrapper
position={TooltipPosition.bottom}
content={
<div className="flex gap-3">
<div className="flex flex-col gap-1">
<InfoDrawer title="Total mass">{prepareMass(whData.total_mass)}</InfoDrawer>
<InfoDrawer title="Jump mass">{prepareMass(whData.max_mass_per_jump)}</InfoDrawer>
</div>
<div className="flex flex-col gap-1">
<InfoDrawer title="Lifetime">{whData.lifetime}h</InfoDrawer>
<InfoDrawer title="Mass regen">{prepareMass(whData.mass_regen)}</InfoDrawer>
</div>
</div>
}
>
<div
className={clsx(
classes.WHClassViewContent,
{ [classes.NoOffset]: noOffset },
'wh-name select-none cursor-help',
)}
>
{!hideWhClassName && <span className={clsx({ [whClassStyle]: highlightName })}>{whClassName}</span>}
{!hideWhClass && whClass && (
<span className={clsx(classes.WHClassName, whClassStyle, classNameWh)}>
{useShortTitle ? whClass.shortTitle : whClass.shortName}
</span>
)}
</div>
</WdTooltipWrapper>
const content = (
<div
className={clsx(classes.WHClassViewContent, { [classes.NoOffset]: noOffset }, 'wh-name select-none cursor-help')}
>
{!hideWhClassName && <span className={clsx({ [whClassStyle]: highlightName })}>{whClassName}</span>}
{!hideWhClass && whClass && (
<span className={clsx(classes.WHClassName, whClassStyle, classNameWh)}>
{useShortTitle ? whClass.shortTitle : whClass.shortName}
</span>
)}
</div>
);
if (hideTooltip) {
return <div className={clsx(classes.WHClassViewRoot, className)}>{content}</div>;
}
return (
<div className={clsx(classes.WHClassViewRoot, className)}>
<WdTooltipWrapper
position={TooltipPosition.bottom}
content={
<div className="flex gap-3">
<div className="flex flex-col gap-1">
<InfoDrawer title="Total mass">{prepareMass(whData.total_mass)}</InfoDrawer>
<InfoDrawer title="Jump mass">{prepareMass(whData.max_mass_per_jump)}</InfoDrawer>
</div>
<div className="flex flex-col gap-1">
<InfoDrawer title="Lifetime">{whData.lifetime}h</InfoDrawer>
<InfoDrawer title="Mass regen">{prepareMass(whData.mass_regen)}</InfoDrawer>
</div>
</div>
}
>
{content}
</WdTooltipWrapper>
</div>
);
};

View File

@@ -1,13 +1,18 @@
import { WithChildren } from '@/hooks/Mapper/types/common.ts';
import { WithChildren, WithClassName } from '@/hooks/Mapper/types/common.ts';
import clsx from 'clsx';
type WdMenuItemProps = { icon?: string; disabled?: boolean } & WithChildren;
export const WdMenuItem = ({ children, icon, disabled }: WdMenuItemProps) => {
type WdMenuItemProps = { icon?: string; disabled?: boolean } & WithChildren & WithClassName;
export const WdMenuItem = ({ children, icon, disabled, className }: WdMenuItemProps) => {
return (
<a
className={clsx('flex gap-[6px] w-full h-full items-center px-[12px] !py-0 ml-[-2px]', 'p-menuitem-link', {
'p-disabled': disabled,
})}
className={clsx(
'flex gap-[6px] w-full h-full items-center px-[12px] !py-0',
'p-menuitem-link',
{
'p-disabled': disabled,
},
className,
)}
>
{icon && <div className={clsx('min-w-[20px]', icon)}></div>}
<div className="w-full">{children}</div>

View File

@@ -10,6 +10,7 @@ export type WdTooltipWrapperProps = {
interactive?: boolean;
smallPaddings?: boolean;
tooltipClassName?: string;
wrapperClassName?: string;
} & Omit<HTMLProps<HTMLDivElement>, 'content' | 'size'> &
Omit<TooltipProps, 'content'>;
@@ -26,6 +27,7 @@ export const WdTooltipWrapper = forwardRef<WdTooltipHandlers, WdTooltipWrapperPr
smallPaddings,
size,
tooltipClassName,
wrapperClassName,
...props
},
forwardedRef,
@@ -36,7 +38,7 @@ export const WdTooltipWrapper = forwardRef<WdTooltipHandlers, WdTooltipWrapperPr
return (
<div className={clsx(classes.WdTooltipWrapperRoot, className)} {...props}>
{targetSelector ? <>{children}</> : <div className={autoClass}>{children}</div>}
{targetSelector ? <>{children}</> : <div className={clsx(autoClass, wrapperClassName)}>{children}</div>}
<WdTooltip
ref={forwardedRef}

View File

@@ -1,12 +1,5 @@
import { PingsPlacement } from '@/hooks/Mapper/mapRootProvider/types.ts';
export enum SESSION_KEY {
viewPort = 'viewPort',
windows = 'windows',
windowsVisible = 'windowsVisible',
routes = 'routes',
}
export const SYSTEM_FOCUSED_LIFETIME = 10000;
export const GRADIENT_MENU_ACTIVE_CLASSES = 'bg-gradient-to-br from-transparent/10 to-fuchsia-300/10';

View File

@@ -3,3 +3,4 @@ export * from './parseSignatures';
export * from './getSystemById';
export * from './getEveImageUrl';
export * from './toastHelpers';
export * from './recenterSystems';

View File

@@ -0,0 +1,39 @@
import { XYPosition } from 'reactflow';
export type WithPosition<T = unknown> = T & { position: XYPosition };
export const computeBoundsCenter = (items: Array<WithPosition>): XYPosition => {
if (items.length === 0) return { x: 0, y: 0 };
let minX = Infinity;
let maxX = -Infinity;
let minY = Infinity;
let maxY = -Infinity;
for (const { position } of items) {
if (position.x < minX) minX = position.x;
if (position.x > maxX) maxX = position.x;
if (position.y < minY) minY = position.y;
if (position.y > maxY) maxY = position.y;
}
return {
x: minX + (maxX - minX) / 2,
y: minY + (maxY - minY) / 2,
};
};
/** Shifts all points so that the center of the bounding box becomes (0,0) */
export const recenterSystemsByBounds = <T extends WithPosition>(items: T[]): { center: XYPosition; systems: T[] } => {
const center = computeBoundsCenter(items);
const systems = items.map(it => ({
...it,
position: {
x: it.position.x - center.x,
y: it.position.y - center.y,
},
}));
return { center, systems };
};
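
A short usage sketch for the recenter helpers, e.g. when pasted systems should be spread around the paste point; the relative import path and the sample coordinates are illustrative:

import { recenterSystemsByBounds, WithPosition } from './recenterSystems';

type PastedSystem = WithPosition<{ solar_system_id: number }>;

const pasted: PastedSystem[] = [
  { solar_system_id: 30000142, position: { x: 100, y: 40 } },
  { solar_system_id: 31000005, position: { x: 300, y: 160 } },
];

// center is { x: 200, y: 100 }; systems are shifted to (-100, -60) and (100, 60).
const { center, systems } = recenterSystemsByBounds(pasted);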

View File

@@ -28,14 +28,17 @@ export const useEventBuffer = <T>(handler: UseEventBufferHandler<T>) => {
eventTickRef.current = eventTick;
// @ts-ignore
const handleEvent = useCallback(event => {
if (!eventTickRef.current) {
return;
}
const handleEvent = useCallback(
event => {
if (!eventTickRef.current) {
return;
}
eventsBufferRef.current.push(event);
eventTickRef.current();
}, []);
eventsBufferRef.current.push(event);
eventTickRef.current();
},
[eventTickRef.current],
);
return { handleEvent };
};

View File

@@ -19,7 +19,7 @@ export const createWidgetSettings = <T>(settings: T) => {
export const createDefaultStoredSettings = (): MapUserSettings => {
return {
version: STORED_SETTINGS_VERSION,
migratedFromOld: true,
migratedFromOld: false,
killsWidget: createWidgetSettings(DEFAULT_KILLS_WIDGET_SETTINGS),
localWidget: createWidgetSettings(DEFAULT_WIDGET_LOCAL_SETTINGS),
widgets: createWidgetSettings(getDefaultWidgetProps()),

View File

@@ -42,7 +42,7 @@ export const useActualizeRemoteMapSettings = ({
}
try {
applySettings(applyMigrations(JSON.parse(res.default_settings)));
applySettings(applyMigrations(JSON.parse(res.default_settings) || createDefaultStoredSettings()));
} catch (error) {
applySettings(createDefaultStoredSettings());
}

View File

@@ -115,10 +115,15 @@ export const useMapUserSettings = ({ map_slug }: MapRootData, outCommand: OutCom
}
try {
// here we try to restore settings
let oldMapData;
if (!currentMapUserSettings.migratedFromOld) {
const allData = extractData(LS_KEY_LEGASY);
oldMapData = allData?.[map_slug];
}
      // INFO: after migrations, migratedFromOld will always be true
const migratedResult = applyMigrations(
!currentMapUserSettings.migratedFromOld ? extractData(LS_KEY_LEGASY) : currentMapUserSettings,
);
const migratedResult = applyMigrations(oldMapData ? oldMapData : currentMapUserSettings);
if (!migratedResult) {
setIsReady(true);

View File

@@ -26,7 +26,7 @@ export const applyMigrations = (mapSettings: any) => {
return { ...currentMapSettings, version: STORED_SETTINGS_VERSION, migratedFromOld: true };
}
return;
return currentMapSettings;
}
const cmVersion = currentMapSettings.version || 0;

View File

@@ -1,4 +1,4 @@
export const STORED_SETTINGS_VERSION = 2;
export const LS_KEY_LEGASY = 'map-user-settings';
export const LS_KEY = 'map-user-settings-v2';
export const LS_KEY = 'map-user-settings-v3';

View File

@@ -9,3 +9,4 @@ export * from './connectionPassages';
export * from './permissions';
export * from './comment';
export * from './ping';
export * from './options';

View File

@@ -1,4 +1,4 @@
import { CommentType, PingData, SystemSignature, UserPermissions } from '@/hooks/Mapper/types';
import { CommentType, MapOptions, PingData, SystemSignature, UserPermissions } from '@/hooks/Mapper/types';
import { ActivitySummary, CharacterTypeRaw, TrackingCharacter } from '@/hooks/Mapper/types/character.ts';
import { SolarSystemConnection } from '@/hooks/Mapper/types/connection.ts';
import { DetailedKill, Kill } from '@/hooks/Mapper/types/kills.ts';
@@ -94,7 +94,7 @@ export type CommandInit = {
hubs: string[];
user_hubs: string[];
routes: RoutesList;
options: Record<string, string | boolean>;
options: MapOptions;
reset?: boolean;
is_subscription_active?: boolean;
main_character_eve_id?: string | null;
@@ -247,6 +247,7 @@ export enum OutCommand {
deleteSystems = 'delete_systems',
manualAddSystem = 'manual_add_system',
manualAddConnection = 'manual_add_connection',
manualPasteSystemsAndConnections = 'manual_paste_systems_and_connections',
manualDeleteConnection = 'manual_delete_connection',
setAutopilotWaypoint = 'set_autopilot_waypoint',
addSystem = 'add_system',

View File

@@ -4,7 +4,7 @@ import { CharacterTypeRaw } from '@/hooks/Mapper/types/character.ts';
import { SolarSystemRawType } from '@/hooks/Mapper/types/system.ts';
import { RoutesList } from '@/hooks/Mapper/types/routes.ts';
import { SolarSystemConnection } from '@/hooks/Mapper/types/connection.ts';
import { PingData, UserPermissions } from '@/hooks/Mapper/types';
import { MapOptions, PingData, UserPermissions } from '@/hooks/Mapper/types';
import { SystemSignature } from '@/hooks/Mapper/types/signatures';
export type MapUnionTypes = {
@@ -23,7 +23,7 @@ export type MapUnionTypes = {
kills: Record<number, number>;
connections: SolarSystemConnection[];
userPermissions: Partial<UserPermissions>;
options: Record<string, string | boolean>;
options: MapOptions;
isSubscriptionActive: boolean;
mainCharacterEveId: string | null;

View File

@@ -0,0 +1,14 @@
import { UserPermission } from '@/hooks/Mapper/types/permissions.ts';
export type StringBoolean = 'true' | 'false';
export type MapOptions = {
allowed_copy_for: UserPermission;
allowed_paste_for: UserPermission;
layout: string;
restrict_offline_showing: StringBoolean;
show_linked_signature_id: StringBoolean;
show_linked_signature_id_temp_name: StringBoolean;
show_temp_system_name: StringBoolean;
store_custom_labels: StringBoolean;
};
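
Several MapOptions fields arrive as the strings 'true'/'false' rather than booleans, so consumers typically coerce them; a minimal sketch (parseStringBoolean and shouldShowTempName are hypothetical helpers):

import { MapOptions, StringBoolean } from '@/hooks/Mapper/types';

// Hypothetical helper: coerce a StringBoolean option flag into a real boolean.
const parseStringBoolean = (value: StringBoolean | undefined): boolean => value === 'true';

const shouldShowTempName = (options: MapOptions): boolean =>
  parseStringBoolean(options.show_temp_system_name);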

View File

@@ -29,7 +29,7 @@ export type GroupType = {
export type SignatureCustomInfo = {
k162Type?: string;
isEOL?: boolean;
time_status?: number;
isCrit?: boolean;
};

View File

@@ -1,5 +1,5 @@
import { useEventBuffer } from '@/hooks/Mapper/hooks';
import usePageVisibility from '@/hooks/Mapper/hooks/usePageVisibility.ts';
import debounce from 'lodash.debounce';
import { MapHandlers } from '@/hooks/Mapper/types/mapHandlers.ts';
import { RefObject, useCallback, useEffect, useRef } from 'react';
@@ -16,23 +16,6 @@ export const useMapperHandlers = (handlerRefs: RefObject<MapHandlers>[], hooksRe
const visibleRef = useRef(visible);
visibleRef.current = visible;
// @ts-ignore
const handleBufferedEvent = useCallback(({ type, body }) => {
if (!visibleRef.current) {
return;
}
handlerRefs.forEach(ref => {
if (!ref.current) {
return;
}
ref.current?.command(type, body);
});
}, []);
const { handleEvent: handleMapEvent } = useEventBuffer<any>(handleBufferedEvent);
  // TODO - do not delete THIS code, it is needed for debugging
// const [record, setRecord] = useLocalStorageState<boolean>('record', {
// defaultValue: false,
@@ -73,6 +56,52 @@ export const useMapperHandlers = (handlerRefs: RefObject<MapHandlers>[], hooksRe
[hooksRef.current],
);
// @ts-ignore
const eventsBufferRef = useRef<{ type; body }[]>([]);
const eventTick = useCallback(
debounce(() => {
if (eventsBufferRef.current.length === 0) {
return;
}
const { type, body } = eventsBufferRef.current.shift()!;
handlerRefs.forEach(ref => {
if (!ref.current) {
return;
}
ref.current?.command(type, body);
});
      // TODO - do not delete THIS code, it is needed for debugging
// console.log('JOipP', `Tick Buff`, eventsBufferRef.current.length);
if (eventsBufferRef.current.length > 0) {
eventTick();
}
}, 10),
[],
);
const eventTickRef = useRef(eventTick);
eventTickRef.current = eventTick;
// @ts-ignore
const handleMapEvent = useCallback(({ type, body }) => {
    // TODO - do not delete THIS code, it is needed for debugging
// const currentTime = +new Date();
// const timeDiff = currentTime - prevEventTime;
// prevEventTime = currentTime;
// console.log('JOipP', `IN [${inIndex++}] [${timeDiff}] ${getFormattedTime()}`, { type, body });
if (!eventTickRef.current || !visibleRef.current) {
return;
}
eventsBufferRef.current.push({ type, body });
eventTickRef.current();
}, []);
useEffect(() => {
if (!visible && !wasHiddenOnce.current) {
wasHiddenOnce.current = true;

View File

@@ -3,3 +3,4 @@ export * from './getQueryVariable';
export * from './loadTextFile';
export * from './saveToFile';
export * from './omit';
export * from './jsonToUriBase64';

View File

@@ -0,0 +1,26 @@
export const encodeJsonToUriBase64 = (value: unknown): string => {
const json = JSON.stringify(value);
const uriEncoded = encodeURIComponent(json);
if (typeof window !== 'undefined' && typeof window.btoa === 'function') {
return window.btoa(uriEncoded);
}
// Node.js
// @ts-ignore
return Buffer.from(uriEncoded, 'utf8').toString('base64');
};
export const decodeUriBase64ToJson = <T = unknown>(base64: string): T => {
let uriEncoded: string;
if (typeof window !== 'undefined' && typeof window.atob === 'function') {
uriEncoded = window.atob(base64);
} else {
// Node.js
// @ts-ignore
uriEncoded = Buffer.from(base64, 'base64').toString('utf8');
}
const json = decodeURIComponent(uriEncoded);
return JSON.parse(json) as T;
};
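
A round-trip usage sketch for the new helpers; the payload shape and the import path via the utils index are assumptions:

import { decodeUriBase64ToJson, encodeJsonToUriBase64 } from '@/hooks/Mapper/utils';

type SharedSettings = { version: number; widgets: string[] };

// encodeJsonToUriBase64 produces an opaque base64 string; decodeUriBase64ToJson restores the object.
const encoded = encodeJsonToUriBase64({ version: 2, widgets: ['signatures'] });
const decoded = decodeUriBase64ToJson<SharedSettings>(encoded);
console.log(decoded.version); // 2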

View File

@@ -25,7 +25,7 @@ config :wanderer_app,
ecto_repos: [WandererApp.Repo],
ash_domains: [WandererApp.Api],
generators: [timestamp_type: :utc_datetime],
ddrt: DDRT,
ddrt: WandererApp.Map.CacheRTree,
logger: Logger,
pubsub_client: Phoenix.PubSub,
wanderer_kills_base_url:

View File

@@ -258,7 +258,9 @@ config :wanderer_app, WandererApp.Scheduler,
timezone: :utc,
jobs:
[
{"@daily", {WandererApp.Map.Audit, :archive, []}}
{"@daily", {WandererApp.Map.Audit, :archive, []}},
{"@daily", {WandererApp.Map.GarbageCollector, :cleanup_chain_passages, []}},
{"@daily", {WandererApp.Map.GarbageCollector, :cleanup_system_signatures, []}}
] ++ sheduler_jobs,
timeout: :infinity

View File

@@ -2,6 +2,7 @@ defmodule WandererApp.Api.Changes.SlugifyName do
use Ash.Resource.Change
alias Ash.Changeset
require Ash.Query
@impl true
@spec change(Changeset.t(), keyword, Change.context()) :: Changeset.t()
@@ -12,10 +13,56 @@ defmodule WandererApp.Api.Changes.SlugifyName do
defp maybe_slugify_name(changeset) do
case Changeset.get_attribute(changeset, :slug) do
slug when is_binary(slug) ->
Changeset.force_change_attribute(changeset, :slug, Slug.slugify(slug))
base_slug = Slug.slugify(slug)
unique_slug = ensure_unique_slug(changeset, base_slug)
Changeset.force_change_attribute(changeset, :slug, unique_slug)
_ ->
changeset
end
end
defp ensure_unique_slug(changeset, base_slug) do
# Get the current record ID if this is an update operation
current_id = Changeset.get_attribute(changeset, :id)
# Check if the base slug is available
if slug_available?(base_slug, current_id) do
base_slug
else
# Find the next available slug with a numeric suffix
find_available_slug(base_slug, current_id, 2)
end
end
defp find_available_slug(base_slug, current_id, n) do
candidate_slug = "#{base_slug}-#{n}"
if slug_available?(candidate_slug, current_id) do
candidate_slug
else
find_available_slug(base_slug, current_id, n + 1)
end
end
defp slug_available?(slug, current_id) do
query =
WandererApp.Api.Map
|> Ash.Query.filter(slug == ^slug)
|> then(fn query ->
# Exclude the current record if this is an update
if current_id do
Ash.Query.filter(query, id != ^current_id)
else
query
end
end)
|> Ash.Query.limit(1)
case Ash.read(query) do
{:ok, []} -> true
{:ok, _} -> false
{:error, _} -> false
end
end
end
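
For illustration only, the same suffixing strategy sketched in TypeScript; slugExists is a hypothetical lookup standing in for the Ash query above:

// Keep appending -2, -3, ... until the slug no longer collides.
const ensureUniqueSlug = async (
  baseSlug: string,
  slugExists: (slug: string) => Promise<boolean>,
): Promise<string> => {
  if (!(await slugExists(baseSlug))) {
    return baseSlug;
  }
  for (let n = 2; ; n++) {
    const candidate = `${baseSlug}-${n}`;
    if (!(await slugExists(candidate))) {
      return candidate;
    }
  }
};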

View File

@@ -30,14 +30,14 @@ defmodule WandererApp.Api.Map do
# Routes configuration
routes do
base("/maps")
get(:read)
get(:by_slug, route: "/:slug")
index :read
post(:new)
patch(:update)
delete(:destroy)
# Custom action for map duplication
post(:duplicate, route: "/:id/duplicate")
# post(:duplicate, route: "/:id/duplicate")
end
end

View File

@@ -9,6 +9,11 @@ defmodule WandererApp.Api.MapConnection do
postgres do
repo(WandererApp.Repo)
table("map_chain_v1")
custom_indexes do
# Critical index for list_connections query performance
index [:map_id], name: "map_chain_v1_map_id_index"
end
end
json_api do

View File

@@ -65,7 +65,7 @@ defmodule WandererApp.Api.MapSubscription do
defaults [:create, :read, :update, :destroy]
read :all_active do
prepare build(sort: [updated_at: :asc])
prepare build(sort: [updated_at: :asc], load: [:map])
filter(expr(status == :active))
end

View File

@@ -1,6 +1,26 @@
defmodule WandererApp.Api.MapSystem do
@moduledoc false
@derive {Jason.Encoder,
only: [
:id,
:map_id,
:name,
:solar_system_id,
:position_x,
:position_y,
:status,
:visible,
:locked,
:custom_name,
:description,
:tag,
:temporary_name,
:labels,
:added_at,
:linked_sig_eve_id
]}
use Ash.Resource,
domain: WandererApp.Api,
data_layer: AshPostgres.DataLayer,
@@ -9,6 +29,11 @@ defmodule WandererApp.Api.MapSystem do
postgres do
repo(WandererApp.Repo)
table("map_system_v1")
custom_indexes do
# Partial index for efficient visible systems query
index [:map_id], where: "visible = true", name: "map_system_v1_map_id_visible_index"
end
end
json_api do
@@ -16,6 +41,17 @@ defmodule WandererApp.Api.MapSystem do
includes([:map])
default_fields([
:name,
:solar_system_id,
:status,
:custom_name,
:description,
:tag,
:temporary_name,
:labels
])
derive_filter?(true)
derive_sort?(true)

View File

@@ -38,7 +38,12 @@ defmodule WandererApp.Application do
),
Supervisor.child_spec({Cachex, name: :ship_types_cache}, id: :ship_types_cache_worker),
Supervisor.child_spec({Cachex, name: :character_cache}, id: :character_cache_worker),
Supervisor.child_spec({Cachex, name: :acl_cache}, id: :acl_cache_worker),
Supervisor.child_spec({Cachex, name: :map_cache}, id: :map_cache_worker),
Supervisor.child_spec({Cachex, name: :map_pool_cache},
id: :map_pool_cache_worker
),
Supervisor.child_spec({Cachex, name: :map_state_cache}, id: :map_state_cache_worker),
Supervisor.child_spec({Cachex, name: :character_state_cache},
id: :character_state_cache_worker
),
@@ -48,10 +53,7 @@ defmodule WandererApp.Application do
Supervisor.child_spec({Cachex, name: :wanderer_app_cache},
id: :wanderer_app_cache_worker
),
{Registry, keys: :unique, name: WandererApp.MapRegistry},
{Registry, keys: :unique, name: WandererApp.Character.TrackerRegistry},
{PartitionSupervisor,
child_spec: DynamicSupervisor, name: WandererApp.Map.DynamicSupervisors},
{PartitionSupervisor,
child_spec: DynamicSupervisor, name: WandererApp.Character.DynamicSupervisors},
WandererAppWeb.PresenceGracePeriodManager,
@@ -78,6 +80,7 @@ defmodule WandererApp.Application do
WandererApp.Server.ServerStatusTracker,
WandererApp.Server.TheraDataFetcher,
{WandererApp.Character.TrackerPoolSupervisor, []},
{WandererApp.Map.MapPoolSupervisor, []},
WandererApp.Character.TrackerManager,
WandererApp.Map.Manager
] ++ security_audit_children

View File

@@ -116,7 +116,7 @@ defmodule WandererApp.CachedInfo do
def get_solar_system_jumps() do
case WandererApp.Cache.lookup(:solar_system_jumps) do
{:ok, nil} ->
data = WandererApp.EveDataService.get_solar_system_jumps_data()
{:ok, data} = WandererApp.Api.MapSolarSystemJumps.read()
cache_items(data, :solar_system_jumps)

View File

@@ -28,7 +28,7 @@ defmodule WandererApp.Character do
Cachex.put(:character_cache, character_id, character)
{:ok, character}
error ->
_error ->
{:error, :not_found}
end
@@ -192,7 +192,7 @@ defmodule WandererApp.Character do
{:ok, []}
end
error ->
_error ->
{:ok, []}
end
end

View File

@@ -17,7 +17,7 @@ defmodule WandererApp.Character.TrackerPool do
@unique_registry :unique_tracker_pool_registry
@update_location_interval :timer.seconds(1)
@update_online_interval :timer.seconds(5)
@update_online_interval :timer.seconds(30)
@check_offline_characters_interval :timer.minutes(5)
@check_online_errors_interval :timer.minutes(1)
@check_ship_errors_interval :timer.minutes(1)
@@ -46,10 +46,6 @@ defmodule WandererApp.Character.TrackerPool do
{:ok, _} = Registry.register(@unique_registry, Module.concat(__MODULE__, uuid), tracked_ids)
{:ok, _} = Registry.register(@registry, __MODULE__, uuid)
# Cachex.get_and_update(@cache, :tracked_characters, fn ids ->
# {:commit, ids ++ tracked_ids}
# end)
tracked_ids
|> Enum.each(fn id ->
Cachex.put(@cache, id, uuid)
@@ -79,9 +75,6 @@ defmodule WandererApp.Character.TrackerPool do
[tracked_id | r_tracked_ids]
end)
# Cachex.get_and_update(@cache, :tracked_characters, fn ids ->
# {:commit, ids ++ [tracked_id]}
# end)
Cachex.put(@cache, tracked_id, uuid)
{:noreply, %{state | characters: [tracked_id | characters]}}
@@ -96,10 +89,6 @@ defmodule WandererApp.Character.TrackerPool do
r_tracked_ids |> Enum.reject(fn id -> id == tracked_id end)
end)
# Cachex.get_and_update(@cache, :tracked_characters, fn ids ->
# {:commit, ids |> Enum.reject(fn id -> id == tracked_id end)}
# end)
#
Cachex.del(@cache, tracked_id)
{:noreply, %{state | characters: characters |> Enum.reject(fn id -> id == tracked_id end)}}
@@ -191,6 +180,8 @@ defmodule WandererApp.Character.TrackerPool do
[Tracker Pool] update_online => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
ErrorTracker.report(e, __STACKTRACE__)
end
{:noreply, state}
@@ -581,8 +572,4 @@ defmodule WandererApp.Character.TrackerPool do
Logger.debug("Failed to monitor message queue: #{inspect(error)}")
end
end
defp via_tuple(uuid) do
{:via, Registry, {@unique_registry, Module.concat(__MODULE__, uuid)}}
end
end

View File

@@ -50,11 +50,6 @@ defmodule WandererApp.Character.TrackerPoolDynamicSupervisor do
end
end
def is_not_tracked?(tracked_id) do
{:ok, tracked_ids} = Cachex.get(@cache, :tracked_characters)
tracked_ids |> Enum.member?(tracked_id) |> Kernel.not()
end
defp get_available_pool([]), do: nil
defp get_available_pool([{pid, uuid} | pools]) do

View File

@@ -173,12 +173,11 @@ defmodule WandererApp.Character.TrackingUtils do
%{
id: character_id,
eve_id: eve_id
},
} = _character,
map_id,
is_track_allowed,
caller_pid
)
when not is_nil(caller_pid) do
) do
WandererAppWeb.Presence.update(caller_pid, map_id, character_id, %{
tracked: is_track_allowed,
from: DateTime.utc_now()
@@ -217,13 +216,16 @@ defmodule WandererApp.Character.TrackingUtils do
end
defp track_character(
_character,
character,
_map_id,
_is_track_allowed,
_caller_pid
) do
Logger.error("caller_pid is required for tracking characters")
{:error, "caller_pid is required"}
Logger.error(
"Invalid character data for tracking - character must have :id and :eve_id fields, got: #{inspect(character)}"
)
{:error, "Invalid character data"}
end
def untrack(characters, map_id, caller_pid) do
@@ -238,30 +240,14 @@ defmodule WandererApp.Character.TrackingUtils do
})
end)
# WandererApp.Map.Server.untrack_characters(map_id, character_ids)
:ok
else
true ->
Logger.error("caller_pid is required for untracking characters")
Logger.error("caller_pid is required for untracking characters 2")
{:error, "caller_pid is required"}
end
end
# def add_characters([], _map_id, _track_character), do: :ok
# def add_characters([character | characters], map_id, track_character) do
# :ok = WandererApp.Map.Server.add_character(map_id, character, track_character)
# add_characters(characters, map_id, track_character)
# end
# def remove_characters([], _map_id), do: :ok
# def remove_characters([character | characters], map_id) do
# :ok = WandererApp.Map.Server.remove_character(map_id, character.id)
# remove_characters(characters, map_id)
# end
def get_main_character(
nil,
current_user_characters,

View File

@@ -14,8 +14,6 @@ defmodule WandererApp.DatabaseSetup do
alias WandererApp.Repo
alias Ecto.Adapters.SQL
@test_db_name "wanderer_test"
@doc """
Sets up the test database from scratch.
Creates the database, runs migrations, and sets up initial data.

View File

@@ -21,7 +21,8 @@ defmodule WandererApp.Esi do
defdelegate get_character_location(character_eve_id, opts \\ []), to: WandererApp.Esi.ApiClient
defdelegate get_character_online(character_eve_id, opts \\ []), to: WandererApp.Esi.ApiClient
defdelegate get_character_ship(character_eve_id, opts \\ []), to: WandererApp.Esi.ApiClient
defdelegate find_routes(map_id, origin, hubs, routes_settings), to: WandererApp.Esi.ApiClient
defdelegate get_routes_custom(hubs, origin, params), to: WandererApp.Esi.ApiClient
defdelegate get_routes_eve(hubs, origin, params, opts), to: WandererApp.Esi.ApiClient
defdelegate search(character_eve_id, opts \\ []), to: WandererApp.Esi.ApiClient
defdelegate get_killmail(killmail_id, killmail_hash, opts \\ []), to: WandererApp.Esi.ApiClient

View File

@@ -6,35 +6,9 @@ defmodule WandererApp.Esi.ApiClient do
alias WandererApp.Cache
@ttl :timer.hours(1)
@routes_ttl :timer.minutes(15)
@base_url "https://esi.evetech.net/latest"
@wanderrer_user_agent "(wanderer-industries@proton.me; +https://github.com/wanderer-industries/wanderer)"
@req_esi Req.new(base_url: @base_url, finch: WandererApp.Finch)
@get_link_pairs_advanced_params [
:include_mass_crit,
:include_eol,
:include_frig
]
@default_routes_settings %{
path_type: "shortest",
include_mass_crit: true,
include_eol: false,
include_frig: true,
include_cruise: true,
avoid_wormholes: false,
avoid_pochven: false,
avoid_edencom: false,
avoid_triglavian: false,
include_thera: true,
avoid: []
}
@zarzakh_system 30_100_000
@default_avoid_systems [@zarzakh_system]
@req_esi_options [base_url: "https://esi.evetech.net", finch: WandererApp.Finch]
@cache_opts [cache: true]
@retry_opts [retry: false, retry_log_level: :warning]
@@ -43,11 +17,11 @@ defmodule WandererApp.Esi.ApiClient do
@logger Application.compile_env(:wanderer_app, :logger)
def get_server_status, do: get("/status")
def get_server_status, do: do_get("/status", [], @cache_opts)
def set_autopilot_waypoint(add_to_beginning, clear_other_waypoints, destination_id, opts \\ []),
do:
post_esi(
do_post_esi(
"/ui/autopilot/waypoint",
get_auth_opts(opts)
|> Keyword.merge(
@@ -62,7 +36,7 @@ defmodule WandererApp.Esi.ApiClient do
def post_characters_affiliation(character_eve_ids, _opts)
when is_list(character_eve_ids),
do:
post_esi(
do_post_esi(
"/characters/affiliation/",
json: character_eve_ids,
params: %{
@@ -70,168 +44,9 @@ defmodule WandererApp.Esi.ApiClient do
}
)
def find_routes(map_id, origin, hubs, routes_settings) do
origin = origin |> String.to_integer()
hubs = hubs |> Enum.map(&(&1 |> String.to_integer()))
routes_settings = @default_routes_settings |> Map.merge(routes_settings)
connections =
case routes_settings.avoid_wormholes do
false ->
map_chains =
routes_settings
|> Map.take(@get_link_pairs_advanced_params)
|> Map.put_new(:map_id, map_id)
|> WandererApp.Api.MapConnection.get_link_pairs_advanced!()
|> Enum.map(fn %{
solar_system_source: solar_system_source,
solar_system_target: solar_system_target
} ->
%{
first: solar_system_source,
second: solar_system_target
}
end)
|> Enum.uniq()
{:ok, thera_chains} =
case routes_settings.include_thera do
true ->
WandererApp.Server.TheraDataFetcher.get_chain_pairs(routes_settings)
false ->
{:ok, []}
end
chains = remove_intersection([map_chains | thera_chains] |> List.flatten())
chains =
case routes_settings.include_cruise do
false ->
{:ok, wh_class_a_systems} = WandererApp.CachedInfo.get_wh_class_a_systems()
chains
|> Enum.filter(fn x ->
not Enum.member?(wh_class_a_systems, x.first) and
not Enum.member?(wh_class_a_systems, x.second)
end)
_ ->
chains
end
chains
|> Enum.map(fn chain ->
["#{chain.first}|#{chain.second}", "#{chain.second}|#{chain.first}"]
end)
|> List.flatten()
true ->
[]
end
{:ok, trig_systems} = WandererApp.CachedInfo.get_trig_systems()
pochven_solar_systems =
trig_systems
|> Enum.filter(fn s -> s.triglavian_invasion_status == "Final" end)
|> Enum.map(& &1.solar_system_id)
triglavian_solar_systems =
trig_systems
|> Enum.filter(fn s -> s.triglavian_invasion_status == "Triglavian" end)
|> Enum.map(& &1.solar_system_id)
edencom_solar_systems =
trig_systems
|> Enum.filter(fn s -> s.triglavian_invasion_status == "Edencom" end)
|> Enum.map(& &1.solar_system_id)
avoidance_list =
case routes_settings.avoid_edencom do
true ->
edencom_solar_systems
false ->
[]
end
avoidance_list =
case routes_settings.avoid_triglavian do
true ->
[avoidance_list | triglavian_solar_systems]
false ->
avoidance_list
end
avoidance_list =
case routes_settings.avoid_pochven do
true ->
[avoidance_list | pochven_solar_systems]
false ->
avoidance_list
end
avoidance_list =
(@default_avoid_systems ++ [routes_settings.avoid | avoidance_list])
|> List.flatten()
|> Enum.uniq()
params =
%{
datasource: "tranquility",
flag: routes_settings.path_type,
connections: connections,
avoid: avoidance_list
}
{:ok, all_routes} = get_all_routes(hubs, origin, params)
routes =
all_routes
|> Enum.map(fn route_info ->
map_route_info(route_info)
end)
|> Enum.filter(fn route_info -> not is_nil(route_info) end)
{:ok, routes}
end
def get_all_routes(hubs, origin, params, opts \\ []) do
cache_key =
"routes-#{origin}-#{hubs |> Enum.join("-")}-#{:crypto.hash(:sha, :erlang.term_to_binary(params))}"
case WandererApp.Cache.lookup(cache_key) do
{:ok, result} when not is_nil(result) ->
{:ok, result}
_ ->
case get_all_routes_custom(hubs, origin, params) do
{:ok, result} ->
WandererApp.Cache.insert(
cache_key,
result,
ttl: @routes_ttl
)
{:ok, result}
{:error, _error} ->
@logger.error(
"Error getting custom routes for #{inspect(origin)}: #{inspect(params)}"
)
get_all_routes_eve(hubs, origin, params, opts)
end
end
end
defp get_all_routes_custom(hubs, origin, params),
def get_routes_custom(hubs, origin, params),
do:
post(
do_post(
"#{get_custom_route_base_url()}/route/multiple",
[
json: %{
@@ -245,13 +60,20 @@ defmodule WandererApp.Esi.ApiClient do
|> Keyword.merge(@timeout_opts)
)
def get_all_routes_eve(hubs, origin, params, opts),
def get_routes_eve(hubs, origin, params, opts),
do:
{:ok,
hubs
|> Task.async_stream(
fn destination ->
get_routes(origin, destination, params, opts)
%{
"origin" => origin,
"destination" => destination,
"systems" => [],
"success" => false
}
# do_get_routes_eve(origin, destination, params, opts)
end,
max_concurrency: System.schedulers_online() * 4,
timeout: :timer.seconds(30),
@@ -265,8 +87,19 @@ defmodule WandererApp.Esi.ApiClient do
end
end)}
def get_routes(origin, destination, params, opts) do
case _get_routes(origin, destination, params, opts) do
defp do_get_routes_eve(origin, destination, params, opts) do
esi_params =
Map.merge(params, %{
connections: params.connections |> Enum.join(","),
avoid: params.avoid |> Enum.join(",")
})
do_get(
"/route/#{origin}/#{destination}/?#{esi_params |> Plug.Conn.Query.encode()}",
opts,
@cache_opts
)
|> case do
{:ok, result} ->
%{
"origin" => origin,
@@ -299,9 +132,8 @@ defmodule WandererApp.Esi.ApiClient do
key: "killmail-#{killmail_id}-#{killmail_hash}",
opts: [ttl: @ttl]
)
def get_killmail(killmail_id, killmail_hash, opts \\ []) do
get("/killmails/#{killmail_id}/#{killmail_hash}/", opts, @cache_opts)
end
def get_killmail(killmail_id, killmail_hash, opts \\ []),
do: do_get("/killmails/#{killmail_id}/#{killmail_hash}/", opts, @cache_opts)
@decorate cacheable(
cache: Cache,
@@ -322,7 +154,7 @@ defmodule WandererApp.Esi.ApiClient do
opts: [ttl: @ttl]
)
def get_character_info(eve_id, opts \\ []) do
case get(
case do_get(
"/characters/#{eve_id}/",
opts,
@cache_opts
@@ -395,48 +227,11 @@ defmodule WandererApp.Esi.ApiClient do
get_character_auth_data(character_eve_id, "search", merged_opts)
end
defp remove_intersection(pairs_arr) do
tuples = pairs_arr |> Enum.map(fn x -> {x.first, x.second} end)
tuples
|> Enum.reduce([], fn {first, second} = x, acc ->
if Enum.member?(tuples, {second, first}) do
acc
else
[x | acc]
end
end)
|> Enum.uniq()
|> Enum.map(fn {first, second} ->
%{
first: first,
second: second
}
end)
end
defp _get_routes(origin, destination, params, opts),
do: get_routes_eve(origin, destination, params, opts)
defp get_routes_eve(origin, destination, params, opts) do
esi_params =
Map.merge(params, %{
connections: params.connections |> Enum.join(","),
avoid: params.avoid |> Enum.join(",")
})
get(
"/route/#{origin}/#{destination}/?#{esi_params |> Plug.Conn.Query.encode()}",
opts,
@cache_opts
)
end
defp get_auth_opts(opts), do: [auth: {:bearer, opts[:access_token]}]
defp get_alliance_info(alliance_eve_id, info_path, opts),
do:
get(
do_get(
"/alliances/#{alliance_eve_id}/#{info_path}",
opts,
@cache_opts
@@ -444,7 +239,7 @@ defmodule WandererApp.Esi.ApiClient do
defp get_corporation_info(corporation_eve_id, info_path, opts),
do:
get(
do_get(
"/corporations/#{corporation_eve_id}/#{info_path}",
opts,
@cache_opts
@@ -460,13 +255,13 @@ defmodule WandererApp.Esi.ApiClient do
character_id = opts |> Keyword.get(:character_id, nil)
if not is_access_token_expired?(character_id) do
get(
do_get(
path,
auth_opts,
opts |> with_refresh_token()
)
else
get_retry(path, auth_opts, opts |> with_refresh_token())
do_get_retry(path, auth_opts, opts |> with_refresh_token())
end
end
@@ -481,29 +276,26 @@ defmodule WandererApp.Esi.ApiClient do
defp get_corporation_auth_data(corporation_eve_id, info_path, opts),
do:
get(
do_get(
"/corporations/#{corporation_eve_id}/#{info_path}",
[params: opts[:params] || []] ++
(opts |> get_auth_opts()),
(opts |> with_refresh_token()) ++ @cache_opts
)
defp with_user_agent_opts(opts) do
opts
|> Keyword.merge(
headers: [{:user_agent, "Wanderer/#{WandererApp.Env.vsn()} #{@wanderrer_user_agent}"}]
)
end
defp with_user_agent_opts(opts),
do:
opts
|> Keyword.merge(
headers: [{:user_agent, "Wanderer/#{WandererApp.Env.vsn()} #{@wanderrer_user_agent}"}]
)
defp with_refresh_token(opts) do
opts |> Keyword.merge(refresh_token?: true)
end
defp with_refresh_token(opts), do: opts |> Keyword.merge(refresh_token?: true)
defp with_cache_opts(opts) do
opts |> Keyword.merge(@cache_opts) |> Keyword.merge(cache_dir: System.tmp_dir!())
end
defp with_cache_opts(opts),
do: opts |> Keyword.merge(@cache_opts) |> Keyword.merge(cache_dir: System.tmp_dir!())
defp get(path, api_opts \\ [], opts \\ []) do
defp do_get(path, api_opts \\ [], opts \\ []) do
case Cachex.get(:api_cache, path) do
{:ok, cached_data} when not is_nil(cached_data) ->
{:ok, cached_data}
@@ -515,15 +307,17 @@ defmodule WandererApp.Esi.ApiClient do
defp do_get_request(path, api_opts \\ [], opts \\ []) do
try do
case Req.get(
@req_esi,
api_opts
|> Keyword.merge(url: path)
|> with_user_agent_opts()
|> with_cache_opts()
|> Keyword.merge(@retry_opts)
|> Keyword.merge(@timeout_opts)
) do
@req_esi_options
|> Req.new()
|> Req.get(
api_opts
|> Keyword.merge(url: path)
|> with_user_agent_opts()
|> with_cache_opts()
|> Keyword.merge(@retry_opts)
|> Keyword.merge(@timeout_opts)
)
|> case do
{:ok, %{status: 200, body: body, headers: headers}} ->
maybe_cache_response(path, body, headers, opts)
@@ -537,8 +331,8 @@ defmodule WandererApp.Esi.ApiClient do
{:ok, %{status: 420, headers: headers} = _error} ->
# Extract rate limit information from headers
reset_seconds = Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()
remaining = Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()
reset_seconds = Map.get(headers, "x-esi-error-limit-reset", ["0"]) |> List.first()
remaining = Map.get(headers, "x-esi-error-limit-remain", ["0"]) |> List.first()
# Emit telemetry for rate limiting
:telemetry.execute(
@@ -568,10 +362,40 @@ defmodule WandererApp.Esi.ApiClient do
{:error, :error_limited, headers}
{:ok, %{status: status} = _error} when status in [401, 403] ->
get_retry(path, api_opts, opts)
{:ok, %{status: 429, headers: headers} = _error} ->
# Extract rate limit information from headers
reset_seconds = Map.get(headers, "retry-after", ["0"]) |> List.first()
{:ok, %{status: status}} ->
# Emit telemetry for rate limiting
:telemetry.execute(
[:wanderer_app, :esi, :rate_limited],
%{
count: 1,
reset_duration:
case Integer.parse(reset_seconds || "0") do
{seconds, _} -> seconds * 1000
_ -> 0
end
},
%{
method: "GET",
path: path,
reset_seconds: reset_seconds
}
)
Logger.warning("ESI_RATE_LIMITED: GET request rate limited",
method: "GET",
path: path,
reset_seconds: reset_seconds
)
{:error, :error_limited, headers}
{:ok, %{status: status} = _error} when status in [401, 403] ->
do_get_retry(path, api_opts, opts)
{:ok, %{status: status, headers: headers}} ->
{:error, "Unexpected status: #{status}"}
{:error, _reason} ->
@@ -585,7 +409,7 @@ defmodule WandererApp.Esi.ApiClient do
end
end
defp maybe_cache_response(path, body, %{"expires" => [expires]}, opts)
defp maybe_cache_response(path, body, %{"expires" => [expires]} = _headers, opts)
when is_binary(path) and not is_nil(expires) do
try do
if opts |> Keyword.get(:cache, false) do
@@ -609,7 +433,7 @@ defmodule WandererApp.Esi.ApiClient do
defp maybe_cache_response(_path, _body, _headers, _opts), do: :ok
defp post(url, opts) do
defp do_post(url, opts) do
try do
case Req.post("#{url}", opts |> with_user_agent_opts()) do
{:ok, %{status: status, body: body}} when status in [200, 201] ->
@@ -623,8 +447,8 @@ defmodule WandererApp.Esi.ApiClient do
{:ok, %{status: 420, headers: headers} = _error} ->
# Extract rate limit information from headers
reset_seconds = Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()
remaining = Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()
reset_seconds = Map.get(headers, "x-esi-error-limit-reset", ["0"]) |> List.first()
remaining = Map.get(headers, "x-esi-error-limit-remain", ["0"]) |> List.first()
# Emit telemetry for rate limiting
:telemetry.execute(
@@ -668,16 +492,13 @@ defmodule WandererApp.Esi.ApiClient do
end
end
defp post_esi(url, opts) do
defp do_post_esi(url, opts) do
try do
req_opts =
(opts |> with_user_agent_opts() |> Keyword.merge(@retry_opts)) ++
[params: opts[:params] || []]
Req.new(
[base_url: @base_url, finch: WandererApp.Finch] ++
req_opts
)
Req.new(@req_esi_options ++ req_opts)
|> Req.post(url: url)
|> case do
{:ok, %{status: status, body: body}} when status in [200, 201] ->
@@ -691,8 +512,8 @@ defmodule WandererApp.Esi.ApiClient do
{:ok, %{status: 420, headers: headers} = _error} ->
# Extract rate limit information from headers
reset_seconds = Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()
remaining = Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()
reset_seconds = Map.get(headers, "x-esi-error-limit-reset", ["0"]) |> List.first()
remaining = Map.get(headers, "x-esi-error-limit-remain", ["0"]) |> List.first()
# Emit telemetry for rate limiting
:telemetry.execute(
@@ -722,6 +543,36 @@ defmodule WandererApp.Esi.ApiClient do
{:error, :error_limited, headers}
{:ok, %{status: 429, headers: headers} = _error} ->
# Extract rate limit information from headers
reset_seconds = Map.get(headers, "retry-after", ["0"]) |> List.first()
# Emit telemetry for rate limiting
:telemetry.execute(
[:wanderer_app, :esi, :rate_limited],
%{
count: 1,
reset_duration:
case Integer.parse(reset_seconds || "0") do
{seconds, _} -> seconds * 1000
_ -> 0
end
},
%{
method: "POST_ESI",
path: url,
reset_seconds: reset_seconds
}
)
Logger.warning("ESI_RATE_LIMITED: POST request rate limited",
method: "POST_ESI",
path: url,
reset_seconds: reset_seconds
)
{:error, :error_limited, headers}
{:ok, %{status: status}} ->
{:error, "Unexpected status: #{status}"}
@@ -736,7 +587,7 @@ defmodule WandererApp.Esi.ApiClient do
end
end
defp get_retry(path, api_opts, opts, status \\ :forbidden) do
defp do_get_retry(path, api_opts, opts, status \\ :forbidden) do
refresh_token? = opts |> Keyword.get(:refresh_token?, false)
retry_count = opts |> Keyword.get(:retry_count, 0)
character_id = opts |> Keyword.get(:character_id, nil)
@@ -748,7 +599,7 @@ defmodule WandererApp.Esi.ApiClient do
{:ok, token} ->
auth_opts = [access_token: token.access_token] |> get_auth_opts()
get(
do_get(
path,
api_opts |> Keyword.merge(auth_opts),
opts |> Keyword.merge(retry_count: retry_count + 1)
@@ -913,44 +764,4 @@ defmodule WandererApp.Esi.ApiClient do
:character_token_invalid
)
end
defp map_route_info(
%{
"origin" => origin,
"destination" => destination,
"systems" => result_systems,
"success" => success
} = _route_info
),
do:
map_route_info(%{
origin: origin,
destination: destination,
systems: result_systems,
success: success
})
defp map_route_info(
%{origin: origin, destination: destination, systems: result_systems, success: success} =
_route_info
) do
systems =
case result_systems do
[] ->
[]
_ ->
result_systems |> Enum.reject(fn system_id -> system_id == origin end)
end
%{
has_connection: result_systems != [],
systems: systems,
origin: origin,
destination: destination,
success: success
}
end
defp map_route_info(_), do: nil
end
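The 420/429 branches above emit a [:wanderer_app, :esi, :rate_limited] telemetry event whose measurements include reset_duration in milliseconds and whose metadata carries method, path and reset_seconds. A minimal sketch of attaching a handler to that event; the handler id and log wording are illustrative and not part of the codebase:

require Logger

:telemetry.attach(
  "esi-rate-limit-logger",
  [:wanderer_app, :esi, :rate_limited],
  fn _event, %{reset_duration: reset_ms}, %{method: method, path: path}, _config ->
    # Surface how long the client is expected to back off
    Logger.warning("ESI rate limited", method: method, path: path, reset_in_ms: reset_ms)
  end,
  nil
)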

View File

@@ -38,32 +38,8 @@ defmodule WandererApp.EveDataService do
|> Ash.bulk_create(WandererApp.Api.MapSolarSystemJumps, :create)
Logger.info("MapSolarSystemJumps updated!")
end
def download_files() do
tasks =
@dump_file_names
|> Enum.map(fn file_name ->
Task.async(fn ->
download_file(file_name)
end)
end)
Task.await_many(tasks, :timer.minutes(30))
end
def download_file(file_name) do
url = "#{@eve_db_dump_url}/#{file_name}"
Logger.info("Downloading file from #{url}")
download_path = Path.join([:code.priv_dir(:wanderer_app), "repo", "data", file_name])
Req.get!(url, raw: true, into: File.stream!(download_path, [:write])).body
|> Stream.run()
Logger.info("File downloaded successfully to #{download_path}")
:ok
cleanup_files()
end
def load_wormhole_types() do
@@ -163,7 +139,57 @@ defmodule WandererApp.EveDataService do
data
end
def load_map_constellations() do
defp cleanup_files() do
tasks =
@dump_file_names
|> Enum.map(fn file_name ->
Task.async(fn ->
cleanup_file(file_name)
end)
end)
Task.await_many(tasks, :timer.minutes(30))
end
defp cleanup_file(file_name) do
Logger.info("Cleaning file: #{file_name}")
download_path = Path.join([:code.priv_dir(:wanderer_app), "repo", "data", file_name])
:ok = File.rm(download_path)
Logger.info("File removed successfully to #{download_path}")
:ok
end
defp download_files() do
tasks =
@dump_file_names
|> Enum.map(fn file_name ->
Task.async(fn ->
download_file(file_name)
end)
end)
Task.await_many(tasks, :timer.minutes(30))
end
defp download_file(file_name) do
url = "#{@eve_db_dump_url}/#{file_name}"
Logger.info("Downloading file from #{url}")
download_path = Path.join([:code.priv_dir(:wanderer_app), "repo", "data", file_name])
Req.get!(url, raw: true, into: File.stream!(download_path, [:write])).body
|> Stream.run()
Logger.info("File downloaded successfully to #{download_path}")
:ok
end
defp load_map_constellations() do
WandererApp.Utils.CSVUtil.csv_row_to_table_record(
"#{:code.priv_dir(:wanderer_app)}/repo/data/mapConstellations.csv",
fn row ->
@@ -175,7 +201,7 @@ defmodule WandererApp.EveDataService do
)
end
def load_map_regions() do
defp load_map_regions() do
WandererApp.Utils.CSVUtil.csv_row_to_table_record(
"#{:code.priv_dir(:wanderer_app)}/repo/data/mapRegions.csv",
fn row ->
@@ -187,7 +213,7 @@ defmodule WandererApp.EveDataService do
)
end
def load_map_location_wormhole_classes() do
defp load_map_location_wormhole_classes() do
WandererApp.Utils.CSVUtil.csv_row_to_table_record(
"#{:code.priv_dir(:wanderer_app)}/repo/data/mapLocationWormholeClasses.csv",
fn row ->
@@ -199,7 +225,7 @@ defmodule WandererApp.EveDataService do
)
end
def load_inv_groups() do
defp load_inv_groups() do
WandererApp.Utils.CSVUtil.csv_row_to_table_record(
"#{:code.priv_dir(:wanderer_app)}/repo/data/invGroups.csv",
fn row ->
@@ -212,7 +238,7 @@ defmodule WandererApp.EveDataService do
)
end
def get_db_data() do
defp get_db_data() do
map_constellations = load_map_constellations()
map_regions = load_map_regions()
map_location_wormhole_classes = load_map_location_wormhole_classes()
@@ -296,7 +322,7 @@ defmodule WandererApp.EveDataService do
)
end
def get_ship_types_data() do
defp get_ship_types_data() do
inv_groups = load_inv_groups()
ship_type_groups =
@@ -331,7 +357,7 @@ defmodule WandererApp.EveDataService do
|> Enum.filter(fn t -> t.group_id in ship_type_groups end)
end
def get_solar_system_jumps_data() do
defp get_solar_system_jumps_data() do
WandererApp.Utils.CSVUtil.csv_row_to_table_record(
"#{:code.priv_dir(:wanderer_app)}/repo/data/mapSolarSystemJumps.csv",
fn row ->

View File

@@ -212,6 +212,7 @@ defmodule WandererApp.ExternalEvents.JsonApiFormatter do
"time_status" => payload["time_status"] || payload[:time_status],
"mass_status" => payload["mass_status"] || payload[:mass_status],
"ship_size_type" => payload["ship_size_type"] || payload[:ship_size_type],
"locked" => payload["locked"] || payload[:locked],
"updated_at" => event.timestamp
},
"relationships" => %{

View File

@@ -109,8 +109,8 @@ defmodule WandererApp.Kills.MapEventListener do
# Handle re-subscription attempt
def handle_info(:resubscribe_to_maps, state) do
running_maps = WandererApp.Map.RegistryHelper.list_all_maps()
current_running_map_ids = MapSet.new(Enum.map(running_maps, & &1.id))
{:ok, started_maps} = WandererApp.Cache.lookup("started_maps", [])
current_running_map_ids = MapSet.new(started_maps)
Logger.debug(fn ->
"[MapEventListener] Resubscribing to maps. Running maps: #{MapSet.size(current_running_map_ids)}"

View File

@@ -88,13 +88,13 @@ defmodule WandererApp.Kills.Subscription.MapIntegration do
def get_tracked_system_ids do
try do
# Get systems from currently running maps
active_maps = WandererApp.Map.RegistryHelper.list_all_maps()
{:ok, started_maps_ids} = WandererApp.Cache.lookup("started_maps", [])
Logger.debug("[MapIntegration] Found #{length(active_maps)} active maps")
Logger.debug("[MapIntegration] Found #{length(started_maps_ids)} active maps")
map_systems =
active_maps
|> Enum.map(fn %{id: map_id} ->
started_maps_ids
|> Enum.map(fn map_id ->
case WandererApp.MapSystemRepo.get_visible_by_map(map_id) do
{:ok, systems} ->
system_ids = Enum.map(systems, & &1.solar_system_id)
@@ -114,7 +114,7 @@ defmodule WandererApp.Kills.Subscription.MapIntegration do
|> Enum.uniq()
Logger.debug(fn ->
"[MapIntegration] Total tracked systems: #{length(system_ids)} across #{length(active_maps)} maps"
"[MapIntegration] Total tracked systems: #{length(system_ids)} across #{length(started_maps_ids)} maps"
end)
{:ok, system_ids}

View File

@@ -146,25 +146,6 @@ defmodule WandererApp.License.LicenseManager do
end
end
@doc """
Checks if a license is expired.
"""
defp expired?(license) do
case license.expire_at do
nil -> false
expire_at -> DateTime.compare(expire_at, DateTime.utc_now()) == :lt
end
end
@doc """
Generates a random string of specified length.
"""
defp generate_random_string(length) do
:crypto.strong_rand_bytes(length)
|> Base.encode16(case: :upper)
|> binary_part(0, length)
end
@doc """
Formats a datetime as YYYY-MM-DD.
"""

View File

@@ -7,6 +7,8 @@ defmodule WandererApp.Map do
require Logger
@map_state_cache :map_state_cache
defstruct map_id: nil,
name: nil,
scope: :none,
@@ -69,6 +71,50 @@ defmodule WandererApp.Map do
end)
end
def get_map_state(map_id, init_if_empty? \\ true) do
case Cachex.get(@map_state_cache, map_id) do
{:ok, nil} ->
case init_if_empty? do
true ->
map_state = WandererApp.Map.Server.Impl.do_init_state(map_id: map_id)
Cachex.put(@map_state_cache, map_id, map_state)
{:ok, map_state}
_ ->
{:ok, nil}
end
{:ok, map_state} ->
{:ok, map_state}
end
end
def get_map_state!(map_id) do
case get_map_state(map_id) do
{:ok, map_state} ->
map_state
_ ->
Logger.error("Failed to get map_state #{map_id}")
throw("Failed to get map_state #{map_id}")
end
end
def update_map_state(map_id, state_update),
do:
Cachex.get_and_update(@map_state_cache, map_id, fn map_state ->
case map_state do
nil ->
new_state = WandererApp.Map.Server.Impl.do_init_state(map_id: map_id)
{:commit, Map.merge(new_state, state_update)}
_ ->
{:commit, Map.merge(map_state, state_update)}
end
end)
def delete_map_state(map_id), do: Cachex.del(@map_state_cache, map_id)
def get_characters_limit(map_id),
do: {:ok, map_id |> get_map!() |> Map.get(:characters_limit, 50)}
@@ -486,15 +532,16 @@ defmodule WandererApp.Map do
solar_system_source,
solar_system_target
) do
case map_id
|> get_map!()
|> Map.get(:connections, Map.new())
connections =
map_id
|> get_map!()
|> Map.get(:connections, Map.new())
case connections
|> Map.get("#{solar_system_source}_#{solar_system_target}") do
nil ->
{:ok,
map_id
|> get_map!()
|> Map.get(:connections, Map.new())
connections
|> Map.get("#{solar_system_target}_#{solar_system_source}")}
connection ->

View File

@@ -0,0 +1,347 @@
defmodule WandererApp.Map.CacheRTree do
@moduledoc """
Cache-based spatial index implementing the DDRT behaviour.
Provides R-tree-like spatial indexing using grid-based storage in a Nebulex cache.
No GenServer processes are required - all operations are functional and cache-based.
## Storage Structure
Data is stored in the cache with the following keys:
- `"rtree:<name>:leaves"` - Map of solar_system_id => {id, bounding_box}
- `"rtree:<name>:grid"` - Map of {grid_x, grid_y} => [solar_system_id, ...]
- `"rtree:<name>:config"` - Tree configuration
## Spatial Grid
Uses 150x150 pixel grid cells for O(1) spatial queries. Each system node
(130x34 pixels) typically overlaps 1-2 grid cells, providing fast collision
detection without the overhead of GenServer-based tree traversal.
"""
@behaviour WandererApp.Test.DDRT
alias WandererApp.Cache
@grid_size 150 # Grid cell size in pixels
# Type definitions matching DDRT behavior
@type id :: number() | String.t()
@type coord_range :: {number(), number()}
@type bounding_box :: list(coord_range())
@type leaf :: {id(), bounding_box()}
# ============================================================================
# Public API - DDRT Behavior Implementation
# ============================================================================
@doc """
Insert one or more leaves into the spatial index.
## Parameters
- `leaf_or_leaves` - Single `{id, bounding_box}` tuple or list of tuples
- `name` - Name of the R-tree instance
## Examples
iex> CacheRTree.insert({30000142, [{100, 230}, {50, 84}]}, "rtree_map_123")
{:ok, %{}}
iex> CacheRTree.insert([
...> {30000142, [{100, 230}, {50, 84}]},
...> {30000143, [{250, 380}, {100, 134}]}
...> ], "rtree_map_123")
{:ok, %{}}
"""
@impl true
def insert(leaf_or_leaves, name) do
leaves = normalize_leaves(leaf_or_leaves)
# Update leaves storage
current_leaves = get_leaves(name)
new_leaves = Enum.reduce(leaves, current_leaves, fn {id, box}, acc ->
Map.put(acc, id, {id, box})
end)
put_leaves(name, new_leaves)
# Update spatial grid
current_grid = get_grid(name)
new_grid = Enum.reduce(leaves, current_grid, fn leaf, grid ->
add_to_grid(grid, leaf)
end)
put_grid(name, new_grid)
{:ok, %{}} # Match DDRT return format
end
@doc """
Delete one or more leaves from the spatial index.
## Parameters
- `id_or_ids` - Single ID or list of IDs to remove
- `name` - Name of the R-tree instance
## Examples
iex> CacheRTree.delete([30000142], "rtree_map_123")
{:ok, %{}}
iex> CacheRTree.delete([30000142, 30000143], "rtree_map_123")
{:ok, %{}}
"""
@impl true
def delete(id_or_ids, name) do
ids = normalize_ids(id_or_ids)
current_leaves = get_leaves(name)
current_grid = get_grid(name)
# Remove from leaves and track bounding boxes for grid cleanup
{new_leaves, removed} = Enum.reduce(ids, {current_leaves, []}, fn id, {leaves, removed} ->
case Map.pop(leaves, id) do
{nil, leaves} -> {leaves, removed}
{{^id, box}, leaves} -> {leaves, [{id, box} | removed]}
end
end)
# Update grid
new_grid = Enum.reduce(removed, current_grid, fn {id, box}, grid ->
remove_from_grid(grid, id, box)
end)
put_leaves(name, new_leaves)
put_grid(name, new_grid)
{:ok, %{}}
end
@doc """
Update a leaf's bounding box.
## Parameters
- `id` - ID of the leaf to update
- `box_or_tuple` - Either a new `bounding_box` or `{old_box, new_box}` tuple
- `name` - Name of the R-tree instance
## Examples
iex> CacheRTree.update(30000142, [{150, 280}, {200, 234}], "rtree_map_123")
{:ok, %{}}
iex> CacheRTree.update(30000142, {[{100, 230}, {50, 84}], [{150, 280}, {200, 234}]}, "rtree_map_123")
{:ok, %{}}
"""
@impl true
def update(id, box_or_tuple, name) do
{old_box, new_box} = case box_or_tuple do
{old, new} ->
{old, new}
box ->
# Need to look up old box
leaves = get_leaves(name)
case Map.get(leaves, id) do
{^id, old} -> {old, box}
nil -> {nil, box} # Will be handled as new insert
end
end
# Delete old, insert new
if old_box, do: delete([id], name)
insert({id, new_box}, name)
end
@doc """
Query for all leaves intersecting a bounding box.
Uses grid-based spatial indexing for O(1) average case performance.
## Parameters
- `bounding_box` - Query bounding box `[{x_min, x_max}, {y_min, y_max}]`
- `name` - Name of the R-tree instance
## Returns
- `{:ok, [id()]}` - List of IDs intersecting the query box
- `{:error, term()}` - Error if query fails
## Examples
iex> CacheRTree.query([{200, 330}, {90, 124}], "rtree_map_123")
{:ok, [30000143]}
iex> CacheRTree.query([{0, 50}, {0, 50}], "rtree_map_123")
{:ok, []}
"""
@impl true
def query(bounding_box, name) do
# Get candidate IDs from grid cells
grid = get_grid(name)
grid_cells = get_grid_cells(bounding_box)
candidate_ids =
grid_cells
|> Enum.flat_map(fn cell -> Map.get(grid, cell, []) end)
|> Enum.uniq()
# Precise intersection test
leaves = get_leaves(name)
matching_ids =
Enum.filter(candidate_ids, fn id ->
case Map.get(leaves, id) do
{^id, leaf_box} -> boxes_intersect?(bounding_box, leaf_box)
nil -> false
end
end)
{:ok, matching_ids}
rescue
error -> {:error, error}
end
# ============================================================================
# Initialization and Management
# ============================================================================
@doc """
Initialize an empty R-tree in the cache.
## Parameters
- `name` - Name for this R-tree instance
- `config` - Optional configuration map (width, verbose, etc.)
## Examples
iex> CacheRTree.init_tree("rtree_map_123")
:ok
iex> CacheRTree.init_tree("rtree_map_456", %{width: 150, verbose: false})
:ok
"""
def init_tree(name, config \\ %{}) do
Cache.put(cache_key(name, :leaves), %{})
Cache.put(cache_key(name, :grid), %{})
Cache.put(cache_key(name, :config), Map.merge(default_config(), config))
:ok
end
@doc """
Clear all data for an R-tree from the cache.
Should be called when a map is shut down to free memory.
## Parameters
- `name` - Name of the R-tree instance to clear
## Examples
iex> CacheRTree.clear_tree("rtree_map_123")
:ok
"""
def clear_tree(name) do
Cache.delete(cache_key(name, :leaves))
Cache.delete(cache_key(name, :grid))
Cache.delete(cache_key(name, :config))
:ok
end
# ============================================================================
# Private Helper Functions
# ============================================================================
# Cache access helpers
defp cache_key(name, suffix), do: "rtree:#{name}:#{suffix}"
defp get_leaves(name) do
Cache.get(cache_key(name, :leaves)) || %{}
end
defp put_leaves(name, leaves) do
Cache.put(cache_key(name, :leaves), leaves)
end
defp get_grid(name) do
Cache.get(cache_key(name, :grid)) || %{}
end
defp put_grid(name, grid) do
Cache.put(cache_key(name, :grid), grid)
end
defp default_config do
%{
width: 150,
grid_size: @grid_size,
verbose: false
}
end
# Grid operations
defp add_to_grid(grid, {id, bounding_box}) do
grid_cells = get_grid_cells(bounding_box)
Enum.reduce(grid_cells, grid, fn cell, acc ->
Map.update(acc, cell, [id], fn existing_ids ->
if id in existing_ids do
existing_ids
else
[id | existing_ids]
end
end)
end)
end
defp remove_from_grid(grid, id, bounding_box) do
grid_cells = get_grid_cells(bounding_box)
Enum.reduce(grid_cells, grid, fn cell, acc ->
Map.update(acc, cell, [], fn existing_ids ->
List.delete(existing_ids, id)
end)
end)
end
# Calculate which grid cells a bounding box overlaps
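# e.g. a 130x34 node placed at {100, 50} has box [{100, 230}, {50, 84}] and overlaps cells [{0, 0}, {1, 0}]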
defp get_grid_cells(bounding_box) do
[{x_min, x_max}, {y_min, y_max}] = bounding_box
# Calculate cell coordinates using integer division
# Handles negative coordinates correctly
cell_x_min = div_floor(x_min, @grid_size)
cell_x_max = div_floor(x_max, @grid_size)
cell_y_min = div_floor(y_min, @grid_size)
cell_y_max = div_floor(y_max, @grid_size)
# Generate all overlapping cells
for x <- cell_x_min..cell_x_max,
y <- cell_y_min..cell_y_max do
{x, y}
end
end
# Floor division that works correctly with negative numbers
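# e.g. div_floor(-10, 150) == -1, whereas div(-10, 150) == 0 would map the point to the wrong cell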
defp div_floor(a, b) when a >= 0, do: div(a, b)
defp div_floor(a, b) when a < 0 do
case rem(a, b) do
0 -> div(a, b)
_ -> div(a, b) - 1
end
end
# Check if two bounding boxes intersect
defp boxes_intersect?(box1, box2) do
[{x1_min, x1_max}, {y1_min, y1_max}] = box1
[{x2_min, x2_max}, {y2_min, y2_max}] = box2
# Boxes intersect if they overlap on both axes
x_overlap = x1_min <= x2_max and x2_min <= x1_max
y_overlap = y1_min <= y2_max and y2_min <= y1_max
x_overlap and y_overlap
end
# Input normalization
defp normalize_leaves(leaf) when is_tuple(leaf), do: [leaf]
defp normalize_leaves(leaves) when is_list(leaves), do: leaves
defp normalize_ids(id) when is_number(id) or is_binary(id), do: [id]
defp normalize_ids(ids) when is_list(ids), do: ids
end
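A minimal usage sketch of the API above, reusing the documented return shapes (the tree name and solar system ids are illustrative):

alias WandererApp.Map.CacheRTree

:ok = CacheRTree.init_tree("rtree_map_123")

# Index two 130x34 system nodes
{:ok, %{}} =
  CacheRTree.insert(
    [
      {30000142, [{100, 230}, {50, 84}]},
      {30000143, [{250, 380}, {100, 134}]}
    ],
    "rtree_map_123"
  )

# The query box only intersects the second node
{:ok, [30000143]} = CacheRTree.query([{200, 330}, {90, 124}], "rtree_map_123")

# Remove a node, then drop the whole index when the map shuts down
{:ok, %{}} = CacheRTree.delete([30000142], "rtree_map_123")
:ok = CacheRTree.clear_tree("rtree_map_123")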

View File

@@ -1,42 +0,0 @@
defmodule WandererApp.Map.DynamicSupervisor do
@moduledoc """
Dynamically starts a map server
"""
use DynamicSupervisor
require Logger
alias WandererApp.Map.Server
def start_link(_arg) do
DynamicSupervisor.start_link(__MODULE__, nil, name: __MODULE__)
end
def init(nil) do
DynamicSupervisor.init(strategy: :one_for_one)
end
def _start_child(map_id) do
child_spec = %{
id: Server,
start: {Server, :start_link, [map_id]},
restart: :transient
}
case DynamicSupervisor.start_child(__MODULE__, child_spec) do
{:ok, _} ->
:ok
{:error, {:already_started, _}} ->
:ok
{:error, reason} ->
{:error, reason}
end
end
def which_children do
Supervisor.which_children(__MODULE__)
end
end

View File

@@ -0,0 +1,38 @@
defmodule WandererApp.Map.GarbageCollector do
@moduledoc """
Garbage collection jobs for old map data: periodically removes stale chain passages and system signatures.
"""
require Logger
require Ash.Query
@logger Application.compile_env(:wanderer_app, :logger)
@one_week_seconds 7 * 24 * 60 * 60
@two_weeks_seconds 14 * 24 * 60 * 60
def cleanup_chain_passages() do
Logger.info("Start cleanup old map chain passages...")
WandererApp.Api.MapChainPassages
|> Ash.Query.filter(updated_at: [less_than: get_cutoff_time(@one_week_seconds)])
|> Ash.bulk_destroy!(:destroy, %{}, batch_size: 100)
@logger.info(fn -> "All map chain passages processed" end)
:ok
end
def cleanup_system_signatures() do
Logger.info("Start cleanup old map system signatures...")
WandererApp.Api.MapSystemSignature
|> Ash.Query.filter(updated_at: [less_than: get_cutoff_time(@two_weeks_seconds)])
|> Ash.bulk_destroy!(:destroy, %{}, batch_size: 100)
@logger.info(fn -> "All map system signatures processed" end)
:ok
end
defp get_cutoff_time(seconds), do: DateTime.utc_now() |> DateTime.add(-seconds, :second)
end
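Both cleanup jobs run synchronous bulk destroys and return :ok, so they can be invoked from IEx or any scheduler; how they are wired into a schedule is not part of this diff:

:ok = WandererApp.Map.GarbageCollector.cleanup_chain_passages()
:ok = WandererApp.Map.GarbageCollector.cleanup_system_signatures()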

View File

@@ -8,16 +8,11 @@ defmodule WandererApp.Map.Manager do
require Logger
alias WandererApp.Map.Server
alias WandererApp.Map.ServerSupervisor
alias WandererApp.Api.MapSystemSignature
@maps_start_per_second 10
@maps_start_interval 1000
@maps_queue :maps_queue
@garbage_collection_interval :timer.hours(1)
@check_maps_queue_interval :timer.seconds(1)
@signatures_cleanup_interval :timer.minutes(30)
@delete_after_minutes 30
@pings_cleanup_interval :timer.minutes(10)
@pings_expire_minutes 60
@@ -42,15 +37,11 @@ defmodule WandererApp.Map.Manager do
do: WandererApp.Queue.push_uniq(@maps_queue, map_id)
def stop_map(map_id) when is_binary(map_id) do
case Server.map_pid(map_id) do
pid when is_pid(pid) ->
GenServer.cast(
pid,
:stop
)
with {:ok, started_maps} <- WandererApp.Cache.lookup("started_maps", []),
true <- Enum.member?(started_maps, map_id) do
Logger.warning(fn -> "Shutting down map server: #{inspect(map_id)}" end)
nil ->
:ok
WandererApp.Map.MapPoolDynamicSupervisor.stop_map(map_id)
end
end
@@ -59,16 +50,11 @@ defmodule WandererApp.Map.Manager do
@impl true
def init([]) do
WandererApp.Queue.new(@maps_queue, [])
WandererApp.Cache.insert("started_maps", [])
{:ok, check_maps_queue_timer} =
:timer.send_interval(@check_maps_queue_interval, :check_maps_queue)
{:ok, garbage_collector_timer} =
:timer.send_interval(@garbage_collection_interval, :garbage_collect)
{:ok, signatures_cleanup_timer} =
:timer.send_interval(@signatures_cleanup_interval, :cleanup_signatures)
{:ok, pings_cleanup_timer} =
:timer.send_interval(@pings_cleanup_interval, :cleanup_pings)
@@ -78,9 +64,7 @@ defmodule WandererApp.Map.Manager do
{:ok,
%{
garbage_collector_timer: garbage_collector_timer,
check_maps_queue_timer: check_maps_queue_timer,
signatures_cleanup_timer: signatures_cleanup_timer,
pings_cleanup_timer: pings_cleanup_timer
}}
end
@@ -113,48 +97,6 @@ defmodule WandererApp.Map.Manager do
end
end
@impl true
def handle_info(:garbage_collect, state) do
try do
WandererApp.Map.RegistryHelper.list_all_maps()
|> Enum.each(fn %{id: map_id, pid: server_pid} ->
case Process.alive?(server_pid) do
true ->
presence_character_ids =
WandererApp.Cache.lookup!("map_#{map_id}:presence_character_ids", [])
if presence_character_ids |> Enum.empty?() do
Logger.info("No more characters present on: #{map_id}, shutting down map server...")
stop_map(map_id)
end
false ->
Logger.warning("Server not alive: #{inspect(server_pid)}")
:ok
end
end)
{:noreply, state}
rescue
e ->
Logger.error(Exception.message(e))
{:noreply, state}
end
end
@impl true
def handle_info(:cleanup_signatures, state) do
try do
cleanup_deleted_signatures()
{:noreply, state}
rescue
e ->
Logger.error("Failed to cleanup signatures: #{inspect(e)}")
{:noreply, state}
end
end
@impl true
def handle_info(:cleanup_pings, state) do
try do
@@ -167,23 +109,6 @@ defmodule WandererApp.Map.Manager do
end
end
defp cleanup_deleted_signatures() do
delete_after_date = DateTime.utc_now() |> DateTime.add(-1 * @delete_after_minutes, :minute)
case MapSystemSignature.by_deleted_and_updated_before!(true, delete_after_date) do
{:ok, deleted_signatures} ->
Enum.each(deleted_signatures, fn sig ->
Ash.destroy!(sig)
end)
:ok
{:error, error} ->
Logger.error("Failed to fetch deleted signatures: #{inspect(error)}")
{:error, error}
end
end
defp cleanup_expired_pings() do
delete_after_date = DateTime.utc_now() |> DateTime.add(-1 * @pings_expire_minutes, :minute)
@@ -192,7 +117,7 @@ defmodule WandererApp.Map.Manager do
Enum.each(pings, fn %{id: ping_id, map_id: map_id, type: type} = ping ->
{:ok, %{system: system}} = ping |> Ash.load([:system])
WandererApp.Map.Server.Impl.broadcast!(map_id, :ping_cancelled, %{
Server.Impl.broadcast!(map_id, :ping_cancelled, %{
id: ping_id,
solar_system_id: system.solar_system_id,
type: type
@@ -273,21 +198,21 @@ defmodule WandererApp.Map.Manager do
end
defp start_map_server(map_id) do
case DynamicSupervisor.start_child(
{:via, PartitionSupervisor, {WandererApp.Map.DynamicSupervisors, self()}},
{ServerSupervisor, map_id: map_id}
) do
{:ok, pid} ->
{:ok, pid}
with {:ok, started_maps} <- WandererApp.Cache.lookup("started_maps", []),
false <- Enum.member?(started_maps, map_id) do
WandererApp.Cache.insert_or_update(
"started_maps",
[map_id],
fn existing ->
[map_id | existing] |> Enum.uniq()
end
)
{:error, {:already_started, pid}} ->
{:ok, pid}
{:error, {:shutdown, {:failed_to_start_child, Server, {:already_started, pid}}}} ->
{:ok, pid}
{:error, reason} ->
{:error, reason}
WandererApp.Map.MapPoolDynamicSupervisor.start_map(map_id)
else
_error ->
Logger.warning("Map already started: #{map_id}")
:ok
end
end
end

View File

@@ -0,0 +1,360 @@
defmodule WandererApp.Map.MapPool do
@moduledoc false
use GenServer, restart: :transient
require Logger
alias WandererApp.Map.Server
defstruct [
:map_ids,
:uuid
]
@name __MODULE__
@cache :map_pool_cache
@registry :map_pool_registry
@unique_registry :unique_map_pool_registry
@garbage_collection_interval :timer.hours(12)
@systems_cleanup_timeout :timer.minutes(30)
@characters_cleanup_timeout :timer.minutes(5)
@connections_cleanup_timeout :timer.minutes(5)
@backup_state_timeout :timer.minutes(1)
def new(), do: __struct__()
def new(args), do: __struct__(args)
def start_link(map_ids) do
uuid = UUID.uuid1()
GenServer.start_link(
@name,
{uuid, map_ids},
name: Module.concat(__MODULE__, uuid)
)
end
@impl true
def init({uuid, map_ids}) do
{:ok, _} = Registry.register(@unique_registry, Module.concat(__MODULE__, uuid), map_ids)
{:ok, _} = Registry.register(@registry, __MODULE__, uuid)
map_ids
|> Enum.each(fn id ->
Cachex.put(@cache, id, uuid)
end)
state =
%{
uuid: uuid,
map_ids: []
}
|> new()
{:ok, state, {:continue, {:start, map_ids}}}
end
@impl true
def terminate(_reason, _state) do
:ok
end
@impl true
def handle_continue({:start, map_ids}, state) do
Logger.info("#{@name} started")
map_ids
|> Enum.each(fn map_id ->
GenServer.cast(self(), {:start_map, map_id})
end)
Process.send_after(self(), :backup_state, @backup_state_timeout)
Process.send_after(self(), :cleanup_systems, 15_000)
Process.send_after(self(), :cleanup_characters, @characters_cleanup_timeout)
Process.send_after(self(), :cleanup_connections, @connections_cleanup_timeout)
Process.send_after(self(), :garbage_collect, @garbage_collection_interval)
# Start message queue monitoring
Process.send_after(self(), :monitor_message_queue, :timer.seconds(30))
{:noreply, state}
end
@impl true
def handle_cast(:stop, state), do: {:stop, :normal, state}
@impl true
def handle_cast({:start_map, map_id}, %{map_ids: map_ids, uuid: uuid} = state) do
if map_id not in map_ids do
Registry.update_value(@unique_registry, Module.concat(__MODULE__, uuid), fn r_map_ids ->
[map_id | r_map_ids]
end)
Cachex.put(@cache, map_id, uuid)
map_id
|> WandererApp.Map.get_map_state!()
|> Server.Impl.start_map()
{:noreply, %{state | map_ids: [map_id | map_ids]}}
else
{:noreply, state}
end
end
@impl true
def handle_cast(
{:stop_map, map_id},
%{map_ids: map_ids, uuid: uuid} = state
) do
Registry.update_value(@unique_registry, Module.concat(__MODULE__, uuid), fn r_map_ids ->
r_map_ids |> Enum.reject(fn id -> id == map_id end)
end)
Cachex.del(@cache, map_id)
map_id
|> Server.Impl.stop_map()
{:noreply, %{state | map_ids: map_ids |> Enum.reject(fn id -> id == map_id end)}}
end
@impl true
def handle_call(:error, _, state), do: {:stop, :error, :ok, state}
@impl true
def handle_info(:backup_state, %{map_ids: map_ids} = state) do
Process.send_after(self(), :backup_state, @backup_state_timeout)
try do
map_ids
|> Task.async_stream(
fn map_id ->
{:ok, _map_state} = Server.Impl.save_map_state(map_id)
end,
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task,
timeout: :timer.minutes(1)
)
|> Enum.each(fn _result -> :ok end)
rescue
e ->
Logger.error("""
[Map Pool] backup_state => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
end
@impl true
def handle_info(:cleanup_systems, %{map_ids: map_ids} = state) do
Process.send_after(self(), :cleanup_systems, @systems_cleanup_timeout)
try do
map_ids
|> Task.async_stream(
fn map_id ->
Server.Impl.cleanup_systems(map_id)
end,
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task,
timeout: :timer.minutes(1)
)
|> Enum.each(fn _result -> :ok end)
rescue
e ->
Logger.error("""
[Map Pool] cleanup_systems => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
end
@impl true
def handle_info(:cleanup_connections, %{map_ids: map_ids} = state) do
Process.send_after(self(), :cleanup_connections, @connections_cleanup_timeout)
try do
map_ids
|> Task.async_stream(
fn map_id ->
Server.Impl.cleanup_connections(map_id)
end,
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task,
timeout: :timer.minutes(1)
)
|> Enum.each(fn _result -> :ok end)
rescue
e ->
Logger.error("""
[Map Pool] cleanup_connections => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
end
@impl true
def handle_info(:cleanup_characters, %{map_ids: map_ids} = state) do
Process.send_after(self(), :cleanup_characters, @characters_cleanup_timeout)
try do
map_ids
|> Task.async_stream(
fn map_id ->
Server.Impl.cleanup_characters(map_id)
end,
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task,
timeout: :timer.minutes(1)
)
|> Enum.each(fn _result -> :ok end)
rescue
e ->
Logger.error("""
[Map Pool] cleanup_characters => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
end
@impl true
def handle_info(:garbage_collect, %{map_ids: map_ids, uuid: uuid} = state) do
Process.send_after(self(), :garbage_collect, @garbage_collection_interval)
try do
map_ids
|> Enum.each(fn map_id ->
# presence_character_ids =
# WandererApp.Cache.lookup!("map_#{map_id}:presence_character_ids", [])
# if presence_character_ids |> Enum.empty?() do
Logger.info(
"#{uuid}: No more characters present on: #{map_id}, shutting down map server..."
)
GenServer.cast(self(), {:stop_map, map_id})
# end
end)
rescue
e ->
Logger.error(Exception.message(e))
end
{:noreply, state}
end
@impl true
def handle_info(:monitor_message_queue, state) do
monitor_message_queue(state)
# Schedule next monitoring check
Process.send_after(self(), :monitor_message_queue, :timer.seconds(30))
{:noreply, state}
end
def handle_info({ref, result}, state) when is_reference(ref) do
Process.demonitor(ref, [:flush])
case result do
{:error, error} ->
Logger.error("#{__MODULE__} failed to process: #{inspect(error)}")
:ok
_ ->
:ok
end
{:noreply, state}
end
def handle_info(
:update_online,
%{
characters: characters,
server_online: true
} =
state
) do
Process.send_after(self(), :update_online, @update_online_interval)
try do
characters
|> Task.async_stream(
fn character_id ->
WandererApp.Character.Tracker.update_online(character_id)
end,
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task,
timeout: :timer.seconds(5)
)
|> Enum.each(fn _result -> :ok end)
rescue
e ->
Logger.error("""
[Tracker Pool] update_online => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
end
def handle_info(event, state) do
try do
Server.Impl.handle_event(event)
rescue
e ->
Logger.error("""
[Map Pool] handle_info => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
ErrorTracker.report(e, __STACKTRACE__)
end
{:noreply, state}
end
defp monitor_message_queue(state) do
try do
{_, message_queue_len} = Process.info(self(), :message_queue_len)
{_, memory} = Process.info(self(), :memory)
# Alert on high message queue
if message_queue_len > 50 do
Logger.warning("GENSERVER_QUEUE_HIGH: Map pool message queue buildup",
pool_id: state.uuid,
message_queue_length: message_queue_len,
memory_bytes: memory,
pool_length: length(state.map_ids)
)
# Emit telemetry
:telemetry.execute(
[:wanderer_app, :map, :map_pool, :queue_buildup],
%{
message_queue_length: message_queue_len,
memory_bytes: memory
},
%{
pool_id: state.uuid,
pool_length: length(state.map_ids)
}
)
end
rescue
error ->
Logger.debug("Failed to monitor message queue: #{inspect(error)}")
end
end
end

View File

@@ -0,0 +1,102 @@
defmodule WandererApp.Map.MapPoolDynamicSupervisor do
@moduledoc false
use DynamicSupervisor
require Logger
@cache :map_pool_cache
@registry :map_pool_registry
@unique_registry :unique_map_pool_registry
@map_pool_limit 10
@name __MODULE__
def start_link(_arg) do
DynamicSupervisor.start_link(@name, [], name: @name, max_restarts: 10)
end
def init(_arg) do
DynamicSupervisor.init(strategy: :one_for_one)
end
def start_map(map_id) do
case Registry.lookup(@registry, WandererApp.Map.MapPool) do
[] ->
start_child([map_id], 0)
pools ->
case get_available_pool(pools) do
nil ->
start_child([map_id], pools |> Enum.count())
pid ->
GenServer.cast(pid, {:start_map, map_id})
end
end
end
def stop_map(map_id) do
{:ok, pool_uuid} = Cachex.get(@cache, map_id)
case Registry.lookup(
@unique_registry,
Module.concat(WandererApp.Map.MapPool, pool_uuid)
) do
[] ->
:ok
[{pool_pid, _}] ->
GenServer.cast(pool_pid, {:stop_map, map_id})
end
end
defp get_available_pool([]), do: nil
defp get_available_pool([{_pid, uuid} | pools]) do
case Registry.lookup(@unique_registry, Module.concat(WandererApp.Map.MapPool, uuid)) do
[] ->
nil
uuid_pools ->
case get_available_pool_pid(uuid_pools) do
nil ->
get_available_pool(pools)
pid ->
pid
end
end
end
defp get_available_pool_pid([]), do: nil
defp get_available_pool_pid([{pid, map_ids} | pools]) do
if Enum.count(map_ids) < @map_pool_limit do
pid
else
get_available_pool_pid(pools)
end
end
defp start_child(map_ids, pools_count) do
case DynamicSupervisor.start_child(@name, {WandererApp.Map.MapPool, map_ids}) do
{:ok, pid} ->
Logger.info("Starting map pool, total map_pools: #{pools_count + 1}")
{:ok, pid}
{:error, {:already_started, pid}} ->
{:ok, pid}
end
end
defp stop_child(uuid) do
case Registry.lookup(@registry, uuid) do
[{pid, _}] ->
GenServer.cast(pid, :stop)
_ ->
Logger.warning("Unable to locate pool assigned to #{inspect(uuid)}")
:ok
end
end
end

View File

@@ -0,0 +1,22 @@
defmodule WandererApp.Map.MapPoolSupervisor do
@moduledoc false
use Supervisor
@name __MODULE__
@registry :map_pool_registry
@unique_registry :unique_map_pool_registry
def start_link(_args) do
Supervisor.start_link(@name, [], name: @name)
end
def init(_args) do
children = [
{Registry, [keys: :unique, name: @unique_registry]},
{Registry, [keys: :duplicate, name: @registry]},
{WandererApp.Map.MapPoolDynamicSupervisor, []}
]
Supervisor.init(children, strategy: :rest_for_one, max_restarts: 10)
end
end

View File

@@ -2,6 +2,8 @@ defmodule WandererApp.Map.PositionCalculator do
@moduledoc false
require Logger
@ddrt Application.compile_env(:wanderer_app, :ddrt)
# Node height
@h 34
# Node weight
@@ -60,7 +62,7 @@ defmodule WandererApp.Map.PositionCalculator do
end
defp is_available_position({x, y} = _position, rtree_name) do
case DDRT.query(get_system_bounding_rect(%{position_x: x, position_y: y}), rtree_name) do
case @ddrt.query(get_system_bounding_rect(%{position_x: x, position_y: y}), rtree_name) do
{:ok, []} ->
true
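The @ddrt attribute reads Application.compile_env(:wanderer_app, :ddrt), which makes the spatial index implementation pluggable (for example the cache-based index above at runtime and a stub in tests). A hedged sketch of such a config entry; the exact values are an assumption and are not shown in this diff:

# config/config.exs (illustrative)
import Config

config :wanderer_app, ddrt: WandererApp.Map.CacheRTree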

View File

@@ -1,15 +0,0 @@
defmodule WandererApp.Map.RegistryHelper do
@moduledoc false
alias WandererApp.MapRegistry
def list_all_maps(),
do: Registry.select(MapRegistry, [{{:"$1", :"$2", :_}, [], [%{id: :"$1", pid: :"$2"}]}])
def list_all_maps_by_map_id(map_id) do
match_all = {:"$1", :"$2", :"$3"}
guards = [{:==, :"$1", map_id}]
map_result = [%{id: :"$1", pid: :"$2"}]
Registry.select(MapRegistry, [{match_all, guards, map_result}])
end
end

View File

@@ -0,0 +1,311 @@
defmodule WandererApp.Map.Routes do
@moduledoc """
Map routes helper
"""
require Logger
@default_routes_settings %{
path_type: "shortest",
include_mass_crit: true,
include_eol: false,
include_frig: true,
include_cruise: true,
avoid_wormholes: false,
avoid_pochven: false,
avoid_edencom: false,
avoid_triglavian: false,
include_thera: true,
avoid: []
}
@minimum_route_attrs [
:system_class,
:class_title,
:security,
:triglavian_invasion_status,
:solar_system_id,
:solar_system_name,
:region_name,
:is_shattered
]
@get_link_pairs_advanced_params [
:include_mass_crit,
:include_eol,
:include_frig
]
@zarzakh_system 30_100_000
@default_avoid_systems [@zarzakh_system]
@routes_ttl :timer.minutes(15)
@logger Application.compile_env(:wanderer_app, :logger)
def find(map_id, hubs, origin, routes_settings, false) do
do_find_routes(
map_id,
origin,
hubs,
routes_settings
)
|> case do
{:ok, routes} ->
systems_static_data =
routes
|> Enum.map(fn route_info -> route_info.systems end)
|> List.flatten()
|> Enum.uniq()
|> Task.async_stream(
fn system_id ->
case WandererApp.CachedInfo.get_system_static_info(system_id) do
{:ok, nil} ->
nil
{:ok, system} ->
system |> Map.take(@minimum_route_attrs)
end
end,
max_concurrency: System.schedulers_online() * 4
)
|> Enum.map(fn {:ok, val} -> val end)
{:ok, %{routes: routes, systems_static_data: systems_static_data}}
_error ->
{:ok, %{routes: [], systems_static_data: []}}
end
end
def find(map_id, hubs, origin, routes_settings, true) do
origin = origin |> String.to_integer()
hubs = hubs |> Enum.map(&(&1 |> String.to_integer()))
routes =
hubs
|> Enum.map(fn hub ->
%{origin: origin, destination: hub, success: false, systems: [], has_connection: false}
end)
{:ok, %{routes: routes, systems_static_data: []}}
end
defp do_find_routes(map_id, origin, hubs, routes_settings) do
origin = origin |> String.to_integer()
hubs = hubs |> Enum.map(&(&1 |> String.to_integer()))
routes_settings = @default_routes_settings |> Map.merge(routes_settings)
connections =
case routes_settings.avoid_wormholes do
false ->
map_chains =
routes_settings
|> Map.take(@get_link_pairs_advanced_params)
|> Map.put_new(:map_id, map_id)
|> WandererApp.Api.MapConnection.get_link_pairs_advanced!()
|> Enum.map(fn %{
solar_system_source: solar_system_source,
solar_system_target: solar_system_target
} ->
%{
first: solar_system_source,
second: solar_system_target
}
end)
|> Enum.uniq()
{:ok, thera_chains} =
case routes_settings.include_thera do
true ->
WandererApp.Server.TheraDataFetcher.get_chain_pairs(routes_settings)
false ->
{:ok, []}
end
chains = remove_intersection([map_chains | thera_chains] |> List.flatten())
chains =
case routes_settings.include_cruise do
false ->
{:ok, wh_class_a_systems} = WandererApp.CachedInfo.get_wh_class_a_systems()
chains
|> Enum.filter(fn x ->
not Enum.member?(wh_class_a_systems, x.first) and
not Enum.member?(wh_class_a_systems, x.second)
end)
_ ->
chains
end
chains
|> Enum.map(fn chain ->
["#{chain.first}|#{chain.second}", "#{chain.second}|#{chain.first}"]
end)
|> List.flatten()
true ->
[]
end
{:ok, trig_systems} = WandererApp.CachedInfo.get_trig_systems()
pochven_solar_systems =
trig_systems
|> Enum.filter(fn s -> s.triglavian_invasion_status == "Final" end)
|> Enum.map(& &1.solar_system_id)
triglavian_solar_systems =
trig_systems
|> Enum.filter(fn s -> s.triglavian_invasion_status == "Triglavian" end)
|> Enum.map(& &1.solar_system_id)
edencom_solar_systems =
trig_systems
|> Enum.filter(fn s -> s.triglavian_invasion_status == "Edencom" end)
|> Enum.map(& &1.solar_system_id)
avoidance_list =
case routes_settings.avoid_edencom do
true ->
edencom_solar_systems
false ->
[]
end
avoidance_list =
case routes_settings.avoid_triglavian do
true ->
[avoidance_list | triglavian_solar_systems]
false ->
avoidance_list
end
avoidance_list =
case routes_settings.avoid_pochven do
true ->
[avoidance_list | pochven_solar_systems]
false ->
avoidance_list
end
avoidance_list =
(@default_avoid_systems ++ [routes_settings.avoid | avoidance_list])
|> List.flatten()
|> Enum.uniq()
params =
%{
datasource: "tranquility",
flag: routes_settings.path_type,
connections: connections,
avoid: avoidance_list
}
{:ok, all_routes} = get_all_routes(hubs, origin, params)
routes =
all_routes
|> Enum.map(fn route_info ->
map_route_info(route_info)
end)
|> Enum.filter(fn route_info -> not is_nil(route_info) end)
{:ok, routes}
end
defp get_all_routes(hubs, origin, params, opts \\ []) do
cache_key =
"routes-#{origin}-#{hubs |> Enum.join("-")}-#{:crypto.hash(:sha, :erlang.term_to_binary(params))}"
case WandererApp.Cache.lookup(cache_key) do
{:ok, result} when not is_nil(result) ->
{:ok, result}
_ ->
case WandererApp.Esi.get_routes_custom(hubs, origin, params) do
{:ok, result} ->
WandererApp.Cache.insert(
cache_key,
result,
ttl: @routes_ttl
)
{:ok, result}
{:error, _error} ->
@logger.error(
"Error getting custom routes for #{inspect(origin)}: #{inspect(params)}"
)
WandererApp.Esi.get_routes_eve(hubs, origin, params, opts)
end
end
end
defp remove_intersection(pairs_arr) do
tuples = pairs_arr |> Enum.map(fn x -> {x.first, x.second} end)
tuples
|> Enum.reduce([], fn {first, second} = x, acc ->
if Enum.member?(tuples, {second, first}) do
acc
else
[x | acc]
end
end)
|> Enum.uniq()
|> Enum.map(fn {first, second} ->
%{
first: first,
second: second
}
end)
end
defp map_route_info(
%{
"origin" => origin,
"destination" => destination,
"systems" => result_systems,
"success" => success
} = _route_info
),
do:
map_route_info(%{
origin: origin,
destination: destination,
systems: result_systems,
success: success
})
defp map_route_info(
%{origin: origin, destination: destination, systems: result_systems, success: success} =
_route_info
) do
systems =
case result_systems do
[] ->
[]
_ ->
result_systems |> Enum.reject(fn system_id -> system_id == origin end)
end
%{
has_connection: result_systems != [],
systems: systems,
origin: origin,
destination: destination,
success: success
}
end
defp map_route_info(_), do: nil
end
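For reference, find/5 expects string ids (converted to integers internally) and a settings map that is merged over @default_routes_settings; when the trailing flag is true it returns empty, unsuccessful routes without calling ESI. A minimal sketch with illustrative ids:

# Illustrative map id, hub list and origin
map_id = "3fa85f64-5717-4562-b3fc-2c963f66afa6"

{:ok, %{routes: routes, systems_static_data: systems_static_data}} =
  WandererApp.Map.Routes.find(map_id, ["30000142", "30002187"], "31000005", %{}, false)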

View File

@@ -1,41 +0,0 @@
defmodule WandererApp.Map.RtreeDynamicSupervisor do
@moduledoc """
Dynamically starts a map server
"""
use DynamicSupervisor
def start_link(_arg) do
DynamicSupervisor.start_link(__MODULE__, nil, name: __MODULE__)
end
def init(nil) do
DynamicSupervisor.init(strategy: :one_for_one)
end
def start(map_id) do
case DynamicSupervisor.start_child(
__MODULE__,
{DDRT.DynamicRtree,
[
conf: [name: "rtree_#{map_id}", width: 150, verbose: false, seed: 0],
name: Module.concat([map_id, DDRT.DynamicRtree])
]}
) do
{:ok, pid} -> {:ok, pid}
{:error, {:already_started, pid}} -> {:ok, pid}
{:error, reason} -> {:error, reason}
end
end
def stop(map_id) do
case Process.whereis(Module.concat([map_id, DDRT.DynamicRtree])) do
nil -> :ok
pid when is_pid(pid) -> DynamicSupervisor.terminate_child(__MODULE__, pid)
end
end
def which_children do
Supervisor.which_children(__MODULE__)
end
end

View File

@@ -2,52 +2,12 @@ defmodule WandererApp.Map.Server do
@moduledoc """
Holds state for a map and exposes an interface to managing the map instance
"""
use GenServer, restart: :transient, significant: true
require Logger
alias WandererApp.Map.Server.Impl
@logger Application.compile_env(:wanderer_app, :logger)
@spec start_link(keyword()) :: GenServer.on_start()
def start_link(args) when is_list(args) do
GenServer.start_link(__MODULE__, args, name: _via(args[:map_id]))
end
@impl true
def init(args), do: {:ok, Impl.init(args), {:continue, :load_state}}
def map_pid(map_id),
do:
map_id
|> _via()
|> GenServer.whereis()
def map_pid!(map_id) do
map_id
|> map_pid()
|> case do
map_id when is_pid(map_id) ->
map_id
nil ->
WandererApp.Cache.insert("map_#{map_id}:started", false)
throw("Map server not started")
end
end
def get_map(pid) when is_pid(pid),
do:
pid
|> GenServer.call({&Impl.get_map/1, []}, :timer.minutes(5))
def get_map(map_id) when is_binary(map_id),
do:
map_id
|> map_pid!
|> get_map()
def get_export_settings(%{id: map_id, hubs: hubs} = _map) do
with {:ok, all_systems} <- WandererApp.MapSystemRepo.get_all_by_map(map_id),
{:ok, connections} <- WandererApp.MapConnectionRepo.get_by_map(map_id) do
@@ -70,244 +30,67 @@ defmodule WandererApp.Map.Server do
end
end
def get_characters(map_id) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.call({&Impl.get_characters/1, []}, :timer.minutes(1))
defdelegate untrack_characters(map_id, character_ids), to: Impl
def add_character(map_id, character, track_character \\ false) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.add_character/3, [character, track_character]})
defdelegate add_system(map_id, system_info, user_id, character_id, opts \\ []), to: Impl
def remove_character(map_id, character_id) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.remove_character/2, [character_id]})
defdelegate paste_connections(map_id, connections, user_id, character_id), to: Impl
def untrack_characters(map_id, character_ids) when is_binary(map_id) do
map_id
|> map_pid()
|> case do
pid when is_pid(pid) ->
GenServer.cast(pid, {&Impl.untrack_characters/2, [character_ids]})
defdelegate paste_systems(map_id, systems, user_id, character_id, opts \\ []), to: Impl
_ ->
WandererApp.Cache.insert("map_#{map_id}:started", false)
:ok
end
end
defdelegate add_system_comment(map_id, comment_info, user_id, character_id), to: Impl
def add_system(map_id, system_info, user_id, character_id) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.add_system/4, [system_info, user_id, character_id]})
defdelegate remove_system_comment(map_id, comment_id, user_id, character_id), to: Impl
def add_system_comment(map_id, comment_info, user_id, character_id) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.add_system_comment/4, [comment_info, user_id, character_id]})
defdelegate update_system_position(map_id, update), to: Impl
def remove_system_comment(map_id, comment_id, user_id, character_id) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.remove_system_comment/4, [comment_id, user_id, character_id]})
defdelegate update_system_linked_sig_eve_id(map_id, update), to: Impl
def update_system_position(map_id, update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_system_position/2, [update]})
defdelegate update_system_name(map_id, update), to: Impl
def update_system_linked_sig_eve_id(map_id, update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_system_linked_sig_eve_id/2, [update]})
defdelegate update_system_description(map_id, update), to: Impl
def update_system_name(map_id, update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_system_name/2, [update]})
defdelegate update_system_status(map_id, update), to: Impl
def update_system_description(map_id, update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_system_description/2, [update]})
defdelegate update_system_tag(map_id, update), to: Impl
def update_system_status(map_id, update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_system_status/2, [update]})
defdelegate update_system_temporary_name(map_id, update), to: Impl
def update_system_tag(map_id, update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_system_tag/2, [update]})
defdelegate update_system_locked(map_id, update), to: Impl
def update_system_temporary_name(map_id, update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_system_temporary_name/2, [update]})
defdelegate update_system_labels(map_id, update), to: Impl
def update_system_locked(map_id, update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_system_locked/2, [update]})
defdelegate add_hub(map_id, hub_info), to: Impl
def update_system_labels(map_id, update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_system_labels/2, [update]})
defdelegate remove_hub(map_id, hub_info), to: Impl
def add_hub(map_id, hub_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.add_hub/2, [hub_info]})
defdelegate add_ping(map_id, ping_info), to: Impl
def remove_hub(map_id, hub_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.remove_hub/2, [hub_info]})
defdelegate cancel_ping(map_id, ping_info), to: Impl
def add_ping(map_id, ping_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.add_ping/2, [ping_info]})
defdelegate delete_systems(map_id, solar_system_ids, user_id, character_id), to: Impl
def cancel_ping(map_id, ping_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.cancel_ping/2, [ping_info]})
defdelegate add_connection(map_id, connection_info), to: Impl
def delete_systems(map_id, solar_system_ids, user_id, character_id) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.delete_systems/4, [solar_system_ids, user_id, character_id]})
defdelegate delete_connection(map_id, connection_info), to: Impl
def add_connection(map_id, connection_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.add_connection/2, [connection_info]})
defdelegate import_settings(map_id, settings, user_id), to: Impl
def import_settings(map_id, settings, user_id) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.call({&Impl.import_settings/3, [settings, user_id]}, :timer.minutes(30))
defdelegate update_subscription_settings(map_id, settings), to: Impl
def update_subscription_settings(map_id, settings) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_subscription_settings/2, [settings]})
defdelegate get_connection_info(map_id, connection_info), to: Impl
def delete_connection(map_id, connection_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.delete_connection/2, [connection_info]})
defdelegate update_connection_time_status(map_id, connection_info), to: Impl
def get_connection_info(map_id, connection_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.call({&Impl.get_connection_info/2, [connection_info]}, :timer.minutes(1))
defdelegate update_connection_type(map_id, connection_info), to: Impl
def update_connection_time_status(map_id, connection_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_connection_time_status/2, [connection_info]})
defdelegate update_connection_mass_status(map_id, connection_info), to: Impl
def update_connection_type(map_id, connection_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_connection_type/2, [connection_info]})
defdelegate update_connection_ship_size_type(map_id, connection_info), to: Impl
def update_connection_mass_status(map_id, connection_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_connection_mass_status/2, [connection_info]})
defdelegate update_connection_locked(map_id, connection_info), to: Impl
def update_connection_ship_size_type(map_id, connection_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_connection_ship_size_type/2, [connection_info]})
defdelegate update_connection_custom_info(map_id, connection_info), to: Impl
def update_connection_locked(map_id, connection_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_connection_locked/2, [connection_info]})
def update_connection_custom_info(map_id, connection_info) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_connection_custom_info/2, [connection_info]})
def update_signatures(map_id, signatures_update) when is_binary(map_id),
do:
map_id
|> map_pid!
|> GenServer.cast({&Impl.update_signatures/2, [signatures_update]})
@impl true
def handle_continue(:load_state, state),
do: {:noreply, state |> Impl.load_state(), {:continue, :start_map}}
@impl true
def handle_continue(:start_map, state), do: {:noreply, state |> Impl.start_map()}
@impl true
def handle_call(
{impl_function, args},
_from,
state
)
when is_function(impl_function),
do: WandererApp.GenImpl.apply_call(impl_function, state, args)
@impl true
def handle_cast(:stop, state), do: {:stop, :normal, state |> Impl.stop_map()}
@impl true
def handle_cast({impl_function, args}, state)
when is_function(impl_function) do
case WandererApp.GenImpl.apply_call(impl_function, state, args) do
{:reply, _return, updated_state} ->
{:noreply, updated_state}
_ ->
{:noreply, state}
end
end
@impl true
def handle_info(event, state), do: {:noreply, Impl.handle_event(event, state)}
defp _via(map_id), do: {:via, Registry, {WandererApp.MapRegistry, map_id}}
defdelegate update_signatures(map_id, signatures_update), to: Impl
end

View File

@@ -300,10 +300,9 @@ defmodule WandererApp.Map.SubscriptionManager do
defp is_expired(subscription) when is_map(subscription),
do: DateTime.compare(DateTime.utc_now(), subscription.active_till) == :gt
defp renew_subscription(%{auto_renew?: true} = subscription) when is_map(subscription) do
with {:ok, %{map: map}} <-
subscription |> WandererApp.MapSubscriptionRepo.load_relationships([:map]),
{:ok, estimated_price, discount} <- estimate_price(subscription, true),
defp renew_subscription(%{auto_renew?: true, map: map} = subscription)
when is_map(subscription) do
with {:ok, estimated_price, discount} <- estimate_price(subscription, true),
{:ok, map_balance} <- get_balance(map) do
case map_balance >= estimated_price do
true ->
@@ -328,7 +327,7 @@ defmodule WandererApp.Map.SubscriptionManager do
@pubsub_client.broadcast(
WandererApp.PubSub,
"maps:#{map.id}",
:subscription_settings_updated
{:subscription_settings_updated, map.id}
)
:telemetry.execute([:wanderer_app, :map, :subscription, :renew], %{count: 1}, %{
@@ -388,7 +387,7 @@ defmodule WandererApp.Map.SubscriptionManager do
@pubsub_client.broadcast(
WandererApp.PubSub,
"maps:#{map.id}",
:subscription_settings_updated
{:subscription_settings_updated, map.id}
)
case WandererApp.License.LicenseManager.get_license_by_map_id(map.id) do
@@ -423,7 +422,7 @@ defmodule WandererApp.Map.SubscriptionManager do
@pubsub_client.broadcast(
WandererApp.PubSub,
"maps:#{subscription.map_id}",
:subscription_settings_updated
{:subscription_settings_updated, subscription.map_id}
)
case WandererApp.License.LicenseManager.get_license_by_map_id(subscription.map_id) do

View File

@@ -29,20 +29,20 @@ defmodule WandererApp.Map.ZkbDataFetcher do
kills_enabled = Application.get_env(:wanderer_app, :wanderer_kills_service_enabled, true)
if kills_enabled do
WandererApp.Map.RegistryHelper.list_all_maps()
{:ok, started_maps_ids} = WandererApp.Cache.lookup("started_maps", [])
started_maps_ids
|> Task.async_stream(
fn %{id: map_id, pid: _server_pid} ->
fn map_id ->
try do
if WandererApp.Map.Server.map_pid(map_id) do
# Always update kill counts
update_map_kills(map_id)
# Always update kill counts
update_map_kills(map_id)
# Update detailed kills for maps with active subscriptions
{:ok, is_subscription_active} = map_id |> WandererApp.Map.is_subscription_active?()
# Update detailed kills for maps with active subscriptions
{:ok, is_subscription_active} = map_id |> WandererApp.Map.is_subscription_active?()
if is_subscription_active do
update_detailed_map_kills(map_id)
end
if is_subscription_active do
update_detailed_map_kills(map_id)
end
rescue
e ->

View File

@@ -231,31 +231,15 @@ defmodule WandererApp.Map.Operations.Connections do
attrs
) do
with {:ok, conn_struct} <- MapConnectionRepo.get_by_id(map_id, conn_id),
result <-
:ok <-
(try do
_allowed_keys = [
:mass_status,
:ship_size_type,
:time_status,
:type
]
_update_map =
attrs
|> Enum.filter(fn {k, _v} ->
k in ["mass_status", "ship_size_type", "time_status", "type"]
end)
|> Enum.map(fn {k, v} -> {String.to_atom(k), v} end)
|> Enum.into(%{})
res = apply_connection_updates(map_id, conn_struct, attrs, char_id)
res
rescue
error ->
Logger.error("[update_connection] Exception: #{inspect(error)}")
{:error, :exception}
end),
:ok <- result do
end) do
# Since GenServer updates are asynchronous, manually apply updates to the current struct
# to return the correct data immediately instead of refetching from potentially stale cache
updated_attrs =
@@ -374,6 +358,7 @@ defmodule WandererApp.Map.Operations.Connections do
"ship_size_type" -> maybe_update_ship_size_type(map_id, conn, val)
"time_status" -> maybe_update_time_status(map_id, conn, val)
"type" -> maybe_update_type(map_id, conn, val)
"locked" -> maybe_update_locked(map_id, conn, val)
_ -> :ok
end
@@ -429,6 +414,16 @@ defmodule WandererApp.Map.Operations.Connections do
})
end
defp maybe_update_locked(_map_id, _conn, nil), do: :ok
defp maybe_update_locked(map_id, conn, value) do
Server.update_connection_locked(map_id, %{
solar_system_source_id: conn.solar_system_source,
solar_system_target_id: conn.solar_system_target,
locked: value
})
end
@doc "Creates a connection between two systems"
@spec create_connection(String.t(), map(), String.t()) ::
{:ok, :created} | {:skip, :exists} | {:error, atom()}

View File

@@ -5,9 +5,41 @@ defmodule WandererApp.Map.Operations.Signatures do
require Logger
alias WandererApp.Map.Operations
alias WandererApp.Api.{MapSystem, MapSystemSignature}
alias WandererApp.Api.{Character, MapSystem, MapSystemSignature}
alias WandererApp.Map.Server
# Private helper to validate character_eve_id from params
# If character_eve_id is provided in params, validates it exists in the system
# If not provided, falls back to the owner's character ID
@spec validate_character_eve_id(map() | nil, String.t()) ::
{:ok, String.t()} | {:error, :invalid_character}
defp validate_character_eve_id(params, fallback_char_id) when is_map(params) do
case Map.get(params, "character_eve_id") do
nil ->
# No character_eve_id provided, use fallback (owner's character)
{:ok, fallback_char_id}
provided_char_id when is_binary(provided_char_id) ->
# Validate the provided character_eve_id exists
case Character.by_eve_id(provided_char_id) do
{:ok, _character} ->
{:ok, provided_char_id}
_ ->
{:error, :invalid_character}
end
_ ->
# Invalid format
{:error, :invalid_character}
end
end
# Handle nil or non-map params by falling back to owner's character
defp validate_character_eve_id(_params, fallback_char_id) do
{:ok, fallback_char_id}
end
@spec list_signatures(String.t()) :: [map()]
def list_signatures(map_id) do
systems = Operations.list_systems(map_id)
@@ -41,11 +73,12 @@ defmodule WandererApp.Map.Operations.Signatures do
%{"solar_system_id" => solar_system_id} = params
)
when is_integer(solar_system_id) do
# Convert solar_system_id to system_id for internal use
with {:ok, system} <- MapSystem.by_map_id_and_solar_system_id(map_id, solar_system_id) do
# Validate character first, then convert solar_system_id to system_id
with {:ok, validated_char_id} <- validate_character_eve_id(params, char_id),
{:ok, system} <- MapSystem.by_map_id_and_solar_system_id(map_id, solar_system_id) do
attrs =
params
|> Map.put("character_eve_id", char_id)
|> Map.put("character_eve_id", validated_char_id)
|> Map.put("system_id", system.id)
|> Map.delete("solar_system_id")
@@ -54,7 +87,7 @@ defmodule WandererApp.Map.Operations.Signatures do
updated_signatures: [],
removed_signatures: [],
solar_system_id: solar_system_id,
character_id: char_id,
character_id: validated_char_id,
user_id: user_id,
delete_connection_with_sigs: false
}) do
@@ -86,6 +119,10 @@ defmodule WandererApp.Map.Operations.Signatures do
{:error, :unexpected_error}
end
else
{:error, :invalid_character} ->
Logger.error("[create_signature] Invalid character_eve_id provided")
{:error, :invalid_character}
_ ->
Logger.error(
"[create_signature] System not found for solar_system_id: #{solar_system_id}"
@@ -111,7 +148,9 @@ defmodule WandererApp.Map.Operations.Signatures do
sig_id,
params
) do
with {:ok, sig} <- MapSystemSignature.by_id(sig_id),
# Validate character first, then look up signature and system
with {:ok, validated_char_id} <- validate_character_eve_id(params, char_id),
{:ok, sig} <- MapSystemSignature.by_id(sig_id),
{:ok, system} <- MapSystem.by_id(sig.system_id) do
base = %{
"eve_id" => sig.eve_id,
@@ -120,7 +159,7 @@ defmodule WandererApp.Map.Operations.Signatures do
"group" => sig.group,
"type" => sig.type,
"custom_info" => sig.custom_info,
"character_eve_id" => char_id,
"character_eve_id" => validated_char_id,
"description" => sig.description,
"linked_system_id" => sig.linked_system_id
}
@@ -133,7 +172,7 @@ defmodule WandererApp.Map.Operations.Signatures do
updated_signatures: [attrs],
removed_signatures: [],
solar_system_id: system.solar_system_id,
character_id: char_id,
character_id: validated_char_id,
user_id: user_id,
delete_connection_with_sigs: false
})
@@ -151,6 +190,10 @@ defmodule WandererApp.Map.Operations.Signatures do
_ -> {:ok, attrs}
end
else
{:error, :invalid_character} ->
Logger.error("[update_signature] Invalid character_eve_id provided")
{:error, :invalid_character}
err ->
Logger.error("[update_signature] Unexpected error: #{inspect(err)}")
{:error, :unexpected_error}
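The new validate_character_eve_id/2 helper gives both the create and update paths the same fallback behaviour. Illustrative calls from inside the module; the EVE ids are example values and the return shapes follow the helper above:

# provided id that resolves via Character.by_eve_id/1
validate_character_eve_id(%{"character_eve_id" => "2112000001"}, owner_char_id)
#=> {:ok, "2112000001"}

# id that does not resolve
validate_character_eve_id(%{"character_eve_id" => "999"}, owner_char_id)
#=> {:error, :invalid_character}

# missing key or non-map params fall back to the owner's character
validate_character_eve_id(%{}, owner_char_id)   #=> {:ok, owner_char_id}
validate_character_eve_id(nil, owner_char_id)   #=> {:ok, owner_char_id}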


@@ -35,20 +35,37 @@ defmodule WandererApp.Map.Operations.Systems do
# Private helper for batch upsert
defp create_system_batch(%{map_id: map_id, user_id: user_id, char_id: char_id}, params) do
do_create_system(map_id, user_id, char_id, params)
{:ok, solar_system_id} = fetch_system_id(params)
update_existing = fetch_update_existing(params, false)
map_id
|> WandererApp.Map.check_location(%{solar_system_id: solar_system_id})
|> case do
{:ok, _location} ->
do_create_system(map_id, user_id, char_id, params)
{:error, :already_exists} ->
if update_existing do
do_update_system(map_id, user_id, char_id, solar_system_id, params)
else
:ok
end
end
end
defp do_create_system(map_id, user_id, char_id, params) do
with {:ok, system_id} <- fetch_system_id(params),
update_existing <- fetch_update_existing(params, false),
coords <- normalize_coordinates(params),
:ok <-
Server.add_system(
map_id,
%{solar_system_id: system_id, coordinates: coords},
%{solar_system_id: system_id, coordinates: coords, extra: params},
user_id,
char_id
char_id,
update_existing: update_existing
) do
# System creation is async, but if add_system returns :ok,
# it means the operation was queued successfully
{:ok, %{solar_system_id: system_id}}
else
@@ -63,15 +80,26 @@ defmodule WandererApp.Map.Operations.Systems do
end
@spec update_system(Plug.Conn.t(), integer(), map()) :: {:ok, map()} | {:error, atom()}
def update_system(%{assigns: %{map_id: map_id}} = _conn, system_id, attrs) do
with {:ok, current} <- MapSystemRepo.get_by_map_and_solar_system_id(map_id, system_id),
x_raw <- Map.get(attrs, "position_x", Map.get(attrs, :position_x, current.position_x)),
y_raw <- Map.get(attrs, "position_y", Map.get(attrs, :position_y, current.position_y)),
def update_system(
%{assigns: %{map_id: map_id, owner_character_id: char_id, owner_user_id: user_id}} =
_conn,
solar_system_id,
attrs
) do
do_update_system(map_id, user_id, char_id, solar_system_id, attrs)
end
def update_system(_conn, _solar_system_id, _attrs), do: {:error, :missing_params}
defp do_update_system(map_id, _user_id, _char_id, solar_system_id, params) do
with {:ok, current} <- MapSystemRepo.get_by_map_and_solar_system_id(map_id, solar_system_id),
x_raw <- Map.get(params, "position_x", Map.get(params, :position_x, current.position_x)),
y_raw <- Map.get(params, "position_y", Map.get(params, :position_y, current.position_y)),
{:ok, x} <- parse_int(x_raw, "position_x"),
{:ok, y} <- parse_int(y_raw, "position_y"),
coords = %{x: x, y: y},
:ok <- apply_system_updates(map_id, system_id, attrs, coords),
{:ok, system} <- MapSystemRepo.get_by_map_and_solar_system_id(map_id, system_id) do
:ok <- apply_system_updates(map_id, solar_system_id, params, coords),
{:ok, system} <- MapSystemRepo.get_by_map_and_solar_system_id(map_id, solar_system_id) do
{:ok, system}
else
{:error, reason} when is_binary(reason) ->
@@ -84,8 +112,6 @@ defmodule WandererApp.Map.Operations.Systems do
end
end
def update_system(_conn, _system_id, _attrs), do: {:error, :missing_params}
@spec delete_system(Plug.Conn.t(), integer()) :: {:ok, integer()} | {:error, atom()}
def delete_system(
%{assigns: %{map_id: map_id, owner_character_id: char_id, owner_user_id: user_id}} =
@@ -148,6 +174,15 @@ defmodule WandererApp.Map.Operations.Systems do
defp fetch_system_id(_), do: {:error, "Missing system identifier (id)"}
defp fetch_update_existing(%{"update_existing" => update_existing}, _default),
do: update_existing
defp fetch_update_existing(%{update_existing: update_existing}, _default)
when not is_nil(update_existing),
do: update_existing
defp fetch_update_existing(_, default), do: default
defp parse_int(val, _field) when is_integer(val), do: {:ok, val}
defp parse_int(val, field) when is_binary(val) do
@@ -232,6 +267,15 @@ defmodule WandererApp.Map.Operations.Systems do
labels: Enum.join(labels, ",")
})
"custom_name" ->
{:ok, solar_system_info} =
WandererApp.CachedInfo.get_system_static_info(system_id)
Server.update_system_name(map_id, %{
solar_system_id: system_id,
name: val || solar_system_info.solar_system_name
})
"temporary_name" ->
Server.update_system_temporary_name(map_id, %{
solar_system_id: system_id,
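create_system_batch/2 above now upserts: when check_location reports the system is already on the map and the request sets update_existing, it falls through to do_update_system/5 instead of skipping. A hypothetical batch item; the identifier key accepted by fetch_system_id/1 is not shown in this diff, so that key name is an assumption:

params = %{
  "solar_system_id" => 30000142,   # assumed identifier key; example system id
  "position_x" => 100,
  "position_y" => 200,
  "update_existing" => true        # update instead of skipping an already mapped system
}

create_system_batch(%{map_id: map_id, user_id: user_id, char_id: char_id}, params)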


@@ -5,7 +5,7 @@ defmodule WandererApp.Map.Server.AclsImpl do
@pubsub_client Application.compile_env(:wanderer_app, :pubsub_client)
def handle_map_acl_updated(%{map_id: map_id, map: old_map} = state, added_acls, removed_acls) do
def handle_map_acl_updated(map_id, added_acls, removed_acls) do
{:ok, map} =
WandererApp.MapRepo.get(map_id,
acls: [
@@ -63,7 +63,11 @@ defmodule WandererApp.Map.Server.AclsImpl do
broadcast_acl_updates({:ok, result}, map_id)
%{state | map: Map.merge(old_map, map_update)}
{:ok, %{map: old_map}} = WandererApp.Map.get_map_state(map_id)
WandererApp.Map.update_map_state(map_id, %{
map: Map.merge(old_map, map_update)
})
end
def handle_acl_updated(map_id, acl_id) do
@@ -113,8 +117,18 @@ defmodule WandererApp.Map.Server.AclsImpl do
track_acls(rest)
end
defp track_acl(acl_id),
do: @pubsub_client.subscribe(WandererApp.PubSub, "acls:#{acl_id}")
defp track_acl(acl_id) do
Cachex.get_and_update(:acl_cache, acl_id, fn acl ->
case acl do
nil ->
@pubsub_client.subscribe(WandererApp.PubSub, "acls:#{acl_id}")
{:commit, acl_id}
_ ->
{:ignore, nil}
end
end)
end
defp broadcast_acl_updates(
{:ok,
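track_acl/1 above now uses Cachex.get_and_update/3 so the PubSub subscription to an ACL topic is made only the first time an id is seen, preventing duplicate subscriptions when handle_map_acl_updated runs repeatedly. The idiom in isolation; an :acl_cache Cachex instance is assumed to be started elsewhere in the supervision tree:

Cachex.get_and_update(:acl_cache, acl_id, fn
  nil ->
    # first time this ACL is seen: subscribe and remember it
    @pubsub_client.subscribe(WandererApp.PubSub, "acls:#{acl_id}")
    {:commit, acl_id}

  _already_tracked ->
    # already subscribed: leave the cached value untouched
    {:ignore, nil}
end)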


@@ -5,56 +5,33 @@ defmodule WandererApp.Map.Server.CharactersImpl do
alias WandererApp.Map.Server.{Impl, ConnectionsImpl, SystemsImpl}
def add_character(%{map_id: map_id} = state, %{id: character_id} = character, track_character) do
Task.start_link(fn ->
with :ok <- map_id |> WandererApp.Map.add_character(character),
{:ok, _settings} <-
WandererApp.MapCharacterSettingsRepo.create(%{
character_id: character_id,
map_id: map_id,
tracked: track_character
}),
{:ok, character} <- WandererApp.Character.get_character(character_id) do
Impl.broadcast!(map_id, :character_added, character)
def cleanup_characters(map_id) do
{:ok, invalidate_character_ids} =
WandererApp.Cache.get_and_remove(
"map_#{map_id}:invalidate_character_ids",
[]
)
# ADDITIVE: Also broadcast to external event system (webhooks/WebSocket)
WandererApp.ExternalEvents.broadcast(map_id, :character_added, character)
:telemetry.execute([:wanderer_app, :map, :character, :added], %{count: 1})
:ok
else
{:error, :not_found} ->
:ok
if Enum.empty?(invalidate_character_ids) do
:ok
else
{:ok, %{acls: acls}} =
WandererApp.MapRepo.get(map_id,
acls: [
:owner_id,
members: [:role, :eve_character_id, :eve_corporation_id, :eve_alliance_id]
]
)
_error ->
{:ok, character} = WandererApp.Character.get_character(character_id)
Impl.broadcast!(map_id, :character_added, character)
# ADDITIVE: Also broadcast to external event system (webhooks/WebSocket)
WandererApp.ExternalEvents.broadcast(map_id, :character_added, character)
:ok
end
end)
state
process_invalidate_characters(invalidate_character_ids, map_id, acls)
end
end
def remove_character(map_id, character_id) do
Task.start_link(fn ->
with :ok <- WandererApp.Map.remove_character(map_id, character_id),
{:ok, character} <- WandererApp.Character.get_map_character(map_id, character_id) do
Impl.broadcast!(map_id, :character_removed, character)
def track_characters(_map_id, []), do: :ok
# ADDITIVE: Also broadcast to external event system (webhooks/WebSocket)
WandererApp.ExternalEvents.broadcast(map_id, :character_removed, character)
:telemetry.execute([:wanderer_app, :map, :character, :removed], %{count: 1})
:ok
else
{:error, _error} ->
:ok
end
end)
def track_characters(map_id, [character_id | rest]) do
track_character(map_id, character_id)
track_characters(map_id, rest)
end
def update_tracked_characters(map_id) do
@@ -94,18 +71,18 @@ defmodule WandererApp.Map.Server.CharactersImpl do
end)
end
def untrack_character(true, map_id, character_id) do
defp untrack_character(true, map_id, character_id) do
WandererApp.Character.TrackerManager.update_track_settings(character_id, %{
map_id: map_id,
track: false
})
end
def untrack_character(_is_character_map_active, _map_id, character_id) do
defp untrack_character(_is_character_map_active, _map_id, character_id) do
:ok
end
def is_character_map_active?(map_id, character_id) do
defp is_character_map_active?(map_id, character_id) do
case WandererApp.Character.get_character_state(character_id) do
{:ok, %{active_maps: active_maps}} ->
map_id in active_maps
@@ -115,29 +92,9 @@ defmodule WandererApp.Map.Server.CharactersImpl do
end
end
def cleanup_characters(map_id, owner_id) do
{:ok, invalidate_character_ids} =
WandererApp.Cache.get_and_remove(
"map_#{map_id}:invalidate_character_ids",
[]
)
defp process_invalidate_characters(invalidate_character_ids, map_id, acls) do
{:ok, %{map: %{owner_id: owner_id}}} = WandererApp.Map.get_map_state(map_id)
if Enum.empty?(invalidate_character_ids) do
:ok
else
{:ok, %{acls: acls}} =
WandererApp.MapRepo.get(map_id,
acls: [
:owner_id,
members: [:role, :eve_character_id, :eve_corporation_id, :eve_alliance_id]
]
)
process_invalidate_characters(invalidate_character_ids, map_id, owner_id, acls)
end
end
defp process_invalidate_characters(invalidate_character_ids, map_id, owner_id, acls) do
invalidate_character_ids
|> Task.async_stream(
fn character_id ->
@@ -194,6 +151,25 @@ defmodule WandererApp.Map.Server.CharactersImpl do
end
end
defp remove_character(map_id, character_id) do
Task.start_link(fn ->
with :ok <- WandererApp.Map.remove_character(map_id, character_id),
{:ok, character} <- WandererApp.Character.get_map_character(map_id, character_id) do
Impl.broadcast!(map_id, :character_removed, character)
# ADDITIVE: Also broadcast to external event system (webhooks/WebSocket)
WandererApp.ExternalEvents.broadcast(map_id, :character_removed, character)
:telemetry.execute([:wanderer_app, :map, :character, :removed], %{count: 1})
:ok
else
{:error, _error} ->
:ok
end
end)
end
defp remove_and_untrack_characters(map_id, character_ids) do
Logger.debug(fn ->
"Map #{map_id} - remove and untrack characters #{inspect(character_ids)}"
@@ -217,14 +193,7 @@ defmodule WandererApp.Map.Server.CharactersImpl do
end
end
def track_characters(_map_id, []), do: :ok
def track_characters(map_id, [character_id | rest]) do
track_character(map_id, character_id)
track_characters(map_id, rest)
end
def update_characters(%{map_id: map_id} = state) do
def update_characters(map_id) do
try do
{:ok, presence_character_ids} =
WandererApp.Cache.lookup("map_#{map_id}:presence_character_ids", [])
@@ -246,11 +215,13 @@ defmodule WandererApp.Map.Server.CharactersImpl do
update
|> case do
{:character_location, location_info, old_location_info} ->
{:ok, map_state} = WandererApp.Map.get_map_state(map_id)
update_location(
map_state,
character_id,
location_info,
old_location_info,
state
old_location_info
)
:broadcast
@@ -330,34 +301,35 @@ defmodule WandererApp.Map.Server.CharactersImpl do
end
defp update_location(
%{map: %{scope: scope}, map_id: map_id, map_opts: map_opts} =
_state,
character_id,
location,
old_location,
%{map: map, map_id: map_id, rtree_name: rtree_name, map_opts: map_opts} = _state
old_location
) do
start_solar_system_id =
WandererApp.Cache.take("map:#{map_id}:character:#{character_id}:start_solar_system_id")
case is_nil(old_location.solar_system_id) and
is_nil(start_solar_system_id) and
ConnectionsImpl.can_add_location(map.scope, location.solar_system_id) do
ConnectionsImpl.can_add_location(scope, location.solar_system_id) do
true ->
:ok = SystemsImpl.maybe_add_system(map_id, location, nil, rtree_name, map_opts)
:ok = SystemsImpl.maybe_add_system(map_id, location, nil, map_opts)
_ ->
if is_nil(start_solar_system_id) || start_solar_system_id == old_location.solar_system_id do
ConnectionsImpl.is_connection_valid(
map.scope,
scope,
old_location.solar_system_id,
location.solar_system_id
)
|> case do
true ->
:ok =
SystemsImpl.maybe_add_system(map_id, location, old_location, rtree_name, map_opts)
SystemsImpl.maybe_add_system(map_id, location, old_location, map_opts)
:ok =
SystemsImpl.maybe_add_system(map_id, old_location, location, rtree_name, map_opts)
SystemsImpl.maybe_add_system(map_id, old_location, location, map_opts)
if is_character_in_space?(location) do
:ok =
@@ -366,7 +338,8 @@ defmodule WandererApp.Map.Server.CharactersImpl do
location,
old_location,
character_id,
false
false,
nil
)
end
@@ -380,17 +353,49 @@ defmodule WandererApp.Map.Server.CharactersImpl do
end
end
defp is_character_in_space?(%{station_id: station_id, structure_id: structure_id} = location) do
is_nil(structure_id) and is_nil(station_id)
defp is_character_in_space?(%{station_id: station_id, structure_id: structure_id} = _location),
do: is_nil(structure_id) && is_nil(station_id)
defp add_character(
map_id,
%{id: character_id} = map_character,
track_character
) do
Task.start_link(fn ->
with :ok <- map_id |> WandererApp.Map.add_character(map_character),
{:ok, _settings} <-
WandererApp.MapCharacterSettingsRepo.create(%{
character_id: character_id,
map_id: map_id,
tracked: track_character
}) do
Impl.broadcast!(map_id, :character_added, map_character)
# ADDITIVE: Also broadcast to external event system (webhooks/WebSocket)
WandererApp.ExternalEvents.broadcast(map_id, :character_added, map_character)
:telemetry.execute([:wanderer_app, :map, :character, :added], %{count: 1})
:ok
else
{:error, :not_found} ->
:ok
_error ->
Impl.broadcast!(map_id, :character_added, map_character)
# ADDITIVE: Also broadcast to external event system (webhooks/WebSocket)
WandererApp.ExternalEvents.broadcast(map_id, :character_added, map_character)
:ok
end
end)
end
defp track_character(map_id, character_id) do
{:ok, character} =
{:ok, %{solar_system_id: solar_system_id} = map_character} =
WandererApp.Character.get_map_character(map_id, character_id, not_present: true)
WandererApp.Cache.delete("character:#{character.id}:tracking_paused")
WandererApp.Cache.delete("character:#{character_id}:tracking_paused")
add_character(%{map_id: map_id}, character, true)
add_character(map_id, map_character, true)
WandererApp.Character.TrackerManager.update_track_settings(character_id, %{
map_id: map_id,
@@ -398,7 +403,7 @@ defmodule WandererApp.Map.Server.CharactersImpl do
track_online: true,
track_location: true,
track_ship: true,
solar_system_id: character.solar_system_id
solar_system_id: solar_system_id
})
end
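The module now exposes map_id-based entry points (cleanup_characters/1, track_characters/2, update_characters/1) and reads the owner and scope from the stored map state rather than threading GenServer state through every call. A hypothetical trigger for the invalidation pass, reusing only the cache key and calls visible in this diff:

# queue a character for revalidation, then let the periodic cleanup pick it up
WandererApp.Cache.insert_or_update(
  "map_#{map_id}:invalidate_character_ids",
  [character_id],
  fn ids -> Enum.uniq([character_id | ids]) end
)

CharactersImpl.cleanup_characters(map_id)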


@@ -139,56 +139,62 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
def init_start_cache(_map_id, _connections_start_time), do: :ok
def add_connection(
%{map_id: map_id} = state,
map_id,
%{
solar_system_source_id: solar_system_source_id,
solar_system_target_id: solar_system_target_id,
character_id: character_id
} = _connection_info
) do
:ok =
maybe_add_connection(
map_id,
%{solar_system_id: solar_system_target_id},
%{
solar_system_id: solar_system_source_id
},
character_id,
true
)
} = connection_info
),
do:
maybe_add_connection(
map_id,
%{solar_system_id: solar_system_target_id},
%{
solar_system_id: solar_system_source_id
},
character_id,
true,
connection_info |> Map.get(:extra_info)
)
state
def paste_connections(
map_id,
connections,
_user_id,
character_id
) do
connections
|> Enum.each(fn %{
"source" => source,
"target" => target
} = connection ->
solar_system_source_id = source |> String.to_integer()
solar_system_target_id = target |> String.to_integer()
add_connection(map_id, %{
solar_system_source_id: solar_system_source_id,
solar_system_target_id: solar_system_target_id,
character_id: character_id,
extra_info: connection
})
end)
end
def delete_connection(
%{map_id: map_id} = state,
map_id,
%{
solar_system_source_id: solar_system_source_id,
solar_system_target_id: solar_system_target_id
} = _connection_info
) do
:ok =
maybe_remove_connection(map_id, %{solar_system_id: solar_system_target_id}, %{
solar_system_id: solar_system_source_id
})
state
end
def update_connection_type(
%{map_id: map_id} = state,
%{
solar_system_source_id: solar_system_source_id,
solar_system_target_id: solar_system_target_id,
character_id: character_id
} = _connection_info,
type
) do
state
end
),
do:
maybe_remove_connection(map_id, %{solar_system_id: solar_system_target_id}, %{
solar_system_id: solar_system_source_id
})
def get_connection_info(
%{map_id: map_id} = _state,
map_id,
%{
solar_system_source_id: solar_system_source_id,
solar_system_target_id: solar_system_target_id
@@ -210,11 +216,11 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
end
def update_connection_time_status(
%{map_id: map_id} = state,
map_id,
connection_update
),
do:
update_connection(state, :update_time_status, [:time_status], connection_update, fn
update_connection(map_id, :update_time_status, [:time_status], connection_update, fn
%{time_status: old_time_status},
%{id: connection_id, time_status: time_status} = updated_connection ->
case time_status == @connection_time_status_eol do
@@ -241,131 +247,124 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
end)
def update_connection_type(
state,
map_id,
connection_update
),
do: update_connection(state, :update_type, [:type], connection_update)
do: update_connection(map_id, :update_type, [:type], connection_update)
def update_connection_mass_status(
state,
map_id,
connection_update
),
do: update_connection(state, :update_mass_status, [:mass_status], connection_update)
do: update_connection(map_id, :update_mass_status, [:mass_status], connection_update)
def update_connection_ship_size_type(
state,
map_id,
connection_update
),
do: update_connection(state, :update_ship_size_type, [:ship_size_type], connection_update)
do: update_connection(map_id, :update_ship_size_type, [:ship_size_type], connection_update)
def update_connection_locked(
state,
map_id,
connection_update
),
do: update_connection(state, :update_locked, [:locked], connection_update)
do: update_connection(map_id, :update_locked, [:locked], connection_update)
def update_connection_custom_info(
state,
map_id,
connection_update
),
do: update_connection(state, :update_custom_info, [:custom_info], connection_update)
do: update_connection(map_id, :update_custom_info, [:custom_info], connection_update)
def cleanup_connections(%{map_id: map_id} = state) do
def cleanup_connections(map_id) do
connection_auto_expire_hours = get_connection_auto_expire_hours()
connection_auto_eol_hours = get_connection_auto_eol_hours()
connection_eol_expire_timeout_hours = get_eol_expire_timeout_mins() / 60
state =
map_id
|> WandererApp.Map.list_connections!()
|> Enum.reduce(state, fn %{
id: connection_id,
solar_system_source: solar_system_source_id,
solar_system_target: solar_system_target_id,
time_status: time_status,
type: type
},
state ->
if type == @connection_type_wormhole do
connection_start_time = get_start_time(map_id, connection_id)
new_time_status = get_new_time_status(connection_start_time, time_status)
map_id
|> WandererApp.Map.list_connections!()
|> Enum.each(fn connection ->
maybe_update_connection_time_status(map_id, connection)
end)
if new_time_status != time_status &&
is_connection_valid(
:wormholes,
solar_system_source_id,
solar_system_target_id
) do
set_start_time(map_id, connection_id, DateTime.utc_now())
state
|> update_connection_time_status(%{
solar_system_source_id: solar_system_source_id,
solar_system_target_id: solar_system_target_id,
time_status: new_time_status
})
else
state
end
else
state
end
end)
state =
map_id
|> WandererApp.Map.list_connections!()
|> Enum.filter(fn %{
id: connection_id,
solar_system_source: solar_system_source_id,
solar_system_target: solar_system_target_id,
time_status: time_status,
type: type
} ->
is_connection_exist =
is_connection_exist(
map_id,
solar_system_source_id,
solar_system_target_id
) ||
not is_nil(
WandererApp.Map.get_connection(
map_id,
solar_system_target_id,
solar_system_source_id
)
map_id
|> WandererApp.Map.list_connections!()
|> Enum.filter(fn %{
id: connection_id,
solar_system_source: solar_system_source_id,
solar_system_target: solar_system_target_id,
time_status: time_status,
type: type
} ->
is_connection_exist =
is_connection_exist(
map_id,
solar_system_source_id,
solar_system_target_id
) ||
not is_nil(
WandererApp.Map.get_connection(
map_id,
solar_system_target_id,
solar_system_source_id
)
)
not is_connection_exist ||
(type == @connection_type_wormhole &&
time_status == @connection_time_status_eol &&
is_connection_valid(
:wormholes,
solar_system_source_id,
solar_system_target_id
) &&
DateTime.diff(
DateTime.utc_now(),
get_connection_mark_eol_time(map_id, connection_id),
:hour
) >=
connection_auto_expire_hours - connection_auto_eol_hours +
connection_eol_expire_timeout_hours)
end)
|> Enum.reduce(state, fn %{
solar_system_source: solar_system_source_id,
solar_system_target: solar_system_target_id
},
state ->
delete_connection(state, %{
solar_system_source_id: solar_system_source_id,
solar_system_target_id: solar_system_target_id
})
end)
state
not is_connection_exist ||
(type == @connection_type_wormhole &&
time_status == @connection_time_status_eol &&
is_connection_valid(
:wormholes,
solar_system_source_id,
solar_system_target_id
) &&
DateTime.diff(
DateTime.utc_now(),
get_connection_mark_eol_time(map_id, connection_id),
:hour
) >=
connection_auto_expire_hours - connection_auto_eol_hours +
connection_eol_expire_timeout_hours)
end)
|> Enum.each(fn %{
solar_system_source: solar_system_source_id,
solar_system_target: solar_system_target_id
} ->
delete_connection(map_id, %{
solar_system_source_id: solar_system_source_id,
solar_system_target_id: solar_system_target_id
})
end)
end
defp maybe_update_connection_time_status(map_id, %{
id: connection_id,
solar_system_source: solar_system_source_id,
solar_system_target: solar_system_target_id,
time_status: time_status,
type: @connection_type_wormhole
}) do
connection_start_time = get_start_time(map_id, connection_id)
new_time_status = get_new_time_status(connection_start_time, time_status)
if new_time_status != time_status &&
is_connection_valid(
:wormholes,
solar_system_source_id,
solar_system_target_id
) do
set_start_time(map_id, connection_id, DateTime.utc_now())
update_connection_time_status(map_id, %{
solar_system_source_id: solar_system_source_id,
solar_system_target_id: solar_system_target_id,
time_status: new_time_status
})
end
end
defp maybe_update_connection_time_status(_map_id, _connection), do: :ok
defp maybe_update_linked_signature_time_status(
map_id,
%{
@@ -374,36 +373,36 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
solar_system_target: solar_system_target
} = updated_connection
) do
source_system =
WandererApp.Map.find_system_by_location(
with source_system when not is_nil(source_system) <-
WandererApp.Map.find_system_by_location(
map_id,
%{solar_system_id: solar_system_source}
),
target_system when not is_nil(source_system) <-
WandererApp.Map.find_system_by_location(
map_id,
%{solar_system_id: solar_system_target}
),
source_linked_signatures <-
find_linked_signatures(source_system, target_system),
target_linked_signatures <- find_linked_signatures(target_system, source_system) do
update_signatures_time_status(
map_id,
%{solar_system_id: solar_system_source}
source_system.solar_system_id,
source_linked_signatures,
time_status
)
target_system =
WandererApp.Map.find_system_by_location(
update_signatures_time_status(
map_id,
%{solar_system_id: solar_system_target}
target_system.solar_system_id,
target_linked_signatures,
time_status
)
source_linked_signatures =
find_linked_signatures(source_system, target_system)
target_linked_signatures = find_linked_signatures(target_system, source_system)
update_signatures_time_status(
map_id,
source_system.solar_system_id,
source_linked_signatures,
time_status
)
update_signatures_time_status(
map_id,
target_system.solar_system_id,
target_linked_signatures,
time_status
)
else
error ->
Logger.error("Failed to update_linked_signature_time_status: #{inspect(error)}")
end
end
defp find_linked_signatures(
@@ -439,13 +438,20 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
%{custom_info: updated_custom_info}
end
SignaturesImpl.apply_update_signature(%{map_id: map_id}, sig, update_params)
SignaturesImpl.apply_update_signature(map_id, sig, update_params)
end)
Impl.broadcast!(map_id, :signatures_updated, solar_system_id)
end
def maybe_add_connection(map_id, location, old_location, character_id, is_manual)
def maybe_add_connection(
map_id,
location,
old_location,
character_id,
is_manual,
extra_info
)
when not is_nil(location) and not is_nil(old_location) and
not is_nil(old_location.solar_system_id) and
location.solar_system_id != old_location.solar_system_id do
@@ -491,11 +497,21 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
time_status =
if connection_type == @connection_type_wormhole do
@connection_time_status_eol_24
get_time_status(
old_location.solar_system_id,
location.solar_system_id,
ship_size_type
)
else
@connection_time_status_default
end
connection_type = get_extra_info(extra_info, "type", connection_type)
ship_size_type = get_extra_info(extra_info, "ship_size_type", ship_size_type)
time_status = get_extra_info(extra_info, "time_status", time_status)
mass_status = get_extra_info(extra_info, "mass_status", 0)
locked = get_extra_info(extra_info, "locked", false)
{:ok, connection} =
WandererApp.MapConnectionRepo.create(%{
map_id: map_id,
@@ -503,7 +519,9 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
solar_system_target: location.solar_system_id,
type: connection_type,
ship_size_type: ship_size_type,
time_status: time_status
time_status: time_status,
mass_status: mass_status,
locked: locked
})
if connection_type == @connection_type_wormhole do
@@ -563,7 +581,19 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
end
end
def maybe_add_connection(_map_id, _location, _old_location, _character_id, _is_manual), do: :ok
def maybe_add_connection(
_map_id,
_location,
_old_location,
_character_id,
_is_manual,
_connection_extra_info
),
do: :ok
defp get_extra_info(nil, _key, default_value), do: default_value
defp get_extra_info(extra_info, key, default_value), do: Map.get(extra_info, key, default_value)
def get_start_time(map_id, connection_id) do
case WandererApp.Cache.get("map_#{map_id}:conn_#{connection_id}:start_time") do
@@ -721,7 +751,7 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
defp maybe_remove_connection(_map_id, _location, _old_location), do: :ok
defp update_connection(
%{map_id: map_id} = state,
map_id,
update_method,
attributes,
%{
@@ -766,12 +796,12 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
custom_info: updated_connection.custom_info
})
state
:ok
else
{:error, error} ->
Logger.error("Failed to update connection: #{inspect(error, pretty: true)}")
state
:ok
end
end
@@ -811,6 +841,41 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
defp get_ship_size_type(_source_solar_system_id, _target_solar_system_id, _connection_type),
do: @large_ship_size
defp get_time_status(
_source_solar_system_id,
_target_solar_system_id,
@frigate_ship_size
),
do: @connection_time_status_eol_4_5
defp get_time_status(
source_solar_system_id,
target_solar_system_id,
_ship_size_type
) do
# Check the wormhole class of both endpoints to pick the initial time status
{:ok, source_system_info} = get_system_static_info(source_solar_system_id)
{:ok, target_system_info} = get_system_static_info(target_solar_system_id)
cond do
# C1/2/3/4 systems always get eol_16
source_system_info.system_class in [@c1, @c2, @c3, @c4] or
target_system_info.system_class in [@c1, @c2, @c3, @c4] ->
@connection_time_status_eol_16
# C5/6 systems always get eol_24
source_system_info.system_class in [@c5, @c6] or
target_system_info.system_class in [@c5, @c6] ->
@connection_time_status_eol_24
true ->
@connection_time_status_default
end
end
defp get_time_status(_source_solar_system_id, _target_solar_system_id, _ship_size_type),
do: @connection_time_status_default
defp get_new_time_status(_start_time, @connection_time_status_default),
do: @connection_time_status_eol_24
@@ -849,4 +914,5 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
defp get_time_status_minutes(@connection_time_status_eol_16), do: @connection_eol_16_minutes
defp get_time_status_minutes(@connection_time_status_eol_24), do: @connection_eol_24_minutes
defp get_time_status_minutes(@connection_time_status_eol_48), do: @connection_eol_48_minutes
defp get_time_status_minutes(_), do: @connection_eol_24_minutes
end
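Two behavioural additions sit in this file: maybe_add_connection/6 now takes an extra_info map whose keys override the computed defaults (type, ship_size_type, time_status, mass_status, locked), and get_time_status/3 derives the initial EOL window from the wormhole class of both endpoints. A sketch of how paste_connections/4 above feeds that extra_info; the enum values and J-space ids are illustrative only:

connection = %{
  "source" => "31000005",
  "target" => "31000006",
  "time_status" => 1,
  "mass_status" => 2,
  "ship_size_type" => 1,
  "locked" => true
}

add_connection(map_id, %{
  solar_system_source_id: String.to_integer(connection["source"]),
  solar_system_target_id: String.to_integer(connection["target"]),
  character_id: character_id,
  extra_info: connection
})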


@@ -24,12 +24,10 @@ defmodule WandererApp.Map.Server.Impl do
map_opts: []
]
@systems_cleanup_timeout :timer.minutes(30)
@characters_cleanup_timeout :timer.minutes(5)
@pubsub_client Application.compile_env(:wanderer_app, :pubsub_client)
@connections_cleanup_timeout :timer.minutes(1)
@pubsub_client Application.compile_env(:wanderer_app, :pubsub_client)
@backup_state_timeout :timer.minutes(1)
@update_presence_timeout :timer.seconds(5)
@update_characters_timeout :timer.seconds(1)
@update_tracked_characters_timeout :timer.minutes(1)
@@ -37,21 +35,16 @@ defmodule WandererApp.Map.Server.Impl do
def new(), do: __struct__()
def new(args), do: __struct__(args)
def init(args) do
map_id = args[:map_id]
Logger.info("Starting map server for #{map_id}")
def do_init_state(opts) do
map_id = opts[:map_id]
ErrorTracker.set_context(%{map_id: map_id})
WandererApp.Cache.insert("map_#{map_id}:started", false)
initial_state =
%{
map_id: map_id,
rtree_name: "rtree_#{map_id}"
}
|> new()
%{
map_id: map_id,
rtree_name: Module.concat([map_id, DDRT.DynamicRtree])
}
|> new()
end
def load_state(%__MODULE__{map_id: map_id} = state) do
with {:ok, map} <-
WandererApp.MapRepo.get(map_id, [
:owner,
@@ -65,23 +58,23 @@ defmodule WandererApp.Map.Server.Impl do
{:ok, connections} <- WandererApp.MapConnectionRepo.get_by_map(map_id),
{:ok, subscription_settings} <-
WandererApp.Map.SubscriptionManager.get_active_map_subscription(map_id) do
state
initial_state
|> init_map(
map,
subscription_settings,
systems,
connections
)
|> SystemsImpl.init_map_systems(systems)
|> init_map_cache()
else
error ->
Logger.error("Failed to load map state: #{inspect(error, pretty: true)}")
state
initial_state
end
end
def start_map(%__MODULE__{map: map, map_id: map_id} = state) do
def start_map(%__MODULE__{map: map, map_id: map_id} = _state) do
WandererApp.Cache.insert("map_#{map_id}:started", false)
# Check if map was loaded successfully
case map do
nil ->
@@ -95,293 +88,142 @@ defmodule WandererApp.Map.Server.Impl do
"maps:#{map_id}"
)
Process.send_after(self(), :update_characters, @update_characters_timeout)
WandererApp.Map.CacheRTree.init_tree("rtree_#{map_id}", %{width: 150, verbose: false})
Process.send_after(self(), {:update_characters, map_id}, @update_characters_timeout)
Process.send_after(
self(),
:update_tracked_characters,
{:update_tracked_characters, map_id},
@update_tracked_characters_timeout
)
Process.send_after(self(), :update_presence, @update_presence_timeout)
Process.send_after(self(), :cleanup_connections, @connections_cleanup_timeout)
Process.send_after(self(), :cleanup_systems, 10_000)
Process.send_after(self(), :cleanup_characters, @characters_cleanup_timeout)
Process.send_after(self(), :backup_state, @backup_state_timeout)
Process.send_after(self(), {:update_presence, map_id}, @update_presence_timeout)
WandererApp.Cache.insert("map_#{map_id}:started", true)
# Initialize zkb cache structure to prevent timing issues
cache_key = "map:#{map_id}:zkb:detailed_kills"
WandererApp.Cache.insert(cache_key, %{}, ttl: :timer.hours(24))
WandererApp.Cache.insert("map:#{map_id}:zkb:detailed_kills", %{}, ttl: :timer.hours(24))
broadcast!(map_id, :map_server_started)
@pubsub_client.broadcast!(WandererApp.PubSub, "maps", :map_server_started)
:telemetry.execute([:wanderer_app, :map, :started], %{count: 1})
state
else
error ->
Logger.error("Failed to start map: #{inspect(error, pretty: true)}")
state
end
end
end
def stop_map(%{map_id: map_id} = state) do
def stop_map(map_id) do
Logger.debug(fn -> "Stopping map server for #{map_id}" end)
@pubsub_client.unsubscribe(
WandererApp.PubSub,
"maps:#{map_id}"
)
WandererApp.Cache.delete("map_#{map_id}:started")
WandererApp.Cache.delete("map_characters-#{map_id}")
WandererApp.Map.CacheRTree.clear_tree("rtree_#{map_id}")
WandererApp.Map.delete_map_state(map_id)
WandererApp.Cache.insert_or_update(
"started_maps",
[],
fn started_maps ->
started_maps
|> Enum.reject(fn started_map_id -> started_map_id == map_id end)
end
)
:telemetry.execute([:wanderer_app, :map, :stopped], %{count: 1})
state
|> maybe_stop_rtree()
end
def get_map(%{map: map} = _state), do: {:ok, map}
defdelegate cleanup_systems(map_id), to: SystemsImpl
defdelegate cleanup_connections(map_id), to: ConnectionsImpl
defdelegate cleanup_characters(map_id), to: CharactersImpl
defdelegate add_character(state, character, track_character), to: CharactersImpl
defdelegate untrack_characters(map_id, characters_ids), to: CharactersImpl
def remove_character(%{map_id: map_id} = state, character_id) do
CharactersImpl.remove_character(map_id, character_id)
defdelegate add_system(map_id, system_info, user_id, character_id, opts \\ []), to: SystemsImpl
state
end
defdelegate paste_connections(map_id, connections, user_id, character_id), to: ConnectionsImpl
def untrack_characters(%{map_id: map_id} = state, characters_ids) do
CharactersImpl.untrack_characters(map_id, characters_ids)
defdelegate paste_systems(map_id, systems, user_id, character_id, opts), to: SystemsImpl
state
end
defdelegate add_system_comment(map_id, comment_info, user_id, character_id), to: SystemsImpl
defdelegate add_system(state, system_info, user_id, character_id), to: SystemsImpl
defdelegate add_system_comment(state, comment_info, user_id, character_id), to: SystemsImpl
defdelegate remove_system_comment(state, comment_id, user_id, character_id), to: SystemsImpl
defdelegate remove_system_comment(map_id, comment_id, user_id, character_id), to: SystemsImpl
defdelegate delete_systems(
state,
map_id,
removed_ids,
user_id,
character_id
),
to: SystemsImpl
defdelegate update_system_name(state, update), to: SystemsImpl
defdelegate update_system_name(map_id, update), to: SystemsImpl
defdelegate update_system_description(state, update), to: SystemsImpl
defdelegate update_system_description(map_id, update), to: SystemsImpl
defdelegate update_system_status(state, update), to: SystemsImpl
defdelegate update_system_status(map_id, update), to: SystemsImpl
defdelegate update_system_tag(state, update), to: SystemsImpl
defdelegate update_system_tag(map_id, update), to: SystemsImpl
defdelegate update_system_temporary_name(state, update), to: SystemsImpl
defdelegate update_system_temporary_name(map_id, update), to: SystemsImpl
defdelegate update_system_locked(state, update), to: SystemsImpl
defdelegate update_system_locked(map_id, update), to: SystemsImpl
defdelegate update_system_labels(state, update), to: SystemsImpl
defdelegate update_system_labels(map_id, update), to: SystemsImpl
defdelegate update_system_linked_sig_eve_id(state, update), to: SystemsImpl
defdelegate update_system_linked_sig_eve_id(map_id, update), to: SystemsImpl
defdelegate update_system_position(state, update), to: SystemsImpl
defdelegate update_system_position(map_id, update), to: SystemsImpl
defdelegate add_hub(state, hub_info), to: SystemsImpl
defdelegate add_hub(map_id, hub_info), to: SystemsImpl
defdelegate remove_hub(state, hub_info), to: SystemsImpl
defdelegate remove_hub(map_id, hub_info), to: SystemsImpl
defdelegate add_ping(state, ping_info), to: PingsImpl
defdelegate add_ping(map_id, ping_info), to: PingsImpl
defdelegate cancel_ping(state, ping_info), to: PingsImpl
defdelegate cancel_ping(map_id, ping_info), to: PingsImpl
defdelegate add_connection(state, connection_info), to: ConnectionsImpl
defdelegate add_connection(map_id, connection_info), to: ConnectionsImpl
defdelegate delete_connection(state, connection_info), to: ConnectionsImpl
defdelegate delete_connection(map_id, connection_info), to: ConnectionsImpl
defdelegate get_connection_info(state, connection_info), to: ConnectionsImpl
defdelegate get_connection_info(map_id, connection_info), to: ConnectionsImpl
defdelegate update_connection_time_status(state, connection_update), to: ConnectionsImpl
defdelegate update_connection_time_status(map_id, connection_update), to: ConnectionsImpl
defdelegate update_connection_type(state, connection_update), to: ConnectionsImpl
defdelegate update_connection_type(map_id, connection_update), to: ConnectionsImpl
defdelegate update_connection_mass_status(state, connection_update), to: ConnectionsImpl
defdelegate update_connection_mass_status(map_id, connection_update), to: ConnectionsImpl
defdelegate update_connection_ship_size_type(state, connection_update), to: ConnectionsImpl
defdelegate update_connection_ship_size_type(map_id, connection_update), to: ConnectionsImpl
defdelegate update_connection_locked(state, connection_update), to: ConnectionsImpl
defdelegate update_connection_locked(map_id, connection_update), to: ConnectionsImpl
defdelegate update_connection_custom_info(state, signatures_update), to: ConnectionsImpl
defdelegate update_connection_custom_info(map_id, connection_update), to: ConnectionsImpl
defdelegate update_signatures(state, signatures_update), to: SignaturesImpl
defdelegate update_signatures(map_id, signatures_update), to: SignaturesImpl
def import_settings(%{map_id: map_id} = state, settings, user_id) do
def import_settings(map_id, settings, user_id) do
WandererApp.Cache.put(
"map_#{map_id}:importing",
true
)
state =
state
|> maybe_import_systems(settings, user_id, nil)
|> maybe_import_connections(settings, user_id)
|> maybe_import_hubs(settings, user_id)
maybe_import_systems(map_id, settings, user_id, nil)
maybe_import_connections(map_id, settings, user_id)
maybe_import_hubs(map_id, settings, user_id)
WandererApp.Cache.take("map_#{map_id}:importing")
state
end
def update_subscription_settings(%{map: map} = state, subscription_settings),
do: %{
state
| map: map |> WandererApp.Map.update_subscription_settings!(subscription_settings)
}
def handle_event(:update_characters, state) do
Process.send_after(self(), :update_characters, @update_characters_timeout)
CharactersImpl.update_characters(state)
state
end
def handle_event(:update_tracked_characters, %{map_id: map_id} = state) do
Process.send_after(self(), :update_tracked_characters, @update_tracked_characters_timeout)
CharactersImpl.update_tracked_characters(map_id)
state
end
def handle_event(:update_presence, %{map_id: map_id} = state) do
Process.send_after(self(), :update_presence, @update_presence_timeout)
update_presence(map_id)
state
end
def handle_event(:backup_state, state) do
Process.send_after(self(), :backup_state, @backup_state_timeout)
{:ok, _map_state} = state |> save_map_state()
state
end
def handle_event(
{:map_acl_updated, added_acls, removed_acls},
state
) do
state |> AclsImpl.handle_map_acl_updated(added_acls, removed_acls)
end
def handle_event({:acl_updated, %{acl_id: acl_id}}, %{map_id: map_id} = state) do
AclsImpl.handle_acl_updated(map_id, acl_id)
state
end
def handle_event({:acl_deleted, %{acl_id: acl_id}}, %{map_id: map_id} = state) do
AclsImpl.handle_acl_deleted(map_id, acl_id)
state
end
def handle_event(:cleanup_connections, state) do
Process.send_after(self(), :cleanup_connections, @connections_cleanup_timeout)
state |> ConnectionsImpl.cleanup_connections()
end
def handle_event(:cleanup_characters, %{map_id: map_id, map: %{owner_id: owner_id}} = state) do
Process.send_after(self(), :cleanup_characters, @characters_cleanup_timeout)
CharactersImpl.cleanup_characters(map_id, owner_id)
state
end
def handle_event(:cleanup_systems, state) do
Process.send_after(self(), :cleanup_systems, @systems_cleanup_timeout)
state |> SystemsImpl.cleanup_systems()
end
def handle_event(:subscription_settings_updated, %{map: map, map_id: map_id} = state) do
{:ok, subscription_settings} =
WandererApp.Map.SubscriptionManager.get_active_map_subscription(map_id)
%{
state
| map:
map
|> WandererApp.Map.update_subscription_settings!(subscription_settings)
}
end
def handle_event({:options_updated, options}, %{map: map} = state) do
map |> WandererApp.Map.update_options!(options)
%{state | map_opts: map_options(options)}
end
def handle_event({ref, _result}, %{map_id: _map_id} = state) when is_reference(ref) do
Process.demonitor(ref, [:flush])
state
end
def handle_event(msg, state) do
Logger.warning("Unhandled event: #{inspect(msg)}")
state
end
def broadcast!(map_id, event, payload \\ nil) do
if can_broadcast?(map_id) do
@pubsub_client.broadcast!(WandererApp.PubSub, map_id, %{
event: event,
payload: payload
})
end
:ok
end
defp can_broadcast?(map_id),
do:
not WandererApp.Cache.lookup!("map_#{map_id}:importing", false) and
WandererApp.Cache.lookup!("map_#{map_id}:started", false)
def get_update_map(update, attributes),
do:
{:ok,
Enum.reduce(attributes, Map.new(), fn attribute, map ->
map |> Map.put_new(attribute, get_in(update, [Access.key(attribute)]))
end)}
defp map_options(options) do
[
layout: options |> Map.get("layout", "left_to_right"),
store_custom_labels:
options |> Map.get("store_custom_labels", "false") |> String.to_existing_atom(),
show_linked_signature_id:
options |> Map.get("show_linked_signature_id", "false") |> String.to_existing_atom(),
show_linked_signature_id_temp_name:
options
|> Map.get("show_linked_signature_id_temp_name", "false")
|> String.to_existing_atom(),
show_temp_system_name:
options |> Map.get("show_temp_system_name", "false") |> String.to_existing_atom(),
restrict_offline_showing:
options |> Map.get("restrict_offline_showing", "false") |> String.to_existing_atom()
]
end
defp save_map_state(%{map_id: map_id} = _state) do
def save_map_state(map_id) do
systems_last_activity =
map_id
|> WandererApp.Map.list_systems!()
@@ -426,19 +268,165 @@ defmodule WandererApp.Map.Server.Impl do
})
end
defp maybe_stop_rtree(%{rtree_name: rtree_name} = state) do
case Process.whereis(rtree_name) do
nil ->
:ok
def handle_event({:update_characters, map_id} = event) do
Process.send_after(self(), event, @update_characters_timeout)
pid when is_pid(pid) ->
GenServer.stop(pid, :normal)
end
state
CharactersImpl.update_characters(map_id)
end
defp init_map_cache(%__MODULE__{map_id: map_id} = state) do
def handle_event({:update_tracked_characters, map_id} = event) do
Process.send_after(
self(),
event,
@update_tracked_characters_timeout
)
CharactersImpl.update_tracked_characters(map_id)
end
def handle_event({:update_presence, map_id} = event) do
Process.send_after(self(), event, @update_presence_timeout)
update_presence(map_id)
end
def handle_event({:map_acl_updated, map_id, added_acls, removed_acls}) do
AclsImpl.handle_map_acl_updated(map_id, added_acls, removed_acls)
end
def handle_event({:acl_updated, %{acl_id: acl_id}}) do
# Find all maps that use this ACL
case Ash.read(
WandererApp.Api.MapAccessList
|> Ash.Query.for_read(:read_by_acl, %{acl_id: acl_id})
) do
{:ok, map_acls} ->
Logger.debug(fn ->
"Found #{length(map_acls)} maps using ACL #{acl_id}: #{inspect(Enum.map(map_acls, & &1.map_id))}"
end)
# Broadcast to each map
Enum.each(map_acls, fn %{map_id: map_id} ->
Logger.debug(fn -> "Broadcasting acl_updated to map #{map_id}" end)
AclsImpl.handle_acl_updated(map_id, acl_id)
end)
Logger.debug(fn ->
"Successfully broadcast acl_updated event to #{length(map_acls)} maps"
end)
{:error, error} ->
Logger.error("Failed to find maps for ACL #{acl_id}: #{inspect(error)}")
:ok
end
end
def handle_event({:acl_deleted, %{acl_id: acl_id}}) do
case Ash.read(
WandererApp.Api.MapAccessList
|> Ash.Query.for_read(:read_by_acl, %{acl_id: acl_id})
) do
{:ok, map_acls} ->
Logger.debug(fn ->
"Found #{length(map_acls)} maps using ACL #{acl_id}: #{inspect(Enum.map(map_acls, & &1.map_id))}"
end)
# Broadcast to each map
Enum.each(map_acls, fn %{map_id: map_id} ->
Logger.debug(fn -> "Broadcasting acl_deleted to map #{map_id}" end)
AclsImpl.handle_acl_deleted(map_id, acl_id)
end)
Logger.debug(fn ->
"Successfully broadcast acl_deleted event to #{length(map_acls)} maps"
end)
{:error, error} ->
Logger.error("Failed to find maps for ACL #{acl_id}: #{inspect(error)}")
:ok
end
end
def handle_event({:subscription_settings_updated, map_id}) do
{:ok, subscription_settings} =
WandererApp.Map.SubscriptionManager.get_active_map_subscription(map_id)
update_subscription_settings(map_id, subscription_settings)
end
def handle_event({:options_updated, map_id, options}) do
update_options(map_id, options)
end
def handle_event({ref, _result}) when is_reference(ref) do
Process.demonitor(ref, [:flush])
end
def handle_event(msg) do
Logger.warning("Unhandled event: #{inspect(msg)}")
end
def update_subscription_settings(map_id, subscription_settings) do
{:ok, %{map: map}} = WandererApp.Map.get_map_state(map_id)
WandererApp.Map.update_map_state(map_id, %{
map: map |> WandererApp.Map.update_subscription_settings!(subscription_settings)
})
end
def update_options(map_id, options) do
{:ok, %{map: map}} = WandererApp.Map.get_map_state(map_id)
WandererApp.Map.update_map_state(map_id, %{
map: map |> WandererApp.Map.update_options!(options),
map_opts: map_options(options)
})
end
def broadcast!(map_id, event, payload \\ nil) do
if can_broadcast?(map_id) do
@pubsub_client.broadcast!(WandererApp.PubSub, map_id, %{
event: event,
payload: payload
})
end
:ok
end
defp can_broadcast?(map_id),
do:
not WandererApp.Cache.lookup!("map_#{map_id}:importing", false) and
WandererApp.Cache.lookup!("map_#{map_id}:started", false)
def get_update_map(update, attributes),
do:
{:ok,
Enum.reduce(attributes, Map.new(), fn attribute, map ->
map |> Map.put_new(attribute, get_in(update, [Access.key(attribute)]))
end)}
defp map_options(options) do
[
layout: options |> Map.get("layout", "left_to_right"),
store_custom_labels:
options |> Map.get("store_custom_labels", "false") |> String.to_existing_atom(),
show_linked_signature_id:
options |> Map.get("show_linked_signature_id", "false") |> String.to_existing_atom(),
show_linked_signature_id_temp_name:
options
|> Map.get("show_linked_signature_id_temp_name", "false")
|> String.to_existing_atom(),
show_temp_system_name:
options |> Map.get("show_temp_system_name", "false") |> String.to_existing_atom(),
restrict_offline_showing:
options |> Map.get("restrict_offline_showing", "false") |> String.to_existing_atom(),
allowed_copy_for: options |> Map.get("allowed_copy_for", "admin"),
allowed_paste_for: options |> Map.get("allowed_paste_for", "member")
]
end
defp init_map_cache(map_id) do
case WandererApp.Api.MapState.by_map_id(map_id) do
{:ok,
%{
@@ -450,10 +438,8 @@ defmodule WandererApp.Map.Server.Impl do
ConnectionsImpl.init_eol_cache(map_id, connections_eol_time)
ConnectionsImpl.init_start_cache(map_id, connections_start_time)
state
_ ->
state
:ok
end
end
@@ -475,54 +461,70 @@ defmodule WandererApp.Map.Server.Impl do
|> WandererApp.Map.add_connections!(connections)
|> WandererApp.Map.add_characters!(characters)
SystemsImpl.init_map_systems(map_id, systems)
character_ids =
map_id
|> WandererApp.Map.get_map!()
|> Map.get(:characters, [])
init_map_cache(map_id)
WandererApp.Cache.insert("map_#{map_id}:invalidate_character_ids", character_ids)
%{state | map: map, map_opts: map_options(options)}
end
def maybe_import_systems(state, %{"systems" => systems} = _settings, user_id, character_id) do
state =
systems
|> Enum.reduce(state, fn %{
"description" => description,
"id" => id,
"labels" => labels,
"locked" => locked,
"name" => name,
"position" => %{"x" => x, "y" => y},
"status" => status,
"tag" => tag,
"temporary_name" => temporary_name
} = _system,
acc ->
acc
|> add_system(
%{
solar_system_id: id |> String.to_integer(),
coordinates: %{"x" => round(x), "y" => round(y)}
},
user_id,
character_id
)
|> update_system_name(%{solar_system_id: id |> String.to_integer(), name: name})
|> update_system_description(%{
solar_system_id: id |> String.to_integer(),
description: description
})
|> update_system_status(%{solar_system_id: id |> String.to_integer(), status: status})
|> update_system_tag(%{solar_system_id: id |> String.to_integer(), tag: tag})
|> update_system_temporary_name(%{
solar_system_id: id |> String.to_integer(),
temporary_name: temporary_name
})
|> update_system_locked(%{solar_system_id: id |> String.to_integer(), locked: locked})
|> update_system_labels(%{solar_system_id: id |> String.to_integer(), labels: labels})
end)
def maybe_import_systems(
map_id,
%{"systems" => systems} = _settings,
user_id,
character_id
) do
systems
|> Enum.each(fn %{
"description" => description,
"id" => id,
"labels" => labels,
"locked" => locked,
"name" => name,
"position" => %{"x" => x, "y" => y},
"status" => status,
"tag" => tag,
"temporary_name" => temporary_name
} ->
solar_system_id = id |> String.to_integer()
add_system(
map_id,
%{
solar_system_id: solar_system_id,
coordinates: %{"x" => round(x), "y" => round(y)}
},
user_id,
character_id
)
update_system_name(map_id, %{solar_system_id: solar_system_id, name: name})
update_system_description(map_id, %{
solar_system_id: solar_system_id,
description: description
})
update_system_status(map_id, %{solar_system_id: solar_system_id, status: status})
update_system_tag(map_id, %{solar_system_id: solar_system_id, tag: tag})
update_system_temporary_name(map_id, %{
solar_system_id: solar_system_id,
temporary_name: temporary_name
})
update_system_locked(map_id, %{solar_system_id: solar_system_id, locked: locked})
update_system_labels(map_id, %{solar_system_id: solar_system_id, labels: labels})
end)
removed_system_ids =
systems
@@ -530,39 +532,39 @@ defmodule WandererApp.Map.Server.Impl do
|> Enum.map(fn system -> system["id"] end)
|> Enum.map(&String.to_integer/1)
state
|> delete_systems(removed_system_ids, user_id, character_id)
delete_systems(map_id, removed_system_ids, user_id, character_id)
end
def maybe_import_connections(state, %{"connections" => connections} = _settings, _user_id) do
def maybe_import_connections(map_id, %{"connections" => connections} = _settings, _user_id) do
connections
|> Enum.reduce(state, fn %{
"source" => source,
"target" => target,
"mass_status" => mass_status,
"time_status" => time_status,
"ship_size_type" => ship_size_type
} = _system,
acc ->
|> Enum.each(fn %{
"source" => source,
"target" => target,
"mass_status" => mass_status,
"time_status" => time_status,
"ship_size_type" => ship_size_type
} ->
source_id = source |> String.to_integer()
target_id = target |> String.to_integer()
acc
|> add_connection(%{
add_connection(map_id, %{
solar_system_source_id: source_id,
solar_system_target_id: target_id
})
|> update_connection_time_status(%{
update_connection_time_status(map_id, %{
solar_system_source_id: source_id,
solar_system_target_id: target_id,
time_status: time_status
})
|> update_connection_mass_status(%{
update_connection_mass_status(map_id, %{
solar_system_source_id: source_id,
solar_system_target_id: target_id,
mass_status: mass_status
})
|> update_connection_ship_size_type(%{
update_connection_ship_size_type(map_id, %{
solar_system_source_id: source_id,
solar_system_target_id: target_id,
ship_size_type: ship_size_type
@@ -570,13 +572,12 @@ defmodule WandererApp.Map.Server.Impl do
end)
end
def maybe_import_hubs(state, %{"hubs" => hubs} = _settings, _user_id) do
def maybe_import_hubs(map_id, %{"hubs" => hubs} = _settings, _user_id) do
hubs
|> Enum.reduce(state, fn hub, acc ->
|> Enum.each(fn hub ->
solar_system_id = hub |> String.to_integer()
acc
|> add_hub(%{solar_system_id: solar_system_id})
add_hub(map_id, %{solar_system_id: solar_system_id})
end)
end
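The server no longer threads a %__MODULE__{} struct through its handlers: timer messages carry the map id, handle_event/1 takes only the message, and the mutable pieces (map, map_opts) live in the shared map state updated via WandererApp.Map.update_map_state/2. A hypothetical piece of GenServer glue illustrating why the state argument could be dropped; the real map server module is not part of this diff:

@impl true
def handle_info(message, state) do
  # periodic and pubsub messages are self-describing, so the GenServer
  # state is no longer needed to route them
  WandererApp.Map.Server.Impl.handle_event(message)
  {:noreply, state}
end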


@@ -8,14 +8,14 @@ defmodule WandererApp.Map.Server.PingsImpl do
@ping_auto_expire_timeout :timer.minutes(15)
def add_ping(
%{map_id: map_id} = state,
map_id,
%{
solar_system_id: solar_system_id,
type: type,
message: message,
character_id: character_id,
user_id: user_id
} = ping_info
} = _ping_info
) do
with {:ok, character} <- WandererApp.Character.get_character(character_id),
system <-
@@ -57,23 +57,20 @@ defmodule WandererApp.Map.Server.PingsImpl do
map_id: map_id,
solar_system_id: "#{solar_system_id}"
})
state
else
error ->
Logger.error("Failed to add_ping: #{inspect(error, pretty: true)}")
state
end
end
def cancel_ping(
%{map_id: map_id} = state,
map_id,
%{
id: ping_id,
character_id: character_id,
user_id: user_id,
type: type
} = ping_info
} = _ping_info
) do
with {:ok, character} <- WandererApp.Character.get_character(character_id),
{:ok,
@@ -105,12 +102,9 @@ defmodule WandererApp.Map.Server.PingsImpl do
map_id: map_id,
solar_system_id: solar_system_id
})
state
else
error ->
Logger.error("Failed to cancel_ping: #{inspect(error, pretty: true)}")
state
end
end
end
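add_ping/2 and cancel_ping/2 now take the map id directly and no longer return the server state, matching the updated delegates in Impl. An illustrative call through the delegate, with the payload keys taken from the pattern match above and the message text as an example value:

Impl.add_ping(map_id, %{
  solar_system_id: solar_system_id,
  type: type,
  message: "Fleet forming here",
  character_id: character_id,
  user_id: user_id
})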


@@ -13,7 +13,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
Public entrypoint for updating signatures on a map system.
"""
def update_signatures(
%{map_id: map_id} = state,
map_id,
%{
solar_system_id: system_solar_id,
character_id: char_id,
@@ -31,7 +31,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
solar_system_id: system_solar_id
}) do
do_update_signatures(
state,
map_id,
system,
char_id,
user_id,
@@ -43,14 +43,13 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
else
error ->
Logger.warning("Skipping signature update: #{inspect(error)}")
state
end
end
def update_signatures(state, _), do: state
def update_signatures(_map_id, _), do: :ok
defp do_update_signatures(
state,
map_id,
system,
character_id,
user_id,
@@ -86,14 +85,14 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
# 1. Removals
existing_current
|> Enum.filter(&(&1.eve_id in removed_ids))
|> Enum.each(&remove_signature(&1, state, system, delete_conn?))
|> Enum.each(&remove_signature(map_id, &1, system, delete_conn?))
# 2. Updates
existing_current
|> Enum.filter(&(&1.eve_id in updated_ids))
|> Enum.each(fn existing ->
update = Enum.find(updated_sigs, &(&1.eve_id == existing.eve_id))
apply_update_signature(state, existing, update)
apply_update_signature(map_id, existing, update)
end)
# 3. Additions & restorations
@@ -110,23 +109,6 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
nil ->
MapSystemSignature.create!(sig)
%MapSystemSignature{deleted: true} = deleted_sig ->
MapSystemSignature.update!(
deleted_sig,
Map.take(sig, [
:name,
:temporary_name,
:description,
:kind,
:group,
:type,
:character_eve_id,
:custom_info,
:deleted,
:update_forced_at
])
)
_ ->
:noop
end
@@ -136,7 +118,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
if added_ids != [] do
track_activity(
:signatures_added,
state.map_id,
map_id,
system.solar_system_id,
user_id,
character_id,
@@ -147,7 +129,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
if removed_ids != [] do
track_activity(
:signatures_removed,
state.map_id,
map_id,
system.solar_system_id,
user_id,
character_id,
@@ -156,12 +138,12 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
end
# 5. Broadcast to any live subscribers
Impl.broadcast!(state.map_id, :signatures_updated, system.solar_system_id)
Impl.broadcast!(map_id, :signatures_updated, system.solar_system_id)
# ADDITIVE: Also broadcast to external event system (webhooks/WebSocket)
# Send individual signature events
Enum.each(added_sigs, fn sig ->
WandererApp.ExternalEvents.broadcast(state.map_id, :signature_added, %{
WandererApp.ExternalEvents.broadcast(map_id, :signature_added, %{
solar_system_id: system.solar_system_id,
signature_id: sig.eve_id,
name: sig.name,
@@ -172,27 +154,25 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
end)
Enum.each(removed_ids, fn sig_eve_id ->
WandererApp.ExternalEvents.broadcast(state.map_id, :signature_removed, %{
WandererApp.ExternalEvents.broadcast(map_id, :signature_removed, %{
solar_system_id: system.solar_system_id,
signature_id: sig_eve_id
})
end)
# Also send the summary event for backwards compatibility
WandererApp.ExternalEvents.broadcast(state.map_id, :signatures_updated, %{
WandererApp.ExternalEvents.broadcast(map_id, :signatures_updated, %{
solar_system_id: system.solar_system_id,
added_count: length(added_ids),
updated_count: length(updated_ids),
removed_count: length(removed_ids)
})
state
end
defp remove_signature(sig, state, system, delete_conn?) do
defp remove_signature(map_id, sig, system, delete_conn?) do
# optionally remove the linked connection
if delete_conn? && sig.linked_system_id do
ConnectionsImpl.delete_connection(state, %{
ConnectionsImpl.delete_connection(map_id, %{
solar_system_source_id: system.solar_system_id,
solar_system_target_id: sig.linked_system_id
})
@@ -200,18 +180,18 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
# clear any linked_sig_eve_id on the target system
if sig.linked_system_id do
SystemsImpl.update_system_linked_sig_eve_id(state, %{
SystemsImpl.update_system_linked_sig_eve_id(map_id, %{
solar_system_id: sig.linked_system_id,
linked_sig_eve_id: nil
})
end
# mark as deleted
MapSystemSignature.update!(sig, %{deleted: true})
sig
|> MapSystemSignature.destroy!()
end
def apply_update_signature(
state,
map_id,
%MapSystemSignature{} = existing,
update_params
)
@@ -221,8 +201,8 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
update_params |> Map.put(:update_forced_at, DateTime.utc_now())
) do
{:ok, updated} ->
maybe_update_connection_time_status(state, existing, updated)
maybe_update_connection_mass_status(state, existing, updated)
maybe_update_connection_time_status(map_id, existing, updated)
maybe_update_connection_mass_status(map_id, existing, updated)
:ok
{:error, reason} ->
@@ -231,7 +211,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
end
defp maybe_update_connection_time_status(
state,
map_id,
%{custom_info: old_custom_info} = old_sig,
%{custom_info: new_custom_info, system_id: system_id, linked_system_id: linked_system_id} =
updated_sig
@@ -243,7 +223,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
if old_time_status != new_time_status do
{:ok, source_system} = MapSystem.by_id(system_id)
ConnectionsImpl.update_connection_time_status(state, %{
ConnectionsImpl.update_connection_time_status(map_id, %{
solar_system_source_id: source_system.solar_system_id,
solar_system_target_id: linked_system_id,
time_status: new_time_status
@@ -251,10 +231,10 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
end
end
defp maybe_update_connection_time_status(_state, _old_sig, _updated_sig), do: :ok
defp maybe_update_connection_time_status(_map_id, _old_sig, _updated_sig), do: :ok
defp maybe_update_connection_mass_status(
state,
map_id,
%{type: old_type} = old_sig,
%{type: new_type, system_id: system_id, linked_system_id: linked_system_id} =
updated_sig
@@ -265,7 +245,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
signature_ship_size_type = EVEUtil.get_wh_size(new_type)
if not is_nil(signature_ship_size_type) do
ConnectionsImpl.update_connection_ship_size_type(state, %{
ConnectionsImpl.update_connection_ship_size_type(map_id, %{
solar_system_source_id: source_system.solar_system_id,
solar_system_target_id: linked_system_id,
ship_size_type: signature_ship_size_type
@@ -274,7 +254,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
end
end
defp maybe_update_connection_mass_status(_state, _old_sig, _updated_sig), do: :ok
defp maybe_update_connection_mass_status(_map_id, _old_sig, _updated_sig), do: :ok
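update_signatures/2 is likewise keyed by map_id now, logging and skipping on error and returning :ok from its fallback clause. A call-shape sketch only; signature_update_payload is a placeholder for the full update map, which this hunk elides:

# Per the pattern match above, the payload carries at least :solar_system_id,
# :character_id and :user_id, plus the signature lists not shown in this hunk.
WandererApp.Map.Server.SignaturesImpl.update_signatures(map_id, signature_update_payload)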
defp track_activity(event, map_id, solar_system_id, user_id, character_id, signatures) do
ActivityTracker.track_map_event(event, %{

View File

@@ -1,22 +0,0 @@
defmodule WandererApp.Map.ServerSupervisor do
@moduledoc false
use Supervisor, restart: :transient
alias WandererApp.Map.Server
def start_link(args), do: Supervisor.start_link(__MODULE__, args)
@impl true
def init(args) do
children = [
{Server, args},
{DDRT.DynamicRtree,
[
conf: [name: "rtree_#{args[:map_id]}", width: 150, verbose: false, seed: 0],
name: Module.concat([args[:map_id], DDRT.DynamicRtree])
]}
]
Supervisor.init(children, strategy: :one_for_one, auto_shutdown: :any_significant)
end
end
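The deleted supervisor above was where the per-map DDRT.DynamicRtree got its registered name, "rtree_#{args[:map_id]}"; the rewritten impl modules now derive the same name inline from map_id. A tiny helper sketch of that convention; the helper itself is hypothetical, and the R-tree process is assumed to be supervised elsewhere under the same name:

# Hypothetical convenience helper; the "rtree_#{map_id}" format matches the names
# used by SystemsImpl below and by the removed supervisor's DDRT configuration.
defp rtree_name(map_id), do: "rtree_#{map_id}"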

View File

@@ -20,14 +20,14 @@ defmodule WandererApp.Map.Server.SystemsImpl do
end)
end
def init_map_systems(state, [] = _systems), do: state
def init_map_systems(_map_id, [] = _systems), do: :ok
def init_map_systems(%{map_id: map_id, rtree_name: rtree_name} = state, systems) do
def init_map_systems(map_id, systems) do
systems
|> Enum.each(fn %{id: system_id, solar_system_id: solar_system_id} = system ->
@ddrt.insert(
{solar_system_id, WandererApp.Map.PositionCalculator.get_system_bounding_rect(system)},
rtree_name
"rtree_#{map_id}"
)
WandererApp.Cache.put(
@@ -36,29 +36,67 @@ defmodule WandererApp.Map.Server.SystemsImpl do
ttl: @system_inactive_timeout
)
end)
state
end
def add_system(
%{map_id: map_id} = state,
map_id,
%{
solar_system_id: solar_system_id
} = system_info,
user_id,
character_id
character_id,
opts
) do
case map_id |> WandererApp.Map.check_location(%{solar_system_id: solar_system_id}) do
map_id
|> WandererApp.Map.check_location(%{solar_system_id: solar_system_id})
|> case do
{:ok, _location} ->
state |> _add_system(system_info, user_id, character_id)
do_add_system(map_id, system_info, user_id, character_id)
{:error, :already_exists} ->
state
:ok
end
end
def paste_systems(
map_id,
systems,
user_id,
character_id,
opts
) do
systems
|> Enum.each(fn %{
"id" => solar_system_id,
"position" => coordinates
} = system ->
solar_system_id = solar_system_id |> String.to_integer()
case map_id |> WandererApp.Map.check_location(%{solar_system_id: solar_system_id}) do
{:ok, _location} ->
if opts |> Keyword.get(:add_not_existing, true) do
do_add_system(
map_id,
%{solar_system_id: solar_system_id, coordinates: coordinates, extra_info: system},
user_id,
character_id
)
else
:ok
end
{:error, :already_exists} ->
if opts |> Keyword.get(:update_existing, false) do
:ok
else
:ok
end
end
end)
end
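paste_systems/5 is new in this diff: it walks string-keyed system maps, converts each id, and adds any system not already on the map, while the :update_existing branch is currently a no-op. A hedged usage sketch; the literal values are illustrative, the "x"/"y" position shape is inferred from do_add_system/4 further down, and the extra metadata keys are included (as nils) because maybe_update_extra_info/2 pattern-matches on all six of them:

# Hypothetical call with illustrative values.
WandererApp.Map.Server.SystemsImpl.paste_systems(
  map_id,
  [
    %{
      "id" => "30000142",
      "position" => %{"x" => 100, "y" => 200},
      # nil metadata values are skipped by the maybe_update_* helpers below
      "description" => nil,
      "labels" => nil,
      "name" => nil,
      "status" => nil,
      "tag" => nil,
      "temporary_name" => nil
    }
  ],
  user_id,
  character_id,
  add_not_existing: true
)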
def add_system_comment(
%{map_id: map_id} = state,
map_id,
%{
solar_system_id: solar_system_id,
text: text
@@ -86,12 +124,10 @@ defmodule WandererApp.Map.Server.SystemsImpl do
solar_system_id: solar_system_id,
comment: comment
})
state
end
def remove_system_comment(
%{map_id: map_id} = state,
map_id,
comment_id,
user_id,
character_id
@@ -105,11 +141,9 @@ defmodule WandererApp.Map.Server.SystemsImpl do
solar_system_id: system.solar_system_id,
comment_id: comment_id
})
state
end
def cleanup_systems(%{map_id: map_id} = state) do
def cleanup_systems(map_id) do
expired_systems =
map_id
|> WandererApp.Map.list_systems!()
@@ -144,71 +178,66 @@ defmodule WandererApp.Map.Server.SystemsImpl do
end)
|> Enum.map(& &1.solar_system_id)
case expired_systems |> Enum.empty?() do
false ->
state |> delete_systems(expired_systems, nil, nil)
_ ->
state
if expired_systems |> Enum.empty?() |> Kernel.not() do
delete_systems(map_id, expired_systems, nil, nil)
end
end
def update_system_name(
state,
map_id,
update
),
do: state |> update_system(:update_name, [:name], update)
do: update_system(map_id, :update_name, [:name], update)
def update_system_description(
state,
map_id,
update
),
do: state |> update_system(:update_description, [:description], update)
do: update_system(map_id, :update_description, [:description], update)
def update_system_status(
state,
map_id,
update
),
do: state |> update_system(:update_status, [:status], update)
do: update_system(map_id, :update_status, [:status], update)
def update_system_tag(
state,
map_id,
update
),
do: state |> update_system(:update_tag, [:tag], update)
do: update_system(map_id, :update_tag, [:tag], update)
def update_system_temporary_name(
state,
map_id,
update
) do
state |> update_system(:update_temporary_name, [:temporary_name], update)
end
),
do: update_system(map_id, :update_temporary_name, [:temporary_name], update)
def update_system_locked(
state,
map_id,
update
),
do: state |> update_system(:update_locked, [:locked], update)
do: update_system(map_id, :update_locked, [:locked], update)
def update_system_labels(
state,
map_id,
update
),
do: state |> update_system(:update_labels, [:labels], update)
do: update_system(map_id, :update_labels, [:labels], update)
def update_system_linked_sig_eve_id(
state,
map_id,
update
),
do: state |> update_system(:update_linked_sig_eve_id, [:linked_sig_eve_id], update)
do: update_system(map_id, :update_linked_sig_eve_id, [:linked_sig_eve_id], update)
def update_system_position(
%{rtree_name: rtree_name} = state,
map_id,
update
),
do:
state
|> update_system(
update_system(
map_id,
:update_position,
[:position_x, :position_y],
update,
@@ -216,13 +245,13 @@ defmodule WandererApp.Map.Server.SystemsImpl do
@ddrt.update(
updated_system.solar_system_id,
WandererApp.Map.PositionCalculator.get_system_bounding_rect(updated_system),
rtree_name
"rtree_#{map_id}"
)
end
)
def add_hub(
%{map_id: map_id} = state,
map_id,
hub_info
) do
with :ok <- WandererApp.Map.add_hub(map_id, hub_info),
@@ -230,16 +259,15 @@ defmodule WandererApp.Map.Server.SystemsImpl do
{:ok, _} <-
WandererApp.MapRepo.update_hubs(map_id, hubs) do
Impl.broadcast!(map_id, :update_map, %{hubs: hubs})
state
else
error ->
Logger.error("Failed to add hub: #{inspect(error, pretty: true)}")
state
:ok
end
end
def remove_hub(
%{map_id: map_id} = state,
map_id,
hub_info
) do
with :ok <- WandererApp.Map.remove_hub(map_id, hub_info),
@@ -247,16 +275,15 @@ defmodule WandererApp.Map.Server.SystemsImpl do
{:ok, _} <-
WandererApp.MapRepo.update_hubs(map_id, hubs) do
Impl.broadcast!(map_id, :update_map, %{hubs: hubs})
state
else
error ->
Logger.error("Failed to remove hub: #{inspect(error, pretty: true)}")
state
:ok
end
end
def delete_systems(
%{map_id: map_id, rtree_name: rtree_name} = state,
map_id,
removed_ids,
user_id,
character_id
@@ -274,9 +301,9 @@ defmodule WandererApp.Map.Server.SystemsImpl do
map_id
|> WandererApp.MapSystemRepo.remove_from_map(solar_system_id)
|> case do
{:ok, _} ->
{:ok, result} ->
:ok = WandererApp.Map.remove_system(map_id, solar_system_id)
@ddrt.delete([solar_system_id], rtree_name)
@ddrt.delete([solar_system_id], "rtree_#{map_id}")
Impl.broadcast!(map_id, :systems_removed, [solar_system_id])
# ADDITIVE: Also broadcast to external event system (webhooks/WebSocket)
@@ -304,7 +331,7 @@ defmodule WandererApp.Map.Server.SystemsImpl do
end
try do
cleanup_linked_system_sig_eve_ids(state, [system_id])
cleanup_linked_system_sig_eve_ids(map_id, [system_id])
rescue
e ->
Logger.error("Failed to cleanup system linked sig eve ids: #{inspect(e)}")
@@ -317,8 +344,6 @@ defmodule WandererApp.Map.Server.SystemsImpl do
:ok
end
end)
state
end
defp track_systems_removed(map_id, user_id, character_id, removed_solar_system_ids)
@@ -369,8 +394,13 @@ defmodule WandererApp.Map.Server.SystemsImpl do
|> Enum.uniq_by(& &1.system_id)
|> Enum.each(fn s ->
try do
{:ok, %{system: system}} = s |> Ash.load([:system])
{:ok, %{eve_id: eve_id, system: system}} = s |> Ash.load([:system])
:ok = Ash.destroy!(s)
Logger.warning(
"[cleanup_linked_signatures] for system #{system.solar_system_id}: #{inspect(eve_id)}"
)
Impl.broadcast!(map_id, :signatures_updated, system.solar_system_id)
rescue
e ->
@@ -379,7 +409,7 @@ defmodule WandererApp.Map.Server.SystemsImpl do
end)
end
defp cleanup_linked_system_sig_eve_ids(state, system_ids_to_remove) do
defp cleanup_linked_system_sig_eve_ids(map_id, system_ids_to_remove) do
linked_system_ids =
system_ids_to_remove
|> Enum.map(fn system_id ->
@@ -392,17 +422,19 @@ defmodule WandererApp.Map.Server.SystemsImpl do
linked_system_ids
|> Enum.each(fn linked_system_id ->
update_system_linked_sig_eve_id(state, %{
update_system(map_id, :update_linked_sig_eve_id, [:linked_sig_eve_id], %{
solar_system_id: linked_system_id,
linked_sig_eve_id: nil
})
end)
end
def maybe_add_system(map_id, location, old_location, rtree_name, map_opts)
def maybe_add_system(map_id, location, old_location, map_opts)
when not is_nil(location) do
case WandererApp.Map.check_location(map_id, location) do
{:ok, location} ->
rtree_name = "rtree_#{map_id}"
{:ok, position} = calc_new_system_position(map_id, old_location, rtree_name, map_opts)
case WandererApp.MapSystemRepo.get_by_map_and_solar_system_id(
@@ -501,10 +533,10 @@ defmodule WandererApp.Map.Server.SystemsImpl do
end
end
def maybe_add_system(_map_id, _location, _old_location, _rtree_name, _map_opts), do: :ok
def maybe_add_system(_map_id, _location, _old_location, _map_opts), do: :ok
defp _add_system(
%{map_id: map_id, map_opts: map_opts, rtree_name: rtree_name} = state,
defp do_add_system(
map_id,
%{
solar_system_id: solar_system_id,
coordinates: coordinates
@@ -512,6 +544,10 @@ defmodule WandererApp.Map.Server.SystemsImpl do
user_id,
character_id
) do
extra_info = system_info |> Map.get(:extra_info)
rtree_name = "rtree_#{map_id}"
{:ok, %{map_opts: map_opts}} = WandererApp.Map.get_map_state(map_id)
%{"x" => x, "y" => y} =
coordinates
|> case do
@@ -558,6 +594,7 @@ defmodule WandererApp.Map.Server.SystemsImpl do
|> WandererApp.MapSystemRepo.cleanup_tags!()
|> WandererApp.MapSystemRepo.cleanup_temporary_name!()
|> WandererApp.MapSystemRepo.cleanup_linked_sig_eve_id!()
|> maybe_update_extra_info(extra_info)
|> WandererApp.MapSystemRepo.update_visible(%{visible: true})
end
@@ -583,7 +620,7 @@ defmodule WandererApp.Map.Server.SystemsImpl do
})
end
:ok = map_id |> WandererApp.Map.add_system(system)
:ok = WandererApp.Map.add_system(map_id, system)
WandererApp.Cache.put(
"map_#{map_id}:system_#{system.id}:last_activity",
@@ -595,7 +632,7 @@ defmodule WandererApp.Map.Server.SystemsImpl do
# ADDITIVE: Also broadcast to external event system (webhooks/WebSocket)
Logger.debug(fn ->
"SystemsImpl._add_system calling ExternalEvents.broadcast for map #{map_id}, system: #{solar_system_id}"
"SystemsImpl.do_add_system calling ExternalEvents.broadcast for map #{map_id}, system: #{solar_system_id}"
end)
WandererApp.ExternalEvents.broadcast(map_id, :add_system, %{
@@ -612,10 +649,129 @@ defmodule WandererApp.Map.Server.SystemsImpl do
map_id: map_id,
solar_system_id: solar_system_id
})
state
end
defp maybe_update_extra_info(system, nil), do: system
defp maybe_update_extra_info(
system,
%{
"description" => description,
"labels" => labels,
"name" => name,
"status" => status,
"tag" => tag,
"temporary_name" => temporary_name
}
) do
system
|> maybe_update_name(name)
|> maybe_update_description(description)
|> maybe_update_labels(labels)
|> maybe_update_status(status)
|> maybe_update_tag(tag)
|> maybe_update_temporary_name(temporary_name)
end
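Each maybe_update_* helper below only touches the repo when the incoming value is non-nil and differs from what the system already has; otherwise the system passes through unchanged. A quick illustration with hypothetical values:

# Illustrative outcomes only (hypothetical system and values):
# maybe_update_name(system, nil)       -> system unchanged (nil is skipped)
# maybe_update_name(system, "Jita")    -> system unchanged when the name is already "Jita"
# maybe_update_name(system, "Staging") -> MapSystemRepo.update_name/2 is called and the
#                                         updated system flows on down the pipeline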
defp maybe_update_description(
%{description: old_description} = system,
description
)
when not is_nil(description) and old_description != description do
{:ok, updated_system} =
system
|> WandererApp.MapSystemRepo.update_description(%{description: description})
updated_system
end
defp maybe_update_description(system, _description), do: system
defp maybe_update_name(
%{name: old_name} = system,
name
)
when not is_nil(name) and old_name != name do
{:ok, updated_system} =
system
|> WandererApp.MapSystemRepo.update_name(%{name: name})
updated_system
end
defp maybe_update_name(system, _name), do: system
defp maybe_update_labels(
%{labels: old_labels} = system,
labels
)
when not is_nil(labels) and old_labels != labels do
{:ok, updated_system} =
system
|> WandererApp.MapSystemRepo.update_labels(%{labels: labels})
updated_system
end
defp maybe_update_labels(system, _labels), do: system
defp maybe_update_status(
%{status: old_status} = system,
status
)
when not is_nil(status) and old_status != status do
{:ok, updated_system} =
system
|> WandererApp.MapSystemRepo.update_status(%{status: status})
updated_system
end
defp maybe_update_status(system, _status), do: system
defp maybe_update_tag(
%{tag: old_tag} = system,
tag
)
when not is_nil(tag) and old_tag != tag do
{:ok, updated_system} =
system
|> WandererApp.MapSystemRepo.update_tag(%{tag: tag})
updated_system
end
defp maybe_update_tag(system, _tag), do: system
defp maybe_update_temporary_name(
%{temporary_name: old_temporary_name} = system,
temporary_name
)
when not is_nil(temporary_name) and old_temporary_name != temporary_name do
{:ok, updated_system} =
system
|> WandererApp.MapSystemRepo.update_temporary_name(%{temporary_name: temporary_name})
updated_system
end
defp maybe_update_temporary_name(system, _temporary_name),
do: system
defp calc_new_system_position(map_id, old_location, rtree_name, opts),
do:
{:ok,
@@ -624,7 +780,7 @@ defmodule WandererApp.Map.Server.SystemsImpl do
|> WandererApp.Map.PositionCalculator.get_new_system_position(rtree_name, opts)}
defp update_system(
%{map_id: map_id} = state,
map_id,
update_method,
attributes,
update,
@@ -648,12 +804,10 @@ defmodule WandererApp.Map.Server.SystemsImpl do
end
update_map_system_last_activity(map_id, updated_system)
state
else
error ->
Logger.error("Failed to update system: #{inspect(error, pretty: true)}")
state
:ok
end
end
@@ -673,13 +827,9 @@ defmodule WandererApp.Map.Server.SystemsImpl do
WandererApp.ExternalEvents.broadcast(map_id, :system_metadata_changed, %{
solar_system_id: updated_system.solar_system_id,
name: updated_system.name,
# ADD
temporary_name: updated_system.temporary_name,
# ADD
labels: updated_system.labels,
# ADD
description: updated_system.description,
# ADD
status: updated_system.status
})
end
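The :system_metadata_changed external event now carries temporary_name, labels, description and status alongside the system name. A hypothetical consumer sketch (not code from this repo); only the payload keys come from the broadcast above, and `require Logger` is assumed:

def handle_event(:system_metadata_changed, payload) do
  %{solar_system_id: solar_system_id, name: name, temporary_name: temporary_name} = payload
  Logger.info("system #{solar_system_id} metadata changed: #{temporary_name || name}")
  :ok
end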

View File

@@ -6,65 +6,6 @@ defmodule WandererApp.Maps do
import Ecto.Query
require Logger
@minimum_route_attrs [
:system_class,
:class_title,
:security,
:triglavian_invasion_status,
:solar_system_id,
:solar_system_name,
:region_name,
:is_shattered
]
def find_routes(map_id, hubs, origin, routes_settings, false) do
WandererApp.Esi.find_routes(
map_id,
origin,
hubs,
routes_settings
)
|> case do
{:ok, routes} ->
systems_static_data =
routes
|> Enum.map(fn route_info -> route_info.systems end)
|> List.flatten()
|> Enum.uniq()
|> Task.async_stream(
fn system_id ->
case WandererApp.CachedInfo.get_system_static_info(system_id) do
{:ok, nil} ->
nil
{:ok, system} ->
system |> Map.take(@minimum_route_attrs)
end
end,
max_concurrency: System.schedulers_online() * 4
)
|> Enum.map(fn {:ok, val} -> val end)
{:ok, %{routes: routes, systems_static_data: systems_static_data}}
error ->
{:ok, %{routes: [], systems_static_data: []}}
end
end
def find_routes(map_id, hubs, origin, routes_settings, true) do
origin = origin |> String.to_integer()
hubs = hubs |> Enum.map(&(&1 |> String.to_integer()))
routes =
hubs
|> Enum.map(fn hub ->
%{origin: origin, destination: hub, success: false, systems: [], has_connection: false}
end)
{:ok, %{routes: routes, systems_static_data: []}}
end
def get_available_maps() do
case WandererApp.Api.Map.available() do
{:ok, maps} -> {:ok, maps}

Some files were not shown because too many files have changed in this diff.