Compare commits

...

169 Commits

Author SHA1 Message Date
Dmitry Popov
2cb2dc526c Merge branch 'main' into develop
2025-11-30 18:51:58 +01:00
Dmitry Popov
f3c38ba62a fix(core): fixed tracking issues 2025-11-30 18:51:50 +01:00
CI
29473f2d3b chore: [skip ci] 2025-11-30 10:00:35 +00:00
CI
48654250e8 chore: release version v1.89.0 2025-11-30 10:00:35 +00:00
Aleksei Chichenkov
7aa24245b6 Merge pull request #564 from wanderer-industries/sig-panel
Sig panel
2025-11-30 13:00:08 +03:00
DanSylvest
6070d74684 feat: removed unnecessary command 2025-11-30 12:57:14 +03:00
Dmitry Popov
c3de3c4e35 Merge branch 'main' into develop
2025-11-29 20:17:14 +01:00
CI
5c513f3e50 chore: [skip ci] 2025-11-29 19:13:30 +00:00
CI
5a980c6b89 chore: release version v1.88.13 2025-11-29 19:13:30 +00:00
Dmitry Popov
85c075c5a6 fix(core): fixed tracking issues 2025-11-29 20:12:54 +01:00
DanSylvest
f068afd16e Merge branch 'refs/heads/main' into sig-panel 2025-11-29 21:09:29 +03:00
DanSylvest
ac71b0af64 feat: rework wormholes reference 2025-11-29 21:07:48 +03:00
DanSylvest
5c515d6acd Merge remote-tracking branch 'leesolway/sig-panel-pr' into sig-panel
# Conflicts:
#	assets/js/hooks/Mapper/mapRootProvider/hooks/useMapRootHandlers.ts
2025-11-29 17:32:35 +03:00
Dmitry Popov
4585c3a94b feat(core): Added several map scopes support (Wh, Hi, Low, Null, Pochven) 2025-11-29 14:36:45 +01:00
CI
cf2c27c961 chore: [skip ci] 2025-11-29 11:35:52 +00:00
CI
f8e403025c chore: release version v1.88.12 2025-11-29 11:35:52 +00:00
Dmitry Popov
46a1898be9 Merge branch 'fixed-warinings' into develop
2025-11-29 12:35:36 +01:00
Dmitry Popov
25fa7c07bc fix(core): fixed c4 -> ns connections auto size issues 2025-11-29 12:35:22 +01:00
Dmitry Popov
e7219e0eec chore: fixed compile warnings 2025-11-29 12:34:28 +01:00
CI
45130fcffa chore: [skip ci] 2025-11-29 09:16:34 +00:00
CI
5f75d4440d chore: release version v1.88.11 2025-11-29 09:16:34 +00:00
Dmitry Popov
34210f63e3 Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-29 10:16:02 +01:00
Dmitry Popov
5f60fd4922 chore: fix tests workflow 2025-11-29 10:15:59 +01:00
CI
47ef7dda55 chore: [skip ci] 2025-11-29 00:15:17 +00:00
CI
0f3550a687 chore: release version v1.88.10 2025-11-29 00:15:17 +00:00
Dmitry Popov
8f242f3535 Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-29 01:14:21 +01:00
Dmitry Popov
1ce39e5394 fix(core): fixed pings cleanup 2025-11-29 01:14:17 +01:00
CI
cca7b912aa chore: [skip ci] 2025-11-29 00:11:43 +00:00
CI
d939e32500 chore: release version v1.88.9 2025-11-29 00:11:43 +00:00
Dmitry Popov
97ebe66db5 Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-29 01:11:04 +01:00
Dmitry Popov
f437fc4541 fix(core): fixed linked signatures cleanup 2025-11-29 01:11:01 +01:00
CI
6c65538450 chore: [skip ci] 2025-11-28 23:54:56 +00:00
CI
d566a74df4 chore: release version v1.88.8 2025-11-28 23:54:56 +00:00
Dmitry Popov
03e030a7d3 Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-29 00:54:10 +01:00
Dmitry Popov
e738e1da9c fix(core): fixed pings issue 2025-11-29 00:54:07 +01:00
CI
972b3a6cbe chore: [skip ci] 2025-11-28 23:43:54 +00:00
CI
96b4a3077e chore: release version v1.88.7 2025-11-28 23:43:53 +00:00
Dmitry Popov
6b308e8a1e Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-29 00:43:16 +01:00
Dmitry Popov
d0874cbc6f fix(core): fixed tracking issues 2025-11-29 00:43:13 +01:00
CI
f106a51bf5 chore: [skip ci] 2025-11-28 22:50:24 +00:00
CI
dc47dc5f81 chore: release version v1.88.6 2025-11-28 22:50:24 +00:00
Dmitry Popov
dc81cffeea Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-28 23:49:53 +01:00
Dmitry Popov
5766fcf4d8 fix(core): fixed tracking issues 2025-11-28 23:49:48 +01:00
CI
c57a3b2cea chore: [skip ci] 2025-11-28 00:28:34 +00:00
CI
0c1fa8e79b chore: release version v1.88.5 2025-11-28 00:28:34 +00:00
Dmitry Popov
36cc91915c Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-28 01:27:30 +01:00
Dmitry Popov
bb644fde31 fix(core): fixed env errors 2025-11-28 01:27:26 +01:00
CI
269b54d382 chore: [skip ci] 2025-11-27 11:17:21 +00:00
CI
a9115cc653 chore: release version v1.88.4 2025-11-27 11:17:21 +00:00
Dmitry Popov
eeea7aee8b Merge pull request #563 from guarzo/guarzo/killsdefense
fix: defensive check for undefined excluded systems
2025-11-27 15:16:52 +04:00
Guarzo
700089e381 fix: defensive check for undefined excluded systems 2025-11-27 04:12:59 +00:00
CI
932935557c chore: [skip ci] 2025-11-26 22:42:01 +00:00
CI
2890a76cf2 chore: release version v1.88.3 2025-11-26 22:42:01 +00:00
Dmitry Popov
4ac9b2e2b7 chore: Updated mix version 2025-11-26 23:41:24 +01:00
Dmitry Popov
f92436f3f0 Merge branch 'develop' 2025-11-26 22:37:38 +01:00
Dmitry Popov
22d97cc99d fix(core): fixed env issues
2025-11-26 22:18:02 +01:00
CI
305838573c chore: [skip ci] 2025-11-26 12:42:35 +00:00
CI
cc7ad81d2f chore: release version v1.88.1 2025-11-26 12:42:35 +00:00
Dmitry Popov
a694e57512 Merge pull request #561 from wanderer-industries/develop
Develop
2025-11-26 16:39:34 +04:00
Dmitry Popov
20be7fc67d Merge branch 'main' into develop
2025-11-26 12:49:49 +01:00
CI
54bfee414b chore: [skip ci] 2025-11-25 21:55:15 +00:00
CI
bcfa47bd94 chore: release version v1.88.0 2025-11-25 21:55:15 +00:00
Dmitry Popov
b784f68818 Merge pull request #560 from wanderer-industries/zkb-evewho-links
feat: Add zkb and eve who links for characters where it possibly was add
2025-11-26 01:54:50 +04:00
DanSylvest
344ee54018 feat: Add zkb and eve who links for characters where it possibly was add 2025-11-25 23:28:54 +03:00
Dmitry Popov
42e0f8f660 Merge branch 'main' into develop
2025-11-25 21:02:53 +01:00
CI
99b081887c chore: [skip ci] 2025-11-25 20:01:36 +00:00
CI
dee8d0dae8 chore: release version v1.87.0 2025-11-25 20:01:36 +00:00
Dmitry Popov
147dd5880e Merge pull request #559 from wanderer-industries/markdown-description
feat: Add support markdown for system description
2025-11-26 00:01:09 +04:00
DanSylvest
69991fff72 feat: Add support markdown for system description 2025-11-25 22:50:11 +03:00
Dmitry Popov
b881c84a52 Merge branch 'main' into develop 2025-11-25 20:11:53 +01:00
CI
de4e1f859f chore: [skip ci] 2025-11-25 19:07:31 +00:00
CI
8e2a19540c chore: release version v1.86.1 2025-11-25 19:07:31 +00:00
Dmitry Popov
855c596672 Merge pull request #558 from wanderer-industries/show-passage-direction
fix(Map): Add ability to see character passage direction in list of p…
2025-11-25 23:06:45 +04:00
DanSylvest
36d3c0937b chore: Add ability to see character passage direction in list of passages - remove unnecessary log 2025-11-25 22:04:12 +03:00
CI
d8fb1f78cf chore: [skip ci] 2025-11-25 19:03:24 +00:00
CI
98fa7e0235 chore: release version v1.86.0 2025-11-25 19:03:24 +00:00
Dmitry Popov
e4396fe2f9 Merge pull request #557 from guarzo/guarzo/filteractivity
feat: add date filter for character activity
2025-11-25 23:02:58 +04:00
DanSylvest
1c117903f6 fix(Map): Add ability to see character passage direction in list of passages 2025-11-25 21:51:01 +03:00
Dmitry Popov
9e9dc39200 Merge pull request #556 from guarzo/guarzo/ticker2andsse
fix: sse enable checkbox, and kills ticker
2025-11-25 15:33:05 +04:00
Dmitry Popov
abd7e4e15c chore: fix tests issues 2025-11-25 12:28:31 +01:00
Guarzo
88ed9cd39e feat: add date filter for character activity 2025-11-25 01:52:06 +00:00
Dmitry Popov
9666a8e78a chore: fix tests issues
2025-11-25 00:41:40 +01:00
Dmitry Popov
271a3d90f8 Merge branch 'main' into tests-fixes-2 2025-11-24 23:58:08 +01:00
Dmitry Popov
01e291daf4 chore: fix tests issues 2025-11-24 23:57:52 +01:00
CI
b7c0b45c15 chore: [skip ci] 2025-11-24 11:23:10 +00:00
CI
0874e3c51c chore: release version v1.85.5 2025-11-24 11:23:10 +00:00
Dmitry Popov
d39fa0363a Merge branch 'main' into tests-fixes-2 2025-11-24 12:22:57 +01:00
Dmitry Popov
369b08a9ae fix(core): fixed connections cleanup and rally points delete issues 2025-11-24 12:22:40 +01:00
Dmitry Popov
a872561b18 chore: fix tests issues 2025-11-24 11:33:08 +01:00
Dmitry Popov
857608f8ef chore: fix tests issues 2025-11-23 22:43:59 +01:00
Guarzo
7a74ae566b fix: sse enable checkbox, and kills ticker 2025-11-23 18:04:30 +00:00
Dmitry Popov
f2c8724763 Merge branch 'tests-fixes' into develop
2025-11-22 12:35:19 +01:00
Dmitry Popov
9a8dc4dbe5 Merge branch 'main' into tests-fixes 2025-11-22 12:29:22 +01:00
CI
01192dc637 chore: [skip ci] 2025-11-22 11:25:53 +00:00
CI
957cbcc561 chore: release version v1.85.4 2025-11-22 11:25:53 +00:00
Dmitry Popov
7eb6d093cf fix(core): invalidate map characters every 1 hour for any missing/revoked permissions 2025-11-22 12:25:24 +01:00
CI
a23e544a9f chore: [skip ci] 2025-11-22 09:42:11 +00:00
CI
845ea7a576 chore: release version v1.85.3 2025-11-22 09:42:11 +00:00
Dmitry Popov
ae8fbf30e4 fix(core): fixed connection time status issues. fixed character alliance update issues 2025-11-22 10:41:35 +01:00
Dmitry Popov
083e300ff5 chore: updated deps, fixed signatures and comments related issues
2025-11-21 14:23:44 +01:00
Dmitry Popov
ae4ebc0e36 Merge branch 'main' into develop
2025-11-20 12:05:40 +01:00
CI
3de385c902 chore: [skip ci] 2025-11-20 10:57:05 +00:00
CI
5f3d4dba37 chore: release version v1.85.2 2025-11-20 10:57:05 +00:00
Dmitry Popov
8acc7ddc25 fix(core): increased API pool limits 2025-11-20 11:56:31 +01:00
Dmitry Popov
c175f19142 Merge branch 'main' into develop 2025-11-20 11:35:38 +01:00
CI
ed6d25f3ea chore: [skip ci] 2025-11-20 10:35:09 +00:00
CI
ab07d1321d chore: release version v1.85.1 2025-11-20 10:35:09 +00:00
Dmitry Popov
a81e61bd70 Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-20 11:31:39 +01:00
Dmitry Popov
d2d33619c2 fix(core): increased API pool limits 2025-11-20 11:31:36 +01:00
CI
fa464110c6 chore: [skip ci] 2025-11-19 23:13:02 +00:00
CI
a5fa60e699 chore: release version v1.85.0 2025-11-19 23:13:02 +00:00
Dmitry Popov
6db994852f feat(core): added support for new ship types 2025-11-20 00:12:30 +01:00
CI
0a68676957 chore: [skip ci] 2025-11-19 21:06:28 +00:00
CI
9b82dd8f43 chore: release version v1.84.37 2025-11-19 21:06:28 +00:00
Dmitry Popov
aac2c33fd2 fix(auth): fixed character auth issues 2025-11-19 22:05:49 +01:00
Dmitry Popov
0ebc703774 Merge pull request #551 from guarzo/guarzo/minorfixes
fix: apiv1  token auth and doc updates
2025-11-19 21:31:02 +04:00
Guarzo
4615e20838 reset dev.exs 2025-11-19 17:27:40 +00:00
guarzo
f4d28f282a Merge branch 'develop' into guarzo/minorfixes 2025-11-19 11:03:43 -05:00
Dmitry Popov
1fe8ef17bd Merge branch 'main' into develop
2025-11-19 11:35:45 +01:00
CI
1665b65619 chore: [skip ci] 2025-11-19 10:33:10 +00:00
CI
e1a946bb1d chore: release version v1.84.36 2025-11-19 10:33:10 +00:00
Dmitry Popov
543ec7f071 fix: fixed duplicated map slugs 2025-11-19 11:32:35 +01:00
CI
bf40d2cb8d chore: [skip ci] 2025-11-19 09:44:24 +00:00
CI
48ac40ea55 chore: release version v1.84.35 2025-11-19 09:44:24 +00:00
Dmitry Popov
5a3f3c40fe Merge pull request #552 from guarzo/guarzo/structurefix
fix: structure search / paste issues
2025-11-19 13:43:52 +04:00
guarzo
d5bac311ff Merge branch 'main' into guarzo/structurefix 2025-11-18 22:24:30 -05:00
Guarzo
34a7c854ed fix: structure search / paste issues 2025-11-18 22:19:04 -05:00
Guarzo
6088afb38c openapi spec / api updates 2025-11-19 00:10:23 +00:00
Guarzo
5764c41d23 pr feedback 2025-11-18 20:46:06 +00:00
Guarzo
09444596ff fix: apiv1 token auth and structure fixes 2025-11-18 20:10:06 +00:00
Dmitry Popov
ee15d90f9c fix: removed ipv6 distribution env settings
2025-11-18 19:47:29 +01:00
Dmitry Popov
f5b014dae9 Merge branch 'main' into develop 2025-11-18 19:45:59 +01:00
CI
ebb6090be9 chore: [skip ci] 2025-11-18 11:47:15 +00:00
CI
7a4d31db60 chore: release version v1.84.34 2025-11-18 11:47:15 +00:00
Dmitry Popov
2acf9ed5dc Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-18 12:46:45 +01:00
Dmitry Popov
46df025200 fix(core): fixed character tracking issues 2025-11-18 12:46:42 +01:00
CI
43a363b5ab chore: [skip ci] 2025-11-18 11:00:34 +00:00
CI
03688387d8 chore: release version v1.84.33 2025-11-18 11:00:34 +00:00
Dmitry Popov
5060852918 Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-18 12:00:04 +01:00
Dmitry Popov
57381b9782 fix(core): fixed character tracking issues 2025-11-18 12:00:01 +01:00
CI
6014c60e13 chore: [skip ci] 2025-11-18 10:08:04 +00:00
CI
1b711d7b4b chore: release version v1.84.32 2025-11-18 10:08:04 +00:00
Dmitry Popov
f761ba9746 fix(core): fixed character tracking issues 2025-11-18 11:04:32 +01:00
CI
20a795c5b5 chore: [skip ci] 2025-11-17 13:41:22 +00:00
CI
0c80894c65 chore: release version v1.84.31 2025-11-17 13:41:22 +00:00
Dmitry Popov
21844f0550 fix(core): fixed connactions validation logic 2025-11-17 14:40:46 +01:00
CI
f7716ca45a chore: [skip ci] 2025-11-17 12:38:04 +00:00
CI
de74714c77 chore: release version v1.84.30 2025-11-17 12:38:04 +00:00
Dmitry Popov
4dfa83bd30 chore: fixed character updates issue 2025-11-17 13:37:30 +01:00
CI
cb4dba8dc2 chore: [skip ci] 2025-11-17 12:09:39 +00:00
CI
1d75b8f063 chore: release version v1.84.29 2025-11-17 12:09:39 +00:00
Dmitry Popov
2a42c4e6df Merge branch 'main' of github.com:wanderer-industries/wanderer 2025-11-17 13:09:08 +01:00
Dmitry Popov
0ee6160bcd chore: fixed MapEventRelay logs 2025-11-17 13:09:05 +01:00
CI
5826d2492b chore: [skip ci] 2025-11-17 11:53:30 +00:00
CI
a643e20247 chore: release version v1.84.28 2025-11-17 11:53:30 +00:00
Dmitry Popov
66dc680281 fix(core): fixed ACL updates 2025-11-17 12:52:59 +01:00
Dmitry Popov
5e0965ead4 fix(tests): updated tests 2025-11-17 12:52:11 +01:00
CI
46f46c745e chore: [skip ci] 2025-11-17 09:16:32 +00:00
Dmitry Popov
712379f4bb Merge branch 'main' into develop
2025-11-17 00:09:27 +01:00
Dmitry Popov
4c39c6fb39 fix(tests): updated tests 2025-11-17 00:09:10 +01:00
Dmitry Popov
a14e829f09 Merge pull request #547 from guarzo/guarzo/ssedisable
feature: disable sse by default
2025-11-15 19:36:29 +04:00
Guarzo
4002285882 test improvement 2025-11-15 12:46:03 +00:00
Guarzo
d732d15ef6 feature: disable sse by default 2025-11-15 12:46:03 +00:00
Dmitry Popov
7613ca78da Merge branch 'main' into develop
2025-11-14 14:44:39 +01:00
Dmitry Popov
c8631708b9 Merge branch 'main' into develop
2025-11-14 11:48:12 +01:00
Dmitry Popov
63ca473113 Merge pull request #502 from guarzo/guarzo/asyncfix
fix: resolve issue with async event processing
2025-11-12 15:10:08 +04:00
Lee Solway
be7bbe6872 Create a signature list panel + hook into live events 2025-10-04 12:04:02 +01:00
guarzo
7df8284124 fix: clean up id generation 2025-08-30 02:05:28 +00:00
guarzo
21ca630abd fix: resolve issue with async event processing 2025-08-30 02:05:28 +00:00
282 changed files with 14191 additions and 2829 deletions

View File

@@ -1,9 +1,9 @@
name: Build Docker Image
name: Build Develop
on:
push:
tags:
- '**'
branches:
- develop
env:
MIX_ENV: prod
@@ -18,12 +18,85 @@ permissions:
contents: write
jobs:
build:
name: 🛠 Build
runs-on: ubuntu-22.04
if: ${{ github.ref == 'refs/heads/develop' && github.event_name == 'push' }}
permissions:
checks: write
contents: write
packages: write
attestations: write
id-token: write
pull-requests: write
repository-projects: write
strategy:
matrix:
otp: ["27"]
elixir: ["1.17"]
node-version: ["18.x"]
outputs:
commit_hash: ${{ steps.set-commit-develop.outputs.commit_hash }}
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Setup Elixir
uses: erlef/setup-beam@v1
with:
otp-version: ${{matrix.otp}}
elixir-version: ${{matrix.elixir}}
# nix build would also work here because `todos` is the default package
- name: ⬇️ Checkout repo
uses: actions/checkout@v3
with:
ssh-key: "${{ secrets.COMMIT_KEY }}"
fetch-depth: 0
- name: 😅 Cache deps
id: cache-deps
uses: actions/cache@v4
env:
cache-name: cache-elixir-deps
with:
path: |
deps
key: ${{ runner.os }}-mix-${{ matrix.elixir }}-${{ matrix.otp }}-${{ hashFiles('**/mix.lock') }}
restore-keys: |
${{ runner.os }}-mix-${{ matrix.elixir }}-${{ matrix.otp }}-
- name: 😅 Cache compiled build
id: cache-build
uses: actions/cache@v4
env:
cache-name: cache-compiled-build
with:
path: |
_build
key: ${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}-${{ hashFiles( '**/lib/**/*.{ex,eex}', '**/config/*.exs', '**/mix.exs' ) }}
restore-keys: |
${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}-
${{ runner.os }}-build-
# Step: Download project dependencies. If unchanged, uses
# the cached version.
- name: 🌐 Install dependencies
run: mix deps.get --only "prod"
# Step: Compile the project treating any warnings as errors.
# Customize this step if a different behavior is desired.
- name: 🛠 Compiles without warnings
if: steps.cache-build.outputs.cache-hit != 'true'
run: mix compile
- name: Set commit hash for develop
id: set-commit-develop
run: |
echo "commit_hash=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
docker:
name: 🛠 Build Docker Images
needs: build
runs-on: ubuntu-22.04
outputs:
release-tag: ${{ steps.get-latest-tag.outputs.tag }}
release-notes: ${{ steps.get-content.outputs.string }}
permissions:
checks: write
contents: write
@@ -37,6 +110,7 @@ jobs:
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- name: Prepare
run: |
@@ -46,25 +120,9 @@ jobs:
- name: ⬇️ Checkout repo
uses: actions/checkout@v3
with:
ref: ${{ needs.build.outputs.commit_hash }}
fetch-depth: 0
- name: Get Release Tag
id: get-latest-tag
uses: "WyriHaximus/github-action-get-previous-tag@v1"
with:
fallback: 1.0.0
- name: ⬇️ Checkout repo
uses: actions/checkout@v3
with:
ref: ${{ steps.get-latest-tag.outputs.tag }}
fetch-depth: 0
- name: Prepare Changelog
run: |
yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
@@ -113,24 +171,6 @@ jobs:
if-no-files-found: error
retention-days: 1
- uses: markpatterson27/markdown-to-output@v1
id: extract-changelog
with:
filepath: CHANGELOG.md
- name: Get content
uses: 2428392/gh-truncate-string-action@v1.3.0
id: get-content
with:
stringToTruncate: |
📣 Wanderer new release available 🎉
**Version**: ${{ steps.get-latest-tag.outputs.tag }}
${{ steps.extract-changelog.outputs.body }}
maxLength: 500
truncationSymbol: "…"
merge:
runs-on: ubuntu-latest
needs:
@@ -161,9 +201,8 @@ jobs:
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }}
type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
type=raw,value=develop-{{sha}},enable=${{ github.ref == 'refs/heads/develop' }}
- name: Create manifest list and push
working-directory: /tmp/digests
@@ -176,12 +215,20 @@ jobs:
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
notify:
name: 🏷 Notify about release
name: 🏷 Notify about develop release
runs-on: ubuntu-22.04
needs: [docker, merge]
steps:
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v5.3.0
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: ${{ needs.docker.outputs.release-notes }}
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL_DEV }}
content: |
📣 New develop release available 🚀
**Commit**: `${{ github.sha }}`
**Status**: Development/Testing Release
Docker image: `wandererltd/community-edition:develop`
⚠️ This is an unstable development release for testing purposes.

View File

@@ -4,7 +4,6 @@ on:
push:
branches:
- main
- develop
env:
MIX_ENV: prod
@@ -22,7 +21,7 @@ jobs:
build:
name: 🛠 Build
runs-on: ubuntu-22.04
if: ${{ (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') && github.event_name == 'push' }}
if: ${{ github.ref == 'refs/heads/main' && github.event_name == 'push' }}
permissions:
checks: write
contents: write
@@ -37,7 +36,7 @@ jobs:
elixir: ["1.17"]
node-version: ["18.x"]
outputs:
commit_hash: ${{ steps.generate-changelog.outputs.commit_hash || steps.set-commit-develop.outputs.commit_hash }}
commit_hash: ${{ steps.generate-changelog.outputs.commit_hash }}
steps:
- name: Prepare
run: |
@@ -91,7 +90,6 @@ jobs:
- name: Generate Changelog & Update Tag Version
id: generate-changelog
if: github.ref == 'refs/heads/main'
run: |
git config --global user.name 'CI'
git config --global user.email 'ci@users.noreply.github.com'
@@ -102,15 +100,16 @@ jobs:
- name: Set commit hash for develop
id: set-commit-develop
if: github.ref == 'refs/heads/develop'
run: |
echo "commit_hash=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
docker:
name: 🛠 Build Docker Images
if: github.ref == 'refs/heads/develop'
needs: build
runs-on: ubuntu-22.04
outputs:
release-tag: ${{ steps.get-latest-tag.outputs.tag }}
release-notes: ${{ steps.get-content.outputs.string }}
permissions:
checks: write
contents: write
@@ -137,6 +136,17 @@ jobs:
ref: ${{ needs.build.outputs.commit_hash }}
fetch-depth: 0
- name: Get Release Tag
id: get-latest-tag
uses: "WyriHaximus/github-action-get-previous-tag@v1"
with:
fallback: 1.0.0
- name: Prepare Changelog
run: |
yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
@@ -185,6 +195,24 @@ jobs:
if-no-files-found: error
retention-days: 1
- uses: markpatterson27/markdown-to-output@v1
id: extract-changelog
with:
filepath: CHANGELOG.md
- name: Get content
uses: 2428392/gh-truncate-string-action@v1.3.0
id: get-content
with:
stringToTruncate: |
📣 Wanderer new release available 🎉
**Version**: ${{ steps.get-latest-tag.outputs.tag }}
${{ steps.extract-changelog.outputs.body }}
maxLength: 500
truncationSymbol: "…"
merge:
runs-on: ubuntu-latest
needs:
@@ -215,8 +243,9 @@ jobs:
tags: |
type=ref,event=branch
type=ref,event=pr
type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
type=raw,value=develop-{{sha}},enable=${{ github.ref == 'refs/heads/develop' }}
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }}
- name: Create manifest list and push
working-directory: /tmp/digests
@@ -259,3 +288,14 @@ jobs:
## How to Promote?
In order to promote this to prod, edit the draft and press **"Publish release"**.
draft: true
notify:
name: 🏷 Notify about release
runs-on: ubuntu-22.04
needs: [docker, merge]
steps:
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v5.3.0
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: ${{ needs.docker.outputs.release-notes }}

View File

@@ -1,187 +0,0 @@
name: Build Docker ARM Image
on:
push:
tags:
- '**'
env:
MIX_ENV: prod
GH_TOKEN: ${{ github.token }}
REGISTRY_IMAGE: wandererltd/community-edition-arm
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
permissions:
contents: write
jobs:
docker:
name: 🛠 Build Docker Images
runs-on: ubuntu-22.04
outputs:
release-tag: ${{ steps.get-latest-tag.outputs.tag }}
release-notes: ${{ steps.get-content.outputs.string }}
permissions:
checks: write
contents: write
packages: write
attestations: write
id-token: write
pull-requests: write
repository-projects: write
strategy:
fail-fast: false
matrix:
platform:
- linux/arm64
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: ⬇️ Checkout repo
uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Get Release Tag
id: get-latest-tag
uses: "WyriHaximus/github-action-get-previous-tag@v1"
with:
fallback: 1.0.0
- name: ⬇️ Checkout repo
uses: actions/checkout@v3
with:
ref: ${{ steps.get-latest-tag.outputs.tag }}
fetch-depth: 0
- name: Prepare Changelog
run: |
yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
- name: Set up QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to DockerHub
uses: docker/login-action@v3
with:
username: ${{ secrets.WANDERER_DOCKER_USER }}
password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}
- name: Build and push
id: build
uses: docker/build-push-action@v6
with:
push: true
context: .
file: ./Dockerfile
cache-from: type=gha
cache-to: type=gha,mode=max
labels: ${{ steps.meta.outputs.labels }}
platforms: ${{ matrix.platform }}
outputs: type=image,"name=${{ env.REGISTRY_IMAGE }}",push-by-digest=true,name-canonical=true,push=true
build-args: |
MIX_ENV=prod
BUILD_METADATA=${{ steps.meta.outputs.json }}
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: digests-${{ env.PLATFORM_PAIR }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
- uses: markpatterson27/markdown-to-output@v1
id: extract-changelog
with:
filepath: CHANGELOG.md
- name: Get content
uses: 2428392/gh-truncate-string-action@v1.3.0
id: get-content
with:
stringToTruncate: |
📣 Wanderer **ARM** release available 🎉
**Version**: :${{ steps.get-latest-tag.outputs.tag }}
${{ steps.extract-changelog.outputs.body }}
maxLength: 500
truncationSymbol: "…"
merge:
runs-on: ubuntu-latest
needs:
- docker
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.WANDERER_DOCKER_USER }}
password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: |
${{ env.REGISTRY_IMAGE }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
notify:
name: 🏷 Notify about release
runs-on: ubuntu-22.04
needs: [docker, merge]
steps:
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v5.3.0
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: ${{ needs.docker.outputs.release-notes }}

View File

@@ -21,7 +21,7 @@ jobs:
test:
name: Test Suite
runs-on: ubuntu-latest
services:
postgres:
image: postgres:15
@@ -35,17 +35,17 @@ jobs:
--health-retries 5
ports:
- 5432:5432
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Elixir/OTP
uses: erlef/setup-beam@v1
with:
elixir-version: ${{ env.ELIXIR_VERSION }}
otp-version: ${{ env.OTP_VERSION }}
- name: Cache Elixir dependencies
uses: actions/cache@v3
with:
@@ -54,12 +54,12 @@ jobs:
_build
key: ${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }}
restore-keys: ${{ runner.os }}-mix-
- name: Install Elixir dependencies
run: |
mix deps.get
mix deps.compile
- name: Check code formatting
id: format
run: |
@@ -71,42 +71,42 @@ jobs:
echo "count=1" >> $GITHUB_OUTPUT
fi
continue-on-error: true
- name: Compile code and capture warnings
id: compile
run: |
# Capture compilation output
output=$(mix compile 2>&1 || true)
echo "$output" > compile_output.txt
# Count warnings
warning_count=$(echo "$output" | grep -c "warning:" || echo "0")
# Check if compilation succeeded
if mix compile > /dev/null 2>&1; then
echo "status=✅ Success" >> $GITHUB_OUTPUT
else
echo "status=❌ Failed" >> $GITHUB_OUTPUT
fi
echo "warnings=$warning_count" >> $GITHUB_OUTPUT
echo "output<<EOF" >> $GITHUB_OUTPUT
echo "$output" >> $GITHUB_OUTPUT
echo "EOF" >> $GITHUB_OUTPUT
continue-on-error: true
- name: Setup database
run: |
mix ecto.create
mix ecto.migrate
- name: Run tests with coverage
id: tests
run: |
# Run tests with coverage
output=$(mix test --cover 2>&1 || true)
echo "$output" > test_output.txt
# Parse test results
if echo "$output" | grep -q "0 failures"; then
echo "status=✅ All Passed" >> $GITHUB_OUTPUT
@@ -115,16 +115,16 @@ jobs:
echo "status=❌ Some Failed" >> $GITHUB_OUTPUT
test_status="failed"
fi
# Extract test counts
test_line=$(echo "$output" | grep -E "[0-9]+ tests?, [0-9]+ failures?" | head -1 || echo "0 tests, 0 failures")
total_tests=$(echo "$test_line" | grep -o '[0-9]\+ tests\?' | grep -o '[0-9]\+' | head -1 || echo "0")
failures=$(echo "$test_line" | grep -o '[0-9]\+ failures\?' | grep -o '[0-9]\+' | head -1 || echo "0")
echo "total=$total_tests" >> $GITHUB_OUTPUT
echo "failures=$failures" >> $GITHUB_OUTPUT
echo "passed=$((total_tests - failures))" >> $GITHUB_OUTPUT
# Calculate success rate
if [ "$total_tests" -gt 0 ]; then
success_rate=$(echo "scale=1; ($total_tests - $failures) * 100 / $total_tests" | bc)
@@ -132,26 +132,26 @@ jobs:
success_rate="0"
fi
echo "success_rate=$success_rate" >> $GITHUB_OUTPUT
exit_code=$?
echo "exit_code=$exit_code" >> $GITHUB_OUTPUT
continue-on-error: true
- name: Generate coverage report
id: coverage
run: |
# Generate coverage report with GitHub format
output=$(mix coveralls.github 2>&1 || true)
echo "$output" > coverage_output.txt
# Extract coverage percentage
coverage=$(echo "$output" | grep -o '[0-9]\+\.[0-9]\+%' | head -1 | sed 's/%//' || echo "0")
if [ -z "$coverage" ]; then
coverage="0"
fi
echo "percentage=$coverage" >> $GITHUB_OUTPUT
# Determine status
if (( $(echo "$coverage >= 80" | bc -l) )); then
echo "status=✅ Excellent" >> $GITHUB_OUTPUT
@@ -161,14 +161,14 @@ jobs:
echo "status=❌ Needs Improvement" >> $GITHUB_OUTPUT
fi
continue-on-error: true
- name: Run Credo analysis
id: credo
run: |
# Run Credo and capture output
output=$(mix credo --strict --format=json 2>&1 || true)
echo "$output" > credo_output.txt
# Try to parse JSON output
if echo "$output" | jq . > /dev/null 2>&1; then
issues=$(echo "$output" | jq '.issues | length' 2>/dev/null || echo "0")
@@ -183,12 +183,12 @@ jobs:
normal_issues="0"
low_issues="0"
fi
echo "total_issues=$issues" >> $GITHUB_OUTPUT
echo "high_issues=$high_issues" >> $GITHUB_OUTPUT
echo "normal_issues=$normal_issues" >> $GITHUB_OUTPUT
echo "low_issues=$low_issues" >> $GITHUB_OUTPUT
# Determine status
if [ "$issues" -eq 0 ]; then
echo "status=✅ Clean" >> $GITHUB_OUTPUT
@@ -198,24 +198,24 @@ jobs:
echo "status=❌ Needs Attention" >> $GITHUB_OUTPUT
fi
continue-on-error: true
- name: Run Dialyzer analysis
id: dialyzer
run: |
# Ensure PLT is built
mix dialyzer --plt
# Run Dialyzer and capture output
output=$(mix dialyzer --format=github 2>&1 || true)
echo "$output" > dialyzer_output.txt
# Count warnings and errors
warnings=$(echo "$output" | grep -c "warning:" || echo "0")
errors=$(echo "$output" | grep -c "error:" || echo "0")
echo "warnings=$warnings" >> $GITHUB_OUTPUT
echo "errors=$errors" >> $GITHUB_OUTPUT
# Determine status
if [ "$errors" -eq 0 ] && [ "$warnings" -eq 0 ]; then
echo "status=✅ Clean" >> $GITHUB_OUTPUT
@@ -225,7 +225,7 @@ jobs:
echo "status=❌ Has Errors" >> $GITHUB_OUTPUT
fi
continue-on-error: true
- name: Create test results summary
id: summary
run: |
@@ -236,11 +236,11 @@ jobs:
coverage_score=${{ steps.coverage.outputs.percentage }}
credo_score=$(echo "scale=0; (100 - ${{ steps.credo.outputs.total_issues }} * 2)" | bc | sed 's/^-.*$/0/')
dialyzer_score=$(echo "scale=0; (100 - ${{ steps.dialyzer.outputs.warnings }} * 2 - ${{ steps.dialyzer.outputs.errors }} * 10)" | bc | sed 's/^-.*$/0/')
overall_score=$(echo "scale=1; ($format_score + $compile_score + $test_score + $coverage_score + $credo_score + $dialyzer_score) / 6" | bc)
echo "overall_score=$overall_score" >> $GITHUB_OUTPUT
# Determine overall status
if (( $(echo "$overall_score >= 90" | bc -l) )); then
echo "overall_status=🌟 Excellent" >> $GITHUB_OUTPUT
@@ -252,7 +252,7 @@ jobs:
echo "overall_status=❌ Poor" >> $GITHUB_OUTPUT
fi
continue-on-error: true
- name: Find existing PR comment
if: github.event_name == 'pull_request'
id: find_comment
@@ -261,7 +261,7 @@ jobs:
issue-number: ${{ github.event.pull_request.number }}
comment-author: 'github-actions[bot]'
body-includes: '## 🧪 Test Results Summary'
- name: Create or update PR comment
if: github.event_name == 'pull_request'
uses: peter-evans/create-or-update-comment@v4
@@ -271,11 +271,11 @@ jobs:
edit-mode: replace
body: |
## 🧪 Test Results Summary
**Overall Quality Score: ${{ steps.summary.outputs.overall_score }}%** ${{ steps.summary.outputs.overall_status }}
### 📊 Metrics Dashboard
| Category | Status | Count | Details |
|----------|---------|-------|---------|
| 📝 **Code Formatting** | ${{ steps.format.outputs.status }} | ${{ steps.format.outputs.count }} issues | `mix format --check-formatted` |
@@ -284,50 +284,50 @@ jobs:
| 📊 **Coverage** | ${{ steps.coverage.outputs.status }} | ${{ steps.coverage.outputs.percentage }}% | `mix coveralls` |
| 🎯 **Credo** | ${{ steps.credo.outputs.status }} | ${{ steps.credo.outputs.total_issues }} issues | High: ${{ steps.credo.outputs.high_issues }}, Normal: ${{ steps.credo.outputs.normal_issues }}, Low: ${{ steps.credo.outputs.low_issues }} |
| 🔍 **Dialyzer** | ${{ steps.dialyzer.outputs.status }} | ${{ steps.dialyzer.outputs.errors }} errors, ${{ steps.dialyzer.outputs.warnings }} warnings | `mix dialyzer` |
### 🎯 Quality Gates
Based on the project's quality thresholds:
- **Compilation Warnings**: ${{ steps.compile.outputs.warnings }}/148 (limit: 148)
- **Credo Issues**: ${{ steps.credo.outputs.total_issues }}/87 (limit: 87)
- **Credo Issues**: ${{ steps.credo.outputs.total_issues }}/87 (limit: 87)
- **Dialyzer Warnings**: ${{ steps.dialyzer.outputs.warnings }}/161 (limit: 161)
- **Test Coverage**: ${{ steps.coverage.outputs.percentage }}%/50% (minimum: 50%)
- **Test Failures**: ${{ steps.tests.outputs.failures }}/0 (limit: 0)
<details>
<summary>📈 Progress Toward Goals</summary>
Target goals for the project:
- ✨ **Zero compilation warnings** (currently: ${{ steps.compile.outputs.warnings }})
- ✨ **≤10 Credo issues** (currently: ${{ steps.credo.outputs.total_issues }})
- ✨ **Zero Dialyzer warnings** (currently: ${{ steps.dialyzer.outputs.warnings }})
- ✨ **≥85% test coverage** (currently: ${{ steps.coverage.outputs.percentage }}%)
- ✅ **Zero test failures** (currently: ${{ steps.tests.outputs.failures }})
</details>
<details>
<summary>🔧 Quick Actions</summary>
To improve code quality:
```bash
# Fix formatting issues
mix format
# View detailed Credo analysis
mix credo --strict
# Check Dialyzer warnings
mix dialyzer
# Generate detailed coverage report
mix coveralls.html
```
</details>
---
🤖 *Auto-generated by GitHub Actions* • Updated: ${{ github.event.head_commit.timestamp }}
> **Note**: This comment will be updated automatically when new commits are pushed to this PR.
> **Note**: This comment will be updated automatically when new commits are pushed to this PR.

View File

@@ -2,6 +2,305 @@
<!-- changelog -->
## [v1.89.0](https://github.com/wanderer-industries/wanderer/compare/v1.88.13...v1.89.0) (2025-11-30)
### Features:
* removed unnecessary command
* rework wormholes reference
## [v1.88.13](https://github.com/wanderer-industries/wanderer/compare/v1.88.12...v1.88.13) (2025-11-29)
### Bug Fixes:
* core: fixed tracking issues
## [v1.88.12](https://github.com/wanderer-industries/wanderer/compare/v1.88.11...v1.88.12) (2025-11-29)
### Bug Fixes:
* core: fixed c4 -> ns connections auto size issues
## [v1.88.11](https://github.com/wanderer-industries/wanderer/compare/v1.88.10...v1.88.11) (2025-11-29)
## [v1.88.10](https://github.com/wanderer-industries/wanderer/compare/v1.88.9...v1.88.10) (2025-11-29)
### Bug Fixes:
* core: fixed pings cleanup
## [v1.88.9](https://github.com/wanderer-industries/wanderer/compare/v1.88.8...v1.88.9) (2025-11-29)
### Bug Fixes:
* core: fixed linked signatures cleanup
## [v1.88.8](https://github.com/wanderer-industries/wanderer/compare/v1.88.7...v1.88.8) (2025-11-28)
### Bug Fixes:
* core: fixed pings issue
## [v1.88.7](https://github.com/wanderer-industries/wanderer/compare/v1.88.6...v1.88.7) (2025-11-28)
### Bug Fixes:
* core: fixed tracking issues
## [v1.88.6](https://github.com/wanderer-industries/wanderer/compare/v1.88.5...v1.88.6) (2025-11-28)
### Bug Fixes:
* core: fixed tracking issues
## [v1.88.5](https://github.com/wanderer-industries/wanderer/compare/v1.88.4...v1.88.5) (2025-11-28)
### Bug Fixes:
* core: fixed env errors
## [v1.88.4](https://github.com/wanderer-industries/wanderer/compare/v1.88.3...v1.88.4) (2025-11-27)
### Bug Fixes:
* defensive check for undefined excluded systems
## [v1.88.3](https://github.com/wanderer-industries/wanderer/compare/v1.88.2...v1.88.3) (2025-11-26)
### Bug Fixes:
* core: fixed env issues
## [v1.88.1](https://github.com/wanderer-industries/wanderer/compare/v1.88.0...v1.88.1) (2025-11-26)
### Bug Fixes:
* sse enable checkbox, and kills ticker
* apiv1 token auth and structure fixes
* removed ipv6 distribution env settings
* tests: updated tests
* tests: updated tests
* clean up id generation
* resolve issue with async event processing
## [v1.88.0](https://github.com/wanderer-industries/wanderer/compare/v1.87.0...v1.88.0) (2025-11-25)
### Features:
* Add zkb and eve who links for characters where it possibly was add
## [v1.87.0](https://github.com/wanderer-industries/wanderer/compare/v1.86.1...v1.87.0) (2025-11-25)
### Features:
* Add support markdown for system description
## [v1.86.1](https://github.com/wanderer-industries/wanderer/compare/v1.86.0...v1.86.1) (2025-11-25)
### Bug Fixes:
* Map: Add ability to see character passage direction in list of passages
## [v1.86.0](https://github.com/wanderer-industries/wanderer/compare/v1.85.5...v1.86.0) (2025-11-25)
### Features:
* add date filter for character activity
## [v1.85.5](https://github.com/wanderer-industries/wanderer/compare/v1.85.4...v1.85.5) (2025-11-24)
### Bug Fixes:
* core: fixed connections cleanup and rally points delete issues
## [v1.85.4](https://github.com/wanderer-industries/wanderer/compare/v1.85.3...v1.85.4) (2025-11-22)
### Bug Fixes:
* core: invalidate map characters every 1 hour for any missing/revoked permissions
## [v1.85.3](https://github.com/wanderer-industries/wanderer/compare/v1.85.2...v1.85.3) (2025-11-22)
### Bug Fixes:
* core: fixed connection time status issues. fixed character alliance update issues
## [v1.85.2](https://github.com/wanderer-industries/wanderer/compare/v1.85.1...v1.85.2) (2025-11-20)
### Bug Fixes:
* core: increased API pool limits
## [v1.85.1](https://github.com/wanderer-industries/wanderer/compare/v1.85.0...v1.85.1) (2025-11-20)
### Bug Fixes:
* core: increased API pool limits
## [v1.85.0](https://github.com/wanderer-industries/wanderer/compare/v1.84.37...v1.85.0) (2025-11-19)
### Features:
* core: added support for new ship types
## [v1.84.37](https://github.com/wanderer-industries/wanderer/compare/v1.84.36...v1.84.37) (2025-11-19)
### Bug Fixes:
* auth: fixed character auth issues
## [v1.84.36](https://github.com/wanderer-industries/wanderer/compare/v1.84.35...v1.84.36) (2025-11-19)
### Bug Fixes:
* fixed duplicated map slugs
## [v1.84.35](https://github.com/wanderer-industries/wanderer/compare/v1.84.34...v1.84.35) (2025-11-19)
### Bug Fixes:
* structure search / paste issues
## [v1.84.34](https://github.com/wanderer-industries/wanderer/compare/v1.84.33...v1.84.34) (2025-11-18)
### Bug Fixes:
* core: fixed character tracking issues
## [v1.84.33](https://github.com/wanderer-industries/wanderer/compare/v1.84.32...v1.84.33) (2025-11-18)
### Bug Fixes:
* core: fixed character tracking issues
## [v1.84.32](https://github.com/wanderer-industries/wanderer/compare/v1.84.31...v1.84.32) (2025-11-18)
### Bug Fixes:
* core: fixed character tracking issues
## [v1.84.31](https://github.com/wanderer-industries/wanderer/compare/v1.84.30...v1.84.31) (2025-11-17)
### Bug Fixes:
* core: fixed connactions validation logic
## [v1.84.30](https://github.com/wanderer-industries/wanderer/compare/v1.84.29...v1.84.30) (2025-11-17)
## [v1.84.29](https://github.com/wanderer-industries/wanderer/compare/v1.84.28...v1.84.29) (2025-11-17)
## [v1.84.28](https://github.com/wanderer-industries/wanderer/compare/v1.84.27...v1.84.28) (2025-11-17)
### Bug Fixes:
* core: fixed ACL updates
## [v1.84.27](https://github.com/wanderer-industries/wanderer/compare/v1.84.26...v1.84.27) (2025-11-17)

View File

@@ -32,8 +32,58 @@ format f:
test t:
MIX_ENV=test mix test
# Run tests in 4 parallel partitions (useful for CI or faster local runs)
test-parallel tp:
@echo "Running tests in 4 parallel partitions..."
@mkdir -p /tmp/wanderer_test_results
@rm -f /tmp/wanderer_test_results/partition_*.txt /tmp/wanderer_test_results/exit_*.txt
@for i in 1 2 3 4; do \
(MIX_TEST_PARTITION=$$i MIX_ENV=test mix test --partitions 4 2>&1; echo $$? > /tmp/wanderer_test_results/exit_$$i.txt) | \
tee /tmp/wanderer_test_results/partition_$$i.txt | sed "s/^/[P$$i] /" & \
done; \
wait
@echo ""
@echo "========================================"
@echo " TEST RESULTS SUMMARY"
@echo "========================================"
@total_tests=0; total_failures=0; total_excluded=0; all_passed=true; \
for i in 1 2 3 4; do \
exit_code=$$(cat /tmp/wanderer_test_results/exit_$$i.txt 2>/dev/null || echo "1"); \
if [ "$$exit_code" != "0" ]; then all_passed=false; fi; \
summary=$$(grep -E "^[0-9]+ (tests?|doctest)" /tmp/wanderer_test_results/partition_$$i.txt | tail -1 || echo "No results"); \
tests=$$(echo "$$summary" | grep -oE "^[0-9]+" || echo "0"); \
failures=$$(echo "$$summary" | grep -oE "[0-9]+ failures?" | grep -oE "^[0-9]+" || echo "0"); \
excluded=$$(echo "$$summary" | grep -oE "[0-9]+ excluded" | grep -oE "^[0-9]+" || echo "0"); \
total_tests=$$((total_tests + tests)); \
total_failures=$$((total_failures + failures)); \
total_excluded=$$((total_excluded + excluded)); \
if [ "$$exit_code" = "0" ]; then \
echo "Partition $$i: ✓ $$summary"; \
else \
echo "Partition $$i: ✗ $$summary (exit code: $$exit_code)"; \
fi; \
done; \
echo "========================================"; \
echo "TOTAL: $$total_tests tests, $$total_failures failures, $$total_excluded excluded"; \
echo "========================================"; \
if [ "$$all_passed" = "true" ]; then \
echo "✓ All partitions passed!"; \
else \
echo "✗ Some partitions failed. Details below:"; \
echo ""; \
for i in 1 2 3 4; do \
exit_code=$$(cat /tmp/wanderer_test_results/exit_$$i.txt 2>/dev/null || echo "1"); \
if [ "$$exit_code" != "0" ]; then \
echo "======== PARTITION $$i FAILURES ========"; \
grep -A 50 "Failures:" /tmp/wanderer_test_results/partition_$$i.txt 2>/dev/null || cat /tmp/wanderer_test_results/partition_$$i.txt; \
echo ""; \
fi; \
done; \
exit 1; \
fi
coverage cover co:
mix test --cover
MIX_ENV=test mix test --cover
unit-tests ut:
@echo "Running unit tests..."

View File

@@ -1,7 +1,7 @@
import { MarkdownComment } from '@/hooks/Mapper/components/mapInterface/components/Comments/components';
import { useEffect, useRef, useState } from 'react';
import { CommentType } from '@/hooks/Mapper/types';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { CommentType } from '@/hooks/Mapper/types';
import { useEffect, useMemo, useRef, useState } from 'react';
export interface CommentsProps {}
@@ -14,7 +14,9 @@ export const Comments = ({}: CommentsProps) => {
comments: { loadComments, comments, lastUpdateKey },
} = useMapRootState();
const [systemId] = selectedSystems;
const systemId = useMemo(() => {
return +selectedSystems[0];
}, [selectedSystems]);
const ref = useRef({ loadComments, systemId });
ref.current = { loadComments, systemId };
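The hunk above replaces plain destructuring of `selectedSystems` with a memoized numeric coercion of the first selected system id. A minimal standalone sketch of the difference, assuming `selectedSystems` is a `string[]` from the map root state (hypothetical example, not the actual hook):

```typescript
// Hypothetical sketch; the array shape is assumed, not taken from the repo.
const selectedSystems: string[] = ['30000142'];

// Old behavior: destructuring keeps the id as a string.
const [systemIdAsString] = selectedSystems;

// New behavior: unary plus coerces it to a number (NaN when nothing is selected);
// wrapping it in useMemo recomputes only when selectedSystems changes.
const systemIdAsNumber = +selectedSystems[0];

console.log(typeof systemIdAsString, typeof systemIdAsNumber); // "string" "number"
```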

View File

@@ -1,4 +1,3 @@
import classes from './MarkdownComment.module.scss';
import clsx from 'clsx';
import {
InfoDrawer,
@@ -49,7 +48,11 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
<>
<InfoDrawer
labelClassName="mb-[3px]"
className={clsx(classes.MarkdownCommentRoot, 'p-1 bg-stone-700/20 ')}
className={clsx(
'p-1 bg-stone-700/20',
'text-[12px] leading-[1.2] text-stone-300 break-words',
'bg-gradient-to-r from-stone-600/40 via-stone-600/10 to-stone-600/0',
)}
onMouseEnter={handleMouseEnter}
onMouseLeave={handleMouseLeave}
title={

View File

@@ -0,0 +1,9 @@
.CERoot {
@apply border border-stone-400/30 rounded-[2px];
:global {
.cm-content {
@apply bg-stone-600/40;
}
}
}

View File

@@ -3,9 +3,10 @@ import clsx from 'clsx';
import { PrimeIcons } from 'primereact/api';
import { MarkdownEditor } from '@/hooks/Mapper/components/mapInterface/components/MarkdownEditor';
import { useHotkey } from '@/hooks/Mapper/hooks';
import { useCallback, useRef, useState } from 'react';
import { useCallback, useMemo, useRef, useState } from 'react';
import { OutCommand } from '@/hooks/Mapper/types';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import classes from './CommentsEditor.module.scss';
export interface CommentsEditorProps {}
@@ -18,7 +19,9 @@ export const CommentsEditor = ({}: CommentsEditorProps) => {
outCommand,
} = useMapRootState();
const [systemId] = selectedSystems;
const systemId = useMemo(() => {
return +selectedSystems[0];
}, [selectedSystems]);
const ref = useRef({ outCommand, systemId, textVal });
ref.current = { outCommand, systemId, textVal };
@@ -48,6 +51,7 @@ export const CommentsEditor = ({}: CommentsEditorProps) => {
return (
<MarkdownEditor
className={classes.CERoot}
value={textVal}
onChange={setTextVal}
overlayContent={

View File

@@ -1,9 +1,9 @@
.CERoot {
@apply border border-stone-400/30 rounded-[2px];
@apply border border-stone-500/30 rounded-[2px];
:global {
.cm-content {
@apply bg-stone-600/40;
@apply bg-stone-950/70;
}
.cm-scroller {

View File

@@ -44,9 +44,17 @@ export interface MarkdownEditorProps {
overlayContent?: ReactNode;
value: string;
onChange: (value: string) => void;
height?: string;
className?: string;
}
export const MarkdownEditor = ({ value, onChange, overlayContent }: MarkdownEditorProps) => {
export const MarkdownEditor = ({
value,
onChange,
overlayContent,
height = '70px',
className,
}: MarkdownEditorProps) => {
const [hasShift, setHasShift] = useState(false);
const refData = useRef({ onChange });
@@ -66,9 +74,9 @@ export const MarkdownEditor = ({ value, onChange, overlayContent }: MarkdownEdit
<div className={clsx(classes.MarkdownEditor, 'relative')}>
<CodeMirror
value={value}
height="70px"
height={height}
extensions={CODE_MIRROR_EXTENSIONS}
className={classes.CERoot}
className={clsx(classes.CERoot, className)}
theme={oneDark}
onChange={handleOnChange}
placeholder="Start typing..."

View File

@@ -8,8 +8,8 @@ import { LabelsManager } from '@/hooks/Mapper/utils/labelsManager.ts';
import { Dialog } from 'primereact/dialog';
import { IconField } from 'primereact/iconfield';
import { InputText } from 'primereact/inputtext';
import { InputTextarea } from 'primereact/inputtextarea';
import { useCallback, useEffect, useRef, useState } from 'react';
import { MarkdownEditor } from '@/hooks/Mapper/components/mapInterface/components/MarkdownEditor';
interface SystemSettingsDialog {
systemId: string;
@@ -214,13 +214,9 @@ export const SystemSettingsDialog = ({ systemId, visible, setVisible }: SystemSe
<div className="flex flex-col gap-1">
<label htmlFor="username">Description</label>
<InputTextarea
autoResize
rows={5}
cols={30}
value={description}
onChange={e => setDescription(e.target.value)}
/>
<div className="h-[200px]">
<MarkdownEditor value={description} onChange={e => setDescription(e)} height="180px" />
</div>
</div>
</div>

View File

@@ -2,7 +2,7 @@ import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { isWormholeSpace } from '@/hooks/Mapper/components/map/helpers/isWormholeSpace.ts';
import { useMemo } from 'react';
import { getSystemById, sortWHClasses } from '@/hooks/Mapper/helpers';
import { InfoDrawer, WHClassView, WHEffectView } from '@/hooks/Mapper/components/ui-kit';
import { InfoDrawer, MarkdownTextViewer, WHClassView, WHEffectView } from '@/hooks/Mapper/components/ui-kit';
import { getSystemStaticInfo } from '@/hooks/Mapper/mapRootProvider/hooks/useLoadSystemStatic';
interface SystemInfoContentProps {
@@ -51,7 +51,7 @@ export const SystemInfoContent = ({ systemId }: SystemInfoContentProps) => {
</div>
}
>
<div className="break-words">{description}</div>
<MarkdownTextViewer>{description}</MarkdownTextViewer>
</InfoDrawer>
)}
</div>

View File

@@ -30,10 +30,14 @@ export const SystemStructures: React.FC = () => {
const processClipboard = useCallback(
(text: string) => {
if (!systemId) {
console.warn('Cannot update structures: no system selected');
return;
}
const updated = processSnippetText(text, structures);
handleUpdateStructures(updated);
},
[structures, handleUpdateStructures],
[systemId, structures, handleUpdateStructures],
);
const handlePaste = useCallback(

View File

@@ -56,6 +56,11 @@ export function useSystemStructures({ systemId, outCommand }: UseSystemStructure
const handleUpdateStructures = useCallback(
async (newList: StructureItem[]) => {
if (!systemId) {
console.warn('Cannot update structures: systemId is undefined');
return;
}
const { added, updated, removed } = getActualStructures(structures, newList);
const sanitizedAdded = added.map(sanitizeIds);

View File

@@ -31,7 +31,7 @@ export function useSystemKills({ systemId, outCommand, showAllVisible = false, s
storedSettings: { settingsKills },
} = useMapRootState();
const excludedSystems = useStableValue(settingsKills.excludedSystems);
const excludedSystems = useStableValue(settingsKills.excludedSystems ?? []);
const effectiveSystemIds = useMemo(() => {
if (showAllVisible) {

View File

@@ -9,6 +9,7 @@ import { MapContextMenu } from '@/hooks/Mapper/components/mapRootContent/compone
import { useSkipContextMenu } from '@/hooks/Mapper/hooks/useSkipContextMenu';
import { MapSettings } from '@/hooks/Mapper/components/mapRootContent/components/MapSettings';
import { CharacterActivity } from '@/hooks/Mapper/components/mapRootContent/components/CharacterActivity';
import { WormholeSignaturesDialog } from '@/hooks/Mapper/components/mapRootContent/components/WormholeSignaturesDialog';
import { useCharacterActivityHandlers } from './hooks/useCharacterActivityHandlers';
import { TrackingDialog } from '@/hooks/Mapper/components/mapRootContent/components/TrackingDialog';
import { useMapEventListener } from '@/hooks/Mapper/events';
@@ -34,6 +35,7 @@ export const MapRootContent = ({}: MapRootContentProps) => {
const [showOnTheMap, setShowOnTheMap] = useState(false);
const [showMapSettings, setShowMapSettings] = useState(false);
const [showTrackingDialog, setShowTrackingDialog] = useState(false);
const [showWormholeList, setShowWormholeList] = useState(false);
/* Important notice: this approach is required so that only a single MapInterface instance is used */
const mapInterface = isReady ? <MapInterface /> : null;
@@ -41,6 +43,7 @@ export const MapRootContent = ({}: MapRootContentProps) => {
const handleShowOnTheMap = useCallback(() => setShowOnTheMap(true), []);
const handleShowMapSettings = useCallback(() => setShowMapSettings(true), []);
const handleShowTrackingDialog = useCallback(() => setShowTrackingDialog(true), []);
const handleShowWormholesReference = useCallback(() => setShowWormholeList(true), []);
useMapEventListener(event => {
if (event.name === Commands.showTracking) {
@@ -65,6 +68,7 @@ export const MapRootContent = ({}: MapRootContentProps) => {
onShowOnTheMap={handleShowOnTheMap}
onShowMapSettings={handleShowMapSettings}
onShowTrackingDialog={handleShowTrackingDialog}
onShowWormholesReference={handleShowWormholesReference}
additionalContent={<PingsInterface hasLeftOffset />}
/>
</div>
@@ -79,6 +83,7 @@ export const MapRootContent = ({}: MapRootContentProps) => {
onShowOnTheMap={handleShowOnTheMap}
onShowMapSettings={handleShowMapSettings}
onShowTrackingDialog={handleShowTrackingDialog}
onShowWormholesReference={handleShowWormholesReference}
/>
</div>
</Topbar>
@@ -93,6 +98,7 @@ export const MapRootContent = ({}: MapRootContentProps) => {
{showTrackingDialog && (
<TrackingDialog visible={showTrackingDialog} onHide={() => setShowTrackingDialog(false)} />
)}
<WormholeSignaturesDialog visible={showWormholeList} onHide={() => setShowWormholeList(false)} />
{hasOldSettings && <OldSettingsDialog />}
</Layout>

View File

@@ -1,4 +1,7 @@
import { Dialog } from 'primereact/dialog';
import { Menu } from 'primereact/menu';
import { MenuItem } from 'primereact/menuitem';
import { useState, useCallback, useRef, useMemo } from 'react';
import { CharacterActivityContent } from '@/hooks/Mapper/components/mapRootContent/components/CharacterActivity/CharacterActivityContent.tsx';
interface CharacterActivityProps {
@@ -6,17 +9,69 @@ interface CharacterActivityProps {
onHide: () => void;
}
const periodOptions = [
{ value: 30, label: '30 Days' },
{ value: 365, label: '1 Year' },
{ value: null, label: 'All Time' },
];
export const CharacterActivity = ({ visible, onHide }: CharacterActivityProps) => {
const [selectedPeriod, setSelectedPeriod] = useState<number | null>(30);
const menuRef = useRef<Menu>(null);
const handlePeriodChange = useCallback((days: number | null) => {
setSelectedPeriod(days);
}, []);
const menuItems: MenuItem[] = useMemo(
() => [
{
label: 'Period',
items: periodOptions.map(option => ({
label: option.label,
icon: selectedPeriod === option.value ? 'pi pi-check' : undefined,
command: () => handlePeriodChange(option.value),
})),
},
],
[selectedPeriod, handlePeriodChange],
);
const selectedPeriodLabel = useMemo(
() => periodOptions.find(opt => opt.value === selectedPeriod)?.label || 'All Time',
[selectedPeriod],
);
const headerIcons = (
<>
<button
type="button"
className="p-dialog-header-icon p-link"
onClick={e => menuRef.current?.toggle(e)}
aria-label="Filter options"
>
<span className="pi pi-bars" />
</button>
<Menu model={menuItems} popup ref={menuRef} />
</>
);
return (
<Dialog
header="Character Activity"
header={
<div className="flex items-center gap-2">
<span>Character Activity</span>
<span className="text-xs text-stone-400">({selectedPeriodLabel})</span>
</div>
}
visible={visible}
className="w-[550px] max-h-[90vh]"
onHide={onHide}
dismissableMask
contentClassName="p-0 h-full flex flex-col"
icons={headerIcons}
>
<CharacterActivityContent />
<CharacterActivityContent selectedPeriod={selectedPeriod} />
</Dialog>
);
};

View File

@@ -7,16 +7,28 @@ import {
} from '@/hooks/Mapper/components/mapRootContent/components/CharacterActivity/helpers.tsx';
import { Column } from 'primereact/column';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { useMemo } from 'react';
import { useMemo, useEffect } from 'react';
import { useCharacterActivityHandlers } from '@/hooks/Mapper/components/mapRootContent/hooks/useCharacterActivityHandlers';
export const CharacterActivityContent = () => {
interface CharacterActivityContentProps {
selectedPeriod: number | null;
}
export const CharacterActivityContent = ({ selectedPeriod }: CharacterActivityContentProps) => {
const {
data: { characterActivityData },
} = useMapRootState();
const { handleShowActivity } = useCharacterActivityHandlers();
const activity = useMemo(() => characterActivityData?.activity || [], [characterActivityData]);
const loading = useMemo(() => characterActivityData?.loading !== false, [characterActivityData]);
// Reload activity data when period changes
useEffect(() => {
handleShowActivity(selectedPeriod);
}, [selectedPeriod, handleShowActivity]);
if (loading) {
return (
<div className="flex flex-col items-center justify-center h-full w-full">

View File

@@ -3,7 +3,7 @@
}
.SidebarOnTheMap {
width: 400px;
width: 500px;
padding: 0 !important;
:global {

View File

@@ -5,6 +5,7 @@ import {
ConnectionType,
OutCommand,
Passage,
PassageWithSourceTarget,
SolarSystemConnection,
} from '@/hooks/Mapper/types';
import clsx from 'clsx';
@@ -19,7 +20,7 @@ import { PassageCard } from './PassageCard';
const sortByDate = (a: string, b: string) => new Date(a).getTime() - new Date(b).getTime();
const itemTemplate = (item: Passage, options: VirtualScrollerTemplateOptions) => {
const itemTemplate = (item: PassageWithSourceTarget, options: VirtualScrollerTemplateOptions) => {
return (
<div
className={clsx(classes.CharacterRow, 'w-full box-border', {
@@ -35,7 +36,7 @@ const itemTemplate = (item: Passage, options: VirtualScrollerTemplateOptions) =>
};
export interface ConnectionPassagesContentProps {
passages: Passage[];
passages: PassageWithSourceTarget[];
}
export const ConnectionPassages = ({ passages = [] }: ConnectionPassagesContentProps) => {
@@ -113,6 +114,20 @@ export const Connections = ({ selectedConnection, onHide }: OnTheMapProps) => {
[outCommand],
);
const preparedPassages = useMemo(() => {
if (!cnInfo) {
return [];
}
return passages
.sort((a, b) => sortByDate(b.inserted_at, a.inserted_at))
.map<PassageWithSourceTarget>(x => ({
...x,
source: x.from ? cnInfo.target : cnInfo.source,
target: x.from ? cnInfo.source : cnInfo.target,
}));
}, [cnInfo, passages]);
useEffect(() => {
if (!selectedConnection) {
return;
@@ -145,12 +160,14 @@ export const Connections = ({ selectedConnection, onHide }: OnTheMapProps) => {
<InfoDrawer title="Connection" rightSide>
<div className="flex justify-end gap-2 items-center">
<SystemView
showCustomName
systemId={cnInfo.source}
className={clsx(classes.InfoTextSize, 'select-none text-center')}
hideRegion
/>
<span className="pi pi-angle-double-right text-stone-500 text-[15px]"></span>
<SystemView
showCustomName
systemId={cnInfo.target}
className={clsx(classes.InfoTextSize, 'select-none text-center')}
hideRegion
@@ -184,7 +201,7 @@ export const Connections = ({ selectedConnection, onHide }: OnTheMapProps) => {
{/* separator */}
<div className="w-full h-px bg-neutral-800 px-0.5"></div>
<ConnectionPassages passages={passages} />
<ConnectionPassages passages={preparedPassages} />
</div>
</Sidebar>
);

View File

@@ -35,6 +35,10 @@
&.ThreeColumns {
grid-template-columns: auto 1fr auto;
}
&.FourColumns {
grid-template-columns: auto auto 1fr auto;
}
}
.CardBorderLeftIsOwn {

View File

@@ -1,17 +1,19 @@
import clsx from 'clsx';
import classes from './PassageCard.module.scss';
import { Passage } from '@/hooks/Mapper/types';
import { TimeAgo } from '@/hooks/Mapper/components/ui-kit';
import { PassageWithSourceTarget } from '@/hooks/Mapper/types';
import { SystemView, TimeAgo, TooltipPosition, WdImgButton } from '@/hooks/Mapper/components/ui-kit';
import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper';
import { kgToTons } from '@/hooks/Mapper/utils/kgToTons.ts';
import { useMemo } from 'react';
import { useCallback, useMemo } from 'react';
import { ZKB_ICON } from '@/hooks/Mapper/icons';
import { charEveWhoLink, charZKBLink } from '@/hooks/Mapper/helpers/linkHelpers.ts';
type PassageCardType = {
// compact?: boolean;
showShipName?: boolean;
// showSystem?: boolean;
// useSystemsCache?: boolean;
} & Passage;
} & PassageWithSourceTarget;
const SHIP_NAME_RX = /u'|'/g;
export const getShipName = (name: string) => {
@@ -25,7 +27,7 @@ export const getShipName = (name: string) => {
});
};
export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardType) => {
export const PassageCard = ({ inserted_at, character: char, ship, source, target, from }: PassageCardType) => {
const isOwn = false;
const insertedAt = useMemo(() => {
@@ -33,11 +35,46 @@ export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardT
return date.toLocaleString();
}, [inserted_at]);
const handleOpenZKB = useCallback(() => window.open(charZKBLink(char.eve_id), '_blank'), [char]);
const handleOpenEveWho = useCallback(() => window.open(charEveWhoLink(char.eve_id), '_blank'), [char]);
return (
<div className={clsx(classes.CharacterCard, 'w-full text-xs', 'flex flex-col box-border')}>
<div className="flex flex-col justify-between px-2 py-1 gap-1">
{/*here icon and other*/}
<div className={clsx(classes.CharRow, classes.ThreeColumns)}>
<div className={clsx(classes.CharRow, classes.FourColumns)}>
<WdTooltipWrapper
position={TooltipPosition.top}
content={
<div className="flex justify-between gap-2 items-center">
<SystemView
showCustomName
systemId={source}
className="select-none text-center !text-[12px]"
hideRegion
/>
<span className="pi pi-angle-double-right text-stone-500 text-[15px]"></span>
<SystemView
showCustomName
systemId={target}
className="select-none text-center !text-[12px]"
hideRegion
/>
</div>
}
>
<div
className={clsx(
'transition-all transform ease-in duration-200',
'pi text-stone-500 text-[15px] w-[35px] h-[33px] !flex items-center justify-center border rounded-[6px]',
{
['pi-angle-double-right !text-orange-400 border-orange-400 hover:bg-orange-400/30']: from,
['pi-angle-double-left !text-stone-500/70 border-stone-500/70 hover:bg-stone-500/30']: !from,
},
)}
/>
</WdTooltipWrapper>
{/*portrait*/}
<span
className={clsx(classes.EveIcon, classes.CharIcon, 'wd-bg-default')}
@@ -49,7 +86,7 @@ export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardT
{/*here name and ship name*/}
<div className="grid gap-1 justify-between grid-cols-[max-content_1fr]">
{/*char name*/}
<div className="grid gap-1 grid-cols-[auto_1px_1fr]">
<div className="grid gap-1 grid-cols-[auto_1px_1fr_auto]">
<span
className={clsx(classes.MaxWidth, 'text-ellipsis overflow-hidden whitespace-nowrap', {
[classes.CardBorderLeftIsOwn]: isOwn,
@@ -62,6 +99,21 @@ export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardT
<div className="h-3 border-r border-neutral-500 my-0.5"></div>
{char.alliance_ticker && <span className="text-neutral-400">{char.alliance_ticker}</span>}
{!char.alliance_ticker && <span className="text-neutral-400">{char.corporation_ticker}</span>}
<div className={clsx('flex gap-1 items-center h-full ml-[2px]')}>
<WdImgButton
width={16}
height={16}
tooltip={{ position: TooltipPosition.top, content: 'Open zkillboard' }}
source={ZKB_ICON}
onClick={handleOpenZKB}
/>
<WdImgButton
tooltip={{ position: TooltipPosition.top, content: 'Open Eve Who' }}
className={clsx('pi pi-user', '!text-[12px] relative top-[-1px]')}
onClick={handleOpenEveWho}
/>
</div>
</div>
{/*ship name*/}

View File

@@ -12,9 +12,15 @@ export interface MapContextMenuProps {
onShowOnTheMap?: () => void;
onShowMapSettings?: () => void;
onShowTrackingDialog?: () => void;
onShowWormholesReference?: () => void;
}
export const MapContextMenu = ({ onShowOnTheMap, onShowMapSettings, onShowTrackingDialog }: MapContextMenuProps) => {
export const MapContextMenu = ({
onShowOnTheMap,
onShowMapSettings,
onShowTrackingDialog,
onShowWormholesReference,
}: MapContextMenuProps) => {
const {
outCommand,
storedSettings: { setInterfaceSettings },
@@ -52,6 +58,12 @@ export const MapContextMenu = ({ onShowOnTheMap, onShowMapSettings, onShowTracki
command: onShowOnTheMap,
visible: canTrackCharacters,
},
{
label: 'Wormholes Ref.',
icon: 'pi pi-bullseye',
command: onShowWormholesReference,
visible: canTrackCharacters,
},
{ separator: true, visible: true },
{
label: 'Settings',

View File

@@ -14,6 +14,7 @@ interface RightBarProps {
onShowOnTheMap?: () => void;
onShowMapSettings?: () => void;
onShowTrackingDialog?: () => void;
onShowWormholesReference?: () => void;
additionalContent?: ReactNode;
}
@@ -21,6 +22,7 @@ export const RightBar = ({
onShowOnTheMap,
onShowMapSettings,
onShowTrackingDialog,
onShowWormholesReference,
additionalContent,
}: RightBarProps) => {
const {
@@ -90,6 +92,16 @@ export const RightBar = ({
<i className="pi pi-hashtag"></i>
</button>
</WdTooltipWrapper>
<WdTooltipWrapper content="Wormholes Reference" position={TooltipPosition.left}>
<button
className="btn bg-transparent text-gray-400 hover:text-white border-transparent hover:bg-transparent py-2 h-auto min-h-auto"
type="button"
onClick={onShowWormholesReference}
>
<i className="pi pi-bullseye"></i>
</button>
</WdTooltipWrapper>
</div>
</>
)}

View File

@@ -1,8 +1,9 @@
import { createContext, useCallback, useContext, useRef, useState } from 'react';
import { OutCommand, TrackingCharacter } from '@/hooks/Mapper/types';
import { createContext, useCallback, useContext, useRef, useState, useEffect } from 'react';
import { Commands, OutCommand, TrackingCharacter } from '@/hooks/Mapper/types';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { IncomingEvent, WithChildren } from '@/hooks/Mapper/types/common.ts';
import { CommandInCharactersTrackingInfo } from '@/hooks/Mapper/types/commandsIn.ts';
import { useMapEventListener } from '@/hooks/Mapper/events';
type DiffTrackingInfo = { characterId: string; tracked: boolean };
@@ -122,6 +123,14 @@ export const TrackingProvider = ({ children }: WithChildren) => {
[outCommand],
);
// Listen for refresh_tracking_data event (triggered when ACL members change)
useMapEventListener(event => {
if (event.name === Commands.refreshTrackingData) {
loadTracking();
return true;
}
});
return (
<TrackingContext.Provider
value={{

View File

@@ -0,0 +1,170 @@
import { useMemo, useState } from 'react';
import { Dialog } from 'primereact/dialog';
import { DataTable } from 'primereact/datatable';
import { Column } from 'primereact/column';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { WormholeDataRaw } from '@/hooks/Mapper/types';
import { RespawnTag, WHClassView } from '@/hooks/Mapper/components/ui-kit';
import { kgToTons } from '@/hooks/Mapper/utils/kgToTons.ts';
import { WORMHOLE_CLASS_STYLES, WORMHOLES_ADDITIONAL_INFO } from '@/hooks/Mapper/components/map/constants.ts';
import clsx from 'clsx';
import { InputText } from 'primereact/inputtext';
import { IconField } from 'primereact/iconfield';
import { InputIcon } from 'primereact/inputicon';
const renderSpawns = (w: WormholeDataRaw) => (
<div className="flex gap-1 flex-wrap">
{w.src.map(s => {
const group = s.split('-')[0];
const info = WORMHOLES_ADDITIONAL_INFO[group];
if (!info) {
return (
<span
key={s}
className="px-[4px] py-[1px] rounded bg-stone-800 text-stone-300 text-xs border border-stone-700"
>
{s}
</span>
);
}
const cls = WORMHOLE_CLASS_STYLES[String(info.wormholeClassID)] || '';
const label = `${info.shortName}`;
return (
<span
key={s}
className={clsx(cls, 'px-[4px] py-[1px] rounded text-xs border border-stone-700 bg-stone-900/40')}
>
{label}
</span>
);
})}
</div>
);
const renderName = (w: WormholeDataRaw) => (
<div className="flex items-center gap-2">
<WHClassView
whClassName={w.name}
noOffset
useShortTitle
classNameWh="overflow-hidden text-ellipsis whitespace-nowrap"
/>
</div>
);
const renderRespawn = (w: WormholeDataRaw) => (
<div className="flex gap-1 flex-wrap">
{w.respawn.map(r => (
<RespawnTag key={r} value={r} />
))}
</div>
);
export interface WormholeSignaturesDialogProps {
visible: boolean;
onHide: () => void;
}
export const WormholeSignaturesDialog = ({ visible, onHide }: WormholeSignaturesDialogProps) => {
const {
data: { wormholes },
} = useMapRootState();
const [filter, setFilter] = useState('');
const filtered = useMemo(() => {
const q = filter.trim().toLowerCase();
if (!q) return wormholes;
return wormholes.filter(w => {
const destInfo = WORMHOLES_ADDITIONAL_INFO[w.dest];
const spawnsLabels = w.src
.map(s => {
const group = s.split('-')[0];
const info = WORMHOLES_ADDITIONAL_INFO[group];
if (!info) return s;
return `${info.title} ${info.shortName}`.trim();
})
.join(' ');
return [
w.name,
destInfo?.title,
destInfo?.shortName,
spawnsLabels,
String(w.total_mass),
String(w.max_mass_per_jump),
w.lifetime,
w.respawn.join(','),
]
.filter(Boolean)
.join(' ')
.toLowerCase()
.includes(q);
});
}, [wormholes, filter]);
return (
<Dialog
header="Wormholes Reference"
visible={visible}
draggable={false}
resizable={false}
className="w-[950px] h-[600px]"
onHide={onHide}
contentClassName="!p-0 flex flex-col h-full"
>
<div className="p-3 flex items-center justify-between gap-2 border-b border-stone-800">
<div className="font-semibold text-sm text-stone-200">Reference list of all wormhole types</div>
<IconField iconPosition="right">
<InputIcon
className={clsx('pi pi-times', {
['cursor-pointer text-stone-400 hover:text-stone-200']: filter,
['text-stone-700 opacity-50 cursor-default']: !filter,
})}
onClick={() => filter && setFilter('')}
role="button"
aria-label="Clear search"
aria-disabled={!filter}
title={filter ? 'Clear' : 'Nothing to clear'}
/>
<InputText className="w-64" placeholder="Search" value={filter} onChange={e => setFilter(e.target.value)} />
</IconField>
</div>
<div className="flex-1 p-3 overflow-x-hidden">
<DataTable value={filtered} size="small" scrollable scrollHeight="flex" stripedRows>
<Column header="Type" body={renderName} className="w-[160px]" bodyClassName="whitespace-normal break-words" />
<Column header="Spawns In" body={renderSpawns} bodyClassName="whitespace-normal break-words text-[13px]" />
<Column
field="lifetime"
header="Lifetime"
className="w-[90px]"
bodyClassName="whitespace-normal break-words text-[13px]"
/>
<Column
header="Total Mass"
className="w-[120px]"
body={(w: WormholeDataRaw) => kgToTons(w.total_mass)}
bodyClassName="whitespace-normal break-words text-[13px]"
/>
<Column
header="Max/jump"
className="w-[120px]"
body={(w: WormholeDataRaw) => kgToTons(w.max_mass_per_jump)}
bodyClassName="whitespace-normal break-words text-[13px]"
/>
<Column
header="Respawn"
className="w-[150px]"
body={renderRespawn}
bodyClassName="whitespace-normal break-words text-[13px]"
/>
</DataTable>
</div>
</Dialog>
);
};

View File

@@ -0,0 +1 @@
export * from './WormholeSignaturesDialog';

View File

@@ -23,17 +23,17 @@ export const useCharacterActivityHandlers = () => {
/**
* Handle showing the character activity dialog
*/
const handleShowActivity = useCallback(() => {
const handleShowActivity = useCallback((days?: number | null) => {
// Update local state to show the dialog
update(state => ({
...state,
showCharacterActivity: true,
}));
// Send the command to the server
// Send the command to the server with optional days parameter
outCommand({
type: OutCommand.showActivity,
data: {},
data: days !== undefined ? { days } : {},
});
}, [outCommand, update]);

View File

@@ -3,6 +3,7 @@ import {
WdEveEntityPortrait,
WdEveEntityPortraitSize,
WdEveEntityPortraitType,
WdImgButton,
WdTooltipWrapper,
} from '@/hooks/Mapper/components/ui-kit';
import { SystemView } from '@/hooks/Mapper/components/ui-kit/SystemView';
@@ -14,6 +15,8 @@ import { Commands } from '@/hooks/Mapper/types/mapHandlers';
import clsx from 'clsx';
import { useCallback } from 'react';
import classes from './CharacterCard.module.scss';
import { ZKB_ICON } from '@/hooks/Mapper/icons';
import { charEveWhoLink, charZKBLink } from '@/hooks/Mapper/helpers/linkHelpers.ts';
export type CharacterCardProps = {
compact?: boolean;
@@ -66,6 +69,9 @@ export const CharacterCard = ({
const shipType = char.ship?.ship_type_info?.name;
const locationShown = showSystem && char.location?.solar_system_id;
const handleOpenZKB = useCallback(() => window.open(charZKBLink(char.eve_id), '_blank'), [char]);
const handleOpenEveWho = useCallback(() => window.open(charEveWhoLink(char.eve_id), '_blank'), [char]);
// INFO: Simple mode shows only the name and the ally/corp icon. By default it is the compact view
if (simpleMode) {
return (
@@ -244,7 +250,24 @@ export const CharacterCard = ({
{char.name}
</span>
{showTicker && <span className="flex-shrink-0 text-gray-400 ml-1">[{tickerText}]</span>}
<div className={clsx('flex gap-1 items-center h-full ml-[6px]')}>
<WdImgButton
width={16}
height={16}
tooltip={{ position: TooltipPosition.top, content: 'Open zkillboard' }}
source={ZKB_ICON}
onClick={handleOpenZKB}
className="min-w-[16px]"
/>
<WdImgButton
tooltip={{ position: TooltipPosition.top, content: 'Open Eve Who' }}
className={clsx('pi pi-user', '!text-[12px] relative top-[-1px]')}
onClick={handleOpenEveWho}
/>
</div>
</div>
{locationShown ? (
<div className="text-gray-300 text-xs overflow-hidden text-ellipsis whitespace-nowrap">
<SystemView

View File

@@ -1,8 +1,5 @@
.MarkdownCommentRoot {
border-left-width: 3px;
.MarkdownTextViewer {
@apply text-[12px] leading-[1.2] text-stone-300 break-words;
@apply bg-gradient-to-r from-stone-600/40 via-stone-600/10 to-stone-600/0;
.h1 {
@apply text-[12px] font-normal m-0 p-0 border-none break-words whitespace-normal;
@@ -56,6 +53,10 @@
@apply font-bold text-green-400 break-words whitespace-normal;
}
strong {
font-weight: bold;
}
i, em {
@apply italic text-pink-400 break-words whitespace-normal;
}

View File

@@ -2,10 +2,16 @@ import Markdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import remarkBreaks from 'remark-breaks';
import classes from './MarkdownTextViewer.module.scss';
const REMARK_PLUGINS = [remarkGfm, remarkBreaks];
type MarkdownTextViewerProps = { children: string };
export const MarkdownTextViewer = ({ children }: MarkdownTextViewerProps) => {
return <Markdown remarkPlugins={REMARK_PLUGINS}>{children}</Markdown>;
return (
<div className={classes.MarkdownTextViewer}>
<Markdown remarkPlugins={REMARK_PLUGINS}>{children}</Markdown>
</div>
);
};

View File

@@ -0,0 +1,20 @@
import { Respawn } from '@/hooks/Mapper/types';
import clsx from 'clsx';
export const WORMHOLE_SPAWN_CLASSES_BG = {
[Respawn.static]: 'bg-lime-400/80 text-stone-950',
[Respawn.wandering]: 'bg-stone-800',
[Respawn.reverse]: 'bg-blue-400 text-stone-950',
};
type RespawnTagProps = { value: string };
export const RespawnTag = ({ value }: RespawnTagProps) => (
<span
className={clsx(
'px-[6px] py-[0px] rounded text-stone-300 text-[12px] font-[500] border border-stone-700',
WORMHOLE_SPAWN_CLASSES_BG[value as Respawn],
)}
>
{value}
</span>
);

View File

@@ -23,3 +23,4 @@ export * from './MenuItemWithInfo';
export * from './MarkdownTextViewer.tsx';
export * from './WdButton.tsx';
export * from './constants.ts';
export * from './RespawnTag';

View File

@@ -0,0 +1,2 @@
export const charZKBLink = (characterId: string) => `https://zkillboard.com/character/${characterId}/`;
export const charEveWhoLink = (characterId: string) => `https://evewho.com/character/${characterId}`;

View File

@@ -12,7 +12,7 @@ export const useCommandComments = () => {
}, []);
const removeComment = useCallback((data: CommandCommentRemoved) => {
ref.current.removeComment(data.solarSystemId.toString(), data.commentId);
ref.current.removeComment(data.solarSystemId, data.commentId);
}, []);
return { addComment, removeComment };

View File

@@ -1,5 +1,5 @@
import { useCallback, useRef, useState } from 'react';
import { CommentSystem, CommentType, OutCommand, OutCommandHandler, UseCommentsData } from '@/hooks/Mapper/types';
import { useCallback, useRef, useState } from 'react';
interface UseCommentsProps {
outCommand: OutCommandHandler;
@@ -8,12 +8,12 @@ interface UseCommentsProps {
export const useComments = ({ outCommand }: UseCommentsProps): UseCommentsData => {
const [lastUpdateKey, setLastUpdateKey] = useState(0);
const commentBySystemsRef = useRef<Map<string, CommentSystem>>(new Map());
const commentBySystemsRef = useRef<Map<number, CommentSystem>>(new Map());
const ref = useRef({ outCommand });
ref.current = { outCommand };
const loadComments = useCallback(async (systemId: string) => {
const loadComments = useCallback(async (systemId: number) => {
let cSystem = commentBySystemsRef.current.get(systemId);
if (cSystem?.loading || cSystem?.loaded) {
return;
@@ -45,7 +45,7 @@ export const useComments = ({ outCommand }: UseCommentsProps): UseCommentsData =
setLastUpdateKey(x => x + 1);
}, []);
const addComment = useCallback((systemId: string, comment: CommentType) => {
const addComment = useCallback((systemId: number, comment: CommentType) => {
const cSystem = commentBySystemsRef.current.get(systemId);
if (cSystem) {
cSystem.comments.push(comment);
@@ -61,8 +61,9 @@ export const useComments = ({ outCommand }: UseCommentsProps): UseCommentsData =
setLastUpdateKey(x => x + 1);
}, []);
const removeComment = useCallback((systemId: string, commentId: string) => {
const removeComment = useCallback((systemId: number, commentId: string) => {
const cSystem = commentBySystemsRef.current.get(systemId);
console.log('cSystem', cSystem);
if (!cSystem) {
return;
}

View File

@@ -63,127 +63,122 @@ export const useMapRootHandlers = (ref: ForwardedRef<MapHandlers>) => {
const { pingAdded, pingCancelled } = useCommandPings();
const { characterActivityData, trackingCharactersData, userSettingsUpdated } = useCommandsActivity();
useImperativeHandle(
ref,
() => {
return {
command(type, data) {
switch (type) {
case Commands.init: // USED
mapInit(data as CommandInit);
break;
case Commands.addSystems: // USED
addSystems(data as CommandAddSystems);
break;
case Commands.updateSystems: // USED
updateSystems(data as CommandUpdateSystems);
break;
case Commands.removeSystems: // USED
removeSystems(data as CommandRemoveSystems);
break;
case Commands.addConnections: // USED
addConnections(data as CommandAddConnections);
break;
case Commands.removeConnections: // USED
removeConnections(data as CommandRemoveConnections);
break;
case Commands.updateConnection: // USED
updateConnection(data as CommandUpdateConnection);
break;
case Commands.charactersUpdated: // USED
charactersUpdated(data as CommandCharactersUpdated);
break;
case Commands.characterAdded: // USED
characterAdded(data as CommandCharacterAdded);
break;
case Commands.characterRemoved: // USED
characterRemoved(data as CommandCharacterRemoved);
break;
case Commands.characterUpdated: // USED
characterUpdated(data as CommandCharacterUpdated);
break;
case Commands.presentCharacters: // USED
presentCharacters(data as CommandPresentCharacters);
break;
case Commands.mapUpdated: // USED
mapUpdated(data as CommandMapUpdated);
break;
case Commands.routes:
mapRoutes(data as CommandRoutes);
break;
case Commands.userRoutes:
mapUserRoutes(data as CommandRoutes);
break;
useImperativeHandle(ref, () => {
return {
command(type, data) {
switch (type) {
case Commands.init: // USED
mapInit(data as CommandInit);
break;
case Commands.addSystems: // USED
addSystems(data as CommandAddSystems);
break;
case Commands.updateSystems: // USED
updateSystems(data as CommandUpdateSystems);
break;
case Commands.removeSystems: // USED
removeSystems(data as CommandRemoveSystems);
break;
case Commands.addConnections: // USED
addConnections(data as CommandAddConnections);
break;
case Commands.removeConnections: // USED
removeConnections(data as CommandRemoveConnections);
break;
case Commands.updateConnection: // USED
updateConnection(data as CommandUpdateConnection);
break;
case Commands.charactersUpdated: // USED
charactersUpdated(data as CommandCharactersUpdated);
break;
case Commands.characterAdded: // USED
characterAdded(data as CommandCharacterAdded);
break;
case Commands.characterRemoved: // USED
characterRemoved(data as CommandCharacterRemoved);
break;
case Commands.characterUpdated: // USED
characterUpdated(data as CommandCharacterUpdated);
break;
case Commands.presentCharacters: // USED
presentCharacters(data as CommandPresentCharacters);
break;
case Commands.mapUpdated: // USED
mapUpdated(data as CommandMapUpdated);
break;
case Commands.routes:
mapRoutes(data as CommandRoutes);
break;
case Commands.userRoutes:
mapUserRoutes(data as CommandRoutes);
break;
case Commands.signaturesUpdated: // USED
updateSystemSignatures(data as CommandSignaturesUpdated);
break;
case Commands.signaturesUpdated: // USED
updateSystemSignatures(data as CommandSignaturesUpdated);
break;
case Commands.linkSignatureToSystem: // USED
setTimeout(() => {
updateLinkSignatureToSystem(data as CommandLinkSignatureToSystem);
}, 200);
break;
case Commands.linkSignatureToSystem: // USED
setTimeout(() => {
updateLinkSignatureToSystem(data as CommandLinkSignatureToSystem);
}, 200);
break;
case Commands.centerSystem: // USED
// do nothing here
break;
case Commands.centerSystem: // USED
// do nothing here
break;
case Commands.selectSystem: // USED
// do nothing here
break;
case Commands.selectSystem: // USED
// do nothing here
break;
case Commands.killsUpdated:
// do nothing here
break;
case Commands.killsUpdated:
// do nothing here
break;
case Commands.detailedKillsUpdated:
updateDetailedKills(data as Record<string, DetailedKill[]>);
break;
case Commands.detailedKillsUpdated:
updateDetailedKills(data as Record<string, DetailedKill[]>);
break;
case Commands.characterActivityData:
characterActivityData(data as CommandCharacterActivityData);
break;
case Commands.characterActivityData:
characterActivityData(data as CommandCharacterActivityData);
break;
case Commands.trackingCharactersData:
trackingCharactersData(data as CommandTrackingCharactersData);
break;
case Commands.trackingCharactersData:
trackingCharactersData(data as CommandTrackingCharactersData);
break;
case Commands.updateActivity:
break;
case Commands.updateActivity:
break;
case Commands.updateTracking:
break;
case Commands.updateTracking:
break;
case Commands.userSettingsUpdated:
userSettingsUpdated(data as CommandUserSettingsUpdated);
break;
case Commands.userSettingsUpdated:
userSettingsUpdated(data as CommandUserSettingsUpdated);
break;
case Commands.systemCommentAdded:
addComment(data as CommandCommentAdd);
break;
case Commands.systemCommentAdded:
addComment(data as CommandCommentAdd);
break;
case Commands.systemCommentRemoved:
removeComment(data as CommandCommentRemoved);
break;
case Commands.systemCommentRemoved:
removeComment(data as CommandCommentRemoved);
break;
case Commands.pingAdded:
pingAdded(data as CommandPingAdded);
break;
case Commands.pingAdded:
pingAdded(data as CommandPingAdded);
break;
case Commands.pingCancelled:
pingCancelled(data as CommandPingCancelled);
break;
case Commands.pingCancelled:
pingCancelled(data as CommandPingCancelled);
break;
default:
console.warn(`JOipP Interface handlers: Unknown command: ${type}`, data);
break;
}
default:
console.warn(`JOipP Interface handlers: Unknown command: ${type}`, data);
break;
}
emitMapEvent({ name: type, data });
},
};
},
[],
);
emitMapEvent({ name: type, data });
},
};
}, []);
};

View File

@@ -68,4 +68,5 @@ export interface ActivitySummary {
passages: number;
connections: number;
signatures: number;
timestamp?: string;
}

View File

@@ -13,9 +13,9 @@ export type CommentSystem = {
};
export interface UseCommentsData {
loadComments: (systemId: string) => Promise<void>;
addComment: (systemId: string, comment: CommentType) => void;
removeComment: (systemId: string, commentId: string) => void;
comments: Map<string, CommentSystem>;
loadComments: (systemId: number) => Promise<void>;
addComment: (systemId: number, comment: CommentType) => void;
removeComment: (systemId: number, commentId: string) => void;
comments: Map<number, CommentSystem>;
lastUpdateKey: number;
}

View File

@@ -6,11 +6,17 @@ export type PassageLimitedCharacterType = Pick<
>;
export type Passage = {
from: boolean;
inserted_at: string; // Date
ship: ShipTypeRaw;
character: PassageLimitedCharacterType;
};
export type PassageWithSourceTarget = {
source: string;
target: string;
} & Passage;
export type ConnectionInfoOutput = {
marl_eol_time: string;
};

View File

@@ -38,6 +38,7 @@ export enum Commands {
updateTracking = 'update_tracking',
userSettingsUpdated = 'user_settings_updated',
showTracking = 'show_tracking',
refreshTrackingData = 'refresh_tracking_data',
pingAdded = 'ping_added',
pingCancelled = 'ping_cancelled',
}
@@ -74,6 +75,7 @@ export type Command =
| Commands.updateActivity
| Commands.updateTracking
| Commands.showTracking
| Commands.refreshTrackingData
| Commands.pingAdded
| Commands.pingCancelled;
@@ -131,7 +133,7 @@ export type CommandLinkSignatureToSystem = {
};
export type CommandLinkSignaturesUpdated = number;
export type CommandCommentAdd = {
solarSystemId: string;
solarSystemId: number;
comment: CommentType;
};
export type CommandCommentRemoved = {
@@ -145,6 +147,7 @@ export type CommandUserSettingsUpdated = {
};
export type CommandShowTracking = null;
export type CommandRefreshTrackingData = Record<string, never>;
export type CommandUpdateActivity = {
characterId: number;
systemId: number;
@@ -206,6 +209,7 @@ export interface CommandData {
[Commands.systemCommentRemoved]: CommandCommentRemoved;
[Commands.systemCommentsUpdated]: unknown;
[Commands.showTracking]: CommandShowTracking;
[Commands.refreshTrackingData]: CommandRefreshTrackingData;
[Commands.pingAdded]: CommandPingAdded;
[Commands.pingCancelled]: CommandPingCancelled;
}

View File

@@ -63,6 +63,7 @@ config :wanderer_app, WandererAppWeb.Endpoint,
]
config :wanderer_app,
environment: :dev,
dev_routes: true
# Do not include metadata nor timestamps in development logs

View File

@@ -1,5 +1,8 @@
import Config
# Set environment at compile time for modules using Application.compile_env
config :wanderer_app, environment: :prod
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix assets.deploy` task,

View File

@@ -177,7 +177,34 @@ config :wanderer_app,
],
extra_characters_50: map_subscription_extra_characters_50_price,
extra_hubs_10: map_subscription_extra_hubs_10_price
}
},
# Finch pool configuration - separate pools for different services
# ESI Character Tracking pool - high capacity for bulk character operations
# With 30+ TrackerPools × ~100 concurrent tasks, need large pool
finch_esi_character_pool_size:
System.get_env("WANDERER_FINCH_ESI_CHARACTER_POOL_SIZE", "200") |> String.to_integer(),
finch_esi_character_pool_count:
System.get_env("WANDERER_FINCH_ESI_CHARACTER_POOL_COUNT", "4") |> String.to_integer(),
# ESI General pool - standard capacity for general ESI operations
finch_esi_general_pool_size:
System.get_env("WANDERER_FINCH_ESI_GENERAL_POOL_SIZE", "50") |> String.to_integer(),
finch_esi_general_pool_count:
System.get_env("WANDERER_FINCH_ESI_GENERAL_POOL_COUNT", "4") |> String.to_integer(),
# Webhooks pool - isolated from ESI rate limits
finch_webhooks_pool_size:
System.get_env("WANDERER_FINCH_WEBHOOKS_POOL_SIZE", "25") |> String.to_integer(),
finch_webhooks_pool_count:
System.get_env("WANDERER_FINCH_WEBHOOKS_POOL_COUNT", "2") |> String.to_integer(),
# Default pool - everything else (email, license manager, etc.)
finch_default_pool_size:
System.get_env("WANDERER_FINCH_DEFAULT_POOL_SIZE", "25") |> String.to_integer(),
finch_default_pool_count:
System.get_env("WANDERER_FINCH_DEFAULT_POOL_COUNT", "2") |> String.to_integer(),
# Character tracker concurrency settings
# Location updates need high concurrency for <2s response with 3000+ characters
location_concurrency:
System.get_env("WANDERER_LOCATION_CONCURRENCY", "#{System.schedulers_online() * 12}")
|> String.to_integer()
config :ueberauth, Ueberauth,
providers: [
@@ -237,7 +264,7 @@ config :logger,
case config_env() do
:prod -> "info"
:dev -> "info"
:test -> "debug"
:test -> "warning"
end
)
)
@@ -405,7 +432,7 @@ config :wanderer_app, :license_manager,
config :wanderer_app, :sse,
enabled:
config_dir
|> get_var_from_path_or_env("WANDERER_SSE_ENABLED", "true")
|> get_var_from_path_or_env("WANDERER_SSE_ENABLED", "false")
|> String.to_existing_atom(),
max_connections_total:
config_dir |> get_int_from_path_or_env("WANDERER_SSE_MAX_CONNECTIONS", 1000),
@@ -420,6 +447,6 @@ config :wanderer_app, :sse,
config :wanderer_app, :external_events,
webhooks_enabled:
config_dir
|> get_var_from_path_or_env("WANDERER_WEBHOOKS_ENABLED", "true")
|> get_var_from_path_or_env("WANDERER_WEBHOOKS_ENABLED", "false")
|> String.to_existing_atom(),
webhook_timeout_ms: config_dir |> get_int_from_path_or_env("WANDERER_WEBHOOK_TIMEOUT_MS", 15000)
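A minimal sketch of how the Finch pool settings added above could be consumed when starting a Finch instance in a supervision tree. The pool name WandererApp.Finch.ESICharacter and the ESI base URL are assumptions for illustration, not the app's actual setup:

esi_size = Application.get_env(:wanderer_app, :finch_esi_character_pool_size, 200)
esi_count = Application.get_env(:wanderer_app, :finch_esi_character_pool_count, 4)

children = [
  # size = connections per pool, count = number of pools for this origin (assumed name/URL)
  {Finch,
   name: WandererApp.Finch.ESICharacter,
   pools: %{
     "https://esi.evetech.net" => [size: esi_size, count: esi_count]
   }}
]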

View File

@@ -1,5 +1,9 @@
import Config
# Disable Ash async operations in tests to ensure transactional safety
# This prevents Ash from spawning tasks that could bypass the Ecto sandbox
config :ash, :disable_async?, true
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
@@ -24,7 +28,11 @@ config :wanderer_app,
pubsub_client: Test.PubSubMock,
cached_info: WandererApp.CachedInfo.Mock,
character_api_disabled: false,
environment: :test
environment: :test,
map_subscriptions_enabled: false,
wanderer_kills_service_enabled: false,
sse: [enabled: false],
external_events: [webhooks_enabled: false]
# We don't run a server during test. If one is required,
# you can enable the server option below.

View File

@@ -60,19 +60,17 @@ defmodule WandererApp.Api.AccessList do
# Added :api_key to the accepted attributes
accept [:name, :description, :owner_id, :api_key]
primary?(true)
argument :owner_id, :uuid, allow_nil?: false
change manage_relationship(:owner_id, :owner, on_lookup: :relate, on_no_match: nil)
end
update :update do
accept [:name, :description, :owner_id, :api_key]
primary?(true)
require_atomic? false
end
update :assign_owner do
accept [:owner_id]
require_atomic? false
end
end

View File

@@ -53,7 +53,11 @@ defmodule WandererApp.Api.AccessListMember do
:role
]
defaults [:create, :read, :update, :destroy]
defaults [:create, :read, :destroy]
update :update do
require_atomic? false
end
read :read_by_access_list do
argument(:access_list_id, :string, allow_nil?: false)
@@ -67,12 +71,14 @@ defmodule WandererApp.Api.AccessListMember do
update :block do
accept([])
require_atomic? false
change(set_attribute(:blocked, true))
end
update :unblock do
accept([])
require_atomic? false
change(set_attribute(:blocked, false))
end

View File

@@ -0,0 +1,80 @@
defmodule WandererApp.Api.ActorHelpers do
@moduledoc """
Utilities for extracting actor information from Ash contexts.
Provides helper functions for working with ActorWithMap and extracting
user, map, and character information from various context formats.
"""
alias WandererApp.Api.ActorWithMap
@doc """
Extract map from actor or context.
Handles various context formats:
- Direct ActorWithMap struct
- Context map with :actor key
- Context map with :map key
- Ash.Resource.Change.Context struct
"""
def get_map(%{actor: %ActorWithMap{map: %{} = map}}), do: map
def get_map(%{map: %{} = map}), do: map
# Handle Ash.Resource.Change.Context struct
def get_map(%Ash.Resource.Change.Context{actor: %ActorWithMap{map: %{} = map}}), do: map
def get_map(%Ash.Resource.Change.Context{actor: _}), do: nil
def get_map(context) when is_map(context) do
# For plain maps, check private.actor
with private when is_map(private) <- Map.get(context, :private),
%ActorWithMap{map: %{} = map} <- Map.get(private, :actor) do
map
else
_ -> nil
end
end
def get_map(_), do: nil
@doc """
Extract user from actor.
Handles:
- ActorWithMap struct
- Direct user struct with :id field
"""
def get_user(%ActorWithMap{user: user}), do: user
def get_user(%{id: _} = user), do: user
def get_user(_), do: nil
@doc """
Get character IDs for the actor.
Used for ACL filtering to determine which resources the user can access.
Returns {:ok, list} or {:ok, []} if no characters found.
"""
def get_character_ids(%ActorWithMap{user: user}), do: get_character_ids(user)
def get_character_ids(%{characters: characters}) when is_list(characters) do
{:ok, Enum.map(characters, & &1.id)}
end
def get_character_ids(%{characters: %Ecto.Association.NotLoaded{}, id: user_id}) do
# Load characters from database
load_characters_by_id(user_id)
end
def get_character_ids(%{id: user_id}) do
# Fallback: load user with characters
load_characters_by_id(user_id)
end
def get_character_ids(_), do: {:ok, []}
defp load_characters_by_id(user_id) do
case WandererApp.Api.User.by_id(user_id, load: [:characters]) do
{:ok, user} -> {:ok, Enum.map(user.characters, & &1.id)}
_ -> {:ok, []}
end
end
end
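A small usage sketch of the helpers above; user and map are assumed to be already-loaded structs, and the variable names are illustrative:

# Build the combined actor (see ActorWithMap in the next file)
actor = WandererApp.Api.ActorWithMap.new(user, map)

# get_map/1 accepts a plain context map with :actor (or an Ash change context)
actor_map = WandererApp.Api.ActorHelpers.get_map(%{actor: actor})

# get_character_ids/1 always returns {:ok, ids}, loading characters from the DB if needed
{:ok, character_ids} = WandererApp.Api.ActorHelpers.get_character_ids(actor)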

View File

@@ -0,0 +1,15 @@
defmodule WandererApp.Api.ActorWithMap do
@moduledoc """
Wraps a user and map together as an actor for token-based authentication.
When API requests use Bearer token auth, the token identifies both the user
(map owner) and the map. This struct allows passing both through Ash's actor system.
"""
@enforce_keys [:user, :map]
defstruct [:user, :map]
def new(user, map) do
%__MODULE__{user: user, map: map}
end
end
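A hedged sketch of passing such an actor into an Ash call, assuming Ash 3-style Ash.read/2; preparations like FilterConnectionsByActorMap (shown further below) can then scope results to the actor's map:

actor = WandererApp.Api.ActorWithMap.new(map_owner, map)

{:ok, connections} =
  WandererApp.Api.MapConnection
  |> Ash.Query.for_read(:read)
  |> Ash.read(actor: actor)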

View File

@@ -0,0 +1,39 @@
defmodule WandererApp.Api.Changes.InjectMapFromActor do
@moduledoc """
Ash change that injects map_id from the authenticated actor.
For token-based auth, the map is determined by the API token.
This change automatically sets map_id, so clients don't need to provide it.
"""
use Ash.Resource.Change
alias WandererApp.Api.ActorHelpers
@impl true
def change(changeset, _opts, context) do
case ActorHelpers.get_map(context) do
%{id: map_id} ->
Ash.Changeset.force_change_attribute(changeset, :map_id, map_id)
_other ->
# nil or unexpected return shape - check for direct map_id
# Check params (input), arguments, and attributes (in that order)
map_id = Map.get(changeset.params, :map_id) ||
Ash.Changeset.get_argument(changeset, :map_id) ||
Ash.Changeset.get_attribute(changeset, :map_id)
case map_id do
nil ->
Ash.Changeset.add_error(changeset,
field: :map_id,
message: "map_id is required (provide via token or attribute)"
)
_map_id ->
# map_id provided directly (internal calls, tests)
changeset
end
end
end
end
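A sketch of the two paths this change handles; the solar system IDs are placeholder values and the call shapes assume Ash 3-style Ash.create/2:

# Token-based request: map_id is injected from the ActorWithMap actor
{:ok, _conn} =
  WandererApp.Api.MapConnection
  |> Ash.Changeset.for_create(
    :create,
    %{solar_system_source: 30_000_142, solar_system_target: 30_002_187},
    actor: WandererApp.Api.ActorWithMap.new(owner, map)
  )
  |> Ash.create()

# Internal/test call: no actor, so map_id must be supplied directly
{:ok, _conn} =
  WandererApp.Api.MapConnection
  |> Ash.Changeset.for_create(:create, %{
    map_id: map.id,
    solar_system_source: 30_000_142,
    solar_system_target: 30_002_187
  })
  |> Ash.create()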

View File

@@ -69,11 +69,6 @@ defmodule WandererApp.Api.Character do
filter(expr(user_id == ^arg(:user_id) and deleted == false))
end
read :available_by_map do
argument(:map_id, :uuid, allow_nil?: false)
filter(expr(user_id == ^arg(:user_id) and deleted == false))
end
read :last_active do
argument(:from, :utc_datetime, allow_nil?: false)
@@ -100,6 +95,7 @@ defmodule WandererApp.Api.Character do
update :mark_as_deleted do
accept([])
require_atomic? false
change(atomic_update(:deleted, true))
change(atomic_update(:user_id, nil))
@@ -107,6 +103,7 @@ defmodule WandererApp.Api.Character do
update :update_online do
accept([:online])
require_atomic? false
end
update :update_location do

View File

@@ -33,7 +33,11 @@ defmodule WandererApp.Api.CorpWalletTransaction do
:ref_type
]
defaults [:create, :read, :update, :destroy]
defaults [:create, :read, :destroy]
update :update do
require_atomic? false
end
create :new do
accept [

View File

@@ -36,7 +36,11 @@ defmodule WandererApp.Api.License do
:expire_at
]
defaults [:read, :update, :destroy]
defaults [:read, :destroy]
update :update do
require_atomic? false
end
create :create do
primary? true
@@ -58,12 +62,14 @@ defmodule WandererApp.Api.License do
update :invalidate do
accept([])
require_atomic? false
change(set_attribute(:is_valid, false))
end
update :set_valid do
accept([])
require_atomic? false
change(set_attribute(:is_valid, true))
end

View File

@@ -8,9 +8,13 @@ defmodule WandererApp.Api.Map do
alias Ash.Resource.Change.Builtins
require Logger
postgres do
repo(WandererApp.Repo)
table("maps_v1")
migration_defaults scopes: "'{wormholes}'"
end
json_api do
@@ -44,6 +48,7 @@ defmodule WandererApp.Api.Map do
code_interface do
define(:available, action: :available)
define(:get_map_by_slug, action: :by_slug, args: [:slug])
define(:by_api_key, action: :by_api_key, args: [:api_key])
define(:new, action: :new)
define(:create, action: :create)
define(:update, action: :update)
@@ -54,6 +59,7 @@ defmodule WandererApp.Api.Map do
define(:mark_as_deleted, action: :mark_as_deleted)
define(:update_api_key, action: :update_api_key)
define(:toggle_webhooks, action: :toggle_webhooks)
define(:toggle_sse, action: :toggle_sse)
define(:by_id,
get_by: [:id],
@@ -90,22 +96,35 @@ defmodule WandererApp.Api.Map do
filter expr(slug == ^arg(:slug))
end
read :by_api_key do
get? true
argument :api_key, :string, allow_nil?: false
prepare WandererApp.Api.Preparations.SecureApiKeyLookup
end
read :available do
prepare WandererApp.Api.Preparations.FilterMapsByRoles
end
create :new do
accept [:name, :slug, :description, :scope, :only_tracked_characters, :owner_id]
primary?(true)
accept [
:name,
:slug,
:description,
:scope,
:scopes,
:only_tracked_characters,
:owner_id,
:sse_enabled
]
argument :owner_id, :uuid, allow_nil?: false
primary?(true)
argument :create_default_acl, :boolean, allow_nil?: true
argument :acls, {:array, :uuid}, allow_nil?: true
argument :acls_text_input, :string, allow_nil?: true
argument :scope_text_input, :string, allow_nil?: true
argument :acls_empty_selection, :string, allow_nil?: true
change manage_relationship(:owner_id, :owner, on_lookup: :relate, on_no_match: nil)
change manage_relationship(:acls, type: :append_and_remove)
change WandererApp.Api.Changes.SlugifyName
end
@@ -113,7 +132,17 @@ defmodule WandererApp.Api.Map do
update :update do
primary? true
require_atomic? false
accept [:name, :slug, :description, :scope, :only_tracked_characters, :owner_id]
accept [
:name,
:slug,
:description,
:scope,
:scopes,
:only_tracked_characters,
:owner_id,
:sse_enabled
]
argument :owner_id_text_input, :string, allow_nil?: true
argument :acls_text_input, :string, allow_nil?: true
@@ -128,6 +157,9 @@ defmodule WandererApp.Api.Map do
)
change WandererApp.Api.Changes.SlugifyName
# Validate subscription when enabling SSE
validate &validate_sse_subscription/2
end
update :update_acls do
@@ -142,33 +174,46 @@ defmodule WandererApp.Api.Map do
update :assign_owner do
accept [:owner_id]
require_atomic? false
end
update :update_hubs do
accept [:hubs]
require_atomic? false
end
update :update_options do
accept [:options]
require_atomic? false
end
update :mark_as_deleted do
accept([])
require_atomic? false
change(set_attribute(:deleted, true))
end
update :update_api_key do
accept [:public_api_key]
require_atomic? false
end
update :toggle_webhooks do
accept [:webhooks_enabled]
require_atomic? false
end
update :toggle_sse do
require_atomic? false
accept [:sse_enabled]
# Validate subscription when enabling SSE
validate &validate_sse_subscription/2
end
create :duplicate do
accept [:name, :description, :scope, :only_tracked_characters]
accept [:name, :description, :scope, :scopes, :only_tracked_characters]
argument :source_map_id, :uuid, allow_nil?: false
argument :copy_acls, :boolean, default: true
argument :copy_user_settings, :boolean, default: true
@@ -184,9 +229,14 @@ defmodule WandererApp.Api.Map do
description =
Ash.Changeset.get_attribute(changeset, :description) || source_map.description
# Use provided scopes or fall back to source map scopes
scopes =
Ash.Changeset.get_attribute(changeset, :scopes) || source_map.scopes
changeset
|> Ash.Changeset.change_attribute(:description, description)
|> Ash.Changeset.change_attribute(:scope, source_map.scope)
|> Ash.Changeset.change_attribute(:scopes, scopes)
|> Ash.Changeset.change_attribute(
:only_tracked_characters,
source_map.only_tracked_characters
@@ -312,12 +362,37 @@ defmodule WandererApp.Api.Map do
public?(true)
end
attribute :sse_enabled, :boolean do
default(false)
allow_nil?(false)
public?(true)
end
attribute :scopes, {:array, :atom} do
default([:wormholes])
allow_nil?(true)
public?(true)
constraints(
items: [
one_of: [
:wormholes,
:hi,
:low,
:null,
:pochven
]
]
)
end
create_timestamp(:inserted_at)
update_timestamp(:updated_at)
end
identities do
identity :unique_slug, [:slug]
identity :unique_public_api_key, [:public_api_key]
end
relationships do
@@ -344,4 +419,49 @@ defmodule WandererApp.Api.Map do
public? false
end
end
# SSE Subscription Validation
#
# This validation allows the change when any of the following holds:
# 1. SSE is being disabled (always allowed)
# 2. The map is being created (validation is skipped and re-checked on the first update)
# 3. Community Edition mode (always allowed)
# 4. Enterprise mode with an active subscription
defp validate_sse_subscription(changeset, _context) do
sse_enabled = Ash.Changeset.get_attribute(changeset, :sse_enabled)
map_id = changeset.data.id
subscriptions_enabled = WandererApp.Env.map_subscriptions_enabled?()
cond do
# Not enabling SSE - no validation needed
not sse_enabled ->
:ok
# Map creation (no ID yet) - skip validation
is_nil(map_id) ->
:ok
# Community Edition mode - always allow
not subscriptions_enabled ->
:ok
# Enterprise mode - check subscription
true ->
validate_active_subscription(map_id)
end
end
defp validate_active_subscription(map_id) do
case WandererApp.Map.is_subscription_active?(map_id) do
{:ok, true} ->
:ok
{:ok, false} ->
{:error, field: :sse_enabled, message: "Active subscription required to enable SSE"}
{:error, reason} ->
Logger.error("Error checking subscription status: #{inspect(reason)}")
{:error, field: :sse_enabled, message: "Unable to verify subscription status"}
end
end
end
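A hedged usage sketch of the new scopes attribute and the SSE toggle through the code_interface functions defined above; the name, slug, and owner are illustrative values:

# Create a map limited to wormhole and Pochven systems
{:ok, map} =
  WandererApp.Api.Map.new(%{
    name: "Home Chain",
    slug: "home-chain",
    scopes: [:wormholes, :pochven],
    owner_id: owner.id
  })

# Enabling SSE runs validate_sse_subscription/2: always allowed in Community Edition,
# but requires an active subscription when map subscriptions are enabled
{:ok, map} = WandererApp.Api.Map.toggle_sse(map, %{sse_enabled: true})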

View File

@@ -61,7 +61,11 @@ defmodule WandererApp.Api.MapAccessList do
:access_list_id
]
defaults [:create, :read, :update, :destroy]
defaults [:create, :read, :destroy]
update :update do
require_atomic? false
end
read :read_by_map do
argument(:map_id, :string, allow_nil?: false)

View File

@@ -27,7 +27,11 @@ defmodule WandererApp.Api.MapChainPassages do
:solar_system_target_id
]
defaults [:create, :read, :update, :destroy]
defaults [:create, :read, :destroy]
update :update do
require_atomic? false
end
create :new do
accept [
@@ -40,12 +44,6 @@ defmodule WandererApp.Api.MapChainPassages do
]
primary?(true)
argument :map_id, :uuid, allow_nil?: false
argument :character_id, :uuid, allow_nil?: false
change manage_relationship(:map_id, :map, on_lookup: :relate, on_no_match: nil)
change manage_relationship(:character_id, :character, on_lookup: :relate, on_no_match: nil)
end
action :by_map_id, {:array, :struct} do

View File

@@ -81,12 +81,6 @@ defmodule WandererApp.Api.MapCharacterSettings do
:character_id,
:tracked
]
argument :map_id, :uuid, allow_nil?: false
argument :character_id, :uuid, allow_nil?: false
change manage_relationship(:map_id, :map, on_lookup: :relate, on_no_match: nil)
change manage_relationship(:character_id, :character, on_lookup: :relate, on_no_match: nil)
end
read :by_map_filtered do
@@ -134,6 +128,8 @@ defmodule WandererApp.Api.MapCharacterSettings do
require_atomic? false
accept([
:tracked,
:followed,
:ship,
:ship_name,
:ship_item_id,
@@ -145,8 +141,7 @@ defmodule WandererApp.Api.MapCharacterSettings do
update :track do
accept [:map_id, :character_id]
argument :map_id, :string, allow_nil?: false
argument :character_id, :uuid, allow_nil?: false
require_atomic? false
# Load the record first
load do
@@ -159,8 +154,7 @@ defmodule WandererApp.Api.MapCharacterSettings do
update :untrack do
accept [:map_id, :character_id]
argument :map_id, :string, allow_nil?: false
argument :character_id, :uuid, allow_nil?: false
require_atomic? false
# Load the record first
load do
@@ -173,8 +167,7 @@ defmodule WandererApp.Api.MapCharacterSettings do
update :follow do
accept [:map_id, :character_id]
argument :map_id, :string, allow_nil?: false
argument :character_id, :uuid, allow_nil?: false
require_atomic? false
# Load the record first
load do
@@ -187,8 +180,7 @@ defmodule WandererApp.Api.MapCharacterSettings do
update :unfollow do
accept [:map_id, :character_id]
argument :map_id, :string, allow_nil?: false
argument :character_id, :uuid, allow_nil?: false
require_atomic? false
# Load the record first
load do

View File

@@ -4,7 +4,8 @@ defmodule WandererApp.Api.MapConnection do
use Ash.Resource,
domain: WandererApp.Api,
data_layer: AshPostgres.DataLayer,
extensions: [AshJsonApi.Resource]
extensions: [AshJsonApi.Resource],
primary_read_warning?: false
postgres do
repo(WandererApp.Repo)
@@ -73,7 +74,56 @@ defmodule WandererApp.Api.MapConnection do
:custom_info
]
defaults [:create, :read, :update, :destroy]
create :create do
primary? true
accept [
:map_id,
:solar_system_source,
:solar_system_target,
:type,
:ship_size_type,
:mass_status,
:time_status,
:wormhole_type,
:count_of_passage,
:locked,
:custom_info
]
# Inject map_id from token
change WandererApp.Api.Changes.InjectMapFromActor
end
read :read do
primary? true
# Security: Filter to only connections from actor's map
prepare WandererApp.Api.Preparations.FilterConnectionsByActorMap
end
update :update do
primary? true
accept [
:solar_system_source,
:solar_system_target,
:type,
:ship_size_type,
:mass_status,
:time_status,
:wormhole_type,
:count_of_passage,
:locked,
:custom_info
]
require_atomic? false
end
destroy :destroy do
primary? true
end
read :read_by_map do
argument(:map_id, :string, allow_nil?: false)
@@ -110,30 +160,37 @@ defmodule WandererApp.Api.MapConnection do
update :update_mass_status do
accept [:mass_status]
require_atomic? false
end
update :update_time_status do
accept [:time_status]
require_atomic? false
end
update :update_ship_size_type do
accept [:ship_size_type]
require_atomic? false
end
update :update_locked do
accept [:locked]
require_atomic? false
end
update :update_custom_info do
accept [:custom_info]
require_atomic? false
end
update :update_type do
accept [:type]
require_atomic? false
end
update :update_wormhole_type do
accept [:wormhole_type]
require_atomic? false
end
end
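The new create action relies on WandererApp.Api.Changes.InjectMapFromActor to stamp map_id from the token's actor context; that module is not part of this diff. A minimal sketch of such a change, assuming ActorHelpers.get_map/1 can resolve the map for change contexts the same way the preparations further below use it for query contexts (illustrative module name, not the project's actual implementation):

# Illustrative sketch only - the real InjectMapFromActor may differ.
defmodule WandererApp.Api.Changes.InjectMapFromActorSketch do
  use Ash.Resource.Change

  alias WandererApp.Api.ActorHelpers

  @impl true
  def change(changeset, _opts, context) do
    # Assumption: the authenticated map is recoverable from the change context.
    case ActorHelpers.get_map(context) do
      %{id: map_id} ->
        Ash.Changeset.force_change_attribute(changeset, :map_id, map_id)

      _ ->
        Ash.Changeset.add_error(changeset, field: :map_id, message: "no map in actor context")
    end
  end
end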

View File

@@ -30,7 +30,11 @@ defmodule WandererApp.Api.MapInvite do
:token
]
defaults [:read, :update, :destroy]
defaults [:read, :destroy]
update :update do
require_atomic? false
end
create :new do
accept [
@@ -41,10 +45,6 @@ defmodule WandererApp.Api.MapInvite do
]
primary?(true)
argument :map_id, :uuid, allow_nil?: true
change manage_relationship(:map_id, :map, on_lookup: :relate, on_no_match: nil)
end
read :by_map do

View File

@@ -3,7 +3,8 @@ defmodule WandererApp.Api.MapPing do
use Ash.Resource,
domain: WandererApp.Api,
data_layer: AshPostgres.DataLayer
data_layer: AshPostgres.DataLayer,
primary_read_warning?: false
postgres do
repo(WandererApp.Repo)
@@ -36,7 +37,18 @@ defmodule WandererApp.Api.MapPing do
:message
]
defaults [:read, :update, :destroy]
defaults [:destroy]
update :update do
require_atomic? false
end
read :read do
primary? true
# Security: Filter to only pings from actor's map
prepare WandererApp.Api.Preparations.FilterPingsByActorMap
end
create :new do
accept [
@@ -48,14 +60,6 @@ defmodule WandererApp.Api.MapPing do
]
primary?(true)
argument :map_id, :uuid, allow_nil?: false
argument :system_id, :uuid, allow_nil?: false
argument :character_id, :uuid, allow_nil?: false
change manage_relationship(:map_id, :map, on_lookup: :relate, on_no_match: nil)
change manage_relationship(:system_id, :system, on_lookup: :relate, on_no_match: nil)
change manage_relationship(:character_id, :character, on_lookup: :relate, on_no_match: nil)
end
read :by_map do

View File

@@ -65,7 +65,11 @@ defmodule WandererApp.Api.MapSolarSystem do
:sun_type_id
]
defaults [:read, :destroy, :update]
defaults [:read, :destroy]
update :update do
require_atomic? false
end
create :create do
primary? true

View File

@@ -24,7 +24,11 @@ defmodule WandererApp.Api.MapSolarSystemJumps do
:to_solar_system_id
]
defaults [:read, :destroy, :update]
defaults [:read, :destroy]
update :update do
require_atomic? false
end
create :create do
primary? true

View File

@@ -45,7 +45,11 @@ defmodule WandererApp.Api.MapState do
:connections_start_time
]
defaults [:read, :update, :destroy]
defaults [:read, :destroy]
update :update do
require_atomic? false
end
create :create do
primary? true

View File

@@ -62,7 +62,11 @@ defmodule WandererApp.Api.MapSubscription do
:auto_renew?
]
defaults [:create, :read, :update, :destroy]
defaults [:create, :read, :destroy]
update :update do
require_atomic? false
end
read :all_active do
prepare build(sort: [updated_at: :asc], load: [:map])
@@ -88,32 +92,39 @@ defmodule WandererApp.Api.MapSubscription do
update :update_plan do
accept [:plan]
require_atomic? false
end
update :update_characters_limit do
accept [:characters_limit]
require_atomic? false
end
update :update_hubs_limit do
accept [:hubs_limit]
require_atomic? false
end
update :update_active_till do
accept [:active_till]
require_atomic? false
end
update :update_auto_renew do
accept [:auto_renew?]
require_atomic? false
end
update :cancel do
accept([])
require_atomic? false
change(set_attribute(:status, :cancelled))
end
update :expire do
accept([])
require_atomic? false
change(set_attribute(:status, :expired))
end

View File

@@ -24,16 +24,12 @@ defmodule WandererApp.Api.MapSystem do
use Ash.Resource,
domain: WandererApp.Api,
data_layer: AshPostgres.DataLayer,
extensions: [AshJsonApi.Resource]
extensions: [AshJsonApi.Resource],
primary_read_warning?: false
postgres do
repo(WandererApp.Repo)
table("map_system_v1")
custom_indexes do
# Partial index for efficient visible systems query
index [:map_id], where: "visible = true", name: "map_system_v1_map_id_visible_index"
end
end
json_api do
@@ -70,10 +66,7 @@ defmodule WandererApp.Api.MapSystem do
define(:upsert, action: :upsert)
define(:destroy, action: :destroy)
define(:by_id,
get_by: [:id],
action: :read
)
define :by_id, action: :get_by_id, args: [:id], get?: true
define(:by_solar_system_id,
get_by: [:solar_system_id],
@@ -103,6 +96,7 @@ defmodule WandererApp.Api.MapSystem do
define(:update_status, action: :update_status)
define(:update_tag, action: :update_tag)
define(:update_temporary_name, action: :update_temporary_name)
define(:update_custom_name, action: :update_custom_name)
define(:update_labels, action: :update_labels)
define(:update_linked_sig_eve_id, action: :update_linked_sig_eve_id)
define(:update_position, action: :update_position)
@@ -128,7 +122,56 @@ defmodule WandererApp.Api.MapSystem do
:linked_sig_eve_id
]
defaults [:create, :update, :destroy]
create :create do
primary? true
accept [
:map_id,
:name,
:solar_system_id,
:position_x,
:position_y,
:status,
:visible,
:locked,
:custom_name,
:description,
:tag,
:temporary_name,
:labels,
:added_at,
:linked_sig_eve_id
]
# Inject map_id from token
change WandererApp.Api.Changes.InjectMapFromActor
end
update :update do
primary? true
require_atomic? false
# Note: name and solar_system_id are not in accept
# - solar_system_id should be immutable (identifier)
# - name has allow_nil? false which makes it required in JSON:API
accept [
:position_x,
:position_y,
:status,
:visible,
:locked,
:custom_name,
:description,
:tag,
:temporary_name,
:labels,
:linked_sig_eve_id
]
end
destroy :destroy do
primary? true
end
create :upsert do
primary? false
@@ -158,6 +201,9 @@ defmodule WandererApp.Api.MapSystem do
read :read do
primary?(true)
# Security: Filter to only systems from actor's map
prepare WandererApp.Api.Preparations.FilterSystemsByActorMap
pagination offset?: true,
default_limit: 100,
max_page_size: 500,
@@ -165,6 +211,11 @@ defmodule WandererApp.Api.MapSystem do
required?: false
end
read :get_by_id do
argument(:id, :string, allow_nil?: false)
filter(expr(id == ^arg(:id)))
end
read :read_all_by_map do
argument(:map_id, :string, allow_nil?: false)
filter(expr(map_id == ^arg(:map_id)))
@@ -186,44 +237,59 @@ defmodule WandererApp.Api.MapSystem do
update :update_name do
accept [:name]
require_atomic? false
end
update :update_description do
accept [:description]
require_atomic? false
end
update :update_locked do
accept [:locked]
require_atomic? false
end
update :update_status do
accept [:status]
require_atomic? false
end
update :update_tag do
accept [:tag]
require_atomic? false
end
update :update_temporary_name do
accept [:temporary_name]
require_atomic? false
end
update :update_custom_name do
accept [:custom_name]
require_atomic? false
end
update :update_labels do
accept [:labels]
require_atomic? false
end
update :update_position do
accept [:position_x, :position_y]
require_atomic? false
change(set_attribute(:visible, true))
end
update :update_linked_sig_eve_id do
accept [:linked_sig_eve_id]
require_atomic? false
end
update :update_visible do
accept [:visible]
require_atomic? false
end
end
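With the code interface change above (define :by_id, action: :get_by_id, args: [:id], get?: true), lookups now go through the new get_by_id read. A usage sketch with a placeholder id:

# get?: true makes the generated function return a single record or an error tuple.
case WandererApp.Api.MapSystem.by_id("00000000-0000-0000-0000-000000000000") do
  {:ok, system} -> system.solar_system_id
  {:error, _not_found} -> nil
end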

View File

@@ -59,12 +59,6 @@ defmodule WandererApp.Api.MapSystemComment do
:character_id,
:text
]
argument :system_id, :uuid, allow_nil?: false
argument :character_id, :uuid, allow_nil?: false
change manage_relationship(:system_id, :system, on_lookup: :relate, on_no_match: nil)
change manage_relationship(:character_id, :character, on_lookup: :relate, on_no_match: nil)
end
read :by_system_id do

View File

@@ -111,10 +111,6 @@ defmodule WandererApp.Api.MapSystemSignature do
:custom_info,
:deleted
]
argument :system_id, :uuid, allow_nil?: false
change manage_relationship(:system_id, :system, on_lookup: :relate, on_no_match: nil)
end
update :update do
@@ -139,14 +135,17 @@ defmodule WandererApp.Api.MapSystemSignature do
update :update_linked_system do
accept [:linked_system_id]
require_atomic? false
end
update :update_type do
accept [:type]
require_atomic? false
end
update :update_group do
accept [:group]
require_atomic? false
end
read :by_system_id do

View File

@@ -122,13 +122,6 @@ defmodule WandererApp.Api.MapSystemStructure do
:status,
:end_time
]
argument :system_id, :uuid, allow_nil?: false
change manage_relationship(:system_id, :system,
on_lookup: :relate,
on_no_match: nil
)
end
update :update do

View File

@@ -29,7 +29,11 @@ defmodule WandererApp.Api.MapTransaction do
:amount
]
defaults [:create, :read, :update, :destroy]
defaults [:create, :read, :destroy]
update :update do
require_atomic? false
end
read :by_map do
argument(:map_id, :string, allow_nil?: false)

View File

@@ -53,22 +53,30 @@ defmodule WandererApp.Api.MapUserSettings do
:settings
]
defaults [:create, :read, :update, :destroy]
defaults [:create, :read, :destroy]
update :update do
require_atomic? false
end
update :update_settings do
accept [:settings]
require_atomic? false
end
update :update_main_character do
accept [:main_character_eve_id]
require_atomic? false
end
update :update_following_character do
accept [:following_character_eve_id]
require_atomic? false
end
update :update_hubs do
accept [:hubs]
require_atomic? false
end
end

View File

@@ -58,6 +58,8 @@ defmodule WandererApp.Api.MapWebhookSubscription do
:consecutive_failures,
:secret
]
require_atomic? false
end
read :by_map do

View File

@@ -0,0 +1,64 @@
defmodule WandererApp.Api.Preparations.FilterByActorMap do
@moduledoc """
Shared filtering logic for actor map context.
Filters queries to only return resources belonging to the actor's map.
Used by preparations for MapSystem, MapConnection, and MapPing resources.
"""
require Ash.Query
alias WandererApp.Api.ActorHelpers
@doc """
Filter a query by the actor's map context.
If a map is found in the context, filters the query to only return
resources where map_id matches. If no map context exists, returns
a query that will return no results.
## Parameters
* `query` - The Ash query to filter
* `context` - The Ash context containing actor/map information
* `resource_name` - Name of the resource for telemetry (atom)
## Examples
iex> query = Ash.Query.new(WandererApp.Api.MapSystem)
iex> context = %{map: %{id: "map-123"}}
iex> result = FilterByActorMap.filter_by_map(query, context, :map_system)
# Returns query filtered by map_id == "map-123"
"""
def filter_by_map(query, context, resource_name) do
case ActorHelpers.get_map(context) do
%{id: map_id} ->
emit_telemetry(resource_name, map_id)
Ash.Query.filter(query, map_id == ^map_id)
nil ->
emit_telemetry_no_context(resource_name)
Ash.Query.filter(query, false)
_other ->
emit_telemetry_no_context(resource_name)
Ash.Query.filter(query, false)
end
end
defp emit_telemetry(resource_name, map_id) do
:telemetry.execute(
[:wanderer_app, :ash, :preparation, :filter_by_map],
%{count: 1},
%{resource: resource_name, map_id: map_id}
)
end
defp emit_telemetry_no_context(resource_name) do
:telemetry.execute(
[:wanderer_app, :ash, :preparation, :filter_by_map, :no_context],
%{count: 1},
%{resource: resource_name}
)
end
end
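The shared preparation emits telemetry on both the filtered and the no-context path; a minimal handler sketch for observing those events (handler id and log message are illustrative):

require Logger

:telemetry.attach_many(
  "wanderer-filter-by-map-logger",
  [
    [:wanderer_app, :ash, :preparation, :filter_by_map],
    [:wanderer_app, :ash, :preparation, :filter_by_map, :no_context]
  ],
  fn event, _measurements, metadata, _config ->
    # metadata carries the resource name and, when present, the map id
    Logger.debug("filter_by_map #{inspect(event)}: #{inspect(metadata)}")
  end,
  nil
)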

View File

@@ -0,0 +1,17 @@
defmodule WandererApp.Api.Preparations.FilterConnectionsByActorMap do
@moduledoc """
Ash preparation that filters connections to only those from the actor's map.
For token-based auth, this ensures the API only returns connections
from the map associated with the token.
"""
use Ash.Resource.Preparation
alias WandererApp.Api.Preparations.FilterByActorMap
@impl true
def prepare(query, _opts, context) do
FilterByActorMap.filter_by_map(query, context, :map_connection)
end
end

View File

@@ -0,0 +1,17 @@
defmodule WandererApp.Api.Preparations.FilterPingsByActorMap do
@moduledoc """
Ash preparation that filters pings to only those from the actor's map.
For token-based auth, this ensures the API only returns pings
from the map associated with the token.
"""
use Ash.Resource.Preparation
alias WandererApp.Api.Preparations.FilterByActorMap
@impl true
def prepare(query, _opts, context) do
FilterByActorMap.filter_by_map(query, context, :map_ping)
end
end

View File

@@ -0,0 +1,17 @@
defmodule WandererApp.Api.Preparations.FilterSystemsByActorMap do
@moduledoc """
Ash preparation that filters systems to only those from the actor's map.
For token-based auth, this ensures the API only returns systems
from the map associated with the token.
"""
use Ash.Resource.Preparation
alias WandererApp.Api.Preparations.FilterByActorMap
@impl true
def prepare(query, _opts, context) do
FilterByActorMap.filter_by_map(query, context, :map_system)
end
end

View File

@@ -0,0 +1,62 @@
defmodule WandererApp.Api.Preparations.SecureApiKeyLookup do
@moduledoc """
Preparation that performs secure API key lookup using constant-time comparison.
This preparation:
1. Queries for the map with the given API key using database index
2. Performs constant-time comparison to verify the key matches
3. Returns the map only if the secure comparison passes
The constant-time comparison prevents timing attacks where an attacker
could deduce information about valid API keys by measuring response times.
"""
use Ash.Resource.Preparation
require Ash.Query
@dummy_key "dummy_key_for_timing_consistency_00000000"
def prepare(query, _params, _context) do
api_key = Ash.Query.get_argument(query, :api_key)
if is_nil(api_key) or api_key == "" do
# Return empty result for invalid input
Ash.Query.filter(query, expr(false))
else
# First, do the database lookup using the index
# Then apply constant-time comparison in after_action
query
|> Ash.Query.filter(expr(public_api_key == ^api_key))
|> Ash.Query.after_action(fn _query, results ->
verify_results_with_secure_compare(results, api_key)
end)
end
end
defp verify_results_with_secure_compare(results, provided_key) do
case results do
[map] ->
# Map found - verify with constant-time comparison
stored_key = map.public_api_key || @dummy_key
if Plug.Crypto.secure_compare(stored_key, provided_key) do
{:ok, [map]}
else
# Keys don't match (shouldn't happen if DB returned it, but safety check)
{:ok, []}
end
[] ->
# No map found - still do a comparison to maintain consistent timing
# This prevents timing attacks from distinguishing "not found" from "found but wrong"
_result = Plug.Crypto.secure_compare(@dummy_key, provided_key)
{:ok, []}
_multiple ->
# Multiple results - shouldn't happen with unique constraint
# Do comparison for timing consistency and return error
_result = Plug.Crypto.secure_compare(@dummy_key, provided_key)
{:ok, []}
end
end
end
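The read action that wires this preparation in is not part of this diff; assuming a Map resource with a public_api_key attribute, the hookup could look roughly like this (action and argument names are hypothetical):

read :by_public_api_key do
  get? true
  argument :api_key, :string, allow_nil?: false, sensitive?: true
  # Performs the indexed lookup plus constant-time verification described above
  prepare WandererApp.Api.Preparations.SecureApiKeyLookup
end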

View File

@@ -49,7 +49,11 @@ defmodule WandererApp.Api.ShipTypeInfo do
:volume
]
defaults [:read, :destroy, :update]
defaults [:read, :destroy]
update :update do
require_atomic? false
end
create :create do
primary? true

View File

@@ -51,10 +51,15 @@ defmodule WandererApp.Api.User do
:hash
]
defaults [:create, :read, :update, :destroy]
defaults [:create, :read, :destroy]
update :update do
require_atomic? false
end
update :update_last_map do
accept([:last_map_id])
require_atomic? false
end
update :update_balance do

View File

@@ -4,7 +4,8 @@ defmodule WandererApp.Api.UserActivity do
use Ash.Resource,
domain: WandererApp.Api,
data_layer: AshPostgres.DataLayer,
extensions: [AshJsonApi.Resource]
extensions: [AshJsonApi.Resource],
primary_read_warning?: false
require Ash.Expr
@@ -55,7 +56,8 @@ defmodule WandererApp.Api.UserActivity do
:entity_type,
:event_type,
:event_data,
:user_id
:user_id,
:character_id
]
read :read do
@@ -70,14 +72,8 @@ defmodule WandererApp.Api.UserActivity do
end
create :new do
accept [:entity_id, :entity_type, :event_type, :event_data]
accept [:entity_id, :entity_type, :event_type, :event_data, :user_id, :character_id]
primary?(true)
argument :user_id, :uuid, allow_nil?: true
argument :character_id, :uuid, allow_nil?: true
change manage_relationship(:user_id, :user, on_lookup: :relate, on_no_match: nil)
change manage_relationship(:character_id, :character, on_lookup: :relate, on_no_match: nil)
end
destroy :archive do

View File

@@ -28,10 +28,6 @@ defmodule WandererApp.Api.UserTransaction do
create :new do
accept [:journal_ref_id, :user_id, :date, :amount, :corporation_id]
primary?(true)
argument :user_id, :uuid, allow_nil?: false
change manage_relationship(:user_id, :user, on_lookup: :relate, on_no_match: nil)
end
end

View File

@@ -16,15 +16,48 @@ defmodule WandererApp.Application do
WandererApp.Vault,
WandererApp.Repo,
{Phoenix.PubSub, name: WandererApp.PubSub, adapter_name: Phoenix.PubSub.PG2},
# Multiple Finch pools for different services to prevent connection pool exhaustion
# ESI Character Tracking pool - high capacity for bulk character operations
{
Finch,
name: WandererApp.Finch.ESI.CharacterTracking,
pools: %{
default: [
size: Application.get_env(:wanderer_app, :finch_esi_character_pool_size, 100),
count: Application.get_env(:wanderer_app, :finch_esi_character_pool_count, 4)
]
}
},
# ESI General pool - standard capacity for general ESI operations
{
Finch,
name: WandererApp.Finch.ESI.General,
pools: %{
default: [
size: Application.get_env(:wanderer_app, :finch_esi_general_pool_size, 50),
count: Application.get_env(:wanderer_app, :finch_esi_general_pool_count, 4)
]
}
},
# Webhooks pool - isolated from ESI rate limits
{
Finch,
name: WandererApp.Finch.Webhooks,
pools: %{
default: [
size: Application.get_env(:wanderer_app, :finch_webhooks_pool_size, 25),
count: Application.get_env(:wanderer_app, :finch_webhooks_pool_count, 2)
]
}
},
# Default pool - everything else (email, license manager, etc.)
{
Finch,
name: WandererApp.Finch,
pools: %{
default: [
# number of connections per pool
size: 50,
# number of pools (so total 50 connections)
count: 4
size: Application.get_env(:wanderer_app, :finch_default_pool_size, 25),
count: Application.get_env(:wanderer_app, :finch_default_pool_count, 2)
]
}
},
@@ -120,13 +153,16 @@ defmodule WandererApp.Application do
:ok
end
defp maybe_start_corp_wallet_tracker(true),
do: [
WandererApp.StartCorpWalletTrackerTask
]
defp maybe_start_corp_wallet_tracker(true) do
# Don't start corp wallet tracker in test environment
if Application.get_env(:wanderer_app, :environment) == :test do
[]
else
[WandererApp.StartCorpWalletTrackerTask]
end
end
defp maybe_start_corp_wallet_tracker(_),
do: []
defp maybe_start_corp_wallet_tracker(_), do: []
defp maybe_start_kills_services do
# Don't start kills services in test environment

View File
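The Finch pool sizes and counts added above are read via Application.get_env with built-in defaults; they could be overridden in config/runtime.exs along these lines (values are illustrative, not tuning advice):

# config/runtime.exs - illustrative values only
config :wanderer_app,
  finch_esi_character_pool_size: 100,
  finch_esi_character_pool_count: 4,
  finch_esi_general_pool_size: 50,
  finch_esi_general_pool_count: 4,
  finch_webhooks_pool_size: 25,
  finch_webhooks_pool_count: 2,
  finch_default_pool_size: 25,
  finch_default_pool_count: 2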

@@ -45,7 +45,7 @@ defmodule WandererApp.Cache do
def insert({id, key}, value, opts) when is_binary(id) and (is_binary(key) or is_atom(key)),
do: insert("#{id}:#{key}", value, opts)
def insert(key, nil, opts) when is_binary(key) or is_atom(key), do: delete(key)
def insert(key, nil, _opts) when is_binary(key) or is_atom(key), do: delete(key)
def insert(key, value, opts) when is_binary(key) or is_atom(key), do: put(key, value, opts)
def insert_or_update(key, value, update_fn, opts \\ [])

View File

@@ -1,6 +1,8 @@
defmodule WandererApp.CachedInfo do
require Logger
alias WandererAppWeb.Helpers.APIUtils
def run(_arg) do
:ok = cache_trig_systems()
end
@@ -29,14 +31,73 @@ defmodule WandererApp.CachedInfo do
)
end)
Cachex.get(:ship_types_cache, type_id)
get_ship_type_from_cache_or_api(type_id)
{:ok, ship_type} ->
{:ok, ship_type}
end
end
defp get_ship_type_from_cache_or_api(type_id) do
case Cachex.get(:ship_types_cache, type_id) do
{:ok, ship_type} when not is_nil(ship_type) ->
{:ok, ship_type}
{:ok, nil} ->
case WandererApp.Esi.get_type_info(type_id) do
{:ok, info} when not is_nil(info) ->
ship_type = parse_type(type_id, info)
{:ok, group_info} = get_group_info(ship_type.group_id)
{:ok, ship_type_info} =
WandererApp.Api.ShipTypeInfo |> Ash.create(ship_type |> Map.merge(group_info))
{:ok,
ship_type_info
|> Map.take([
:type_id,
:group_id,
:group_name,
:name,
:description,
:mass,
:capacity,
:volume
])}
{:error, reason} ->
Logger.error("Failed to get ship_type #{type_id} from ESI: #{inspect(reason)}")
{:ok, nil}
error ->
Logger.error("Failed to get ship_type #{type_id} from ESI: #{inspect(error)}")
{:ok, nil}
end
end
end
def get_group_info(nil), do: {:ok, nil}
def get_group_info(group_id) do
case WandererApp.Esi.get_group_info(group_id) do
{:ok, info} when not is_nil(info) ->
{:ok, parse_group(group_id, info)}
{:error, reason} ->
Logger.error("Failed to get group_info #{group_id} from ESI: #{inspect(reason)}")
{:ok, %{group_name: ""}}
error ->
Logger.error("Failed to get group_info #{group_id} from ESI: #{inspect(error)}")
{:ok, %{group_name: ""}}
end
end
def get_system_static_info(nil), do: {:ok, nil}
def get_system_static_info(solar_system_id) do
{:ok, solar_system_id} = APIUtils.parse_int(solar_system_id)
case Cachex.get(:system_static_info_cache, solar_system_id) do
{:ok, nil} ->
case WandererApp.Api.MapSolarSystem.read() do
@@ -149,6 +210,25 @@ defmodule WandererApp.CachedInfo do
end
end
defp parse_group(group_id, group) do
%{
group_id: group_id,
group_name: Map.get(group, "name")
}
end
defp parse_type(type_id, type) do
%{
type_id: type_id,
name: Map.get(type, "name"),
description: Map.get(type, "description"),
group_id: Map.get(type, "group_id"),
mass: "#{Map.get(type, "mass")}",
capacity: "#{Map.get(type, "capacity")}",
volume: "#{Map.get(type, "volume")}"
}
end
defp build_jump_index() do
case get_solar_system_jumps() do
{:ok, jumps} ->

View File

@@ -331,7 +331,7 @@ defmodule WandererApp.Character do
do:
{:ok,
Enum.map(eve_ids, fn eve_id ->
Task.async(fn -> apply(WandererApp.Esi.ApiClient, method, [eve_id]) end)
Task.async(fn -> apply(WandererApp.Esi, method, [eve_id]) end)
end)
# 145000 == Timeout in milliseconds
|> Enum.map(fn task -> Task.await(task, 145_000) end)

View File

@@ -43,13 +43,14 @@ defmodule WandererApp.Character.Activity do
## Parameters
- `map_id`: ID of the map
- `current_user`: Current user struct (used only to get user settings)
- `days`: Optional number of days to filter activity (nil for all time)
## Returns
- List of processed activity data
"""
def process_character_activity(map_id, current_user) do
def process_character_activity(map_id, current_user, days \\ nil) do
with {:ok, map_user_settings} <- get_map_user_settings(map_id, current_user.id),
{:ok, raw_activity} <- WandererApp.Map.get_character_activity(map_id),
{:ok, raw_activity} <- WandererApp.Map.get_character_activity(map_id, days),
{:ok, user_characters} <-
WandererApp.Api.Character.active_by_user(%{user_id: current_user.id}) do
process_activity_data(raw_activity, map_user_settings, user_characters)

View File
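With the new optional days argument, callers can scope activity to a recent window while existing call sites keep the all-time behaviour. A usage sketch (map_id and current_user are placeholders):

# Last 7 days of activity; passing nil (or omitting the argument) keeps the all-time behaviour.
WandererApp.Character.Activity.process_character_activity(map_id, current_user, 7)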

@@ -14,8 +14,8 @@ defmodule WandererApp.Character.Tracker do
active_maps: [],
is_online: false,
track_online: true,
track_location: true,
track_ship: true,
track_location: false,
track_ship: false,
track_wallet: false,
status: "new"
]
@@ -155,7 +155,7 @@ defmodule WandererApp.Character.Tracker do
)
end
if online.online == true && online.online != is_online do
if online.online == true && not is_online do
WandererApp.Cache.delete("character:#{character_id}:ship_error_time")
WandererApp.Cache.delete("character:#{character_id}:location_error_time")
WandererApp.Cache.delete("character:#{character_id}:location_error_count")
@@ -598,9 +598,6 @@ defmodule WandererApp.Character.Tracker do
{:error, :skipped}
end
_ ->
{:error, :skipped}
end
_ ->
@@ -709,6 +706,7 @@ defmodule WandererApp.Character.Tracker do
end
end
# when old_alliance_id != alliance_id and is_nil(alliance_id)
defp maybe_update_alliance(
%{character_id: character_id, alliance_id: old_alliance_id} = state,
alliance_id
@@ -734,6 +732,7 @@ defmodule WandererApp.Character.Tracker do
)
state
|> Map.merge(%{alliance_id: nil})
end
defp maybe_update_alliance(
@@ -771,6 +770,7 @@ defmodule WandererApp.Character.Tracker do
)
state
|> Map.merge(%{alliance_id: alliance_id})
_error ->
Logger.error("Failed to get alliance info for #{alliance_id}")
@@ -796,7 +796,7 @@ defmodule WandererApp.Character.Tracker do
corporation_id
|> WandererApp.Esi.get_corporation_info()
|> case do
{:ok, %{"name" => corporation_name, "ticker" => corporation_ticker} = corporation_info} ->
{:ok, %{"name" => corporation_name, "ticker" => corporation_ticker}} ->
{:ok, character} =
WandererApp.Character.get_character(character_id)
@@ -963,9 +963,7 @@ defmodule WandererApp.Character.Tracker do
),
do: %{
state
| track_online: true,
track_location: true,
track_ship: true
| track_online: true
}
defp maybe_start_online_tracking(
@@ -1001,7 +999,7 @@ defmodule WandererApp.Character.Tracker do
defp maybe_update_active_maps(
%{character_id: character_id, active_maps: active_maps} =
state,
%{map_id: map_id, track: true} = track_settings
%{map_id: map_id, track: true}
) do
if not Enum.member?(active_maps, map_id) do
WandererApp.Cache.put(
@@ -1009,11 +1007,6 @@ defmodule WandererApp.Character.Tracker do
DateTime.utc_now()
)
WandererApp.Cache.put(
"map:#{map_id}:character:#{character_id}:start_solar_system_id",
track_settings |> Map.get(:solar_system_id)
)
WandererApp.Cache.delete("map:#{map_id}:character:#{character_id}:solar_system_id")
WandererApp.Cache.delete("map:#{map_id}:character:#{character_id}:station_id")
WandererApp.Cache.delete("map:#{map_id}:character:#{character_id}:structure_id")
@@ -1064,7 +1057,7 @@ defmodule WandererApp.Character.Tracker do
)
end
state
%{state | track_location: false, track_ship: false}
end
defp maybe_stop_tracking(

View File

@@ -1,5 +1,18 @@
defmodule WandererApp.Character.TrackerManager.Impl do
@moduledoc false
@moduledoc """
Implementation of the character tracker manager.
This module manages the lifecycle of character trackers and handles:
- Starting/stopping character tracking
- Garbage collection of inactive trackers (5-minute timeout)
- Processing the untrack queue (5-minute interval)
## Logging
This module emits detailed logs for debugging character tracking issues:
- WARNING: Unexpected states or potential issues
- DEBUG: Start/stop tracking events, garbage collection, queue processing
"""
require Logger
defstruct [
@@ -27,6 +40,11 @@ defmodule WandererApp.Character.TrackerManager.Impl do
Process.send_after(self(), :garbage_collect, @garbage_collection_interval)
Process.send_after(self(), :untrack_characters, @untrack_characters_interval)
Logger.debug("[TrackerManager] Initialized with intervals: " <>
"garbage_collection=#{div(@garbage_collection_interval, 60_000)}min, " <>
"untrack=#{div(@untrack_characters_interval, 60_000)}min, " <>
"inactive_timeout=#{div(@inactive_character_timeout, 60_000)}min")
%{
characters: [],
opts: args
@@ -38,6 +56,10 @@ defmodule WandererApp.Character.TrackerManager.Impl do
{:ok, tracked_characters} = WandererApp.Cache.lookup("tracked_characters", [])
WandererApp.Cache.insert("tracked_characters", [])
if length(tracked_characters) > 0 do
Logger.debug("[TrackerManager] Restoring #{length(tracked_characters)} tracked characters from cache")
end
tracked_characters
|> Enum.each(fn character_id ->
start_tracking(state, character_id)
@@ -53,7 +75,9 @@ defmodule WandererApp.Character.TrackerManager.Impl do
true
)
Logger.debug(fn -> "Add character to track_characters_queue: #{inspect(character_id)}" end)
Logger.debug(fn ->
"[TrackerManager] Queuing character #{character_id} for tracking start"
end)
WandererApp.Cache.insert_or_update(
"track_characters_queue",
@@ -71,13 +95,33 @@ defmodule WandererApp.Character.TrackerManager.Impl do
with {:ok, characters} <- WandererApp.Cache.lookup("tracked_characters", []),
true <- Enum.member?(characters, character_id),
false <- WandererApp.Cache.has_key?("#{character_id}:track_requested") do
Logger.debug(fn -> "Shutting down character tracker: #{inspect(character_id)}" end)
Logger.debug(fn ->
"[TrackerManager] Stopping tracker for character #{character_id} - " <>
"reason: no active maps (garbage collected after #{div(@inactive_character_timeout, 60_000)} minutes)"
end)
WandererApp.Cache.delete("character:#{character_id}:last_active_time")
WandererApp.Character.delete_character_state(character_id)
WandererApp.Character.TrackerPoolDynamicSupervisor.stop_tracking(character_id)
:telemetry.execute([:wanderer_app, :character, :tracker, :stopped], %{count: 1})
:telemetry.execute(
[:wanderer_app, :character, :tracker, :stopped],
%{count: 1, system_time: System.system_time()},
%{character_id: character_id, reason: :garbage_collection}
)
else
{:ok, characters} when is_list(characters) ->
Logger.debug(fn ->
"[TrackerManager] Character #{character_id} not in tracked list, skipping stop"
end)
false ->
Logger.debug(fn ->
"[TrackerManager] Character #{character_id} has pending track request, skipping stop"
end)
_ ->
:ok
end
WandererApp.Cache.insert_or_update(
@@ -101,13 +145,35 @@ defmodule WandererApp.Character.TrackerManager.Impl do
} = track_settings
) do
if track do
Logger.debug(fn ->
"[TrackerManager] Enabling tracking for character #{character_id} on map #{map_id}"
end)
remove_from_untrack_queue(map_id, character_id)
{:ok, character_state} =
WandererApp.Character.Tracker.update_settings(character_id, track_settings)
case WandererApp.Character.Tracker.update_settings(character_id, track_settings) do
{:ok, character_state} ->
WandererApp.Character.update_character_state(character_id, character_state)
WandererApp.Character.update_character_state(character_id, character_state)
{:error, :not_found} ->
# Tracker process not running yet - this is expected during initial tracking setup
# The tracking_start_time cache key was already set by TrackingUtils.track_character
Logger.debug(fn ->
"[TrackerManager] Tracker not yet running for character #{character_id} - " <>
"tracking will be active via cache key"
end)
{:error, reason} ->
Logger.warning(fn ->
"[TrackerManager] Failed to update settings for character #{character_id}: #{inspect(reason)}"
end)
end
else
Logger.debug(fn ->
"[TrackerManager] Queuing character #{character_id} for untracking from map #{map_id} - " <>
"will be processed within #{div(@untrack_characters_interval, 60_000)} minutes"
end)
add_to_untrack_queue(map_id, character_id)
end
@@ -130,8 +196,19 @@ defmodule WandererApp.Character.TrackerManager.Impl do
"character_untrack_queue",
[],
fn untrack_queue ->
untrack_queue
|> Enum.reject(fn {m_id, c_id} -> m_id == map_id and c_id == character_id end)
original_length = length(untrack_queue)
filtered =
untrack_queue
|> Enum.reject(fn {m_id, c_id} -> m_id == map_id and c_id == character_id end)
if length(filtered) < original_length do
Logger.debug(fn ->
"[TrackerManager] Removed character #{character_id} from untrack queue for map #{map_id} - " <>
"character re-enabled tracking"
end)
end
filtered
end
)
end
@@ -170,6 +247,12 @@ defmodule WandererApp.Character.TrackerManager.Impl do
Process.send_after(self(), :check_start_queue, @check_start_queue_interval)
{:ok, track_characters_queue} = WandererApp.Cache.lookup("track_characters_queue", [])
if length(track_characters_queue) > 0 do
Logger.debug(fn ->
"[TrackerManager] Processing start queue: #{length(track_characters_queue)} characters"
end)
end
track_characters_queue
|> Enum.each(fn character_id ->
track_character(character_id, %{})
@@ -186,35 +269,66 @@ defmodule WandererApp.Character.TrackerManager.Impl do
{:ok, characters} = WandererApp.Cache.lookup("tracked_characters", [])
characters
|> Task.async_stream(
fn character_id ->
case WandererApp.Cache.lookup("character:#{character_id}:last_active_time") do
{:ok, nil} ->
:skip
Logger.debug(fn ->
"[TrackerManager] Running garbage collection on #{length(characters)} tracked characters"
end)
{:ok, last_active_time} ->
duration = DateTime.diff(DateTime.utc_now(), last_active_time, :second)
if duration * 1000 > @inactive_character_timeout do
{:stop, character_id}
else
inactive_characters =
characters
|> Task.async_stream(
fn character_id ->
case WandererApp.Cache.lookup("character:#{character_id}:last_active_time") do
{:ok, nil} ->
# Character is still active (no last_active_time set)
:skip
end
end
end,
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task,
timeout: :timer.seconds(60)
)
|> Enum.each(fn result ->
case result do
{:ok, {:stop, character_id}} ->
Process.send_after(self(), {:stop_track, character_id}, 100)
_ ->
:ok
end
{:ok, last_active_time} ->
duration_seconds = DateTime.diff(DateTime.utc_now(), last_active_time, :second)
duration_ms = duration_seconds * 1000
if duration_ms > @inactive_character_timeout do
Logger.debug(fn ->
"[TrackerManager] Character #{character_id} marked for garbage collection - " <>
"inactive for #{div(duration_seconds, 60)} minutes " <>
"(threshold: #{div(@inactive_character_timeout, 60_000)} minutes)"
end)
{:stop, character_id, duration_seconds}
else
:skip
end
end
end,
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task,
timeout: :timer.seconds(60)
)
|> Enum.reduce([], fn result, acc ->
case result do
{:ok, {:stop, character_id, duration}} ->
[{character_id, duration} | acc]
_ ->
acc
end
end)
if length(inactive_characters) > 0 do
Logger.debug(fn ->
"[TrackerManager] Garbage collection found #{length(inactive_characters)} inactive characters to stop"
end)
# Emit telemetry for garbage collection
:telemetry.execute(
[:wanderer_app, :character, :tracker, :garbage_collection],
%{inactive_count: length(inactive_characters), total_tracked: length(characters)},
%{character_ids: Enum.map(inactive_characters, fn {id, _} -> id end)}
)
end
inactive_characters
|> Enum.each(fn {character_id, _duration} ->
Process.send_after(self(), {:stop_track, character_id}, 100)
end)
state
@@ -226,9 +340,22 @@ defmodule WandererApp.Character.TrackerManager.Impl do
) do
Process.send_after(self(), :untrack_characters, @untrack_characters_interval)
WandererApp.Cache.lookup!("character_untrack_queue", [])
untrack_queue = WandererApp.Cache.lookup!("character_untrack_queue", [])
if length(untrack_queue) > 0 do
Logger.debug(fn ->
"[TrackerManager] Processing untrack queue: #{length(untrack_queue)} character-map pairs"
end)
end
untrack_queue
|> Task.async_stream(
fn {map_id, character_id} ->
Logger.debug(fn ->
"[TrackerManager] Untracking character #{character_id} from map #{map_id} - " <>
"reason: character no longer present on map"
end)
remove_from_untrack_queue(map_id, character_id)
WandererApp.Cache.delete("map:#{map_id}:character:#{character_id}:solar_system_id")
@@ -255,12 +382,36 @@ defmodule WandererApp.Character.TrackerManager.Impl do
WandererApp.Character.update_character_state(character_id, character_state)
WandererApp.Map.Server.Impl.broadcast!(map_id, :untrack_character, character_id)
# Emit telemetry for untrack event
:telemetry.execute(
[:wanderer_app, :character, :tracker, :untracked_from_map],
%{system_time: System.system_time()},
%{character_id: character_id, map_id: map_id, reason: :presence_left}
)
{:ok, character_id, map_id}
end,
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task,
timeout: :timer.seconds(30)
)
|> Enum.each(fn _result -> :ok end)
|> Enum.each(fn result ->
case result do
{:ok, {:ok, character_id, map_id}} ->
Logger.debug(fn ->
"[TrackerManager] Successfully untracked character #{character_id} from map #{map_id}"
end)
{:exit, reason} ->
Logger.warning(fn ->
"[TrackerManager] Untrack task exited with reason: #{inspect(reason)}"
end)
_ ->
:ok
end
end)
state
end
@@ -268,9 +419,17 @@ defmodule WandererApp.Character.TrackerManager.Impl do
def handle_info({:stop_track, character_id}, state) do
if not WandererApp.Cache.has_key?("character:#{character_id}:is_stop_tracking") do
WandererApp.Cache.insert("character:#{character_id}:is_stop_tracking", true)
Logger.debug(fn -> "Stopping character tracker: #{inspect(character_id)}" end)
Logger.debug(fn ->
"[TrackerManager] Executing stop_track for character #{character_id}"
end)
stop_tracking(state, character_id)
WandererApp.Cache.delete("character:#{character_id}:is_stop_tracking")
else
Logger.debug(fn ->
"[TrackerManager] Character #{character_id} already being stopped, skipping duplicate request"
end)
end
state
@@ -279,7 +438,9 @@ defmodule WandererApp.Character.TrackerManager.Impl do
def track_character(character_id, opts) do
with {:ok, characters} <- WandererApp.Cache.lookup("tracked_characters", []),
false <- Enum.member?(characters, character_id) do
Logger.debug(fn -> "Start character tracker: #{inspect(character_id)}" end)
Logger.debug(fn ->
"[TrackerManager] Starting tracker for character #{character_id}"
end)
WandererApp.Cache.insert_or_update(
"tracked_characters",
@@ -312,7 +473,30 @@ defmodule WandererApp.Character.TrackerManager.Impl do
character_id,
%{opts: opts}
])
# Emit telemetry for tracker start
:telemetry.execute(
[:wanderer_app, :character, :tracker, :started],
%{count: 1, system_time: System.system_time()},
%{character_id: character_id}
)
else
true ->
Logger.debug(fn ->
"[TrackerManager] Character #{character_id} already being tracked"
end)
WandererApp.Cache.insert_or_update(
"track_characters_queue",
[],
fn existing ->
existing
|> Enum.reject(fn c_id -> c_id == character_id end)
end
)
WandererApp.Cache.delete("#{character_id}:track_requested")
_ ->
WandererApp.Cache.insert_or_update(
"track_characters_queue",

View File
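The tracker manager now attaches character ids and reasons to its lifecycle telemetry; a handler sketch for the events added above (handler id is illustrative):

require Logger

:telemetry.attach_many(
  "wanderer-tracker-lifecycle-logger",
  [
    [:wanderer_app, :character, :tracker, :started],
    [:wanderer_app, :character, :tracker, :stopped],
    [:wanderer_app, :character, :tracker, :garbage_collection],
    [:wanderer_app, :character, :tracker, :untracked_from_map]
  ],
  fn event, measurements, metadata, _config ->
    # metadata includes character_id and, where relevant, map_id and reason
    Logger.debug("tracker #{inspect(List.last(event))}: #{inspect(measurements)} #{inspect(metadata)}")
  end,
  nil
)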

@@ -8,7 +8,8 @@ defmodule WandererApp.Character.TrackerPool do
:tracked_ids,
:uuid,
:characters,
server_online: false
server_online: false,
last_location_duration: 0
]
@name __MODULE__
@@ -23,6 +24,16 @@ defmodule WandererApp.Character.TrackerPool do
@update_info_interval :timer.minutes(2)
@update_wallet_interval :timer.minutes(10)
# Per-operation concurrency limits
# Location updates are critical and need high concurrency (100 chars in ~200ms)
# Note: This is fetched at runtime since it's configured via runtime.exs
defp location_concurrency do
Application.get_env(:wanderer_app, :location_concurrency, System.schedulers_online() * 12)
end
# Other operations can use lower concurrency
@standard_concurrency System.schedulers_online() * 2
@logger Application.compile_env(:wanderer_app, :logger)
def new(), do: __struct__()
@@ -106,14 +117,23 @@ defmodule WandererApp.Character.TrackerPool do
"server_status"
)
Process.send_after(self(), :update_online, 100)
Process.send_after(self(), :update_location, 300)
Process.send_after(self(), :update_ship, 500)
Process.send_after(self(), :update_info, 1500)
# Stagger pool startups to distribute load across multiple pools
# Critical location updates get minimal stagger (0-500ms)
# Other operations get wider staggers (up to 10s for online/ship, 60s for info, 120s for wallet) to reduce thundering herd
location_stagger = :rand.uniform(500)
online_stagger = :rand.uniform(10_000)
ship_stagger = :rand.uniform(10_000)
info_stagger = :rand.uniform(60_000)
Process.send_after(self(), :update_online, 100 + online_stagger)
Process.send_after(self(), :update_location, 300 + location_stagger)
Process.send_after(self(), :update_ship, 500 + ship_stagger)
Process.send_after(self(), :update_info, 1500 + info_stagger)
Process.send_after(self(), :check_offline_characters, @check_offline_characters_interval)
if WandererApp.Env.wallet_tracking_enabled?() do
Process.send_after(self(), :update_wallet, 1000)
wallet_stagger = :rand.uniform(120_000)
Process.send_after(self(), :update_wallet, 1000 + wallet_stagger)
end
{:noreply, state}
@@ -163,7 +183,7 @@ defmodule WandererApp.Character.TrackerPool do
fn character_id ->
WandererApp.Character.Tracker.update_online(character_id)
end,
max_concurrency: System.schedulers_online() * 4,
max_concurrency: @standard_concurrency,
on_timeout: :kill_task,
timeout: :timer.seconds(5)
)
@@ -226,7 +246,7 @@ defmodule WandererApp.Character.TrackerPool do
WandererApp.Character.Tracker.check_offline(character_id)
end,
timeout: :timer.seconds(15),
max_concurrency: System.schedulers_online() * 4,
max_concurrency: @standard_concurrency,
on_timeout: :kill_task
)
|> Enum.each(fn
@@ -254,26 +274,52 @@ defmodule WandererApp.Character.TrackerPool do
) do
Process.send_after(self(), :update_location, @update_location_interval)
start_time = System.monotonic_time(:millisecond)
try do
characters
|> Task.async_stream(
fn character_id ->
WandererApp.Character.Tracker.update_location(character_id)
end,
max_concurrency: System.schedulers_online() * 4,
max_concurrency: location_concurrency(),
on_timeout: :kill_task,
timeout: :timer.seconds(5)
)
|> Enum.each(fn _result -> :ok end)
# Emit telemetry for location update performance
duration = System.monotonic_time(:millisecond) - start_time
:telemetry.execute(
[:wanderer_app, :tracker_pool, :location_update],
%{duration: duration, character_count: length(characters)},
%{pool_uuid: state.uuid}
)
# Warn if location updates are falling behind (taking more than 2s for this pool's characters)
if duration > 2000 do
Logger.warning(
"[Tracker Pool] Location updates falling behind: #{duration}ms for #{length(characters)} chars (pool: #{state.uuid})"
)
:telemetry.execute(
[:wanderer_app, :tracker_pool, :location_lag],
%{duration: duration, character_count: length(characters)},
%{pool_uuid: state.uuid}
)
end
{:noreply, %{state | last_location_duration: duration}}
rescue
e ->
Logger.error("""
[Tracker Pool] update_location => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
{:noreply, state}
end
end
def handle_info(
@@ -289,32 +335,48 @@ defmodule WandererApp.Character.TrackerPool do
:update_ship,
%{
characters: characters,
server_online: true
server_online: true,
last_location_duration: location_duration
} =
state
) do
Process.send_after(self(), :update_ship, @update_ship_interval)
try do
characters
|> Task.async_stream(
fn character_id ->
WandererApp.Character.Tracker.update_ship(character_id)
end,
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task,
timeout: :timer.seconds(5)
# Backpressure: Skip ship updates if location updates are falling behind
if location_duration > 1000 do
Logger.debug(
"[Tracker Pool] Skipping ship update due to location lag (#{location_duration}ms)"
)
|> Enum.each(fn _result -> :ok end)
rescue
e ->
Logger.error("""
[Tracker Pool] update_ship => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
:telemetry.execute(
[:wanderer_app, :tracker_pool, :ship_skipped],
%{count: 1},
%{pool_uuid: state.uuid, reason: :location_lag}
)
{:noreply, state}
else
try do
characters
|> Task.async_stream(
fn character_id ->
WandererApp.Character.Tracker.update_ship(character_id)
end,
max_concurrency: @standard_concurrency,
on_timeout: :kill_task,
timeout: :timer.seconds(5)
)
|> Enum.each(fn _result -> :ok end)
rescue
e ->
Logger.error("""
[Tracker Pool] update_ship => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
end
end
def handle_info(
@@ -330,35 +392,51 @@ defmodule WandererApp.Character.TrackerPool do
:update_info,
%{
characters: characters,
server_online: true
server_online: true,
last_location_duration: location_duration
} =
state
) do
Process.send_after(self(), :update_info, @update_info_interval)
try do
characters
|> Task.async_stream(
fn character_id ->
WandererApp.Character.Tracker.update_info(character_id)
end,
timeout: :timer.seconds(15),
max_concurrency: System.schedulers_online() * 4,
on_timeout: :kill_task
# Backpressure: Skip info updates if location updates are severely falling behind
if location_duration > 1500 do
Logger.debug(
"[Tracker Pool] Skipping info update due to location lag (#{location_duration}ms)"
)
|> Enum.each(fn
{:ok, _result} -> :ok
error -> Logger.error("Error in update_info: #{inspect(error)}")
end)
rescue
e ->
Logger.error("""
[Tracker Pool] update_info => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
:telemetry.execute(
[:wanderer_app, :tracker_pool, :info_skipped],
%{count: 1},
%{pool_uuid: state.uuid, reason: :location_lag}
)
{:noreply, state}
else
try do
characters
|> Task.async_stream(
fn character_id ->
WandererApp.Character.Tracker.update_info(character_id)
end,
timeout: :timer.seconds(15),
max_concurrency: @standard_concurrency,
on_timeout: :kill_task
)
|> Enum.each(fn
{:ok, _result} -> :ok
error -> Logger.error("Error in update_info: #{inspect(error)}")
end)
rescue
e ->
Logger.error("""
[Tracker Pool] update_info => exception: #{Exception.message(e)}
#{Exception.format_stacktrace(__STACKTRACE__)}
""")
end
{:noreply, state}
end
end
def handle_info(
@@ -387,7 +465,7 @@ defmodule WandererApp.Character.TrackerPool do
WandererApp.Character.Tracker.update_wallet(character_id)
end,
timeout: :timer.minutes(5),
max_concurrency: System.schedulers_online() * 4,
max_concurrency: @standard_concurrency,
on_timeout: :kill_task
)
|> Enum.each(fn

View File

@@ -89,14 +89,4 @@ defmodule WandererApp.Character.TrackerPoolDynamicSupervisor do
end
end
defp stop_child(uuid) do
case Registry.lookup(@registry, uuid) do
[{pid, _}] ->
GenServer.cast(pid, :stop)
_ ->
Logger.warn("Unable to locate pool assigned to #{inspect(uuid)}")
:ok
end
end
end

View File

@@ -38,7 +38,7 @@ defmodule WandererApp.Character.TrackingConfigUtils do
%{id: "default", title: "Default", value: default_count}
]
{:ok, pools_count} =
{:ok, _pools_count} =
Cachex.get(
:esi_auth_cache,
"configs_total_count"

View File

@@ -53,6 +53,7 @@ defmodule WandererApp.Character.TrackingUtils do
@doc """
Builds tracking data for all characters with access to a map.
Only includes characters that have actual tracking permission.
"""
def build_tracking_data(map_id, current_user_id) do
with {:ok, map} <-
@@ -65,12 +66,16 @@ defmodule WandererApp.Character.TrackingUtils do
{:ok, user_settings} <- WandererApp.MapUserSettingsRepo.get(map_id, current_user_id),
{:ok, %{characters: characters_with_access}} <-
WandererApp.Maps.load_characters(map, current_user_id) do
# Filter to only characters with actual tracking permission
characters_with_tracking_permission =
filter_characters_with_tracking_permission(characters_with_access, map)
# Map characters to tracking data
{:ok, characters_data} =
build_character_tracking_data(characters_with_access)
build_character_tracking_data(characters_with_tracking_permission)
{:ok, main_character} =
get_main_character(user_settings, characters_with_access, characters_with_access)
get_main_character(user_settings, characters_with_tracking_permission, characters_with_tracking_permission)
following_character_eve_id =
case user_settings do
@@ -112,10 +117,154 @@ defmodule WandererApp.Character.TrackingUtils do
end)}
end
# Filter characters to only include those with actual tracking permission
# This prevents showing characters in the tracking dialog that will fail when toggled
defp filter_characters_with_tracking_permission(characters, %{id: map_id, owner_id: owner_id}) do
# Load ACLs with members properly (same approach as get_map_characters)
acls = load_map_acls_with_members(map_id)
Enum.filter(characters, fn character ->
has_tracking_permission?(character, owner_id, acls)
end)
end
# Load ACLs with members in the correct format for permission checking
defp load_map_acls_with_members(map_id) do
case WandererApp.Api.MapAccessList.read_by_map(%{map_id: map_id},
load: [access_list: [:owner, :members]]
) do
{:ok, map_access_lists} ->
map_access_lists
|> Enum.map(fn mal -> mal.access_list end)
|> Enum.reject(&is_nil/1)
_ ->
[]
end
end
# Check if a character has tracking permission on a map
# Returns true if the character can be tracked, false otherwise
defp has_tracking_permission?(character, owner_id, acls) do
cond do
# Map owner always has tracking permission
character.id == owner_id ->
true
# Character belongs to same user as map owner
# Note: character data from load_characters may not have user_id, so we need to load it
check_same_user_as_owner_by_id(character.id, owner_id) ->
true
# Check ACL-based permissions
true ->
case WandererApp.Permissions.check_characters_access([character], acls) do
[character_permissions] ->
map_permissions = WandererApp.Permissions.get_permissions(character_permissions)
map_permissions.track_character and map_permissions.view_system
_ ->
false
end
end
end
# Check if character belongs to the same user as the map owner (by character IDs)
defp check_same_user_as_owner_by_id(_character_id, nil), do: false
defp check_same_user_as_owner_by_id(character_id, owner_id) do
with {:ok, character} <- WandererApp.Character.get_character(character_id),
{:ok, owner_character} <- WandererApp.Character.get_character(owner_id) do
character.user_id != nil and character.user_id == owner_character.user_id
else
_ -> false
end
end
# Private implementation of update character tracking
defp do_update_character_tracking(character, map_id, track, caller_pid) do
WandererApp.MapCharacterSettingsRepo.get(map_id, character.id)
|> case do
# First check current tracking state to avoid unnecessary permission checks
current_settings = WandererApp.MapCharacterSettingsRepo.get(map_id, character.id)
case {track, current_settings} do
# Already tracked and wants to stay tracked - no permission check needed
{true, {:ok, %{tracked: true} = settings}} ->
do_update_character_tracking_impl(character, map_id, track, caller_pid, {:ok, settings})
# Wants to enable tracking - check permissions first
{true, settings_result} ->
case check_character_tracking_permission(character, map_id) do
{:ok, :allowed} ->
do_update_character_tracking_impl(character, map_id, track, caller_pid, settings_result)
{:error, reason} ->
Logger.warning(
"[CharacterTracking] Character #{character.id} cannot be tracked on map #{map_id}: #{reason}"
)
{:error, reason}
end
# Untracking is always allowed
{false, settings_result} ->
do_update_character_tracking_impl(character, map_id, track, caller_pid, settings_result)
end
end
# Check if a character has permission to be tracked on a map
defp check_character_tracking_permission(character, map_id) do
with {:ok, %{acls: acls, owner_id: owner_id}} <-
WandererApp.MapRepo.get(map_id,
acls: [
:owner_id,
members: [:role, :eve_character_id, :eve_corporation_id, :eve_alliance_id]
]
) do
# Check if character is the map owner
if character.id == owner_id do
{:ok, :allowed}
else
# Check if character belongs to same user as owner (Option 3 check)
case check_same_user_as_owner(character, owner_id) do
true ->
{:ok, :allowed}
false ->
# Check ACL-based permissions
[character_permissions] =
WandererApp.Permissions.check_characters_access([character], acls)
map_permissions = WandererApp.Permissions.get_permissions(character_permissions)
if map_permissions.track_character and map_permissions.view_system do
{:ok, :allowed}
else
{:error,
"Character does not have tracking permission on this map. Please add the character to a map access list or ensure you are the map owner."}
end
end
end
else
{:error, _} ->
{:error, "Failed to verify map permissions"}
end
end
# Check if character belongs to the same user as the map owner
defp check_same_user_as_owner(_character, nil), do: false
defp check_same_user_as_owner(character, owner_id) do
case WandererApp.Character.get_character(owner_id) do
{:ok, owner_character} ->
character.user_id != nil and character.user_id == owner_character.user_id
_ ->
false
end
end
defp do_update_character_tracking_impl(character, map_id, track, caller_pid, settings_result) do
case settings_result do
# Untracking flow
{:ok, %{tracked: true} = existing_settings} ->
if not track do
@@ -132,6 +281,9 @@ defmodule WandererApp.Character.TrackingUtils do
{:ok, %{tracked: false} = existing_settings} ->
if track do
{:ok, updated_settings} = WandererApp.MapCharacterSettingsRepo.track(existing_settings)
# Ensure character is in map state (fixes race condition where character
# might not be synced yet from presence updates)
:ok = WandererApp.Map.add_character(map_id, character)
:ok = track([character], map_id, true, caller_pid)
{:ok, updated_settings}
else
@@ -148,6 +300,9 @@ defmodule WandererApp.Character.TrackingUtils do
tracked: true
})
# Add character to map state immediately (fixes race condition where
# character wouldn't appear on map until next update_presence cycle)
:ok = WandererApp.Map.add_character(map_id, character)
:ok = track([character], map_id, true, caller_pid)
{:ok, settings}
else
@@ -210,6 +365,31 @@ defmodule WandererApp.Character.TrackingUtils do
if is_track_allowed do
:ok = WandererApp.Character.TrackerManager.start_tracking(character_id)
# Immediately set tracking_start_time cache key to enable map tracking
# This ensures the character is tracked for updates even before the
# Tracker process is fully started (avoids race condition)
tracking_start_key = "character:#{character_id}:map:#{map_id}:tracking_start_time"
case WandererApp.Cache.lookup(tracking_start_key) do
{:ok, nil} ->
WandererApp.Cache.put(tracking_start_key, DateTime.utc_now())
# Clear stale location caches for fresh tracking
WandererApp.Cache.delete("map:#{map_id}:character:#{character_id}:solar_system_id")
WandererApp.Cache.delete("map:#{map_id}:character:#{character_id}:station_id")
WandererApp.Cache.delete("map:#{map_id}:character:#{character_id}:structure_id")
_ ->
# Already tracking, no need to update
:ok
end
# Also call update_track_settings to update character state when tracker is ready
WandererApp.Character.TrackerManager.update_track_settings(character_id, %{
map_id: map_id,
track: true
})
end
:ok

View File

@@ -17,7 +17,6 @@ defmodule WandererApp.Env do
def invites(), do: get_key(:invites, false)
def map_subscriptions_enabled?(), do: get_key(:map_subscriptions_enabled, false)
def websocket_events_enabled?(), do: get_key(:websocket_events_enabled, false)
def public_api_disabled?(), do: get_key(:public_api_disabled, false)
@decorate cacheable(

View File

@@ -2,6 +2,8 @@ defmodule WandererApp.Esi do
@moduledoc group: :esi
defdelegate get_server_status, to: WandererApp.Esi.ApiClient
defdelegate get_group_info(group_id, opts \\ []), to: WandererApp.Esi.ApiClient
defdelegate get_type_info(type_id, opts \\ []), to: WandererApp.Esi.ApiClient
defdelegate get_alliance_info(eve_id, opts \\ []), to: WandererApp.Esi.ApiClient
defdelegate get_corporation_info(eve_id, opts \\ []), to: WandererApp.Esi.ApiClient
defdelegate get_character_info(eve_id, opts \\ []), to: WandererApp.Esi.ApiClient

View File
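The new delegates expose group and type lookups on the WandererApp.Esi facade (which Character now calls via apply/3 instead of reaching into ApiClient). A call sketch with an illustrative type id:

# Returns {:ok, info_map} on success; 587 is a placeholder EVE type id.
case WandererApp.Esi.get_type_info(587) do
  {:ok, info} -> Map.get(info, "name")
  {:error, reason} -> {:error, reason}
end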

@@ -8,7 +8,6 @@ defmodule WandererApp.Esi.ApiClient do
@ttl :timer.hours(1)
@wanderrer_user_agent "(wanderer-industries@proton.me; +https://github.com/wanderer-industries/wanderer)"
@req_esi_options [base_url: "https://esi.evetech.net", finch: WandererApp.Finch]
@cache_opts [cache: true]
@retry_opts [retry: false, retry_log_level: :warning]
@@ -17,6 +16,17 @@ defmodule WandererApp.Esi.ApiClient do
@logger Application.compile_env(:wanderer_app, :logger)
# Pool selection for different operation types
# Character tracking operations use dedicated high-capacity pool
@character_tracking_pool WandererApp.Finch.ESI.CharacterTracking
# General ESI operations use standard pool
@general_pool WandererApp.Finch.ESI.General
# Helper function to get Req options with appropriate Finch pool
defp req_options_for_pool(pool) do
[base_url: "https://esi.evetech.net", finch: pool]
end
def get_server_status, do: do_get("/status", [], @cache_opts)
def set_autopilot_waypoint(add_to_beginning, clear_other_waypoints, destination_id, opts \\ []),
@@ -38,10 +48,13 @@ defmodule WandererApp.Esi.ApiClient do
do:
do_post_esi(
"/characters/affiliation/",
json: character_eve_ids,
params: %{
datasource: "tranquility"
}
[
json: character_eve_ids,
params: %{
datasource: "tranquility"
}
],
@character_tracking_pool
)
def get_routes_custom(hubs, origin, params),
@@ -60,7 +73,7 @@ defmodule WandererApp.Esi.ApiClient do
|> Keyword.merge(@timeout_opts)
)
def get_routes_eve(hubs, origin, params, opts),
def get_routes_eve(hubs, origin, _params, _opts),
do:
{:ok,
hubs
@@ -87,36 +100,35 @@ defmodule WandererApp.Esi.ApiClient do
end
end)}
defp do_get_routes_eve(origin, destination, params, opts) do
esi_params =
Map.merge(params, %{
connections: params.connections |> Enum.join(","),
avoid: params.avoid |> Enum.join(",")
})
do_get(
"/route/#{origin}/#{destination}/?#{esi_params |> Plug.Conn.Query.encode()}",
opts,
@cache_opts
)
|> case do
{:ok, result} ->
%{
"origin" => origin,
"destination" => destination,
"systems" => result,
"success" => true
}
error ->
Logger.warning("Error getting routes: #{inspect(error)}")
%{"origin" => origin, "destination" => destination, "systems" => [], "success" => false}
end
end
@decorate cacheable(
cache: Cache,
key: "group-info-#{group_id}",
opts: [ttl: @ttl]
)
def get_group_info(group_id, opts),
do:
do_get(
"/universe/groups/#{group_id}/",
opts,
@cache_opts
)
@decorate cacheable(
cache: Cache,
key: "info-#{eve_id}",
key: "type-info-#{type_id}",
opts: [ttl: @ttl]
)
def get_type_info(type_id, opts),
do:
do_get(
"/universe/types/#{type_id}/",
opts,
@cache_opts
)
@decorate cacheable(
cache: Cache,
key: "alliance-info-#{eve_id}",
opts: [ttl: @ttl]
)
def get_alliance_info(eve_id, opts \\ []) do
@@ -137,7 +149,7 @@ defmodule WandererApp.Esi.ApiClient do
@decorate cacheable(
cache: Cache,
key: "info-#{eve_id}",
key: "corporation-info-#{eve_id}",
opts: [ttl: @ttl]
)
def get_corporation_info(eve_id, opts \\ []) do
@@ -150,7 +162,7 @@ defmodule WandererApp.Esi.ApiClient do
@decorate cacheable(
cache: Cache,
key: "info-#{eve_id}",
key: "character-info-#{eve_id}",
opts: [ttl: @ttl]
)
def get_character_info(eve_id, opts \\ []) do
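
The cache-key renames in the hunks above all follow one pattern: each entity lookup now gets its own key prefix instead of the shared "info-" prefix, so two different entity types whose numeric ids happen to coincide can no longer resolve to the same cached entry. A contrived illustration (the id is made up):

# Old scheme: both lookups land on the same cache key
"info-#{98000001}"                 # corporation 98000001
"info-#{98000001}"                 # character 98000001 (same key, potential collision)

# New scheme: keys are disjoint per entity type
"corporation-info-#{98000001}"
"character-info-#{98000001}"
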
@@ -203,8 +215,17 @@ defmodule WandererApp.Esi.ApiClient do
do: get_character_auth_data(character_eve_id, "ship", opts ++ @cache_opts)
def search(character_eve_id, opts \\ []) do
search_val = to_string(opts[:params][:search] || "")
categories_val = to_string(opts[:params][:categories] || "character,alliance,corporation")
params = Keyword.get(opts, :params, %{}) |> Map.new()
search_val =
to_string(Map.get(params, :search) || Map.get(params, "search") || "")
categories_val =
to_string(
Map.get(params, :categories) ||
Map.get(params, "categories") ||
"character,alliance,corporation"
)
query_params = [
{"search", search_val},
@@ -220,10 +241,12 @@ defmodule WandererApp.Esi.ApiClient do
@decorate cacheable(
cache: Cache,
key: "search-#{character_eve_id}-#{categories_val}-#{search_val |> Slug.slugify()}",
key: "search-#{character_eve_id}-#{categories_val}-#{Base.encode64(search_val)}",
opts: [ttl: @ttl]
)
defp get_search(character_eve_id, search_val, categories_val, merged_opts) do
# Note: search_val and categories_val are used by the @decorate cacheable annotation above
_unused = {search_val, categories_val}
get_character_auth_data(character_eve_id, "search", merged_opts)
end
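
The reworked search/2 first normalizes opts[:params] into a map, so callers may pass a keyword list or a map with either atom or string keys, and get_search/4 now builds its cache key from a Base64-encoded search string instead of a slug, so distinct inputs cannot collide after slugification. A minimal sketch of the normalization, assuming a caller that uses string keys:

opts = [params: %{"search" => "Jita Trade", "categories" => "character"}]

params = Keyword.get(opts, :params, %{}) |> Map.new()
search_val = to_string(Map.get(params, :search) || Map.get(params, "search") || "")
# => "Jita Trade"

Base.encode64(search_val)
# => "Sml0YSBUcmFkZQ==" (a stable, collision-free cache-key component)
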
@@ -254,14 +277,18 @@ defmodule WandererApp.Esi.ApiClient do
character_id = opts |> Keyword.get(:character_id, nil)
# Use character tracking pool for character operations
pool = @character_tracking_pool
if not is_access_token_expired?(character_id) do
do_get(
path,
auth_opts,
opts |> with_refresh_token()
opts |> with_refresh_token(),
pool
)
else
do_get_retry(path, auth_opts, opts |> with_refresh_token())
do_get_retry(path, auth_opts, opts |> with_refresh_token(), :forbidden, pool)
end
end
@@ -295,19 +322,19 @@ defmodule WandererApp.Esi.ApiClient do
defp with_cache_opts(opts),
do: opts |> Keyword.merge(@cache_opts) |> Keyword.merge(cache_dir: System.tmp_dir!())
defp do_get(path, api_opts \\ [], opts \\ []) do
defp do_get(path, api_opts, opts, pool \\ @general_pool) do
case Cachex.get(:api_cache, path) do
{:ok, cached_data} when not is_nil(cached_data) ->
{:ok, cached_data}
_ ->
do_get_request(path, api_opts, opts)
do_get_request(path, api_opts, opts, pool)
end
end
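
do_get/4 consults the :api_cache Cachex table before issuing a request. The write side is not visible in this hunk, so the sketch below only illustrates the read-through shape, with an assumed write-back and TTL:

case Cachex.get(:api_cache, path) do
  {:ok, cached} when not is_nil(cached) ->
    {:ok, cached}

  _ ->
    with {:ok, body} <- do_get_request(path, api_opts, opts, pool) do
      # Assumed write-back; the real TTL and storage location are not shown in this diff.
      Cachex.put(:api_cache, path, body, ttl: :timer.minutes(5))
      {:ok, body}
    end
end
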
defp do_get_request(path, api_opts \\ [], opts \\ []) do
defp do_get_request(path, api_opts, opts, pool) do
try do
@req_esi_options
req_options_for_pool(pool)
|> Req.new()
|> Req.get(
api_opts
@@ -395,15 +422,52 @@ defmodule WandererApp.Esi.ApiClient do
{:ok, %{status: status} = _error} when status in [401, 403] ->
do_get_retry(path, api_opts, opts)
{:ok, %{status: status, headers: headers}} ->
{:ok, %{status: status}} ->
{:error, "Unexpected status: #{status}"}
{:error, _reason} ->
{:error, %Mint.TransportError{reason: :timeout}} ->
# Emit telemetry for pool timeout
:telemetry.execute(
[:wanderer_app, :finch, :pool_timeout],
%{count: 1},
%{method: "GET", path: path, pool: pool}
)
{:error, :pool_timeout}
{:error, reason} ->
# Check if this is a Finch pool error
if is_exception(reason) and
Exception.message(reason) =~ "unable to provide a connection" do
:telemetry.execute(
[:wanderer_app, :finch, :pool_exhausted],
%{count: 1},
%{method: "GET", path: path, pool: pool}
)
end
{:error, "Request failed"}
end
rescue
e ->
Logger.error(Exception.message(e))
error_msg = Exception.message(e)
# Emit telemetry for pool exhaustion errors
if error_msg =~ "unable to provide a connection" do
:telemetry.execute(
[:wanderer_app, :finch, :pool_exhausted],
%{count: 1},
%{method: "GET", path: path, pool: pool}
)
Logger.error("FINCH_POOL_EXHAUSTED: #{error_msg}",
method: "GET",
path: path,
pool: inspect(pool)
)
else
Logger.error(error_msg)
end
{:error, "Request failed"}
end
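
The new error branches emit [:wanderer_app, :finch, :pool_timeout] and [:wanderer_app, :finch, :pool_exhausted] telemetry events. A consumer is not part of this diff; a minimal handler that only logs them could look like the sketch below (the handler id and log format are assumptions):

require Logger

:telemetry.attach_many(
  "wanderer-finch-pool-alerts",
  [
    [:wanderer_app, :finch, :pool_timeout],
    [:wanderer_app, :finch, :pool_exhausted]
  ],
  fn event, %{count: count}, %{method: method, path: path, pool: pool}, _config ->
    Logger.warning(
      "#{inspect(event)} count=#{count} method=#{method} path=#{path} pool=#{inspect(pool)}"
    )
  end,
  nil
)
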
@@ -492,13 +556,13 @@ defmodule WandererApp.Esi.ApiClient do
end
end
defp do_post_esi(url, opts) do
defp do_post_esi(url, opts, pool \\ @general_pool) do
try do
req_opts =
(opts |> with_user_agent_opts() |> Keyword.merge(@retry_opts)) ++
[params: opts[:params] || []]
Req.new(@req_esi_options ++ req_opts)
Req.new(req_options_for_pool(pool) ++ req_opts)
|> Req.post(url: url)
|> case do
{:ok, %{status: status, body: body}} when status in [200, 201] ->
@@ -576,18 +640,55 @@ defmodule WandererApp.Esi.ApiClient do
{:ok, %{status: status}} ->
{:error, "Unexpected status: #{status}"}
{:error, %Mint.TransportError{reason: :timeout}} ->
# Emit telemetry for pool timeout
:telemetry.execute(
[:wanderer_app, :finch, :pool_timeout],
%{count: 1},
%{method: "POST_ESI", path: url, pool: pool}
)
{:error, :pool_timeout}
{:error, reason} ->
# Check if this is a Finch pool error
if is_exception(reason) and
Exception.message(reason) =~ "unable to provide a connection" do
:telemetry.execute(
[:wanderer_app, :finch, :pool_exhausted],
%{count: 1},
%{method: "POST_ESI", path: url, pool: pool}
)
end
{:error, reason}
end
rescue
e ->
@logger.error(Exception.message(e))
error_msg = Exception.message(e)
# Emit telemetry for pool exhaustion errors
if error_msg =~ "unable to provide a connection" do
:telemetry.execute(
[:wanderer_app, :finch, :pool_exhausted],
%{count: 1},
%{method: "POST_ESI", path: url, pool: pool}
)
@logger.error("FINCH_POOL_EXHAUSTED: #{error_msg}",
method: "POST_ESI",
path: url,
pool: inspect(pool)
)
else
@logger.error(error_msg)
end
{:error, "Request failed"}
end
end
defp do_get_retry(path, api_opts, opts, status \\ :forbidden) do
defp do_get_retry(path, api_opts, opts, status \\ :forbidden, pool \\ @general_pool) do
refresh_token? = opts |> Keyword.get(:refresh_token?, false)
retry_count = opts |> Keyword.get(:retry_count, 0)
character_id = opts |> Keyword.get(:character_id, nil)
@@ -602,7 +703,8 @@ defmodule WandererApp.Esi.ApiClient do
do_get(
path,
api_opts |> Keyword.merge(auth_opts),
opts |> Keyword.merge(retry_count: retry_count + 1)
opts |> Keyword.merge(retry_count: retry_count + 1),
pool
)
{:error, _error} ->
@@ -704,10 +806,10 @@ defmodule WandererApp.Esi.ApiClient do
defp handle_refresh_token_result(
{:error, %OAuth2.Error{reason: :econnrefused} = error},
character,
_character,
character_id,
expires_at,
scopes
_scopes
) do
expires_at_datetime = DateTime.from_unix!(expires_at)
time_since_expiry = DateTime.diff(DateTime.utc_now(), expires_at_datetime, :second)

View File

@@ -393,9 +393,6 @@ defmodule WandererApp.EveDataService do
end
end
defp get_solar_system_name(solar_system_name, wormhole_class) do
end
defp get_triglavian_data(default_data, triglavian_systems, solar_system_id) do
case Enum.find(triglavian_systems, fn system -> system.solar_system_id == solar_system_id end) do
nil ->
@@ -414,7 +411,7 @@ defmodule WandererApp.EveDataService do
defp get_security(security) do
case security do
nil -> {:ok, ""}
_ -> {:ok, String.to_float(security) |> get_true_security() |> Float.to_string(decimals: 1)}
_ -> {:ok, String.to_float(security) |> get_true_security() |> :erlang.float_to_binary(decimals: 1)}
end
end
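
The get_security/1 change replaces Float.to_string(float, decimals: 1), which current Elixir releases no longer support with a :decimals option, with the equivalent Erlang built-in. Leaving out the get_true_security/1 step, the formatting call behaves like this:

:erlang.float_to_binary(0.9456, decimals: 1)
# => "0.9"

"0.9456" |> String.to_float() |> :erlang.float_to_binary(decimals: 1)
# => "0.9"
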
@@ -496,23 +493,23 @@ defmodule WandererApp.EveDataService do
do: {:ok, 10_100}
defp get_wormhole_class_id(systems, region_id, constellation_id, solar_system_id) do
with region <-
Enum.find(systems, fn system ->
system.location_id |> Integer.parse() |> elem(0) == region_id
end),
constellation <-
Enum.find(systems, fn system ->
system.location_id |> Integer.parse() |> elem(0) == constellation_id
end),
solar_system <-
Enum.find(systems, fn system ->
system.location_id |> Integer.parse() |> elem(0) == solar_system_id
end),
wormhole_class_id <- get_wormhole_class_id(region, constellation, solar_system) do
{:ok, wormhole_class_id}
else
_ -> {:ok, -1}
end
region =
Enum.find(systems, fn system ->
system.location_id |> Integer.parse() |> elem(0) == region_id
end)
constellation =
Enum.find(systems, fn system ->
system.location_id |> Integer.parse() |> elem(0) == constellation_id
end)
solar_system =
Enum.find(systems, fn system ->
system.location_id |> Integer.parse() |> elem(0) == solar_system_id
end)
wormhole_class_id = get_wormhole_class_id(region, constellation, solar_system)
{:ok, wormhole_class_id}
end
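
The removed with-expression only used bare-variable patterns (region <-, constellation <-, and so on), and a bare variable matches any value, including nil, so its else branch was unreachable; plain assignments are behaviourally identical. A tiny illustration:

# A bare variable on the left of <- always matches, so this `with`
# never takes the else branch (newer compilers may even flag it as unreachable).
with value <- Enum.find([1, 2, 3], &(&1 > 10)) do
  value              # => nil
else
  _ -> :never_reached
end
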
defp get_wormhole_class_id(_region, _constellation, solar_system)

View File

@@ -2,7 +2,7 @@ defmodule WandererApp.ExternalEvents do
@moduledoc """
External event system for SSE and webhook delivery.
This system is completely separate from the internal Phoenix PubSub
event system and does NOT modify any existing event flows.
External events are delivered to:
@@ -77,7 +77,7 @@ defmodule WandererApp.ExternalEvents do
GenServer.cast(MapEventRelay, {:deliver_event, event})
:ok
else
Logger.warning("MapEventRelay not available for event delivery (map: #{map_id})")
Logger.debug(fn -> "MapEventRelay not available for event delivery (map: #{map_id})" end)
{:error, :relay_not_available}
end
else
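
Downgrading this message to Logger.debug with a zero-arity function also makes it lazy: the function is only invoked if a debug-level message would actually be emitted, so the interpolation costs nothing when debug output is suppressed. A minimal sketch (the map id is made up):

require Logger

map_id = "hypothetical-map-id"

Logger.debug(fn -> "MapEventRelay not available for event delivery (map: #{map_id})" end)
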

View File

@@ -178,6 +178,10 @@ defmodule WandererApp.ExternalEvents.Event do
end
end
defp serialize_payload(payload, visited) when is_map(payload) do
Map.new(payload, fn {k, v} -> {to_string(k), serialize_value(v, visited)} end)
end
# Get allowed fields based on struct type
defp get_allowed_fields(module) do
module_name = module |> Module.split() |> List.last()
@@ -192,10 +196,6 @@ defmodule WandererApp.ExternalEvents.Event do
end
end
defp serialize_payload(payload, visited) when is_map(payload) do
Map.new(payload, fn {k, v} -> {to_string(k), serialize_value(v, visited)} end)
end
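
Moving the serialize_payload/2 clause guarded by is_map/1 earlier in the module presumably keeps all clauses of that function adjacent; Elixir still compiles non-adjacent clauses of the same name and arity, but warns that they should be grouped together. A small illustration of the warning scenario:

defmodule GroupingExample do
  def describe(value) when is_map(value), do: :map

  def other, do: :ok

  # The compiler warns that clauses with the same name and arity
  # should be grouped together with the first describe/1 clause above.
  def describe(value) when is_list(value), do: :list
end
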
defp serialize_fields(fields, visited) do
Enum.reduce(fields, %{}, fn {k, v}, acc ->
if is_nil(v) do

View File

@@ -155,26 +155,23 @@ defmodule WandererApp.ExternalEvents.MapEventRelay do
# 1. Store in ETS for backfill
store_event(event, state.ets_table)
# 2. Convert event to JSON for delivery methods
event_json = Event.to_json(event)
Logger.debug(fn ->
"MapEventRelay converted event to JSON: #{inspect(String.slice(inspect(event_json), 0, 200))}..."
end)
# 3. Send to webhook subscriptions via WebhookDispatcher
WebhookDispatcher.dispatch_event(event.map_id, event)
# 4. Broadcast to SSE clients
Logger.debug(fn -> "MapEventRelay broadcasting to SSE clients for map #{event.map_id}" end)
WandererApp.ExternalEvents.SseStreamManager.broadcast_event(event.map_id, event_json)
case WandererApp.ExternalEvents.SseAccessControl.sse_allowed?(event.map_id) do
:ok ->
WandererApp.ExternalEvents.SseStreamManager.broadcast_event(event.map_id, event_json)
# Emit delivered telemetry
:telemetry.execute(
[:wanderer_app, :external_events, :relay, :delivered],
%{count: 1},
%{map_id: event.map_id, event_type: event.type}
)
:telemetry.execute(
[:wanderer_app, :external_events, :relay, :delivered],
%{count: 1},
%{map_id: event.map_id, event_type: event.type}
)
{:error, _reason} ->
:ok
end
%{state | event_count: state.event_count + 1}
end

Some files were not shown because too many files have changed in this diff.