Mirror of https://github.com/wanderer-industries/wanderer (synced 2025-12-02 05:52:55 +00:00)

Compare commits: tests-fixe...v1.88.11 (63 commits)
Commits:

- 5f75d4440d
- 34210f63e3
- 5f60fd4922
- 47ef7dda55
- 0f3550a687
- 8f242f3535
- 1ce39e5394
- cca7b912aa
- d939e32500
- 97ebe66db5
- f437fc4541
- 6c65538450
- d566a74df4
- 03e030a7d3
- e738e1da9c
- 972b3a6cbe
- 96b4a3077e
- 6b308e8a1e
- d0874cbc6f
- f106a51bf5
- dc47dc5f81
- dc81cffeea
- 5766fcf4d8
- c57a3b2cea
- 0c1fa8e79b
- 36cc91915c
- bb644fde31
- 269b54d382
- a9115cc653
- eeea7aee8b
- 700089e381
- 932935557c
- 2890a76cf2
- 4ac9b2e2b7
- f92436f3f0
- 22d97cc99d
- 305838573c
- cc7ad81d2f
- a694e57512
- 20be7fc67d
- 54bfee414b
- bcfa47bd94
- b784f68818
- 344ee54018
- 42e0f8f660
- 99b081887c
- dee8d0dae8
- 147dd5880e
- 69991fff72
- b881c84a52
- de4e1f859f
- 8e2a19540c
- 855c596672
- 36d3c0937b
- d8fb1f78cf
- 98fa7e0235
- e4396fe2f9
- 1c117903f6
- 9e9dc39200
- abd7e4e15c
- 88ed9cd39e
- 9666a8e78a
- 7a74ae566b
.github/workflows/test.yml (vendored, 110 lines changed)
@@ -21,7 +21,7 @@ jobs:
  test:
    name: Test Suite
    runs-on: ubuntu-latest

    services:
      postgres:
        image: postgres:15
@@ -35,17 +35,17 @@ jobs:
          --health-retries 5
        ports:
          - 5432:5432

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Elixir/OTP
        uses: erlef/setup-beam@v1
        with:
          elixir-version: ${{ env.ELIXIR_VERSION }}
          otp-version: ${{ env.OTP_VERSION }}

      - name: Cache Elixir dependencies
        uses: actions/cache@v3
        with:
@@ -54,12 +54,12 @@ jobs:
            _build
          key: ${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }}
          restore-keys: ${{ runner.os }}-mix-

      - name: Install Elixir dependencies
        run: |
          mix deps.get
          mix deps.compile

      - name: Check code formatting
        id: format
        run: |
@@ -71,42 +71,42 @@ jobs:
            echo "count=1" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Compile code and capture warnings
        id: compile
        run: |
          # Capture compilation output
          output=$(mix compile 2>&1 || true)
          echo "$output" > compile_output.txt

          # Count warnings
          warning_count=$(echo "$output" | grep -c "warning:" || echo "0")

          # Check if compilation succeeded
          if mix compile > /dev/null 2>&1; then
            echo "status=✅ Success" >> $GITHUB_OUTPUT
          else
            echo "status=❌ Failed" >> $GITHUB_OUTPUT
          fi

          echo "warnings=$warning_count" >> $GITHUB_OUTPUT
          echo "output<<EOF" >> $GITHUB_OUTPUT
          echo "$output" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
        continue-on-error: true

      - name: Setup database
        run: |
          mix ecto.create
          mix ecto.migrate

      - name: Run tests with coverage
        id: tests
        run: |
          # Run tests with coverage
          output=$(mix test --cover 2>&1 || true)
          echo "$output" > test_output.txt

          # Parse test results
          if echo "$output" | grep -q "0 failures"; then
            echo "status=✅ All Passed" >> $GITHUB_OUTPUT
@@ -115,16 +115,16 @@ jobs:
            echo "status=❌ Some Failed" >> $GITHUB_OUTPUT
            test_status="failed"
          fi

          # Extract test counts
          test_line=$(echo "$output" | grep -E "[0-9]+ tests?, [0-9]+ failures?" | head -1 || echo "0 tests, 0 failures")
          total_tests=$(echo "$test_line" | grep -o '[0-9]\+ tests\?' | grep -o '[0-9]\+' | head -1 || echo "0")
          failures=$(echo "$test_line" | grep -o '[0-9]\+ failures\?' | grep -o '[0-9]\+' | head -1 || echo "0")

          echo "total=$total_tests" >> $GITHUB_OUTPUT
          echo "failures=$failures" >> $GITHUB_OUTPUT
          echo "passed=$((total_tests - failures))" >> $GITHUB_OUTPUT

          # Calculate success rate
          if [ "$total_tests" -gt 0 ]; then
            success_rate=$(echo "scale=1; ($total_tests - $failures) * 100 / $total_tests" | bc)
@@ -132,26 +132,26 @@ jobs:
            success_rate="0"
          fi
          echo "success_rate=$success_rate" >> $GITHUB_OUTPUT

          exit_code=$?
          echo "exit_code=$exit_code" >> $GITHUB_OUTPUT
        continue-on-error: true

      - name: Generate coverage report
        id: coverage
        run: |
          # Generate coverage report with GitHub format
          output=$(mix coveralls.github 2>&1 || true)
          echo "$output" > coverage_output.txt

          # Extract coverage percentage
          coverage=$(echo "$output" | grep -o '[0-9]\+\.[0-9]\+%' | head -1 | sed 's/%//' || echo "0")
          if [ -z "$coverage" ]; then
            coverage="0"
          fi

          echo "percentage=$coverage" >> $GITHUB_OUTPUT

          # Determine status
          if (( $(echo "$coverage >= 80" | bc -l) )); then
            echo "status=✅ Excellent" >> $GITHUB_OUTPUT
@@ -161,14 +161,14 @@ jobs:
            echo "status=❌ Needs Improvement" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Run Credo analysis
        id: credo
        run: |
          # Run Credo and capture output
          output=$(mix credo --strict --format=json 2>&1 || true)
          echo "$output" > credo_output.txt

          # Try to parse JSON output
          if echo "$output" | jq . > /dev/null 2>&1; then
            issues=$(echo "$output" | jq '.issues | length' 2>/dev/null || echo "0")
@@ -183,12 +183,12 @@ jobs:
            normal_issues="0"
            low_issues="0"
          fi

          echo "total_issues=$issues" >> $GITHUB_OUTPUT
          echo "high_issues=$high_issues" >> $GITHUB_OUTPUT
          echo "normal_issues=$normal_issues" >> $GITHUB_OUTPUT
          echo "low_issues=$low_issues" >> $GITHUB_OUTPUT

          # Determine status
          if [ "$issues" -eq 0 ]; then
            echo "status=✅ Clean" >> $GITHUB_OUTPUT
@@ -198,24 +198,24 @@ jobs:
            echo "status=❌ Needs Attention" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Run Dialyzer analysis
        id: dialyzer
        run: |
          # Ensure PLT is built
          mix dialyzer --plt

          # Run Dialyzer and capture output
          output=$(mix dialyzer --format=github 2>&1 || true)
          echo "$output" > dialyzer_output.txt

          # Count warnings and errors
          warnings=$(echo "$output" | grep -c "warning:" || echo "0")
          errors=$(echo "$output" | grep -c "error:" || echo "0")

          echo "warnings=$warnings" >> $GITHUB_OUTPUT
          echo "errors=$errors" >> $GITHUB_OUTPUT

          # Determine status
          if [ "$errors" -eq 0 ] && [ "$warnings" -eq 0 ]; then
            echo "status=✅ Clean" >> $GITHUB_OUTPUT
@@ -225,7 +225,7 @@ jobs:
            echo "status=❌ Has Errors" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Create test results summary
        id: summary
        run: |
@@ -236,11 +236,11 @@ jobs:
          coverage_score=${{ steps.coverage.outputs.percentage }}
          credo_score=$(echo "scale=0; (100 - ${{ steps.credo.outputs.total_issues }} * 2)" | bc | sed 's/^-.*$/0/')
          dialyzer_score=$(echo "scale=0; (100 - ${{ steps.dialyzer.outputs.warnings }} * 2 - ${{ steps.dialyzer.outputs.errors }} * 10)" | bc | sed 's/^-.*$/0/')

          overall_score=$(echo "scale=1; ($format_score + $compile_score + $test_score + $coverage_score + $credo_score + $dialyzer_score) / 6" | bc)

          echo "overall_score=$overall_score" >> $GITHUB_OUTPUT

          # Determine overall status
          if (( $(echo "$overall_score >= 90" | bc -l) )); then
            echo "overall_status=🌟 Excellent" >> $GITHUB_OUTPUT
@@ -252,7 +252,7 @@ jobs:
            echo "overall_status=❌ Poor" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Find existing PR comment
        if: github.event_name == 'pull_request'
        id: find_comment
@@ -261,7 +261,7 @@ jobs:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: 'github-actions[bot]'
          body-includes: '## 🧪 Test Results Summary'

      - name: Create or update PR comment
        if: github.event_name == 'pull_request'
        uses: peter-evans/create-or-update-comment@v4
@@ -271,11 +271,11 @@ jobs:
          edit-mode: replace
          body: |
            ## 🧪 Test Results Summary

            **Overall Quality Score: ${{ steps.summary.outputs.overall_score }}%** ${{ steps.summary.outputs.overall_status }}

            ### 📊 Metrics Dashboard

            | Category | Status | Count | Details |
            |----------|---------|-------|---------|
            | 📝 **Code Formatting** | ${{ steps.format.outputs.status }} | ${{ steps.format.outputs.count }} issues | `mix format --check-formatted` |
@@ -284,50 +284,50 @@ jobs:
            | 📊 **Coverage** | ${{ steps.coverage.outputs.status }} | ${{ steps.coverage.outputs.percentage }}% | `mix coveralls` |
            | 🎯 **Credo** | ${{ steps.credo.outputs.status }} | ${{ steps.credo.outputs.total_issues }} issues | High: ${{ steps.credo.outputs.high_issues }}, Normal: ${{ steps.credo.outputs.normal_issues }}, Low: ${{ steps.credo.outputs.low_issues }} |
            | 🔍 **Dialyzer** | ${{ steps.dialyzer.outputs.status }} | ${{ steps.dialyzer.outputs.errors }} errors, ${{ steps.dialyzer.outputs.warnings }} warnings | `mix dialyzer` |

            ### 🎯 Quality Gates

            Based on the project's quality thresholds:
            - **Compilation Warnings**: ${{ steps.compile.outputs.warnings }}/148 (limit: 148)
            - **Credo Issues**: ${{ steps.credo.outputs.total_issues }}/87 (limit: 87)
            - **Dialyzer Warnings**: ${{ steps.dialyzer.outputs.warnings }}/161 (limit: 161)
            - **Test Coverage**: ${{ steps.coverage.outputs.percentage }}%/50% (minimum: 50%)
            - **Test Failures**: ${{ steps.tests.outputs.failures }}/0 (limit: 0)

            <details>
            <summary>📈 Progress Toward Goals</summary>

            Target goals for the project:
            - ✨ **Zero compilation warnings** (currently: ${{ steps.compile.outputs.warnings }})
            - ✨ **≤10 Credo issues** (currently: ${{ steps.credo.outputs.total_issues }})
            - ✨ **Zero Dialyzer warnings** (currently: ${{ steps.dialyzer.outputs.warnings }})
            - ✨ **≥85% test coverage** (currently: ${{ steps.coverage.outputs.percentage }}%)
            - ✅ **Zero test failures** (currently: ${{ steps.tests.outputs.failures }})

            </details>

            <details>
            <summary>🔧 Quick Actions</summary>

            To improve code quality:
            ```bash
            # Fix formatting issues
            mix format

            # View detailed Credo analysis
            mix credo --strict

            # Check Dialyzer warnings
            mix dialyzer

            # Generate detailed coverage report
            mix coveralls.html
            ```

            </details>

            ---

            🤖 *Auto-generated by GitHub Actions* • Updated: ${{ github.event.head_commit.timestamp }}

            > **Note**: This comment will be updated automatically when new commits are pushed to this PR.
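The summary step's arithmetic is easy to lose in the `bc` pipelines above. Purely for illustration, the same scoring logic restated in TypeScript (the workflow itself does this in shell; the clamping mirrors the `sed 's/^-.*$/0/'` trick):

```typescript
// Illustrative restatement of the "Create test results summary" step above.
const clamp0 = (n: number): number => Math.max(0, n);

function overallScore(scores: {
  format: number;
  compile: number;
  test: number;
  coverage: number;
  credoIssues: number;
  dialyzerWarnings: number;
  dialyzerErrors: number;
}): number {
  // credo_score = 100 - issues * 2, floored at 0
  const credo = clamp0(100 - scores.credoIssues * 2);
  // dialyzer_score = 100 - warnings * 2 - errors * 10, floored at 0
  const dialyzer = clamp0(100 - scores.dialyzerWarnings * 2 - scores.dialyzerErrors * 10);
  const sum = scores.format + scores.compile + scores.test + scores.coverage + credo + dialyzer;
  // scale=1 in bc: one decimal place
  return Math.round((sum / 6) * 10) / 10;
}
```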
CHANGELOG.md (134 lines changed)
@@ -2,6 +2,140 @@

<!-- changelog -->

## [v1.88.11](https://github.com/wanderer-industries/wanderer/compare/v1.88.10...v1.88.11) (2025-11-29)

## [v1.88.10](https://github.com/wanderer-industries/wanderer/compare/v1.88.9...v1.88.10) (2025-11-29)

### Bug Fixes:

* core: fixed pings cleanup

## [v1.88.9](https://github.com/wanderer-industries/wanderer/compare/v1.88.8...v1.88.9) (2025-11-29)

### Bug Fixes:

* core: fixed linked signatures cleanup

## [v1.88.8](https://github.com/wanderer-industries/wanderer/compare/v1.88.7...v1.88.8) (2025-11-28)

### Bug Fixes:

* core: fixed pings issue

## [v1.88.7](https://github.com/wanderer-industries/wanderer/compare/v1.88.6...v1.88.7) (2025-11-28)

### Bug Fixes:

* core: fixed tracking issues

## [v1.88.6](https://github.com/wanderer-industries/wanderer/compare/v1.88.5...v1.88.6) (2025-11-28)

### Bug Fixes:

* core: fixed tracking issues

## [v1.88.5](https://github.com/wanderer-industries/wanderer/compare/v1.88.4...v1.88.5) (2025-11-28)

### Bug Fixes:

* core: fixed env errors

## [v1.88.4](https://github.com/wanderer-industries/wanderer/compare/v1.88.3...v1.88.4) (2025-11-27)

### Bug Fixes:

* defensive check for undefined excluded systems

## [v1.88.3](https://github.com/wanderer-industries/wanderer/compare/v1.88.2...v1.88.3) (2025-11-26)

### Bug Fixes:

* core: fixed env issues

## [v1.88.1](https://github.com/wanderer-industries/wanderer/compare/v1.88.0...v1.88.1) (2025-11-26)

### Bug Fixes:

* sse enable checkbox, and kills ticker

* apiv1 token auth and structure fixes

* removed ipv6 distribution env settings

* tests: updated tests

* tests: updated tests

* clean up id generation

* resolve issue with async event processing

## [v1.88.0](https://github.com/wanderer-industries/wanderer/compare/v1.87.0...v1.88.0) (2025-11-25)

### Features:

* Add zkb and eve who links for characters where possible

## [v1.87.0](https://github.com/wanderer-industries/wanderer/compare/v1.86.1...v1.87.0) (2025-11-25)

### Features:

* Add markdown support for system description

## [v1.86.1](https://github.com/wanderer-industries/wanderer/compare/v1.86.0...v1.86.1) (2025-11-25)

### Bug Fixes:

* Map: Add ability to see character passage direction in list of passages

## [v1.86.0](https://github.com/wanderer-industries/wanderer/compare/v1.85.5...v1.86.0) (2025-11-25)

### Features:

* add date filter for character activity

## [v1.85.5](https://github.com/wanderer-industries/wanderer/compare/v1.85.4...v1.85.5) (2025-11-24)
@@ -1,4 +1,3 @@
import classes from './MarkdownComment.module.scss';
import clsx from 'clsx';
import {
  InfoDrawer,
@@ -49,7 +48,11 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
    <>
      <InfoDrawer
        labelClassName="mb-[3px]"
        className={clsx(classes.MarkdownCommentRoot, 'p-1 bg-stone-700/20 ')}
        className={clsx(
          'p-1 bg-stone-700/20',
          'text-[12px] leading-[1.2] text-stone-300 break-words',
          'bg-gradient-to-r from-stone-600/40 via-stone-600/10 to-stone-600/0',
        )}
        onMouseEnter={handleMouseEnter}
        onMouseLeave={handleMouseLeave}
        title={
@@ -0,0 +1,9 @@
.CERoot {
  @apply border border-stone-400/30 rounded-[2px];

  :global {
    .cm-content {
      @apply bg-stone-600/40;
    }
  }
}
@@ -1,11 +1,12 @@
import { MarkdownEditor } from '@/hooks/Mapper/components/mapInterface/components/MarkdownEditor';
import { TooltipPosition, WdImageSize, WdImgButton } from '@/hooks/Mapper/components/ui-kit';
import { useHotkey } from '@/hooks/Mapper/hooks';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { OutCommand } from '@/hooks/Mapper/types';
import clsx from 'clsx';
import { PrimeIcons } from 'primereact/api';
import { MarkdownEditor } from '@/hooks/Mapper/components/mapInterface/components/MarkdownEditor';
import { useHotkey } from '@/hooks/Mapper/hooks';
import { useCallback, useMemo, useRef, useState } from 'react';
import { OutCommand } from '@/hooks/Mapper/types';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import classes from './CommentsEditor.module.scss';

export interface CommentsEditorProps {}

@@ -50,6 +51,7 @@ export const CommentsEditor = ({}: CommentsEditorProps) => {

  return (
    <MarkdownEditor
      className={classes.CERoot}
      value={textVal}
      onChange={setTextVal}
      overlayContent={
@@ -1,9 +1,9 @@
.CERoot {
  @apply border border-stone-400/30 rounded-[2px];
  @apply border border-stone-500/30 rounded-[2px];

  :global {
    .cm-content {
      @apply bg-stone-600/40;
      @apply bg-stone-950/70;
    }

    .cm-scroller {
@@ -44,9 +44,17 @@ export interface MarkdownEditorProps {
  overlayContent?: ReactNode;
  value: string;
  onChange: (value: string) => void;
  height?: string;
  className?: string;
}

export const MarkdownEditor = ({ value, onChange, overlayContent }: MarkdownEditorProps) => {
export const MarkdownEditor = ({
  value,
  onChange,
  overlayContent,
  height = '70px',
  className,
}: MarkdownEditorProps) => {
  const [hasShift, setHasShift] = useState(false);

  const refData = useRef({ onChange });
@@ -66,9 +74,9 @@ export const MarkdownEditor = ({ value, onChange, overlayContent }: MarkdownEdit
    <div className={clsx(classes.MarkdownEditor, 'relative')}>
      <CodeMirror
        value={value}
        height="70px"
        height={height}
        extensions={CODE_MIRROR_EXTENSIONS}
        className={classes.CERoot}
        className={clsx(classes.CERoot, className)}
        theme={oneDark}
        onChange={handleOnChange}
        placeholder="Start typing..."
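The two new optional props default to the previous hard-coded values, so existing call sites keep working unchanged. A hypothetical call site using both props (the component and its import path come from the diff above; the wrapper itself is made up):

```typescript
import { useState } from 'react';
import { MarkdownEditor } from '@/hooks/Mapper/components/mapInterface/components/MarkdownEditor';

// Illustrative wrapper: a taller editor with an extra class merged via clsx.
export const DescriptionEditor = () => {
  const [text, setText] = useState('');
  return (
    <MarkdownEditor
      value={text}
      onChange={setText}
      height="180px"       // overrides the '70px' default introduced above
      className="my-extra" // merged with classes.CERoot inside the component
    />
  );
};
```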
@@ -8,8 +8,8 @@ import { LabelsManager } from '@/hooks/Mapper/utils/labelsManager.ts';
import { Dialog } from 'primereact/dialog';
import { IconField } from 'primereact/iconfield';
import { InputText } from 'primereact/inputtext';
import { InputTextarea } from 'primereact/inputtextarea';
import { useCallback, useEffect, useRef, useState } from 'react';
import { MarkdownEditor } from '@/hooks/Mapper/components/mapInterface/components/MarkdownEditor';

interface SystemSettingsDialog {
  systemId: string;
@@ -214,13 +214,9 @@ export const SystemSettingsDialog = ({ systemId, visible, setVisible }: SystemSe

            <div className="flex flex-col gap-1">
              <label htmlFor="username">Description</label>
              <InputTextarea
                autoResize
                rows={5}
                cols={30}
                value={description}
                onChange={e => setDescription(e.target.value)}
              />
              <div className="h-[200px]">
                <MarkdownEditor value={description} onChange={e => setDescription(e)} height="180px" />
              </div>
            </div>
          </div>
@@ -2,7 +2,7 @@ import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { isWormholeSpace } from '@/hooks/Mapper/components/map/helpers/isWormholeSpace.ts';
import { useMemo } from 'react';
import { getSystemById, sortWHClasses } from '@/hooks/Mapper/helpers';
import { InfoDrawer, WHClassView, WHEffectView } from '@/hooks/Mapper/components/ui-kit';
import { InfoDrawer, MarkdownTextViewer, WHClassView, WHEffectView } from '@/hooks/Mapper/components/ui-kit';
import { getSystemStaticInfo } from '@/hooks/Mapper/mapRootProvider/hooks/useLoadSystemStatic';

interface SystemInfoContentProps {
@@ -51,7 +51,7 @@ export const SystemInfoContent = ({ systemId }: SystemInfoContentProps) => {
          </div>
        }
      >
        <div className="break-words">{description}</div>
        <MarkdownTextViewer>{description}</MarkdownTextViewer>
      </InfoDrawer>
    )}
  </div>
@@ -31,7 +31,7 @@ export function useSystemKills({ systemId, outCommand, showAllVisible = false, s
    storedSettings: { settingsKills },
  } = useMapRootState();

  const excludedSystems = useStableValue(settingsKills.excludedSystems);
  const excludedSystems = useStableValue(settingsKills.excludedSystems ?? []);

  const effectiveSystemIds = useMemo(() => {
    if (showAllVisible) {
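The `?? []` fallback is the "defensive check for undefined excluded systems" from the v1.88.4 changelog entry. It only behaves well alongside a value-stable hook, since `?? []` produces a fresh array reference on every render. `useStableValue` is not shown in this diff; a minimal sketch of the idea it presumably implements (hypothetical, not the real hook):

```typescript
import { useRef } from 'react';

// Hypothetical sketch: return the previous reference while the value is
// structurally equal, so a fresh `[]` per render does not retrigger memos.
export function useStableValue<T>(value: T): T {
  const ref = useRef(value);
  if (JSON.stringify(ref.current) !== JSON.stringify(value)) {
    ref.current = value;
  }
  return ref.current;
}
```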
@@ -1,4 +1,7 @@
import { Dialog } from 'primereact/dialog';
import { Menu } from 'primereact/menu';
import { MenuItem } from 'primereact/menuitem';
import { useState, useCallback, useRef, useMemo } from 'react';
import { CharacterActivityContent } from '@/hooks/Mapper/components/mapRootContent/components/CharacterActivity/CharacterActivityContent.tsx';

interface CharacterActivityProps {
@@ -6,17 +9,69 @@ interface CharacterActivityProps {
  onHide: () => void;
}

const periodOptions = [
  { value: 30, label: '30 Days' },
  { value: 365, label: '1 Year' },
  { value: null, label: 'All Time' },
];

export const CharacterActivity = ({ visible, onHide }: CharacterActivityProps) => {
  const [selectedPeriod, setSelectedPeriod] = useState<number | null>(30);
  const menuRef = useRef<Menu>(null);

  const handlePeriodChange = useCallback((days: number | null) => {
    setSelectedPeriod(days);
  }, []);

  const menuItems: MenuItem[] = useMemo(
    () => [
      {
        label: 'Period',
        items: periodOptions.map(option => ({
          label: option.label,
          icon: selectedPeriod === option.value ? 'pi pi-check' : undefined,
          command: () => handlePeriodChange(option.value),
        })),
      },
    ],
    [selectedPeriod, handlePeriodChange],
  );

  const selectedPeriodLabel = useMemo(
    () => periodOptions.find(opt => opt.value === selectedPeriod)?.label || 'All Time',
    [selectedPeriod],
  );

  const headerIcons = (
    <>
      <button
        type="button"
        className="p-dialog-header-icon p-link"
        onClick={e => menuRef.current?.toggle(e)}
        aria-label="Filter options"
      >
        <span className="pi pi-bars" />
      </button>
      <Menu model={menuItems} popup ref={menuRef} />
    </>
  );

  return (
    <Dialog
      header="Character Activity"
      header={
        <div className="flex items-center gap-2">
          <span>Character Activity</span>
          <span className="text-xs text-stone-400">({selectedPeriodLabel})</span>
        </div>
      }
      visible={visible}
      className="w-[550px] max-h-[90vh]"
      onHide={onHide}
      dismissableMask
      contentClassName="p-0 h-full flex flex-col"
      icons={headerIcons}
    >
      <CharacterActivityContent />
      <CharacterActivityContent selectedPeriod={selectedPeriod} />
    </Dialog>
  );
};
@@ -7,16 +7,28 @@ import {
} from '@/hooks/Mapper/components/mapRootContent/components/CharacterActivity/helpers.tsx';
import { Column } from 'primereact/column';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { useMemo } from 'react';
import { useMemo, useEffect } from 'react';
import { useCharacterActivityHandlers } from '@/hooks/Mapper/components/mapRootContent/hooks/useCharacterActivityHandlers';

export const CharacterActivityContent = () => {
interface CharacterActivityContentProps {
  selectedPeriod: number | null;
}

export const CharacterActivityContent = ({ selectedPeriod }: CharacterActivityContentProps) => {
  const {
    data: { characterActivityData },
  } = useMapRootState();

  const { handleShowActivity } = useCharacterActivityHandlers();

  const activity = useMemo(() => characterActivityData?.activity || [], [characterActivityData]);
  const loading = useMemo(() => characterActivityData?.loading !== false, [characterActivityData]);

  // Reload activity data when period changes
  useEffect(() => {
    handleShowActivity(selectedPeriod);
  }, [selectedPeriod, handleShowActivity]);

  if (loading) {
    return (
      <div className="flex flex-col items-center justify-center h-full w-full">
@@ -3,7 +3,7 @@
}

.SidebarOnTheMap {
  width: 400px;
  width: 500px;
  padding: 0 !important;

  :global {
@@ -5,6 +5,7 @@ import {
  ConnectionType,
  OutCommand,
  Passage,
  PassageWithSourceTarget,
  SolarSystemConnection,
} from '@/hooks/Mapper/types';
import clsx from 'clsx';
@@ -19,7 +20,7 @@ import { PassageCard } from './PassageCard';

const sortByDate = (a: string, b: string) => new Date(a).getTime() - new Date(b).getTime();

const itemTemplate = (item: Passage, options: VirtualScrollerTemplateOptions) => {
const itemTemplate = (item: PassageWithSourceTarget, options: VirtualScrollerTemplateOptions) => {
  return (
    <div
      className={clsx(classes.CharacterRow, 'w-full box-border', {
@@ -35,7 +36,7 @@ const itemTemplate = (item: Passage, options: VirtualScrollerTemplateOptions) =>
};

export interface ConnectionPassagesContentProps {
  passages: Passage[];
  passages: PassageWithSourceTarget[];
}

export const ConnectionPassages = ({ passages = [] }: ConnectionPassagesContentProps) => {
@@ -113,6 +114,20 @@ export const Connections = ({ selectedConnection, onHide }: OnTheMapProps) => {
    [outCommand],
  );

  const preparedPassages = useMemo(() => {
    if (!cnInfo) {
      return [];
    }

    return passages
      .sort((a, b) => sortByDate(b.inserted_at, a.inserted_at))
      .map<PassageWithSourceTarget>(x => ({
        ...x,
        source: x.from ? cnInfo.target : cnInfo.source,
        target: x.from ? cnInfo.source : cnInfo.target,
      }));
  }, [cnInfo, passages]);

  useEffect(() => {
    if (!selectedConnection) {
      return;
@@ -145,12 +160,14 @@ export const Connections = ({ selectedConnection, onHide }: OnTheMapProps) => {
          <InfoDrawer title="Connection" rightSide>
            <div className="flex justify-end gap-2 items-center">
              <SystemView
                showCustomName
                systemId={cnInfo.source}
                className={clsx(classes.InfoTextSize, 'select-none text-center')}
                hideRegion
              />
              <span className="pi pi-angle-double-right text-stone-500 text-[15px]"></span>
              <SystemView
                showCustomName
                systemId={cnInfo.target}
                className={clsx(classes.InfoTextSize, 'select-none text-center')}
                hideRegion
@@ -184,7 +201,7 @@ export const Connections = ({ selectedConnection, onHide }: OnTheMapProps) => {
          {/* separator */}
          <div className="w-full h-px bg-neutral-800 px-0.5"></div>

          <ConnectionPassages passages={passages} />
          <ConnectionPassages passages={preparedPassages} />
        </div>
      </Sidebar>
    );
@@ -35,6 +35,10 @@
  &.ThreeColumns {
    grid-template-columns: auto 1fr auto;
  }

  &.FourColumns {
    grid-template-columns: auto auto 1fr auto;
  }
}

.CardBorderLeftIsOwn {
@@ -1,17 +1,19 @@
import clsx from 'clsx';
import classes from './PassageCard.module.scss';
import { Passage } from '@/hooks/Mapper/types';
import { TimeAgo } from '@/hooks/Mapper/components/ui-kit';
import { PassageWithSourceTarget } from '@/hooks/Mapper/types';
import { SystemView, TimeAgo, TooltipPosition, WdImgButton } from '@/hooks/Mapper/components/ui-kit';
import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper';
import { kgToTons } from '@/hooks/Mapper/utils/kgToTons.ts';
import { useMemo } from 'react';
import { useCallback, useMemo } from 'react';
import { ZKB_ICON } from '@/hooks/Mapper/icons';
import { charEveWhoLink, charZKBLink } from '@/hooks/Mapper/helpers/linkHelpers.ts';

type PassageCardType = {
  // compact?: boolean;
  showShipName?: boolean;
  // showSystem?: boolean;
  // useSystemsCache?: boolean;
} & Passage;
} & PassageWithSourceTarget;

const SHIP_NAME_RX = /u'|'/g;
export const getShipName = (name: string) => {
@@ -25,7 +27,7 @@ export const getShipName = (name: string) => {
  });
};

export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardType) => {
export const PassageCard = ({ inserted_at, character: char, ship, source, target, from }: PassageCardType) => {
  const isOwn = false;

  const insertedAt = useMemo(() => {
@@ -33,11 +35,46 @@ export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardT
    return date.toLocaleString();
  }, [inserted_at]);

  const handleOpenZKB = useCallback(() => window.open(charZKBLink(char.eve_id), '_blank'), [char]);
  const handleOpenEveWho = useCallback(() => window.open(charEveWhoLink(char.eve_id), '_blank'), [char]);

  return (
    <div className={clsx(classes.CharacterCard, 'w-full text-xs', 'flex flex-col box-border')}>
      <div className="flex flex-col justify-between px-2 py-1 gap-1">
        {/*here icon and other*/}
        <div className={clsx(classes.CharRow, classes.ThreeColumns)}>
        <div className={clsx(classes.CharRow, classes.FourColumns)}>
          <WdTooltipWrapper
            position={TooltipPosition.top}
            content={
              <div className="flex justify-between gap-2 items-center">
                <SystemView
                  showCustomName
                  systemId={source}
                  className="select-none text-center !text-[12px]"
                  hideRegion
                />
                <span className="pi pi-angle-double-right text-stone-500 text-[15px]"></span>
                <SystemView
                  showCustomName
                  systemId={target}
                  className="select-none text-center !text-[12px]"
                  hideRegion
                />
              </div>
            }
          >
            <div
              className={clsx(
                'transition-all transform ease-in duration-200',
                'pi text-stone-500 text-[15px] w-[35px] h-[33px] !flex items-center justify-center border rounded-[6px]',
                {
                  ['pi-angle-double-right !text-orange-400 border-orange-400 hover:bg-orange-400/30']: from,
                  ['pi-angle-double-left !text-stone-500/70 border-stone-500/70 hover:bg-stone-500/30']: !from,
                },
              )}
            />
          </WdTooltipWrapper>

          {/*portrait*/}
          <span
            className={clsx(classes.EveIcon, classes.CharIcon, 'wd-bg-default')}
@@ -49,7 +86,7 @@ export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardT
          {/*here name and ship name*/}
          <div className="grid gap-1 justify-between grid-cols-[max-content_1fr]">
            {/*char name*/}
            <div className="grid gap-1 grid-cols-[auto_1px_1fr]">
            <div className="grid gap-1 grid-cols-[auto_1px_1fr_auto]">
              <span
                className={clsx(classes.MaxWidth, 'text-ellipsis overflow-hidden whitespace-nowrap', {
                  [classes.CardBorderLeftIsOwn]: isOwn,
@@ -62,6 +99,21 @@ export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardT
              <div className="h-3 border-r border-neutral-500 my-0.5"></div>
              {char.alliance_ticker && <span className="text-neutral-400">{char.alliance_ticker}</span>}
              {!char.alliance_ticker && <span className="text-neutral-400">{char.corporation_ticker}</span>}

              <div className={clsx('flex gap-1 items-center h-full ml-[2px]')}>
                <WdImgButton
                  width={16}
                  height={16}
                  tooltip={{ position: TooltipPosition.top, content: 'Open zkillboard' }}
                  source={ZKB_ICON}
                  onClick={handleOpenZKB}
                />
                <WdImgButton
                  tooltip={{ position: TooltipPosition.top, content: 'Open Eve Who' }}
                  className={clsx('pi pi-user', '!text-[12px] relative top-[-1px]')}
                  onClick={handleOpenEveWho}
                />
              </div>
            </div>

            {/*ship name*/}
||||
@@ -23,17 +23,17 @@ export const useCharacterActivityHandlers = () => {
|
||||
/**
|
||||
* Handle showing the character activity dialog
|
||||
*/
|
||||
const handleShowActivity = useCallback(() => {
|
||||
const handleShowActivity = useCallback((days?: number | null) => {
|
||||
// Update local state to show the dialog
|
||||
update(state => ({
|
||||
...state,
|
||||
showCharacterActivity: true,
|
||||
}));
|
||||
|
||||
// Send the command to the server
|
||||
// Send the command to the server with optional days parameter
|
||||
outCommand({
|
||||
type: OutCommand.showActivity,
|
||||
data: {},
|
||||
data: days !== undefined ? { days } : {},
|
||||
});
|
||||
}, [outCommand, update]);
|
||||
|
||||
|
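Illustrative calls matching the period options defined in the dialog above (the hook and its return value are from these diffs; the bare calls are just a sketch):

```typescript
const { handleShowActivity } = useCharacterActivityHandlers();

handleShowActivity(30);   // last 30 days
handleShowActivity(365);  // last year
handleShowActivity(null); // "All Time": sends { days: null }
handleShowActivity();     // days omitted: sends {} as before, server default applies
```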
||||
@@ -3,6 +3,7 @@ import {
|
||||
WdEveEntityPortrait,
|
||||
WdEveEntityPortraitSize,
|
||||
WdEveEntityPortraitType,
|
||||
WdImgButton,
|
||||
WdTooltipWrapper,
|
||||
} from '@/hooks/Mapper/components/ui-kit';
|
||||
import { SystemView } from '@/hooks/Mapper/components/ui-kit/SystemView';
|
||||
@@ -14,6 +15,8 @@ import { Commands } from '@/hooks/Mapper/types/mapHandlers';
|
||||
import clsx from 'clsx';
|
||||
import { useCallback } from 'react';
|
||||
import classes from './CharacterCard.module.scss';
|
||||
import { ZKB_ICON } from '@/hooks/Mapper/icons';
|
||||
import { charEveWhoLink, charZKBLink } from '@/hooks/Mapper/helpers/linkHelpers.ts';
|
||||
|
||||
export type CharacterCardProps = {
|
||||
compact?: boolean;
|
||||
@@ -66,6 +69,9 @@ export const CharacterCard = ({
|
||||
const shipType = char.ship?.ship_type_info?.name;
|
||||
const locationShown = showSystem && char.location?.solar_system_id;
|
||||
|
||||
const handleOpenZKB = useCallback(() => window.open(charZKBLink(char.eve_id), '_blank'), [char]);
|
||||
const handleOpenEveWho = useCallback(() => window.open(charEveWhoLink(char.eve_id), '_blank'), [char]);
|
||||
|
||||
// INFO: Simple mode show only name and icon of ally/corp. By default it compact view
|
||||
if (simpleMode) {
|
||||
return (
|
||||
@@ -244,7 +250,24 @@ export const CharacterCard = ({
|
||||
{char.name}
|
||||
</span>
|
||||
{showTicker && <span className="flex-shrink-0 text-gray-400 ml-1">[{tickerText}]</span>}
|
||||
|
||||
<div className={clsx('flex gap-1 items-center h-full ml-[6px]')}>
|
||||
<WdImgButton
|
||||
width={16}
|
||||
height={16}
|
||||
tooltip={{ position: TooltipPosition.top, content: 'Open zkillboard' }}
|
||||
source={ZKB_ICON}
|
||||
onClick={handleOpenZKB}
|
||||
className="min-w-[16px]"
|
||||
/>
|
||||
<WdImgButton
|
||||
tooltip={{ position: TooltipPosition.top, content: 'Open Eve Who' }}
|
||||
className={clsx('pi pi-user', '!text-[12px] relative top-[-1px]')}
|
||||
onClick={handleOpenEveWho}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{locationShown ? (
|
||||
<div className="text-gray-300 text-xs overflow-hidden text-ellipsis whitespace-nowrap">
|
||||
<SystemView
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
.MarkdownCommentRoot {
|
||||
border-left-width: 3px;
|
||||
|
||||
.MarkdownTextViewer {
|
||||
@apply text-[12px] leading-[1.2] text-stone-300 break-words;
|
||||
@apply bg-gradient-to-r from-stone-600/40 via-stone-600/10 to-stone-600/0;
|
||||
|
||||
.h1 {
|
||||
@apply text-[12px] font-normal m-0 p-0 border-none break-words whitespace-normal;
|
||||
@@ -56,6 +53,10 @@
|
||||
@apply font-bold text-green-400 break-words whitespace-normal;
|
||||
}
|
||||
|
||||
strong {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
i, em {
|
||||
@apply italic text-pink-400 break-words whitespace-normal;
|
||||
}
|
||||
@@ -2,10 +2,16 @@ import Markdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import remarkBreaks from 'remark-breaks';

import classes from './MarkdownTextViewer.module.scss';

const REMARK_PLUGINS = [remarkGfm, remarkBreaks];

type MarkdownTextViewerProps = { children: string };

export const MarkdownTextViewer = ({ children }: MarkdownTextViewerProps) => {
  return <Markdown remarkPlugins={REMARK_PLUGINS}>{children}</Markdown>;
  return (
    <div className={classes.MarkdownTextViewer}>
      <Markdown remarkPlugins={REMARK_PLUGINS}>{children}</Markdown>
    </div>
  );
};
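Usage mirrors the SystemInfoContent change above: the markdown source string is passed as children and the wrapper div picks up the scoped styles. A minimal sketch:

```typescript
import { MarkdownTextViewer } from '@/hooks/Mapper/components/ui-kit';

// Illustrative component: renders a system description as styled markdown.
export const Description = ({ text }: { text: string }) => (
  <MarkdownTextViewer>{text}</MarkdownTextViewer>
);
```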
assets/js/hooks/Mapper/helpers/linkHelpers.ts (new file, 2 lines)
@@ -0,0 +1,2 @@
export const charZKBLink = (characterId: string) => `https://zkillboard.com/character/${characterId}/`;
export const charEveWhoLink = (characterId: string) => `https://evewho.com/character/${characterId}`;
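Both helpers are plain string builders. The example below mirrors how PassageCard and CharacterCard use them in the diffs above; the character id value here is made up:

```typescript
import { charEveWhoLink, charZKBLink } from '@/hooks/Mapper/helpers/linkHelpers';

const eveId = '2112693530'; // hypothetical character id
window.open(charZKBLink(eveId), '_blank');    // https://zkillboard.com/character/2112693530/
window.open(charEveWhoLink(eveId), '_blank'); // https://evewho.com/character/2112693530
```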
@@ -68,4 +68,5 @@ export interface ActivitySummary {
  passages: number;
  connections: number;
  signatures: number;
  timestamp?: string;
}
@@ -6,11 +6,17 @@ export type PassageLimitedCharacterType = Pick<
>;

export type Passage = {
  from: boolean;
  inserted_at: string; // Date
  ship: ShipTypeRaw;
  character: PassageLimitedCharacterType;
};

export type PassageWithSourceTarget = {
  source: string;
  target: string;
} & Passage;

export type ConnectionInfoOutput = {
  marl_eol_time: string;
};
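The `from` flag records which direction a passage crossed the connection, and the Connections component above uses it to flip source and target. A standalone sketch of that mapping (the `cnInfo` shape is taken from the diff; the function name is illustrative):

```typescript
import { Passage, PassageWithSourceTarget } from '@/hooks/Mapper/types';

// Enrich raw passages with the systems on either end of the connection.
// The real code in Connections.tsx also sorts by inserted_at, newest first.
function withSourceTarget(
  passages: Passage[],
  cnInfo: { source: string; target: string },
): PassageWithSourceTarget[] {
  return passages.map(x => ({
    ...x,
    source: x.from ? cnInfo.target : cnInfo.source,
    target: x.from ? cnInfo.source : cnInfo.target,
  }));
}
```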
||||
@@ -63,6 +63,7 @@ config :wanderer_app, WandererAppWeb.Endpoint,
|
||||
]
|
||||
|
||||
config :wanderer_app,
|
||||
environment: :dev,
|
||||
dev_routes: true
|
||||
|
||||
# Do not include metadata nor timestamps in development logs
|
||||
|
||||
@@ -1,5 +1,8 @@
import Config

# Set environment at compile time for modules using Application.compile_env
config :wanderer_app, environment: :prod

# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix assets.deploy` task,
@@ -1,5 +1,9 @@
import Config

# Disable Ash async operations in tests to ensure transactional safety
# This prevents Ash from spawning tasks that could bypass the Ecto sandbox
config :ash, :disable_async?, true

# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
@@ -8,6 +8,8 @@ defmodule WandererApp.Api.Map do

  alias Ash.Resource.Change.Builtins

  require Logger

  postgres do
    repo(WandererApp.Repo)
    table("maps_v1")
@@ -55,6 +57,7 @@ defmodule WandererApp.Api.Map do
    define(:mark_as_deleted, action: :mark_as_deleted)
    define(:update_api_key, action: :update_api_key)
    define(:toggle_webhooks, action: :toggle_webhooks)
    define(:toggle_sse, action: :toggle_sse)

    define(:by_id,
      get_by: [:id],
@@ -103,7 +106,16 @@ defmodule WandererApp.Api.Map do
    end

    create :new do
      accept [:name, :slug, :description, :scope, :only_tracked_characters, :owner_id, :sse_enabled]
      accept [
        :name,
        :slug,
        :description,
        :scope,
        :only_tracked_characters,
        :owner_id,
        :sse_enabled
      ]

      primary?(true)
      argument :create_default_acl, :boolean, allow_nil?: true
      argument :acls, {:array, :uuid}, allow_nil?: true
@@ -188,6 +200,14 @@ defmodule WandererApp.Api.Map do
      require_atomic? false
    end

    update :toggle_sse do
      require_atomic? false
      accept [:sse_enabled]

      # Validate subscription when enabling SSE
      validate &validate_sse_subscription/2
    end

    create :duplicate do
      accept [:name, :description, :scope, :only_tracked_characters]
      argument :source_map_id, :uuid, allow_nil?: false
@@ -373,19 +393,13 @@ defmodule WandererApp.Api.Map do
    end
  end

  # Private validation functions

  @doc false
  # Validates that SSE can be enabled based on subscription status.
  # SSE Subscription Validation
  #
  # Validation rules:
  # 1. Skip if SSE not being enabled (no validation needed)
  # 2. Skip during map creation (map_id is nil, subscription doesn't exist yet)
  # 3. Skip in Community Edition mode (subscriptions disabled globally)
  # 4. Require active subscription in Enterprise mode
  #
  # This ensures users cannot enable SSE without a valid subscription in Enterprise mode,
  # while allowing SSE in Community Edition and during map creation.
  # This validation ensures that SSE can only be enabled when:
  # 1. SSE is being disabled (always allowed)
  # 2. Map is being created (skip validation, will be checked on first update)
  # 3. Community Edition mode (always allowed)
  # 4. Enterprise mode with active subscription
  defp validate_sse_subscription(changeset, _context) do
    sse_enabled = Ash.Changeset.get_attribute(changeset, :sse_enabled)
    map_id = changeset.data.id
@@ -397,7 +411,6 @@ defmodule WandererApp.Api.Map do
        :ok

      # Map creation (no ID yet) - skip validation
      # Subscription check will happen on first update if they try to enable SSE
      is_nil(map_id) ->
        :ok

@@ -411,7 +424,6 @@ defmodule WandererApp.Api.Map do
    end
  end

  # Helper to check if map has an active subscription
  defp validate_active_subscription(map_id) do
    case WandererApp.Map.is_subscription_active?(map_id) do
      {:ok, true} ->
@@ -421,11 +433,8 @@ defmodule WandererApp.Api.Map do
        {:error, field: :sse_enabled, message: "Active subscription required to enable SSE"}

      {:error, reason} ->
        require Logger
        Logger.warning("Failed to check subscription for map #{map_id}: #{inspect(reason)}")
        # Fail open - allow the operation but log the error
        # This prevents database errors from blocking legitimate operations
        :ok
        Logger.error("Error checking subscription status: #{inspect(reason)}")
        {:error, field: :sse_enabled, message: "Unable to verify subscription status"}
    end
  end
end
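The validation rules in the comment block read as a small decision table. An illustrative TypeScript restatement (the real implementation is the Elixir `validate_sse_subscription/2` above); note the behavior change in this diff: a failed subscription lookup now fails closed instead of open:

```typescript
type SseCheck = { ok: true } | { ok: false; message: string };

// Illustrative only; mirrors the Elixir validation in WandererApp.Api.Map.
function canEnableSse(opts: {
  sseEnabled: boolean;
  mapId: string | null; // null while the map is being created
  communityEdition: boolean;
  hasActiveSubscription: boolean | 'unknown'; // 'unknown' = lookup failed
}): SseCheck {
  if (!opts.sseEnabled) return { ok: true };    // disabling is always allowed
  if (opts.mapId === null) return { ok: true }; // creation: checked on first update
  if (opts.communityEdition) return { ok: true }; // subscriptions disabled globally
  if (opts.hasActiveSubscription === 'unknown') {
    // New behavior: fail closed when the status cannot be verified.
    return { ok: false, message: 'Unable to verify subscription status' };
  }
  return opts.hasActiveSubscription
    ? { ok: true }
    : { ok: false, message: 'Active subscription required to enable SSE' };
}
```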
@@ -128,6 +128,8 @@ defmodule WandererApp.Api.MapCharacterSettings do
      require_atomic? false

      accept([
        :tracked,
        :followed,
        :ship,
        :ship_name,
        :ship_item_id,
@@ -139,7 +141,6 @@ defmodule WandererApp.Api.MapCharacterSettings do

    update :track do
      accept [:map_id, :character_id]
      argument :map_id, :string, allow_nil?: false
      require_atomic? false

      # Load the record first
@@ -153,7 +154,6 @@ defmodule WandererApp.Api.MapCharacterSettings do

    update :untrack do
      accept [:map_id, :character_id]
      argument :map_id, :string, allow_nil?: false
      require_atomic? false

      # Load the record first
@@ -167,7 +167,6 @@ defmodule WandererApp.Api.MapCharacterSettings do

    update :follow do
      accept [:map_id, :character_id]
      argument :map_id, :string, allow_nil?: false
      require_atomic? false

      # Load the record first
@@ -181,7 +180,6 @@ defmodule WandererApp.Api.MapCharacterSettings do

    update :unfollow do
      accept [:map_id, :character_id]
      argument :map_id, :string, allow_nil?: false
      require_atomic? false

      # Load the record first
||||
@@ -58,6 +58,7 @@ defmodule WandererApp.Api.MapWebhookSubscription do
|
||||
:consecutive_failures,
|
||||
:secret
|
||||
]
|
||||
|
||||
require_atomic? false
|
||||
end
|
||||
|
||||
|
||||
@@ -93,6 +93,8 @@ defmodule WandererApp.CachedInfo do
|
||||
end
|
||||
end
|
||||
|
||||
def get_system_static_info(nil), do: {:ok, nil}
|
||||
|
||||
def get_system_static_info(solar_system_id) do
|
||||
{:ok, solar_system_id} = APIUtils.parse_int(solar_system_id)
|
||||
|
||||
|
||||
@@ -43,13 +43,14 @@ defmodule WandererApp.Character.Activity do
|
||||
## Parameters
|
||||
- `map_id`: ID of the map
|
||||
- `current_user`: Current user struct (used only to get user settings)
|
||||
- `days`: Optional number of days to filter activity (nil for all time)
|
||||
|
||||
## Returns
|
||||
- List of processed activity data
|
||||
"""
|
||||
def process_character_activity(map_id, current_user) do
|
||||
def process_character_activity(map_id, current_user, days \\ nil) do
|
||||
with {:ok, map_user_settings} <- get_map_user_settings(map_id, current_user.id),
|
||||
{:ok, raw_activity} <- WandererApp.Map.get_character_activity(map_id),
|
||||
{:ok, raw_activity} <- WandererApp.Map.get_character_activity(map_id, days),
|
||||
{:ok, user_characters} <-
|
||||
WandererApp.Api.Character.active_by_user(%{user_id: current_user.id}) do
|
||||
process_activity_data(raw_activity, map_user_settings, user_characters)
|
||||
|
||||
@@ -1,5 +1,18 @@
|
||||
defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
@moduledoc false
|
||||
@moduledoc """
|
||||
Implementation of the character tracker manager.
|
||||
|
||||
This module manages the lifecycle of character trackers and handles:
|
||||
- Starting/stopping character tracking
|
||||
- Garbage collection of inactive trackers (5-minute timeout)
|
||||
- Processing the untrack queue (5-minute interval)
|
||||
|
||||
## Logging
|
||||
|
||||
This module emits detailed logs for debugging character tracking issues:
|
||||
- WARNING: Unexpected states or potential issues
|
||||
- DEBUG: Start/stop tracking events, garbage collection, queue processing
|
||||
"""
|
||||
require Logger
|
||||
|
||||
defstruct [
|
||||
@@ -27,6 +40,11 @@ defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
Process.send_after(self(), :garbage_collect, @garbage_collection_interval)
|
||||
Process.send_after(self(), :untrack_characters, @untrack_characters_interval)
|
||||
|
||||
Logger.debug("[TrackerManager] Initialized with intervals: " <>
|
||||
"garbage_collection=#{div(@garbage_collection_interval, 60_000)}min, " <>
|
||||
"untrack=#{div(@untrack_characters_interval, 60_000)}min, " <>
|
||||
"inactive_timeout=#{div(@inactive_character_timeout, 60_000)}min")
|
||||
|
||||
%{
|
||||
characters: [],
|
||||
opts: args
|
||||
@@ -38,6 +56,10 @@ defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
{:ok, tracked_characters} = WandererApp.Cache.lookup("tracked_characters", [])
|
||||
WandererApp.Cache.insert("tracked_characters", [])
|
||||
|
||||
if length(tracked_characters) > 0 do
|
||||
Logger.debug("[TrackerManager] Restoring #{length(tracked_characters)} tracked characters from cache")
|
||||
end
|
||||
|
||||
tracked_characters
|
||||
|> Enum.each(fn character_id ->
|
||||
start_tracking(state, character_id)
|
||||
@@ -53,7 +75,9 @@ defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
true
|
||||
)
|
||||
|
||||
Logger.debug(fn -> "Add character to track_characters_queue: #{inspect(character_id)}" end)
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Queuing character #{character_id} for tracking start"
|
||||
end)
|
||||
|
||||
WandererApp.Cache.insert_or_update(
|
||||
"track_characters_queue",
|
||||
@@ -71,13 +95,33 @@ defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
with {:ok, characters} <- WandererApp.Cache.lookup("tracked_characters", []),
|
||||
true <- Enum.member?(characters, character_id),
|
||||
false <- WandererApp.Cache.has_key?("#{character_id}:track_requested") do
|
||||
Logger.debug(fn -> "Shutting down character tracker: #{inspect(character_id)}" end)
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Stopping tracker for character #{character_id} - " <>
|
||||
"reason: no active maps (garbage collected after #{div(@inactive_character_timeout, 60_000)} minutes)"
|
||||
end)
|
||||
|
||||
WandererApp.Cache.delete("character:#{character_id}:last_active_time")
|
||||
WandererApp.Character.delete_character_state(character_id)
|
||||
WandererApp.Character.TrackerPoolDynamicSupervisor.stop_tracking(character_id)
|
||||
|
||||
:telemetry.execute([:wanderer_app, :character, :tracker, :stopped], %{count: 1})
|
||||
:telemetry.execute(
|
||||
[:wanderer_app, :character, :tracker, :stopped],
|
||||
%{count: 1, system_time: System.system_time()},
|
||||
%{character_id: character_id, reason: :garbage_collection}
|
||||
)
|
||||
else
|
||||
{:ok, characters} when is_list(characters) ->
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Character #{character_id} not in tracked list, skipping stop"
|
||||
end)
|
||||
|
||||
false ->
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Character #{character_id} has pending track request, skipping stop"
|
||||
end)
|
||||
|
||||
_ ->
|
||||
:ok
|
||||
end
|
||||
|
||||
WandererApp.Cache.insert_or_update(
|
||||
@@ -101,6 +145,10 @@ defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
} = track_settings
|
||||
) do
|
||||
if track do
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Enabling tracking for character #{character_id} on map #{map_id}"
|
||||
end)
|
||||
|
||||
remove_from_untrack_queue(map_id, character_id)
|
||||
|
||||
{:ok, character_state} =
|
||||
@@ -108,6 +156,11 @@ defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
|
||||
WandererApp.Character.update_character_state(character_id, character_state)
|
||||
else
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Queuing character #{character_id} for untracking from map #{map_id} - " <>
|
||||
"will be processed within #{div(@untrack_characters_interval, 60_000)} minutes"
|
||||
end)
|
||||
|
||||
add_to_untrack_queue(map_id, character_id)
|
||||
end
|
||||
|
||||
@@ -130,8 +183,19 @@ defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
"character_untrack_queue",
|
||||
[],
|
||||
fn untrack_queue ->
|
||||
untrack_queue
|
||||
|> Enum.reject(fn {m_id, c_id} -> m_id == map_id and c_id == character_id end)
|
||||
original_length = length(untrack_queue)
|
||||
filtered =
|
||||
untrack_queue
|
||||
|> Enum.reject(fn {m_id, c_id} -> m_id == map_id and c_id == character_id end)
|
||||
|
||||
if length(filtered) < original_length do
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Removed character #{character_id} from untrack queue for map #{map_id} - " <>
|
||||
"character re-enabled tracking"
|
||||
end)
|
||||
end
|
||||
|
||||
filtered
|
||||
end
|
||||
)
|
||||
end
|
||||
@@ -170,6 +234,12 @@ defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
Process.send_after(self(), :check_start_queue, @check_start_queue_interval)
|
||||
{:ok, track_characters_queue} = WandererApp.Cache.lookup("track_characters_queue", [])
|
||||
|
||||
if length(track_characters_queue) > 0 do
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Processing start queue: #{length(track_characters_queue)} characters"
|
||||
end)
|
||||
end
|
||||
|
||||
track_characters_queue
|
||||
|> Enum.each(fn character_id ->
|
||||
track_character(character_id, %{})
|
||||
@@ -186,35 +256,66 @@ defmodule WandererApp.Character.TrackerManager.Impl do
|
||||
|
||||
{:ok, characters} = WandererApp.Cache.lookup("tracked_characters", [])
|
||||
|
||||
characters
|
||||
|> Task.async_stream(
|
||||
fn character_id ->
|
||||
case WandererApp.Cache.lookup("character:#{character_id}:last_active_time") do
|
||||
{:ok, nil} ->
|
||||
:skip
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Running garbage collection on #{length(characters)} tracked characters"
|
||||
end)
|
||||
|
||||
{:ok, last_active_time} ->
|
||||
duration = DateTime.diff(DateTime.utc_now(), last_active_time, :second)
|
||||
|
||||
if duration * 1000 > @inactive_character_timeout do
|
||||
{:stop, character_id}
|
||||
else
|
||||
inactive_characters =
|
||||
characters
|
||||
|> Task.async_stream(
|
||||
fn character_id ->
|
||||
case WandererApp.Cache.lookup("character:#{character_id}:last_active_time") do
|
||||
{:ok, nil} ->
|
||||
# Character is still active (no last_active_time set)
|
||||
:skip
|
||||
end
|
||||
end
|
||||
end,
|
||||
max_concurrency: System.schedulers_online() * 4,
|
||||
on_timeout: :kill_task,
|
||||
timeout: :timer.seconds(60)
|
||||
)
|
||||
|> Enum.each(fn result ->
|
||||
case result do
|
||||
{:ok, {:stop, character_id}} ->
|
||||
Process.send_after(self(), {:stop_track, character_id}, 100)
|
||||
|
||||
_ ->
|
||||
:ok
|
||||
end
|
||||
{:ok, last_active_time} ->
|
||||
duration_seconds = DateTime.diff(DateTime.utc_now(), last_active_time, :second)
|
||||
duration_ms = duration_seconds * 1000
|
||||
|
||||
if duration_ms > @inactive_character_timeout do
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Character #{character_id} marked for garbage collection - " <>
|
||||
"inactive for #{div(duration_seconds, 60)} minutes " <>
|
||||
"(threshold: #{div(@inactive_character_timeout, 60_000)} minutes)"
|
||||
end)
|
||||
|
||||
{:stop, character_id, duration_seconds}
|
||||
else
|
||||
:skip
|
||||
end
|
||||
end
|
||||
end,
|
||||
max_concurrency: System.schedulers_online() * 4,
|
||||
on_timeout: :kill_task,
|
||||
timeout: :timer.seconds(60)
|
||||
)
|
||||
|> Enum.reduce([], fn result, acc ->
|
||||
case result do
|
||||
{:ok, {:stop, character_id, duration}} ->
|
||||
[{character_id, duration} | acc]
|
||||
|
||||
_ ->
|
||||
acc
|
||||
end
|
||||
end)
|
||||
|
||||
if length(inactive_characters) > 0 do
|
||||
Logger.debug(fn ->
|
||||
"[TrackerManager] Garbage collection found #{length(inactive_characters)} inactive characters to stop"
|
||||
end)
|
||||
|
||||
# Emit telemetry for garbage collection
|
||||
:telemetry.execute(
|
||||
[:wanderer_app, :character, :tracker, :garbage_collection],
|
||||
%{inactive_count: length(inactive_characters), total_tracked: length(characters)},
|
||||
%{character_ids: Enum.map(inactive_characters, fn {id, _} -> id end)}
|
||||
)
|
||||
end
|
||||
|
||||
inactive_characters
|
||||
|> Enum.each(fn {character_id, _duration} ->
|
||||
Process.send_after(self(), {:stop_track, character_id}, 100)
|
||||
end)
|
||||
|
||||
state
|
||||
@@ -226,9 +327,22 @@ defmodule WandererApp.Character.TrackerManager.Impl do
      ) do
    Process.send_after(self(), :untrack_characters, @untrack_characters_interval)

    WandererApp.Cache.lookup!("character_untrack_queue", [])
    untrack_queue = WandererApp.Cache.lookup!("character_untrack_queue", [])

    if length(untrack_queue) > 0 do
      Logger.debug(fn ->
        "[TrackerManager] Processing untrack queue: #{length(untrack_queue)} character-map pairs"
      end)
    end

    untrack_queue
    |> Task.async_stream(
      fn {map_id, character_id} ->
        Logger.debug(fn ->
          "[TrackerManager] Untracking character #{character_id} from map #{map_id} - " <>
            "reason: character no longer present on map"
        end)

        remove_from_untrack_queue(map_id, character_id)

        WandererApp.Cache.delete("map:#{map_id}:character:#{character_id}:solar_system_id")
@@ -255,12 +369,36 @@ defmodule WandererApp.Character.TrackerManager.Impl do

        WandererApp.Character.update_character_state(character_id, character_state)
        WandererApp.Map.Server.Impl.broadcast!(map_id, :untrack_character, character_id)

        # Emit telemetry for untrack event
        :telemetry.execute(
          [:wanderer_app, :character, :tracker, :untracked_from_map],
          %{system_time: System.system_time()},
          %{character_id: character_id, map_id: map_id, reason: :presence_left}
        )

        {:ok, character_id, map_id}
      end,
      max_concurrency: System.schedulers_online() * 4,
      on_timeout: :kill_task,
      timeout: :timer.seconds(30)
    )
    |> Enum.each(fn _result -> :ok end)
    |> Enum.each(fn result ->
      case result do
        {:ok, {:ok, character_id, map_id}} ->
          Logger.debug(fn ->
            "[TrackerManager] Successfully untracked character #{character_id} from map #{map_id}"
          end)

        {:exit, reason} ->
          Logger.warning(fn ->
            "[TrackerManager] Untrack task exited with reason: #{inspect(reason)}"
          end)

        _ ->
          :ok
      end
    end)

    state
  end
@@ -268,9 +406,17 @@ defmodule WandererApp.Character.TrackerManager.Impl do
  def handle_info({:stop_track, character_id}, state) do
    if not WandererApp.Cache.has_key?("character:#{character_id}:is_stop_tracking") do
      WandererApp.Cache.insert("character:#{character_id}:is_stop_tracking", true)
      Logger.debug(fn -> "Stopping character tracker: #{inspect(character_id)}" end)

      Logger.debug(fn ->
        "[TrackerManager] Executing stop_track for character #{character_id}"
      end)

      stop_tracking(state, character_id)
      WandererApp.Cache.delete("character:#{character_id}:is_stop_tracking")
    else
      Logger.debug(fn ->
        "[TrackerManager] Character #{character_id} already being stopped, skipping duplicate request"
      end)
    end

    state
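The is_stop_tracking flag is a small idempotency guard: the first :stop_track message claims the key, and duplicates arriving during the stop become no-ops. The general shape, as a hedged sketch (the try/after cleanup is our hardening suggestion, not in the diff):

# `cache` stands in for WandererApp.Cache; any has_key?/insert/delete store works.
def run_once(cache, key, fun) do
  if cache.has_key?(key) do
    :already_running
  else
    cache.insert(key, true)

    try do
      fun.()
    after
      # Clear the flag even if `fun` raises, so a crash cannot wedge the guard.
      cache.delete(key)
    end
  end
end

Note that check-then-insert is not atomic; it narrows the duplicate window rather than eliminating it, which is acceptable for a debounce like this one.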
@@ -279,7 +425,9 @@ defmodule WandererApp.Character.TrackerManager.Impl do
  def track_character(character_id, opts) do
    with {:ok, characters} <- WandererApp.Cache.lookup("tracked_characters", []),
         false <- Enum.member?(characters, character_id) do
      Logger.debug(fn -> "Start character tracker: #{inspect(character_id)}" end)
      Logger.debug(fn ->
        "[TrackerManager] Starting tracker for character #{character_id}"
      end)

      WandererApp.Cache.insert_or_update(
        "tracked_characters",
@@ -312,7 +460,30 @@ defmodule WandererApp.Character.TrackerManager.Impl do
        character_id,
        %{opts: opts}
      ])

      # Emit telemetry for tracker start
      :telemetry.execute(
        [:wanderer_app, :character, :tracker, :started],
        %{count: 1, system_time: System.system_time()},
        %{character_id: character_id}
      )
    else
      true ->
        Logger.debug(fn ->
          "[TrackerManager] Character #{character_id} already being tracked"
        end)

        WandererApp.Cache.insert_or_update(
          "track_characters_queue",
          [],
          fn existing ->
            existing
            |> Enum.reject(fn c_id -> c_id == character_id end)
          end
        )

        WandererApp.Cache.delete("#{character_id}:track_requested")

      _ ->
        WandererApp.Cache.insert_or_update(
          "track_characters_queue",
@@ -114,8 +114,88 @@ defmodule WandererApp.Character.TrackingUtils do

  # Private implementation of update character tracking
  defp do_update_character_tracking(character, map_id, track, caller_pid) do
    WandererApp.MapCharacterSettingsRepo.get(map_id, character.id)
    |> case do
    # First check current tracking state to avoid unnecessary permission checks
    current_settings = WandererApp.MapCharacterSettingsRepo.get(map_id, character.id)

    case {track, current_settings} do
      # Already tracked and wants to stay tracked - no permission check needed
      {true, {:ok, %{tracked: true} = settings}} ->
        do_update_character_tracking_impl(character, map_id, track, caller_pid, {:ok, settings})

      # Wants to enable tracking - check permissions first
      {true, settings_result} ->
        case check_character_tracking_permission(character, map_id) do
          {:ok, :allowed} ->
            do_update_character_tracking_impl(character, map_id, track, caller_pid, settings_result)

          {:error, reason} ->
            Logger.warning(
              "[CharacterTracking] Character #{character.id} cannot be tracked on map #{map_id}: #{reason}"
            )

            {:error, reason}
        end

      # Untracking is always allowed
      {false, settings_result} ->
        do_update_character_tracking_impl(character, map_id, track, caller_pid, settings_result)
    end
  end

  # Check if a character has permission to be tracked on a map
  defp check_character_tracking_permission(character, map_id) do
    with {:ok, %{acls: acls, owner_id: owner_id}} <-
           WandererApp.MapRepo.get(map_id,
             acls: [
               :owner_id,
               members: [:role, :eve_character_id, :eve_corporation_id, :eve_alliance_id]
             ]
           ) do
      # Check if character is the map owner
      if character.id == owner_id do
        {:ok, :allowed}
      else
        # Check if character belongs to same user as owner (Option 3 check)
        case check_same_user_as_owner(character, owner_id) do
          true ->
            {:ok, :allowed}

          false ->
            # Check ACL-based permissions
            [character_permissions] =
              WandererApp.Permissions.check_characters_access([character], acls)

            map_permissions = WandererApp.Permissions.get_permissions(character_permissions)

            if map_permissions.track_character and map_permissions.view_system do
              {:ok, :allowed}
            else
              {:error,
               "Character does not have tracking permission on this map. Please add the character to a map access list or ensure you are the map owner."}
            end
        end
      end
    else
      {:error, _} ->
        {:error, "Failed to verify map permissions"}
    end
  end

  # Check if character belongs to the same user as the map owner
  defp check_same_user_as_owner(_character, nil), do: false

  defp check_same_user_as_owner(character, owner_id) do
    case WandererApp.Character.get_character(owner_id) do
      {:ok, owner_character} ->
        character.user_id != nil and character.user_id == owner_character.user_id

      _ ->
        false
    end
  end

  defp do_update_character_tracking_impl(character, map_id, track, caller_pid, settings_result) do
    case settings_result do
      # Untracking flow
      {:ok, %{tracked: true} = existing_settings} ->
        if not track do

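The branching above is easiest to read as an ordered decision list: an already-tracked character wins immediately, untracking is always allowed, and only a fresh enable pays for the ACL lookup. Condensed into one predicate for the enable path (a sketch that mirrors the diff's logic; allowed_to_track?/3 itself is not in the code):

# Cheapest check first; only the second clause touches the map's ACLs.
defp allowed_to_track?(_character, _map_id, {:ok, %{tracked: true}}), do: true

defp allowed_to_track?(character, map_id, _settings_result) do
  match?({:ok, :allowed}, check_character_tracking_permission(character, map_id))
end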
@@ -463,7 +463,8 @@ defmodule WandererApp.Esi.ApiClient do

      {:error, reason} ->
        # Check if this is a Finch pool error
        if is_exception(reason) and Exception.message(reason) =~ "unable to provide a connection" do
        if is_exception(reason) and
             Exception.message(reason) =~ "unable to provide a connection" do
          :telemetry.execute(
            [:wanderer_app, :finch, :pool_exhausted],
            %{count: 1},
@@ -677,7 +678,8 @@ defmodule WandererApp.Esi.ApiClient do

      {:error, reason} ->
        # Check if this is a Finch pool error
        if is_exception(reason) and Exception.message(reason) =~ "unable to provide a connection" do
        if is_exception(reason) and
             Exception.message(reason) =~ "unable to provide a connection" do
          :telemetry.execute(
            [:wanderer_app, :finch, :pool_exhausted],
            %{count: 1},

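Both hunks wrap the same check; the only change is line-breaking for the formatter. Factored out, the detection reads as follows (the helper name is ours; the telemetry event name comes from the diff):

defp finch_pool_exhausted?(reason) do
  # Guard with is_exception/1 so non-exception reasons (atoms, tuples)
  # don't crash Exception.message/1.
  is_exception(reason) and
    Exception.message(reason) =~ "unable to provide a connection"
end

# On a hit, the caller emits:
#   :telemetry.execute([:wanderer_app, :finch, :pool_exhausted], %{count: 1}, metadata)
# where `metadata` is whatever the surrounding clause supplies (truncated in this hunk).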
@@ -403,10 +403,24 @@ defmodule WandererApp.Kills.MessageHandler do

  defp extract_field(_data, _field_names), do: nil

  # Specific field extractors using the generic function
  # Generic nested field extraction - tries flat keys first, then nested object
  @spec extract_nested_field(map(), list(String.t()), String.t(), String.t()) :: String.t() | nil
  defp extract_nested_field(data, flat_keys, nested_key, field) when is_map(data) do
    case extract_field(data, flat_keys) do
      nil ->
        case data[nested_key] do
          %{^field => value} when is_binary(value) and value != "" -> value
          _ -> nil
        end

      value ->
        value
    end
  end

  # Specific field extractors using the generic functions
  @spec get_character_name(map() | any()) :: String.t() | nil
  defp get_character_name(data) when is_map(data) do
    # Try multiple possible field names
    field_names = ["attacker_name", "victim_name", "character_name", "name"]

    extract_field(data, field_names) ||
@@ -419,30 +433,26 @@ defmodule WandererApp.Kills.MessageHandler do
  defp get_character_name(_), do: nil

  @spec get_corp_ticker(map() | any()) :: String.t() | nil
  defp get_corp_ticker(data) when is_map(data) do
    extract_field(data, ["corporation_ticker", "corp_ticker"])
  end
  defp get_corp_ticker(data) when is_map(data),
    do: extract_nested_field(data, ["corporation_ticker", "corp_ticker"], "corporation", "ticker")

  defp get_corp_ticker(_), do: nil

  @spec get_corp_name(map() | any()) :: String.t() | nil
  defp get_corp_name(data) when is_map(data) do
    extract_field(data, ["corporation_name", "corp_name"])
  end
  defp get_corp_name(data) when is_map(data),
    do: extract_nested_field(data, ["corporation_name", "corp_name"], "corporation", "name")

  defp get_corp_name(_), do: nil

  @spec get_alliance_ticker(map() | any()) :: String.t() | nil
  defp get_alliance_ticker(data) when is_map(data) do
    extract_field(data, ["alliance_ticker"])
  end
  defp get_alliance_ticker(data) when is_map(data),
    do: extract_nested_field(data, ["alliance_ticker"], "alliance", "ticker")

  defp get_alliance_ticker(_), do: nil

  @spec get_alliance_name(map() | any()) :: String.t() | nil
  defp get_alliance_name(data) when is_map(data) do
    extract_field(data, ["alliance_name"])
  end
  defp get_alliance_name(data) when is_map(data),
    do: extract_nested_field(data, ["alliance_name"], "alliance", "name")

  defp get_alliance_name(_), do: nil

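To see what the nested fallback buys, take two payload shapes the kill feed can plausibly deliver (sample maps of our own making):

flat   = %{"corporation_ticker" => "WNDR"}
nested = %{"corporation" => %{"ticker" => "WNDR"}}

# Both now resolve to "WNDR"; previously only the flat shape did:
extract_nested_field(flat, ["corporation_ticker", "corp_ticker"], "corporation", "ticker")
extract_nested_field(nested, ["corporation_ticker", "corp_ticker"], "corporation", "ticker")

Empty strings are deliberately rejected by the `value != ""` guard, so a blank ticker falls through to nil rather than rendering as an empty label.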
@@ -9,6 +9,8 @@ defmodule WandererApp.Map.Manager do

  alias WandererApp.Map.Server

  @environment Application.compile_env(:wanderer_app, :environment)

  @maps_start_chunk_size 20
  @maps_start_interval 500
  @maps_queue :maps_queue
@@ -19,7 +21,7 @@ defmodule WandererApp.Map.Manager do

  # Test-aware async task runner
  defp safe_async_task(fun) do
    if Mix.env() == :test do
    if @environment == :test do
      # In tests, run synchronously to avoid database ownership issues
      try do
        fun.()
@@ -113,11 +115,20 @@ defmodule WandererApp.Map.Manager do
    Enum.each(pings, fn %{id: ping_id, map_id: map_id, type: type} = ping ->
      {:ok, %{system: system}} = ping |> Ash.load([:system])

      Server.Impl.broadcast!(map_id, :ping_cancelled, %{
        id: ping_id,
        solar_system_id: system.solar_system_id,
        type: type
      })
      # Handle case where parent system was already deleted
      case system do
        nil ->
          Logger.warning(
            "[cleanup_expired_pings] ping #{ping_id} destroyed (parent system already deleted)"
          )

        %{solar_system_id: solar_system_id} ->
          Server.Impl.broadcast!(map_id, :ping_cancelled, %{
            id: ping_id,
            solar_system_id: solar_system_id,
            type: type
          })
      end

      Ash.destroy!(ping)
    end)
@@ -139,7 +150,7 @@ defmodule WandererApp.Map.Manager do

    WandererApp.Queue.clear(@maps_queue)

    if Mix.env() == :test do
    if @environment == :test do
      # In tests, run synchronously to avoid database ownership issues
      Logger.debug(fn -> "Starting maps synchronously in test mode" end)

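The Mix.env() to @environment swap is more than style: Mix is a build-time tool and is not loaded in compiled releases, so Mix.env() raises there, while Application.compile_env/2 bakes the value in at compile time. The pattern in isolation (the config line is an assumption about how :environment gets set; the diff only shows the reading side):

# config/config.exs (assumed):
#   config :wanderer_app, environment: config_env()

defmodule EnvAware do
  @environment Application.compile_env(:wanderer_app, :environment)

  def run_async(fun) do
    if @environment == :test do
      # Synchronous in tests: avoids Ecto sandbox ownership errors
      # when the test process exits before the task finishes.
      fun.()
    else
      Task.start(fun)
    end
  end
end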
@@ -18,10 +18,22 @@ defmodule WandererApp.Map.MapPool do
  @map_pool_limit 10

  @garbage_collection_interval :timer.hours(4)
  @systems_cleanup_timeout :timer.minutes(30)
  @characters_cleanup_timeout :timer.minutes(5)
  @connections_cleanup_timeout :timer.minutes(5)
  @backup_state_timeout :timer.minutes(1)
  # Use very long timeouts in test environment to prevent background tasks from running during tests
  # This avoids database connection ownership errors when tests finish before async tasks complete
  @environment Application.compile_env(:wanderer_app, :environment)

  @systems_cleanup_timeout if @environment == :test,
                             do: :timer.hours(24),
                             else: :timer.minutes(30)
  @characters_cleanup_timeout if @environment == :test,
                                do: :timer.hours(24),
                                else: :timer.minutes(5)
  @connections_cleanup_timeout if @environment == :test,
                                 do: :timer.hours(24),
                                 else: :timer.minutes(5)
  @backup_state_timeout if @environment == :test,
                          do: :timer.hours(24),
                          else: :timer.minutes(1)

  def new(), do: __struct__()
  def new(args), do: __struct__(args)
@@ -187,7 +199,7 @@ defmodule WandererApp.Map.MapPool do

    # Schedule periodic tasks
    Process.send_after(self(), :backup_state, @backup_state_timeout)
    Process.send_after(self(), :cleanup_systems, 15_000)
    Process.send_after(self(), :cleanup_systems, @systems_cleanup_timeout)
    Process.send_after(self(), :cleanup_characters, @characters_cleanup_timeout)
    Process.send_after(self(), :cleanup_connections, @connections_cleanup_timeout)
    Process.send_after(self(), :garbage_collect, @garbage_collection_interval)

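These attributes feed the standard GenServer tick loop: arm a timer in init/1, re-arm it first thing in handle_info/2. A stripped-down sketch (do_cleanup/1 stands in for the real work):

def init(state) do
  Process.send_after(self(), :cleanup_characters, @characters_cleanup_timeout)
  {:ok, state}
end

def handle_info(:cleanup_characters, state) do
  # Re-arm before doing the work so a crash below cannot stop the cycle.
  Process.send_after(self(), :cleanup_characters, @characters_cleanup_timeout)
  {:noreply, do_cleanup(state)}
end

With the test-environment values set to 24 hours, these messages effectively never fire inside a test run, which is the whole point of the change.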
@@ -1,5 +1,19 @@
defmodule WandererApp.Map.Server.CharactersImpl do
  @moduledoc false
  @moduledoc """
  Handles character-related operations for map servers.

  This module manages:
  - Character tracking on maps
  - Permission-based character cleanup
  - Character presence updates

  ## Logging

  This module emits detailed logs for debugging character tracking issues:
  - INFO: Character track/untrack events, permission cleanup results
  - WARNING: Permission failures, unexpected states
  - DEBUG: Detailed permission check results
  """

  require Logger

@@ -15,6 +29,11 @@ defmodule WandererApp.Map.Server.CharactersImpl do
    if Enum.empty?(invalidate_character_ids) do
      :ok
    else
      Logger.debug(fn ->
        "[CharactersImpl] Running permission cleanup for map #{map_id} - " <>
          "checking #{length(invalidate_character_ids)} characters"
      end)

      {:ok, %{acls: acls}} =
        WandererApp.MapRepo.get(map_id,
          acls: [
@@ -30,6 +49,11 @@ defmodule WandererApp.Map.Server.CharactersImpl do
  def track_characters(_map_id, []), do: :ok

  def track_characters(map_id, [character_id | rest]) do
    Logger.debug(fn ->
      "[CharactersImpl] Starting tracking for character #{character_id} on map #{map_id} - " <>
        "reason: character joined presence"
    end)

    track_character(map_id, character_id)
    track_characters(map_id, rest)
  end
@@ -41,6 +65,12 @@ defmodule WandererApp.Map.Server.CharactersImpl do
      |> WandererApp.Map.get_map!()
      |> Map.get(:characters, [])

    if length(character_ids) > 0 do
      Logger.debug(fn ->
        "[CharactersImpl] Scheduling permission check for #{length(character_ids)} characters on map #{map_id}"
      end)
    end

    WandererApp.Cache.insert("map_#{map_id}:invalidate_character_ids", character_ids)

    :ok
@@ -48,6 +78,13 @@ defmodule WandererApp.Map.Server.CharactersImpl do
  end

  def untrack_characters(map_id, character_ids) do
    if length(character_ids) > 0 do
      Logger.debug(fn ->
        "[CharactersImpl] Untracking #{length(character_ids)} characters from map #{map_id} - " <>
          "reason: characters no longer in presence_character_ids (grace period expired or user disconnected)"
      end)
    end

    character_ids
    |> Enum.each(fn character_id ->
      character_map_active = is_character_map_active?(map_id, character_id)
@@ -58,13 +95,32 @@ defmodule WandererApp.Map.Server.CharactersImpl do
  end

  defp untrack_character(true, map_id, character_id) do
    Logger.info(fn ->
      "[CharactersImpl] Untracking character #{character_id} from map #{map_id} - " <>
        "character was actively tracking this map"
    end)

    # Emit telemetry for tracking
    :telemetry.execute(
      [:wanderer_app, :character, :tracking, :stopped],
      %{system_time: System.system_time()},
      %{character_id: character_id, map_id: map_id, reason: :presence_expired}
    )

    WandererApp.Character.TrackerManager.update_track_settings(character_id, %{
      map_id: map_id,
      track: false
    })
  end

  defp untrack_character(_is_character_map_active, _map_id, _character_id), do: :ok
  defp untrack_character(false, map_id, character_id) do
    Logger.debug(fn ->
      "[CharactersImpl] Skipping untrack for character #{character_id} on map #{map_id} - " <>
        "character was not actively tracking this map"
    end)

    :ok
  end

  defp is_character_map_active?(map_id, character_id) do
    case WandererApp.Character.get_character_state(character_id) do
@@ -79,59 +135,134 @@ defmodule WandererApp.Map.Server.CharactersImpl do
  defp process_invalidate_characters(invalidate_character_ids, map_id, acls) do
    {:ok, %{map: %{owner_id: owner_id}}} = WandererApp.Map.get_map_state(map_id)

    invalidate_character_ids
    |> Task.async_stream(
      fn character_id ->
        character_id
        |> WandererApp.Character.get_character()
        |> case do
          {:ok, %{user_id: nil}} ->
            {:remove_character, character_id}
    # Option 3: Get owner's user_id to allow all characters from the same user
    owner_user_id = get_owner_user_id(owner_id)

          {:ok, character} ->
            [character_permissions] =
              WandererApp.Permissions.check_characters_access([character], acls)

            map_permissions =
              WandererApp.Permissions.get_map_permissions(
                character_permissions,
                owner_id,
                [character_id]
              )

            case map_permissions do
              %{view_system: false} ->
                {:remove_character, character_id}

              %{track_character: false} ->
                {:remove_character, character_id}

              _ ->
                :ok
            end

          _ ->
            :ok
        end
      end,
      timeout: :timer.seconds(60),
      max_concurrency: System.schedulers_online() * 4,
      on_timeout: :kill_task
    )
    |> Enum.reduce([], fn
      {:ok, {:remove_character, character_id}}, acc ->
        [character_id | acc]

      {:ok, _result}, acc ->
        acc

      {:error, reason}, acc ->
        Logger.error("Error in cleanup_characters: #{inspect(reason)}")
        acc
    Logger.debug(fn ->
      "[CharacterCleanup] Map #{map_id} - validating permissions for #{length(invalidate_character_ids)} characters"
    end)
    |> case do
      [] -> :ok
      character_ids_to_remove -> remove_and_untrack_characters(map_id, character_ids_to_remove)

    results =
      invalidate_character_ids
      |> Task.async_stream(
        fn character_id ->
          character_id
          |> WandererApp.Character.get_character()
          |> case do
            {:ok, %{user_id: nil}} ->
              {:remove_character, character_id, :no_user_id}

            {:ok, character} ->
              # Option 3: Check if character belongs to the same user as owner
              is_same_user_as_owner =
                owner_user_id != nil and character.user_id == owner_user_id

              if is_same_user_as_owner do
                # All characters from the map owner's account have full access
                :ok
              else
                [character_permissions] =
                  WandererApp.Permissions.check_characters_access([character], acls)

                map_permissions =
                  WandererApp.Permissions.get_map_permissions(
                    character_permissions,
                    owner_id,
                    [character_id]
                  )

                case map_permissions do
                  %{view_system: false} ->
                    {:remove_character, character_id, :no_view_permission}

                  %{track_character: false} ->
                    {:remove_character, character_id, :no_track_permission}

                  _ ->
                    :ok
                end
              end

            _ ->
              :ok
          end
        end,
        timeout: :timer.seconds(60),
        max_concurrency: System.schedulers_online() * 4,
        on_timeout: :kill_task
      )
      |> Enum.reduce([], fn
        {:ok, {:remove_character, character_id, reason}}, acc ->
          [{character_id, reason} | acc]

        {:ok, _result}, acc ->
          acc

        {:error, reason}, acc ->
          Logger.error(
            "[CharacterCleanup] Error checking character permissions: #{inspect(reason)}"
          )

          acc
      end)

    case results do
      [] ->
        Logger.debug(fn ->
          "[CharacterCleanup] Map #{map_id} - all #{length(invalidate_character_ids)} characters passed permission check"
        end)

        :ok

      characters_to_remove ->
        # Group by reason for better logging
        by_reason = Enum.group_by(characters_to_remove, fn {_id, reason} -> reason end)

        Enum.each(by_reason, fn {reason, chars} ->
          char_ids = Enum.map(chars, fn {id, _} -> id end)
          reason_str = permission_removal_reason_to_string(reason)

          Logger.debug(fn ->
            "[CharacterCleanup] Map #{map_id} - removing #{length(char_ids)} characters: #{reason_str} - " <>
              "character_ids: #{inspect(char_ids)}"
          end)

          # Emit telemetry for each removal reason
          :telemetry.execute(
            [:wanderer_app, :character, :tracking, :permission_revoked],
            %{count: length(char_ids), system_time: System.system_time()},
            %{map_id: map_id, character_ids: char_ids, reason: reason}
          )
        end)

        character_ids_to_remove = Enum.map(characters_to_remove, fn {id, _} -> id end)

        Logger.debug(fn ->
          "[CharacterCleanup] Map #{map_id} - total #{length(character_ids_to_remove)} characters " <>
            "will be removed due to permission issues (NO GRACE PERIOD)"
        end)

        remove_and_untrack_characters(map_id, character_ids_to_remove)
    end
  end

  defp permission_removal_reason_to_string(:no_user_id),
    do: "no user_id associated with character"

  defp permission_removal_reason_to_string(:no_view_permission), do: "lost view_system permission"

  defp permission_removal_reason_to_string(:no_track_permission),
    do: "lost track_character permission"

  defp permission_removal_reason_to_string(reason), do: "#{inspect(reason)}"

  # Helper to get the owner's user_id for Option 3
  defp get_owner_user_id(nil), do: nil

  defp get_owner_user_id(owner_id) do
    case WandererApp.Character.get_character(owner_id) do
      {:ok, %{user_id: user_id}} -> user_id
      _ -> nil
    end
  end

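The reason-tagged tuples make the final report a one-liner. With sample data:

removals = [
  {"char-1", :no_view_permission},
  {"char-2", :no_user_id},
  {"char-3", :no_view_permission}
]

Enum.group_by(removals, fn {_id, reason} -> reason end)
#=> %{
#     no_view_permission: [{"char-1", :no_view_permission}, {"char-3", :no_view_permission}],
#     no_user_id: [{"char-2", :no_user_id}]
#   }

Each group then gets one log line and one telemetry event, instead of one per character.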
@@ -161,10 +292,18 @@ defmodule WandererApp.Map.Server.CharactersImpl do
  end

  defp remove_and_untrack_characters(map_id, character_ids) do
    Logger.debug(fn ->
      "Map #{map_id} - remove and untrack characters #{inspect(character_ids)}"
    # Option 4: Enhanced logging for character removal
    Logger.info(fn ->
      "[CharacterCleanup] Map #{map_id} - starting removal of #{length(character_ids)} characters: #{inspect(character_ids)}"
    end)

    # Emit telemetry for monitoring
    :telemetry.execute(
      [:wanderer_app, :map, :characters_cleanup, :removal_started],
      %{character_count: length(character_ids), system_time: System.system_time()},
      %{map_id: map_id, character_ids: character_ids}
    )

    map_id
    |> untrack_characters(character_ids)

@@ -174,10 +313,21 @@ defmodule WandererApp.Map.Server.CharactersImpl do
      {:ok, settings} ->
        settings
        |> Enum.each(fn s ->
          Logger.info(fn ->
            "[CharacterCleanup] Map #{map_id} - destroying settings and removing character #{s.character_id}"
          end)

          WandererApp.MapCharacterSettingsRepo.destroy!(s)
          remove_character(map_id, s.character_id)
        end)

        # Emit telemetry for successful removal
        :telemetry.execute(
          [:wanderer_app, :map, :characters_cleanup, :removal_complete],
          %{removed_count: length(settings), system_time: System.system_time()},
          %{map_id: map_id}
        )

      _ ->
        :ok
    end

@@ -45,12 +45,6 @@ defmodule WandererApp.Map.Server.Impl do
      }
      |> new()

    # In test mode, give the test setup time to grant database access
    # This is necessary for async tests where the sandbox needs to allow this process
    if Mix.env() == :test do
      Process.sleep(150)
    end

    # Parallelize database queries for faster initialization
    start_time = System.monotonic_time(:millisecond)

@@ -314,56 +308,12 @@ defmodule WandererApp.Map.Server.Impl do
    end)

    # Create map state with retry logic for test scenarios
    create_map_state_with_retry(
      %{
        map_id: map_id,
        systems_last_activity: systems_last_activity,
        connections_eol_time: connections_eol_time,
        connections_start_time: connections_start_time
      },
      3
    )
  end

  # Helper to create map state with retry logic for async tests
  defp create_map_state_with_retry(attrs, retries_left) when retries_left > 0 do
    case WandererApp.Api.MapState.create(attrs) do
      {:ok, map_state} = result ->
        result

      {:error, %Ash.Error.Invalid{errors: errors}} = error ->
        # Check if it's a foreign key constraint error
        has_fkey_error =
          Enum.any?(errors, fn
            %Ash.Error.Changes.InvalidAttribute{private_vars: private_vars} ->
              Enum.any?(private_vars, fn
                {:constraint_type, :foreign_key} -> true
                _ -> false
              end)

            _ ->
              false
          end)

        if has_fkey_error and retries_left > 1 do
          # In test environments with async tests, the parent map might not be
          # visible yet due to sandbox timing. Brief retry with exponential backoff.
          sleep_time = (4 - retries_left) * 15 + 10
          Process.sleep(sleep_time)
          create_map_state_with_retry(attrs, retries_left - 1)
        else
          # Return error if not a foreign key issue or out of retries
          error
        end

      error ->
        error
    end
  end

  defp create_map_state_with_retry(attrs, 0) do
    # Final attempt without retry
    WandererApp.Api.MapState.create(attrs)
    WandererApp.Api.MapState.create(%{
      map_id: map_id,
      systems_last_activity: systems_last_activity,
      connections_eol_time: connections_eol_time,
      connections_start_time: connections_start_time
    })
  end

  def handle_event({:update_characters, map_id} = event) do
@@ -712,12 +662,45 @@ defmodule WandererApp.Map.Server.Impl do
        not Enum.member?(presence_character_ids, character_id)
      end)

    # Log presence changes for debugging
    if length(new_present_character_ids) > 0 or length(not_present_character_ids) > 0 do
      Logger.debug(fn ->
        "[MapServer] Map #{map_id} presence update - " <>
          "newly_present: #{inspect(new_present_character_ids)}, " <>
          "no_longer_present: #{inspect(not_present_character_ids)}, " <>
          "total_present: #{length(presence_character_ids)}"
      end)
    end

    WandererApp.Cache.insert(
      "map_#{map_id}:old_presence_character_ids",
      presence_character_ids
    )

    # Track new characters
    if length(new_present_character_ids) > 0 do
      Logger.debug(fn ->
        "[MapServer] Map #{map_id} - starting tracking for #{length(new_present_character_ids)} newly present characters"
      end)
    end

    CharactersImpl.track_characters(map_id, new_present_character_ids)

    # Untrack characters no longer present (grace period has expired)
    if length(not_present_character_ids) > 0 do
      Logger.debug(fn ->
        "[MapServer] Map #{map_id} - #{length(not_present_character_ids)} characters no longer in presence " <>
          "(grace period expired or never had one) - will be untracked"
      end)

      # Emit telemetry for presence-based untracking
      :telemetry.execute(
        [:wanderer_app, :map, :presence, :characters_left],
        %{count: length(not_present_character_ids), system_time: System.system_time()},
        %{map_id: map_id, character_ids: not_present_character_ids}
      )
    end

    CharactersImpl.untrack_characters(map_id, not_present_character_ids)

    broadcast!(

@@ -405,11 +405,20 @@ defmodule WandererApp.Map.Server.SystemsImpl do
        {:ok, %{eve_id: eve_id, system: system}} = s |> Ash.load([:system])
        :ok = Ash.destroy!(s)

        Logger.warning(
          "[cleanup_linked_signatures] for system #{system.solar_system_id}: #{inspect(eve_id)}"
        )
        # Handle case where parent system was already deleted
        case system do
          nil ->
            Logger.warning(
              "[cleanup_linked_signatures] signature #{eve_id} destroyed (parent system already deleted)"
            )

        Impl.broadcast!(map_id, :signatures_updated, system.solar_system_id)
          %{solar_system_id: solar_system_id} ->
            Logger.warning(
              "[cleanup_linked_signatures] for system #{solar_system_id}: #{inspect(eve_id)}"
            )

            Impl.broadcast!(map_id, :signatures_updated, solar_system_id)
        end
      rescue
        e ->
          Logger.error("Failed to cleanup linked signature: #{inspect(e)}")

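This is the same defensive shape as the cleanup_expired_pings change earlier in the diff: after Ash.load/2, the association may legitimately be nil if the parent row was already deleted, and matching on it up front turns a crash into a log line. Reduced to its core (names illustrative):

{:ok, %{system: system}} = Ash.load(record, [:system])

case system do
  nil ->
    # Parent already gone; do not dereference system.solar_system_id.
    Logger.warning("parent system already deleted")

  %{solar_system_id: solar_system_id} ->
    broadcast(:signatures_updated, solar_system_id)
end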
@@ -203,10 +203,7 @@ defmodule WandererApp.Maps do
      is_member_corp = to_string(c.corporation_id) in map_member_corporation_ids
      is_member_alliance = to_string(c.alliance_id) in map_member_alliance_ids

      has_access =
        is_owner or is_acl_owner or is_member_eve or is_member_corp or is_member_alliance

      has_access
      is_owner || is_acl_owner || is_member_eve || is_member_corp || is_member_alliance
    end)
  end

@@ -250,11 +247,11 @@ defmodule WandererApp.Maps do
      members ->
        members
        |> Enum.any?(fn member ->
          (member.role == :blocked and
          (member.role == :blocked &&
             member.eve_character_id in user_character_eve_ids) or
            (member.role == :blocked and
            (member.role == :blocked &&
               member.eve_corporation_id in user_character_corporation_ids) or
            (member.role == :blocked and
            (member.role == :blocked &&
               member.eve_alliance_id in user_character_alliance_ids)
        end)
    end

@@ -1,6 +1,8 @@
defmodule WandererApp.MapCharacterSettingsRepo do
  use WandererApp, :repository

  require Logger

  def get(map_id, character_id) do
    case WandererApp.Api.MapCharacterSettings.read_by_map_and_character(%{
           map_id: map_id,
@@ -53,22 +55,38 @@ defmodule WandererApp.MapCharacterSettingsRepo do
  def get_tracked_by_map_all(map_id),
    do: WandererApp.Api.MapCharacterSettings.tracked_by_map_all(%{map_id: map_id})

  def track(settings) do
    {:ok, _} = get(settings.map_id, settings.character_id)
    # Only update the tracked field, preserving other fields
    WandererApp.Api.MapCharacterSettings.track(%{
      map_id: settings.map_id,
      character_id: settings.character_id
    })
  def track(%{map_id: map_id, character_id: character_id}) do
    # First ensure the record exists (get creates if not exists)
    case get(map_id, character_id) do
      {:ok, settings} when not is_nil(settings) ->
        # Now update the tracked field
        settings
        |> WandererApp.Api.MapCharacterSettings.update(%{tracked: true})

      error ->
        Logger.error(
          "Failed to track character: #{character_id} on map: #{map_id}, #{inspect(error)}"
        )

        {:error, error}
    end
  end

  def untrack(settings) do
    {:ok, _} = get(settings.map_id, settings.character_id)
    # Only update the tracked field, preserving other fields
    WandererApp.Api.MapCharacterSettings.untrack(%{
      map_id: settings.map_id,
      character_id: settings.character_id
    })
  def untrack(%{map_id: map_id, character_id: character_id}) do
    # First ensure the record exists (get creates if not exists)
    case get(map_id, character_id) do
      {:ok, settings} when not is_nil(settings) ->
        # Now update the tracked field
        settings
        |> WandererApp.Api.MapCharacterSettings.update(%{tracked: false})

      error ->
        Logger.error(
          "Failed to untrack character: #{character_id} on map: #{map_id}, #{inspect(error)}"
        )

        {:error, error}
    end
  end

  def track!(settings) do
@@ -85,18 +103,36 @@ defmodule WandererApp.MapCharacterSettingsRepo do
    end
  end

  def follow(settings) do
    WandererApp.Api.MapCharacterSettings.follow(%{
      map_id: settings.map_id,
      character_id: settings.character_id
    })
  def follow(%{map_id: map_id, character_id: character_id} = _settings) do
    # First ensure the record exists (get creates if not exists)
    case get(map_id, character_id) do
      {:ok, settings} when not is_nil(settings) ->
        settings
        |> WandererApp.Api.MapCharacterSettings.update(%{followed: true})

      error ->
        Logger.error(
          "Failed to follow character: #{character_id} on map: #{map_id}, #{inspect(error)}"
        )

        {:error, error}
    end
  end

  def unfollow(settings) do
    WandererApp.Api.MapCharacterSettings.unfollow(%{
      map_id: settings.map_id,
      character_id: settings.character_id
    })
  def unfollow(%{map_id: map_id, character_id: character_id} = _settings) do
    # First ensure the record exists (get creates if not exists)
    case get(map_id, character_id) do
      {:ok, settings} when not is_nil(settings) ->
        settings
        |> WandererApp.Api.MapCharacterSettings.update(%{followed: false})

      error ->
        Logger.error(
          "Failed to unfollow character: #{character_id} on map: #{map_id}, #{inspect(error)}"
        )

        {:error, error}
    end
  end

  def follow!(settings) do

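track/1, untrack/1, follow/1 and unfollow/1 now share an identical get-then-update shape. If the duplication grows further, it could collapse into one private helper - a refactor sketch, not something the diff itself does:

defp set_flag(%{map_id: map_id, character_id: character_id}, attrs, verb) do
  case get(map_id, character_id) do
    {:ok, settings} when not is_nil(settings) ->
      WandererApp.Api.MapCharacterSettings.update(settings, attrs)

    error ->
      Logger.error(
        "Failed to #{verb} character: #{character_id} on map: #{map_id}, #{inspect(error)}"
      )

      {:error, error}
  end
end

# def track(settings), do: set_flag(settings, %{tracked: true}, "track")
# def unfollow(settings), do: set_flag(settings, %{followed: false}, "unfollow")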
@@ -1,6 +1,8 @@
defmodule WandererApp.TaskWrapper do
  @environment Application.compile_env(:wanderer_app, :environment)

  def start_link(module, func, args) do
    if Mix.env() == :test do
    if @environment == :test do
      apply(module, func, args)
    else
      Task.start_link(module, func, args)

@@ -336,8 +336,8 @@
          label="Valid"
          options={Enum.map(@valid_types, fn valid_type -> {valid_type.label, valid_type.id} end)}
        />

        <!-- Modal action buttons -->

        <!-- Modal action buttons -->
        <div class="modal-action">
          <.button class="mt-2" type="submit" phx-disable-with="Saving...">
            {(@live_action == :add_invite_link && "Add") || "Save"}

@@ -30,14 +30,17 @@ defmodule WandererAppWeb.MapActivityEventHandler do

  def handle_ui_event(
        "show_activity",
        _,
        params,
        %{assigns: %{map_id: map_id, current_user: current_user}} = socket
      ) do
    Task.async(fn ->
      try do
        # Extract days parameter (nil if not provided)
        days = Map.get(params, "days")

        # Get raw activity data from the domain logic
        result =
          WandererApp.Character.Activity.process_character_activity(map_id, current_user)
          WandererApp.Character.Activity.process_character_activity(map_id, current_user, days)

        # Group activities by user_id and summarize
        summarized_result =

@@ -17,6 +17,10 @@ defmodule WandererAppWeb.MapPingsEventHandler do
    {:ok, pings} = WandererApp.MapPingsRepo.get_by_map(map_id)

    pings
    |> Enum.filter(fn ping ->
      # Skip pings where system or character associations are nil (deleted)
      not is_nil(ping.system) and not is_nil(ping.character)
    end)
    |> Enum.reduce(socket, fn %{
                                id: id,
                                type: type,

@@ -60,7 +60,10 @@ defmodule WandererAppWeb.MapRoutesEventHandler do

    ping_system_ids =
      pings
      |> Enum.map(fn %{system: %{solar_system_id: solar_system_id}} -> "#{solar_system_id}" end)
      |> Enum.flat_map(fn
        %{system: %{solar_system_id: solar_system_id}} -> ["#{solar_system_id}"]
        _ -> []
      end)

    route_hubs = (ping_system_ids ++ hubs) |> Enum.uniq()

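Enum.flat_map/2 with a catch-all clause is the idiomatic map-and-filter in one pass; pings whose system association was deleted simply disappear instead of raising a MatchError:

pings = [%{system: %{solar_system_id: 30000142}}, %{system: nil}]

Enum.flat_map(pings, fn
  %{system: %{solar_system_id: id}} -> ["#{id}"]
  _ -> []
end)
#=> ["30000142"]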
@@ -163,6 +163,7 @@ defmodule WandererAppWeb.MapsLive do
      |> assign(:map_slug, map_slug)
      |> assign(:map_id, map.id)
      |> assign(:public_api_key, map.public_api_key)
      |> assign(:sse_enabled, map.sse_enabled)
      |> assign(:map, map)
      |> assign(
        export_settings: export_settings |> _get_export_map_data(),
@@ -232,6 +233,27 @@ defmodule WandererAppWeb.MapsLive do
    {:noreply, assign(socket, public_api_key: new_api_key)}
  end

  def handle_event("toggle-sse", _params, socket) do
    new_sse_enabled = not socket.assigns.sse_enabled
    map = socket.assigns.map

    case WandererApp.Api.Map.toggle_sse(map, %{sse_enabled: new_sse_enabled}) do
      {:ok, updated_map} ->
        {:noreply, assign(socket, sse_enabled: new_sse_enabled, map: updated_map)}

      {:error, %Ash.Error.Invalid{errors: errors}} ->
        error_message =
          errors
          |> Enum.map(fn error -> Map.get(error, :message, "Unknown error") end)
          |> Enum.join(", ")

        {:noreply, put_flash(socket, :error, error_message)}

      {:error, _} ->
        {:noreply, put_flash(socket, :error, "Failed to update SSE setting")}
    end
  end

  @impl true
  def handle_event(
        "live_select_change",

@@ -165,12 +165,12 @@
          field={f[:only_tracked_characters]}
          label="Allow only tracked characters"
        />
        <.input type="checkbox" field={f[:sse_enabled]} label="Enable Server-Sent Events (SSE)" />
        <.input
          :if={@live_action == :create}
          type="checkbox"
          field={f[:create_default_acl]}
          label="Create default access list"
          checked={Phoenix.HTML.Form.normalize_value("checkbox", f[:create_default_acl].value) == true or is_nil(f[:create_default_acl].value)}
        />
        <.live_select
          field={f[:acls]}
@@ -540,6 +540,24 @@
        </.button>
      </div>
    </div>

    <div class="border-t border-stone-700 mt-4 pt-4">
      <h3 class="text-md font-semibold mb-3">Server-Sent Events (SSE)</h3>
      <div class="flex items-center gap-3">
        <label class="flex items-center gap-2 cursor-pointer">
          <input
            type="checkbox"
            class="checkbox checkbox-primary"
            checked={@sse_enabled}
            phx-click="toggle-sse"
          />
          <span>Enable SSE for this map</span>
        </label>
      </div>
      <p class="text-sm text-stone-400 mt-2">
        When enabled, external clients can subscribe to real-time map events via SSE.
      </p>
    </div>
  </div>

  <.live_component

@@ -3,18 +3,32 @@ defmodule WandererAppWeb.PresenceGracePeriodManager do
  Manages grace period for character presence tracking.

  This module prevents rapid start/stop cycles of character tracking
  by introducing a 5-minute grace period before stopping tracking
  by introducing a 30-minute grace period before stopping tracking
  for characters that leave presence.

  ## Architecture

  When a character's presence leaves (e.g., browser close, network disconnect):
  1. Character is scheduled for removal after grace period (30 min)
  2. Character remains in `presence_character_ids` during grace period
  3. If character rejoins during grace period, removal is cancelled
  4. After grace period expires, character is atomically removed from cache

  ## Logging

  This module emits detailed logs for debugging character tracking issues:
  - INFO: Grace period expire events (actual character removal)
  - WARNING: Unexpected states or potential issues
  - DEBUG: Grace period start/cancel, presence changes, state changes
  """
  use GenServer

  require Logger

  # 30 minutes
  @grace_period_ms :timer.minutes(30)
  @check_remove_queue_interval :timer.seconds(30)
  # 1 hour grace period before removing disconnected characters
  @grace_period_ms :timer.hours(1)

  defstruct pending_removals: %{}, timers: %{}, to_remove: []
  defstruct pending_removals: %{}, timers: %{}

  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
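The documented lifecycle rests on two primitives: Process.send_after/3 arms the grace period and Process.cancel_timer/1 aborts it on rejoin. Stripped of the module's bookkeeping (a sketch, not its full state handling):

# leave: arm a timer and keep the reference
timer_ref =
  Process.send_after(self(), {:grace_period_expired, map_id, character_id}, @grace_period_ms)

# rejoin: cancel it; returns remaining milliseconds, or false if it already fired
case Process.cancel_timer(timer_ref) do
  false -> :too_late   # the expiry message may already sit in the mailbox
  _ms_left -> :cancelled
end

The false branch is why the expiry handler below re-checks its own timer table before acting: cancellation can lose the race with delivery.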
@@ -30,16 +44,105 @@ defmodule WandererAppWeb.PresenceGracePeriodManager do
    GenServer.cast(__MODULE__, {:process_presence_change, map_id, presence_data})
  end

  @doc """
  Get current grace period state for debugging purposes.
  """
  def get_state do
    GenServer.call(__MODULE__, :get_state)
  end

  @doc """
  Reset state for testing purposes.
  Cancels all pending timers and clears all state.
  """
  def reset_state do
    GenServer.call(__MODULE__, :reset_state)
  end

  @doc """
  Clear state for a specific map. Used for cleanup.
  Cancels any pending timers for characters on this map.
  """
  def clear_map_state(map_id) do
    GenServer.call(__MODULE__, {:clear_map_state, map_id})
  end

  @doc """
  Synchronous version of process_presence_change for testing.
  Returns :ok when processing is complete.
  """
  def process_presence_change_sync(map_id, presence_data) do
    GenServer.call(__MODULE__, {:process_presence_change_sync, map_id, presence_data})
  end

  @impl true
  def init(_opts) do
    Logger.info("#{__MODULE__} started")
    Process.send_after(self(), :check_remove_queue, @check_remove_queue_interval)
    Logger.debug("[PresenceGracePeriod] Manager started")

    {:ok, %__MODULE__{}}
  end

  @impl true
  def handle_call(:get_state, _from, state) do
    {:reply, state, state}
  end

  @impl true
  def handle_call(:reset_state, _from, state) do
    # Cancel all pending timers
    Enum.each(state.timers, fn {_key, timer_ref} ->
      Process.cancel_timer(timer_ref)
    end)

    Logger.debug("[PresenceGracePeriod] State reset - cancelled #{map_size(state.timers)} timers")

    {:reply, :ok, %__MODULE__{}}
  end

  @impl true
  def handle_call({:clear_map_state, map_id}, _from, state) do
    # Find and cancel all timers for this map
    {timers_to_cancel, remaining_timers} =
      Enum.split_with(state.timers, fn {{m_id, _char_id}, _ref} -> m_id == map_id end)

    # Cancel the timers
    Enum.each(timers_to_cancel, fn {_key, timer_ref} ->
      Process.cancel_timer(timer_ref)
    end)

    # Filter pending_removals for this map
    remaining_pending =
      Enum.reject(state.pending_removals, fn {{m_id, _char_id}, _} -> m_id == map_id end)
      |> Map.new()

    if length(timers_to_cancel) > 0 do
      Logger.debug("[PresenceGracePeriod] Cleared state for map #{map_id} - cancelled #{length(timers_to_cancel)} timers")
    end

    new_state = %{
      state
      | timers: Map.new(remaining_timers),
        pending_removals: remaining_pending
    }

    {:reply, :ok, new_state}
  end

  @impl true
  def handle_call({:process_presence_change_sync, map_id, presence_data}, _from, state) do
    # Same logic as the cast version, but synchronous
    new_state = do_process_presence_change(state, map_id, presence_data)
    {:reply, :ok, new_state}
  end

  @impl true
  def handle_cast({:process_presence_change, map_id, presence_data}, state) do
    new_state = do_process_presence_change(state, map_id, presence_data)
    {:noreply, new_state}
  end

  # Shared logic for presence change processing
  defp do_process_presence_change(state, map_id, presence_data) do
    # Extract currently tracked character IDs from presence data
    current_tracked_character_ids =
      presence_data
@@ -58,48 +161,83 @@ defmodule WandererAppWeb.PresenceGracePeriodManager do
    # Characters that just left (in previous, but not in current)
    newly_left = MapSet.difference(previous_set, current_set)

    # Process newly joined characters - cancel any pending removals
    # Log presence changes for debugging
    if MapSet.size(newly_joined) > 0 or MapSet.size(newly_left) > 0 do
      Logger.debug(fn ->
        "[PresenceGracePeriod] Map #{map_id} presence change - " <>
          "joined: #{inspect(MapSet.to_list(newly_joined))}, " <>
          "left: #{inspect(MapSet.to_list(newly_left))}"
      end)
    end

    # Cancel any pending removals for ALL currently present tracked characters
    # This handles the case where a character rejoins during grace period
    # (they're still in cache, so they won't be in "newly_joined")
    state =
      state
      |> cancel_pending_removals(map_id, current_set)
      |> schedule_removals(map_id, newly_left)

    # Process newly left characters - schedule them for removal after grace period
    # Calculate the final character IDs (current + still pending removal)
    pending_for_map = get_pending_removals_for_map(state, map_id)
    # Calculate the final character IDs (current + characters in grace period)
    # This includes both pending_removals (timer not yet fired)
    characters_in_grace_period = get_characters_in_grace_period(state, map_id)

    final_character_ids = MapSet.union(current_set, pending_for_map) |> MapSet.to_list()
    final_character_ids =
      MapSet.union(current_set, characters_in_grace_period) |> MapSet.to_list()

    # Update cache with final character IDs (includes grace period logic)
    WandererApp.Cache.insert("map_#{map_id}:presence_character_ids", final_character_ids)

    WandererApp.Cache.insert("map_#{map_id}:presence_data", presence_data)
    WandererApp.Cache.insert("map_#{map_id}:presence_updated", true)

    {:noreply, state}
    Logger.debug(fn ->
      "[PresenceGracePeriod] Map #{map_id} cache updated - " <>
        "current: #{length(current_tracked_character_ids)}, " <>
        "in_grace_period: #{MapSet.size(characters_in_grace_period)}, " <>
        "final: #{length(final_character_ids)}"
    end)

    state
  end

  @impl true
  def handle_info({:grace_period_expired, map_id, character_id}, state) do
    Logger.debug(fn -> "Grace period expired for character #{character_id} on map #{map_id}" end)
    # Check if this removal is still valid (wasn't cancelled)
    case get_timer_ref(state, map_id, character_id) do
      nil ->
        # Timer was cancelled (character rejoined), ignore
        Logger.debug(fn ->
          "[PresenceGracePeriod] Grace period expired for character #{character_id} on map #{map_id} " <>
            "but timer was already cancelled (character likely rejoined)"
        end)

    # Remove from pending removals and timers
    state =
      state
      |> remove_pending_removal(map_id, character_id)
      |> remove_after_grace_period(map_id, character_id)
        {:noreply, state}

    {:noreply, state}
  end

  @impl true
  def handle_info(:check_remove_queue, state) do
    Process.send_after(self(), :check_remove_queue, @check_remove_queue_interval)

    remove_from_cache_after_grace_period(state)
    {:noreply, %{state | to_remove: []}}
      _timer_ref ->
        # Grace period expired and is still valid - perform atomic removal
        Logger.info(fn ->
          "[PresenceGracePeriod] Grace period expired for character #{character_id} on map #{map_id} - " <>
            "removing from tracking after #{div(@grace_period_ms, 60_000)} minutes of inactivity"
        end)

        # Remove from pending removals state
        state = remove_pending_removal(state, map_id, character_id)

        # Atomically remove from cache (Fix #2 - no batching)
        remove_character_from_cache(map_id, character_id)

        # Emit telemetry for monitoring
        :telemetry.execute(
          [:wanderer_app, :presence, :grace_period_expired],
          %{duration_ms: @grace_period_ms, system_time: System.system_time()},
          %{map_id: map_id, character_id: character_id, reason: :grace_period_timeout}
        )

        {:noreply, state}
    end
  end

  # Cancel pending removals for characters that have rejoined
  defp cancel_pending_removals(state, map_id, character_ids) do
    Enum.reduce(character_ids, state, fn character_id, acc_state ->
      case get_timer_ref(acc_state, map_id, character_id) do
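The joined/left split is plain set difference; with illustrative ids:

previous = MapSet.new(["a", "b", "c"])
current  = MapSet.new(["b", "c", "d"])

MapSet.difference(current, previous)  #=> MapSet.new(["d"])  - newly joined
MapSet.difference(previous, current)  #=> MapSet.new(["a"])  - newly left

Note the cancellation step runs over the whole current set, not just newly_joined: a character re-appearing during its grace period is already in the cached id list, so it never shows up as "joined" again.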
@@ -107,23 +245,42 @@ defmodule WandererAppWeb.PresenceGracePeriodManager do
          acc_state

        timer_ref ->
          # Character rejoined during grace period - cancel removal
          time_remaining = Process.cancel_timer(timer_ref)

          Logger.debug(fn ->
            "Cancelling grace period for character #{character_id} on map #{map_id} (rejoined)"
            time_remaining_str =
              if is_integer(time_remaining) do
                "#{div(time_remaining, 60_000)} minutes remaining"
              else
                "timer already fired"
              end

            "[PresenceGracePeriod] Cancelled grace period for character #{character_id} on map #{map_id} - " <>
              "character rejoined (#{time_remaining_str})"
          end)

          Process.cancel_timer(timer_ref)
          # Emit telemetry for cancelled grace period
          :telemetry.execute(
            [:wanderer_app, :presence, :grace_period_cancelled],
            %{system_time: System.system_time()},
            %{map_id: map_id, character_id: character_id, reason: :character_rejoined}
          )

          remove_pending_removal(acc_state, map_id, character_id)
      end
    end)
  end

  # Schedule removals for characters that have left presence
  defp schedule_removals(state, map_id, character_ids) do
    Enum.reduce(character_ids, state, fn character_id, acc_state ->
      # Only schedule if not already pending
      case get_timer_ref(acc_state, map_id, character_id) do
        nil ->
          Logger.debug(fn ->
            "Scheduling grace period for character #{character_id} on map #{map_id}"
            "[PresenceGracePeriod] Starting #{div(@grace_period_ms, 60_000)}-minute grace period " <>
              "for character #{character_id} on map #{map_id} - character left presence"
          end)

          timer_ref =
@@ -133,9 +290,21 @@ defmodule WandererAppWeb.PresenceGracePeriodManager do
              @grace_period_ms
            )

          # Emit telemetry for grace period start
          :telemetry.execute(
            [:wanderer_app, :presence, :grace_period_started],
            %{grace_period_ms: @grace_period_ms, system_time: System.system_time()},
            %{map_id: map_id, character_id: character_id, reason: :presence_left}
          )

          add_pending_removal(acc_state, map_id, character_id, timer_ref)

        _ ->
        _existing_timer ->
          # Already has a pending removal scheduled
          Logger.debug(fn ->
            "[PresenceGracePeriod] Character #{character_id} on map #{map_id} already has pending removal"
          end)

          acc_state
      end
    end)
@@ -172,58 +341,52 @@ defmodule WandererAppWeb.PresenceGracePeriodManager do
|
||||
end
|
||||
end
|
||||
|
||||
defp get_pending_removals_for_map(state, map_id) do
|
||||
# Fix #1: Include all characters in grace period (both pending and awaiting removal)
|
||||
# This prevents race conditions where a character could be removed early
|
||||
defp get_characters_in_grace_period(state, map_id) do
|
||||
state.pending_removals
|
||||
|> Enum.filter(fn {{pending_map_id, _character_id}, _} -> pending_map_id == map_id end)
|
||||
|> Enum.map(fn {{_map_id, character_id}, _} -> character_id end)
|
||||
|> MapSet.new()
|
||||
end
|
||||
|
||||
  defp remove_after_grace_period(%{to_remove: to_remove} = state, map_id, character_id_to_remove) do
    %{
      state
      | to_remove:
          (to_remove ++ [{map_id, character_id_to_remove}])
          |> Enum.uniq_by(fn {map_id, character_id} -> map_id <> character_id end)
    }
  end

  defp remove_from_cache_after_grace_period(%{to_remove: to_remove} = state) do
    # Get current presence data to recalculate without the expired character
    to_remove
    |> Enum.each(fn {map_id, character_id_to_remove} ->
      case WandererApp.Cache.get("map_#{map_id}:presence_data") do
        nil ->
          :ok

        presence_data ->
          # Recalculate tracked character IDs from current presence data
          updated_presence_data =
            presence_data
            |> Enum.filter(fn %{character_id: character_id} ->
              character_id != character_id_to_remove
            end)

          presence_tracked_character_ids =
            updated_presence_data
            |> Enum.filter(fn %{tracked: tracked} ->
              tracked
            end)
            |> Enum.map(fn %{character_id: character_id} -> character_id end)

          WandererApp.Cache.insert("map_#{map_id}:presence_data", updated_presence_data)
          # Update both caches
          WandererApp.Cache.insert(
            "map_#{map_id}:presence_character_ids",
            presence_tracked_character_ids
          )

          WandererApp.Cache.insert("map_#{map_id}:presence_updated", true)

          Logger.debug(fn ->
            "Updated cache after grace period for map #{map_id}, tracked characters: #{inspect(presence_tracked_character_ids)}"
          end)
  # Fix #2: Atomic removal from cache when grace period expires
  # This removes the character immediately instead of batching
  defp remove_character_from_cache(map_id, character_id_to_remove) do
    # Get current presence_character_ids and remove the character
    current_character_ids =
      case WandererApp.Cache.get("map_#{map_id}:presence_character_ids") do
        nil -> []
        ids -> ids
      end

    updated_character_ids =
      Enum.reject(current_character_ids, fn id -> id == character_id_to_remove end)

    # Also update presence_data if it exists
    case WandererApp.Cache.get("map_#{map_id}:presence_data") do
      nil ->
        # No presence data, just update character IDs
        :ok

      presence_data ->
        updated_presence_data =
          presence_data
          |> Enum.filter(fn %{character_id: character_id} ->
            character_id != character_id_to_remove
          end)

        WandererApp.Cache.insert("map_#{map_id}:presence_data", updated_presence_data)
    end

    WandererApp.Cache.insert("map_#{map_id}:presence_character_ids", updated_character_ids)
    WandererApp.Cache.insert("map_#{map_id}:presence_updated", true)

    Logger.debug(fn ->
      "[PresenceGracePeriod] Removed character #{character_id_to_remove} from map #{map_id} cache - " <>
        "remaining tracked characters: #{length(updated_character_ids)}"
    end)

    :ok
  end
end
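A before/after sketch of the atomic path, using the same two cache keys the new helper reads and writes (values are illustrative; the helper is private, so this assumes a call site inside the module):

    WandererApp.Cache.insert("map_m1:presence_character_ids", ["char-A", "char-B"])

    WandererApp.Cache.insert("map_m1:presence_data", [
      %{character_id: "char-A", tracked: true},
      %{character_id: "char-B", tracked: true}
    ])

    :ok = remove_character_from_cache("m1", "char-A")

    WandererApp.Cache.get("map_m1:presence_character_ids")
    # => ["char-B"] - both keys drop char-A in the same call, with no batch step in between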
2
mix.exs

@@ -3,7 +3,7 @@ defmodule WandererApp.MixProject do

  @source_url "https://github.com/wanderer-industries/wanderer"

  @version "1.85.5"
  @version "1.88.11"

  def project do
    [

@@ -52,25 +52,24 @@ defmodule WandererApp.Repo.Migrations.AddPublicApiKeyUniqueIndex do
  end

  defp create_backup_table do
    repo().query!("""
    CREATE TABLE IF NOT EXISTS maps_v1_api_key_backup (
      id UUID PRIMARY KEY,
      map_id UUID NOT NULL,
      old_public_api_key TEXT NOT NULL,
      reason TEXT NOT NULL,
      backed_up_at TIMESTAMP NOT NULL DEFAULT NOW()
    repo().query!(
      """
      CREATE TABLE IF NOT EXISTS maps_v1_api_key_backup (
        id UUID PRIMARY KEY,
        map_id UUID NOT NULL,
        old_public_api_key TEXT NOT NULL,
        reason TEXT NOT NULL,
        backed_up_at TIMESTAMP NOT NULL DEFAULT NOW()
      )
      """,
      []
    )
    """, [])

    IO.puts("Created backup table maps_v1_api_key_backup")
  end

  def down do
    drop_if_exists(
      index(:maps_v1, [:public_api_key],
        name: :maps_v1_unique_public_api_key_index
      )
    )
    drop_if_exists(index(:maps_v1, [:public_api_key], name: :maps_v1_unique_public_api_key_index))

    IO.puts("Dropped unique index on maps_v1.public_api_key")

@@ -119,6 +118,7 @@ defmodule WandererApp.Repo.Migrations.AddPublicApiKeyUniqueIndex do
      INSERT INTO maps_v1_api_key_backup (id, map_id, old_public_api_key, reason)
      VALUES (gen_random_uuid(), $1::uuid, $2, 'duplicate_api_key_cleared_for_unique_index')
    """

    repo().query!(backup_query, [id, api_key])

    # Clear the duplicate
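Condensed, the backup-then-clear step is two parameterized statements; a hedged sketch (the INSERT matches the hunk above, while the clearing UPDATE is an assumption, since the actual statement is cut off here):

    # Inside the migration, for each duplicate row found (the id/api_key binding
    # comes from a duplicate-detection query not shown in this hunk - assumed).
    repo().query!(
      """
      INSERT INTO maps_v1_api_key_backup (id, map_id, old_public_api_key, reason)
      VALUES (gen_random_uuid(), $1::uuid, $2, 'duplicate_api_key_cleared_for_unique_index')
      """,
      [id, api_key]
    )

    # Clear the duplicate (exact statement not shown in this hunk - assumption):
    repo().query!("UPDATE maps_v1 SET public_api_key = NULL WHERE id = $1::uuid", [id])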
@@ -104,7 +104,6 @@ defmodule WandererAppWeb.MapAuditAPIControllerIntegrationTest do
      assert length(events) >= 0
    end

    @tag :skip
    test "supports different period values", %{conn: conn, map: map} do
      character = Factory.insert(:character, %{eve_id: "123456789"})
      user = Factory.insert(:user)

@@ -1,5 +1,5 @@
defmodule WandererAppWeb.MapSystemStructureAPIControllerTest do
  use WandererAppWeb.ApiCase
  use WandererAppWeb.ApiCase, async: false

  alias WandererAppWeb.Factory

@@ -247,9 +247,10 @@ defmodule WandererAppWeb.Api.V1.MapSystemApiV1Test do
      payload = %{
        "data" => %{
          "type" => "map_systems",
          "attributes" => %{
            # Missing solar_system_id - JSON:API returns 400 for schema validation
          }
          "attributes" =>
            %{
              # Missing solar_system_id - JSON:API returns 400 for schema validation
            }
        }
      }
@@ -47,24 +47,26 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
    user = create_user(%{name: "Test User", hash: "test_hash_#{:rand.uniform(1_000_000)}"})

    # Create test character with location tracking scopes
    character = create_character(%{
      eve_id: "#{@test_character_eve_id}",
      name: "Test Character",
      user_id: user.id,
      scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
      tracking_pool: "default"
    })
    character =
      create_character(%{
        eve_id: "#{@test_character_eve_id}",
        name: "Test Character",
        user_id: user.id,
        scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
        tracking_pool: "default"
      })

    # Create test map
    # Note: scope: :all is used because :none prevents system addition
    # (is_connection_valid returns false for :none scope)
    map = create_map(%{
      name: "Test Char Track",
      slug: "test-char-tracking-#{:rand.uniform(1_000_000)}",
      owner_id: character.id,
      scope: :all,
      only_tracked_characters: false
    })
    map =
      create_map(%{
        name: "Test Char Track",
        slug: "test-char-tracking-#{:rand.uniform(1_000_000)}",
        owner_id: character.id,
        scope: :all,
        only_tracked_characters: false
      })

    on_exit(fn ->
      cleanup_test_data(map.id)

@@ -150,6 +152,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Setup: Character starts at Jita
      set_character_location(character.id, @system_jita)

      WandererApp.Cache.insert(
        "map:#{map.id}:character:#{character.id}:start_solar_system_id",
        @system_jita

@@ -157,6 +160,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # First update - start system is intentionally NOT added yet
      CharactersImpl.update_characters(map.id)

      refute system_on_map?(map.id, @system_jita),
             "Start system should not be added until character moves"

@@ -167,8 +171,11 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
      CharactersImpl.update_characters(map.id)

      # Verify: Both systems should be on map after character moves
      assert wait_for_system_on_map(map.id, @system_jita), "Jita should be added after character moves"
      assert wait_for_system_on_map(map.id, @system_amarr), "Amarr should be added as the new location"
      assert wait_for_system_on_map(map.id, @system_jita),
             "Jita should be added after character moves"

      assert wait_for_system_on_map(map.id, @system_amarr),
             "Amarr should be added as the new location"
    end
  end

@@ -185,6 +192,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Character starts at Jita
      set_character_location(character.id, @system_jita)

      WandererApp.Cache.insert(
        "map:#{map.id}:character:#{character.id}:start_solar_system_id",
        @system_jita

@@ -192,6 +200,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # First update - start system is intentionally NOT added yet
      CharactersImpl.update_characters(map.id)

      refute system_on_map?(map.id, @system_jita),
             "Start system should not be added until character moves"

@@ -202,7 +211,9 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
      CharactersImpl.update_characters(map.id)

      # Verify both Jita and Amarr are now on map
      assert wait_for_system_on_map(map.id, @system_jita), "Jita (start) should be on map after movement"
      assert wait_for_system_on_map(map.id, @system_jita),
             "Jita (start) should be on map after movement"

      assert wait_for_system_on_map(map.id, @system_amarr), "Amarr should be on map"

      # Rapid jump to Dodixie before next update cycle

@@ -213,7 +224,10 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Verify: All three systems should be on map
      assert wait_for_system_on_map(map.id, @system_jita), "Jita (start) should still be on map"
      assert wait_for_system_on_map(map.id, @system_amarr), "Amarr (intermediate) should still be on map - this is the critical test"

      assert wait_for_system_on_map(map.id, @system_amarr),
             "Amarr (intermediate) should still be on map - this is the critical test"

      assert wait_for_system_on_map(map.id, @system_dodixie), "Dodixie (end) should be on map"
    end

@@ -230,6 +244,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Start at Jita
      set_character_location(character.id, @system_jita)

      WandererApp.Cache.insert(
        "map:#{map.id}:character:#{character.id}:start_solar_system_id",
        @system_jita

@@ -284,9 +299,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Verify start_solar_system_id still exists after first update
      {:ok, start_system} =
        WandererApp.Cache.lookup(
          "map:#{map.id}:character:#{character.id}:start_solar_system_id"
        )
        WandererApp.Cache.lookup("map:#{map.id}:character:#{character.id}:start_solar_system_id")

      assert start_system == @system_jita,
             "start_solar_system_id should persist after first update (not be taken/removed)"

@@ -369,6 +382,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Set character at Jita and set start location
      set_character_location(character.id, @system_jita)

      WandererApp.Cache.insert(
        "map:#{map.id}:character:#{character.id}:start_solar_system_id",
        @system_jita

@@ -401,6 +415,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Set up character location
      set_character_location(character.id, @system_jita)

      WandererApp.Cache.insert(
        "map:#{map.id}:character:#{character.id}:start_solar_system_id",
        @system_jita

@@ -424,19 +439,22 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Create a second character
      user2 = create_user(%{name: "Test User 2", hash: "test_hash_#{:rand.uniform(1_000_000)}"})
      character2 = create_character(%{
        eve_id: "#{@test_character_eve_id + 1}",
        name: "Test Character 2",
        user_id: user2.id,
        scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
        tracking_pool: "default"
      })

      character2 =
        create_character(%{
          eve_id: "#{@test_character_eve_id + 1}",
          name: "Test Character 2",
          user_id: user2.id,
          scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
          tracking_pool: "default"
        })

      # Add both characters to map presence
      add_character_to_map_presence(map.id, character2.id)

      # Set locations for both characters
      set_character_location(character2.id, @system_amarr)

      WandererApp.Cache.insert(
        "map:#{map.id}:character:#{character2.id}:start_solar_system_id",
        @system_amarr

@@ -464,6 +482,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Set up character with location
      set_character_location(character.id, @system_jita)

      WandererApp.Cache.insert(
        "map:#{map.id}:character:#{character.id}:start_solar_system_id",
        @system_jita

@@ -491,32 +510,38 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
      ensure_map_started(map.id)

      # Create multiple characters for concurrent processing
      characters = for i <- 1..5 do
        user = create_user(%{
          name: "Test User #{i}",
          hash: "test_hash_#{:rand.uniform(1_000_000)}"
        })
      characters =
        for i <- 1..5 do
          user =
            create_user(%{
              name: "Test User #{i}",
              hash: "test_hash_#{:rand.uniform(1_000_000)}"
            })

          character = create_character(%{
            eve_id: "#{@test_character_eve_id + i}",
            name: "Test Character #{i}",
            user_id: user.id,
            scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
            tracking_pool: "default"
          })
          character =
            create_character(%{
              eve_id: "#{@test_character_eve_id + i}",
              name: "Test Character #{i}",
              user_id: user.id,
              scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
              tracking_pool: "default"
            })

          # Add character to presence and set location
          add_character_to_map_presence(map.id, character.id)
          # Add character to presence and set location
          add_character_to_map_presence(map.id, character.id)

          solar_system_id = Enum.at([@system_jita, @system_amarr, @system_dodixie, @system_rens], rem(i, 4))
          set_character_location(character.id, solar_system_id)
          WandererApp.Cache.insert(
            "map:#{map.id}:character:#{character.id}:start_solar_system_id",
            solar_system_id
          )
          solar_system_id =
            Enum.at([@system_jita, @system_amarr, @system_dodixie, @system_rens], rem(i, 4))

          character
        end
          set_character_location(character.id, solar_system_id)

          WandererApp.Cache.insert(
            "map:#{map.id}:character:#{character.id}:start_solar_system_id",
            solar_system_id
          )

          character
        end

      # Run update_characters - should handle all characters concurrently
      result = CharactersImpl.update_characters(map.id)

@@ -563,7 +588,8 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      CharactersImpl.update_characters(map.id)

      # Should receive start and complete events (or error event if something failed)
      assert_receive {:telemetry_event, [:wanderer_app, :map, :update_characters, :start], _, _}, 1000
      assert_receive {:telemetry_event, [:wanderer_app, :map, :update_characters, :start], _, _},
                     1000

      # Should receive either complete or error event
      receive do

@@ -593,6 +619,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Set location in character cache
      set_character_location(character.id, @system_jita)

      WandererApp.Cache.insert(
        "map:#{map.id}:character:#{character.id}:start_solar_system_id",
        @system_jita

@@ -613,10 +640,12 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do

      # Verify both caches updated
      {:ok, character_data} = Cachex.get(:character_cache, character.id)

      {:ok, map_cached_location} =
        WandererApp.Cache.lookup("map:#{map.id}:character:#{character.id}:solar_system_id")

      assert character_data.solar_system_id == @system_amarr

      assert map_cached_location == @system_amarr,
             "Both caches should be consistent after update"
    end
@@ -239,7 +239,6 @@ defmodule WandererAppWeb.MapConnectionAPIControllerSuccessTest do
    {:ok, conn: conn, map: map, user: user, character: character}
  end

  @tag :skip
  test "CREATE: fails with missing required parameters", %{conn: conn, map: map} do
    invalid_params = %{
      "type" => 0

@@ -252,7 +251,6 @@ defmodule WandererAppWeb.MapConnectionAPIControllerSuccessTest do
    assert conn.status in [400, 422]
  end

  @tag :skip
  test "UPDATE: fails for non-existent connection", %{conn: conn, map: map} do
    non_existent_id = Ecto.UUID.generate()

@@ -267,7 +265,6 @@ defmodule WandererAppWeb.MapConnectionAPIControllerSuccessTest do
    assert conn.status in [404, 422, 500]
  end

  @tag :skip
  test "DELETE: handles non-existent connection gracefully", %{conn: conn, map: map} do
    non_existent_id = Ecto.UUID.generate()

@@ -277,7 +274,6 @@ defmodule WandererAppWeb.MapConnectionAPIControllerSuccessTest do
    assert conn.status in [200, 204, 404]
  end

  @tag :skip
  test "READ: handles filtering with non-existent systems", %{conn: conn, map: map} do
    params = %{
      "solar_system_source" => "99999999",

@@ -59,7 +59,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
    {:ok, %{conn: conn, map: map, user: user, character: character}}
  end

  @tag :skip
  test "READ: successfully retrieves systems for a map", %{conn: conn, map: map} do
    # Create some systems for the map
    system1 =

@@ -108,7 +107,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
    assert amarr["status"] == 0
  end

  @tag :skip
  test "CREATE: successfully creates a single system", %{conn: conn, map: map} do
    # Start the map server
    ensure_map_started(map.id)

@@ -133,7 +131,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
    assert created_count >= 1
  end

  @tag :skip
  test "UPDATE: successfully updates system position", %{conn: conn, map: map} do
    system =
      insert(:map_system, %{

@@ -165,7 +162,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
    assert updated_system["position_y"] == 400.0
  end

  @tag :skip
  test "UPDATE: successfully updates custom_name", %{conn: conn, map: map} do
    system =
      insert(:map_system, %{

@@ -194,7 +190,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
    assert updated_system["custom_name"] == "My Trade Hub"
  end

  @tag :skip
  test "DELETE: successfully deletes a system", %{conn: conn, map: map} do
    system =
      insert(:map_system, %{

@@ -222,7 +217,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
    end
  end

  @tag :skip
  test "DELETE: successfully deletes multiple systems", %{conn: conn, map: map} do
    system1 = insert(:map_system, %{map_id: map.id, solar_system_id: 30_000_142})
    system2 = insert(:map_system, %{map_id: map.id, solar_system_id: 30_000_144})
@@ -47,7 +47,9 @@ defmodule WandererAppWeb.ApiCase do
    end

    # Set up mocks for this test process
    WandererApp.Test.Mocks.setup_test_mocks()
    # Use global mode for integration tests so mocks work in spawned processes
    mock_mode = if integration_test?, do: :global, else: :private
    WandererApp.Test.Mocks.setup_test_mocks(mode: mock_mode)

    # Set up integration test environment if needed
    if integration_test? do

@@ -63,13 +63,22 @@ defmodule WandererApp.DataCase do
    # Use shared mode if requested or if running as a ConnCase test (to avoid ownership issues)
    # Otherwise use non-shared mode for proper test isolation
    shared = (tags[:shared] || tags[:conn_case] || not tags[:async]) and not tags[:async]

    pid = Ecto.Adapters.SQL.Sandbox.start_owner!(WandererApp.Repo, shared: shared)
    on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(pid) end)

    # Store the sandbox owner pid for allowing background processes
    # Start the sandbox owner and link it to the test process
    pid = Ecto.Adapters.SQL.Sandbox.start_owner!(WandererApp.Repo, shared: shared)

    # Store the sandbox owner pid BEFORE registering on_exit
    # This ensures it's available for use in setup callbacks
    Process.put(:sandbox_owner_pid, pid)

    # Register cleanup - this will be called last (LIFO order)
    on_exit(fn ->
      # Only stop if the owner is still alive
      if Process.alive?(pid) do
        Ecto.Adapters.SQL.Sandbox.stop_owner(pid)
      end
    end)

    # Allow critical system processes to access the database
    allow_system_processes_database_access()
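The allowance step relies on Ecto.Adapters.SQL.Sandbox.allow/3 under the hood; a minimal sketch of what a helper like allow_system_processes_database_access/0 presumably does for one named process (the helper body is not part of this diff, so this is an assumption):

    owner_pid = Process.get(:sandbox_owner_pid)

    # Grant an already-running background process access to the owner's sandbox connection.
    case Process.whereis(WandererApp.Server.TheraDataFetcher) do
      nil -> :ok
      worker_pid -> Ecto.Adapters.SQL.Sandbox.allow(WandererApp.Repo, owner_pid, worker_pid)
    end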
@@ -112,7 +121,9 @@ defmodule WandererApp.DataCase do
      WandererApp.Server.TheraDataFetcher,
      WandererApp.ExternalEvents.MapEventRelay,
      WandererApp.ExternalEvents.WebhookDispatcher,
      WandererApp.ExternalEvents.SseStreamManager
      WandererApp.ExternalEvents.SseStreamManager,
      # Task.Supervisor for Task.async_stream calls (e.g., from MapPool background tasks)
      Task.Supervisor
    ]

    Enum.each(system_processes, fn process_name ->

@@ -108,6 +108,10 @@ defmodule WandererAppWeb.Factory do
    create_map_transaction(map_id, attrs)
  end

  def insert(:solar_system, attrs) do
    create_solar_system(attrs)
  end

  def insert(resource_type, _attrs) do
    raise "Unknown factory resource type: #{resource_type}"
  end

@@ -802,4 +806,45 @@ defmodule WandererAppWeb.Factory do
    {:ok, webhook} = Ash.create(Api.MapWebhookSubscription, attrs)
    webhook
  end

  @doc """
  Creates a test solar system (static EVE Online system data) with reasonable defaults.
  """
  def build_solar_system(attrs \\ %{}) do
    unique_id = System.unique_integer([:positive])
    solar_system_id = Map.get(attrs, :solar_system_id, 30_000_000 + rem(unique_id, 10_000))

    default_attrs = %{
      solar_system_id: solar_system_id,
      solar_system_name: "System #{solar_system_id}",
      solar_system_name_lc: "system #{solar_system_id}",
      region_id: 10_000_000 + rem(unique_id, 1000),
      region_name: "Test Region",
      constellation_id: 20_000_000 + rem(unique_id, 1000),
      constellation_name: "Test Constellation",
      security: "0.5",
      system_class: 0,
      type_description: "HS",
      class_title: "High Sec"
    }

    merged_attrs = Map.merge(default_attrs, attrs)

    # Automatically compute solar_system_name_lc from solar_system_name if not provided
    if Map.has_key?(attrs, :solar_system_name) and not Map.has_key?(attrs, :solar_system_name_lc) do
      Map.put(merged_attrs, :solar_system_name_lc, String.downcase(merged_attrs.solar_system_name))
    else
      merged_attrs
    end
  end

  def create_solar_system(attrs \\ %{}) do
    attrs = build_solar_system(attrs)

    # Use upsert to handle cases where the system might already exist
    case Ash.create(Api.MapSolarSystem, attrs) do
      {:ok, solar_system} -> solar_system
      {:error, reason} -> raise "Failed to create solar system: #{inspect(reason)}"
    end
  end
end
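Usage of the new factory entry point is then one call; attribute values below are illustrative:

    # Generic factory API, dispatching to create_solar_system/1:
    jita = Factory.insert(:solar_system, %{solar_system_id: 30_000_142, solar_system_name: "Jita"})

    # The lowercase name is derived automatically when only the display name is overridden:
    jita.solar_system_name_lc
    # => "jita"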
@@ -81,8 +81,9 @@ defmodule WandererApp.Test.IntegrationConfig do
    :ok
  end

    # Give the supervisor a moment to fully initialize its children
    Process.sleep(100)
    # Wait for MapPoolDynamicSupervisor to be ready using efficient polling
    # instead of a fixed 100ms sleep
    wait_for_process(WandererApp.Map.MapPoolDynamicSupervisor, 2000)

    # Start Map.Manager AFTER MapPoolSupervisor
    case GenServer.whereis(WandererApp.Map.Manager) do

@@ -96,6 +97,27 @@ defmodule WandererApp.Test.IntegrationConfig do
    :ok
  end

  # Efficiently wait for a process to be registered
  defp wait_for_process(name, timeout) do
    deadline = System.monotonic_time(:millisecond) + timeout
    do_wait_for_process(name, deadline)
  end

  defp do_wait_for_process(name, deadline) do
    case Process.whereis(name) do
      pid when is_pid(pid) ->
        :ok

      nil ->
        if System.monotonic_time(:millisecond) < deadline do
          Process.sleep(5)
          do_wait_for_process(name, deadline)
        else
          :ok
        end
    end
  end

  @doc """
  Cleans up integration test environment.

@@ -48,8 +48,9 @@ defmodule WandererAppWeb.IntegrationConnCase do
  setup tags do
    WandererAppWeb.IntegrationConnCase.setup_sandbox(tags)

    # Set up mocks for this test process
    WandererApp.Test.Mocks.setup_test_mocks()
    # Set up mocks for this test process in global mode
    # Integration tests spawn processes (MapPool, etc.) that need mock access
    WandererApp.Test.Mocks.setup_test_mocks(mode: :global)

    # Set up integration test environment (including Map.Manager)
    WandererApp.Test.IntegrationConfig.setup_integration_environment()

@@ -74,7 +75,7 @@ defmodule WandererAppWeb.IntegrationConnCase do
  - Uses shared: false for better isolation
  - Child processes require explicit allowance
  """
  def setup_sandbox(tags) do
  def setup_sandbox(_tags) do
    # Ensure the repo is started before setting up sandbox
    unless Process.whereis(WandererApp.Repo) do
      {:ok, _} = WandererApp.Repo.start_link()

@@ -85,26 +86,22 @@ defmodule WandererAppWeb.IntegrationConnCase do
    # - This requires tests to be synchronous (async: false) if they share the same case
    shared_mode = true

    # Set up sandbox mode based on test type
    pid =
      if shared_mode do
        # For async tests with shared mode:
        # Checkout the sandbox connection instead of starting an owner
        # This allows multiple async tests to use the same connection pool
        :ok = Ecto.Adapters.SQL.Sandbox.checkout(WandererApp.Repo)
        # Put the connection in shared mode
        Ecto.Adapters.SQL.Sandbox.mode(WandererApp.Repo, {:shared, self()})
        self()
      else
        # For sync tests, start a dedicated owner
        pid = Ecto.Adapters.SQL.Sandbox.start_owner!(WandererApp.Repo, shared: false)
        on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(pid) end)
        pid
      end
    # Set up sandbox mode - always use start_owner! for proper ownership setup
    # This ensures that spawned processes (like Ash transactions) can access the database
    pid = Ecto.Adapters.SQL.Sandbox.start_owner!(WandererApp.Repo, shared: shared_mode)

    # Store the sandbox owner pid for allowing background processes
    # Store the sandbox owner pid BEFORE registering on_exit
    # This ensures it's available for use in setup callbacks
    Process.put(:sandbox_owner_pid, pid)

    # Register cleanup - this will be called last (LIFO order)
    on_exit(fn ->
      # Only stop if the owner is still alive
      if Process.alive?(pid) do
        Ecto.Adapters.SQL.Sandbox.stop_owner(pid)
      end
    end)

    # Allow critical system processes to access the database
    allow_system_processes_database_access()

@@ -136,7 +133,9 @@ defmodule WandererAppWeb.IntegrationConnCase do
      WandererApp.Server.TheraDataFetcher,
      WandererApp.ExternalEvents.MapEventRelay,
      WandererApp.ExternalEvents.WebhookDispatcher,
      WandererApp.ExternalEvents.SseStreamManager
      WandererApp.ExternalEvents.SseStreamManager,
      # Task.Supervisor for Task.async_stream calls
      Task.Supervisor
    ]

    Enum.each(system_processes, fn process_name ->

@@ -177,7 +176,7 @@ defmodule WandererAppWeb.IntegrationConnCase do
    end
  end

  # Monitor for dynamically spawned children and grant them mock access
  # Monitor for dynamically spawned children and grant them mock and database access
  defp monitor_and_allow_children(supervisor_pid, owner_pid, interval \\ 50) do
    if Process.alive?(supervisor_pid) do
      :timer.sleep(interval)

@@ -191,7 +190,9 @@ defmodule WandererAppWeb.IntegrationConnCase do
      |> Enum.filter(&is_pid/1)
      |> Enum.filter(&Process.alive?/1)
      |> Enum.each(fn child_pid ->
        # Grant both mock and database access
        WandererApp.Test.MockOwnership.allow_mocks_for_process(child_pid, owner_pid)
        allow_database_access(child_pid)
      end)

      _ ->
@@ -76,7 +76,7 @@ defmodule WandererApp.MapTestHelpers do
          raise "Map #{map_id} failed to stop within #{timeout}ms"
        end

        Process.sleep(50)
        Process.sleep(10)
        :continue
      end
    end)

@@ -86,23 +86,16 @@ defmodule WandererApp.MapTestHelpers do
  @doc """
  Continuously grants database access to all MapPool processes and their children.
  This is necessary when maps are started dynamically during tests.
  Polls multiple times to catch processes spawned at different stages.
  Uses efficient polling with minimal delays.
  """
  defp grant_database_access_continuously do
    owner_pid = Process.get(:sandbox_owner_pid) || self()

    # Grant access multiple times with delays to catch processes at different spawn stages
    # First few times quickly, then with longer delays
    # Quick initial grants (3 times with 10ms)
    Enum.each(1..3, fn _ ->
    # Grant access with minimal delays - 5 quick passes to catch spawned processes
    # Total time: ~25ms instead of 170ms
    Enum.each(1..5, fn _ ->
      grant_database_access_to_map_pools(owner_pid)
      Process.sleep(10)
    end)

    # Then slower grants (7 times with 20ms)
    Enum.each(1..7, fn _ ->
      grant_database_access_to_map_pools(owner_pid)
      Process.sleep(20)
      Process.sleep(5)
    end)
  end

@@ -164,19 +157,10 @@ defmodule WandererApp.MapTestHelpers do
      map_started_flag and in_started_maps_list ->
        {:ok, :started}

      # Map is partially started (in one but not both) - keep waiting
      map_started_flag or in_started_maps_list ->
        if System.monotonic_time(:millisecond) < deadline do
          Process.sleep(100)
          :continue
        else
          {:error, :timeout}
        end

      # Map not started yet
      # Map is partially started or not started yet - keep waiting
      true ->
        if System.monotonic_time(:millisecond) < deadline do
          Process.sleep(100)
          Process.sleep(20)
          :continue
        else
          {:error, :timeout}

@@ -186,8 +170,8 @@ defmodule WandererApp.MapTestHelpers do
    |> Enum.find(fn result -> result != :continue end)
    |> case do
      {:ok, :started} ->
        # Give it a bit more time to fully initialize all subsystems
        Process.sleep(200)
        # Brief pause for subsystem initialization (reduced from 200ms)
        Process.sleep(50)
        :ok

      {:error, :timeout} ->

@@ -356,7 +340,8 @@ defmodule WandererApp.MapTestHelpers do
  def set_character_location(character_id, solar_system_id, opts \\ []) do
    structure_id = opts[:structure_id]
    station_id = opts[:station_id]
    ship = opts[:ship] || 670 # Capsule
    # Capsule
    ship = opts[:ship] || 670

    # First get the existing character from cache or database to maintain all fields
    {:ok, existing_character} = WandererApp.Character.get_character(character_id)

@@ -461,7 +446,7 @@ defmodule WandererApp.MapTestHelpers do
      {:ok, true}
    else
      if System.monotonic_time(:millisecond) < deadline do
        Process.sleep(50)
        Process.sleep(10)
        :continue
      else
        {:error, :timeout}
@@ -60,7 +60,7 @@ defmodule WandererApp.Test.MockAllowance do
      Mox.set_mox_global()

      # Re-setup mocks to ensure they're available globally
      WandererApp.Test.Mocks.setup_mocks()
      WandererApp.Test.Mocks.setup_test_mocks(mode: :global)
    end
  end

@@ -16,9 +16,15 @@ defmodule WandererApp.Test.Mocks do
      :ok
    end
  """
  def setup_test_mocks do
    # Claim ownership of all mocks for this test process
    Mox.set_mox_private()
  def setup_test_mocks(opts \\ []) do
    # For integration tests that spawn processes (MapPool, etc.),
    # we need global mode so mocks work across process boundaries
    mode = Keyword.get(opts, :mode, :private)

    case mode do
      :global -> Mox.set_mox_global()
      :private -> Mox.set_mox_private()
    end

    # Set up default stubs for this test
    setup_default_stubs()
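The practical difference between the two modes, in a short sketch (EsiMock and its stub are hypothetical stand-ins for whatever setup_default_stubs/0 installs):

    # :private - only the owner process (or processes granted via Mox.allow/3)
    # may call the mock; a spawned task would raise Mox.UnexpectedCallError:
    Mox.set_mox_private()
    Mox.stub(EsiMock, :get_location, fn _id -> {:ok, %{solar_system_id: 30_000_142}} end)

    # :global - stubs defined by the owner are visible from any process, which is
    # what spawned MapPool workers need during integration tests:
    Mox.set_mox_global()
    Mox.stub(EsiMock, :get_location, fn _id -> {:ok, %{solar_system_id: 30_000_142}} end)
    {:ok, _} = Task.await(Task.async(fn -> EsiMock.get_location("char-A") end))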
@@ -174,6 +174,39 @@ defmodule WandererApp.TestHelpers do
    "Expected log to contain '#{expected_message}', but got: #{log_output}"
  end

  @doc """
  Waits for a condition to become true, with configurable timeout and interval.
  More efficient than fixed sleeps - uses small polling intervals.

  ## Options
    * `:timeout` - Maximum time to wait in milliseconds (default: 5000)
    * `:interval` - Polling interval in milliseconds (default: 10)

  ## Examples
      wait_until(fn -> Process.whereis(:my_server) != nil end)
      wait_until(fn -> cache_has_value?() end, timeout: 2000, interval: 5)
  """
  def wait_until(condition_fn, opts \\ []) do
    timeout = Keyword.get(opts, :timeout, 5000)
    interval = Keyword.get(opts, :interval, 10)
    deadline = System.monotonic_time(:millisecond) + timeout

    do_wait_until(condition_fn, deadline, interval)
  end

  defp do_wait_until(condition_fn, deadline, interval) do
    if condition_fn.() do
      :ok
    else
      if System.monotonic_time(:millisecond) < deadline do
        Process.sleep(interval)
        do_wait_until(condition_fn, deadline, interval)
      else
        {:error, :timeout}
      end
    end
  end
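Because wait_until/2 returns :ok or {:error, :timeout}, call sites can assert on the result instead of sleeping for a fixed interval; a small sketch (the cache key is illustrative):

    assert :ok =
             wait_until(
               fn -> WandererApp.Cache.get("map_m1:presence_updated") == true end,
               timeout: 2_000,
               interval: 10
             )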
@doc """
|
||||
Ensures a map server is started for testing.
|
||||
This function has been simplified to use the standard map startup flow.
|
||||
@@ -183,8 +216,13 @@ defmodule WandererApp.TestHelpers do
|
||||
# Use the standard map startup flow through Map.Manager
|
||||
:ok = WandererApp.Map.Manager.start_map(map_id)
|
||||
|
||||
# Wait a bit for the map to fully initialize
|
||||
:timer.sleep(500)
|
||||
# Wait for the map to be in started_maps cache with efficient polling
|
||||
wait_until(fn ->
|
||||
case WandererApp.Cache.lookup("map_#{map_id}:started") do
|
||||
{:ok, true} -> true
|
||||
_ -> false
|
||||
end
|
||||
end, timeout: 5000, interval: 20)
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
@@ -1,5 +1,6 @@
defmodule WandererApp.Api.ActorHelpersTest do
  use ExUnit.Case, async: false
  # Pure unit tests - no database or external dependencies
  use ExUnit.Case, async: true

  alias WandererApp.Api.ActorHelpers
  alias WandererApp.Api.ActorWithMap

@@ -1,5 +1,6 @@
defmodule WandererApp.Api.ActorWithMapTest do
  use ExUnit.Case, async: false
  # Pure unit tests - no database or external dependencies
  use ExUnit.Case, async: true

  alias WandererApp.Api.ActorWithMap

@@ -1,5 +1,6 @@
defmodule WandererApp.Api.Changes.InjectMapFromActorTest do
  use ExUnit.Case, async: false
  # Tests Ash changeset logic but doesn't need database
  use ExUnit.Case, async: true

  alias WandererApp.Api.ActorWithMap

@@ -132,22 +132,6 @@ defmodule WandererAppWeb.AuthTest do
      assert result.status == 400
    end

    test "rejects request for non-existent map" do
      non_existent_id = "550e8400-e29b-41d4-a716-446655440000"

      conn =
        build_conn()
        |> put_req_header("authorization", "Bearer test_api_key_123")
        |> put_private(:phoenix_router, WandererAppWeb.Router)
        |> Map.put(:params, %{"map_identifier" => non_existent_id})
        |> Plug.Conn.fetch_query_params()

      result = CheckMapApiKey.call(conn, CheckMapApiKey.init([]))

      assert result.halted
      assert result.status == 404
    end

    test "rejects request for map without API key configured", %{map: map} do
      # Update map to have no API key using the proper action
      {:ok, map_without_key} = Ash.update(map, %{public_api_key: nil}, action: :update_api_key)

@@ -166,6 +150,24 @@ defmodule WandererAppWeb.AuthTest do
    end
  end

  describe "CheckMapApiKey plug without fixtures" do
    test "rejects request for non-existent map" do
      non_existent_id = "550e8400-e29b-41d4-a716-446655440000"

      conn =
        build_conn()
        |> put_req_header("authorization", "Bearer test_api_key_123")
        |> put_private(:phoenix_router, WandererAppWeb.Router)
        |> Map.put(:params, %{"map_identifier" => non_existent_id})
        |> Plug.Conn.fetch_query_params()

      result = CheckMapApiKey.call(conn, CheckMapApiKey.init([]))

      assert result.halted
      assert result.status == 404
    end
  end

  describe "CheckAclApiKey plug" do
    setup do
      user = Factory.insert(:user)

@@ -248,6 +250,25 @@ defmodule WandererAppWeb.AuthTest do
      assert result.status == 401
    end

    test "rejects request for ACL without API key configured", %{acl: acl} do
      # Update ACL to have no API key
      {:ok, acl_without_key} = Ash.update(acl, %{api_key: nil})

      conn =
        build_conn()
        |> put_req_header("authorization", "Bearer test_acl_key_456")
        |> put_private(:phoenix_router, WandererAppWeb.Router)
        |> Map.put(:params, %{"id" => acl_without_key.id})
        |> Plug.Conn.fetch_query_params()

      result = CheckAclApiKey.call(conn, CheckAclApiKey.init([]))

      assert result.halted
      assert result.status == 401
    end
  end

  describe "CheckAclApiKey plug without fixtures" do
    test "rejects request with missing ACL ID" do
      conn =
        build_conn()

@@ -277,23 +298,6 @@ defmodule WandererAppWeb.AuthTest do
      assert result.halted
      assert result.status == 404
    end

    test "rejects request for ACL without API key configured", %{acl: acl} do
      # Update ACL to have no API key
      {:ok, acl_without_key} = Ash.update(acl, %{api_key: nil})

      conn =
        build_conn()
        |> put_req_header("authorization", "Bearer test_acl_key_456")
        |> put_private(:phoenix_router, WandererAppWeb.Router)
        |> Map.put(:params, %{"id" => acl_without_key.id})
        |> Plug.Conn.fetch_query_params()

      result = CheckAclApiKey.call(conn, CheckAclApiKey.init([]))

      assert result.halted
      assert result.status == 401
    end
  end

  describe "BasicAuth" do

@@ -144,8 +144,8 @@ defmodule WandererApp.Map.MapPoolTest do

      # Trigger reconciliation
      send(Reconciler, :reconcile)
      # Give it time to process
      Process.sleep(200)
      # Give it time to process (reduced from 200ms)
      Process.sleep(50)

      # Verify zombie was cleaned up
      {:ok, started_maps_after} = WandererApp.Cache.lookup("started_maps", [])

@@ -171,7 +171,7 @@ defmodule WandererApp.Map.MapPoolTest do

      # Trigger reconciliation
      send(Reconciler, :reconcile)
      Process.sleep(200)
      Process.sleep(50)

      # Verify all caches cleaned
      {:ok, started_maps} = WandererApp.Cache.lookup("started_maps", [])

@@ -217,7 +217,7 @@ defmodule WandererApp.Map.MapPoolTest do
      # The reconciler would detect this if the map was in a registry
      # For now, we just verify the logic doesn't crash
      send(Reconciler, :reconcile)
      Process.sleep(200)
      Process.sleep(50)

      # No assertions needed - just verifying no crashes
    end

@@ -236,7 +236,7 @@ defmodule WandererApp.Map.MapPoolTest do

      # Trigger reconciliation
      send(Reconciler, :reconcile)
      Process.sleep(200)
      Process.sleep(50)

      # Cache entry should be removed since pool doesn't exist
      {:ok, cache_entry} = Cachex.get(@cache, map_id)

@@ -264,7 +264,7 @@ defmodule WandererApp.Map.MapPoolTest do

      # Trigger reconciliation
      send(Reconciler, :reconcile)
      Process.sleep(200)
      Process.sleep(50)

      # Should receive telemetry event
      assert_receive {:telemetry, measurements}, 500

@@ -303,7 +303,7 @@ defmodule WandererApp.Map.MapPoolTest do

      # Trigger manual reconciliation
      Reconciler.trigger_reconciliation()
      Process.sleep(200)
      Process.sleep(50)

      # Verify zombie was cleaned up
      {:ok, started_maps_after} = WandererApp.Cache.lookup("started_maps", [])

@@ -335,7 +335,7 @@ defmodule WandererApp.Map.MapPoolTest do

      # Should not crash even with empty data
      send(Reconciler, :reconcile)
      Process.sleep(200)
      Process.sleep(50)

      # No assertions - just verifying no crash
      assert true

@@ -353,7 +353,7 @@ defmodule WandererApp.Map.MapPoolTest do

      # Should handle gracefully
      send(Reconciler, :reconcile)
      Process.sleep(200)
      Process.sleep(50)

      assert true
    else

@@ -1,359 +0,0 @@
defmodule WandererApp.Map.MapPoolTest do
  use ExUnit.Case, async: true

  alias WandererApp.Map.{MapPool, MapPoolDynamicSupervisor, Reconciler}

  @cache :map_pool_cache
  @registry :map_pool_registry
  @unique_registry :unique_map_pool_registry

  setup do
    # Clean up any existing test data
    cleanup_test_data()

    # Check if required infrastructure is running
    registries_running? =
      try do
        Registry.keys(@registry, self()) != :error
      rescue
        _ -> false
      end

    reconciler_running? = Process.whereis(Reconciler) != nil

    on_exit(fn ->
      cleanup_test_data()
    end)

    {:ok, registries_running: registries_running?, reconciler_running: reconciler_running?}
  end

  defp cleanup_test_data do
    # Clean up test caches
    WandererApp.Cache.delete("started_maps")
    Cachex.clear(@cache)
  end

  describe "garbage collection with synchronous stop" do
    @tag :skip
    test "garbage collector successfully stops map with synchronous call" do
      # This test would require setting up a full map pool with a test map
      # Skipping for now as it requires more complex setup with actual map data
      :ok
    end

    @tag :skip
    test "garbage collector handles stop failures gracefully" do
      # This test would verify error handling when stop fails
      :ok
    end
  end

  describe "cache lookup with registry fallback" do
    test "stop_map handles cache miss by scanning registry", %{
      registries_running: registries_running?
    } do
      if registries_running? do
        # Setup: Create a map_id that's not in cache but will be found in registry scan
        map_id = "test_map_#{:rand.uniform(1_000_000)}"

        # Verify cache is empty for this map
        assert {:ok, nil} = Cachex.get(@cache, map_id)

        # Call stop_map - should handle gracefully with fallback
        assert :ok = MapPoolDynamicSupervisor.stop_map(map_id)
      else
        # Skip test if registries not running
        :ok
      end
    end

    test "stop_map handles non-existent pool_uuid in registry", %{
      registries_running: registries_running?
    } do
      if registries_running? do
        map_id = "test_map_#{:rand.uniform(1_000_000)}"
        fake_uuid = "fake_uuid_#{:rand.uniform(1_000_000)}"

        # Put fake uuid in cache that doesn't exist in registry
        Cachex.put(@cache, map_id, fake_uuid)

        # Call stop_map - should handle gracefully with fallback
        assert :ok = MapPoolDynamicSupervisor.stop_map(map_id)
      else
        :ok
      end
    end

    test "stop_map updates cache when found via registry scan", %{
      registries_running: registries_running?
    } do
      if registries_running? do
        # This test would require a running pool with registered maps
        # For now, we verify the fallback logic doesn't crash
        map_id = "test_map_#{:rand.uniform(1_000_000)}"
        assert :ok = MapPoolDynamicSupervisor.stop_map(map_id)
      else
        :ok
      end
    end
  end

  describe "state cleanup atomicity" do
    @tag :skip
    test "rollback occurs when registry update fails" do
      # This would require mocking Registry.update_value to fail
      # Skipping for now as it requires more complex mocking setup
      :ok
    end

    @tag :skip
    test "rollback occurs when cache delete fails" do
      # This would require mocking Cachex.del to fail
      :ok
    end

    @tag :skip
    test "successful cleanup updates all three state stores" do
      # This would verify Registry, Cache, and GenServer state are all updated
      :ok
    end
  end

  describe "Reconciler - zombie map detection and cleanup" do
    test "reconciler detects zombie maps in started_maps cache", %{
      reconciler_running: reconciler_running?
    } do
      if reconciler_running? do
        # Setup: Add maps to started_maps that aren't in any registry
        zombie_map_id = "zombie_map_#{:rand.uniform(1_000_000)}"

        WandererApp.Cache.insert_or_update(
          "started_maps",
          [zombie_map_id],
          fn existing -> [zombie_map_id | existing] |> Enum.uniq() end
        )

        # Get started_maps
        {:ok, started_maps} = WandererApp.Cache.lookup("started_maps", [])
        assert zombie_map_id in started_maps

        # Trigger reconciliation
        send(Reconciler, :reconcile)
        # Give it time to process
        Process.sleep(200)

        # Verify zombie was cleaned up
        {:ok, started_maps_after} = WandererApp.Cache.lookup("started_maps", [])
        refute zombie_map_id in started_maps_after
      else
        :ok
      end
    end

    test "reconciler cleans up zombie map caches", %{reconciler_running: reconciler_running?} do
      if reconciler_running? do
        zombie_map_id = "zombie_map_#{:rand.uniform(1_000_000)}"

        # Setup zombie state
        WandererApp.Cache.insert_or_update(
          "started_maps",
          [zombie_map_id],
          fn existing -> [zombie_map_id | existing] |> Enum.uniq() end
        )

        WandererApp.Cache.insert("map_#{zombie_map_id}:started", true)
        Cachex.put(@cache, zombie_map_id, "fake_uuid")

        # Trigger reconciliation
        send(Reconciler, :reconcile)
        Process.sleep(200)

        # Verify all caches cleaned
        {:ok, started_maps} = WandererApp.Cache.lookup("started_maps", [])
        refute zombie_map_id in started_maps

        {:ok, cache_entry} = Cachex.get(@cache, zombie_map_id)
        assert cache_entry == nil
      else
        :ok
      end
    end
  end

  describe "Reconciler - orphan map detection and fix" do
    @tag :skip
    test "reconciler detects orphan maps in registry" do
      # This would require setting up a pool with maps in registry
      # but not in started_maps cache
      :ok
    end

    @tag :skip
    test "reconciler adds orphan maps to started_maps cache" do
      # This would verify orphan maps get added to the cache
      :ok
    end
  end

  describe "Reconciler - cache inconsistency detection and fix" do
    test "reconciler detects map with missing cache entry", %{
      reconciler_running: reconciler_running?
    } do
      if reconciler_running? do
        # This test verifies the reconciler can detect when a map
        # is in the registry but has no cache entry
        # Since we can't easily set up a full pool, we test the detection logic

        map_id = "test_map_#{:rand.uniform(1_000_000)}"

        # Ensure no cache entry
        Cachex.del(@cache, map_id)

        # The reconciler would detect this if the map was in a registry
        # For now, we just verify the logic doesn't crash
        send(Reconciler, :reconcile)
        Process.sleep(200)

        # No assertions needed - just verifying no crashes
      end
    end

    test "reconciler detects cache pointing to non-existent pool", %{
      reconciler_running: reconciler_running?
    } do
      if reconciler_running? do
        map_id = "test_map_#{:rand.uniform(1_000_000)}"
        fake_uuid = "fake_uuid_#{:rand.uniform(1_000_000)}"

        # Put fake uuid in cache
        Cachex.put(@cache, map_id, fake_uuid)

        # Trigger reconciliation
        send(Reconciler, :reconcile)
        Process.sleep(200)

        # Cache entry should be removed since pool doesn't exist
        {:ok, cache_entry} = Cachex.get(@cache, map_id)
        assert cache_entry == nil
      else
        :ok
      end
    end
  end

  describe "Reconciler - stats and telemetry" do
    test "reconciler emits telemetry events", %{reconciler_running: reconciler_running?} do
      if reconciler_running? do
        # Setup telemetry handler
        test_pid = self()

        :telemetry.attach(
          "test-reconciliation",
          [:wanderer_app, :map, :reconciliation],
          fn _event, measurements, _metadata, _config ->
            send(test_pid, {:telemetry, measurements})
          end,
          nil
        )

        # Trigger reconciliation
        send(Reconciler, :reconcile)
        Process.sleep(200)

        # Should receive telemetry event
        assert_receive {:telemetry, measurements}, 500

        assert is_integer(measurements.total_started_maps)
        assert is_integer(measurements.total_registry_maps)
        assert is_integer(measurements.zombie_maps)
        assert is_integer(measurements.orphan_maps)
        assert is_integer(measurements.cache_inconsistencies)

        # Cleanup
        :telemetry.detach("test-reconciliation")
      else
        :ok
      end
    end
  end

  describe "Reconciler - manual trigger" do
    test "trigger_reconciliation runs reconciliation immediately", %{
      reconciler_running: reconciler_running?
    } do
      if reconciler_running? do
        zombie_map_id = "zombie_map_#{:rand.uniform(1_000_000)}"

        # Setup zombie state
        WandererApp.Cache.insert_or_update(
          "started_maps",
          [zombie_map_id],
          fn existing -> [zombie_map_id | existing] |> Enum.uniq() end
        )

        # Verify it exists
        {:ok, started_maps_before} = WandererApp.Cache.lookup("started_maps", [])
        assert zombie_map_id in started_maps_before

        # Trigger manual reconciliation
        Reconciler.trigger_reconciliation()
        Process.sleep(200)

        # Verify zombie was cleaned up
        {:ok, started_maps_after} = WandererApp.Cache.lookup("started_maps", [])
        refute zombie_map_id in started_maps_after
      else
        :ok
      end
    end
  end

  describe "edge cases and error handling" do
    test "stop_map with cache error returns ok", %{registries_running: registries_running?} do
      if registries_running? do
        map_id = "test_map_#{:rand.uniform(1_000_000)}"

        # Even if cache operations fail, should return :ok
        assert :ok = MapPoolDynamicSupervisor.stop_map(map_id)
      else
        :ok
      end
    end

    test "reconciler handles empty registries gracefully", %{
      reconciler_running: reconciler_running?
    } do
      if reconciler_running? do
        # Clear everything
        cleanup_test_data()

        # Should not crash even with empty data
        send(Reconciler, :reconcile)
        Process.sleep(200)

        # No assertions - just verifying no crash
        assert true
      else
        :ok
      end
    end

    test "reconciler handles nil values in caches", %{reconciler_running: reconciler_running?} do
      if reconciler_running? do
        map_id = "test_map_#{:rand.uniform(1_000_000)}"

        # Explicitly set nil
        Cachex.put(@cache, map_id, nil)

        # Should handle gracefully
        send(Reconciler, :reconcile)
        Process.sleep(200)

        assert true
      else
        :ok
      end
    end
  end
end
@@ -1,361 +0,0 @@
|
||||
defmodule WandererApp.Map.SlugUniquenessTest do
|
||||
@moduledoc """
|
||||
Tests for map slug uniqueness constraints and handling.
|
||||
|
||||
These tests verify that:
|
||||
1. Database unique constraint is enforced
|
||||
2. Application-level slug generation handles uniqueness
|
||||
3. Concurrent map creation doesn't create duplicates
|
||||
4. Error handling works correctly for slug conflicts
|
||||
"""
|
||||
use WandererApp.DataCase, async: true
|
||||
|
||||
alias WandererApp.Api.Map
|
||||
|
||||
require Logger
|
||||
|
||||
describe "slug uniqueness constraint" do
|
||||
setup do
|
||||
# Create a test character (which includes a user)
|
||||
character = create_test_user()
|
||||
%{character: character}
|
||||
end
|
||||
|
||||
test "prevents duplicate slugs via database constraint", %{character: character} do
|
||||
# Create first map with a specific slug
|
||||
{:ok, map1} =
|
||||
Map.new(%{
|
||||
name: "Test Map",
|
||||
slug: "test-map",
|
||||
owner_id: character.id,
|
||||
description: "First map",
|
||||
scope: "wormholes"
|
||||
})
|
||||
|
||||
assert map1.slug == "test-map"
|
||||
|
||||
# Attempt to create second map with same slug
|
||||
# The updated logic now auto-increments the slug instead of failing
|
||||
result =
|
||||
Map.new(%{
|
||||
name: "Different Name",
|
||||
slug: "test-map",
|
||||
owner_id: character.id,
|
||||
description: "Second map",
|
||||
scope: "wormholes"
|
||||
})
|
||||
|
||||
# Should succeed with auto-incremented slug
|
||||
assert {:ok, map2} = result
|
||||
assert map2.slug == "test-map-2"
|
||||
end
|
||||
|
||||
test "automatically increments slug when duplicate detected", %{character: character} do
|
||||
# Create first map
|
||||
{:ok, map1} =
|
||||
Map.new(%{
|
||||
name: "Test Map",
|
||||
slug: "test-map",
|
||||
owner_id: character.id,
|
||||
description: "First map",
|
||||
scope: "wormholes"
|
||||
})
|
||||
|
||||
assert map1.slug == "test-map"
|
||||
|
||||
# Create second map with same name (should auto-increment slug)
|
||||
{:ok, map2} =
|
||||
Map.new(%{
|
||||
name: "Test Map",
|
||||
slug: "test-map",
|
||||
owner_id: character.id,
|
||||
description: "Second map",
|
||||
scope: "wormholes"
|
||||
})
|
||||
|
||||
# Slug should be automatically incremented
|
||||
assert map2.slug == "test-map-2"
|
||||
|
||||
# Create third map with same name
|
||||
{:ok, map3} =
|
||||
Map.new(%{
|
||||
name: "Test Map",
|
||||
slug: "test-map",
|
||||
owner_id: character.id,
|
||||
description: "Third map",
|
||||
scope: "wormholes"
|
||||
})
|
||||
|
||||
assert map3.slug == "test-map-3"
|
||||
end
|
||||
|
||||
test "handles many maps with similar names", %{character: character} do
|
||||
# Create 10 maps with the same base slug
|
||||
maps =
|
||||
for i <- 1..10 do
|
||||
{:ok, map} =
|
||||
Map.new(%{
|
||||
name: "Popular Name",
|
||||
slug: "popular-name",
|
||||
owner_id: character.id,
|
||||
description: "Map #{i}",
|
||||
scope: "wormholes"
|
||||
})
|
||||
|
||||
map
|
||||
end
|
||||
|
||||
# Verify all slugs are unique
|
||||
slugs = Enum.map(maps, & &1.slug)
|
||||
assert length(Enum.uniq(slugs)) == 10
|
||||
|
||||
# First should keep the base slug
|
||||
assert List.first(maps).slug == "popular-name"
|
||||
|
||||
# Others should be numbered
|
||||
assert "popular-name-2" in slugs
|
||||
assert "popular-name-10" in slugs
|
||||
end
|
||||
end

  describe "concurrent slug creation (race condition)" do
    setup do
      character = create_test_user()
      %{character: character}
    end

    @tag :slow
    test "handles concurrent map creation with identical slugs", %{character: character} do
      # Create 5 concurrent map creation requests with the same slug
      tasks =
        for i <- 1..5 do
          Task.async(fn ->
            Map.new(%{
              name: "Concurrent Test",
              slug: "concurrent-test",
              owner_id: character.id,
              description: "Concurrent map #{i}",
              scope: "wormholes"
            })
          end)
        end

      # Wait for all tasks to complete
      results = Task.await_many(tasks, 10_000)

      # All should either succeed or fail gracefully (no crashes)
      assert length(results) == 5

      # Get successful results
      successful = Enum.filter(results, &match?({:ok, _}, &1))
      failed = Enum.filter(results, &match?({:error, _}, &1))

      # At least some should succeed
      assert length(successful) > 0

      # Extract maps from successful results
      maps = Enum.map(successful, fn {:ok, map} -> map end)

      # Verify all successful maps have unique slugs
      slugs = Enum.map(maps, & &1.slug)

      assert length(Enum.uniq(slugs)) == length(slugs),
             "All successful maps should have unique slugs"

      # Log results for visibility
      Logger.info("Concurrent test: #{length(successful)} succeeded, #{length(failed)} failed")
      Logger.info("Unique slugs created: #{inspect(slugs)}")
    end

    @tag :slow
    test "concurrent creation with different names creates different base slugs",
         %{character: character} do
      # Create concurrent requests with different names (should all succeed)
      tasks =
        for i <- 1..5 do
          Task.async(fn ->
            Map.new(%{
              name: "Concurrent Map #{i}",
              slug: "concurrent-map-#{i}",
              owner_id: character.id,
              description: "Map #{i}",
              scope: "wormholes"
            })
          end)
        end

      results = Task.await_many(tasks, 10_000)

      # All should succeed
      assert Enum.all?(results, &match?({:ok, _}, &1))

      # All should have different slugs
      slugs = Enum.map(results, fn {:ok, map} -> map.slug end)
      assert length(Enum.uniq(slugs)) == 5
    end
  end
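
  # Sketch of one way to survive the race exercised above (an assumption, not
  # the app's confirmed strategy): let the database's unique index arbitrate
  # and retry with the next numeric suffix when an insert loses. `insert_fun`
  # is any hypothetical function returning {:ok, map} | {:error, reason}.
  def sketch_create_with_retry(attrs, insert_fun, attempt \\ 1) do
    # First attempt uses the base slug; later attempts append "-2", "-3", ...
    candidate = if attempt == 1, do: attrs.slug, else: "#{attrs.slug}-#{attempt}"

    case insert_fun.(%{attrs | slug: candidate}) do
      {:ok, map} -> {:ok, map}
      {:error, _} when attempt < 10 -> sketch_create_with_retry(attrs, insert_fun, attempt + 1)
      {:error, reason} -> {:error, reason}
    end
  end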

  describe "slug generation edge cases" do
    setup do
      character = create_test_user()
      %{character: character}
    end

    test "handles very long slugs", %{character: character} do
      # Create a map with a name within limits but a slug that is very long
      # Note: name max is 20 chars, slug max is 40 chars
      long_slug = String.duplicate("a", 50)

      # Attempting to create a map with a slug that's too long should fail validation
      result =
        Map.new(%{
          name: "Long Slug Test",
          slug: long_slug,
          owner_id: character.id,
          description: "Long slug test",
          scope: "wormholes"
        })

      # Should fail because the slug exceeds the max length
      assert {:error, _error} = result

      # But creating with a slug exactly at max length should work
      max_length_slug = String.duplicate("a", 40)

      {:ok, map} =
        Map.new(%{
          name: "Long Slug Test",
          slug: max_length_slug,
          owner_id: character.id,
          description: "Long slug test",
          scope: "wormholes"
        })

      assert String.length(map.slug) == 40
    end

    test "handles special characters in slugs", %{character: character} do
      # Test that special characters are properly slugified
      {:ok, map} =
        Map.new(%{
          name: "Test: Map & Name!",
          slug: "test-map-name",
          owner_id: character.id,
          description: "Special chars test",
          scope: "wormholes"
        })

      # Slug should only contain allowed characters
      assert map.slug =~ ~r/^[a-z0-9-]+$/
    end
  end
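
  # A minimal slugifier consistent with the ~r/^[a-z0-9-]+$/ contract asserted
  # above (an illustration, not the app's actual slugify code): lowercase,
  # collapse disallowed runs to "-", trim edge dashes, clamp to the 40-char max.
  # e.g. sketch_slugify("Test: Map & Name!") => "test-map-name"
  def sketch_slugify(name) do
    name
    |> String.downcase()
    |> String.replace(~r/[^a-z0-9]+/, "-")
    |> String.trim("-")
    |> String.slice(0, 40)
  end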

  describe "slug update operations" do
    setup do
      character = create_test_user()

      {:ok, map} =
        Map.new(%{
          name: "Original Map",
          slug: "original-map",
          owner_id: character.id,
          description: "Original",
          scope: "wormholes"
        })

      %{character: character, map: map}
    end

    test "updating map with same slug succeeds", %{map: map} do
      # Update other fields, keep the same slug
      result =
        Map.update(map, %{
          description: "Updated description",
          slug: "original-map"
        })

      assert {:ok, updated_map} = result
      assert updated_map.slug == "original-map"
      assert updated_map.description == "Updated description"
    end

    test "updating to conflicting slug is handled", %{character: character, map: map} do
      # Create another map
      {:ok, _other_map} =
        Map.new(%{
          name: "Other Map",
          slug: "other-map",
          owner_id: character.id,
          description: "Other",
          scope: "wormholes"
        })

      # Try to update the first map to use the other map's slug
      result =
        Map.update(map, %{
          slug: "other-map"
        })

      # Should either fail or auto-increment
      case result do
        {:ok, updated_map} ->
          # If successful, the slug should be different
          assert updated_map.slug != "other-map"
          assert updated_map.slug =~ ~r/^other-map-\d+$/

        {:error, _} ->
          # Or it can fail with a validation error
          :ok
      end
    end
  end

  describe "get_map_by_slug with duplicates" do
    setup do
      character = create_test_user()
      %{character: character}
    end

    test "get_map_by_slug! raises on duplicates if they exist" do
      # Documentation test: the database unique constraint (together with the
      # fixes above) prevents duplicate slugs, so we cannot easily create them
      # here. We document the expected behavior instead: if duplicates somehow
      # did exist (a data-integrity issue), the query should fail rather than
      # silently return an arbitrary row.
      assert true
    end
  end

  # Helper functions

  defp create_test_user do
    # Create a test user with the necessary attributes
    user =
      case Ash.create(WandererApp.Api.User, %{
             name: "Test User #{:rand.uniform(10_000)}",
             hash: "test_hash_#{:rand.uniform(100_000_000)}"
           }) do
        {:ok, user} -> user
        {:error, reason} -> raise "Failed to create user: #{inspect(reason)}"
      end

    # Create a character for the user (maps need a character as owner)
    unique_id = System.unique_integer([:positive])

    character =
      case Ash.create(
             WandererApp.Api.Character,
             %{
               eve_id: "#{2_000_000_000 + unique_id}",
               name: "Test Character #{unique_id}",
               user_id: user.id
             },
             action: :link
           ) do
        {:ok, character} -> character
        {:error, reason} -> raise "Failed to create character: #{inspect(reason)}"
      end

    character
  end
end

@@ -82,7 +82,6 @@ defmodule WandererApp.MapDuplicationServiceTest do
      assert {:error, {:not_found, _message}} = result
    end

    @tag :skip
    test "preserves original map unchanged", %{owner: owner, source_map: source_map} do
      original_name = source_map.name
      original_description = source_map.description
@@ -114,7 +113,7 @@ defmodule WandererApp.MapDuplicationServiceTest do

      {:ok, duplicate1} = Duplication.duplicate_map(source_map.id, target_map1, [])

      # Create second duplicate
      target_map2 =
        insert(:map, %{
          name: "Unique Copy 2",

685
test/unit/presence_grace_period_manager_test.exs
Normal file
@@ -0,0 +1,685 @@

defmodule WandererAppWeb.PresenceGracePeriodManagerTest do
  @moduledoc """
  Comprehensive tests for PresenceGracePeriodManager.

  Tests cover:
  - Grace period scheduling when characters leave presence
  - Grace period cancellation when characters rejoin
  - Atomic cache removal after the grace period expires
  - Multiple characters and maps scenarios
  - Edge cases and error handling
  """
  use ExUnit.Case, async: false

  alias WandererAppWeb.PresenceGracePeriodManager

  setup do
    # Generate unique map and character IDs for each test
    map_id = "test_map_#{:rand.uniform(1_000_000)}"
    character_id = "test_char_#{:rand.uniform(1_000_000)}"
    character_id_2 = "test_char_2_#{:rand.uniform(1_000_000)}"

    # Clean up GenServer state for this specific map
    PresenceGracePeriodManager.clear_map_state(map_id)

    # Clean up any existing cache data for this test
    cleanup_cache(map_id)

    on_exit(fn ->
      PresenceGracePeriodManager.clear_map_state(map_id)
      cleanup_cache(map_id)
    end)

    {:ok,
     map_id: map_id,
     character_id: character_id,
     character_id_2: character_id_2}
  end

  defp cleanup_cache(map_id) do
    WandererApp.Cache.delete("map_#{map_id}:presence_character_ids")
    WandererApp.Cache.delete("map_#{map_id}:presence_data")
    WandererApp.Cache.delete("map_#{map_id}:presence_updated")
  end

  defp build_presence_data(characters) do
    Enum.map(characters, fn {character_id, tracked} ->
      %{
        character_id: character_id,
        tracked: tracked,
        from: DateTime.utc_now()
      }
    end)
  end

  defp get_presence_character_ids(map_id) do
    case WandererApp.Cache.get("map_#{map_id}:presence_character_ids") do
      nil -> []
      ids -> ids
    end
  end

  defp get_presence_data(map_id) do
    WandererApp.Cache.get("map_#{map_id}:presence_data")
  end

  defp get_presence_updated(map_id) do
    WandererApp.Cache.get("map_#{map_id}:presence_updated") || false
  end

  describe "initialization" do
    test "manager starts successfully" do
      # The manager should already be running as part of the application
      assert Process.whereis(PresenceGracePeriodManager) != nil
    end

    test "get_state returns valid state structure" do
      state = PresenceGracePeriodManager.get_state()

      assert %PresenceGracePeriodManager{} = state
      assert is_map(state.pending_removals)
      assert is_map(state.timers)
    end

    test "reset_state clears all state" do
      # Reset, then verify the state is empty
      PresenceGracePeriodManager.reset_state()
      state = PresenceGracePeriodManager.get_state()

      assert state.pending_removals == %{}
      assert state.timers == %{}
    end
  end
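
  # The tests below assume timer mechanics along these lines (a sketch of the
  # standard Process.send_after/cancel_timer pattern, not the manager's
  # verified internals, shown with a plain map instead of its real state):
  def sketch_schedule_removal(timers, map_id, character_id, grace_ms) do
    # On leave: schedule the expiry message and remember the timer ref
    ref = Process.send_after(self(), {:grace_period_expired, map_id, character_id}, grace_ms)
    Map.put(timers, {map_id, character_id}, ref)
  end

  def sketch_cancel_removal(timers, map_id, character_id) do
    # On rejoin: cancel the pending timer, if any, and drop the entry
    {ref, timers} = Map.pop(timers, {map_id, character_id})
    if ref, do: Process.cancel_timer(ref)
    timers
  end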

  describe "process_presence_change - character joins" do
    test "first character joins - updates cache with character ID", %{
      map_id: map_id,
      character_id: character_id
    } do
      presence_data = build_presence_data([{character_id, true}])

      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      assert get_presence_character_ids(map_id) == [character_id]
      assert get_presence_data(map_id) == presence_data
      assert get_presence_updated(map_id) == true
    end

    test "multiple characters join - all are in cache", %{
      map_id: map_id,
      character_id: character_id,
      character_id_2: character_id_2
    } do
      presence_data = build_presence_data([{character_id, true}, {character_id_2, true}])

      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      cached_ids = get_presence_character_ids(map_id)
      assert Enum.sort(cached_ids) == Enum.sort([character_id, character_id_2])
    end

    test "untracked character is not included in presence_character_ids", %{
      map_id: map_id,
      character_id: character_id,
      character_id_2: character_id_2
    } do
      presence_data = build_presence_data([{character_id, true}, {character_id_2, false}])

      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # Only the tracked character should be in presence_character_ids
      assert get_presence_character_ids(map_id) == [character_id]

      # But both should be in presence_data
      assert length(get_presence_data(map_id)) == 2
    end
  end

  describe "process_presence_change - character leaves (grace period)" do
    test "character leaving starts grace period - still in cache", %{
      map_id: map_id,
      character_id: character_id
    } do
      # First, the character joins
      presence_data = build_presence_data([{character_id, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      assert get_presence_character_ids(map_id) == [character_id]

      # Character leaves (empty presence)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      # Character should still be in cache (grace period active)
      assert get_presence_character_ids(map_id) == [character_id]

      # State should have a pending removal
      state = PresenceGracePeriodManager.get_state()
      assert Map.has_key?(state.pending_removals, {map_id, character_id})
      assert Map.has_key?(state.timers, {map_id, character_id})
    end

    test "multiple characters leave - all have grace periods", %{
      map_id: map_id,
      character_id: character_id,
      character_id_2: character_id_2
    } do
      # Both characters join
      presence_data = build_presence_data([{character_id, true}, {character_id_2, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # Both leave
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      # Both should still be in cache
      cached_ids = get_presence_character_ids(map_id)
      assert Enum.sort(cached_ids) == Enum.sort([character_id, character_id_2])

      # Both should have pending removals
      state = PresenceGracePeriodManager.get_state()
      assert Map.has_key?(state.pending_removals, {map_id, character_id})
      assert Map.has_key?(state.pending_removals, {map_id, character_id_2})
    end

    test "one character leaves, one stays - only leaving character has grace period", %{
      map_id: map_id,
      character_id: character_id,
      character_id_2: character_id_2
    } do
      # Both characters join
      presence_data = build_presence_data([{character_id, true}, {character_id_2, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # Only character_id leaves
      presence_data_after = build_presence_data([{character_id_2, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data_after)

      # Both should be in cache (one current, one in grace period)
      cached_ids = get_presence_character_ids(map_id)
      assert Enum.sort(cached_ids) == Enum.sort([character_id, character_id_2])

      # Only character_id should have a pending removal
      state = PresenceGracePeriodManager.get_state()
      assert Map.has_key?(state.pending_removals, {map_id, character_id})
      refute Map.has_key?(state.pending_removals, {map_id, character_id_2})
    end
  end
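
  # Leaver detection consistent with these tests (an assumed reduction, not
  # the manager's verified code): anyone tracked in the previous snapshot but
  # missing or untracked in the new presence data gets a grace period rather
  # than immediate removal.
  def sketch_find_leavers(previous_ids, presence_data) do
    tracked_now = for %{character_id: id, tracked: true} <- presence_data, do: id
    previous_ids -- tracked_now
  end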

  describe "process_presence_change - character rejoins (cancels grace period)" do
    test "character rejoins during grace period - removal cancelled", %{
      map_id: map_id,
      character_id: character_id
    } do
      # Character joins
      presence_data = build_presence_data([{character_id, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # Character leaves (starts the grace period)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      # Verify the grace period started
      state_before = PresenceGracePeriodManager.get_state()
      assert Map.has_key?(state_before.pending_removals, {map_id, character_id})

      # Character rejoins
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # Grace period should be cancelled
      state_after = PresenceGracePeriodManager.get_state()
      refute Map.has_key?(state_after.pending_removals, {map_id, character_id})
      refute Map.has_key?(state_after.timers, {map_id, character_id})

      # Character should still be in cache
      assert get_presence_character_ids(map_id) == [character_id]
    end

    test "character leaves and rejoins multiple times - only one grace period at a time", %{
      map_id: map_id,
      character_id: character_id
    } do
      presence_data = build_presence_data([{character_id, true}])

      # Cycle 1: join -> leave -> rejoin
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # Cycle 2: leave -> rejoin
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # Should have no pending removals
      state = PresenceGracePeriodManager.get_state()
      refute Map.has_key?(state.pending_removals, {map_id, character_id})

      # Character should be in cache
      assert get_presence_character_ids(map_id) == [character_id]
    end
  end

  describe "grace_period_expired - atomic removal" do
    test "directly sending grace_period_expired removes character from cache", %{
      map_id: map_id,
      character_id: character_id
    } do
      # Setup: character joins then leaves
      presence_data = build_presence_data([{character_id, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      # Verify the grace period started
      state = PresenceGracePeriodManager.get_state()
      assert Map.has_key?(state.timers, {map_id, character_id})

      # Simulate grace period expiration by sending the message directly
      send(
        Process.whereis(PresenceGracePeriodManager),
        {:grace_period_expired, map_id, character_id}
      )

      # Small wait for the message to be processed
      :timer.sleep(20)

      # Character should be removed from cache
      assert get_presence_character_ids(map_id) == []

      # Pending removal should be cleared
      state_after = PresenceGracePeriodManager.get_state()
      refute Map.has_key?(state_after.pending_removals, {map_id, character_id})
      refute Map.has_key?(state_after.timers, {map_id, character_id})
    end

    test "grace_period_expired for already cancelled timer is ignored", %{
      map_id: map_id,
      character_id: character_id
    } do
      # Setup: character joins, leaves, then rejoins
      presence_data = build_presence_data([{character_id, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # The timer was cancelled, but simulate the message arriving anyway
      send(
        Process.whereis(PresenceGracePeriodManager),
        {:grace_period_expired, map_id, character_id}
      )

      :timer.sleep(20)

      # Character should still be in cache (the message was ignored)
      assert get_presence_character_ids(map_id) == [character_id]
    end

    test "grace_period_expired with no presence_data in cache handles gracefully", %{
      map_id: map_id,
      character_id: character_id
    } do
      # Simulate a race where the map was stopped: put the character into the
      # manager's state via the normal join/leave flow, then clear the cache
      # before the grace period expires.
      presence_data = build_presence_data([{character_id, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      # Clear the cache to simulate the map being stopped
      cleanup_cache(map_id)

      # Send the expired message
      send(
        Process.whereis(PresenceGracePeriodManager),
        {:grace_period_expired, map_id, character_id}
      )

      :timer.sleep(20)

      # Should handle gracefully without crashing
      state = PresenceGracePeriodManager.get_state()
      refute Map.has_key?(state.pending_removals, {map_id, character_id})
    end

    test "removes only the specified character, keeps others", %{
      map_id: map_id,
      character_id: character_id,
      character_id_2: character_id_2
    } do
      # Both characters join, then leave
      presence_data = build_presence_data([{character_id, true}, {character_id_2, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      # Both in grace period
      cached_before = get_presence_character_ids(map_id)
      assert length(cached_before) == 2

      # Only expire character_id
      send(
        Process.whereis(PresenceGracePeriodManager),
        {:grace_period_expired, map_id, character_id}
      )

      :timer.sleep(20)

      # Only character_id_2 should remain
      assert get_presence_character_ids(map_id) == [character_id_2]

      # character_id_2 should still have a pending removal
      state = PresenceGracePeriodManager.get_state()
      refute Map.has_key?(state.pending_removals, {map_id, character_id})
      assert Map.has_key?(state.pending_removals, {map_id, character_id_2})
    end
  end
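
  # Sketch of the removal step the tests above exercise. The read-modify-write
  # below is only "atomic" because it runs inside the manager's single
  # GenServer process, which serializes all cache updates for a map.
  # `WandererApp.Cache.put/2` is an assumption alongside the get/1 and
  # delete/1 calls used elsewhere in this file.
  def sketch_remove_character(map_id, character_id) do
    key = "map_#{map_id}:presence_character_ids"
    ids = WandererApp.Cache.get(key) || []
    WandererApp.Cache.put(key, List.delete(ids, character_id))
  end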

  describe "multiple maps scenarios" do
    test "same character on different maps - independent grace periods", %{
      character_id: character_id
    } do
      map_id_1 = "test_map_multi_1_#{:rand.uniform(1_000_000)}"
      map_id_2 = "test_map_multi_2_#{:rand.uniform(1_000_000)}"

      on_exit(fn ->
        PresenceGracePeriodManager.clear_map_state(map_id_1)
        PresenceGracePeriodManager.clear_map_state(map_id_2)
        cleanup_cache(map_id_1)
        cleanup_cache(map_id_2)
      end)

      presence_data = build_presence_data([{character_id, true}])

      # Character joins both maps
      PresenceGracePeriodManager.process_presence_change_sync(map_id_1, presence_data)
      PresenceGracePeriodManager.process_presence_change_sync(map_id_2, presence_data)

      # Character leaves map_id_1 only
      PresenceGracePeriodManager.process_presence_change_sync(map_id_1, [])

      # map_id_1 should have a grace period, map_id_2 should not
      state = PresenceGracePeriodManager.get_state()
      assert Map.has_key?(state.pending_removals, {map_id_1, character_id})
      refute Map.has_key?(state.pending_removals, {map_id_2, character_id})

      # Character should be in cache for both maps
      assert get_presence_character_ids(map_id_1) == [character_id]
      assert get_presence_character_ids(map_id_2) == [character_id]

      # Expire the grace period for map_id_1
      send(
        Process.whereis(PresenceGracePeriodManager),
        {:grace_period_expired, map_id_1, character_id}
      )

      :timer.sleep(20)

      # map_id_1 should be empty, map_id_2 should still have the character
      assert get_presence_character_ids(map_id_1) == []
      assert get_presence_character_ids(map_id_2) == [character_id]
    end

    test "grace period on one map doesn't affect other maps", %{
      character_id: character_id,
      character_id_2: character_id_2
    } do
      map_id_1 = "test_map_iso_1_#{:rand.uniform(1_000_000)}"
      map_id_2 = "test_map_iso_2_#{:rand.uniform(1_000_000)}"

      on_exit(fn ->
        PresenceGracePeriodManager.clear_map_state(map_id_1)
        PresenceGracePeriodManager.clear_map_state(map_id_2)
        cleanup_cache(map_id_1)
        cleanup_cache(map_id_2)
      end)

      # Different characters on different maps
      presence_data_1 = build_presence_data([{character_id, true}])
      presence_data_2 = build_presence_data([{character_id_2, true}])

      PresenceGracePeriodManager.process_presence_change_sync(map_id_1, presence_data_1)
      PresenceGracePeriodManager.process_presence_change_sync(map_id_2, presence_data_2)

      # Character leaves map_id_1
      PresenceGracePeriodManager.process_presence_change_sync(map_id_1, [])

      # map_id_2 should be completely unaffected
      assert get_presence_character_ids(map_id_2) == [character_id_2]

      state = PresenceGracePeriodManager.get_state()
      assert Map.has_key?(state.pending_removals, {map_id_1, character_id})
      refute Map.has_key?(state.pending_removals, {map_id_2, character_id_2})
    end
  end

  describe "edge cases" do
    test "empty presence data on fresh map", %{map_id: map_id} do
      # Process empty presence for a map that never had data
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      # Should not crash, and the cache should be empty
      assert get_presence_character_ids(map_id) == []
    end

    test "presence data with all untracked characters", %{
      map_id: map_id,
      character_id: character_id,
      character_id_2: character_id_2
    } do
      presence_data = build_presence_data([{character_id, false}, {character_id_2, false}])

      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # No tracked characters, so presence_character_ids should be empty
      assert get_presence_character_ids(map_id) == []
      # But presence_data should have both characters
      assert length(get_presence_data(map_id)) == 2
    end

    test "rapid presence changes don't cause issues", %{
      map_id: map_id,
      character_id: character_id
    } do
      presence_data = build_presence_data([{character_id, true}])

      # Rapid-fire presence changes (synchronous)
      for _ <- 1..20 do
        PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)
        PresenceGracePeriodManager.process_presence_change_sync(map_id, [])
      end

      # Final state: character present
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # After the final rejoin there should be no pending removal
      state = PresenceGracePeriodManager.get_state()
      refute Map.has_key?(state.pending_removals, {map_id, character_id})
      assert get_presence_character_ids(map_id) == [character_id]
    end

    test "character switching from tracked to untracked", %{
      map_id: map_id,
      character_id: character_id
    } do
      # Character joins as tracked
      presence_data_tracked = build_presence_data([{character_id, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data_tracked)

      assert get_presence_character_ids(map_id) == [character_id]

      # Character becomes untracked (still present, but not tracking)
      presence_data_untracked = build_presence_data([{character_id, false}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data_untracked)

      # Character was tracked before and is now untracked - should start a grace period
      state = PresenceGracePeriodManager.get_state()
      assert Map.has_key?(state.pending_removals, {map_id, character_id})

      # Character should still be in cache (grace period)
      assert get_presence_character_ids(map_id) == [character_id]
    end

    test "character switching from untracked to tracked", %{
      map_id: map_id,
      character_id: character_id
    } do
      # Character joins as untracked
      presence_data_untracked = build_presence_data([{character_id, false}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data_untracked)

      assert get_presence_character_ids(map_id) == []

      # Character becomes tracked
      presence_data_tracked = build_presence_data([{character_id, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data_tracked)

      # Character should now be in the tracked list
      assert get_presence_character_ids(map_id) == [character_id]
    end

    test "duplicate character IDs in presence data are handled", %{
      map_id: map_id,
      character_id: character_id
    } do
      # Presence data with duplicate entries (shouldn't happen, but be safe)
      presence_data = [
        %{character_id: character_id, tracked: true, from: DateTime.utc_now()},
        %{character_id: character_id, tracked: true, from: DateTime.utc_now()}
      ]

      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # Should handle this gracefully. Duplicates may currently appear in the
      # cached IDs (a known limitation); the important thing is that it
      # doesn't crash.
      cached_ids = get_presence_character_ids(map_id)
      assert character_id in cached_ids
    end
  end

  describe "telemetry events" do
    test "grace_period_started telemetry is emitted when character leaves", %{
      map_id: map_id,
      character_id: character_id
    } do
      test_pid = self()
      handler_id = "test-grace-period-started-#{map_id}"

      :telemetry.attach(
        handler_id,
        [:wanderer_app, :presence, :grace_period_started],
        fn _name, measurements, metadata, _config ->
          send(test_pid, {:telemetry, :started, measurements, metadata})
        end,
        nil
      )

      on_exit(fn ->
        :telemetry.detach(handler_id)
      end)

      # Character joins then leaves
      presence_data = build_presence_data([{character_id, true}])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      assert_receive {:telemetry, :started, measurements, metadata}, 500
      assert measurements.grace_period_ms > 0
      assert metadata.map_id == map_id
      assert metadata.character_id == character_id
      assert metadata.reason == :presence_left
    end

    test "grace_period_cancelled telemetry is emitted when character rejoins", %{
      map_id: map_id,
      character_id: character_id
    } do
      test_pid = self()
      handler_id = "test-grace-period-cancelled-#{map_id}"

      :telemetry.attach(
        handler_id,
        [:wanderer_app, :presence, :grace_period_cancelled],
        fn _name, measurements, metadata, _config ->
          send(test_pid, {:telemetry, :cancelled, measurements, metadata})
        end,
        nil
      )

      on_exit(fn ->
        :telemetry.detach(handler_id)
      end)

      presence_data = build_presence_data([{character_id, true}])

      # Join -> leave -> rejoin
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      assert_receive {:telemetry, :cancelled, _measurements, metadata}, 500
      assert metadata.map_id == map_id
      assert metadata.character_id == character_id
      assert metadata.reason == :character_rejoined
    end

    test "grace_period_expired telemetry is emitted when timer fires", %{
      map_id: map_id,
      character_id: character_id
    } do
      test_pid = self()
      handler_id = "test-grace-period-expired-#{map_id}"

      :telemetry.attach(
        handler_id,
        [:wanderer_app, :presence, :grace_period_expired],
        fn _name, measurements, metadata, _config ->
          send(test_pid, {:telemetry, :expired, measurements, metadata})
        end,
        nil
      )

      on_exit(fn ->
        :telemetry.detach(handler_id)
      end)

      presence_data = build_presence_data([{character_id, true}])

      # Join -> leave -> simulate expiration
      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      # Simulate grace period expiration
      send(
        Process.whereis(PresenceGracePeriodManager),
        {:grace_period_expired, map_id, character_id}
      )

      :timer.sleep(20)

      assert_receive {:telemetry, :expired, measurements, metadata}, 500
      assert measurements.duration_ms > 0
      assert metadata.map_id == map_id
      assert metadata.character_id == character_id
      assert metadata.reason == :grace_period_timeout
    end
  end
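
  # The emission side matching the handlers above would use the standard
  # :telemetry.execute/3 call. The event name, measurement, and metadata keys
  # come straight from the assertions in this describe block; treating this
  # exact helper as part of the manager is an assumption.
  def sketch_emit_grace_period_started(map_id, character_id, grace_ms) do
    :telemetry.execute(
      [:wanderer_app, :presence, :grace_period_started],
      %{grace_period_ms: grace_ms},
      %{map_id: map_id, character_id: character_id, reason: :presence_left}
    )
  end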

  describe "cache consistency" do
    test "presence_updated flag is set on every change", %{
      map_id: map_id,
      character_id: character_id
    } do
      presence_data = build_presence_data([{character_id, true}])

      # Clear the flag
      WandererApp.Cache.delete("map_#{map_id}:presence_updated")

      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      assert get_presence_updated(map_id) == true

      # Clear and change again
      WandererApp.Cache.delete("map_#{map_id}:presence_updated")
      PresenceGracePeriodManager.process_presence_change_sync(map_id, [])

      assert get_presence_updated(map_id) == true
    end

    test "presence_data and presence_character_ids are always in sync", %{
      map_id: map_id,
      character_id: character_id,
      character_id_2: character_id_2
    } do
      # Complex scenario: multiple characters, some tracked, some not
      presence_data =
        build_presence_data([
          {character_id, true},
          {character_id_2, false}
        ])

      PresenceGracePeriodManager.process_presence_change_sync(map_id, presence_data)

      # presence_character_ids should only have tracked characters
      cached_ids = get_presence_character_ids(map_id)
      assert cached_ids == [character_id]

      # presence_data should have all characters
      cached_data = get_presence_data(map_id)
      assert length(cached_data) == 2
      data_ids = Enum.map(cached_data, & &1.character_id)
      assert Enum.sort(data_ids) == Enum.sort([character_id, character_id_2])
    end
  end
end

@@ -1,5 +1,6 @@
defmodule WandererApp.Repositories.MapContextHelperTest do
  use ExUnit.Case, async: false
  # Pure unit tests - no database or external dependencies
  use ExUnit.Case, async: true

  alias WandererApp.Repositories.MapContextHelper

@@ -1,5 +1,6 @@
defmodule WandererApp.TestHelpersTest do
  use ExUnit.Case
  # Pure unit tests - no database or external dependencies
  use ExUnit.Case, async: true

  alias WandererApp.TestHelpers

@@ -1,5 +1,6 @@
defmodule WandererAppWeb.ApiRouter.RouteSpecTest do
  use ExUnit.Case, async: false
  # Pure unit tests - no database or external dependencies
  use ExUnit.Case, async: true

  alias WandererAppWeb.ApiRouter.RouteSpec

@@ -1,5 +1,6 @@
defmodule WandererAppWeb.ErrorHTMLTest do
  use WandererAppWeb.ConnCase, async: false
  # Pure function tests - no database or external dependencies needed
  use ExUnit.Case, async: true

  # Bring render_to_string/4 for testing custom views
  import Phoenix.Template

@@ -1,5 +1,6 @@
defmodule WandererAppWeb.ErrorJSONTest do
  use WandererAppWeb.ConnCase, async: false
  # Pure function tests - no database or external dependencies needed
  use ExUnit.Case, async: true

  test "renders 404" do
    assert WandererAppWeb.ErrorJSON.render("404.json", %{}) == %{errors: %{detail: "Not Found"}}