Mirror of https://github.com/wanderer-industries/wanderer, synced 2025-12-05 23:35:33 +00:00

Compare commits

81 commits
| SHA1 |
|---|
| f6db6f0914 |
| ab8baeedd1 |
| eccee5e72e |
| 4d93055bda |
| c60c16e56a |
| 99b1de5647 |
| 7efe11a421 |
| 954108856a |
| cbca745ec4 |
| e15e7c8f8d |
| 65e8a520e5 |
| 3926af5a6d |
| 556fb33223 |
| 82295adeab |
| efabf060c7 |
| 96e434ebf5 |
| d81e2567cc |
| f8d487639f |
| cecfbb5375 |
| 9d7d4fad2e |
| 7be64bde02 |
| 48eb7552a9 |
| 5347b0060c |
| b826c03226 |
| 1c211a8667 |
| da1762934b |
| 511457c761 |
| 29b4cedb81 |
| 585de15e6b |
| a9bf118f3a |
| 6d5a432bad |
| f1f12abd16 |
| ce82ed97f5 |
| 36b393dbde |
| 524c283a0d |
| afda53a9bc |
| 1310d75012 |
| 80bbde549d |
| 2451487593 |
| ecd626f105 |
| 123b312965 |
| e94de8e629 |
| 956a5a04ca |
| affeb7c624 |
| ffba407eaf |
| 33f710127c |
| 63faa43c1d |
| 9f75ae6b03 |
| a1f28cd245 |
| 90a04b517e |
| 9f6e6a333f |
| 7b9e2c4fd9 |
| 63f13711cc |
| 650170498a |
| 0f466c51ba |
| a1a641bce3 |
| 4764c25eb1 |
| d390455cf2 |
| 472dbaa68b |
| 679bd782a8 |
| 6a316e3906 |
| c129db8474 |
| 10035b4c91 |
| 5839271de7 |
| 47db8ef709 |
| 2656491aaa |
| a7637c9cae |
| 7b83ed8205 |
| 00cbc77f1d |
| 4d75b256c4 |
| 5aeff7c40c |
| 6a543bf644 |
| dfb035525d |
| 798aec1b74 |
| 7914d7e151 |
| 8b579d6837 |
| c0fd20dfff |
| dd6b67c6e6 |
| 48ff2f4413 |
| d261c6186b |
| 064a36fcbb |
.check.exs (13 lines changed)

```diff
@@ -13,8 +13,8 @@
   ## list of tools (see `mix check` docs for a list of default curated tools)
   tools: [
     ## curated tools may be disabled (e.g. the check for compilation warnings)
-    {:compiler, false},
+    ## Allow compilation warnings for now (error budget: unlimited warnings)
+    {:compiler, "mix compile"},

     ## ...or have command & args adjusted (e.g. enable skip comments for sobelow)
     # {:sobelow, "mix sobelow --exit --skip"},
@@ -22,10 +22,15 @@
     ## ...or reordered (e.g. to see output from dialyzer before others)
     # {:dialyzer, order: -1},

     ## ...or reconfigured (e.g. disable parallel execution of ex_unit in umbrella)
+    ## Credo with relaxed error budget: max 200 issues
+    {:credo, "mix credo --strict --max-issues 200"},
+
+    ## Dialyzer but don't halt on exit (allow warnings)
+    {:dialyzer, "mix dialyzer"},
+
+    ## Tests without warnings-as-errors for now
+    {:ex_unit, "mix test"},
-    {:doctor, false},
-    {:ex_unit, false},
     {:npm_test, false},
     {:sobelow, false}
```
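The net effect is an explicit error budget instead of fully disabled tooling. A minimal local sketch of what the adjusted suite now runs, with the commands taken directly from the config above:

```bash
# Run the full curated suite with the relaxed budgets from .check.exs
mix check

# Or invoke the budgeted tools individually
mix compile                          # compilation warnings are tolerated for now
mix credo --strict --max-issues 200  # fails only when the 200-issue budget is exceeded
mix dialyzer                         # warnings reported, run not halted
mix test                             # without warnings-as-errors
```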
.credo.exs (18 lines changed)

```diff
@@ -82,8 +82,6 @@
       # You can customize the priority of any check
       # Priority values are: `low, normal, high, higher`
       #
-      {Credo.Check.Design.AliasUsage,
-       [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},
       # You can also customize the exit_status of each check.
       # If you don't want TODO comments to cause `mix credo` to fail, just
       # set this value to 0 (zero).
@@ -99,10 +97,9 @@
       {Credo.Check.Readability.LargeNumbers, []},
       {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
       {Credo.Check.Readability.ModuleAttributeNames, []},
-      {Credo.Check.Readability.ModuleDoc, []},
+      {Credo.Check.Readability.ModuleDoc, false},
       {Credo.Check.Readability.ModuleNames, []},
       {Credo.Check.Readability.ParenthesesInCondition, []},
-      {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
       {Credo.Check.Readability.PipeIntoAnonymousFunctions, []},
       {Credo.Check.Readability.PredicateFunctionNames, []},
       {Credo.Check.Readability.PreferImplicitTry, []},
@@ -121,14 +118,12 @@
       #
       {Credo.Check.Refactor.Apply, []},
       {Credo.Check.Refactor.CondStatements, []},
-      {Credo.Check.Refactor.CyclomaticComplexity, []},
       {Credo.Check.Refactor.FunctionArity, []},
       {Credo.Check.Refactor.LongQuoteBlocks, []},
       {Credo.Check.Refactor.MatchInCondition, []},
       {Credo.Check.Refactor.MapJoin, []},
       {Credo.Check.Refactor.NegatedConditionsInUnless, []},
       {Credo.Check.Refactor.NegatedConditionsWithElse, []},
-      {Credo.Check.Refactor.Nesting, []},
       {Credo.Check.Refactor.UnlessWithElse, []},
       {Credo.Check.Refactor.WithClauses, []},
       {Credo.Check.Refactor.FilterFilter, []},
@@ -196,10 +191,19 @@
       {Credo.Check.Warning.LeakyEnvironment, []},
       {Credo.Check.Warning.MapGetUnsafePass, []},
       {Credo.Check.Warning.MixEnv, []},
-      {Credo.Check.Warning.UnsafeToAtom, []}
+      {Credo.Check.Warning.UnsafeToAtom, []},

       # {Credo.Check.Refactor.MapInto, []},

+      #
+      # Temporarily disable checks that generate too many issues
+      # to get under the 200 issue budget
+      #
+      {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
+      {Credo.Check.Design.AliasUsage, []},
+      {Credo.Check.Refactor.Nesting, []},
+      {Credo.Check.Refactor.CyclomaticComplexity, []}

       #
       # Custom checks can be created using `mix credo.gen.check`.
       #
```
.credo.test.exs (new file, 127 lines)

```elixir
# Credo configuration specific to test files
# This enforces stricter quality standards for test code

%{
  configs: [
    %{
      name: "test",
      files: %{
        included: ["test/"],
        excluded: ["test/support/"]
      },
      requires: [],
      strict: true,
      color: true,
      checks: [
        # Consistency checks
        {Credo.Check.Consistency.ExceptionNames, []},
        {Credo.Check.Consistency.LineEndings, []},
        {Credo.Check.Consistency.MultiAliasImportRequireUse, []},
        {Credo.Check.Consistency.ParameterPatternMatching, []},
        {Credo.Check.Consistency.SpaceAroundOperators, []},
        {Credo.Check.Consistency.SpaceInParentheses, []},
        {Credo.Check.Consistency.TabsOrSpaces, []},

        # Design checks - stricter for tests
        {Credo.Check.Design.AliasUsage, priority: :high},
        # Lower threshold for tests
        {Credo.Check.Design.DuplicatedCode, mass_threshold: 25},
        {Credo.Check.Design.TagTODO, []},
        {Credo.Check.Design.TagFIXME, []},

        # Readability checks - very important for tests
        {Credo.Check.Readability.AliasOrder, []},
        {Credo.Check.Readability.FunctionNames, []},
        {Credo.Check.Readability.LargeNumbers, []},
        # Slightly longer for test descriptions
        {Credo.Check.Readability.MaxLineLength, max_length: 120},
        {Credo.Check.Readability.ModuleAttributeNames, []},
        # Not required for test modules
        {Credo.Check.Readability.ModuleDoc, false},
        {Credo.Check.Readability.ModuleNames, []},
        {Credo.Check.Readability.ParenthesesInCondition, []},
        {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
        {Credo.Check.Readability.PredicateFunctionNames, []},
        {Credo.Check.Readability.PreferImplicitTry, []},
        {Credo.Check.Readability.RedundantBlankLines, []},
        {Credo.Check.Readability.Semicolons, []},
        {Credo.Check.Readability.SpaceAfterCommas, []},
        {Credo.Check.Readability.StringSigils, []},
        {Credo.Check.Readability.TrailingBlankLine, []},
        {Credo.Check.Readability.TrailingWhiteSpace, []},
        {Credo.Check.Readability.UnnecessaryAliasExpansion, []},
        {Credo.Check.Readability.VariableNames, []},
        {Credo.Check.Readability.WithSingleClause, []},

        # Test-specific readability checks
        # Discourage single pipes in tests
        {Credo.Check.Readability.SinglePipe, []},
        # Specs not needed in tests
        {Credo.Check.Readability.Specs, false},
        {Credo.Check.Readability.StrictModuleLayout, []},

        # Refactoring opportunities - important for test maintainability
        # Higher limit for complex test setups
        {Credo.Check.Refactor.ABCSize, max_size: 50},
        {Credo.Check.Refactor.AppendSingleItem, []},
        {Credo.Check.Refactor.CondStatements, []},
        {Credo.Check.Refactor.CyclomaticComplexity, max_complexity: 10},
        # Lower for test helpers
        {Credo.Check.Refactor.FunctionArity, max_arity: 4},
        {Credo.Check.Refactor.LongQuoteBlocks, []},
        {Credo.Check.Refactor.MapInto, []},
        {Credo.Check.Refactor.MatchInCondition, []},
        {Credo.Check.Refactor.NegatedConditionsInUnless, []},
        {Credo.Check.Refactor.NegatedConditionsWithElse, []},
        # Keep tests flat
        {Credo.Check.Refactor.Nesting, max_nesting: 3},
        {Credo.Check.Refactor.UnlessWithElse, []},
        {Credo.Check.Refactor.WithClauses, []},
        {Credo.Check.Refactor.FilterFilter, []},
        {Credo.Check.Refactor.RejectReject, []},
        {Credo.Check.Refactor.RedundantWithClauseResult, []},

        # Warnings - all should be fixed
        {Credo.Check.Warning.ApplicationConfigInModuleAttribute, []},
        {Credo.Check.Warning.BoolOperationOnSameValues, []},
        {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
        {Credo.Check.Warning.IExPry, []},
        {Credo.Check.Warning.IoInspect, []},
        {Credo.Check.Warning.OperationOnSameValues, []},
        {Credo.Check.Warning.OperationWithConstantResult, []},
        {Credo.Check.Warning.RaiseInsideRescue, []},
        {Credo.Check.Warning.UnusedEnumOperation, []},
        {Credo.Check.Warning.UnusedFileOperation, []},
        {Credo.Check.Warning.UnusedKeywordOperation, []},
        {Credo.Check.Warning.UnusedListOperation, []},
        {Credo.Check.Warning.UnusedPathOperation, []},
        {Credo.Check.Warning.UnusedRegexOperation, []},
        {Credo.Check.Warning.UnusedStringOperation, []},
        {Credo.Check.Warning.UnusedTupleOperation, []},
        {Credo.Check.Warning.UnsafeExec, []},

        # Test-specific checks
        # Important for test isolation
        {Credo.Check.Warning.LeakyEnvironment, []},

        # Custom checks for test patterns
        {
          Credo.Check.Refactor.PipeChainStart,
          # Factory functions
          excluded_functions: ["build", "create", "insert"],
          excluded_argument_types: [:atom, :number]
        }
      ],

      # Disable these checks for test files
      disabled: [
        # Tests don't need module docs
        {Credo.Check.Readability.ModuleDoc, []},
        # Tests don't need specs
        {Credo.Check.Readability.Specs, []},
        # Common in test setup
        {Credo.Check.Refactor.VariableRebinding, []}
      ]
    }
  ]
}
```
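Because this config lives outside the default `.credo.exs`, it has to be selected explicitly. A plausible invocation, assuming a Credo version that supports the `--config-file` flag:

```bash
# Lint the test suite against the stricter test-specific rules
mix credo --config-file .credo.test.exs --strict
```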
.devcontainer/setup.sh (new executable file, 39 lines)

```bash
#!/usr/bin/env bash
set -e

echo "→ fetching & compiling deps"
mix deps.get
mix compile

# only run Ecto if the project actually has those tasks
if mix help | grep -q "ecto.create"; then
  echo "→ waiting for database to be ready..."

  # Wait for database to be ready
  DB_HOST=${DB_HOST:-db}
  timeout=60
  while ! nc -z $DB_HOST 5432 2>/dev/null; do
    if [ $timeout -eq 0 ]; then
      echo "❌ Database connection timeout"
      exit 1
    fi
    echo "Waiting for database... ($timeout seconds remaining)"
    sleep 1
    timeout=$((timeout - 1))
  done

  # Give the database a bit more time to fully initialize
  echo "→ giving database 2 more seconds to fully initialize..."
  sleep 2

  echo "→ database is ready, running ecto.create && ecto.migrate"
  mix ecto.create --quiet
  mix ecto.migrate
fi

cd assets
echo "→ installing JS & CSS dependencies"
yarn install --frozen-lockfile
echo "→ building assets"

echo "✅ setup complete"
```
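Since the script defaults `DB_HOST` to `db` (the devcontainer's database service), it can be pointed elsewhere when run by hand; for example:

```bash
# Run the devcontainer setup against a locally running Postgres instead of the `db` service
DB_HOST=localhost bash .devcontainer/setup.sh
```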
```diff
@@ -9,4 +9,8 @@ export WANDERER_INVITES="false"
 export WANDERER_PUBLIC_API_DISABLED="false"
 export WANDERER_CHARACTER_API_DISABLED="false"
 export WANDERER_KILLS_SERVICE_ENABLED="true"
 export WANDERER_KILLS_BASE_URL="ws://host.docker.internal:4004"
+export WANDERER_SSE_ENABLED="true"
+export WANDERER_WEBHOOKS_ENABLED="true"
+export WANDERER_SSE_MAX_CONNECTIONS="1000"
+export WANDERER_WEBHOOK_TIMEOUT_MS="15000"
```
.github/workflows/advanced-test.yml (new file, 109 lines)

```yaml
name: Build Test

on:
  push:
    branches:
      - develop

env:
  MIX_ENV: prod
  GH_TOKEN: ${{ github.token }}
  REGISTRY_IMAGE: wandererltd/community-edition

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

jobs:
  deploy-test:
    name: 🚀 Deploy to test env (fly.io)
    runs-on: ubuntu-latest
    if: ${{ github.base_ref == 'develop' || (github.ref == 'refs/heads/develop' && github.event_name == 'push') }}
    steps:
      - name: ⬇️ Checkout repo
        uses: actions/checkout@v3
      - uses: superfly/flyctl-actions/setup-flyctl@master

      - name: 👀 Read app name
        uses: SebRollen/toml-action@v1.0.0
        id: app_name
        with:
          file: "fly.toml"
          field: "app"

      - name: 🚀 Deploy Test
        run: flyctl deploy --remote-only --wait-timeout=300 --ha=false
        env:
          FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}

  build:
    name: 🛠 Build
    runs-on: ubuntu-22.04
    if: ${{ (github.ref == 'refs/heads/develop') && github.event_name == 'push' }}
    permissions:
      checks: write
      contents: write
      packages: write
      attestations: write
      id-token: write
      pull-requests: write
      repository-projects: write
    strategy:
      matrix:
        otp: ["27"]
        elixir: ["1.17"]
        node-version: ["18.x"]
    outputs:
      commit_hash: ${{ steps.generate-changelog.outputs.commit_hash }}
    steps:
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV

      - name: Setup Elixir
        uses: erlef/setup-beam@v1
        with:
          otp-version: ${{matrix.otp}}
          elixir-version: ${{matrix.elixir}}
      # nix build would also work here because `todos` is the default package
      - name: ⬇️ Checkout repo
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: 😅 Cache deps
        id: cache-deps
        uses: actions/cache@v4
        env:
          cache-name: cache-elixir-deps
        with:
          path: |
            deps
          key: ${{ runner.os }}-mix-${{ matrix.elixir }}-${{ matrix.otp }}-${{ hashFiles('**/mix.lock') }}
          restore-keys: |
            ${{ runner.os }}-mix-${{ matrix.elixir }}-${{ matrix.otp }}-
      - name: 😅 Cache compiled build
        id: cache-build
        uses: actions/cache@v4
        env:
          cache-name: cache-compiled-build
        with:
          path: |
            _build
          key: ${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}-${{ hashFiles( '**/lib/**/*.{ex,eex}', '**/config/*.exs', '**/mix.exs' ) }}
          restore-keys: |
            ${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}-
            ${{ runner.os }}-build-
      # Step: Download project dependencies. If unchanged, uses
      # the cached version.
      - name: 🌐 Install dependencies
        run: mix deps.get --only "prod"

      # Step: Compile the project treating any warnings as errors.
      # Customize this step if a different behavior is desired.
      - name: 🛠 Compiles without warnings
        if: steps.cache-build.outputs.cache-hit != 'true'
        run: mix compile
```
.github/workflows/build.yml (25 lines changed)

```diff
@@ -4,6 +4,7 @@ on:
   push:
     branches:
       - main
+      - develop
       - "releases/*"
 env:
   MIX_ENV: prod
@@ -21,7 +22,7 @@ jobs:
   build:
     name: 🛠 Build
     runs-on: ubuntu-22.04
-    if: ${{ (github.ref == 'refs/heads/main') && github.event_name == 'push' }}
+    if: ${{ (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') && github.event_name == 'push' }}
     permissions:
       checks: write
       contents: write
@@ -36,7 +37,7 @@
         elixir: ["1.17"]
         node-version: ["18.x"]
     outputs:
-      commit_hash: ${{ steps.generate-changelog.outputs.commit_hash }}
+      commit_hash: ${{ steps.generate-changelog.outputs.commit_hash || steps.set-commit-develop.outputs.commit_hash }}
     steps:
       - name: Prepare
         run: |
@@ -89,12 +90,19 @@
       - name: Generate Changelog & Update Tag Version
         id: generate-changelog
+        if: github.ref == 'refs/heads/main'
         run: |
           git config --global user.name 'CI'
           git config --global user.email 'ci@users.noreply.github.com'
           mix git_ops.release --force-patch --yes
           git push --follow-tags
           echo "commit_hash=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT

+      - name: Set commit hash for develop
+        id: set-commit-develop
+        if: github.ref == 'refs/heads/develop'
+        run: |
+          echo "commit_hash=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT

   docker:
     name: 🛠 Build Docker Images
@@ -130,12 +138,14 @@
           fetch-depth: 0

       - name: Prepare Changelog
+        if: github.ref == 'refs/heads/main'
         run: |
           yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
           sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md

       - name: Get Release Tag
         id: get-latest-tag
+        if: github.ref == 'refs/heads/main'
         uses: "WyriHaximus/github-action-get-previous-tag@v1"
         with:
           fallback: 1.0.0
@@ -190,12 +200,14 @@
       - uses: markpatterson27/markdown-to-output@v1
         id: extract-changelog
+        if: github.ref == 'refs/heads/main'
         with:
          filepath: CHANGELOG.md

       - name: Get content
         uses: 2428392/gh-truncate-string-action@v1.3.0
         id: get-content
+        if: github.ref == 'refs/heads/main'
         with:
           stringToTruncate: |
             📣 Wanderer new release available 🎉
@@ -236,9 +248,11 @@
           tags: |
             type=ref,event=branch
             type=ref,event=pr
-            type=semver,pattern={{version}}
-            type=semver,pattern={{major}}.{{minor}}
-            type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }}
+            type=semver,pattern={{version}},enable=${{ github.ref == 'refs/heads/main' }}
+            type=semver,pattern={{major}}.{{minor}},enable=${{ github.ref == 'refs/heads/main' }}
+            type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }},enable=${{ github.ref == 'refs/heads/main' }}
+            type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
+            type=raw,value=develop-{{sha}},enable=${{ github.ref == 'refs/heads/develop' }}

       - name: Create manifest list and push
         working-directory: /tmp/digests
@@ -278,6 +292,7 @@
       - name: Discord Webhook Action
         uses: tsickert/discord-webhook@v5.3.0
+        if: github.ref == 'refs/heads/main'
         with:
           webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
           content: ${{ needs.docker.outputs.release-notes }}
```
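Under the new tag rules, a push to develop publishes both a moving `develop` tag and a commit-pinned `develop-{{sha}}` tag. Consumers can pick either; the SHA placeholder below is illustrative, with the image name taken from `REGISTRY_IMAGE`:

```bash
# Track the latest develop build
docker pull wandererltd/community-edition:develop

# Pin to an exact develop commit (sha filled in by the metadata action)
docker pull wandererltd/community-edition:develop-<sha>
```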
.github/workflows/flaky-test-detection.yml (new file, 300 lines)

```yaml
name: Flaky Test Detection

on:
  schedule:
    # Run nightly at 2 AM UTC
    - cron: '0 2 * * *'
  workflow_dispatch:
    inputs:
      test_file:
        description: 'Specific test file to check (optional)'
        required: false
        type: string
      iterations:
        description: 'Number of test iterations'
        required: false
        default: '10'
        type: string

env:
  MIX_ENV: test
  ELIXIR_VERSION: "1.17"
  OTP_VERSION: "27"

jobs:
  detect-flaky-tests:
    name: 🔍 Detect Flaky Tests
    runs-on: ubuntu-22.04

    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: wanderer_test
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432

    steps:
      - name: ⬇️ Checkout repository
        uses: actions/checkout@v4

      - name: 🏗️ Setup Elixir & Erlang
        uses: erlef/setup-beam@v1
        with:
          elixir-version: ${{ env.ELIXIR_VERSION }}
          otp-version: ${{ env.OTP_VERSION }}

      - name: 📦 Restore dependencies cache
        uses: actions/cache@v4
        id: deps-cache
        with:
          path: |
            deps
            _build
          key: ${{ runner.os }}-mix-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ hashFiles('**/mix.lock') }}
          restore-keys: |
            ${{ runner.os }}-mix-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-

      - name: 📦 Install dependencies
        if: steps.deps-cache.outputs.cache-hit != 'true'
        run: |
          mix deps.get
          mix deps.compile

      - name: 🏗️ Compile project
        run: mix compile --warnings-as-errors

      - name: 🏗️ Setup test database
        run: |
          mix ecto.create
          mix ecto.migrate
        env:
          DATABASE_URL: postgres://postgres:postgres@localhost:5432/wanderer_test

      - name: 🔍 Run flaky test detection
        id: flaky-detection
        run: |
          # Determine test target
          TEST_FILE="${{ github.event.inputs.test_file }}"
          ITERATIONS="${{ github.event.inputs.iterations || '10' }}"

          if [ -n "$TEST_FILE" ]; then
            echo "Checking specific file: $TEST_FILE"
            mix test.stability --runs $ITERATIONS --file "$TEST_FILE" --detect --report flaky_report.json
          else
            echo "Checking all tests"
            mix test.stability --runs $ITERATIONS --detect --report flaky_report.json
          fi
        env:
          DATABASE_URL: postgres://postgres:postgres@localhost:5432/wanderer_test
        continue-on-error: true

      - name: 📊 Upload flaky test report
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: flaky-test-report
          path: flaky_report.json
          retention-days: 30

      - name: 💬 Comment on flaky tests
        if: always()
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');

            // Read the report
            let report;
            try {
              const reportContent = fs.readFileSync('flaky_report.json', 'utf8');
              report = JSON.parse(reportContent);
            } catch (error) {
              console.log('No flaky test report found');
              return;
            }

            if (!report.flaky_tests || report.flaky_tests.length === 0) {
              console.log('No flaky tests detected!');
              return;
            }

            // Create issue body
            const issueBody = `## 🔍 Flaky Tests Detected

            The automated flaky test detection found ${report.flaky_tests.length} potentially flaky test(s).

            ### Summary
            - **Total test runs**: ${report.summary.total_runs}
            - **Success rate**: ${(report.summary.success_rate * 100).toFixed(1)}%
            - **Average duration**: ${(report.summary.avg_duration_ms / 1000).toFixed(2)}s

            ### Flaky Tests

            | Test | Failure Rate | Details |
            |------|--------------|---------|
            ${report.flaky_tests.map(test =>
              `| ${test.test} | ${(test.failure_rate * 100).toFixed(1)}% | Failed ${test.failures}/${report.summary.total_runs} runs |`
            ).join('\n')}

            ### Recommended Actions

            1. Review the identified tests for race conditions
            2. Check for timing dependencies or async issues
            3. Ensure proper test isolation and cleanup
            4. Consider adding explicit waits or synchronization
            5. Use \`async: false\` if tests share resources

            ---
            *This issue was automatically created by the flaky test detection workflow.*
            *Run time: ${new Date().toISOString()}*
            `;

            try {
              // Check if there's already an open issue
              const issues = await github.rest.issues.listForRepo({
                owner: context.repo.owner,
                repo: context.repo.repo,
                labels: 'flaky-test',
                state: 'open'
              });

              if (issues.data.length > 0) {
                // Update existing issue
                const issue = issues.data[0];
                try {
                  await github.rest.issues.createComment({
                    owner: context.repo.owner,
                    repo: context.repo.repo,
                    issue_number: issue.number,
                    body: issueBody
                  });
                  console.log(`Updated existing issue #${issue.number}`);
                } catch (commentError) {
                  console.error('Failed to create comment:', commentError.message);
                  throw commentError;
                }
              } else {
                // Create new issue
                try {
                  const newIssue = await github.rest.issues.create({
                    owner: context.repo.owner,
                    repo: context.repo.repo,
                    title: '🔍 Flaky Tests Detected',
                    body: issueBody,
                    labels: ['flaky-test', 'test-quality', 'automated']
                  });
                  console.log(`Created new issue #${newIssue.data.number}`);
                } catch (createError) {
                  console.error('Failed to create issue:', createError.message);
                  throw createError;
                }
              }
            } catch (listError) {
              console.error('Failed to list issues:', listError.message);
              console.error('API error details:', listError.response?.data || 'No response data');
              throw listError;
            }

      - name: 📈 Update metrics
        if: always()
        run: |
          # Parse and store metrics for tracking
          if [ -f flaky_report.json ]; then
            FLAKY_COUNT=$(jq '.flaky_tests | length' flaky_report.json)
            SUCCESS_RATE=$(jq '.summary.success_rate' flaky_report.json)

            echo "FLAKY_TEST_COUNT=$FLAKY_COUNT" >> $GITHUB_ENV
            echo "TEST_SUCCESS_RATE=$SUCCESS_RATE" >> $GITHUB_ENV

            # Log metrics (could be sent to monitoring service)
            echo "::notice title=Flaky Test Metrics::Found $FLAKY_COUNT flaky tests with ${SUCCESS_RATE}% success rate"
          fi

  analyze-test-history:
    name: 📊 Analyze Test History
    runs-on: ubuntu-22.04
    needs: detect-flaky-tests
    if: always()

    steps:
      - name: ⬇️ Checkout repository
        uses: actions/checkout@v4

      - name: 📥 Download previous reports
        uses: dawidd6/action-download-artifact@v3
        with:
          workflow: flaky-test-detection.yml
          workflow_conclusion: completed
          name: flaky-test-report
          path: historical-reports
          if_no_artifact_found: warn

      - name: 📊 Generate trend analysis
        run: |
          # Analyze historical trends
          python3 <<'EOF'
          import json
          import os
          from datetime import datetime
          import glob

          reports = []
          for report_file in glob.glob('historical-reports/*/flaky_report.json'):
              try:
                  with open(report_file, 'r') as f:
                      data = json.load(f)
                  reports.append(data)
              except:
                  pass

          if not reports:
              print("No historical data found")
              exit(0)

          # Sort by timestamp
          reports.sort(key=lambda x: x.get('timestamp', ''), reverse=True)

          # Analyze trends
          print("## Test Stability Trend Analysis")
          print(f"\nAnalyzed {len(reports)} historical reports")
          print("\n### Flaky Test Counts Over Time")

          for report in reports[:10]:  # Last 10 reports
              timestamp = report.get('timestamp', 'Unknown')
              flaky_count = len(report.get('flaky_tests', []))
              success_rate = report.get('summary', {}).get('success_rate', 0) * 100
              print(f"- {timestamp[:10]}: {flaky_count} flaky tests ({success_rate:.1f}% success rate)")

          # Identify persistently flaky tests
          all_flaky = {}
          for report in reports:
              for test in report.get('flaky_tests', []):
                  test_name = test.get('test', '')
                  if test_name not in all_flaky:
                      all_flaky[test_name] = 0
                  all_flaky[test_name] += 1

          if all_flaky:
              print("\n### Persistently Flaky Tests")
              sorted_flaky = sorted(all_flaky.items(), key=lambda x: x[1], reverse=True)
              for test_name, count in sorted_flaky[:5]:
                  percentage = (count / len(reports)) * 100
                  print(f"- {test_name}: Flaky in {count}/{len(reports)} runs ({percentage:.1f}%)")
          EOF

      - name: 💾 Save analysis
        uses: actions/upload-artifact@v4
        with:
          name: test-stability-analysis
          path: |
            flaky_report.json
            historical-reports/
          retention-days: 90
```
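The same detection can be reproduced locally through the project's `mix test.stability` task, using the flags the workflow passes (the test file path below is illustrative):

```bash
# Ten repeated runs over the whole suite, emitting the JSON report the workflow parses
mix test.stability --runs 10 --detect --report flaky_report.json

# Or stress a single suspect file with more iterations
mix test.stability --runs 25 --file test/some_flaky_test.exs --detect --report flaky_report.json
```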
.github/workflows/test.yml (new file, 333 lines)

````yaml
name: 🧪 Test Suite

on:
  pull_request:
    branches: [main, develop]
  push:
    branches: [main, develop]

permissions:
  contents: read
  pull-requests: write
  issues: write

env:
  MIX_ENV: test
  ELIXIR_VERSION: '1.16'
  OTP_VERSION: '26'
  NODE_VERSION: '18'

jobs:
  test:
    name: Test Suite
    runs-on: ubuntu-latest

    services:
      postgres:
        image: postgres:15
        env:
          POSTGRES_PASSWORD: postgres
          POSTGRES_DB: wanderer_test
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432

    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Setup Elixir/OTP
        uses: erlef/setup-beam@v1
        with:
          elixir-version: ${{ env.ELIXIR_VERSION }}
          otp-version: ${{ env.OTP_VERSION }}

      - name: Cache Elixir dependencies
        uses: actions/cache@v3
        with:
          path: |
            deps
            _build
          key: ${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }}
          restore-keys: ${{ runner.os }}-mix-

      - name: Install Elixir dependencies
        run: |
          mix deps.get
          mix deps.compile

      - name: Check code formatting
        id: format
        run: |
          if mix format --check-formatted; then
            echo "status=✅ Passed" >> $GITHUB_OUTPUT
            echo "count=0" >> $GITHUB_OUTPUT
          else
            echo "status=❌ Failed" >> $GITHUB_OUTPUT
            echo "count=1" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Compile code and capture warnings
        id: compile
        run: |
          # Capture compilation output
          output=$(mix compile 2>&1 || true)
          echo "$output" > compile_output.txt

          # Count warnings
          warning_count=$(echo "$output" | grep -c "warning:" || echo "0")

          # Check if compilation succeeded
          if mix compile > /dev/null 2>&1; then
            echo "status=✅ Success" >> $GITHUB_OUTPUT
          else
            echo "status=❌ Failed" >> $GITHUB_OUTPUT
          fi

          echo "warnings=$warning_count" >> $GITHUB_OUTPUT
          echo "output<<EOF" >> $GITHUB_OUTPUT
          echo "$output" >> $GITHUB_OUTPUT
          echo "EOF" >> $GITHUB_OUTPUT
        continue-on-error: true

      - name: Setup database
        run: |
          mix ecto.create
          mix ecto.migrate

      - name: Run tests with coverage
        id: tests
        run: |
          # Run tests with coverage
          output=$(mix test --cover 2>&1 || true)
          echo "$output" > test_output.txt

          # Parse test results
          if echo "$output" | grep -q "0 failures"; then
            echo "status=✅ All Passed" >> $GITHUB_OUTPUT
            test_status="success"
          else
            echo "status=❌ Some Failed" >> $GITHUB_OUTPUT
            test_status="failed"
          fi

          # Extract test counts
          test_line=$(echo "$output" | grep -E "[0-9]+ tests?, [0-9]+ failures?" | head -1 || echo "0 tests, 0 failures")
          total_tests=$(echo "$test_line" | grep -o '[0-9]\+ tests\?' | grep -o '[0-9]\+' | head -1 || echo "0")
          failures=$(echo "$test_line" | grep -o '[0-9]\+ failures\?' | grep -o '[0-9]\+' | head -1 || echo "0")

          echo "total=$total_tests" >> $GITHUB_OUTPUT
          echo "failures=$failures" >> $GITHUB_OUTPUT
          echo "passed=$((total_tests - failures))" >> $GITHUB_OUTPUT

          # Calculate success rate
          if [ "$total_tests" -gt 0 ]; then
            success_rate=$(echo "scale=1; ($total_tests - $failures) * 100 / $total_tests" | bc)
          else
            success_rate="0"
          fi
          echo "success_rate=$success_rate" >> $GITHUB_OUTPUT

          exit_code=$?
          echo "exit_code=$exit_code" >> $GITHUB_OUTPUT
        continue-on-error: true

      - name: Generate coverage report
        id: coverage
        run: |
          # Generate coverage report with GitHub format
          output=$(mix coveralls.github 2>&1 || true)
          echo "$output" > coverage_output.txt

          # Extract coverage percentage
          coverage=$(echo "$output" | grep -o '[0-9]\+\.[0-9]\+%' | head -1 | sed 's/%//' || echo "0")
          if [ -z "$coverage" ]; then
            coverage="0"
          fi

          echo "percentage=$coverage" >> $GITHUB_OUTPUT

          # Determine status
          if (( $(echo "$coverage >= 80" | bc -l) )); then
            echo "status=✅ Excellent" >> $GITHUB_OUTPUT
          elif (( $(echo "$coverage >= 60" | bc -l) )); then
            echo "status=⚠️ Good" >> $GITHUB_OUTPUT
          else
            echo "status=❌ Needs Improvement" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Run Credo analysis
        id: credo
        run: |
          # Run Credo and capture output
          output=$(mix credo --strict --format=json 2>&1 || true)
          echo "$output" > credo_output.txt

          # Try to parse JSON output
          if echo "$output" | jq . > /dev/null 2>&1; then
            issues=$(echo "$output" | jq '.issues | length' 2>/dev/null || echo "0")
            high_issues=$(echo "$output" | jq '.issues | map(select(.priority == "high")) | length' 2>/dev/null || echo "0")
            normal_issues=$(echo "$output" | jq '.issues | map(select(.priority == "normal")) | length' 2>/dev/null || echo "0")
            low_issues=$(echo "$output" | jq '.issues | map(select(.priority == "low")) | length' 2>/dev/null || echo "0")
          else
            # Fallback: try to count issues from regular output
            regular_output=$(mix credo --strict 2>&1 || true)
            issues=$(echo "$regular_output" | grep -c "┃" || echo "0")
            high_issues="0"
            normal_issues="0"
            low_issues="0"
          fi

          echo "total_issues=$issues" >> $GITHUB_OUTPUT
          echo "high_issues=$high_issues" >> $GITHUB_OUTPUT
          echo "normal_issues=$normal_issues" >> $GITHUB_OUTPUT
          echo "low_issues=$low_issues" >> $GITHUB_OUTPUT

          # Determine status
          if [ "$issues" -eq 0 ]; then
            echo "status=✅ Clean" >> $GITHUB_OUTPUT
          elif [ "$issues" -lt 10 ]; then
            echo "status=⚠️ Minor Issues" >> $GITHUB_OUTPUT
          else
            echo "status=❌ Needs Attention" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Run Dialyzer analysis
        id: dialyzer
        run: |
          # Ensure PLT is built
          mix dialyzer --plt

          # Run Dialyzer and capture output
          output=$(mix dialyzer --format=github 2>&1 || true)
          echo "$output" > dialyzer_output.txt

          # Count warnings and errors
          warnings=$(echo "$output" | grep -c "warning:" || echo "0")
          errors=$(echo "$output" | grep -c "error:" || echo "0")

          echo "warnings=$warnings" >> $GITHUB_OUTPUT
          echo "errors=$errors" >> $GITHUB_OUTPUT

          # Determine status
          if [ "$errors" -eq 0 ] && [ "$warnings" -eq 0 ]; then
            echo "status=✅ Clean" >> $GITHUB_OUTPUT
          elif [ "$errors" -eq 0 ]; then
            echo "status=⚠️ Warnings Only" >> $GITHUB_OUTPUT
          else
            echo "status=❌ Has Errors" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Create test results summary
        id: summary
        run: |
          # Calculate overall score
          format_score=${{ steps.format.outputs.count == '0' && '100' || '0' }}
          compile_score=${{ steps.compile.outputs.warnings == '0' && '100' || '80' }}
          test_score=${{ steps.tests.outputs.success_rate }}
          coverage_score=${{ steps.coverage.outputs.percentage }}
          credo_score=$(echo "scale=0; (100 - ${{ steps.credo.outputs.total_issues }} * 2)" | bc | sed 's/^-.*$/0/')
          dialyzer_score=$(echo "scale=0; (100 - ${{ steps.dialyzer.outputs.warnings }} * 2 - ${{ steps.dialyzer.outputs.errors }} * 10)" | bc | sed 's/^-.*$/0/')

          overall_score=$(echo "scale=1; ($format_score + $compile_score + $test_score + $coverage_score + $credo_score + $dialyzer_score) / 6" | bc)

          echo "overall_score=$overall_score" >> $GITHUB_OUTPUT

          # Determine overall status
          if (( $(echo "$overall_score >= 90" | bc -l) )); then
            echo "overall_status=🌟 Excellent" >> $GITHUB_OUTPUT
          elif (( $(echo "$overall_score >= 80" | bc -l) )); then
            echo "overall_status=✅ Good" >> $GITHUB_OUTPUT
          elif (( $(echo "$overall_score >= 70" | bc -l) )); then
            echo "overall_status=⚠️ Needs Improvement" >> $GITHUB_OUTPUT
          else
            echo "overall_status=❌ Poor" >> $GITHUB_OUTPUT
          fi
        continue-on-error: true

      - name: Find existing PR comment
        if: github.event_name == 'pull_request'
        id: find_comment
        uses: peter-evans/find-comment@v3
        with:
          issue-number: ${{ github.event.pull_request.number }}
          comment-author: 'github-actions[bot]'
          body-includes: '## 🧪 Test Results Summary'

      - name: Create or update PR comment
        if: github.event_name == 'pull_request'
        uses: peter-evans/create-or-update-comment@v4
        with:
          comment-id: ${{ steps.find_comment.outputs.comment-id }}
          issue-number: ${{ github.event.pull_request.number }}
          edit-mode: replace
          body: |
            ## 🧪 Test Results Summary

            **Overall Quality Score: ${{ steps.summary.outputs.overall_score }}%** ${{ steps.summary.outputs.overall_status }}

            ### 📊 Metrics Dashboard

            | Category | Status | Count | Details |
            |----------|---------|-------|---------|
            | 📝 **Code Formatting** | ${{ steps.format.outputs.status }} | ${{ steps.format.outputs.count }} issues | `mix format --check-formatted` |
            | 🔨 **Compilation** | ${{ steps.compile.outputs.status }} | ${{ steps.compile.outputs.warnings }} warnings | `mix compile` |
            | 🧪 **Tests** | ${{ steps.tests.outputs.status }} | ${{ steps.tests.outputs.failures }}/${{ steps.tests.outputs.total }} failed | Success rate: ${{ steps.tests.outputs.success_rate }}% |
            | 📊 **Coverage** | ${{ steps.coverage.outputs.status }} | ${{ steps.coverage.outputs.percentage }}% | `mix coveralls` |
            | 🎯 **Credo** | ${{ steps.credo.outputs.status }} | ${{ steps.credo.outputs.total_issues }} issues | High: ${{ steps.credo.outputs.high_issues }}, Normal: ${{ steps.credo.outputs.normal_issues }}, Low: ${{ steps.credo.outputs.low_issues }} |
            | 🔍 **Dialyzer** | ${{ steps.dialyzer.outputs.status }} | ${{ steps.dialyzer.outputs.errors }} errors, ${{ steps.dialyzer.outputs.warnings }} warnings | `mix dialyzer` |

            ### 🎯 Quality Gates

            Based on the project's quality thresholds:
            - **Compilation Warnings**: ${{ steps.compile.outputs.warnings }}/148 (limit: 148)
            - **Credo Issues**: ${{ steps.credo.outputs.total_issues }}/87 (limit: 87)
            - **Dialyzer Warnings**: ${{ steps.dialyzer.outputs.warnings }}/161 (limit: 161)
            - **Test Coverage**: ${{ steps.coverage.outputs.percentage }}%/50% (minimum: 50%)
            - **Test Failures**: ${{ steps.tests.outputs.failures }}/0 (limit: 0)

            <details>
            <summary>📈 Progress Toward Goals</summary>

            Target goals for the project:
            - ✨ **Zero compilation warnings** (currently: ${{ steps.compile.outputs.warnings }})
            - ✨ **≤10 Credo issues** (currently: ${{ steps.credo.outputs.total_issues }})
            - ✨ **Zero Dialyzer warnings** (currently: ${{ steps.dialyzer.outputs.warnings }})
            - ✨ **≥85% test coverage** (currently: ${{ steps.coverage.outputs.percentage }}%)
            - ✅ **Zero test failures** (currently: ${{ steps.tests.outputs.failures }})

            </details>

            <details>
            <summary>🔧 Quick Actions</summary>

            To improve code quality:
            ```bash
            # Fix formatting issues
            mix format

            # View detailed Credo analysis
            mix credo --strict

            # Check Dialyzer warnings
            mix dialyzer

            # Generate detailed coverage report
            mix coveralls.html
            ```

            </details>

            ---

            🤖 *Auto-generated by GitHub Actions* • Updated: ${{ github.event.head_commit.timestamp }}

            > **Note**: This comment will be updated automatically when new commits are pushed to this PR.
````
.gitignore (6 lines changed)

```diff
@@ -4,7 +4,8 @@
 *.iml

 *.key
-
+.repomixignore
+repomix*
 /.idea/
 /node_modules/
 /assets/node_modules/
@@ -17,6 +18,9 @@
 /priv/static/*.js
 /priv/static/*.css

+# Dialyzer PLT files
+/priv/plts/
+
 .DS_Store
 **/.DS_Store

```
CHANGELOG.md (82 lines added)

```markdown
<!-- changelog -->

## [v1.75.3](https://github.com/wanderer-industries/wanderer/compare/v1.75.2...v1.75.3) (2025-08-10)

### Bug Fixes:

* core: Fixed character tracking issues

## [v1.75.2](https://github.com/wanderer-industries/wanderer/compare/v1.75.1...v1.75.2) (2025-08-10)

### Bug Fixes:

* Map: Fix indents for ally logos in list "On the map"

* Map: Fix cancelling ping from system context menu

* Map: Hide admin settings tab

* Map: Remote map setting refactoring

## [v1.75.1](https://github.com/wanderer-industries/wanderer/compare/v1.75.0...v1.75.1) (2025-07-30)

### Bug Fixes:

* unable to cancel ping from right click context menu

## [v1.75.0](https://github.com/wanderer-industries/wanderer/compare/v1.74.13...v1.75.0) (2025-07-29)

### Features:

* autoset connection size for c4->null and c13

* apiv1 and tests

* support webhook and sse

* disable webhook/websocket by default

* add websocket and webhooks for events

* Add Jest testing for getState util

### Bug Fixes:

* remove bug with lazy delete

* update broken length and remove verbose logging

* removed old documents

* removed unneeded api, and fixed data comparision bug

* ci comments

* test updates

* properly send sse events

* add test coverage for api

* add more logging around character online and tracking

* clean up SSE warnings

* update env variable usage for sse

* sse cleanup

* remove misleading error

* update killactivity color on nodes

## [v1.74.13](https://github.com/wanderer-industries/wanderer/compare/v1.74.12...v1.74.13) (2025-07-29)
```
assets/jest.config.js (new file, 14 lines)

```javascript
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'jsdom',
  roots: ['<rootDir>'],
  moduleDirectories: ['node_modules', 'js'],
  moduleNameMapper: {
    '^@/(.*)$': '<rootDir>/js/$1',
    '\\.scss$': 'identity-obj-proxy', // Mock SCSS files
  },
  transform: {
    '^.+\\.(ts|tsx)$': 'ts-jest',
    '^.+\\.(js|jsx)$': 'babel-jest', // Add babel-jest for JS/JSX files if needed
  },
};
```
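With this preset in place, the front-end tests run from the assets directory; a minimal sketch, assuming `jest` is available through the package's dev dependencies:

```bash
cd assets
# Run the Jest suite using the config above
npx jest

# Re-run affected tests on change during development
npx jest --watch
```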
```diff
@@ -1,7 +1,7 @@
 .vertical-tabs-container {
   display: flex;
   width: 100%;
-  min-height: 300px;
+  min-height: 400px;

   .p-tabview {
     width: 100%;
@@ -68,6 +68,28 @@
       }
     }

+    &.color-warn {
+      @apply bg-yellow-600/5 border-r-yellow-600/20;
+
+      &:hover {
+        @apply bg-yellow-600/10 border-r-yellow-600/40;
+      }
+
+      &.p-tabview-selected {
+        @apply bg-yellow-600/10 border-r-yellow-600;
+
+        .p-tabview-nav-link {
+          @apply text-yellow-600;
+        }
+
+        &:hover {
+          @apply bg-yellow-600/10 border-r-yellow-600;
+        }
+      }
+    }
   }
 }
```
```diff
@@ -19,7 +19,7 @@ export interface ContextMenuSystemProps {
   onSystemStatus(val: number): void;
   onSystemLabels(val: string): void;
   onCustomLabelDialog(): void;
-  onTogglePing(type: PingType, solar_system_id: string, hasPing: boolean): void;
+  onTogglePing(type: PingType, solar_system_id: string, ping_id: string | undefined, hasPing: boolean): void;
   onWaypointSet: WaypointSetContextHandler;
 }
```
```diff
@@ -109,7 +109,7 @@ export const useContextMenuSystemItems = ({

     { separator: true },
     {
-      command: () => onTogglePing(PingType.Rally, systemId, hasPing),
+      command: () => onTogglePing(PingType.Rally, systemId, ping?.id, hasPing),
       disabled: !isShowPingBtn,
       template: () => {
         const iconClasses = clsx({
```
```diff
@@ -1,17 +1,24 @@
 import { Node } from 'reactflow';
-import { useCallback, useRef, useState } from 'react';
+import { useCallback, useMemo, useRef, useState } from 'react';
 import { ContextMenu } from 'primereact/contextmenu';
 import { SolarSystemRawType } from '@/hooks/Mapper/types';
 import { ctxManager } from '@/hooks/Mapper/utils/contextManager.ts';
 import { NodeSelectionMouseHandler } from '@/hooks/Mapper/components/contexts/types.ts';
 import { useDeleteSystems } from '@/hooks/Mapper/components/contexts/hooks';
+import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';

 export const useContextMenuSystemMultipleHandlers = () => {
+  const {
+    data: { pings },
+  } = useMapRootState();
+
   const contextMenuRef = useRef<ContextMenu | null>(null);
   const [systems, setSystems] = useState<Node<SolarSystemRawType>[]>();

   const { deleteSystems } = useDeleteSystems();

+  const ping = useMemo(() => (pings.length === 1 ? pings[0] : undefined), [pings]);
+
   const handleSystemMultipleContext: NodeSelectionMouseHandler = (ev, systems_) => {
     setSystems(systems_);
     ev.preventDefault();
@@ -24,13 +31,17 @@ export const useContextMenuSystemMultipleHandlers = () => {
       return;
     }

-    const sysToDel = systems.filter(x => !x.data.locked).map(x => x.id);
+    const sysToDel = systems
+      .filter(x => !x.data.locked)
+      .filter(x => x.id !== ping?.solar_system_id)
+      .map(x => x.id);

     if (sysToDel.length === 0) {
       return;
     }

     deleteSystems(sysToDel);
-  }, [deleteSystems, systems]);
+  }, [deleteSystems, systems, ping]);

   return {
     handleSystemMultipleContext,
```
```diff
@@ -1,6 +1,6 @@
 import { MapUserSettings, SettingsWithVersion } from '@/hooks/Mapper/mapRootProvider/types.ts';

-const REQUIRED_KEYS = [
+export const REQUIRED_KEYS = [
   'widgets',
   'interface',
   'onTheMap',
```
```diff
@@ -1,3 +1,4 @@
 export * from './useSystemInfo';
 export * from './useGetOwnOnlineCharacters';
 export * from './useElementWidth';
+export * from './useDetectSettingsChanged';
```
useDetectSettingsChanged.ts (new file, 23 lines)

```typescript
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { useEffect, useState } from 'react';

export const useDetectSettingsChanged = () => {
  const {
    storedSettings: {
      interfaceSettings,
      settingsRoutes,
      settingsLocal,
      settingsSignatures,
      settingsOnTheMap,
      settingsKills,
    },
  } = useMapRootState();
  const [counter, setCounter] = useState(0);

  useEffect(
    () => setCounter(x => x + 1),
    [interfaceSettings, settingsRoutes, settingsLocal, settingsSignatures, settingsOnTheMap, settingsKills],
  );

  return counter;
};
```
```diff
@@ -22,7 +22,9 @@ import { KillsCounter } from '@/hooks/Mapper/components/map/components/KillsCoun
 export const SolarSystemNodeDefault = memo((props: NodeProps<MapSolarSystemType>) => {
   const nodeVars = useSolarSystemNode(props);
   const { localCounterCharacters } = useLocalCounter(nodeVars);
-  const localKillsCount = useNodeKillsCount(nodeVars.solarSystemId, nodeVars.killsCount);
+  const { killsCount: localKillsCount, killsActivityType: localKillsActivityType } = useNodeKillsCount(
+    nodeVars.solarSystemId,
+  );

   // console.log('JOipP', `render ${nodeVars.id}`, render++);

@@ -38,13 +40,13 @@ export const SolarSystemNodeDefault = memo((props: NodeProps<MapSolarSystemType>
         </div>
       )}

-      {localKillsCount != null && localKillsCount > 0 && nodeVars.solarSystemId && (
+      {localKillsCount != null && localKillsCount > 0 && nodeVars.solarSystemId && localKillsActivityType && (
         <KillsCounter
           killsCount={localKillsCount}
           systemId={nodeVars.solarSystemId}
           size={TooltipSize.lg}
-          killsActivityType={nodeVars.killsActivityType}
-          className={clsx(classes.Bookmark, MARKER_BOOKMARK_BG_STYLES[nodeVars.killsActivityType!])}
+          killsActivityType={localKillsActivityType}
+          className={clsx(classes.Bookmark, MARKER_BOOKMARK_BG_STYLES[localKillsActivityType])}
         >
           <div className={clsx(classes.BookmarkWithIcon)}>
             <span className={clsx(PrimeIcons.BOLT, classes.icon)} />
```
```diff
@@ -21,7 +21,9 @@ import { KillsCounter } from '@/hooks/Mapper/components/map/components/KillsCoun
 export const SolarSystemNodeTheme = memo((props: NodeProps<MapSolarSystemType>) => {
   const nodeVars = useSolarSystemNode(props);
   const { localCounterCharacters } = useLocalCounter(nodeVars);
-  const localKillsCount = useNodeKillsCount(nodeVars.solarSystemId, nodeVars.killsCount);
+  const { killsCount: localKillsCount, killsActivityType: localKillsActivityType } = useNodeKillsCount(
+    nodeVars.solarSystemId,
+  );

   // console.log('JOipP', `render ${nodeVars.id}`, render++);

@@ -37,13 +39,13 @@ export const SolarSystemNodeTheme = memo((props: NodeProps<MapSolarSystemType>)
         </div>
       )}

-      {localKillsCount && localKillsCount > 0 && nodeVars.solarSystemId && (
+      {localKillsCount && localKillsCount > 0 && nodeVars.solarSystemId && localKillsActivityType && (
         <KillsCounter
           killsCount={localKillsCount}
           systemId={nodeVars.solarSystemId}
           size={TooltipSize.lg}
-          killsActivityType={nodeVars.killsActivityType}
-          className={clsx(classes.Bookmark, MARKER_BOOKMARK_BG_STYLES[nodeVars.killsActivityType!])}
+          killsActivityType={localKillsActivityType}
+          className={clsx(classes.Bookmark, MARKER_BOOKMARK_BG_STYLES[localKillsActivityType])}
         >
           <div className={clsx(classes.BookmarkWithIcon)}>
             <span className={clsx(PrimeIcons.BOLT, classes.icon)} />
```
```diff
@@ -14,7 +14,13 @@ interface MapEvent {
   payload?: Kill[];
 }

-export function useNodeKillsCount(systemId: number | string, initialKillsCount: number | null): number | null {
+function getActivityType(count: number): string {
+  if (count <= 5) return 'activityNormal';
+  if (count <= 30) return 'activityWarn';
+  return 'activityDanger';
+}
+
+export function useNodeKillsCount(systemId: number | string, initialKillsCount: number | null = null): { killsCount: number | null; killsActivityType: string | null } {
   const [killsCount, setKillsCount] = useState<number | null>(initialKillsCount);
   const { data: mapData } = useMapRootState();
   const { detailedKills = {} } = mapData;
@@ -73,5 +79,9 @@ export function useNodeKillsCount(systemId: number | string, initialKillsCount:

   useMapEventListener(handleEvent);

-  return killsCount;
+  const killsActivityType = useMemo(() => {
+    return killsCount !== null && killsCount > 0 ? getActivityType(killsCount) : null;
+  }, [killsCount]);
+
+  return { killsCount, killsActivityType };
 }
```
```diff
@@ -15,20 +15,12 @@ import { useSystemName } from './useSystemName';
 import { LabelInfo, useLabelsInfo } from './useLabelsInfo';
 import { getSystemStaticInfo } from '@/hooks/Mapper/mapRootProvider/hooks/useLoadSystemStatic';

-function getActivityType(count: number): string {
-  if (count <= 5) return 'activityNormal';
-  if (count <= 30) return 'activityWarn';
-  return 'activityDanger';
-}
-
 export interface SolarSystemNodeVars {
   id: string;
   selected: boolean;
   visible: boolean;
   isWormhole: boolean;
   classTitleColor: string | null;
-  killsCount: number | null;
-  killsActivityType: string | null;
   hasUserCharacters: boolean;
   showHandlers: boolean;
   regionClass: string | null;
@@ -126,7 +118,6 @@ export const useSolarSystemNode = (props: NodeProps<MapSolarSystemType>): SolarS
     characters,
     wormholesData,
     hubs,
-    kills,
     userCharacters,
     isConnecting,
     hoverNodeId,
@@ -163,9 +154,6 @@ export const useSolarSystemNode = (props: NodeProps<MapSolarSystemType>): SolarS
     isShowLinkedSigId,
   });

-  const killsCount = useMemo(() => kills[parseInt(solar_system_id)] ?? null, [kills, solar_system_id]);
-  const killsActivityType = killsCount ? getActivityType(killsCount) : null;
-
   const hasUserCharacters = useMemo(
     () => charactersInSystem.some(x => userCharacters.includes(x.eve_id)),
     [charactersInSystem, userCharacters],
@@ -215,8 +203,6 @@ export const useSolarSystemNode = (props: NodeProps<MapSolarSystemType>): SolarS
     visible,
     isWormhole,
     classTitleColor,
-    killsCount,
-    killsActivityType,
     hasUserCharacters,
     userCharacters,
     showHandlers,
```
```diff
@@ -14,6 +14,7 @@ import { PrimeIcons } from 'primereact/api';
 import { ConfirmPopup } from 'primereact/confirmpopup';
 import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
 import { OutCommand } from '@/hooks/Mapper/types';
+import { useConfirmPopup } from '@/hooks/Mapper/hooks';

 const TOOLTIP_PROPS = { content: 'Remove comment', position: TooltipPosition.top };

@@ -28,8 +29,7 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
   const char = useGetCacheCharacter(characterEveId);
   const [hovered, setHovered] = useState(false);

-  const cpRemoveBtnRef = useRef<HTMLElement>();
-  const [cpRemoveVisible, setCpRemoveVisible] = useState(false);
+  const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();

   const { outCommand } = useMapRootState();
   const ref = useRef({ outCommand, id });
@@ -45,9 +45,6 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
   const handleMouseEnter = useCallback(() => setHovered(true), []);
   const handleMouseLeave = useCallback(() => setHovered(false), []);

-  const handleShowCP = useCallback(() => setCpRemoveVisible(true), []);
-  const handleHideCP = useCallback(() => setCpRemoveVisible(false), []);
-
   return (
     <>
       <InfoDrawer
@@ -68,11 +65,11 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
           {!hovered && <TimeAgo timestamp={time} />}
           {hovered && (
             // @ts-ignore
-            <div ref={cpRemoveBtnRef}>
+            <div ref={cfRef}>
               <WdImgButton
                 className={clsx(PrimeIcons.TRASH, 'hover:text-red-400')}
                 tooltip={TOOLTIP_PROPS}
-                onClick={handleShowCP}
+                onClick={cfShow}
               />
             </div>
           )}
@@ -85,9 +82,9 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
       </InfoDrawer>

       <ConfirmPopup
-        target={cpRemoveBtnRef.current}
-        visible={cpRemoveVisible}
-        onHide={handleHideCP}
+        target={cfRef.current}
+        visible={cfVisible}
+        onHide={cfHide}
         message="Are you sure you want to delete?"
         icon="pi pi-exclamation-triangle"
         accept={handleDelete}
```
@@ -16,8 +16,9 @@ import { PrimeIcons } from 'primereact/api';
import { Button } from 'primereact/button';
import { ConfirmPopup } from 'primereact/confirmpopup';
import { Toast } from 'primereact/toast';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { useCallback, useEffect, useMemo, useRef } from 'react';
import useRefState from 'react-usestateref';
import { useConfirmPopup } from '@/hooks/Mapper/hooks';

const PING_PLACEMENT_MAP = {
[PingsPlacement.rightTop]: 'top-right',
@@ -78,9 +79,7 @@ export interface PingsInterfaceProps {
export const PingsInterface = ({ hasLeftOffset }: PingsInterfaceProps) => {
const toast = useRef<Toast>(null);
const [isShow, setIsShow, isShowRef] = useRefState(false);

const cpRemoveBtnRef = useRef<HTMLElement>();
const [cpRemoveVisible, setCpRemoveVisible] = useState(false);
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();

const {
storedSettings: { interfaceSettings },
@@ -98,9 +97,6 @@ export const PingsInterface = ({ hasLeftOffset }: PingsInterfaceProps) => {

const ping = useMemo(() => (pings.length === 1 ? pings[0] : null), [pings]);

const handleShowCP = useCallback(() => setCpRemoveVisible(true), []);
const handleHideCP = useCallback(() => setCpRemoveVisible(false), []);

const navigateTo = useCallback(() => {
if (!ping) {
return;
@@ -242,11 +238,11 @@ export const PingsInterface = ({ hasLeftOffset }: PingsInterfaceProps) => {
/>

{/*@ts-ignore*/}
<div ref={cpRemoveBtnRef}>
<div ref={cfRef}>
<WdImgButton
className={clsx('pi-trash', 'text-red-400 hover:text-red-300')}
tooltip={DELETE_TOOLTIP_PROPS}
onClick={handleShowCP}
onClick={cfShow}
/>
</div>
{/* TODO ADD solar system menu*/}
@@ -272,9 +268,9 @@ export const PingsInterface = ({ hasLeftOffset }: PingsInterfaceProps) => {
/>

<ConfirmPopup
target={cpRemoveBtnRef.current}
visible={cpRemoveVisible}
onHide={handleHideCP}
target={cfRef.current}
visible={cfVisible}
onHide={cfHide}
message="Are you sure you want to delete ping?"
|
||||
icon="pi pi-exclamation-triangle text-orange-400"
|
||||
accept={removePing}
|
||||
|
||||
@@ -62,8 +62,11 @@ function useSignatureUndo(

// determine timeout from settings
const timeoutMs = getDeletionTimeoutMs(settings);

// Ensure a minimum of 1 second for immediate deletion so the UI still shows the countdown
const effectiveTimeoutMs = timeoutMs === 0 ? 1000 : timeoutMs;

setCountdown(Math.ceil(timeoutMs / 1000));
setCountdown(Math.ceil(effectiveTimeoutMs / 1000));

// start new interval
intervalRef.current = window.setInterval(() => {

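A condensed sketch of the fallback the comment describes; the 0 -> 1000 ms mapping is what keeps the countdown visible for immediate deletions (inputs illustrative):

```ts
// Mirrors effectiveTimeoutMs from the hunk above.
const effectiveTimeout = (timeoutMs: number) => (timeoutMs === 0 ? 1000 : timeoutMs);

Math.ceil(effectiveTimeout(0) / 1000);     // 1  -> the UI can show a one-second countdown
Math.ceil(effectiveTimeout(30000) / 1000); // 30 -> normal lazy-delete countdown
```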
@@ -0,0 +1,52 @@
import { getState } from './getState';
import { UNKNOWN_SIGNATURE_NAME } from '@/hooks/Mapper/helpers';
import { SignatureGroup, SystemSignature } from '@/hooks/Mapper/types';

describe('getState', () => {
const mockSignaturesMatch: string[] = []; // This parameter is not used in the function

it('should return 0 if group is undefined', () => {
const newSig: SystemSignature = { id: '1', name: 'Test Sig', group: undefined } as SystemSignature;
expect(getState(mockSignaturesMatch, newSig)).toBe(0);
});

it('should return 0 if group is CosmicSignature', () => {
const newSig: SystemSignature = { id: '1', name: 'Test Sig', group: SignatureGroup.CosmicSignature } as SystemSignature;
expect(getState(mockSignaturesMatch, newSig)).toBe(0);
});

it('should return 1 if group is not CosmicSignature and name is undefined', () => {
const newSig: SystemSignature = { id: '1', name: undefined, group: SignatureGroup.Wormhole } as SystemSignature;
expect(getState(mockSignaturesMatch, newSig)).toBe(1);
});

it('should return 1 if group is not CosmicSignature and name is empty', () => {
const newSig: SystemSignature = { id: '1', name: '', group: SignatureGroup.Wormhole } as SystemSignature;
expect(getState(mockSignaturesMatch, newSig)).toBe(1);
});

it('should return 1 if group is not CosmicSignature and name is UNKNOWN_SIGNATURE_NAME', () => {
const newSig: SystemSignature = { id: '1', name: UNKNOWN_SIGNATURE_NAME, group: SignatureGroup.Wormhole } as SystemSignature;
expect(getState(mockSignaturesMatch, newSig)).toBe(1);
});

it('should return 2 if group is not CosmicSignature and name is a non-empty string', () => {
const newSig: SystemSignature = { id: '1', name: 'Custom Name', group: SignatureGroup.Wormhole } as SystemSignature;
expect(getState(mockSignaturesMatch, newSig)).toBe(2);
});

// According to the current implementation, state = -1 is unreachable
// because the conditions for 0, 1, and 2 cover all possibilities for the given inputs.
// If the logic of getState were to change to make -1 possible, a test case should be added here.
// For now, we can test a scenario that should lead to one of the valid states,
// for example, if group is something other than CosmicSignature and name is valid.
it('should handle other valid signature groups correctly, leading to state 2 with a valid name', () => {
const newSig: SystemSignature = { id: '1', name: 'Combat Site', group: SignatureGroup.CombatSite } as SystemSignature;
expect(getState(mockSignaturesMatch, newSig)).toBe(2);
});

it('should handle other valid signature groups correctly, leading to state 1 with an empty name', () => {
const newSig: SystemSignature = { id: '1', name: '', group: SignatureGroup.DataSite } as SystemSignature;
expect(getState(mockSignaturesMatch, newSig)).toBe(1);
});
});
@@ -76,15 +76,10 @@ export const useSystemSignaturesData = ({
if (removed.length > 0) {
await processRemovedSignatures(removed, added, updated);

// Only show pending deletions if:
// 1. Lazy deletion is enabled AND
// 2. Deletion timing is not immediate (> 0)
// Show pending deletions if lazy deletion is enabled
// The deletion timing controls how long the countdown lasts, not whether lazy delete is active
if (onSignatureDeleted && lazyDeleteValue) {
const timeoutMs = getDeletionTimeoutMs(settings);

if (timeoutMs > 0) {
onSignatureDeleted(removed);
}
onSignatureDeleted(removed);
}
}

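The net effect is that the guard drops the timing check; a condensed before/after sketch using the names from the hunk (inputs illustrative):

```ts
// Old: pending deletions were hidden entirely when timing was immediate.
const notifiedBefore = (lazyDeleteValue: boolean, timeoutMs: number) =>
  lazyDeleteValue && timeoutMs > 0;

// New: lazy delete alone decides; the timeout only sizes the countdown.
const notifiedAfter = (lazyDeleteValue: boolean) => lazyDeleteValue;

notifiedBefore(true, 0); // false
notifiedAfter(true);     // true
```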
@@ -3,7 +3,7 @@ import { Dialog } from 'primereact/dialog';
import { useCallback, useRef, useState } from 'react';
import { TabPanel, TabView } from 'primereact/tabview';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { OutCommand } from '@/hooks/Mapper/types';
import { OutCommand, UserPermission } from '@/hooks/Mapper/types';
import { CONNECTIONS_CHECKBOXES_PROPS, SIGNATURES_CHECKBOXES_PROPS, SYSTEMS_CHECKBOXES_PROPS } from './constants.ts';
import {
MapSettingsProvider,
@@ -12,7 +12,10 @@ import {
import { WidgetsSettings } from './components/WidgetsSettings';
import { CommonSettings } from './components/CommonSettings';
import { SettingsListItem } from './types.ts';
import { ImportExport } from '@/hooks/Mapper/components/mapRootContent/components/MapSettings/components/ImportExport.tsx';
import { ImportExport } from './components/ImportExport.tsx';
import { ServerSettings } from './components/ServerSettings.tsx';
import { AdminSettings } from './components/AdminSettings.tsx';
import { useMapCheckPermissions } from '@/hooks/Mapper/mapRootProvider/hooks/api';

export interface MapSettingsProps {
visible: boolean;
@@ -24,6 +27,7 @@ export const MapSettingsComp = ({ visible, onHide }: MapSettingsProps) => {
const { outCommand } = useMapRootState();

const { renderSettingItem, setUserRemoteSettings } = useMapSettings();
const isAdmin = useMapCheckPermissions([UserPermission.ADMIN_MAP]);

const refVars = useRef({ outCommand, onHide, visible });
refVars.current = { outCommand, onHide, visible };
@@ -58,7 +62,7 @@ export const MapSettingsComp = ({ visible, onHide }: MapSettingsProps) => {
header="Map user settings"
visible
draggable={false}
style={{ width: '550px' }}
style={{ width: '600px' }}
onShow={handleShow}
onHide={handleHide}
>
@@ -92,6 +96,16 @@ export const MapSettingsComp = ({ visible, onHide }: MapSettingsProps) => {
<TabPanel header="Import/Export" className="h-full" headerClassName={styles.verticalTabHeader}>
<ImportExport />
</TabPanel>

<TabPanel header="Server Settings" className="h-full" headerClassName="color-warn">
<ServerSettings />
</TabPanel>

{isAdmin && (
<TabPanel header="Admin Settings" className="h-full" headerClassName="color-warn">
<AdminSettings />
</TabPanel>
)}
</TabView>
</div>
</div>

@@ -0,0 +1,128 @@
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { Toast } from 'primereact/toast';
import { Button } from 'primereact/button';
import { callToastError, callToastSuccess, callToastWarn } from '@/hooks/Mapper/helpers';
import { OutCommand } from '@/hooks/Mapper/types';
import { ConfirmPopup } from 'primereact/confirmpopup';
import { useConfirmPopup } from '@/hooks/Mapper/hooks';
import { MapUserSettings, RemoteAdminSettingsResponse } from '@/hooks/Mapper/mapRootProvider/types.ts';
import { parseMapUserSettings } from '@/hooks/Mapper/components/helpers';
import fastDeepEqual from 'fast-deep-equal';
import { useDetectSettingsChanged } from '@/hooks/Mapper/components/hooks';

export const AdminSettings = () => {
const {
storedSettings: { getSettingsForExport },
outCommand,
} = useMapRootState();

const settingsChanged = useDetectSettingsChanged();

const [currentRemoteSettings, setCurrentRemoteSettings] = useState<MapUserSettings | null>(null);

const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
const toast = useRef<Toast | null>(null);

const hasSettingsForExport = useMemo(() => !!getSettingsForExport(), [getSettingsForExport]);

const refVars = useRef({ currentRemoteSettings, getSettingsForExport });
refVars.current = { currentRemoteSettings, getSettingsForExport };

useEffect(() => {
const load = async () => {
let res: RemoteAdminSettingsResponse | undefined;
try {
res = await outCommand({ type: OutCommand.getDefaultSettings, data: null });
} catch (error) {
// do nothing
}

if (!res || res.default_settings == null) {
return;
}

setCurrentRemoteSettings(parseMapUserSettings(res.default_settings));
};

load();
}, [outCommand]);

const isDirty = useMemo(() => {
const { currentRemoteSettings, getSettingsForExport } = refVars.current;
const localCurrent = parseMapUserSettings(getSettingsForExport());

return !fastDeepEqual(currentRemoteSettings, localCurrent);
// eslint-disable-next-line
}, [settingsChanged, currentRemoteSettings]);

const handleSync = useCallback(async () => {
const settings = getSettingsForExport();

if (!settings) {
callToastWarn(toast.current, 'No settings to save');

return;
}

let response: { success: boolean } | undefined;

try {
response = await outCommand({
type: OutCommand.saveDefaultSettings,
data: { settings },
});
} catch (err) {
callToastError(toast.current, 'Something went wrong while saving settings');
console.error('ERROR: ', err);
return;
}

if (!response || !response.success) {
callToastError(toast.current, 'Settings not saved - reason unknown');
return;
}

setCurrentRemoteSettings(parseMapUserSettings(settings));

callToastSuccess(toast.current, 'Settings saved successfully');
}, [getSettingsForExport, outCommand]);

return (
<div className="w-full h-full flex flex-col gap-5">
<div className="flex flex-col gap-1">
<div>
<Button
// @ts-ignore
ref={cfRef}
onClick={cfShow}
icon="pi pi-save"
size="small"
severity="danger"
label="Save as Map Default"
className="py-[4px]"
disabled={!hasSettingsForExport || !isDirty}
/>
</div>

{!isDirty && <span className="text-red-500/70 text-[12px]">*Local and remote are identical.</span>}

<span className="text-stone-500 text-[12px]">
*Will save your current settings as the default for all new users of this map. This action will overwrite any
existing default settings.
</span>
</div>

<Toast ref={toast} />

<ConfirmPopup
target={cfRef.current}
visible={cfVisible}
onHide={cfHide}
message="Your settings will overwrite the current default. Are you sure?"
icon="pi pi-exclamation-triangle"
accept={handleSync}
/>
</div>
);
};
@@ -7,9 +7,14 @@ import {
import { useMapSettings } from '@/hooks/Mapper/components/mapRootContent/components/MapSettings/MapSettingsProvider.tsx';
import { SettingsListItem } from '@/hooks/Mapper/components/mapRootContent/components/MapSettings/types.ts';
import { useCallback } from 'react';
import { Button } from 'primereact/button';
import { TooltipPosition, WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit';
import { ConfirmPopup } from 'primereact/confirmpopup';
import { useConfirmPopup } from '@/hooks/Mapper/hooks';

export const CommonSettings = () => {
const { renderSettingItem } = useMapSettings();
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();

const renderSettingsList = useCallback(
(list: SettingsListItem[]) => {
@@ -18,6 +23,8 @@ export const CommonSettings = () => {
[renderSettingItem],
);

const handleResetSettings = () => {};

return (
<div className="flex flex-col h-full gap-1">
<div>
@@ -29,6 +36,33 @@ export const CommonSettings = () => {
<div className="grid grid-cols-[1fr_auto]">{renderSettingItem(MINI_MAP_PLACEMENT)}</div>
<div className="grid grid-cols-[1fr_auto]">{renderSettingItem(PINGS_PLACEMENT)}</div>
<div className="grid grid-cols-[1fr_auto]">{renderSettingItem(THEME_SETTING)}</div>

<div className="border-b-2 border-dotted border-stone-700/50 h-px my-3" />

<div className="grid grid-cols-[1fr_auto]">
<div />
<WdTooltipWrapper content="This is a dangerous action and cannot be undone" position={TooltipPosition.top}>
<Button
// @ts-ignore
ref={cfRef}
className="py-[4px]"
onClick={cfShow}
outlined
size="small"
severity="danger"
label="Reset Settings"
/>
</WdTooltipWrapper>
</div>

<ConfirmPopup
target={cfRef.current}
visible={cfVisible}
onHide={cfHide}
message="All settings for this map will be reset to default."
icon="pi pi-exclamation-triangle"
accept={handleResetSettings}
/>
</div>
);
};

@@ -0,0 +1,90 @@
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { useCallback, useRef, useState } from 'react';
import { Toast } from 'primereact/toast';
import { Button } from 'primereact/button';
import { OutCommand } from '@/hooks/Mapper/types';
import { Divider } from 'primereact/divider';
import { callToastError, callToastSuccess, callToastWarn } from '@/hooks/Mapper/helpers';

type SaveDefaultSettingsReturn = { success: boolean; error: string };

export const DefaultSettings = () => {
const {
outCommand,
storedSettings: { getSettingsForExport },
data: { userPermissions },
} = useMapRootState();

const [loading, setLoading] = useState(false);
const toast = useRef<Toast | null>(null);

const refVars = useRef({ getSettingsForExport, outCommand });
refVars.current = { getSettingsForExport, outCommand };

const handleSaveAsDefault = useCallback(async () => {
const settings = refVars.current.getSettingsForExport();
if (!settings) {
callToastWarn(toast.current, 'No settings to save');
return;
}

setLoading(true);

let response: SaveDefaultSettingsReturn;
try {
response = await refVars.current.outCommand({
type: OutCommand.saveDefaultSettings,
data: { settings },
});
} catch (error) {
console.error('Save default settings error:', error);
callToastError(toast.current, 'Failed to save default settings');
setLoading(false);
return;
}

if (response.success) {
callToastSuccess(toast.current, 'Default settings saved successfully');
setLoading(false);
return;
}

callToastError(toast.current, response.error || 'Failed to save default settings');
setLoading(false);
}, []);

if (!userPermissions?.admin_map) {
return null;
}

return (
<>
<Divider />
<div className="w-full h-full flex flex-col gap-5">
<h3 className="text-lg font-semibold">Default Settings (Admin Only)</h3>

<div className="flex flex-col gap-1">
<div>
<Button
onClick={handleSaveAsDefault}
icon="pi pi-save"
size="small"
severity="danger"
label="Save as Map Default"
className="py-[4px]"
loading={loading}
disabled={loading}
/>
</div>

<span className="text-stone-500 text-[12px]">
*Will save your current settings as the default for all new users of this map. This action will overwrite
any existing default settings.
</span>
</div>

<Toast ref={toast} />
</div>
</>
);
};
@@ -0,0 +1,97 @@
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { useCallback, useEffect, useRef, useState } from 'react';
import { Toast } from 'primereact/toast';
import { parseMapUserSettings } from '@/hooks/Mapper/components/helpers';
import { Button } from 'primereact/button';
import { OutCommand } from '@/hooks/Mapper/types';
import { createDefaultWidgetSettings } from '@/hooks/Mapper/mapRootProvider/helpers/createDefaultWidgetSettings.ts';
import { callToastSuccess } from '@/hooks/Mapper/helpers';
import { ConfirmPopup } from 'primereact/confirmpopup';
import { useConfirmPopup } from '@/hooks/Mapper/hooks';
import { RemoteAdminSettingsResponse } from '@/hooks/Mapper/mapRootProvider/types.ts';

export const ServerSettings = () => {
const {
storedSettings: { applySettings },
outCommand,
} = useMapRootState();

const [hasSettings, setHasSettings] = useState(false);
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
const toast = useRef<Toast | null>(null);

const handleSync = useCallback(async () => {
let res: RemoteAdminSettingsResponse | undefined;
try {
res = await outCommand({ type: OutCommand.getDefaultSettings, data: null });
} catch (error) {
// do nothing
}

if (res?.default_settings == null) {
applySettings(createDefaultWidgetSettings());
return;
}

try {
applySettings(parseMapUserSettings(res.default_settings));
callToastSuccess(toast.current, 'Settings synchronized successfully');
} catch (error) {
applySettings(createDefaultWidgetSettings());
}
}, [applySettings, outCommand]);

useEffect(() => {
const load = async () => {
let res: RemoteAdminSettingsResponse | undefined;
try {
res = await outCommand({ type: OutCommand.getDefaultSettings, data: null });
} catch (error) {
// do nothing
}

if (res?.default_settings == null) {
return;
}

setHasSettings(true);
};

load();
}, [outCommand]);

return (
<div className="w-full h-full flex flex-col gap-5">
<div className="flex flex-col gap-1">
<div>
<Button
// @ts-ignore
ref={cfRef}
onClick={cfShow}
icon="pi pi-file-import"
size="small"
severity="warning"
label="Sync with Default Settings"
className="py-[4px]"
disabled={!hasSettings}
/>
</div>
{!hasSettings && (
<span className="text-red-500/70 text-[12px]">*Default settings have not been set by the map administrator.</span>
)}
<span className="text-stone-500 text-[12px]">*Will apply the admin settings set as the default for this map.</span>
</div>

<Toast ref={toast} />

<ConfirmPopup
target={cfRef.current}
visible={cfVisible}
onHide={cfHide}
message="You will lose your current settings. Are you sure?"
icon="pi pi-exclamation-triangle"
accept={handleSync}
/>
</div>
);
};
@@ -28,6 +28,9 @@ export const WidgetsSettings = ({}: WidgetsSettingsProps) => {
/>
))}
</div>

<div className="border-b-2 border-dotted border-stone-700/50 h-px my-3" />

<div className="grid grid-cols-[1fr_auto]">
<div />
<Button className="py-[4px]" onClick={resetWidgets} outlined size="small" label="Reset Widgets"></Button>

@@ -1,7 +1,7 @@
import { Dialog } from 'primereact/dialog';
import { Button } from 'primereact/button';
import { ConfirmPopup } from 'primereact/confirmpopup';
import { useCallback, useRef, useState } from 'react';
import { useCallback, useRef } from 'react';
import { MapUserSettings } from '@/hooks/Mapper/mapRootProvider/types.ts';
import {
DEFAULT_KILLS_WIDGET_SETTINGS,
@@ -15,6 +15,7 @@ import { DEFAULT_SIGNATURE_SETTINGS } from '@/hooks/Mapper/constants/signatures.
import { Toast } from 'primereact/toast';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { saveTextFile } from '@/hooks/Mapper/utils';
import { useConfirmPopup } from '@/hooks/Mapper/hooks';

const createSettings = function <T>(lsSettings: string | null, defaultValues: T) {
return {
@@ -24,10 +25,7 @@ const createSettings = function <T>(lsSettings: string | null, defaultValues: T)
};

export const OldSettingsDialog = () => {
const cpRemoveBtnRef = useRef<HTMLElement>();
const [cpRemoveVisible, setCpRemoveVisible] = useState(false);
const handleShowCP = useCallback(() => setCpRemoveVisible(true), []);
const handleHideCP = useCallback(() => setCpRemoveVisible(false), []);
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
const toast = useRef<Toast | null>(null);

const {
@@ -143,8 +141,8 @@ export const OldSettingsDialog = () => {
<div className="flex items-center justify-end">
<Button
// @ts-ignore
ref={cpRemoveBtnRef}
onClick={handleShowCP}
ref={cfRef}
onClick={cfShow}
icon="pi pi-exclamation-triangle"
size="small"
severity="warning"
@@ -192,9 +190,9 @@ export const OldSettingsDialog = () => {
</Dialog>

<ConfirmPopup
target={cpRemoveBtnRef.current}
visible={cpRemoveVisible}
onHide={handleHideCP}
target={cfRef.current}
visible={cfVisible}
onHide={cfHide}
message="After clicking, the dialog will disappear. Ready?"
icon="pi pi-exclamation-triangle"
accept={handleProceed}

@@ -13,6 +13,8 @@ import { InputText } from 'primereact/inputtext';
import { IconField } from 'primereact/iconfield';

const itemTemplate = (item: CharacterTypeRaw & WithIsOwnCharacter, options: VirtualScrollerTemplateOptions) => {
const showAllyLogoPlaceholder = options.props.items?.some(x => x.alliance_id != null);

return (
<div
className={clsx(classes.CharacterRow, 'w-full box-border px-2 py-1', {
@@ -22,7 +24,15 @@ const itemTemplate = (item: CharacterTypeRaw & WithIsOwnCharacter, options: Virt
})}
style={{ height: options.props.itemSize + 'px' }}
>
<CharacterCard showCorporationLogo showAllyLogo showSystem showTicker showShip {...item} />
<CharacterCard
showCorporationLogo
showAllyLogo
showAllyLogoPlaceholder={showAllyLogoPlaceholder}
showSystem
showTicker
showShip
{...item}
/>
</div>
);
};

@@ -181,17 +181,20 @@ export const MapWrapper = () => {
ref.current.systemContextProps.systemId && setOpenSettings(ref.current.systemContextProps.systemId);
}, []);

const handleTogglePing = useCallback(async (type: PingType, solar_system_id: string, hasPing: boolean) => {
if (hasPing) {
await outCommand({
type: OutCommand.cancelPing,
data: { type, solar_system_id: solar_system_id },
});
return;
}
const handleTogglePing = useCallback(
async (type: PingType, solar_system_id: string, ping_id: string | undefined, hasPing: boolean) => {
if (hasPing) {
await outCommand({
type: OutCommand.cancelPing,
data: { type, id: ping_id },
});
return;
}

setOpenPing({ type, solar_system_id });
}, []);
setOpenPing({ type, solar_system_id });
},
[],
);

const handleCustomLabelDialog = useCallback(() => {
const { systemContextProps } = ref.current;

@@ -24,6 +24,7 @@ export type CharacterCardProps = {
useSystemsCache?: boolean;
showCorporationLogo?: boolean;
showAllyLogo?: boolean;
showAllyLogoPlaceholder?: boolean;
simpleMode?: boolean;
} & WithIsOwnCharacter &
WithClassName;
@@ -47,6 +48,7 @@ export const CharacterCard = ({
showShipName,
showCorporationLogo,
showAllyLogo,
showAllyLogoPlaceholder,
showTicker,
useSystemsCache,
className,
@@ -217,6 +219,18 @@ export const CharacterCard = ({
/>
</WdTooltipWrapper>
)}

{showAllyLogo && showAllyLogoPlaceholder && !char.alliance_id && (
<WdTooltipWrapper position={TooltipPosition.top} content="No alliance">
<span
className={clsx(
'min-w-[33px] min-h-[33px] w-[33px] h-[33px]',
'flex transition-[border-color,opacity] duration-250 rounded-none',
'wd-bg-default',
)}
/>
</WdTooltipWrapper>
)}
</div>

<div className="flex flex-col flex-grow overflow-hidden w-[50px]">

@@ -2,3 +2,4 @@ export * from './sortWHClasses';
export * from './parseSignatures';
export * from './getSystemById';
export * from './getEveImageUrl';
export * from './toastHelpers';

28
assets/js/hooks/Mapper/helpers/toastHelpers.ts
Normal file
@@ -0,0 +1,28 @@
import { Toast } from 'primereact/toast';

export const callToastWarn = (toast: Toast | null, msg: string, life = 3000) => {
toast?.show({
severity: 'warn',
summary: 'Warning',
detail: msg,
life,
});
};

export const callToastError = (toast: Toast | null, msg: string, life = 3000) => {
toast?.show({
severity: 'error',
summary: 'Error',
detail: msg,
life,
});
};

export const callToastSuccess = (toast: Toast | null, msg: string, life = 3000) => {
toast?.show({
severity: 'success',
summary: 'Success',
detail: msg,
life,
});
};
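A minimal sketch of how these helpers are wired up, matching the call sites elsewhere in this diff (the component name is hypothetical):

```tsx
import { useRef } from 'react';
import { Toast } from 'primereact/toast';
import { callToastSuccess } from '@/hooks/Mapper/helpers';

const SaveButton = () => {
  const toast = useRef<Toast | null>(null);
  return (
    <>
      <button onClick={() => callToastSuccess(toast.current, 'Settings saved successfully')}>Save</button>
      {/* The Toast instance the helpers call .show() against */}
      <Toast ref={toast} />
    </>
  );
};
```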
@@ -3,3 +3,4 @@ export * from './useHotkey';
export * from './usePageVisibility';
export * from './useSkipContextMenu';
export * from './useThrottle';
export * from './useConfirmPopup';

10
assets/js/hooks/Mapper/hooks/useConfirmPopup.ts
Normal file
@@ -0,0 +1,10 @@
import { useCallback, useRef, useState } from 'react';

export const useConfirmPopup = () => {
const cfRef = useRef<HTMLElement>();
const [cfVisible, setCfVisible] = useState(false);
const cfShow = useCallback(() => setCfVisible(true), []);
const cfHide = useCallback(() => setCfVisible(false), []);

return { cfRef, cfVisible, cfShow, cfHide };
};
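The hook bundles the ref/visibility plumbing that each ConfirmPopup call site previously hand-rolled; a minimal consumer assembled from the pattern repeated throughout this diff (DeleteButton and onDelete are hypothetical):

```tsx
import { Button } from 'primereact/button';
import { ConfirmPopup } from 'primereact/confirmpopup';
import { useConfirmPopup } from '@/hooks/Mapper/hooks';

const DeleteButton = ({ onDelete }: { onDelete: () => void }) => {
  const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
  return (
    <>
      {/* @ts-ignore - same ref cast the call sites in this diff use */}
      <Button ref={cfRef} onClick={cfShow} label="Delete" />
      <ConfirmPopup
        target={cfRef.current}
        visible={cfVisible}
        onHide={cfHide}
        message="Are you sure?"
        accept={onDelete}
      />
    </>
  );
};
```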
@@ -131,6 +131,7 @@ export interface MapRootContextProps {
hasOldSettings: boolean;
getSettingsForExport(): string | undefined;
applySettings(settings: MapUserSettings): boolean;
resetSettings(settings: MapUserSettings): void;
checkOldSettings(): void;
};
}
@@ -175,6 +176,7 @@ const MapRootContext = createContext<MapRootContextProps>({
hasOldSettings: false,
getSettingsForExport: () => '',
applySettings: () => false,
resetSettings: () => null,
checkOldSettings: () => null,
},
});
@@ -196,7 +198,7 @@ const MapRootHandlers = forwardRef(({ children }: WithChildren, fwdRef: Forwarde
export const MapRootProvider = ({ children, fwdRef, outCommand }: MapRootProviderProps) => {
const { update, ref } = useContextStore<MapRootData>({ ...INITIAL_DATA });

const storedSettings = useMapUserSettings(ref);
const storedSettings = useMapUserSettings(ref, outCommand);

const { windowsSettings, toggleWidgetVisibility, updateWidgetSettings, resetWidgets } =
useStoreWidgets(storedSettings);

@@ -0,0 +1,30 @@
import { MapUserSettings } from '@/hooks/Mapper/mapRootProvider/types.ts';
import {
DEFAULT_KILLS_WIDGET_SETTINGS,
DEFAULT_ON_THE_MAP_SETTINGS,
DEFAULT_ROUTES_SETTINGS,
DEFAULT_WIDGET_LOCAL_SETTINGS,
getDefaultWidgetProps,
STORED_INTERFACE_DEFAULT_VALUES,
} from '@/hooks/Mapper/mapRootProvider/constants.ts';
import { DEFAULT_SIGNATURE_SETTINGS } from '@/hooks/Mapper/constants/signatures.ts';

// TODO - we need to provide and compare versions
const createWidgetSettingsWithVersion = <T>(settings: T) => {
return {
version: 0,
settings,
};
};

export const createDefaultWidgetSettings = (): MapUserSettings => {
return {
killsWidget: createWidgetSettingsWithVersion(DEFAULT_KILLS_WIDGET_SETTINGS),
localWidget: createWidgetSettingsWithVersion(DEFAULT_WIDGET_LOCAL_SETTINGS),
widgets: createWidgetSettingsWithVersion(getDefaultWidgetProps()),
routes: createWidgetSettingsWithVersion(DEFAULT_ROUTES_SETTINGS),
onTheMap: createWidgetSettingsWithVersion(DEFAULT_ON_THE_MAP_SETTINGS),
signaturesWidget: createWidgetSettingsWithVersion(DEFAULT_SIGNATURE_SETTINGS),
interface: createWidgetSettingsWithVersion(STORED_INTERFACE_DEFAULT_VALUES),
};
};
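Each widget's defaults get the same versioned envelope, so a future migration can compare `version` before reusing stored settings; the resulting shape, roughly:

```ts
// Illustrative: the payloads come from the DEFAULT_* constants above.
const defaults = createDefaultWidgetSettings();
defaults.routes; // -> { version: 0, settings: DEFAULT_ROUTES_SETTINGS }
```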
@@ -0,0 +1,66 @@
import { OutCommand, OutCommandHandler } from '@/hooks/Mapper/types';
import { Dispatch, SetStateAction, useCallback, useEffect, useRef } from 'react';
import {
MapUserSettings,
MapUserSettingsStructure,
RemoteAdminSettingsResponse,
} from '@/hooks/Mapper/mapRootProvider/types.ts';
import { createDefaultWidgetSettings } from '@/hooks/Mapper/mapRootProvider/helpers/createDefaultWidgetSettings.ts';
import { parseMapUserSettings } from '@/hooks/Mapper/components/helpers';

interface UseActualizeRemoteMapSettingsProps {
outCommand: OutCommandHandler;
mapUserSettings: MapUserSettingsStructure;
applySettings: (val: MapUserSettings) => void;
setMapUserSettings: Dispatch<SetStateAction<MapUserSettingsStructure>>;
map_slug: string | null;
}

export const useActualizeRemoteMapSettings = ({
outCommand,
mapUserSettings,
setMapUserSettings,
applySettings,
map_slug,
}: UseActualizeRemoteMapSettingsProps) => {
const refVars = useRef({ applySettings, mapUserSettings, setMapUserSettings, map_slug });
refVars.current = { applySettings, mapUserSettings, setMapUserSettings, map_slug };

const actualizeRemoteMapSettings = useCallback(async () => {
const { applySettings } = refVars.current;

let res: RemoteAdminSettingsResponse | undefined;
try {
res = await outCommand({ type: OutCommand.getDefaultSettings, data: null });
} catch (error) {
// do nothing
}

if (res?.default_settings == null) {
applySettings(createDefaultWidgetSettings());
return;
}

try {
applySettings(parseMapUserSettings(res.default_settings));
} catch (error) {
applySettings(createDefaultWidgetSettings());
}
}, [outCommand]);

useEffect(() => {
const { mapUserSettings } = refVars.current;

// INFO: Do nothing if slug is not set
if (map_slug == null) {
return;
}

// INFO: Do nothing if the user already has data
if (map_slug in mapUserSettings) {
return;
}

actualizeRemoteMapSettings();
}, [actualizeRemoteMapSettings, map_slug]);
};
@@ -1,44 +1,16 @@
import useLocalStorageState from 'use-local-storage-state';
import { MapUserSettings, MapUserSettingsStructure } from '@/hooks/Mapper/mapRootProvider/types.ts';
import {
DEFAULT_KILLS_WIDGET_SETTINGS,
DEFAULT_ON_THE_MAP_SETTINGS,
DEFAULT_ROUTES_SETTINGS,
DEFAULT_WIDGET_LOCAL_SETTINGS,
getDefaultWidgetProps,
STORED_INTERFACE_DEFAULT_VALUES,
} from '@/hooks/Mapper/mapRootProvider/constants.ts';
import { useCallback, useEffect, useRef, useState } from 'react';
import { DEFAULT_SIGNATURE_SETTINGS } from '@/hooks/Mapper/constants/signatures';
import { MapRootData } from '@/hooks/Mapper/mapRootProvider';
import { useSettingsValueAndSetter } from '@/hooks/Mapper/mapRootProvider/hooks/useSettingsValueAndSetter.ts';
import fastDeepEqual from 'fast-deep-equal';

// import { actualizeSettings } from '@/hooks/Mapper/mapRootProvider/helpers';

// TODO - we need provide and compare version
const createWidgetSettingsWithVersion = <T>(settings: T) => {
return {
version: 0,
settings,
};
};

const createDefaultWidgetSettings = (): MapUserSettings => {
return {
killsWidget: createWidgetSettingsWithVersion(DEFAULT_KILLS_WIDGET_SETTINGS),
localWidget: createWidgetSettingsWithVersion(DEFAULT_WIDGET_LOCAL_SETTINGS),
widgets: createWidgetSettingsWithVersion(getDefaultWidgetProps()),
routes: createWidgetSettingsWithVersion(DEFAULT_ROUTES_SETTINGS),
onTheMap: createWidgetSettingsWithVersion(DEFAULT_ON_THE_MAP_SETTINGS),
signaturesWidget: createWidgetSettingsWithVersion(DEFAULT_SIGNATURE_SETTINGS),
interface: createWidgetSettingsWithVersion(STORED_INTERFACE_DEFAULT_VALUES),
};
};
import { OutCommandHandler } from '@/hooks/Mapper/types';
import { useActualizeRemoteMapSettings } from '@/hooks/Mapper/mapRootProvider/hooks/useActualizeRemoteMapSettings.ts';
import { createDefaultWidgetSettings } from '@/hooks/Mapper/mapRootProvider/helpers/createDefaultWidgetSettings.ts';

const EMPTY_OBJ = {};

export const useMapUserSettings = ({ map_slug }: MapRootData) => {
export const useMapUserSettings = ({ map_slug }: MapRootData, outCommand: OutCommandHandler) => {
const [isReady, setIsReady] = useState(false);
const [hasOldSettings, setHasOldSettings] = useState(false);

@@ -49,19 +21,25 @@ export const useMapUserSettings = ({ map_slug }: MapRootData) => {
const ref = useRef({ mapUserSettings, setMapUserSettings, map_slug });
ref.current = { mapUserSettings, setMapUserSettings, map_slug };

useEffect(() => {
const { mapUserSettings, setMapUserSettings } = ref.current;
if (map_slug === null) {
return;
const applySettings = useCallback((settings: MapUserSettings) => {
const { map_slug, mapUserSettings, setMapUserSettings } = ref.current;

if (map_slug == null) {
return false;
}

if (!(map_slug in mapUserSettings)) {
setMapUserSettings({
...mapUserSettings,
[map_slug]: createDefaultWidgetSettings(),
});
if (fastDeepEqual(settings, mapUserSettings[map_slug])) {
return false;
}
}, [map_slug]);

setMapUserSettings(old => ({
...old,
[map_slug]: settings,
}));
return true;
}, []);

useActualizeRemoteMapSettings({ outCommand, applySettings, mapUserSettings, setMapUserSettings, map_slug });

const [interfaceSettings, setInterfaceSettings] = useSettingsValueAndSetter(
mapUserSettings,
@@ -178,23 +156,9 @@ export const useMapUserSettings = ({ map_slug }: MapRootData) => {
return JSON.stringify(ref.current.mapUserSettings[map_slug]);
}, []);

const applySettings = useCallback((settings: MapUserSettings) => {
const { map_slug, mapUserSettings, setMapUserSettings } = ref.current;

if (map_slug == null) {
return false;
}

if (fastDeepEqual(settings, mapUserSettings[map_slug])) {
return false;
}

setMapUserSettings(old => ({
...old,
[map_slug]: settings,
}));
return true;
}, []);
const resetSettings = useCallback(() => {
applySettings(createDefaultWidgetSettings());
}, [applySettings]);

return {
isReady,
@@ -217,6 +181,7 @@ export const useMapUserSettings = ({ map_slug }: MapRootData) => {

getSettingsForExport,
applySettings,
resetSettings,
checkOldSettings,
};
};

@@ -85,3 +85,7 @@ export type MapUserSettings = {
export type MapUserSettingsStructure = {
[mapId: string]: MapUserSettings;
};

export type WdResponse<T> = T;

export type RemoteAdminSettingsResponse = { default_settings?: string };

@@ -269,6 +269,8 @@ export enum OutCommand {
showTracking = 'show_tracking',
getUserSettings = 'get_user_settings',
updateUserSettings = 'update_user_settings',
saveDefaultSettings = 'save_default_settings',
getDefaultSettings = 'get_default_settings',
unlinkSignature = 'unlink_signature',
searchSystems = 'search_systems',
undoDeleteSignatures = 'undo_delete_signatures',

@@ -6,7 +6,7 @@
"scripts": {
"build": "vite build --emptyOutDir false",
"watch": "vite build --watch --minify false --emptyOutDir false --clearScreen true --mode development",
"test": "echo \"Error: no test specified\" && exit 1"
"test": "jest"
},
"engines": {
"node": ">= 18.0.0"
@@ -50,6 +50,7 @@
"@tailwindcss/aspect-ratio": "^0.4.2",
"@tailwindcss/forms": "^0.5.7",
"@tailwindcss/typography": "^0.5.13",
"@types/jest": "^29.5.12",
"@types/lodash.debounce": "^4.0.9",
"@types/lodash.isequal": "^4.5.8",
"@types/react": "^18.3.12",
@@ -59,6 +60,7 @@
"@vitejs/plugin-react": "^4.3.3",
"@vitejs/plugin-react-refresh": "^1.3.6",
"autoprefixer": "^10.4.19",
"babel-jest": "^29.7.0",
"child_process": "^1.0.2",
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
@@ -67,6 +69,7 @@
"eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-react-refresh": "^0.4.6",
"heroicons": "^2.0.18",
"jest": "^29.7.0",
"merge-options": "^3.0.4",
"postcss": "^8.4.38",
"postcss-cli": "^11.0.0",
@@ -74,6 +77,7 @@
"prettier": "^3.2.5",
"sass": "^1.77.2",
"sass-loader": "^14.2.1",
"ts-jest": "^29.1.2",
"typescript": "^5.2.2",
"vite": "^5.0.5",
"vite-plugin-cdn-import": "^1.0.1"

BIN
assets/static/images/news/06-21-webhooks/webhooks-hero.png
Executable file
Binary file not shown.
After Width: | Height: | Size: 1.7 MiB

BIN
assets/static/images/news/07-13-map-duplication/duplicate-map.png
Executable file
Binary file not shown.
After Width: | Height: | Size: 42 KiB

BIN
assets/static/images/news/07-15-api-modernization/api-hero.png
Executable file
Binary file not shown.
After Width: | Height: | Size: 94 KiB

BIN
assets/static/images/news/2025/07-27-settings/admin_settings.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 97 KiB

Binary file not shown.
After Width: | Height: | Size: 112 KiB

Binary file not shown.
After Width: | Height: | Size: 95 KiB

275
config/quality_gates.exs
Normal file
@@ -0,0 +1,275 @@
# Quality Gates Configuration
#
# This file defines the error budget thresholds for the project.
# These are intentionally set high initially to avoid blocking development
# while we work on improving code quality.

defmodule WandererApp.QualityGates do
@moduledoc """
Central configuration for all quality gate thresholds.

## Error Budget Philosophy

We use error budgets to:
1. Allow gradual improvement of code quality
2. Avoid blocking development on legacy issues
3. Provide clear targets for improvement
4. Track progress over time

## Threshold Levels

- **Current**: What we enforce today (relaxed)
- **Target**: Where we want to be (strict)
- **Timeline**: When we plan to tighten the thresholds
"""

@doc """
Returns the current error budget configuration.
"""
def current_thresholds do
%{
# Compilation warnings
compilation: %{
# Increased from 100 to accommodate current state
max_warnings: 500,
target: 0,
# Extended timeline
timeline: "Q3 2025",
description: "Allow existing warnings while we fix them gradually"
},

# Credo code quality issues
credo: %{
# Increased from 50 to accommodate current state
max_issues: 200,
# Increased from 10
max_high_priority: 50,
target_issues: 10,
target_high_priority: 0,
# Extended timeline
timeline: "Q2 2025",
description: "Focus on high-priority issues first"
},

# Dialyzer static analysis
dialyzer: %{
# Allow some errors for now (was 0)
max_errors: 20,
max_warnings: :unlimited,
target_errors: 0,
target_warnings: 0,
# Extended timeline
timeline: "Q4 2025",
description: "Temporarily allow some errors during codebase improvement"
},

# Test coverage
coverage: %{
# Reduced from 70% to accommodate current state
minimum: 50,
target: 90,
# Extended timeline
timeline: "Q3 2025",
description: "Start with 50% coverage, gradually improve to 90%"
},

# Test execution
tests: %{
# Increased from 10 to accommodate current state
max_failures: 50,
# 10% flaky tests allowed (increased)
max_flaky_rate: 0.10,
# 10 minutes (increased from 5)
max_duration_seconds: 600,
target_failures: 0,
# 5 minutes
target_duration_seconds: 300,
# Extended timeline
timeline: "Q2 2025",
description: "Allow more test failures during stabilization phase"
},

# Code formatting
formatting: %{
enforced: true,
auto_fix_in_ci: false,
description: "Strict formatting enforcement from day one"
},

# Documentation
documentation: %{
# 50% of modules documented
min_module_doc_coverage: 0.5,
# 30% of public functions documented
min_function_doc_coverage: 0.3,
target_module_coverage: 0.9,
target_function_coverage: 0.8,
timeline: "Q3 2025",
description: "Gradually improve documentation coverage"
},

# Security
security: %{
sobelow_enabled: false,
max_high_risk: 0,
max_medium_risk: 5,
target_enabled: true,
timeline: "Q2 2025",
description: "Security scanning to be enabled after initial cleanup"
},

# Dependencies
dependencies: %{
max_outdated_major: 10,
max_outdated_minor: 20,
max_vulnerable: 0,
audit_enabled: true,
description: "Keep dependencies reasonably up to date"
},

# Performance
performance: %{
max_slow_tests_seconds: 5,
max_memory_usage_mb: 500,
profiling_enabled: false,
timeline: "Q4 2025",
description: "Performance monitoring to be added later"
}
}
end

@doc """
Returns the configuration for GitHub Actions.
"""
def github_actions_config do
thresholds = current_thresholds()

%{
compilation_warnings: thresholds.compilation.max_warnings,
credo_issues: thresholds.credo.max_issues,
dialyzer_errors: thresholds.dialyzer.max_errors,
coverage_minimum: thresholds.coverage.minimum,
test_max_failures: thresholds.tests.max_failures,
test_timeout_minutes: div(thresholds.tests.max_duration_seconds, 60)
}
end

@doc """
Returns the configuration for mix check.
"""
def mix_check_config do
thresholds = current_thresholds()

[
# Compiler with warnings allowed
{:compiler, "mix compile --warnings-as-errors false"},

# Credo with issue budget
{:credo, "mix credo --strict --max-issues #{thresholds.credo.max_issues}"},

# Dialyzer without halt on warnings
{:dialyzer, "mix dialyzer", exit_status: 0},

# Tests with failure allowance
{:ex_unit, "mix test --max-failures #{thresholds.tests.max_failures}"},

# Formatting is strict
{:formatter, "mix format --check-formatted"},

# Coverage check
{:coverage, "mix coveralls --minimum-coverage #{thresholds.coverage.minimum}"},

# Documentation coverage (optional for now)
{:docs_coverage, false},

# Security scanning (disabled for now)
{:sobelow, false},

# Dependency audit
{:audit, "mix deps.audit", exit_status: 0},

# Doctor check (disabled)
{:doctor, false}
]
end

@doc """
Generates a quality report showing current vs target thresholds.
"""
def quality_report do
thresholds = current_thresholds()

"""
# WandererApp Quality Gates Report

Generated: #{DateTime.utc_now() |> DateTime.to_string()}

## Current Error Budgets vs Targets

| Category | Current Budget | Target Goal | Timeline | Status |
|----------|----------------|-------------|----------|--------|
| Compilation Warnings | ≤#{thresholds.compilation.max_warnings} | #{thresholds.compilation.target} | #{thresholds.compilation.timeline} | 🟡 Relaxed |
| Credo Issues | ≤#{thresholds.credo.max_issues} | #{thresholds.credo.target_issues} | #{thresholds.credo.timeline} | 🟡 Relaxed |
| Dialyzer Errors | ≤#{thresholds.dialyzer.max_errors} | #{thresholds.dialyzer.target_errors} | #{thresholds.dialyzer.timeline} | 🟡 Relaxed |
| Test Coverage | ≥#{thresholds.coverage.minimum}% | #{thresholds.coverage.target}% | #{thresholds.coverage.timeline} | 🟡 Relaxed |
| Test Failures | ≤#{thresholds.tests.max_failures} | #{thresholds.tests.target_failures} | #{thresholds.tests.timeline} | 🟡 Relaxed |
| Code Formatting | Required | Required | - | ✅ Strict |

## Improvement Roadmap

### Q1 2025
- Reduce Credo issues from #{thresholds.credo.max_issues} to #{thresholds.credo.target_issues}
- Achieve zero test failures
- Reduce test execution time to under 3 minutes

### Q2 2025
- Eliminate all compilation warnings
- Increase test coverage to #{thresholds.coverage.target}%
- Enable security scanning with Sobelow

### Q3 2025
- Clean up all Dialyzer warnings
- Achieve 90% documentation coverage

### Q4 2025
- Implement performance monitoring
- Add memory usage tracking

## Quick Commands

```bash
# Check current quality status
mix check

# Run with auto-fix where possible
mix check --fix

# Generate detailed quality report
mix quality.report

# Check specific category
mix credo --strict
mix test --cover
mix dialyzer
```
"""
end

@doc """
Checks if a metric passes the current threshold.
"""
def passes_threshold?(category, metric, value) do
thresholds = current_thresholds()

case {category, metric} do
{:compilation, :warnings} -> value <= thresholds.compilation.max_warnings
{:credo, :issues} -> value <= thresholds.credo.max_issues
{:credo, :high_priority} -> value <= thresholds.credo.max_high_priority
{:dialyzer, :errors} -> value <= thresholds.dialyzer.max_errors
{:coverage, :percentage} -> value >= thresholds.coverage.minimum
{:tests, :failures} -> value <= thresholds.tests.max_failures
{:tests, :duration} -> value <= thresholds.tests.max_duration_seconds
_ -> true
end
end
end
@@ -390,3 +390,26 @@ end
config :wanderer_app, :license_manager,
api_url: System.get_env("LM_API_URL", "http://localhost:4000"),
auth_key: System.get_env("LM_AUTH_KEY")

# SSE Configuration
config :wanderer_app, :sse,
enabled:
config_dir
|> get_var_from_path_or_env("WANDERER_SSE_ENABLED", "true")
|> String.to_existing_atom(),
max_connections_total:
config_dir |> get_int_from_path_or_env("WANDERER_SSE_MAX_CONNECTIONS", 1000),
max_connections_per_map:
config_dir |> get_int_from_path_or_env("SSE_MAX_CONNECTIONS_PER_MAP", 50),
max_connections_per_api_key:
config_dir |> get_int_from_path_or_env("SSE_MAX_CONNECTIONS_PER_API_KEY", 10),
keepalive_interval: config_dir |> get_int_from_path_or_env("SSE_KEEPALIVE_INTERVAL", 30000),
connection_timeout: config_dir |> get_int_from_path_or_env("SSE_CONNECTION_TIMEOUT", 300_000)

# External Events Configuration
config :wanderer_app, :external_events,
webhooks_enabled:
config_dir
|> get_var_from_path_or_env("WANDERER_WEBHOOKS_ENABLED", "true")
|> String.to_existing_atom(),
webhook_timeout_ms: config_dir |> get_int_from_path_or_env("WANDERER_WEBHOOK_TIMEOUT_MS", 15000)

@@ -8,15 +8,23 @@ import Config
config :wanderer_app, WandererApp.Repo,
username: "postgres",
password: "postgres",
hostname: "localhost",
hostname: System.get_env("DB_HOST", "localhost"),
database: "wanderer_test#{System.get_env("MIX_TEST_PARTITION")}",
pool: Ecto.Adapters.SQL.Sandbox,
pool_size: 10
pool_size: 20,
ownership_timeout: 60_000,
timeout: 60_000

# Set environment variable before config runs to ensure character API is enabled in tests
System.put_env("WANDERER_CHARACTER_API_DISABLED", "false")

config :wanderer_app,
ddrt: Test.DDRTMock,
logger: Test.LoggerMock,
pubsub_client: Test.PubSubMock
pubsub_client: Test.PubSubMock,
cached_info: WandererApp.CachedInfo.Mock,
character_api_disabled: false,
environment: :test

# We don't run a server during test. If one is required,
# you can enable the server option below.
@@ -36,3 +44,8 @@ config :logger, level: :warning

# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime

# Configure MIME types for testing, including XML for error response contract tests
config :mime, :types, %{
"application/xml" => ["xml"]
}

25 coveralls.json Normal file
@@ -0,0 +1,25 @@
{
  "coverage_options": {
    "treat_no_relevant_lines_as_covered": true,
    "output_dir": "cover/",
    "template_path": "cover/coverage.html.eex",
    "minimum_coverage": 70
  },
  "terminal_options": {
    "file_column_width": 40
  },
  "html_options": {
    "output_dir": "cover/"
  },
  "skip_files": [
    "test/",
    "lib/wanderer_app_web.ex",
    "lib/wanderer_app.ex",
    "lib/wanderer_app/application.ex",
    "lib/wanderer_app/release.ex",
    "lib/wanderer_app_web/endpoint.ex",
    "lib/wanderer_app_web/telemetry.ex",
    "lib/wanderer_app_web/gettext.ex",
    "priv/"
  ]
}
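This is the standard excoveralls configuration file, so the usual coverage tasks pick up the 70% floor:

    mix coveralls        # console report; the run fails below minimum_coverage
    mix coveralls.html   # writes an HTML report into cover/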
126 lib/mix/tasks/test.setup.ex Normal file
@@ -0,0 +1,126 @@
defmodule Mix.Tasks.Test.Setup do
  @moduledoc """
  Sets up the test database environment.

  This task will:
  - Create the test database if it doesn't exist
  - Run all migrations
  - Verify the setup is correct

  ## Usage

      mix test.setup

  ## Options

      --force   Drop the existing test database and recreate it
      --quiet   Reduce output verbosity
      --seed    Seed the database with test fixtures after setup

  ## Examples

      mix test.setup
      mix test.setup --force
      mix test.setup --seed
      mix test.setup --force --seed --quiet

  """

  use Mix.Task

  alias WandererApp.DatabaseSetup

  @shortdoc "Sets up the test database environment"

  @impl Mix.Task
  def run(args) do
    # Parse options
    {opts, _} =
      OptionParser.parse!(args,
        strict: [force: :boolean, quiet: :boolean, seed: :boolean],
        aliases: [f: :force, q: :quiet, s: :seed]
      )

    # Configure logger level based on quiet option
    if opts[:quiet] do
      Logger.configure(level: :warning)
    else
      Logger.configure(level: :info)
    end

    # Set the environment to test
    Mix.env(:test)

    try do
      # Load the application configuration
      Mix.Task.run("loadconfig")

      # Start the application
      {:ok, _} = Application.ensure_all_started(:wanderer_app)

      if opts[:force] do
        Mix.shell().info("🔄 Forcing database recreation...")
        _ = DatabaseSetup.drop_database()
      end

      case DatabaseSetup.setup_test_database() do
        :ok ->
          if opts[:seed] do
            Mix.shell().info("🌱 Seeding test data...")

            case DatabaseSetup.seed_test_data() do
              :ok ->
                Mix.shell().info("✅ Test database setup and seeding completed successfully!")

              {:error, reason} ->
                Mix.shell().error("❌ Test data seeding failed: #{inspect(reason)}")
                System.halt(1)
            end
          else
            Mix.shell().info("✅ Test database setup completed successfully!")
          end

        {:error, reason} ->
          Mix.shell().error("❌ Test database setup failed: #{inspect(reason)}")
          print_troubleshooting_help()
          System.halt(1)
      end
    rescue
      error ->
        Mix.shell().error("❌ Unexpected error during database setup: #{inspect(error)}")
        print_troubleshooting_help()
        System.halt(1)
    end
  end

  defp print_troubleshooting_help do
    Mix.shell().info("""

    🔧 Troubleshooting Tips:

    1. Ensure PostgreSQL is running:
       • On macOS: brew services start postgresql
       • On Ubuntu: sudo service postgresql start
       • Using Docker: docker run --name postgres -e POSTGRES_PASSWORD=postgres -p 5432:5432 -d postgres

    2. Check database configuration in config/test.exs:
       • Username: postgres
       • Password: postgres
       • Host: localhost
       • Port: 5432

    3. Verify database permissions:
       • Ensure the postgres user can create databases
       • Try connecting manually: psql -U postgres -h localhost

    4. For connection refused errors:
       • Check if PostgreSQL is listening on the correct port
       • Verify firewall settings

    5. Force recreation if corrupted:
       • Run: mix test.setup --force

    📚 For more help, see: https://hexdocs.pm/ecto/Ecto.Adapters.Postgres.html
    """)
  end
end
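One possible wiring (an assumption, since the project's mix.exs aliases are not shown in this diff) is to chain the task in front of the test runner so a bare mix test always sees a prepared database:

    # Hypothetical mix.exs snippet:
    defp aliases do
      [
        test: ["test.setup --quiet", "test"]
      ]
    end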
331 lib/mix/tasks/test_stability.ex Normal file
@@ -0,0 +1,331 @@
defmodule Mix.Tasks.Test.Stability do
  @moduledoc """
  Runs tests multiple times to detect flaky tests.

  ## Usage

      mix test.stability
      mix test.stability --runs 10
      mix test.stability --runs 5 --file test/specific_test.exs
      mix test.stability --tag flaky
      mix test.stability --detect --threshold 0.95

  ## Options

    * `--runs` - Number of times to run tests (default: 5)
    * `--file` - Specific test file to check
    * `--tag` - Only run tests with specific tag
    * `--detect` - Detection mode, identifies flaky tests
    * `--threshold` - Success rate threshold for detection (default: 0.95)
    * `--parallel` - Run iterations in parallel
    * `--report` - Generate detailed report file
  """

  use Mix.Task

  @shortdoc "Detect flaky tests by running them multiple times"

  @default_runs 5
  @default_threshold 0.95

  def run(args) do
    {opts, test_args, _} =
      OptionParser.parse(args,
        switches: [
          runs: :integer,
          file: :string,
          tag: :string,
          detect: :boolean,
          threshold: :float,
          parallel: :boolean,
          report: :string
        ],
        aliases: [
          r: :runs,
          f: :file,
          t: :tag,
          d: :detect,
          p: :parallel
        ]
      )

    runs = Keyword.get(opts, :runs, @default_runs)
    threshold = Keyword.get(opts, :threshold, @default_threshold)
    detect_mode = Keyword.get(opts, :detect, false)
    parallel = Keyword.get(opts, :parallel, false)
    report_file = Keyword.get(opts, :report)

    Mix.shell().info("🔍 Running test stability check...")
    Mix.shell().info(" Iterations: #{runs}")
    Mix.shell().info(" Threshold: #{Float.round(threshold * 100, 1)}%")
    Mix.shell().info("")

    # Build test command
    test_cmd = build_test_command(opts, test_args)

    # Run tests multiple times
    results =
      if parallel do
        run_tests_parallel(test_cmd, runs)
      else
        run_tests_sequential(test_cmd, runs)
      end

    # Analyze results
    analysis = analyze_results(results, threshold)

    # Display results
    display_results(analysis, detect_mode)

    # Generate report if requested
    if report_file do
      generate_report(analysis, report_file)
    end

    # Exit with appropriate code
    if analysis.flaky_count > 0 and detect_mode do
      Mix.shell().error("\n❌ Found #{analysis.flaky_count} flaky tests!")
      exit({:shutdown, 1})
    else
      Mix.shell().info("\n✅ Test stability check complete")
    end
  end

  defp build_test_command(opts, test_args) do
    cmd_parts = ["test"]

    cmd_parts =
      if file = Keyword.get(opts, :file) do
        cmd_parts ++ [file]
      else
        cmd_parts
      end

    cmd_parts =
      if tag = Keyword.get(opts, :tag) do
        cmd_parts ++ ["--only", tag]
      else
        cmd_parts
      end

    cmd_parts ++ test_args
  end

  defp run_tests_sequential(test_cmd, runs) do
    for i <- 1..runs do
      Mix.shell().info("Running iteration #{i}/#{runs}...")

      start_time = System.monotonic_time(:millisecond)

      # Capture test output
      {output, exit_code} =
        System.cmd("mix", test_cmd,
          stderr_to_stdout: true,
          env: [{"MIX_ENV", "test"}]
        )

      duration = System.monotonic_time(:millisecond) - start_time

      # Parse test results
      test_results = parse_test_output(output)

      %{
        iteration: i,
        exit_code: exit_code,
        duration: duration,
        output: output,
        tests: test_results.tests,
        failures: test_results.failures,
        failed_tests: test_results.failed_tests
      }
    end
  end

  defp run_tests_parallel(test_cmd, runs) do
    Mix.shell().info("Running #{runs} iterations in parallel...")

    tasks =
      for i <- 1..runs do
        Task.async(fn ->
          start_time = System.monotonic_time(:millisecond)

          {output, exit_code} =
            System.cmd("mix", test_cmd,
              stderr_to_stdout: true,
              env: [{"MIX_ENV", "test"}]
            )

          duration = System.monotonic_time(:millisecond) - start_time
          test_results = parse_test_output(output)

          %{
            iteration: i,
            exit_code: exit_code,
            duration: duration,
            output: output,
            tests: test_results.tests,
            failures: test_results.failures,
            failed_tests: test_results.failed_tests
          }
        end)
      end

    Task.await_many(tasks, :infinity)
  end

  defp parse_test_output(output) do
    lines = String.split(output, "\n")

    # Extract test count and failures
    test_summary = Enum.find(lines, &String.contains?(&1, "test"))

    {tests, failures} =
      case Regex.run(~r/(\d+) tests?, (\d+) failures?/, test_summary || "") do
        [_, tests, failures] ->
          {String.to_integer(tests), String.to_integer(failures)}

        _ ->
          {0, 0}
      end

    # Extract failed test names
    failed_tests = extract_failed_tests(output)

    %{
      tests: tests,
      failures: failures,
      failed_tests: failed_tests
    }
  end

  defp extract_failed_tests(output) do
    output
    |> String.split("\n")
    # More precise filtering for actual test failures
    |> Enum.filter(
      &(String.contains?(&1, "test ") and
          (String.contains?(&1, "FAILED") or String.contains?(&1, "ERROR") or
             Regex.match?(~r/^\s*\d+\)\s+test/, &1)))
    )
    |> Enum.map(&extract_test_name/1)
    |> Enum.reject(&is_nil/1)
  end

  defp extract_test_name(line) do
    case Regex.run(~r/test (.+) \((.+)\)/, line) do
      [_, name, module] -> "#{module}: #{name}"
      _ -> nil
    end
  end
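  # For example, an ExUnit failure header such as
  #   "  1) test renders the map (WandererAppWeb.MapTest)"
  # (a hypothetical test) is normalized to "WandererAppWeb.MapTest: renders the map".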

  defp analyze_results(results, threshold) do
    total_runs = length(results)

    # Group failures by test name
    all_failures =
      results
      |> Enum.flat_map(& &1.failed_tests)
      |> Enum.frequencies()

    # Identify flaky tests
    flaky_tests =
      all_failures
      |> Enum.filter(fn {_test, fail_count} ->
        success_rate = (total_runs - fail_count) / total_runs
        success_rate < threshold and success_rate > 0
      end)
      |> Enum.map(fn {test, fail_count} ->
        success_rate = (total_runs - fail_count) / total_runs

        %{
          test: test,
          failures: fail_count,
          success_rate: success_rate,
          failure_rate: fail_count / total_runs
        }
      end)
      |> Enum.sort_by(& &1.failure_rate, :desc)

    # Calculate statistics
    total_tests = results |> Enum.map(& &1.tests) |> Enum.max(fn -> 0 end)
    avg_duration = results |> Enum.map(& &1.duration) |> average()
    success_runs = Enum.count(results, &(&1.exit_code == 0))

    %{
      total_runs: total_runs,
      total_tests: total_tests,
      success_runs: success_runs,
      failed_runs: total_runs - success_runs,
      success_rate: success_runs / total_runs,
      avg_duration: avg_duration,
      flaky_tests: flaky_tests,
      flaky_count: length(flaky_tests),
      all_failures: all_failures
    }
  end
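  # Worked example: with --runs 5 and the default 0.95 threshold, one failure
  # gives success_rate (5 - 1) / 5 = 0.8, which is below 0.95 but above 0, so
  # the test is flagged flaky; failing all 5 runs gives 0.0 and is excluded,
  # since that test is consistently broken rather than flaky.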

  defp average([]), do: 0
  defp average(list), do: Enum.sum(list) / length(list)

  defp display_results(analysis, detect_mode) do
    Mix.shell().info("\n📊 Test Stability Results")
    Mix.shell().info("=" |> String.duplicate(50))

    Mix.shell().info("\nSummary:")
    Mix.shell().info(" Total test runs: #{analysis.total_runs}")
    Mix.shell().info(" Successful runs: #{analysis.success_runs}")
    Mix.shell().info(" Failed runs: #{analysis.failed_runs}")
    Mix.shell().info(" Overall success rate: #{format_percentage(analysis.success_rate)}")
    Mix.shell().info(" Average duration: #{Float.round(analysis.avg_duration / 1000, 2)}s")

    if analysis.flaky_count > 0 do
      Mix.shell().info("\n⚠️ Flaky Tests Detected:")
      Mix.shell().info("-" |> String.duplicate(50))

      for test <- analysis.flaky_tests do
        Mix.shell().info("\n #{test.test}")
        Mix.shell().info(" Failure rate: #{format_percentage(test.failure_rate)}")
        Mix.shell().info(" Failed #{test.failures} out of #{analysis.total_runs} runs")
      end
    else
      Mix.shell().info("\n✅ No flaky tests detected!")
    end

    if not detect_mode and map_size(analysis.all_failures) > 0 do
      Mix.shell().info("\n📝 All Test Failures:")
      Mix.shell().info("-" |> String.duplicate(50))

      for {test, count} <- analysis.all_failures do
        percentage = count / analysis.total_runs
        Mix.shell().info(" #{test}: #{count} failures (#{format_percentage(percentage)})")
      end
    end
  end

  defp format_percentage(rate) do
    "#{Float.round(rate * 100, 1)}%"
  end

  defp generate_report(analysis, report_file) do
    timestamp = DateTime.utc_now() |> DateTime.to_string()

    report = %{
      timestamp: timestamp,
      summary: %{
        total_runs: analysis.total_runs,
        total_tests: analysis.total_tests,
        success_runs: analysis.success_runs,
        failed_runs: analysis.failed_runs,
        success_rate: analysis.success_rate,
        avg_duration_ms: analysis.avg_duration
      },
      flaky_tests: analysis.flaky_tests,
      all_failures: analysis.all_failures
    }

    json = Jason.encode!(report, pretty: true)
    File.write!(report_file, json)

    Mix.shell().info("\n📄 Report written to: #{report_file}")
  end
end

@@ -1,7 +1,13 @@
defmodule WandererApp.Api do
  @moduledoc false

  use Ash.Domain
  use Ash.Domain,
    extensions: [AshJsonApi.Domain]

  json_api do
    prefix "/api/v1"
    log_errors?(true)
  end

  resources do
    resource WandererApp.Api.AccessList
@@ -22,6 +28,7 @@ defmodule WandererApp.Api do
    resource WandererApp.Api.MapSubscription
    resource WandererApp.Api.MapTransaction
    resource WandererApp.Api.MapUserSettings
    resource WandererApp.Api.MapDefaultSettings
    resource WandererApp.Api.User
    resource WandererApp.Api.ShipTypeInfo
    resource WandererApp.Api.UserActivity
@@ -30,5 +37,6 @@ defmodule WandererApp.Api do
    resource WandererApp.Api.License
    resource WandererApp.Api.MapPing
    resource WandererApp.Api.MapInvite
    resource WandererApp.Api.MapWebhookSubscription
  end
end

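With the /api/v1 prefix here and the per-resource routes added below, maps become reachable as a JSON:API collection. A hedged request sketch (the host is hypothetical, and any authentication this API requires is not visible in this diff):

    curl -H "Accept: application/vnd.api+json" \
         https://wanderer.example.com/api/v1/maps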
@@ -3,13 +3,32 @@ defmodule WandererApp.Api.AccessList do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("access_lists_v1")
end

json_api do
  type "access_lists"

  includes([:owner, :members])

  derive_filter?(true)
  derive_sort?(true)

  routes do
    base("/access_lists")
    get(:read)
    index :read
    post(:new)
    patch(:update)
    delete(:destroy)
  end
end

code_interface do
  define(:create, action: :create)
  define(:available, action: :available)
@@ -79,8 +98,11 @@ defmodule WandererApp.Api.AccessList do
relationships do
  belongs_to :owner, WandererApp.Api.Character do
    attribute_writable? true
    public? true
  end

  has_many :members, WandererApp.Api.AccessListMember
  has_many :members, WandererApp.Api.AccessListMember do
    public? true
  end
end
end

@@ -3,13 +3,32 @@ defmodule WandererApp.Api.AccessListMember do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("access_list_members_v1")
end

json_api do
  type "access_list_members"

  includes([:access_list])

  derive_filter?(true)
  derive_sort?(true)

  routes do
    base("/access_list_members")
    get(:read)
    index :read
    post(:create)
    patch(:update_role)
    delete(:destroy)
  end
end

code_interface do
  define(:create, action: :create)
  define(:update_role, action: :update_role)
@@ -101,6 +120,7 @@ defmodule WandererApp.Api.AccessListMember do
relationships do
  belongs_to :access_list, WandererApp.Api.AccessList do
    attribute_writable? true
    public? true
  end
end

@@ -12,7 +12,7 @@ defmodule WandererApp.Api.Changes.SlugifyName do
defp maybe_slugify_name(changeset) do
  case Changeset.get_attribute(changeset, :slug) do
    slug when is_binary(slug) ->
      Changeset.change_attribute(changeset, :slug, Slug.slugify(slug))
      Changeset.force_change_attribute(changeset, :slug, Slug.slugify(slug))

    _ ->
      changeset

@@ -3,13 +3,44 @@ defmodule WandererApp.Api.Map do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

alias Ash.Resource.Change.Builtins

postgres do
  repo(WandererApp.Repo)
  table("maps_v1")
end

json_api do
  type "maps"

  # Include relationships for compound documents
  includes([
    :owner,
    :characters,
    :acls
  ])

  # Enable filtering and sorting
  derive_filter?(true)
  derive_sort?(true)

  # Routes configuration
  routes do
    base("/maps")
    get(:read)
    index :read
    post(:new)
    patch(:update)
    delete(:destroy)

    # Custom action for map duplication
    post(:duplicate, route: "/:id/duplicate")
  end
end

code_interface do
  define(:available, action: :available)
  define(:get_map_by_slug, action: :by_slug, args: [:slug])
@@ -22,11 +53,14 @@ defmodule WandererApp.Api.Map do
  define(:assign_owner, action: :assign_owner)
  define(:mark_as_deleted, action: :mark_as_deleted)
  define(:update_api_key, action: :update_api_key)
  define(:toggle_webhooks, action: :toggle_webhooks)

  define(:by_id,
    get_by: [:id],
    action: :read
  )

  define(:duplicate, action: :duplicate)
end

calculations do
@@ -127,6 +161,86 @@ defmodule WandererApp.Api.Map do
update :update_api_key do
  accept [:public_api_key]
end

update :toggle_webhooks do
  accept [:webhooks_enabled]
end

create :duplicate do
  accept [:name, :description, :scope, :only_tracked_characters]

  argument :source_map_id, :uuid, allow_nil?: false
  argument :copy_acls, :boolean, default: true
  argument :copy_user_settings, :boolean, default: true
  argument :copy_signatures, :boolean, default: true

  # Set defaults from source map before creation
  change fn changeset, context ->
    source_map_id = Ash.Changeset.get_argument(changeset, :source_map_id)

    case WandererApp.Api.Map.by_id(source_map_id) do
      {:ok, source_map} ->
        # Use provided description or fall back to source map description
        description =
          Ash.Changeset.get_attribute(changeset, :description) || source_map.description

        changeset
        |> Ash.Changeset.change_attribute(:description, description)
        |> Ash.Changeset.change_attribute(:scope, source_map.scope)
        |> Ash.Changeset.change_attribute(
          :only_tracked_characters,
          source_map.only_tracked_characters
        )
        |> Ash.Changeset.change_attribute(:owner_id, context.actor.id)
        |> Ash.Changeset.change_attribute(
          :slug,
          generate_unique_slug(Ash.Changeset.get_attribute(changeset, :name))
        )

      {:error, _} ->
        Ash.Changeset.add_error(changeset,
          field: :source_map_id,
          message: "Source map not found"
        )
    end
  end

  # Copy related data after creation
  change Builtins.after_action(fn changeset, new_map, context ->
    source_map_id = Ash.Changeset.get_argument(changeset, :source_map_id)
    copy_acls = Ash.Changeset.get_argument(changeset, :copy_acls)
    copy_user_settings = Ash.Changeset.get_argument(changeset, :copy_user_settings)
    copy_signatures = Ash.Changeset.get_argument(changeset, :copy_signatures)

    case WandererApp.Map.Operations.Duplication.duplicate_map(
           source_map_id,
           new_map,
           copy_acls: copy_acls,
           copy_user_settings: copy_user_settings,
           copy_signatures: copy_signatures
         ) do
      {:ok, _result} ->
        {:ok, new_map}

      {:error, error} ->
        {:error, error}
    end
  end)
end
end

# Generate a unique slug from map name
defp generate_unique_slug(name) do
  base_slug =
    name
    |> String.downcase()
    |> String.replace(~r/[^a-z0-9\s-]/, "")
    |> String.replace(~r/\s+/, "-")
    |> String.trim("-")

  # Add timestamp to ensure uniqueness
  timestamp = System.system_time(:millisecond) |> Integer.to_string()
  "#{base_slug}-#{timestamp}"
end
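# For a hypothetical input, generate_unique_slug("My Home Chain!") yields
# something like "my-home-chain-1733450000000"; the millisecond timestamp
# suffix is what keeps slugs of duplicated maps unique.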

attributes do
@@ -134,6 +248,7 @@ defmodule WandererApp.Api.Map do

attribute :name, :string do
  allow_nil? false
  public? true
  constraints trim?: false, max_length: 20, min_length: 3, allow_empty?: false
end

@@ -143,8 +258,13 @@ defmodule WandererApp.Api.Map do
  constraints trim?: false, max_length: 40, min_length: 3, allow_empty?: false
end

attribute :description, :string
attribute :personal_note, :string
attribute :description, :string do
  public? true
end

attribute :personal_note, :string do
  public? true
end

attribute :public_api_key, :string do
  allow_nil? true
@@ -158,6 +278,7 @@ defmodule WandererApp.Api.Map do

attribute :scope, :atom do
  default "wormholes"
  public? true

  constraints(
    one_of: [
@@ -185,6 +306,12 @@ defmodule WandererApp.Api.Map do
  allow_nil? true
end

attribute :webhooks_enabled, :boolean do
  default(false)
  allow_nil?(false)
  public?(true)
end

create_timestamp(:inserted_at)
update_timestamp(:updated_at)
end
@@ -196,20 +323,25 @@ defmodule WandererApp.Api.Map do
relationships do
  belongs_to :owner, WandererApp.Api.Character do
    attribute_writable? true
    public? true
  end

  many_to_many :characters, WandererApp.Api.Character do
    through WandererApp.Api.MapCharacterSettings
    source_attribute_on_join_resource :map_id
    destination_attribute_on_join_resource :character_id
    public? true
  end

  many_to_many :acls, WandererApp.Api.AccessList do
    through WandererApp.Api.MapAccessList
    source_attribute_on_join_resource :map_id
    destination_attribute_on_join_resource :access_list_id
    public? true
  end

  has_many :transactions, WandererApp.Api.MapTransaction
  has_many :transactions, WandererApp.Api.MapTransaction do
    public? false
  end
end
end

@@ -3,19 +3,56 @@ defmodule WandererApp.Api.MapAccessList do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_access_lists_v1")
end

json_api do
  type "map_access_lists"

  # Handle composite primary key
  primary_key do
    keys([:id])
  end

  includes([
    :map,
    :access_list
  ])

  # Enable automatic filtering and sorting
  derive_filter?(true)
  derive_sort?(true)

  routes do
    base("/map_access_lists")

    get(:read)
    index :read
    post(:create)
    patch(:update)
    delete(:destroy)

    # Custom routes for specific queries
    get(:read_by_map, route: "/by_map/:map_id")
    get(:read_by_acl, route: "/by_acl/:acl_id")
  end
end

code_interface do
  define(:create, action: :create)

  define(:read_by_map,
    action: :read_by_map
  )

  define(:read_by_acl,
    action: :read_by_acl
  )
end

actions do
@@ -30,6 +67,11 @@ defmodule WandererApp.Api.MapAccessList do
  argument(:map_id, :string, allow_nil?: false)
  filter(expr(map_id == ^arg(:map_id)))
end

read :read_by_acl do
  argument(:acl_id, :string, allow_nil?: false)
  filter(expr(access_list_id == ^arg(:acl_id)))
end
end

attributes do
@@ -40,8 +82,12 @@ defmodule WandererApp.Api.MapAccessList do
end

relationships do
  belongs_to :map, WandererApp.Api.Map, primary_key?: true, allow_nil?: false
  belongs_to :access_list, WandererApp.Api.AccessList, primary_key?: true, allow_nil?: false
  belongs_to :map, WandererApp.Api.Map, primary_key?: true, allow_nil?: false, public?: true

  belongs_to :access_list, WandererApp.Api.AccessList,
    primary_key?: true,
    allow_nil?: false,
    public?: true
end

postgres do

@@ -4,7 +4,7 @@ defmodule WandererApp.Api.MapCharacterSettings do
use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer,
  extensions: [AshCloak]
  extensions: [AshCloak, AshJsonApi.Resource]

@derive {Jason.Encoder,
 only: [
@@ -22,23 +22,39 @@ defmodule WandererApp.Api.MapCharacterSettings do
  table("map_character_settings_v1")
end

code_interface do
  define(:create, action: :create)
  define(:destroy, action: :destroy)
  define(:update, action: :update)
json_api do
  type "map_character_settings"

  includes([:map, :character])

  derive_filter?(true)
  derive_sort?(true)

  primary_key do
    keys([:id])
  end

  routes do
    base("/map_character_settings")
    get(:read)
    index :read
  end
end

code_interface do
  define(:read_by_map, action: :read_by_map)
  define(:read_by_map_and_character, action: :read_by_map_and_character)
  define(:by_map_filtered, action: :by_map_filtered)
  define(:tracked_by_map_filtered, action: :tracked_by_map_filtered)
  define(:tracked_by_character, action: :tracked_by_character)
  define(:tracked_by_map_all, action: :tracked_by_map_all)

  define(:create, action: :create)
  define(:update, action: :update)
  define(:track, action: :track)
  define(:untrack, action: :untrack)

  define(:follow, action: :follow)
  define(:unfollow, action: :unfollow)
  define(:destroy, action: :destroy)
end

actions do
@@ -232,8 +248,12 @@ defmodule WandererApp.Api.MapCharacterSettings do
end

relationships do
  belongs_to :map, WandererApp.Api.Map, primary_key?: true, allow_nil?: false
  belongs_to :character, WandererApp.Api.Character, primary_key?: true, allow_nil?: false
  belongs_to :map, WandererApp.Api.Map, primary_key?: true, allow_nil?: false, public?: true

  belongs_to :character, WandererApp.Api.Character,
    primary_key?: true,
    allow_nil?: false,
    public?: true
end

identities do

@@ -3,15 +3,35 @@ defmodule WandererApp.Api.MapConnection do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_chain_v1")
end

json_api do
  type "map_connections"

  includes([:map])

  derive_filter?(true)
  derive_sort?(true)

  routes do
    base("/map_connections")
    get(:read)
    index :read
    post(:create)
    patch(:update)
    delete(:destroy)
  end
end

code_interface do
  define(:create, action: :create)
  define(:update, action: :update)

  define(:by_id,
    get_by: [:id],
@@ -39,7 +59,13 @@ defmodule WandererApp.Api.MapConnection do
  :solar_system_source,
  :solar_system_target,
  :type,
  :ship_size_type
  :ship_size_type,
  :mass_status,
  :time_status,
  :wormhole_type,
  :count_of_passage,
  :locked,
  :custom_info
]

defaults [:create, :read, :update, :destroy]
@@ -169,6 +195,7 @@ defmodule WandererApp.Api.MapConnection do
relationships do
  belongs_to :map, WandererApp.Api.Map do
    attribute_writable? true
    public? true
  end
end
end

145 lib/wanderer_app/api/map_default_settings.ex Normal file
@@ -0,0 +1,145 @@
defmodule WandererApp.Api.MapDefaultSettings do
  @moduledoc """
  Resource for storing default map settings that admins can configure.
  These settings will be applied to new users when they first access the map.
  """

  use Ash.Resource,
    domain: WandererApp.Api,
    data_layer: AshPostgres.DataLayer,
    extensions: [AshJsonApi.Resource]

  postgres do
    repo(WandererApp.Repo)
    table("map_default_settings")
  end

  json_api do
    type "map_default_settings"

    includes([
      :map,
      :created_by,
      :updated_by
    ])

    routes do
      base("/map_default_settings")

      get(:read)
      index(:read)
      post(:create)
      patch(:update)
      delete(:destroy)
    end
  end

  code_interface do
    define(:create, action: :create)
    define(:update, action: :update)
    define(:destroy, action: :destroy)
    define(:get_by_map_id, action: :get_by_map_id)
  end

  actions do
    default_accept [
      :map_id,
      :settings
    ]

    defaults [:read, :destroy]

    create :create do
      primary?(true)
      accept [:map_id, :settings]

      change relate_actor(:created_by)
      change relate_actor(:updated_by)

      change fn changeset, _context ->
        changeset
        |> validate_json_settings()
      end
    end

    update :update do
      primary?(true)
      accept [:settings]

      # Required for managing relationships
      require_atomic? false

      change relate_actor(:updated_by)

      change fn changeset, _context ->
        changeset
        |> validate_json_settings()
      end
    end

    read :get_by_map_id do
      argument :map_id, :uuid, allow_nil?: false

      filter expr(map_id == ^arg(:map_id))

      prepare fn query, _context ->
        Ash.Query.limit(query, 1)
      end
    end
  end

  attributes do
    uuid_primary_key :id

    attribute :settings, :string do
      allow_nil? false
      constraints min_length: 2
      description "JSON string containing the default map settings"
    end

    create_timestamp(:inserted_at)
    update_timestamp(:updated_at)
  end

  relationships do
    belongs_to :map, WandererApp.Api.Map do
      primary_key? false
      allow_nil? false
      public? true
    end

    belongs_to :created_by, WandererApp.Api.Character do
      allow_nil? true
      public? true
    end

    belongs_to :updated_by, WandererApp.Api.Character do
      allow_nil? true
      public? true
    end
  end

  identities do
    identity :unique_map_settings, [:map_id]
  end

  defp validate_json_settings(changeset) do
    case Ash.Changeset.get_attribute(changeset, :settings) do
      nil ->
        changeset

      settings ->
        case Jason.decode(settings) do
          {:ok, _} ->
            changeset

          {:error, _} ->
            Ash.Changeset.add_error(
              changeset,
              field: :settings,
              message: "must be valid JSON"
            )
        end
    end
  end
end
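A hedged usage sketch via the code interface above (the settings payload, map, and actor are hypothetical; settings travels as a JSON string and is checked with Jason.decode/1 by validate_json_settings/1):

    {:ok, defaults} =
      WandererApp.Api.MapDefaultSettings.create(
        %{
          map_id: map.id,
          settings: Jason.encode!(%{"theme" => "dark", "show_labels" => true})
        },
        actor: admin_character
      )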
@@ -3,13 +3,26 @@ defmodule WandererApp.Api.MapSolarSystem do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_solar_system_v2")
end

json_api do
  type "map_solar_systems"

  # Enable automatic filtering and sorting
  derive_filter?(true)
  derive_sort?(true)

  routes do
    # No routes - this resource should not be exposed via API
  end
end

code_interface do
  define(:read,
    action: :read

@@ -3,13 +3,22 @@ defmodule WandererApp.Api.MapState do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_state_v1")
end

json_api do
  type "map_states"

  routes do
    # No routes - this resource should not be exposed via API
  end
end

code_interface do
  define(:create, action: :create)
  define(:update, action: :update)

@@ -3,13 +3,33 @@ defmodule WandererApp.Api.MapSubscription do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_subscriptions_v1")
end

json_api do
  type "map_subscriptions"

  includes([
    :map
  ])

  # Enable automatic filtering and sorting
  derive_filter?(true)
  derive_sort?(true)

  routes do
    base("/map_subscriptions")

    get(:read)
    index :read
  end
end

code_interface do
  define(:create, action: :create)

@@ -158,6 +178,7 @@ defmodule WandererApp.Api.MapSubscription do
relationships do
  belongs_to :map, WandererApp.Api.Map do
    attribute_writable? true
    public? true
  end
end
end

@@ -3,13 +3,32 @@ defmodule WandererApp.Api.MapSystem do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_system_v1")
end

json_api do
  type "map_systems"

  includes([:map])

  derive_filter?(true)
  derive_sort?(true)

  routes do
    base("/map_systems")
    get(:read)
    index :read
    post(:create)
    patch(:update)
    delete(:destroy)
  end
end

code_interface do
  define(:create, action: :create)
  define(:destroy, action: :destroy)
@@ -60,10 +79,29 @@ defmodule WandererApp.Api.MapSystem do
  :solar_system_id,
  :position_x,
  :position_y,
  :status
  :status,
  :visible,
  :locked,
  :custom_name,
  :description,
  :tag,
  :temporary_name,
  :labels,
  :added_at,
  :linked_sig_eve_id
]

defaults [:create, :read, :update, :destroy]
defaults [:create, :update, :destroy]

read :read do
  primary?(true)

  pagination offset?: true,
             default_limit: 100,
             max_page_size: 500,
             countable: true,
             required?: false
end

read :read_all_by_map do
  argument(:map_id, :string, allow_nil?: false)
@@ -209,6 +247,7 @@ defmodule WandererApp.Api.MapSystem do
relationships do
  belongs_to :map, WandererApp.Api.Map do
    attribute_writable? true
    public? true
  end
end

@@ -3,13 +3,33 @@ defmodule WandererApp.Api.MapSystemComment do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_system_comments_v1")
end

json_api do
  type "map_system_comments"

  includes([
    :system,
    :character
  ])

  routes do
    base("/map_system_comments")

    get(:read)
    index :read

    # Custom route for system-specific comments
    index :by_system_id, route: "/by_system/:system_id"
  end
end

code_interface do
  define(:create, action: :create)
  define(:destroy, action: :destroy)
@@ -68,10 +88,12 @@ defmodule WandererApp.Api.MapSystemComment do
relationships do
  belongs_to :system, WandererApp.Api.MapSystem do
    attribute_writable? true
    public? true
  end

  belongs_to :character, WandererApp.Api.Character do
    attribute_writable? true
    public? true
  end
end
end

@@ -3,13 +3,30 @@ defmodule WandererApp.Api.MapSystemSignature do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_system_signatures_v1")
end

json_api do
  type "map_system_signatures"

  includes([:system])

  derive_filter?(true)
  derive_sort?(true)

  routes do
    base("/map_system_signatures")
    get(:read)
    index :read
    delete(:destroy)
  end
end

code_interface do
  define(:all_active, action: :all_active)
  define(:create, action: :create)
@@ -49,10 +66,21 @@ defmodule WandererApp.Api.MapSystemSignature do
  :kind,
  :group,
  :type,
  :deleted
  :deleted,
  :custom_info
]

defaults [:read, :destroy]
defaults [:destroy]

read :read do
  primary?(true)

  pagination offset?: true,
             default_limit: 50,
             max_page_size: 200,
             countable: true,
             required?: false
end

read :all_active do
  prepare build(sort: [updated_at: :desc])
@@ -198,6 +226,7 @@ defmodule WandererApp.Api.MapSystemSignature do
relationships do
  belongs_to :system, WandererApp.Api.MapSystem do
    attribute_writable? true
    public? true
  end
end

@@ -26,13 +26,40 @@ defmodule WandererApp.Api.MapSystemStructure do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_system_structures_v1")
end

json_api do
  type "map_system_structures"

  includes([
    :system
  ])

  # Enable automatic filtering and sorting
  derive_filter?(true)
  derive_sort?(true)

  routes do
    base("/map_system_structures")

    get(:read)
    index :read
    post(:create)
    patch(:update)
    delete(:destroy)

    # Custom routes for specific queries
    index :all_active, route: "/active"
    index :by_system_id, route: "/by_system/:system_id"
  end
end

code_interface do
  define(:all_active, action: :all_active)
  define(:create, action: :create)
@@ -184,6 +211,7 @@ defmodule WandererApp.Api.MapSystemStructure do
relationships do
  belongs_to :system, WandererApp.Api.MapSystem do
    attribute_writable? true
    public? true
  end
end
end

@@ -11,8 +11,6 @@ defmodule WandererApp.Api.MapTransaction do
end

code_interface do
  define(:create, action: :create)

  define(:by_id,
    get_by: [:id],
    action: :read
@@ -20,6 +18,7 @@ defmodule WandererApp.Api.MapTransaction do

  define(:by_map, action: :by_map)
  define(:by_user, action: :by_user)
  define(:create, action: :create)
end

actions do
@@ -75,6 +74,7 @@ defmodule WandererApp.Api.MapTransaction do
relationships do
  belongs_to :map, WandererApp.Api.Map do
    attribute_writable? true
    public? true
  end
end
end

@@ -3,13 +3,35 @@ defmodule WandererApp.Api.MapUserSettings do

use Ash.Resource,
  domain: WandererApp.Api,
  data_layer: AshPostgres.DataLayer
  data_layer: AshPostgres.DataLayer,
  extensions: [AshJsonApi.Resource]

postgres do
  repo(WandererApp.Repo)
  table("map_user_settings_v1")
end

json_api do
  type "map_user_settings"

  # Handle composite primary key
  primary_key do
    keys([:map_id, :user_id])
  end

  includes([
    :map,
    :user
  ])

  routes do
    base("/map_user_settings")

    get(:read)
    index :read
  end
end

code_interface do
  define(:create, action: :create)

@@ -18,11 +40,10 @@ defmodule WandererApp.Api.MapUserSettings do
    action: :read
  )

  define(:update_settings, action: :update_settings)
  define(:update_main_character, action: :update_main_character)
  define(:update_following_character, action: :update_following_character)

  define(:update_hubs, action: :update_hubs)
  define(:update_settings, action: :update_settings)
  define(:update_following_character, action: :update_following_character)
  define(:update_main_character, action: :update_main_character)
end

actions do
@@ -74,8 +95,8 @@ defmodule WandererApp.Api.MapUserSettings do
end

relationships do
  belongs_to :map, WandererApp.Api.Map, primary_key?: true, allow_nil?: false
  belongs_to :user, WandererApp.Api.User, primary_key?: true, allow_nil?: false
  belongs_to :map, WandererApp.Api.Map, primary_key?: true, allow_nil?: false, public?: true
  belongs_to :user, WandererApp.Api.User, primary_key?: true, allow_nil?: false, public?: true
end

identities do

276 lib/wanderer_app/api/map_webhook_subscription.ex Normal file
@@ -0,0 +1,276 @@
defmodule WandererApp.Api.MapWebhookSubscription do
  @moduledoc """
  Ash resource for managing webhook subscriptions for map events.

  Stores webhook endpoint configurations that receive HTTP POST notifications
  when events occur on a specific map.
  """

  use Ash.Resource,
    domain: WandererApp.Api,
    data_layer: AshPostgres.DataLayer,
    extensions: [AshCloak]

  postgres do
    repo(WandererApp.Repo)
    table("map_webhook_subscriptions_v1")
  end

  cloak do
    vault(WandererApp.Vault)
    attributes([:secret])
    decrypt_by_default([:secret])
  end

  code_interface do
    define(:create, action: :create)
    define(:update, action: :update)
    define(:destroy, action: :destroy)

    define(:by_id,
      get_by: [:id],
      action: :read
    )

    define(:by_map, action: :by_map, args: [:map_id])
    define(:active_by_map, action: :active_by_map, args: [:map_id])
    define(:rotate_secret, action: :rotate_secret)
  end

  actions do
    default_accept [
      :map_id,
      :url,
      :events,
      :active?
    ]

    defaults [:read, :destroy]

    update :update do
      accept [
        :url,
        :events,
        :active?,
        :last_delivery_at,
        :last_error,
        :last_error_at,
        :consecutive_failures,
        :secret
      ]
    end

    read :by_map do
      argument :map_id, :uuid, allow_nil?: false
      filter expr(map_id == ^arg(:map_id))
      prepare build(sort: [inserted_at: :desc])
    end

    read :active_by_map do
      argument :map_id, :uuid, allow_nil?: false
      filter expr(map_id == ^arg(:map_id) and active? == true)
      prepare build(sort: [inserted_at: :desc])
    end

    create :create do
      accept [
        :map_id,
        :url,
        :events,
        :active?
      ]

      # Validate webhook URL format
      change fn changeset, _context ->
        case Ash.Changeset.get_attribute(changeset, :url) do
          nil ->
            changeset

          url ->
            case validate_webhook_url_format(url) do
              :ok ->
                changeset

              {:error, message} ->
                Ash.Changeset.add_error(changeset, field: :url, message: message)
            end
        end
      end

      # Validate events list
      change fn changeset, _context ->
        case Ash.Changeset.get_attribute(changeset, :events) do
          nil ->
            changeset

          events when is_list(events) ->
            case validate_events_list(events) do
              :ok ->
                changeset

              {:error, message} ->
                Ash.Changeset.add_error(changeset, field: :events, message: message)
            end

          _ ->
            changeset
        end
      end

      # Generate secret on creation
      change fn changeset, _context ->
        secret = generate_webhook_secret()
        Ash.Changeset.force_change_attribute(changeset, :secret, secret)
      end
    end

    update :rotate_secret do
      accept []
      require_atomic? false

      change fn changeset, _context ->
        new_secret = generate_webhook_secret()
        Ash.Changeset.change_attribute(changeset, :secret, new_secret)
      end
    end
  end

  validations do
    validate present(:url), message: "URL is required"
    validate present(:events), message: "Events array is required"
    validate present(:map_id), message: "Map ID is required"
  end

  attributes do
    uuid_primary_key :id

    attribute :map_id, :uuid do
      allow_nil? false
    end

    attribute :url, :string do
      allow_nil? false
      # 2KB limit as per security requirements
      constraints max_length: 2000
    end

    attribute :events, {:array, :string} do
      allow_nil? false
      default []

      constraints min_length: 1,
                  # Reasonable limit on number of event types
                  max_length: 50,
                  # Max length per event type
                  items: [max_length: 100]
    end

    attribute :secret, :string do
      allow_nil? false
      # Hide in logs and API responses
      sensitive? true
    end

    attribute :active?, :boolean do
      allow_nil? false
      default true
    end

    # Delivery tracking fields
    attribute :last_delivery_at, :utc_datetime do
      allow_nil? true
    end

    attribute :last_error, :string do
      allow_nil? true
      constraints max_length: 1000
    end

    attribute :last_error_at, :utc_datetime do
      allow_nil? true
    end

    attribute :consecutive_failures, :integer do
      allow_nil? false
      default 0
    end

    create_timestamp(:inserted_at)
    update_timestamp(:updated_at)
  end

  relationships do
    belongs_to :map, WandererApp.Api.Map do
      source_attribute :map_id
      destination_attribute :id
      attribute_writable? true
    end
  end

  identities do
    # Allow multiple webhooks per map, but prevent duplicate URLs per map
    identity :unique_url_per_map, [:map_id, :url]
  end

  # Private helper functions

  defp generate_webhook_secret do
    :crypto.strong_rand_bytes(32) |> Base.encode64()
  end

  defp validate_webhook_url_format(url) do
    uri = URI.parse(url)

    cond do
      uri.scheme != "https" ->
        {:error, "Webhook URL must use HTTPS"}

      uri.host == nil ->
        {:error, "Webhook URL must have a valid host"}

      uri.host in ["localhost", "127.0.0.1", "0.0.0.0"] ->
        {:error, "Webhook URL cannot use localhost or loopback addresses"}

      String.starts_with?(uri.host, "192.168.") or String.starts_with?(uri.host, "10.") or
          is_private_ip_172_range?(uri.host) ->
        {:error, "Webhook URL cannot use private network addresses"}

      byte_size(url) > 2000 ->
        {:error, "Webhook URL cannot exceed 2000 characters"}

      true ->
        :ok
    end
  end
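  # For example (hypothetical endpoints):
  #   validate_webhook_url_format("https://hooks.example.com/wanderer")
  #     #=> :ok
  #   validate_webhook_url_format("http://hooks.example.com/wanderer")
  #     #=> {:error, "Webhook URL must use HTTPS"}
  #   validate_webhook_url_format("https://10.0.0.5/hook")
  #     #=> {:error, "Webhook URL cannot use private network addresses"}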

  defp validate_events_list(events) do
    alias WandererApp.ExternalEvents.Event

    # Get valid event types as strings
    valid_event_strings =
      Event.supported_event_types()
      |> Enum.map(&Atom.to_string/1)

    # Add wildcard as valid option
    valid_events = ["*" | valid_event_strings]

    invalid_events = Enum.reject(events, fn event -> event in valid_events end)

    if Enum.empty?(invalid_events) do
      :ok
    else
      {:error, "Invalid event types: #{Enum.join(invalid_events, ", ")}"}
    end
  end

  # Check if IP is in the 172.16.0.0/12 range (172.16.0.0 to 172.31.255.255)
  defp is_private_ip_172_range?(host) do
    case :inet.parse_address(String.to_charlist(host)) do
      {:ok, {172, b, _, _}} when b >= 16 and b <= 31 ->
        true

      _ ->
        false
    end
  end
end
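A hedged creation sketch using the code interface above (the map and endpoint are hypothetical). The secret is generated server-side by the create change, so callers never supply one:

    {:ok, webhook} =
      WandererApp.Api.MapWebhookSubscription.create(%{
        map_id: map.id,
        url: "https://hooks.example.com/wanderer",
        events: ["*"]
      })

    # webhook.secret is a random 32-byte value, Base64-encoded, suitable for
    # verifying deliveries on the receiving end.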
@@ -3,13 +3,22 @@ defmodule WandererApp.Api.ShipTypeInfo do
|
||||
|
||||
use Ash.Resource,
|
||||
domain: WandererApp.Api,
|
||||
data_layer: AshPostgres.DataLayer
|
||||
data_layer: AshPostgres.DataLayer,
|
||||
extensions: [AshJsonApi.Resource]
|
||||
|
||||
postgres do
|
||||
repo(WandererApp.Repo)
|
||||
table("ship_type_infos_v1")
|
||||
end
|
||||
|
||||
json_api do
|
||||
type "ship_type_info"
|
||||
|
||||
routes do
|
||||
# No routes - this resource should not be exposed via API
|
||||
end
|
||||
end
|
||||
|
||||
code_interface do
|
||||
define(:read,
|
||||
action: :read
|
||||
|
||||
@@ -4,13 +4,27 @@ defmodule WandererApp.Api.User do
|
||||
use Ash.Resource,
|
||||
domain: WandererApp.Api,
|
||||
data_layer: AshPostgres.DataLayer,
|
||||
extensions: [AshCloak]
|
||||
extensions: [AshCloak, AshJsonApi.Resource]
|
||||
|
||||
postgres do
|
||||
repo(WandererApp.Repo)
|
||||
table("user_v1")
|
||||
end
|
||||
|
||||
json_api do
|
||||
type "users"
|
||||
|
||||
# Only expose safe, non-sensitive attributes
|
||||
includes([:characters])
|
||||
|
||||
derive_filter?(true)
|
||||
derive_sort?(true)
|
||||
|
||||
routes do
|
||||
# No routes - this resource should not be exposed via API
|
||||
end
|
||||
end
|
||||
|
||||
code_interface do
|
||||
define(:by_id,
|
||||
get_by: [:id],
|
||||
@@ -71,7 +85,9 @@ defmodule WandererApp.Api.User do
|
||||
end
|
||||
|
||||
relationships do
|
||||
has_many :characters, WandererApp.Api.Character
|
||||
has_many :characters, WandererApp.Api.Character do
|
||||
public? true
|
||||
end
|
||||
end
|
||||
|
||||
identities do
|
||||
|
||||
@@ -3,7 +3,8 @@ defmodule WandererApp.Api.UserActivity do
|
||||
|
||||
use Ash.Resource,
|
||||
domain: WandererApp.Api,
|
||||
data_layer: AshPostgres.DataLayer
|
||||
data_layer: AshPostgres.DataLayer,
|
||||
extensions: [AshJsonApi.Resource]
|
||||
|
||||
require Ash.Expr
|
||||
|
||||
@@ -24,9 +25,28 @@ defmodule WandererApp.Api.UserActivity do
|
||||
end
|
||||
end
|
||||
|
||||
json_api do
|
||||
type "user_activities"
|
||||
|
||||
includes([:character, :user])
|
||||
|
||||
derive_filter?(true)
|
||||
derive_sort?(true)
|
||||
|
||||
primary_key do
|
||||
keys([:id])
|
||||
end
|
||||
|
||||
routes do
|
||||
base("/user_activities")
|
||||
get(:read)
|
||||
index :read
|
||||
end
|
||||
end
|
||||
|
||||
code_interface do
|
||||
define(:new, action: :new)
|
||||
define(:read, action: :read)
|
||||
define(:new, action: :new)
|
||||
end
|
||||
|
||||
actions do
|
||||
@@ -34,11 +54,10 @@ defmodule WandererApp.Api.UserActivity do
|
||||
:entity_id,
|
||||
:entity_type,
|
||||
:event_type,
|
||||
:event_data
|
||||
:event_data,
|
||||
:user_id
|
||||
]
|
||||
|
||||
defaults [:create, :update, :destroy]
|
||||
|
||||
read :read do
|
||||
primary?(true)
|
||||
|
||||
@@ -54,7 +73,7 @@ defmodule WandererApp.Api.UserActivity do
|
||||
accept [:entity_id, :entity_type, :event_type, :event_data]
|
||||
primary?(true)
|
||||
|
||||
argument :user_id, :uuid, allow_nil?: false
|
||||
argument :user_id, :uuid, allow_nil?: true
|
||||
argument :character_id, :uuid, allow_nil?: true
|
||||
|
||||
change manage_relationship(:user_id, :user, on_lookup: :relate, on_no_match: nil)
|
||||
@@ -79,7 +98,8 @@ defmodule WandererApp.Api.UserActivity do
|
||||
constraints(
|
||||
one_of: [
|
||||
:map,
|
||||
:access_list
|
||||
:access_list,
|
||||
:security_event
|
||||
]
|
||||
)
|
||||
|
||||
@@ -115,7 +135,17 @@ defmodule WandererApp.Api.UserActivity do
|
||||
:map_rally_added,
|
||||
:map_rally_cancelled,
|
||||
:signatures_added,
|
||||
:signatures_removed
|
||||
:signatures_removed,
|
||||
# Security audit events
|
||||
:auth_success,
|
||||
:auth_failure,
|
||||
:permission_denied,
|
||||
:privilege_escalation,
|
||||
:data_access,
|
||||
:admin_action,
|
||||
:config_change,
|
||||
:bulk_operation,
|
||||
:security_alert
|
||||
]
|
||||
)
|
||||
|
||||
@@ -132,12 +162,13 @@ defmodule WandererApp.Api.UserActivity do
|
||||
belongs_to :character, WandererApp.Api.Character do
|
||||
allow_nil? true
|
||||
attribute_writable? true
|
||||
public? true
|
||||
end
|
||||
|
||||
belongs_to :user, WandererApp.Api.User do
|
||||
primary_key? true
|
||||
allow_nil? false
|
||||
allow_nil? true
|
||||
attribute_writable? true
|
||||
public? true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -23,7 +23,7 @@ defmodule WandererApp.Api.UserTransaction do
      :corporation_id
    ]

    defaults [:create, :read, :update, :destroy]
    defaults [:read]

    create :new do
      accept [:journal_ref_id, :user_id, :date, :amount, :corporation_id]
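
Narrowing `defaults` to `[:read]` drops the generated `:create`/`:update`/`:destroy` actions, so writes have to go through the explicit `:new` action. A minimal sketch of the effect (variable names are invented; depending on the Ash version the final call is `Ash.create/1` or the domain module's `create/1`):

# Only the explicit :new action can write now; :update/:destroy are gone.
WandererApp.Api.UserTransaction
|> Ash.Changeset.for_create(:new, %{
  journal_ref_id: ref_id,
  user_id: user_id,
  date: date,
  amount: amount,
  corporation_id: corp_id
})
|> Ash.create()
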
@@ -7,66 +7,90 @@ defmodule WandererApp.Application do

  @impl true
  def start(_type, _args) do
    # Skip test mocks setup - handled in test helper if needed

    # Core children that must always start
    core_children = [
      WandererApp.PromEx,
      WandererAppWeb.Telemetry,
      WandererApp.Vault,
      WandererApp.Repo,
      {Phoenix.PubSub, name: WandererApp.PubSub, adapter_name: Phoenix.PubSub.PG2},
      {
        Finch,
        name: WandererApp.Finch,
        pools: %{
          default: [
            # number of connections per pool
            size: 50,
            # number of pools (so total 50 connections)
            count: 4
          ]
        }
      },
      WandererApp.Cache,
      Supervisor.child_spec({Cachex, name: :api_cache, default_ttl: :timer.hours(1)},
        id: :api_cache_worker
      ),
      Supervisor.child_spec({Cachex, name: :esi_auth_cache}, id: :esi_auth_cache_worker),
      Supervisor.child_spec({Cachex, name: :system_static_info_cache},
        id: :system_static_info_cache_worker
      ),
      Supervisor.child_spec({Cachex, name: :ship_types_cache}, id: :ship_types_cache_worker),
      Supervisor.child_spec({Cachex, name: :character_cache}, id: :character_cache_worker),
      Supervisor.child_spec({Cachex, name: :map_cache}, id: :map_cache_worker),
      Supervisor.child_spec({Cachex, name: :character_state_cache},
        id: :character_state_cache_worker
      ),
      Supervisor.child_spec({Cachex, name: :tracked_characters},
        id: :tracked_characters_cache_worker
      ),
      {Registry, keys: :unique, name: WandererApp.MapRegistry},
      {Registry, keys: :unique, name: WandererApp.Character.TrackerRegistry},
      {PartitionSupervisor,
       child_spec: DynamicSupervisor, name: WandererApp.Map.DynamicSupervisors},
      {PartitionSupervisor,
       child_spec: DynamicSupervisor, name: WandererApp.Character.DynamicSupervisors},
      WandererAppWeb.Presence,
      WandererAppWeb.Endpoint
    ]

    # Children that should only start in non-test environments
    runtime_children =
      if Application.get_env(:wanderer_app, :environment) == :test do
        []
      else
        [
          WandererApp.Esi.InitClientsTask,
          WandererApp.Scheduler,
          WandererApp.Server.ServerStatusTracker,
          WandererApp.Server.TheraDataFetcher,
          {WandererApp.Character.TrackerPoolSupervisor, []},
          WandererApp.Character.TrackerManager,
          WandererApp.Map.Manager
        ]
      end

    children =
      [
        WandererApp.PromEx,
        WandererAppWeb.Telemetry,
        WandererApp.Vault,
        WandererApp.Repo,
        {Phoenix.PubSub, name: WandererApp.PubSub, adapter_name: Phoenix.PubSub.PG2},
        {
          Finch,
          name: WandererApp.Finch,
          pools: %{
            default: [
              # number of connections per pool
              size: 50,
              # number of pools (so total 50 connections)
              count: 4
            ]
          }
        },
        WandererApp.Cache,
        Supervisor.child_spec({Cachex, name: :api_cache, default_ttl: :timer.hours(1)},
          id: :api_cache_worker
        ),
        Supervisor.child_spec({Cachex, name: :esi_auth_cache}, id: :esi_auth_cache_worker),
        Supervisor.child_spec({Cachex, name: :system_static_info_cache},
          id: :system_static_info_cache_worker
        ),
        Supervisor.child_spec({Cachex, name: :ship_types_cache}, id: :ship_types_cache_worker),
        Supervisor.child_spec({Cachex, name: :character_cache}, id: :character_cache_worker),
        Supervisor.child_spec({Cachex, name: :map_cache}, id: :map_cache_worker),
        Supervisor.child_spec({Cachex, name: :character_state_cache},
          id: :character_state_cache_worker
        ),
        Supervisor.child_spec({Cachex, name: :tracked_characters},
          id: :tracked_characters_cache_worker
        ),
        WandererApp.Esi.InitClientsTask,
        WandererApp.Scheduler,
        {Registry, keys: :unique, name: WandererApp.MapRegistry},
        {Registry, keys: :unique, name: WandererApp.Character.TrackerRegistry},
        {PartitionSupervisor,
         child_spec: DynamicSupervisor, name: WandererApp.Map.DynamicSupervisors},
        {PartitionSupervisor,
         child_spec: DynamicSupervisor, name: WandererApp.Character.DynamicSupervisors},
        WandererApp.Server.ServerStatusTracker,
        WandererApp.Server.TheraDataFetcher,
        {WandererApp.Character.TrackerPoolSupervisor, []},
        WandererApp.Character.TrackerManager,
        WandererApp.Map.Manager,
        WandererAppWeb.Presence,
        WandererAppWeb.Endpoint
      ] ++
      core_children ++
      runtime_children ++
        maybe_start_corp_wallet_tracker(WandererApp.Env.map_subscriptions_enabled?()) ++
        maybe_start_kills_services()
        maybe_start_kills_services() ++
        maybe_start_external_events_services()

    opts = [strategy: :one_for_one, name: WandererApp.Supervisor]

    Supervisor.start_link(children, opts)
    |> case do
      {:ok, _pid} = ok ->
        # Attach telemetry handler for database pool monitoring
        # :telemetry.attach(
        #   "wanderer-db-pool-handler",
        #   [:wanderer_app, :repo, :query],
        #   &WandererApp.Tracker.handle_pool_query/4,
        #   nil
        # )

        ok

      {:error, info} = e ->

@@ -90,18 +114,65 @@ defmodule WandererApp.Application do
    do: []

  defp maybe_start_kills_services do
    wanderer_kills_enabled =
      Application.get_env(:wanderer_app, :wanderer_kills_service_enabled, false)

    if wanderer_kills_enabled in [true, true, "true"] do
      Logger.info("Starting WandererKills service integration...")

      [
        WandererApp.Kills.Supervisor,
        WandererApp.Map.ZkbDataFetcher
      ]
    else
    # Don't start kills services in test environment
    if Application.get_env(:wanderer_app, :environment) == :test do
      []
    else
      wanderer_kills_enabled =
        Application.get_env(:wanderer_app, :wanderer_kills_service_enabled, false)

      if wanderer_kills_enabled in [true, "true"] do
        Logger.info("Starting WandererKills service integration...")

        [
          WandererApp.Kills.Supervisor,
          WandererApp.Map.ZkbDataFetcher
        ]
      else
        []
      end
    end
  end

  defp maybe_start_external_events_services do
    # Don't start external events in test environment
    if Application.get_env(:wanderer_app, :environment) == :test do
      []
    else
      external_events_config = Application.get_env(:wanderer_app, :external_events, [])
      sse_enabled = WandererApp.Env.sse_enabled?()
      webhooks_enabled = external_events_config[:webhooks_enabled] || false

      services = []

      # Always include MapEventRelay if any external events are enabled
      services =
        if sse_enabled || webhooks_enabled do
          Logger.info("Starting external events system...")
          [WandererApp.ExternalEvents.MapEventRelay | services]
        else
          services
        end

      # Add WebhookDispatcher if webhooks are enabled
      services =
        if webhooks_enabled do
          Logger.info("Starting webhook dispatcher...")
          [WandererApp.ExternalEvents.WebhookDispatcher | services]
        else
          services
        end

      # Add SseStreamManager if SSE is enabled
      services =
        if sse_enabled do
          Logger.info("Starting SSE stream manager...")
          [WandererApp.ExternalEvents.SseStreamManager | services]
        else
          services
        end

      Enum.reverse(services)
    end
  end
end
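
The start/1 refactor above splits the supervision tree into an always-on core list plus environment-gated lists concatenated at the end, with each optional helper contributing `[]` when disabled. A minimal standalone sketch of the same pattern (module names here are placeholders, not from the diff):

defmodule ExampleApp.Application do
  use Application

  @impl true
  def start(_type, _args) do
    core = [ExampleApp.Repo, ExampleAppWeb.Endpoint]

    # Optional children contribute [] when disabled, so the final list
    # stays flat and the supervisor never sees a nil child spec.
    children = core ++ maybe_feature_children(feature_enabled?())

    Supervisor.start_link(children, strategy: :one_for_one, name: ExampleApp.Supervisor)
  end

  defp maybe_feature_children(true), do: [ExampleApp.FeatureWorker]
  defp maybe_feature_children(false), do: []

  defp feature_enabled?, do: Application.get_env(:example_app, :feature_enabled, false)
end
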
@@ -69,7 +69,10 @@ defmodule WandererApp.CachedInfo do
          )
        end)

        Cachex.get(:system_static_info_cache, solar_system_id)
        case Cachex.get(:system_static_info_cache, solar_system_id) do
          {:ok, nil} -> {:error, :not_found}
          result -> result
        end

      {:error, reason} ->
        Logger.error("Failed to read solar systems from API: #{inspect(reason)}")
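
The CachedInfo change matters because `Cachex.get/2` returns `{:ok, nil}` for a missing key, which callers can easily mistake for a hit. A small sketch of the same normalization, assuming the same cache name:

# Cachex.get/2 returns {:ok, nil} on a miss; normalize that to an
# explicit error so callers can pattern match on :not_found.
defp fetch_static_info(solar_system_id) do
  case Cachex.get(:system_static_info_cache, solar_system_id) do
    {:ok, nil} -> {:error, :not_found}
    {:ok, info} -> {:ok, info}
    {:error, _} = error -> error
  end
end
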
@@ -28,7 +28,7 @@ defmodule WandererApp.Character do
        Cachex.put(:character_cache, character_id, character)
        {:ok, character}

      _ ->
      error ->
        {:error, :not_found}
    end

@@ -283,39 +283,44 @@ defmodule WandererApp.Character do
    |> case do
      {:ok, settings} when not is_nil(settings) ->
        character
        |> Map.put(:online, false)
        |> Map.merge(settings)
        |> Map.merge(%{
          solar_system_id: settings.solar_system_id,
          structure_id: settings.structure_id,
          station_id: settings.station_id,
          ship: settings.ship,
          ship_name: settings.ship_name,
          ship_item_id: settings.ship_item_id
        })

      _ ->
        character
        |> Map.put(:online, false)
        |> Map.merge(@default_character_tracking_data)
    end
    |> Map.merge(%{tracking_paused: tracking_paused})
    |> Map.merge(%{online: false, tracking_paused: tracking_paused})
  end

  defp prepare_search_results(result) do
    {:ok, characters} =
      _load_eve_info(Map.get(result, "character"), :get_character_info, &_map_character_info/1)
      load_eve_info(Map.get(result, "character"), :get_character_info, &map_character_info/1)

    {:ok, corporations} =
      _load_eve_info(
      load_eve_info(
        Map.get(result, "corporation"),
        :get_corporation_info,
        &_map_corporation_info/1
        &map_corporation_info/1
      )

    {:ok, alliances} =
      _load_eve_info(Map.get(result, "alliance"), :get_alliance_info, &_map_alliance_info/1)
      load_eve_info(Map.get(result, "alliance"), :get_alliance_info, &map_alliance_info/1)

    [[characters | corporations] | alliances] |> List.flatten()
  end

  defp _load_eve_info(nil, _, _), do: {:ok, []}
  defp load_eve_info(nil, _, _), do: {:ok, []}

  defp _load_eve_info([], _, _), do: {:ok, []}
  defp load_eve_info([], _, _), do: {:ok, []}

  defp _load_eve_info(eve_ids, method, map_function),
  defp load_eve_info(eve_ids, method, map_function),
    do:
      {:ok,
       Enum.map(eve_ids, fn eve_id ->

@@ -331,7 +336,7 @@ defmodule WandererApp.Character do
       end)
       |> Enum.filter(fn result -> not is_nil(result) end)}

  defp _map_alliance_info(info) do
  defp map_alliance_info(info) do
    %{
      label: info["name"],
      value: info["eve_id"] |> to_string(),

@@ -339,7 +344,7 @@ defmodule WandererApp.Character do
    }
  end

  defp _map_character_info(info) do
  defp map_character_info(info) do
    %{
      label: info["name"],
      value: info["eve_id"] |> to_string(),

@@ -347,7 +352,7 @@ defmodule WandererApp.Character do
    }
  end

  defp _map_corporation_info(info) do
  defp map_corporation_info(info) do
    %{
      label: info["name"],
      value: info["eve_id"] |> to_string(),
@@ -112,6 +112,17 @@ defmodule WandererApp.Character.Tracker do
  defp pause_tracking(character_id) do
    if WandererApp.Character.can_pause_tracking?(character_id) &&
         not WandererApp.Cache.has_key?("character:#{character_id}:tracking_paused") do
      # Log character tracking statistics before pausing
      {:ok, character_state} = WandererApp.Character.get_character_state(character_id)

      Logger.warning(
        "CHARACTER_TRACKING_PAUSED: Character tracking paused due to sustained errors",
        character_id: character_id,
        active_maps: length(character_state.active_maps),
        is_online: character_state.is_online,
        tracking_duration_minutes: get_tracking_duration_minutes(character_id)
      )

      WandererApp.Cache.delete("character:#{character_id}:online_forbidden")
      WandererApp.Cache.delete("character:#{character_id}:online_error_time")
      WandererApp.Cache.delete("character:#{character_id}:ship_error_time")

@@ -122,6 +133,7 @@ defmodule WandererApp.Character.Tracker do
        is_online: false
      })

      # Original log kept for backward compatibility
      Logger.warning("[CharacterTracker] paused for #{character_id}")

      WandererApp.Cache.put(

@@ -175,6 +187,8 @@ defmodule WandererApp.Character.Tracker do
        {:error, :skipped}

      _ ->
        # Monitor cache for potential evictions before ESI call

        case WandererApp.Esi.get_character_online(eve_id,
               access_token: access_token,
               character_id: character_id

@@ -197,7 +211,20 @@ defmodule WandererApp.Character.Tracker do
            WandererApp.Cache.delete("character:#{character_id}:ship_forbidden")
            WandererApp.Cache.delete("character:#{character_id}:location_forbidden")
            WandererApp.Cache.delete("character:#{character_id}:wallet_forbidden")
            WandererApp.Character.update_character(character_id, online)

            try do
              WandererApp.Character.update_character(character_id, online)
            rescue
              error ->
                Logger.error("DB_ERROR: Failed to update character in database",
                  character_id: character_id,
                  error: inspect(error),
                  operation: "update_character_online"
                )

                # Re-raise to maintain existing error handling
                reraise error, __STACKTRACE__
            end

            update = %{
              character_state

@@ -206,12 +233,37 @@ defmodule WandererApp.Character.Tracker do
                track_location: online.online
            }

            WandererApp.Character.update_character_state(character_id, update)
            try do
              WandererApp.Character.update_character_state(character_id, update)
            rescue
              error ->
                Logger.error("DB_ERROR: Failed to update character state in database",
                  character_id: character_id,
                  error: inspect(error),
                  operation: "update_character_state"
                )

                # Re-raise to maintain existing error handling
                reraise error, __STACKTRACE__
            end

            :ok

          {:error, error} when error in [:forbidden, :not_found, :timeout] ->
            Logger.warning("#{__MODULE__} failed to update_online: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_online",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.warning("ESI_ERROR: Character online tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_online"
            )

            WandererApp.Cache.put(
              "character:#{character_id}:online_forbidden",

@@ -233,7 +285,33 @@ defmodule WandererApp.Character.Tracker do
          {:error, :error_limited, headers} ->
            reset_timeout = get_reset_timeout(headers)

            Logger.warning("#{inspect(tracking_pool)} ..")
            reset_seconds =
              Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()

            remaining =
              Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()

            # Emit telemetry for tracking
            :telemetry.execute(
              [:wanderer_app, :esi, :rate_limited],
              %{
                reset_duration: reset_timeout,
                count: 1
              },
              %{
                endpoint: "character_online",
                tracking_pool: tracking_pool,
                character_id: character_id
              }
            )

            Logger.warning("ESI_RATE_LIMITED: Character online tracking rate limited",
              character_id: character_id,
              tracking_pool: tracking_pool,
              endpoint: "character_online",
              reset_seconds: reset_seconds,
              remaining_requests: remaining
            )

            WandererApp.Cache.put(
              "character:#{character_id}:online_forbidden",

@@ -244,7 +322,20 @@ defmodule WandererApp.Character.Tracker do
            {:error, :skipped}

          {:error, error} ->
            Logger.error("#{__MODULE__} failed to update_online: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_online",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.error("ESI_ERROR: Character online tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_online"
            )

            WandererApp.Cache.put(
              "character:#{character_id}:online_forbidden",
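
Every failure branch above now emits either `[:wanderer_app, :esi, :error]` or `[:wanderer_app, :esi, :rate_limited]` telemetry alongside the structured log. A hedged sketch of a consumer for these events (the handler id and the logging body are illustrative, not part of the diff):

# Illustrative handler; attach once at application start.
:telemetry.attach_many(
  "esi-tracking-logger",
  [
    [:wanderer_app, :esi, :error],
    [:wanderer_app, :esi, :rate_limited]
  ],
  fn event, measurements, metadata, _config ->
    # A real handler would feed a metrics sink; this just shows the shape.
    IO.inspect({event, measurements, metadata}, label: "esi telemetry")
  end,
  nil
)
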
@@ -307,7 +398,20 @@ defmodule WandererApp.Character.Tracker do
            :ok

          {:error, error} when error in [:forbidden, :not_found, :timeout] ->
            Logger.warning("#{__MODULE__} failed to get_character_info: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_info",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.warning("ESI_ERROR: Character info tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_info"
            )

            WandererApp.Cache.put(
              "character:#{character_id}:info_forbidden",

@@ -320,7 +424,32 @@ defmodule WandererApp.Character.Tracker do
          {:error, :error_limited, headers} ->
            reset_timeout = get_reset_timeout(headers)

            Logger.warning("#{inspect(tracking_pool)} ..")
            reset_seconds =
              Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()

            remaining = Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()

            # Emit telemetry for tracking
            :telemetry.execute(
              [:wanderer_app, :esi, :rate_limited],
              %{
                reset_duration: reset_timeout,
                count: 1
              },
              %{
                endpoint: "character_info",
                tracking_pool: tracking_pool,
                character_id: character_id
              }
            )

            Logger.warning("ESI_RATE_LIMITED: Character info tracking rate limited",
              character_id: character_id,
              tracking_pool: tracking_pool,
              endpoint: "character_info",
              reset_seconds: reset_seconds,
              remaining_requests: remaining
            )

            WandererApp.Cache.put(
              "character:#{character_id}:info_forbidden",

@@ -331,13 +460,27 @@ defmodule WandererApp.Character.Tracker do
            {:error, :error_limited}

          {:error, error} ->
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_info",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            WandererApp.Cache.put(
              "character:#{character_id}:info_forbidden",
              true,
              ttl: @forbidden_ttl
            )

            Logger.error("#{__MODULE__} failed to get_character_info: #{inspect(error)}")
            Logger.error("ESI_ERROR: Character info tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_info"
            )

            {:error, error}

          _ ->

@@ -372,13 +515,26 @@ defmodule WandererApp.Character.Tracker do
               access_token: access_token,
               character_id: character_id
             ) do
          {:ok, ship} when is_non_struct_map(ship) ->
          {:ok, ship} when is_map(ship) and not is_struct(ship) ->
            character_state |> maybe_update_ship(ship)

            :ok

          {:error, error} when error in [:forbidden, :not_found, :timeout] ->
            Logger.warning("#{__MODULE__} failed to update_ship: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_ship",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.warning("ESI_ERROR: Character ship tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_ship"
            )

            WandererApp.Cache.put(
              "character:#{character_id}:ship_forbidden",

@@ -398,7 +554,33 @@ defmodule WandererApp.Character.Tracker do
          {:error, :error_limited, headers} ->
            reset_timeout = get_reset_timeout(headers)

            Logger.warning("#{inspect(tracking_pool)} ..")
            reset_seconds =
              Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()

            remaining =
              Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()

            # Emit telemetry for tracking
            :telemetry.execute(
              [:wanderer_app, :esi, :rate_limited],
              %{
                reset_duration: reset_timeout,
                count: 1
              },
              %{
                endpoint: "character_ship",
                tracking_pool: tracking_pool,
                character_id: character_id
              }
            )

            Logger.warning("ESI_RATE_LIMITED: Character ship tracking rate limited",
              character_id: character_id,
              tracking_pool: tracking_pool,
              endpoint: "character_ship",
              reset_seconds: reset_seconds,
              remaining_requests: remaining
            )

            WandererApp.Cache.put(
              "character:#{character_id}:ship_forbidden",

@@ -409,7 +591,20 @@ defmodule WandererApp.Character.Tracker do
            {:error, :error_limited}

          {:error, error} ->
            Logger.error("#{__MODULE__} failed to update_ship: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_ship",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.error("ESI_ERROR: Character ship tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_ship"
            )

            WandererApp.Cache.put(
              "character:#{character_id}:ship_forbidden",

@@ -427,7 +622,20 @@ defmodule WandererApp.Character.Tracker do
            {:error, error}

          _ ->
            Logger.error("#{__MODULE__} failed to update_ship: wrong response")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_ship",
              error_type: "wrong_response",
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.error("ESI_ERROR: Character ship tracking failed - wrong response",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: "wrong_response",
              endpoint: "character_ship"
            )

            WandererApp.Cache.put(
              "character:#{character_id}:ship_forbidden",

@@ -471,18 +679,33 @@ defmodule WandererApp.Character.Tracker do
        {:error, :skipped}

      _ ->
        # Monitor cache for potential evictions before ESI call

        case WandererApp.Esi.get_character_location(eve_id,
               access_token: access_token,
               character_id: character_id
             ) do
          {:ok, location} when is_non_struct_map(location) ->
          {:ok, location} when is_map(location) and not is_struct(location) ->
            character_state
            |> maybe_update_location(location)

            :ok

          {:error, error} when error in [:forbidden, :not_found, :timeout] ->
            Logger.warning("#{__MODULE__} failed to update_location: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_location",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.warning("ESI_ERROR: Character location tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_location"
            )

            if is_nil(
                 WandererApp.Cache.lookup!("character:#{character_id}:location_error_time")

@@ -496,10 +719,36 @@ defmodule WandererApp.Character.Tracker do
            {:error, :skipped}

          {:error, :error_limited, headers} ->
            Logger.warning("#{inspect(tracking_pool)} ..")

            reset_timeout = get_reset_timeout(headers, @location_limit_ttl)

            reset_seconds =
              Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()

            remaining =
              Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()

            # Emit telemetry for tracking
            :telemetry.execute(
              [:wanderer_app, :esi, :rate_limited],
              %{
                reset_duration: reset_timeout,
                count: 1
              },
              %{
                endpoint: "character_location",
                tracking_pool: tracking_pool,
                character_id: character_id
              }
            )

            Logger.warning("ESI_RATE_LIMITED: Character location tracking rate limited",
              character_id: character_id,
              tracking_pool: tracking_pool,
              endpoint: "character_location",
              reset_seconds: reset_seconds,
              remaining_requests: remaining
            )

            WandererApp.Cache.put(
              "character:#{character_id}:location_forbidden",
              true,

@@ -509,7 +758,20 @@ defmodule WandererApp.Character.Tracker do
            {:error, :error_limited}

          {:error, error} ->
            Logger.error("#{__MODULE__} failed to update_location: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_location",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.error("ESI_ERROR: Character location tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_location"
            )

            if is_nil(
                 WandererApp.Cache.lookup!("character:#{character_id}:location_error_time")

@@ -523,7 +785,20 @@ defmodule WandererApp.Character.Tracker do
            {:error, :skipped}

          _ ->
            Logger.error("#{__MODULE__} failed to update_location: wrong response")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_location",
              error_type: "wrong_response",
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.error("ESI_ERROR: Character location tracking failed - wrong response",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: "wrong_response",
              endpoint: "character_location"
            )

            if is_nil(
                 WandererApp.Cache.lookup!("character:#{character_id}:location_error_time")

@@ -579,7 +854,20 @@ defmodule WandererApp.Character.Tracker do
            :ok

          {:error, error} when error in [:forbidden, :not_found, :timeout] ->
            Logger.warning("#{__MODULE__} failed to update_wallet: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_wallet",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.warning("ESI_ERROR: Character wallet tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_wallet"
            )

            WandererApp.Cache.put(
              "character:#{character_id}:wallet_forbidden",

@@ -592,7 +880,33 @@ defmodule WandererApp.Character.Tracker do
          {:error, :error_limited, headers} ->
            reset_timeout = get_reset_timeout(headers)

            Logger.warning("#{inspect(tracking_pool)} ..")
            reset_seconds =
              Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()

            remaining =
              Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()

            # Emit telemetry for tracking
            :telemetry.execute(
              [:wanderer_app, :esi, :rate_limited],
              %{
                reset_duration: reset_timeout,
                count: 1
              },
              %{
                endpoint: "character_wallet",
                tracking_pool: tracking_pool,
                character_id: character_id
              }
            )

            Logger.warning("ESI_RATE_LIMITED: Character wallet tracking rate limited",
              character_id: character_id,
              tracking_pool: tracking_pool,
              endpoint: "character_wallet",
              reset_seconds: reset_seconds,
              remaining_requests: remaining
            )

            WandererApp.Cache.put(
              "character:#{character_id}:wallet_forbidden",

@@ -603,7 +917,20 @@ defmodule WandererApp.Character.Tracker do
            {:error, :skipped}

          {:error, error} ->
            Logger.error("#{__MODULE__} failed to _update_wallet: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_wallet",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.error("ESI_ERROR: Character wallet tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_wallet"
            )

            WandererApp.Cache.put(
              "character:#{character_id}:wallet_forbidden",

@@ -614,7 +941,20 @@ defmodule WandererApp.Character.Tracker do
            {:error, :skipped}

          error ->
            Logger.error("#{__MODULE__} failed to _update_wallet: #{inspect(error)}")
            # Emit telemetry for tracking
            :telemetry.execute([:wanderer_app, :esi, :error], %{count: 1}, %{
              endpoint: "character_wallet",
              error_type: error,
              tracking_pool: tracking_pool,
              character_id: character_id
            })

            Logger.error("ESI_ERROR: Character wallet tracking failed",
              character_id: character_id,
              tracking_pool: tracking_pool,
              error_type: error,
              endpoint: "character_wallet"
            )

            WandererApp.Cache.put(
              "character:#{character_id}:wallet_forbidden",

@@ -739,7 +1079,7 @@ defmodule WandererApp.Character.Tracker do
         state,
         ship
       )
       when is_non_struct_map(ship) do
       when is_map(ship) and not is_struct(ship) do
    ship_type_id = Map.get(ship, "ship_type_id")
    ship_name = Map.get(ship, "ship_name")

@@ -810,7 +1150,6 @@ defmodule WandererApp.Character.Tracker do
       ),
       do:
         solar_system_id != new_solar_system_id ||
           solar_system_id != new_solar_system_id ||
           structure_id != new_structure_id ||
           station_id != new_station_id

@@ -1027,4 +1366,33 @@ defmodule WandererApp.Character.Tracker do
  defp get_online(%{"online" => online}), do: %{online: online}

  defp get_online(_), do: %{online: false}

  defp get_tracking_duration_minutes(character_id) do
    case WandererApp.Cache.lookup!("character:#{character_id}:map:*:tracking_start_time") do
      nil ->
        0

      start_time when is_struct(start_time, DateTime) ->
        DateTime.diff(DateTime.utc_now(), start_time, :minute)

      _ ->
        0
    end
  end

  # Telemetry handler for database pool monitoring
  def handle_pool_query(_event_name, measurements, metadata, _config) do
    queue_time = measurements[:queue_time]

    # Check if queue_time exists and exceeds threshold (in microseconds)
    # 100ms = 100_000 microseconds indicates pool exhaustion
    if queue_time && queue_time > 100_000 do
      Logger.warning("DB_POOL_EXHAUSTED: Database pool contention detected",
        queue_time_ms: div(queue_time, 1000),
        query: metadata[:query],
        source: metadata[:source],
        repo: metadata[:repo]
      )
    end
  end
end
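
The `handle_pool_query/4` handler added above pairs with the attach call left commented out in Application.start/1 (note that comment points at `WandererApp.Tracker` while the function is defined in `WandererApp.Character.Tracker`). Enabling it would look roughly like this, assuming Ecto's standard `[:app, :repo, :query]` event for WandererApp.Repo:

# Sketch: wire Ecto query telemetry into the pool-exhaustion handler.
# The handler id string is arbitrary.
:telemetry.attach(
  "wanderer-db-pool-handler",
  [:wanderer_app, :repo, :query],
  &WandererApp.Character.Tracker.handle_pool_query/4,
  nil
)
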
@@ -207,7 +207,7 @@ defmodule WandererApp.Character.TrackerManager.Impl do
      on_timeout: :kill_task,
      timeout: :timer.seconds(60)
    )
    |> Enum.map(fn result ->
    |> Enum.each(fn result ->
      case result do
        {:ok, {:stop, character_id}} ->
          Process.send_after(self(), {:stop_track, character_id}, 100)

@@ -278,7 +278,7 @@ defmodule WandererApp.Character.TrackerManager.Impl do
      on_timeout: :kill_task,
      timeout: :timer.seconds(30)
    )
    |> Enum.map(fn _result -> :ok end)
    |> Enum.each(fn _result -> :ok end)

    state
  end
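
The `Enum.map` to `Enum.each` swaps here and in TrackerPool below are about intent: these pipelines run only for side effects and nobody consumes the returned list, so building one is wasted work. A tiny illustration:

results = [ok: 1, ok: 2]

# Enum.map allocates and returns a new list even when it is discarded.
Enum.map(results, fn r -> IO.inspect(r) end)

# Enum.each returns :ok, making the side-effect-only intent explicit.
Enum.each(results, fn r -> IO.inspect(r) end)
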
@@ -112,6 +112,9 @@ defmodule WandererApp.Character.TrackerPool do
  def handle_continue(:start, state) do
    Logger.info("#{@name} started")

    # Start message queue monitoring
    Process.send_after(self(), :monitor_message_queue, :timer.seconds(30))

    Phoenix.PubSub.subscribe(
      WandererApp.PubSub,
      "server_status"

@@ -133,6 +136,16 @@ defmodule WandererApp.Character.TrackerPool do
    {:noreply, state}
  end

  @impl true
  def handle_info(:monitor_message_queue, state) do
    monitor_message_queue(state)

    # Schedule next monitoring check
    Process.send_after(self(), :monitor_message_queue, :timer.seconds(30))

    {:noreply, state}
  end

  def handle_info({ref, result}, state) when is_reference(ref) do
    Process.demonitor(ref, [:flush])

@@ -163,7 +176,7 @@ defmodule WandererApp.Character.TrackerPool do

    try do
      characters
      |> Enum.map(fn character_id ->
      |> Enum.each(fn character_id ->
        WandererApp.TaskWrapper.start_link(WandererApp.Character.Tracker, :update_online, [
          character_id
        ])

@@ -384,7 +397,7 @@ defmodule WandererApp.Character.TrackerPool do

    try do
      characters
      |> Enum.map(fn character_id ->
      |> Enum.each(fn character_id ->
        WandererApp.TaskWrapper.start_link(WandererApp.Character.Tracker, :update_location, [
          character_id
        ])

@@ -421,7 +434,7 @@ defmodule WandererApp.Character.TrackerPool do

    try do
      characters
      |> Enum.map(fn character_id ->
      |> Enum.each(fn character_id ->
        WandererApp.TaskWrapper.start_link(WandererApp.Character.Tracker, :update_ship, [
          character_id
        ])

@@ -538,6 +551,39 @@ defmodule WandererApp.Character.TrackerPool do
    {:noreply, state}
  end

  defp monitor_message_queue(state) do
    try do
      {_, message_queue_len} = Process.info(self(), :message_queue_len)
      {_, memory} = Process.info(self(), :memory)

      # Alert on high message queue
      if message_queue_len > 50 do
        Logger.warning("GENSERVER_QUEUE_HIGH: Character tracker pool message queue buildup",
          pool_id: state.uuid,
          message_queue_length: message_queue_len,
          memory_bytes: memory,
          tracked_characters: length(state.characters)
        )

        # Emit telemetry
        :telemetry.execute(
          [:wanderer_app, :character, :tracker_pool, :queue_buildup],
          %{
            message_queue_length: message_queue_len,
            memory_bytes: memory
          },
          %{
            pool_id: state.uuid,
            tracked_characters: length(state.characters)
          }
        )
      end
    rescue
      error ->
        Logger.debug("Failed to monitor message queue: #{inspect(error)}")
    end
  end

  defp via_tuple(uuid) do
    {:via, Registry, {@unique_registry, Module.concat(__MODULE__, uuid)}}
  end
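
The queue monitoring above uses the common self-scheduling tick pattern: the process arms a `Process.send_after` timer and each `handle_info` pass re-arms the next one. Stripped to its skeleton (module name and threshold are placeholders):

defmodule Example.SelfMonitor do
  use GenServer
  require Logger

  @interval :timer.seconds(30)

  def start_link(opts), do: GenServer.start_link(__MODULE__, opts, name: __MODULE__)

  @impl true
  def init(state) do
    # Arm the first tick; each tick re-arms the next one.
    Process.send_after(self(), :tick, @interval)
    {:ok, state}
  end

  @impl true
  def handle_info(:tick, state) do
    {_, queue_len} = Process.info(self(), :message_queue_len)
    if queue_len > 50, do: Logger.warning("queue buildup: #{queue_len}")

    Process.send_after(self(), :tick, @interval)
    {:noreply, state}
  end
end
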
297  lib/wanderer_app/database_setup.ex  (new file)
@@ -0,0 +1,297 @@
defmodule WandererApp.DatabaseSetup do
  @moduledoc """
  Database setup utilities for the test environment.

  This module provides functions to:
  - Create and drop test databases
  - Run migrations
  - Seed test data
  - Reset database state between tests
  """

  require Logger

  alias WandererApp.Repo
  alias Ecto.Adapters.SQL

  @test_db_name "wanderer_test"

  @doc """
  Sets up the test database from scratch.
  Creates the database, runs migrations, and sets up initial data.
  """
  def setup_test_database do
    with :ok <- ensure_database_exists(),
         :ok <- run_migrations(),
         :ok <- verify_setup() do
      Logger.info("✅ Test database setup completed successfully")
      :ok
    else
      {:error, reason} ->
        Logger.error("❌ Test database setup failed: #{inspect(reason)}")
        {:error, reason}
    end
  end

  @doc """
  Ensures the test database exists, creating it if necessary.
  """
  def ensure_database_exists do
    case create_database() do
      :ok ->
        Logger.info("📋 Test database ready")
        :ok

      {:error, :already_exists} ->
        Logger.info("📋 Test database already exists")
        :ok

      {:error, reason} ->
        Logger.error("❌ Failed to create test database: #{inspect(reason)}")
        {:error, reason}
    end
  end

  @doc """
  Creates the test database.
  """
  def create_database do
    repo_config = Repo.config()
    database = Keyword.get(repo_config, :database)

    case database do
      nil ->
        {:error, :no_database_configured}

      db_name ->
        create_database_if_not_exists(db_name, repo_config)
    end
  end

  @doc """
  Drops the test database. Use with caution!
  """
  def drop_database do
    repo_config = Repo.config()
    database = Keyword.get(repo_config, :database)

    Logger.warning("🗑️ Dropping test database: #{database}")

    # Stop the repo first
    if Process.whereis(Repo) do
      Supervisor.terminate_child(WandererApp.Supervisor, Repo)
    end

    # Drop the database
    config_without_db = Keyword.put(repo_config, :database, nil)

    case SQL.query(
           Ecto.Adapters.Postgres,
           "DROP DATABASE IF EXISTS \"#{database}\"",
           [],
           config_without_db
         ) do
      {:ok, _} ->
        Logger.info("✅ Test database dropped successfully")
        :ok

      {:error, reason} ->
        Logger.error("❌ Failed to drop test database: #{inspect(reason)}")
        {:error, reason}
    end
  end

  @doc """
  Runs all pending migrations on the test database.
  """
  def run_migrations do
    Logger.info("🏗️ Running migrations on test database...")

    case Ecto.Migrator.run(Repo, migrations_path(), :up, all: true) do
      migrations when is_list(migrations) ->
        Logger.info("✅ Migrations completed: #{length(migrations)} migrations applied")
        :ok

      {:error, reason} ->
        Logger.error("❌ Migration failed: #{inspect(reason)}")
        {:error, reason}
    end
  end

  @doc """
  Rolls back the last migration.
  """
  def rollback_migration(steps \\ 1) do
    Logger.info("⏪ Rolling back #{steps} migration(s)...")

    case Ecto.Migrator.run(Repo, migrations_path(), :down, step: steps) do
      migrations when is_list(migrations) ->
        Logger.info("✅ Rollback completed: #{length(migrations)} migrations rolled back")
        :ok

      {:error, reason} ->
        Logger.error("❌ Rollback failed: #{inspect(reason)}")
        {:error, reason}
    end
  end

  @doc """
  Resets the test database to a clean state.
  """
  def reset_database do
    Logger.info("🔄 Resetting test database...")

    with :ok <- truncate_all_tables(),
         :ok <- reset_sequences() do
      Logger.info("✅ Database reset completed")
      :ok
    else
      {:error, reason} ->
        Logger.error("❌ Database reset failed: #{inspect(reason)}")
        {:error, reason}
    end
  end

  @doc """
  Seeds the database with test fixtures.
  """
  def seed_test_data do
    Logger.info("🌱 Seeding test data...")

    try do
      # Add your test data seeding logic here
      # For example:
      # WandererAppWeb.Factory.create_test_scenario()

      Logger.info("✅ Test data seeded successfully")
      :ok
    rescue
      error ->
        Logger.error("❌ Failed to seed test data: #{inspect(error)}")
        {:error, error}
    end
  end

  @doc """
  Verifies that the database setup is correct.
  """
  def verify_setup do
    Logger.info("🔍 Verifying database setup...")

    try do
      # Test basic connectivity
      SQL.query!(Repo, "SELECT 1", [])

      # Verify key tables exist
      verify_table_exists("users")
      verify_table_exists("characters")
      verify_table_exists("maps")

      Logger.info("✅ Database verification completed")
      :ok
    rescue
      error ->
        Logger.error("❌ Database verification failed: #{inspect(error)}")
        {:error, error}
    end
  end

  # Private functions

  defp create_database_if_not_exists(database, repo_config) do
    config_without_db = Keyword.put(repo_config, :database, nil)

    case SQL.query(
           Ecto.Adapters.Postgres,
           "CREATE DATABASE \"#{database}\"",
           [],
           config_without_db
         ) do
      {:ok, _} ->
        :ok

      {:error, %{postgres: %{code: :duplicate_database}}} ->
        {:error, :already_exists}

      {:error, reason} ->
        {:error, reason}
    end
  end

  defp truncate_all_tables do
    tables = get_all_tables()

    if length(tables) > 0 do
      tables_sql = Enum.join(tables, ", ")
      SQL.query!(Repo, "TRUNCATE TABLE #{tables_sql} RESTART IDENTITY CASCADE", [])
    end

    :ok
  end

  defp reset_sequences do
    # Reset any sequences that might not be handled by RESTART IDENTITY
    sequences = get_all_sequences()

    Enum.each(sequences, fn sequence ->
      SQL.query!(Repo, "ALTER SEQUENCE #{sequence} RESTART WITH 1", [])
    end)

    :ok
  end

  defp get_all_tables do
    result =
      SQL.query!(
        Repo,
        """
        SELECT tablename
        FROM pg_tables
        WHERE schemaname = 'public'
        AND tablename NOT LIKE '%_pkey'
        AND tablename != 'schema_migrations'
        """,
        []
      )

    result.rows |> List.flatten()
  end

  defp get_all_sequences do
    result =
      SQL.query!(
        Repo,
        """
        SELECT sequence_name
        FROM information_schema.sequences
        WHERE sequence_schema = 'public'
        """,
        []
      )

    result.rows |> List.flatten()
  end

  defp verify_table_exists(table_name) do
    result =
      SQL.query!(
        Repo,
        """
        SELECT COUNT(*)
        FROM information_schema.tables
        WHERE table_schema = 'public'
        AND table_name = $1
        """,
        [table_name]
      )

    case result.rows do
      [[1]] -> :ok
      _ -> raise "Table #{table_name} does not exist"
    end
  end

  defp migrations_path do
    Application.app_dir(:wanderer_app, "priv/repo/migrations")
  end
end
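
A plausible way to use the new module from `test/test_helper.exs` (whether the project actually wires it there is not shown in this diff):

# test/test_helper.exs (hypothetical wiring)
ExUnit.start()

case WandererApp.DatabaseSetup.setup_test_database() do
  :ok -> :ok
  {:error, reason} -> raise "test database setup failed: #{inspect(reason)}"
end
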
@@ -16,6 +16,7 @@ defmodule WandererApp.Env do
  def invites, do: get_key(:invites, false)

  def map_subscriptions_enabled?, do: get_key(:map_subscriptions_enabled, false)
  def websocket_events_enabled?, do: get_key(:websocket_events_enabled, false)
  def public_api_disabled?, do: get_key(:public_api_disabled, false)

  @decorate cacheable(

@@ -47,6 +48,16 @@ defmodule WandererApp.Env do
            )
  def restrict_maps_creation?, do: get_key(:restrict_maps_creation, false)

  def sse_enabled? do
    Application.get_env(@app, :sse, [])
    |> Keyword.get(:enabled, false)
  end

  def webhooks_enabled? do
    Application.get_env(@app, :external_events, [])
    |> Keyword.get(:webhooks_enabled, false)
  end

  @decorate cacheable(
              cache: WandererApp.Cache,
              key: "map-connection-auto-expire-hours"
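
`sse_enabled?/0` and `webhooks_enabled?/0` read plain keyword config, so enabling the external-events stack is a config change along these lines (the file name and values are examples; `@app` is assumed to be `:wanderer_app`, matching the `Application.get_env(:wanderer_app, ...)` calls elsewhere in this diff):

# config/runtime.exs (illustrative)
config :wanderer_app, :sse, enabled: true

config :wanderer_app, :external_events,
  webhooks_enabled: true
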
@@ -536,6 +536,36 @@ defmodule WandererApp.Esi.ApiClient do
        {:error, :not_found}

      {:ok, %{status: 420, headers: headers} = _error} ->
        # Extract rate limit information from headers
        reset_seconds = Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()
        remaining = Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()

        # Emit telemetry for rate limiting
        :telemetry.execute(
          [:wanderer_app, :esi, :rate_limited],
          %{
            count: 1,
            reset_duration:
              case Integer.parse(reset_seconds || "0") do
                {seconds, _} -> seconds * 1000
                _ -> 0
              end
          },
          %{
            method: "GET",
            path: path,
            reset_seconds: reset_seconds,
            remaining_requests: remaining
          }
        )

        Logger.warning("ESI_RATE_LIMITED: GET request rate limited",
          method: "GET",
          path: path,
          reset_seconds: reset_seconds,
          remaining_requests: remaining
        )

        {:error, :error_limited, headers}

      {:ok, %{status: status} = _error} when status in [401, 403] ->

@@ -592,6 +622,36 @@ defmodule WandererApp.Esi.ApiClient do
        {:error, :forbidden}

      {:ok, %{status: 420, headers: headers} = _error} ->
        # Extract rate limit information from headers
        reset_seconds = Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()
        remaining = Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()

        # Emit telemetry for rate limiting
        :telemetry.execute(
          [:wanderer_app, :esi, :rate_limited],
          %{
            count: 1,
            reset_duration:
              case Integer.parse(reset_seconds || "0") do
                {seconds, _} -> seconds * 1000
                _ -> 0
              end
          },
          %{
            method: "POST",
            path: url,
            reset_seconds: reset_seconds,
            remaining_requests: remaining
          }
        )

        Logger.warning("ESI_RATE_LIMITED: POST request rate limited",
          method: "POST",
          path: url,
          reset_seconds: reset_seconds,
          remaining_requests: remaining
        )

        {:error, :error_limited, headers}

      {:ok, %{status: status}} ->

@@ -630,6 +690,36 @@ defmodule WandererApp.Esi.ApiClient do
        {:error, :forbidden}

      {:ok, %{status: 420, headers: headers} = _error} ->
        # Extract rate limit information from headers
        reset_seconds = Map.get(headers, "x-esi-error-limit-reset", ["unknown"]) |> List.first()
        remaining = Map.get(headers, "x-esi-error-limit-remain", ["unknown"]) |> List.first()

        # Emit telemetry for rate limiting
        :telemetry.execute(
          [:wanderer_app, :esi, :rate_limited],
          %{
            count: 1,
            reset_duration:
              case Integer.parse(reset_seconds || "0") do
                {seconds, _} -> seconds * 1000
                _ -> 0
              end
          },
          %{
            method: "POST_ESI",
            path: url,
            reset_seconds: reset_seconds,
            remaining_requests: remaining
          }
        )

        Logger.warning("ESI_RATE_LIMITED: POST ESI request rate limited",
          method: "POST_ESI",
          path: url,
          reset_seconds: reset_seconds,
          remaining_requests: remaining
        )

        {:error, :error_limited, headers}

      {:ok, %{status: status}} ->

@@ -695,9 +785,22 @@ defmodule WandererApp.Esi.ApiClient do
         {:ok, %OAuth2.AccessToken{} = token},
         character,
         character_id,
         _expires_at,
         expires_at,
         scopes
       ) do
    # Log token refresh success with timing info
    expires_at_datetime = DateTime.from_unix!(expires_at)
    time_since_expiry = DateTime.diff(DateTime.utc_now(), expires_at_datetime, :second)

    Logger.debug(
      fn ->
        "TOKEN_REFRESH_SUCCESS: Character token refreshed successfully"
      end,
      character_id: character_id,
      time_since_expiry_seconds: time_since_expiry,
      new_expires_at: token.expires_at
    )

    {:ok, _character} =
      character
      |> WandererApp.Api.Character.update(%{

@@ -727,8 +830,23 @@ defmodule WandererApp.Esi.ApiClient do
         expires_at,
         scopes
       ) do
    time_since_expiry = DateTime.diff(DateTime.utc_now(), expires_at, :second)

    Logger.warning("TOKEN_REFRESH_FAILED: Invalid grant error during token refresh",
      character_id: character_id,
      error_message: error_message,
      time_since_expiry_seconds: time_since_expiry,
      original_expires_at: expires_at
    )

    # Emit telemetry for token refresh failures
    :telemetry.execute([:wanderer_app, :token, :refresh_failed], %{count: 1}, %{
      character_id: character_id,
      error_type: "invalid_grant",
      time_since_expiry: time_since_expiry
    })

    invalidate_character_tokens(character, character_id, expires_at, scopes)
    Logger.warning("Failed to refresh token for #{character_id}: #{error_message}")
    {:error, :invalid_grant}
  end

@@ -739,7 +857,22 @@ defmodule WandererApp.Esi.ApiClient do
         expires_at,
         scopes
       ) do
    Logger.warning("Failed to refresh token for #{character_id}: #{inspect(error)}")
    time_since_expiry = DateTime.diff(DateTime.utc_now(), expires_at, :second)

    Logger.warning("TOKEN_REFRESH_FAILED: Connection refused during token refresh",
      character_id: character_id,
      error: inspect(error),
      time_since_expiry_seconds: time_since_expiry,
      original_expires_at: expires_at
    )

    # Emit telemetry for connection failures
    :telemetry.execute([:wanderer_app, :token, :refresh_failed], %{count: 1}, %{
      character_id: character_id,
      error_type: "connection_refused",
      time_since_expiry: time_since_expiry
    })

    {:error, :econnrefused}
  end
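
All three 420 branches above parse the same pair of ESI error-limit headers. A condensed sketch of that parsing, with the headers shaped as the `{:ok, %{headers: ...}}` responses suggest (list-valued keys; the helper name is invented):

# Extract and normalize ESI error-limit headers; "unknown" survives as-is,
# numeric strings become a millisecond reset budget.
defp esi_error_limit(headers) do
  reset_seconds = headers |> Map.get("x-esi-error-limit-reset", ["unknown"]) |> List.first()
  remaining = headers |> Map.get("x-esi-error-limit-remain", ["unknown"]) |> List.first()

  reset_ms =
    case Integer.parse(reset_seconds || "0") do
      {seconds, _} -> seconds * 1000
      _ -> 0
    end

  %{reset_seconds: reset_seconds, remaining: remaining, reset_ms: reset_ms}
end
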
@@ -345,6 +345,7 @@ defmodule WandererApp.EveDataService do

  defp get_sun_type_id(sun_type_id) do
    case sun_type_id do
      nil -> 0
      "None" -> 0
      _ -> sun_type_id |> Integer.parse() |> elem(0)
    end
138  lib/wanderer_app/external_events/acl_event_broadcaster.ex  (new file)
@@ -0,0 +1,138 @@
defmodule WandererApp.ExternalEvents.AclEventBroadcaster do
  @moduledoc """
  Shared module for broadcasting ACL member events to all maps that use a specific ACL.

  This module extracts the common broadcasting logic that was duplicated between
  access_list_member_api_controller.ex and access_lists_live.ex to maintain DRY principles.
  """

  require Logger

  @doc """
  Broadcasts an ACL member event to all maps that use the specified ACL.

  ## Parameters

  - `acl_id` - The ID of the access list
  - `member` - The ACL member data structure
  - `event_type` - The type of event (:acl_member_added, :acl_member_updated, :acl_member_removed)

  ## Example

      broadcast_member_event("acl-123", member, :acl_member_added)
  """
  @spec broadcast_member_event(String.t(), map(), atom()) :: :ok | {:error, term()}
  def broadcast_member_event(acl_id, member, event_type) do
    # Validate member data
    with :ok <- validate_member(member),
         :ok <- validate_event_type(event_type) do
      Logger.debug(fn ->
        "Broadcasting ACL member event: #{event_type} for member #{member.name} (#{member.id}) in ACL #{acl_id}"
      end)

      # Find all maps that use this ACL
      case Ash.read(
             WandererApp.Api.MapAccessList
             |> Ash.Query.for_read(:read_by_acl, %{acl_id: acl_id})
           ) do
        {:ok, map_acls} ->
          Logger.debug(fn ->
            "Found #{length(map_acls)} maps using ACL #{acl_id}: #{inspect(Enum.map(map_acls, & &1.map_id))}"
          end)

          # Get the member type and EVE ID
          {member_type, eve_id} = get_member_type_and_id(member)

          # Skip broadcasting if no valid EVE ID
          if is_nil(member_type) || is_nil(eve_id) do
            Logger.warning("Cannot broadcast event for member without EVE ID: #{member.id}")
            {:error, :no_eve_id}
          else
            # Build the event payload
            payload = %{
              acl_id: acl_id,
              member_id: member.id,
              member_name: member.name,
              member_type: member_type,
              eve_id: eve_id,
              role: member.role
            }

            Logger.debug(fn ->
              "Broadcasting #{event_type} event with payload: #{inspect(payload)}"
            end)

            # Broadcast to each map
            Enum.each(map_acls, fn map_acl ->
              Logger.debug(fn -> "Broadcasting #{event_type} to map #{map_acl.map_id}" end)
              WandererApp.ExternalEvents.broadcast(map_acl.map_id, event_type, payload)
            end)

            Logger.debug(fn ->
              "Successfully broadcast #{event_type} event to #{length(map_acls)} maps"
            end)

            :ok
          end

        {:error, error} ->
          Logger.error("Failed to find maps for ACL #{acl_id}: #{inspect(error)}")
          {:error, {:map_lookup_failed, error}}
      end
    else
      error -> error
    end
  end

  # Private helper functions

  defp validate_member(member) do
    cond do
      is_nil(member) ->
        {:error, :member_is_nil}

      not is_map(member) ->
        {:error, :member_not_map}

      is_nil(Map.get(member, :id)) ->
        {:error, :member_id_missing}

      is_nil(Map.get(member, :name)) ->
        {:error, :member_name_missing}

      is_nil(Map.get(member, :role)) ->
        {:error, :member_role_missing}

      Map.get(member, :role) not in [:admin, :manager, :member, :viewer, :blocked] ->
        {:error, {:invalid_role, Map.get(member, :role)}}

      true ->
        :ok
    end
  end

  defp validate_event_type(event_type) do
    if event_type in [:acl_member_added, :acl_member_updated, :acl_member_removed] do
      :ok
    else
      {:error, {:invalid_event_type, event_type}}
    end
  end

  defp get_member_type_and_id(member) do
    cond do
      member.eve_character_id ->
        {"character", member.eve_character_id}

      member.eve_corporation_id ->
        {"corporation", member.eve_corporation_id}

      member.eve_alliance_id ->
        {"alliance", member.eve_alliance_id}

      true ->
        # Handle the case when no EVE IDs are set
        {nil, nil}
    end
  end
end
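
On the caller side, as the moduledoc's example implies (the `acl` and `member` bindings and the error handling are illustrative; the member fields follow the validations above):

# Hypothetical call site after an ACL membership change:
case WandererApp.ExternalEvents.AclEventBroadcaster.broadcast_member_event(
       acl.id,
       member,
       :acl_member_updated
     ) do
  :ok -> :ok
  {:error, reason} -> Logger.warning("ACL broadcast skipped: #{inspect(reason)}")
end
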
229  lib/wanderer_app/external_events/event.ex  (new file)
@@ -0,0 +1,229 @@
defmodule WandererApp.ExternalEvents.Event do
  @moduledoc """
  Event struct for external webhook and WebSocket delivery.

  This is completely separate from the internal PubSub event system
  and is only used for external client notifications.
  """

  @type event_type ::
          :add_system
          | :deleted_system
          | :system_renamed
          | :system_metadata_changed
          | :signatures_updated
          | :signature_added
          | :signature_removed
          | :connection_added
          | :connection_removed
          | :connection_updated
          | :character_added
          | :character_removed
          | :character_updated
          | :map_kill
          | :acl_member_added
          | :acl_member_removed
          | :acl_member_updated
          | :rally_point_added
          | :rally_point_removed

  @type t :: %__MODULE__{
          # ULID for ordering
          id: String.t(),
          # Map identifier
          map_id: String.t(),
          # Event type
          type: event_type(),
          # Event-specific data
          payload: map(),
          # When the event occurred
          timestamp: DateTime.t()
        }

  defstruct [:id, :map_id, :type, :payload, :timestamp]

  @doc """
  Creates a new external event with ULID for ordering.

  Validates that the event_type is supported before creating the event.
  """
  @spec new(String.t(), event_type(), map()) :: t() | {:error, :invalid_event_type}
  def new(map_id, event_type, payload) when is_binary(map_id) and is_map(payload) do
    if valid_event_type?(event_type) do
      %__MODULE__{
        id: Ulid.generate(System.system_time(:millisecond)),
        map_id: map_id,
        type: event_type,
        payload: payload,
        timestamp: DateTime.utc_now()
      }
    else
      raise ArgumentError,
            "Invalid event type: #{inspect(event_type)}. Must be one of: #{supported_event_types() |> Enum.map(&to_string/1) |> Enum.join(", ")}"
    end
  end

  @doc """
  Converts an event to JSON format for delivery.
  """
  @spec to_json(t()) :: map()
  def to_json(%__MODULE__{} = event) do
    %{
      "id" => event.id,
      "type" => to_string(event.type),
      "map_id" => event.map_id,
      "timestamp" => DateTime.to_iso8601(event.timestamp),
      "payload" => serialize_payload(event.payload)
    }
  end
# Convert Ash structs and other complex types to plain maps
|
||||
defp serialize_payload(payload) when is_struct(payload) do
|
||||
serialize_payload(payload, MapSet.new())
|
||||
end
|
||||
|
||||
defp serialize_payload(payload) when is_map(payload) do
|
||||
serialize_payload(payload, MapSet.new())
|
||||
end
|
||||
|
||||
# Define allowlisted fields for different struct types
|
||||
@system_fields [
|
||||
:id,
|
||||
:solar_system_id,
|
||||
:name,
|
||||
:position_x,
|
||||
:position_y,
|
||||
:visible,
|
||||
:locked,
|
||||
# ADD
|
||||
:temporary_name,
|
||||
# ADD
|
||||
:labels,
|
||||
# ADD
|
||||
:description,
|
||||
# ADD
|
||||
:status
|
||||
]
|
||||
@character_fields [
|
||||
:id,
|
||||
:character_id,
|
||||
:character_eve_id,
|
||||
:name,
|
||||
:corporation_id,
|
||||
:alliance_id,
|
||||
:ship_type_id,
|
||||
# ADD: Ship name for external clients
|
||||
:ship_name,
|
||||
:online,
|
||||
# ADD: Character location
|
||||
:solar_system_id,
|
||||
# ADD: Structure location
|
||||
:structure_id,
|
||||
# ADD: Station location
|
||||
:station_id
|
||||
]
|
||||
@connection_fields [
|
||||
:id,
|
||||
:source_id,
|
||||
:target_id,
|
||||
:connection_type,
|
||||
:time_status,
|
||||
:mass_status,
|
||||
:ship_size
|
||||
]
|
||||
@signature_fields [:id, :signature_id, :name, :type, :group]
|
||||
|
||||
# Overloaded versions with visited tracking
|
||||
  defp serialize_payload(payload, visited) when is_struct(payload) do
    # Check for circular reference
    ref = {payload.__struct__, Map.get(payload, :id)}

    if MapSet.member?(visited, ref) do
      # Return a reference indicator instead of recursing.
      # inspect/1 is used because tuples do not implement String.Chars.
      %{"__ref__" => inspect(ref)}
    else
      visited = MapSet.put(visited, ref)

      # Get allowlisted fields based on struct type
      allowed_fields = get_allowed_fields(payload.__struct__)

      payload
      |> Map.from_struct()
      |> Map.take(allowed_fields)
      |> serialize_fields(visited)
    end
  end

  # Kept adjacent to the struct clause above so the compiler sees
  # contiguous clauses for serialize_payload/2
  defp serialize_payload(payload, visited) when is_map(payload) do
    Map.new(payload, fn {k, v} -> {to_string(k), serialize_value(v, visited)} end)
  end

  # Get allowed fields based on struct type
  defp get_allowed_fields(module) do
    module_name = module |> Module.split() |> List.last()

    case module_name do
      "MapSystem" -> @system_fields
      "MapCharacter" -> @character_fields
      "MapConnection" -> @connection_fields
      "MapSystemSignature" -> @signature_fields
      # Default minimal fields for unknown types
      _ -> [:id, :name]
    end
  end

  defp serialize_fields(fields, visited) do
    Enum.reduce(fields, %{}, fn {k, v}, acc ->
      if is_nil(v) do
        acc
      else
        Map.put(acc, to_string(k), serialize_value(v, visited))
      end
    end)
  end

  defp serialize_value(%DateTime{} = dt, _visited), do: DateTime.to_iso8601(dt)
  defp serialize_value(%NaiveDateTime{} = dt, _visited), do: NaiveDateTime.to_iso8601(dt)
  defp serialize_value(v, visited) when is_struct(v), do: serialize_payload(v, visited)
  defp serialize_value(v, visited) when is_map(v), do: serialize_payload(v, visited)
  defp serialize_value(v, visited) when is_list(v), do: Enum.map(v, &serialize_value(&1, visited))
  defp serialize_value(v, _visited), do: v

  @doc """
  Returns all supported event types.
  """
  @spec supported_event_types() :: [event_type()]
  def supported_event_types do
    [
      :add_system,
      :deleted_system,
      :system_renamed,
      :system_metadata_changed,
      :signatures_updated,
      :signature_added,
      :signature_removed,
      :connection_added,
      :connection_removed,
      :connection_updated,
      :character_added,
      :character_removed,
      :character_updated,
      :map_kill,
      :acl_member_added,
      :acl_member_removed,
      :acl_member_updated,
      :rally_point_added,
      :rally_point_removed
    ]
  end

  @doc """
  Validates an event type.
  """
  @spec valid_event_type?(atom()) :: boolean()
  def valid_event_type?(event_type) when is_atom(event_type) do
    event_type in supported_event_types()
  end

  def valid_event_type?(_), do: false
end
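A minimal usage sketch for the module above (map id and payload values are hypothetical):

event = WandererApp.ExternalEvents.Event.new("map_123", :add_system, %{solar_system_id: 31000199})
event.type
#=> :add_system

# to_json/1 stringifies keys and ISO-8601-encodes the timestamp
WandererApp.ExternalEvents.Event.to_json(event)["type"]
#=> "add_system"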
132
lib/wanderer_app/external_events/event_filter.ex
Normal file
132
lib/wanderer_app/external_events/event_filter.ex
Normal file
@@ -0,0 +1,132 @@
defmodule WandererApp.ExternalEvents.EventFilter do
  @moduledoc """
  Event filtering logic for external event streams (WebSocket, SSE, webhooks).

  Handles parsing of event filters from client requests and matching events
  against those filters. Supports wildcard ("*") and comma-separated event lists.
  """

  @supported_events [
    # System events
    :add_system,
    :deleted_system,
    :system_renamed,
    :system_metadata_changed,
    # Connection events
    :connection_added,
    :connection_removed,
    :connection_updated,
    # Character events (existing)
    :character_added,
    :character_removed,
    :character_updated,
    # Character events (new for SSE)
    :character_location_changed,
    :character_online_status_changed,
    :character_ship_changed,
    :character_ready_status_changed,
    # Signature events
    :signature_added,
    :signature_removed,
    :signatures_updated,
    # Kill events
    :map_kill,
    # ACL events
    :acl_member_added,
    :acl_member_removed,
    :acl_member_updated,
    # Rally point events
    :rally_point_added,
    :rally_point_removed
  ]

  @type event_type :: atom()
  @type event_filter :: [event_type()]

  @doc """
  Parses event filter from client input.

  ## Examples

      iex> EventFilter.parse(nil)
      [:add_system, :deleted_system, ...] # all events

      iex> EventFilter.parse("*")
      [:add_system, :deleted_system, ...] # all events

      iex> EventFilter.parse("add_system,character_added")
      [:add_system, :character_added]

      iex> EventFilter.parse("invalid,add_system")
      [:add_system] # invalid events are filtered out
  """
  @spec parse(nil | String.t()) :: event_filter()
  def parse(nil), do: @supported_events
  def parse("*"), do: @supported_events
  def parse(""), do: @supported_events

  def parse(events) when is_binary(events) do
    events
    |> String.split(",")
    |> Enum.map(&String.trim/1)
    |> Enum.map(&to_event_atom/1)
    |> Enum.filter(&(&1 in @supported_events))
    |> Enum.uniq()
  end

  @doc """
  Checks if an event type matches the given filter.

  ## Examples

      iex> EventFilter.matches?(:add_system, [:add_system, :character_added])
      true

      iex> EventFilter.matches?(:map_kill, [:add_system, :character_added])
      false
  """
  @spec matches?(event_type() | String.t(), event_filter()) :: boolean()
  def matches?(event_type, filter) when is_list(filter) do
    # Convert string event types to atoms for comparison
    atom_event_type =
      case event_type do
        atom when is_atom(atom) ->
          atom

        string when is_binary(string) ->
          try do
            String.to_existing_atom(string)
          rescue
            ArgumentError -> nil
          end

        _ ->
          nil
      end

    # `!= nil and` (rather than `&&`) keeps the result a boolean,
    # as the spec promises; `nil && _` would return nil.
    atom_event_type != nil and atom_event_type in filter
  end
@doc """
|
||||
Returns all supported event types.
|
||||
"""
|
||||
@spec supported_events() :: event_filter()
|
||||
def supported_events, do: @supported_events
|
||||
|
||||
@doc """
|
||||
Validates if an event type is supported.
|
||||
"""
|
||||
@spec valid_event?(event_type()) :: boolean()
|
||||
def valid_event?(event_type) when is_atom(event_type) do
|
||||
event_type in @supported_events
|
||||
end
|
||||
|
||||
  # Helper to safely convert string to atom, returns nil for invalid atoms
  defp to_event_atom(event_string) do
    try do
      String.to_existing_atom(event_string)
    rescue
      ArgumentError -> nil
    end
  end
end
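How the parser and matcher above compose, sketched with hypothetical input (unknown names are dropped by parse/1):

alias WandererApp.ExternalEvents.EventFilter

filter = EventFilter.parse("add_system, map_kill, bogus_event")
#=> [:add_system, :map_kill]

EventFilter.matches?(:map_kill, filter)
#=> true
EventFilter.matches?("character_added", filter)
#=> false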
112
lib/wanderer_app/external_events/external_events.ex
Normal file
112
lib/wanderer_app/external_events/external_events.ex
Normal file
@@ -0,0 +1,112 @@
defmodule WandererApp.ExternalEvents do
  @moduledoc """
  External event system for SSE and webhook delivery.

  This system is completely separate from the internal Phoenix PubSub
  event system and does NOT modify any existing event flows.

  External events are delivered to:
  - SSE clients (Server-Sent Events)
  - HTTP webhooks via WebhookDispatcher

  ## Usage

      # From event producers, call this in ADDITION to existing broadcasts
      WandererApp.ExternalEvents.broadcast("map_123", :add_system, %{
        solar_system_id: 31000199,
        name: "J123456"
      })

  This is additive-only and does not replace any existing functionality.
  """

  alias WandererApp.ExternalEvents.{Event, MapEventRelay}

  require Logger

  @doc """
  Broadcasts an event to external clients only.

  This does NOT affect internal PubSub or LiveView handlers.
  It only delivers events to:
  - SSE clients (Server-Sent Events)
  - Configured webhook endpoints

  ## Parameters

  - `map_id`: The map identifier (string)
  - `event_type`: The event type atom (see `Event.event_type/0`)
  - `payload`: The event payload (map)

  ## Examples

      # System events
      WandererApp.ExternalEvents.broadcast("map_123", :add_system, %{
        solar_system_id: 31000199,
        name: "J123456"
      })

      # Kill events
      WandererApp.ExternalEvents.broadcast("map_123", :map_kill, %{
        killmail_id: 98765,
        victim_ship_type: "Rifter"
      })

  Returns `:ok` on success, or an `{:error, reason}` tuple if the event type
  is invalid or the relay is unavailable.
  """
  @spec broadcast(String.t(), Event.event_type(), map()) :: :ok | {:error, term()}
  def broadcast(map_id, event_type, payload) when is_binary(map_id) and is_map(payload) do
    # Build the log message inside the fn so it is only interpolated
    # when the debug level is enabled
    Logger.debug(fn -> "ExternalEvents.broadcast called - map: #{map_id}, type: #{event_type}" end)

    # Validate event type
    if Event.valid_event_type?(event_type) do
      # Create normalized event
      event = Event.new(map_id, event_type, payload)

      # Emit telemetry for monitoring
      :telemetry.execute(
        [:wanderer_app, :external_events, :broadcast],
        %{count: 1},
        %{map_id: map_id, event_type: event_type}
      )

      # Check if MapEventRelay is alive before sending
      if Process.whereis(MapEventRelay) do
        try do
          # Use call with timeout instead of cast for better error handling
          GenServer.call(MapEventRelay, {:deliver_event, event}, 5000)
          :ok
        catch
          :exit, {:timeout, _} ->
            Logger.error("Timeout delivering event to MapEventRelay for map #{map_id}")
            {:error, :timeout}

          :exit, reason ->
            Logger.error("Failed to deliver event to MapEventRelay: #{inspect(reason)}")
            {:error, reason}
        end
      else
        {:error, :relay_not_available}
      end
    else
      Logger.warning("Invalid external event type: #{inspect(event_type)}")
      {:error, :invalid_event_type}
    end
  end

  @doc """
  Lists all supported event types.
  """
  @spec supported_event_types() :: [Event.event_type()]
  def supported_event_types do
    Event.supported_event_types()
  end

  @doc """
  Validates an event type atom.
  """
  @spec valid_event_type?(atom()) :: boolean()
  def valid_event_type?(event_type) do
    Event.valid_event_type?(event_type)
  end
end
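Because broadcast/3 returns either :ok or an error tuple (per the spec above), callers that care about delivery can match on the result. A minimal sketch; the Logger calls assume `require Logger` in the calling module:

case WandererApp.ExternalEvents.broadcast(map_id, :connection_added, payload) do
  :ok -> :ok
  {:error, :relay_not_available} -> Logger.warning("Relay down; external event dropped")
  {:error, reason} -> Logger.error("External broadcast failed: #{inspect(reason)}")
end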
576
lib/wanderer_app/external_events/json_api_formatter.ex
Normal file
576
lib/wanderer_app/external_events/json_api_formatter.ex
Normal file
@@ -0,0 +1,576 @@
defmodule WandererApp.ExternalEvents.JsonApiFormatter do
  @moduledoc """
  JSON:API event formatter for real-time events.

  Converts internal event structures to JSON:API compliant format
  for consistency with the API specification.
  """

  alias WandererApp.ExternalEvents.Event

  @doc """
  Formats an event into JSON:API structure.

  Converts internal events to JSON:API format:
  - `data`: Resource object with type, id, attributes, relationships
  - `meta`: Event metadata (type, timestamp, etc.)
  - `links`: Related resource links where applicable
  """
  @spec format_event(Event.t()) :: map()
  def format_event(%Event{} = event) do
    %{
      "data" => format_resource_data(event),
      "meta" => format_event_meta(event),
      "links" => format_event_links(event)
    }
  end

  @doc """
  Formats a legacy event (map format) into JSON:API structure.

  Handles events that are already in map format from the existing system.
  """
  @spec format_legacy_event(map()) :: map()
  def format_legacy_event(event) when is_map(event) do
    %{
      "data" => format_legacy_resource_data(event),
      "meta" => format_legacy_event_meta(event),
      "links" => format_legacy_event_links(event)
    }
  end

  # Event-specific resource data formatting
  defp format_resource_data(%Event{type: :add_system, payload: payload} = event) do
    %{
      "type" => "map_systems",
      "id" => payload["system_id"] || payload[:system_id],
      "attributes" => %{
        "solar_system_id" => payload["solar_system_id"] || payload[:solar_system_id],
        "name" => payload["name"] || payload[:name],
        "locked" => payload["locked"] || payload[:locked],
        "x" => payload["x"] || payload[:x],
        "y" => payload["y"] || payload[:y],
        "created_at" => event.timestamp
      },
      "relationships" => %{
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :deleted_system, payload: payload} = event) do
    %{
      "type" => "map_systems",
      "id" => payload["system_id"] || payload[:system_id],
      "meta" => %{
        "deleted" => true,
        "deleted_at" => event.timestamp
      },
      "relationships" => %{
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :system_renamed, payload: payload} = event) do
    %{
      "type" => "map_systems",
      "id" => payload["system_id"] || payload[:system_id],
      "attributes" => %{
        "name" => payload["name"] || payload[:name],
        "updated_at" => event.timestamp
      },
      "relationships" => %{
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :system_metadata_changed, payload: payload} = event) do
    %{
      "type" => "map_systems",
      "id" => payload["system_id"] || payload[:system_id],
      "attributes" => %{
        "locked" => payload["locked"] || payload[:locked],
        "x" => payload["x"] || payload[:x],
        "y" => payload["y"] || payload[:y],
        "updated_at" => event.timestamp
      },
      "relationships" => %{
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :signature_added, payload: payload} = event) do
    %{
      "type" => "map_system_signatures",
      "id" => payload["signature_id"] || payload[:signature_id],
      "attributes" => %{
        "signature_id" => payload["signature_identifier"] || payload[:signature_identifier],
        "signature_type" => payload["signature_type"] || payload[:signature_type],
        "name" => payload["name"] || payload[:name],
        "created_at" => event.timestamp
      },
      "relationships" => %{
        "system" => %{
          "data" => %{
            "type" => "map_systems",
            "id" => payload["system_id"] || payload[:system_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :signature_removed, payload: payload} = event) do
    %{
      "type" => "map_system_signatures",
      "id" => payload["signature_id"] || payload[:signature_id],
      "meta" => %{
        "deleted" => true,
        "deleted_at" => event.timestamp
      },
      "relationships" => %{
        "system" => %{
          "data" => %{
            "type" => "map_systems",
            "id" => payload["system_id"] || payload[:system_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end
  defp format_resource_data(%Event{type: :connection_added, payload: payload} = event) do
    %{
      "type" => "map_connections",
      "id" => payload["connection_id"] || payload[:connection_id],
      "attributes" => %{
        "type" => payload["type"] || payload[:type],
        "time_status" => payload["time_status"] || payload[:time_status],
        "mass_status" => payload["mass_status"] || payload[:mass_status],
        "ship_size_type" => payload["ship_size_type"] || payload[:ship_size_type],
        "created_at" => event.timestamp
      },
      "relationships" => %{
        "solar_system_source" => %{
          "data" => %{
            "type" => "map_systems",
            "id" => payload["solar_system_source"] || payload[:solar_system_source]
          }
        },
        "solar_system_target" => %{
          "data" => %{
            "type" => "map_systems",
            "id" => payload["solar_system_target"] || payload[:solar_system_target]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :connection_removed, payload: payload} = event) do
    %{
      "type" => "map_connections",
      "id" => payload["connection_id"] || payload[:connection_id],
      "meta" => %{
        "deleted" => true,
        "deleted_at" => event.timestamp
      },
      "relationships" => %{
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :connection_updated, payload: payload} = event) do
    %{
      "type" => "map_connections",
      "id" => payload["connection_id"] || payload[:connection_id],
      "attributes" => %{
        "type" => payload["type"] || payload[:type],
        "time_status" => payload["time_status"] || payload[:time_status],
        "mass_status" => payload["mass_status"] || payload[:mass_status],
        "ship_size_type" => payload["ship_size_type"] || payload[:ship_size_type],
        "updated_at" => event.timestamp
      },
      "relationships" => %{
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :character_added, payload: payload} = event) do
    %{
      "type" => "characters",
      "id" => payload["character_id"] || payload[:character_id],
      "attributes" => %{
        "eve_id" => payload["eve_id"] || payload[:eve_id],
        "name" => payload["name"] || payload[:name],
        "corporation_name" => payload["corporation_name"] || payload[:corporation_name],
        "corporation_ticker" => payload["corporation_ticker"] || payload[:corporation_ticker],
        "added_at" => event.timestamp
      },
      "relationships" => %{
        "system" => %{
          "data" => %{
            "type" => "map_systems",
            "id" => payload["system_id"] || payload[:system_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :character_removed, payload: payload} = event) do
    %{
      "type" => "characters",
      "id" => payload["character_id"] || payload[:character_id],
      "meta" => %{
        "removed_from_system" => true,
        "removed_at" => event.timestamp
      },
      "relationships" => %{
        "system" => %{
          "data" => %{
            "type" => "map_systems",
            "id" => payload["system_id"] || payload[:system_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :character_updated, payload: payload} = event) do
    %{
      "type" => "characters",
      "id" => payload["character_id"] || payload[:character_id],
      "attributes" => %{
        "ship_type_id" => payload["ship_type_id"] || payload[:ship_type_id],
        "ship_name" => payload["ship_name"] || payload[:ship_name],
        "updated_at" => event.timestamp
      },
      "relationships" => %{
        "system" => %{
          "data" => %{
            "type" => "map_systems",
            "id" => payload["system_id"] || payload[:system_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :acl_member_added, payload: payload} = event) do
    %{
      "type" => "access_list_members",
      "id" => payload["member_id"] || payload[:member_id],
      "attributes" => %{
        "character_eve_id" => payload["character_eve_id"] || payload[:character_eve_id],
        "character_name" => payload["character_name"] || payload[:character_name],
        "role" => payload["role"] || payload[:role],
        "added_at" => event.timestamp
      },
      "relationships" => %{
        "access_list" => %{
          "data" => %{
            "type" => "access_lists",
            "id" => payload["access_list_id"] || payload[:access_list_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :acl_member_removed, payload: payload} = event) do
    %{
      "type" => "access_list_members",
      "id" => payload["member_id"] || payload[:member_id],
      "meta" => %{
        "deleted" => true,
        "deleted_at" => event.timestamp
      },
      "relationships" => %{
        "access_list" => %{
          "data" => %{
            "type" => "access_lists",
            "id" => payload["access_list_id"] || payload[:access_list_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :acl_member_updated, payload: payload} = event) do
    %{
      "type" => "access_list_members",
      "id" => payload["member_id"] || payload[:member_id],
      "attributes" => %{
        "role" => payload["role"] || payload[:role],
        "updated_at" => event.timestamp
      },
      "relationships" => %{
        "access_list" => %{
          "data" => %{
            "type" => "access_lists",
            "id" => payload["access_list_id"] || payload[:access_list_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end
  defp format_resource_data(%Event{type: :map_kill, payload: payload} = event) do
    %{
      "type" => "kills",
      "id" => payload["killmail_id"] || payload[:killmail_id],
      "attributes" => %{
        "killmail_id" => payload["killmail_id"] || payload[:killmail_id],
        "victim_character_name" =>
          payload["victim_character_name"] || payload[:victim_character_name],
        "victim_ship_type" => payload["victim_ship_type"] || payload[:victim_ship_type],
        "occurred_at" => payload["killmail_time"] || payload[:killmail_time] || event.timestamp
      },
      "relationships" => %{
        "system" => %{
          "data" => %{
            "type" => "map_systems",
            "id" => payload["system_id"] || payload[:system_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :rally_point_added, payload: payload} = event) do
    %{
      "type" => "rally_points",
      "id" => payload["rally_point_id"] || payload[:rally_point_id],
      "attributes" => %{
        "name" => payload["name"] || payload[:name],
        "description" => payload["description"] || payload[:description],
        "created_at" => event.timestamp
      },
      "relationships" => %{
        "system" => %{
          "data" => %{
            "type" => "map_systems",
            "id" => payload["system_id"] || payload[:system_id]
          }
        },
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  defp format_resource_data(%Event{type: :rally_point_removed, payload: payload} = event) do
    %{
      "type" => "rally_points",
      "id" => payload["rally_point_id"] || payload[:rally_point_id],
      "meta" => %{
        "deleted" => true,
        "deleted_at" => event.timestamp
      },
      "relationships" => %{
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  # Generic fallback for unknown event types
  defp format_resource_data(%Event{payload: payload} = event) do
    %{
      "type" => "events",
      "id" => event.id,
      "attributes" => payload,
      "relationships" => %{
        "map" => %{
          "data" => %{"type" => "maps", "id" => event.map_id}
        }
      }
    }
  end

  # Legacy event formatting (for events already in map format)
  defp format_legacy_resource_data(event) do
    event_type = event["type"] || "unknown"
    payload = event["payload"] || event
    map_id = event["map_id"]

    case event_type do
      "connected" ->
        %{
          "type" => "connection_status",
          "id" => event["id"] || Ulid.generate(),
          "attributes" => %{
            "status" => "connected",
            "server_time" => payload["server_time"],
            "connected_at" => payload["server_time"]
          },
          "relationships" => %{
            "map" => %{
              "data" => %{"type" => "maps", "id" => map_id}
            }
          }
        }

      _ ->
        # Use existing payload structure but wrap it in JSON:API format
        %{
          "type" => "events",
          "id" => event["id"] || Ulid.generate(),
          "attributes" => payload,
          "relationships" => %{
            "map" => %{
              "data" => %{"type" => "maps", "id" => map_id}
            }
          }
        }
    end
  end

  # Event metadata formatting
  defp format_event_meta(%Event{} = event) do
    %{
      "event_type" => event.type,
      "event_action" => determine_action(event.type),
      "timestamp" => DateTime.to_iso8601(event.timestamp),
      "map_id" => event.map_id,
      "event_id" => event.id
    }
  end

  defp format_legacy_event_meta(event) do
    %{
      "event_type" => event["type"],
      "event_action" => determine_legacy_action(event["type"]),
      "timestamp" => event["timestamp"] || DateTime.to_iso8601(DateTime.utc_now()),
      "map_id" => event["map_id"],
      "event_id" => event["id"]
    }
  end

  # Event links formatting
  defp format_event_links(%Event{map_id: map_id}) do
    %{
      "related" => "/api/v1/maps/#{map_id}",
      "self" => "/api/v1/maps/#{map_id}/events/stream"
    }
  end

  defp format_legacy_event_links(event) do
    map_id = event["map_id"]

    %{
      "related" => "/api/v1/maps/#{map_id}",
      "self" => "/api/v1/maps/#{map_id}/events/stream"
    }
  end

  # Helper functions
  defp determine_action(event_type) do
    case event_type do
      type
      when type in [
             :add_system,
             :signature_added,
             :connection_added,
             :character_added,
             :acl_member_added,
             :rally_point_added
           ] ->
        "created"

      type
      when type in [
             :deleted_system,
             :signature_removed,
             :connection_removed,
             :character_removed,
             :acl_member_removed,
             :rally_point_removed
           ] ->
        "deleted"

      type
      when type in [
             :system_renamed,
             :system_metadata_changed,
             :connection_updated,
             :character_updated,
             :acl_member_updated
           ] ->
        "updated"

      :signatures_updated ->
        "bulk_updated"

      :map_kill ->
        "created"

      _ ->
        "unknown"
    end
  end

  defp determine_legacy_action(event_type) do
    case event_type do
      "connected" ->
        "connected"

      _ ->
        try do
          determine_action(String.to_existing_atom(event_type))
        rescue
          ArgumentError -> "unknown"
        end
    end
  end
end
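To make the JSON:API envelope above concrete: a formatted :deleted_system event comes out roughly as follows (ids and timestamp are hypothetical, and the top-level meta map is abbreviated):

%{
  "data" => %{
    "type" => "map_systems",
    "id" => "sys_1",
    "meta" => %{"deleted" => true, "deleted_at" => ~U[2025-01-01 00:00:00Z]},
    "relationships" => %{"map" => %{"data" => %{"type" => "maps", "id" => "map_123"}}}
  },
  "meta" => %{"event_type" => :deleted_system, "event_action" => "deleted", ...},
  "links" => %{
    "related" => "/api/v1/maps/map_123",
    "self" => "/api/v1/maps/map_123/events/stream"
  }
}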
253
lib/wanderer_app/external_events/map_event_relay.ex
Normal file
253
lib/wanderer_app/external_events/map_event_relay.ex
Normal file
@@ -0,0 +1,253 @@
defmodule WandererApp.ExternalEvents.MapEventRelay do
  @moduledoc """
  GenServer that handles delivery of external events to SSE and webhook clients.

  This system is completely separate from internal Phoenix PubSub and does NOT
  modify any existing event flows. It only handles external client delivery.

  Responsibilities:
  - Store events in a time-bounded ETS buffer for backfill
  - Broadcast to SSE clients
  - Dispatch to webhook endpoints
  - Provide event history for reconnecting clients

  Events are stored in a single ETS table keyed by ULID, so they iterate in
  insertion order and can be filtered by map for backfill support. Events
  older than 10 minutes are automatically cleaned up.
  """
  use GenServer

  alias WandererApp.ExternalEvents.Event
  alias WandererApp.ExternalEvents.WebhookDispatcher

  require Logger

  @cleanup_interval :timer.minutes(2)
  @event_retention_minutes 10

  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @doc """
  Retrieves events since a given timestamp for backfill.
  """
  @spec get_events_since(String.t(), DateTime.t(), pos_integer()) :: [map()]
  def get_events_since(map_id, since_datetime, limit \\ 100) do
    GenServer.call(__MODULE__, {:get_events_since, map_id, since_datetime, limit})
  end

  @doc """
  Retrieves events since a given ULID for SSE backfill.
  """
  @spec get_events_since_ulid(String.t(), String.t(), pos_integer()) ::
          {:ok, [map()]} | {:error, term()}
  def get_events_since_ulid(map_id, since_ulid, limit \\ 1_000) do
    GenServer.call(__MODULE__, {:get_events_since_ulid, map_id, since_ulid, limit})
  end

  @impl true
  def init(_opts) do
    # Create ETS table for event storage.
    # Using ordered_set for ULID sorting, public for read access.
    ets_table =
      :ets.new(:external_events, [
        :ordered_set,
        :public,
        :named_table,
        {:read_concurrency, true}
      ])

    # Schedule periodic cleanup
    schedule_cleanup()

    Logger.debug(fn -> "MapEventRelay started for external events" end)

    {:ok,
     %{
       ets_table: ets_table,
       event_count: 0
     }}
  end

  @impl true
  def handle_cast({:deliver_event, %Event{} = event}, state) do
    Logger.debug(fn ->
      "MapEventRelay received :deliver_event (cast) for map #{event.map_id}, type: #{event.type}"
    end)

    new_state = deliver_single_event(event, state)
    {:noreply, new_state}
  end
  @impl true
  def handle_call({:deliver_event, %Event{} = event}, _from, state) do
    Logger.debug(fn ->
      "MapEventRelay received :deliver_event (call) for map #{event.map_id}, type: #{event.type}"
    end)

    new_state = deliver_single_event(event, state)
    {:reply, :ok, new_state}
  end
  @impl true
  def handle_call({:get_events_since, map_id, since_datetime, limit}, _from, state) do
    events = get_events_from_ets(map_id, since_datetime, limit, state.ets_table)
    {:reply, events, state}
  end

  @impl true
  def handle_call({:get_events_since_ulid, map_id, since_ulid}, from, state) do
    handle_call({:get_events_since_ulid, map_id, since_ulid, 1_000}, from, state)
  end

  @impl true
  def handle_call({:get_events_since_ulid, map_id, since_ulid, limit}, _from, state) do
    # Get all events for this map and filter by ULID
    case validate_ulid(since_ulid) do
      :ok ->
        try do
          # Events are stored as {event_id, map_id, json_data}.
          # Select rows where map_id matches and event_id (ULID) > since_ulid.
          events =
            :ets.select(state.ets_table, [
              {{:"$1", :"$2", :"$3"}, [{:andalso, {:>, :"$1", since_ulid}, {:==, :"$2", map_id}}],
               [:"$3"]}
            ])
            |> Enum.take(limit)

          {:reply, {:ok, events}, state}
        rescue
          error in [ArgumentError] ->
            {:reply, {:error, {:ets_error, error}}, state}
        end

      {:error, :invalid_ulid} ->
        {:reply, {:error, :invalid_ulid}, state}
    end
  end

  @impl true
  def handle_info(:cleanup_events, state) do
    cleanup_old_events(state.ets_table)
    schedule_cleanup()
    {:noreply, state}
  end

  @impl true
  def handle_info(msg, state) do
    Logger.warning("MapEventRelay received unexpected message: #{inspect(msg)}")
    {:noreply, state}
  end

  defp deliver_single_event(%Event{} = event, state) do
    Logger.debug(fn ->
      "MapEventRelay.deliver_single_event processing event for map #{event.map_id}, type: #{event.type}"
    end)

    # Emit telemetry
    :telemetry.execute(
      [:wanderer_app, :external_events, :relay, :received],
      %{count: 1},
      %{map_id: event.map_id, event_type: event.type}
    )

    # 1. Store in ETS for backfill
    store_event(event, state.ets_table)

    # 2. Convert event to JSON once for all delivery methods
    event_json = Event.to_json(event)

    Logger.debug(fn ->
      "MapEventRelay converted event to JSON: #{String.slice(inspect(event_json), 0, 200)}..."
    end)

    # 3. Send to webhook subscriptions via WebhookDispatcher
    WebhookDispatcher.dispatch_event(event.map_id, event)

    # 4. Broadcast to SSE clients
    Logger.debug(fn -> "MapEventRelay broadcasting to SSE clients for map #{event.map_id}" end)
    WandererApp.ExternalEvents.SseStreamManager.broadcast_event(event.map_id, event_json)

    # Emit delivered telemetry
    :telemetry.execute(
      [:wanderer_app, :external_events, :relay, :delivered],
      %{count: 1},
      %{map_id: event.map_id, event_type: event.type}
    )

    %{state | event_count: state.event_count + 1}
  end

  defp store_event(%Event{} = event, ets_table) do
    # Store with ULID as key for ordering.
    # The value includes map_id for efficient filtering.
    :ets.insert(ets_table, {event.id, event.map_id, Event.to_json(event)})
  end

  defp get_events_from_ets(map_id, since_datetime, limit, ets_table) do
    # Convert the datetime to a ULID for comparison.
    # If no since_datetime is given, retrieve all events for the map.
    if since_datetime do
      since_ulid = datetime_to_ulid(since_datetime)

      # Get all events since the ULID, filtered by map_id
      :ets.select(ets_table, [
        {{:"$1", :"$2", :"$3"}, [{:andalso, {:>=, :"$1", since_ulid}, {:==, :"$2", map_id}}],
         [:"$3"]}
      ])
      |> Enum.take(limit)
    else
      # Get all events for the map_id
      :ets.select(ets_table, [
        {{:"$1", :"$2", :"$3"}, [{:==, :"$2", map_id}], [:"$3"]}
      ])
      |> Enum.take(limit)
    end
  end

  defp validate_ulid(ulid) when is_binary(ulid) do
    # ULID format validation: 26 characters from the Crockford base32
    # alphabet ([0-9A-Z] excluding I, L, O, U)
    case byte_size(ulid) do
      26 ->
        if ulid =~ ~r/^[0123456789ABCDEFGHJKMNPQRSTVWXYZ]{26}$/ do
          :ok
        else
          {:error, :invalid_ulid}
        end

      _ ->
        {:error, :invalid_ulid}
    end
  end

  defp validate_ulid(_), do: {:error, :invalid_ulid}

  defp cleanup_old_events(ets_table) do
    cutoff_time = DateTime.add(DateTime.utc_now(), -@event_retention_minutes, :minute)
    cutoff_ulid = datetime_to_ulid(cutoff_time)

    # Delete events older than the cutoff
    :ets.select_delete(ets_table, [
      {{:"$1", :_, :_}, [{:<, :"$1", cutoff_ulid}], [true]}
    ])
  end

  defp schedule_cleanup do
    Process.send_after(self(), :cleanup_events, @cleanup_interval)
  end
  # Convert a DateTime to a ULID for key comparison
  defp datetime_to_ulid(datetime) do
    timestamp = DateTime.to_unix(datetime, :millisecond)
    # Generate a ULID carrying this timestamp. The non-timestamp portion is
    # random, so comparisons are approximate within the same millisecond,
    # which is acceptable for backfill cutoffs.
    Ulid.generate(timestamp)
  end
end
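A reconnection backfill sketch against the client API above; the since-ULID would normally be the id of the last event a client saw, and push_to_client/1 and resync_from_snapshot/0 are hypothetical helpers:

last_seen = "01J2X3YZABCDEFGHJKMNPQRSTV"

case WandererApp.ExternalEvents.MapEventRelay.get_events_since_ulid("map_123", last_seen) do
  # events come back as JSON maps, already in ULID (insertion) order
  {:ok, events} -> Enum.each(events, &push_to_client/1)
  # malformed cursors are rejected before touching ETS
  {:error, :invalid_ulid} -> resync_from_snapshot()
end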
Some files were not shown because too many files have changed in this diff