mirror of
https://github.com/wanderer-industries/wanderer
synced 2025-12-07 16:25:37 +00:00
Compare commits
192 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e9b475c0a8 | ||
|
|
7752010092 | ||
|
|
d3705b3ed7 | ||
|
|
1394e2897e | ||
|
|
5117a1c5af | ||
|
|
3c62403f33 | ||
|
|
a4760f5162 | ||
|
|
b071070431 | ||
|
|
3bcb9628e7 | ||
|
|
e62c4cf5bf | ||
|
|
af46962ce4 | ||
|
|
0b0967830b | ||
|
|
172251a208 | ||
|
|
8a6fb63d55 | ||
|
|
9652959e5e | ||
|
|
825ef46d41 | ||
|
|
ad9f7c6b95 | ||
|
|
b960b5c149 | ||
|
|
0f092d21f9 | ||
|
|
031576caa6 | ||
|
|
7a97a96c42 | ||
|
|
2efb2daba0 | ||
|
|
4374c39924 | ||
|
|
15711495c7 | ||
|
|
236f803427 | ||
|
|
6772130f2a | ||
|
|
ddd72f3fac | ||
|
|
6e262835ef | ||
|
|
2f3b8ddc5f | ||
|
|
cea3a74b34 | ||
|
|
867941a233 | ||
|
|
3ff388a16d | ||
|
|
f4248e9ab9 | ||
|
|
507b3289c7 | ||
|
|
9e1dfc48d5 | ||
|
|
518cbc7b5d | ||
|
|
ccc8db0620 | ||
|
|
7cfb663efd | ||
|
|
e5103cc925 | ||
|
|
26458f5a19 | ||
|
|
79d5ec6caf | ||
|
|
034d461ab6 | ||
|
|
2e9c1c170c | ||
|
|
24ad3b2c61 | ||
|
|
288f55dc2f | ||
|
|
78dbea6267 | ||
|
|
6a9e53141d | ||
|
|
05e6994520 | ||
|
|
1a4dc67eb9 | ||
|
|
31d87a116b | ||
|
|
c47796d590 | ||
|
|
c7138a41ee | ||
|
|
96f04c70a9 | ||
|
|
87a8bc09ab | ||
|
|
5f5661d559 | ||
|
|
35ca87790e | ||
|
|
ae43e4a57c | ||
|
|
b91712a01a | ||
|
|
b20007b341 | ||
|
|
6a24e1188b | ||
|
|
5894efc1aa | ||
|
|
a05612d243 | ||
|
|
48de874d6b | ||
|
|
91e6da316f | ||
|
|
fa60bd81a1 | ||
|
|
a08a69c5be | ||
|
|
18d450a41a | ||
|
|
36cdee61c0 | ||
|
|
797e188259 | ||
|
|
91b581668a | ||
|
|
ad01fec28f | ||
|
|
357d3a0df6 | ||
|
|
5ce6022761 | ||
|
|
235a0c5aea | ||
|
|
9b81fa6ebb | ||
|
|
8792d5ab0e | ||
|
|
d46ed0c078 | ||
|
|
73c433fcd2 | ||
|
|
02b5239220 | ||
|
|
0ed3bdfcb0 | ||
|
|
bdeb89011f | ||
|
|
1523b625bc | ||
|
|
fb91eeb692 | ||
|
|
601d2e02cb | ||
|
|
0a662d34eb | ||
|
|
5cd4693e9d | ||
|
|
f3f0f860e3 | ||
|
|
93a5cf8a79 | ||
|
|
7cf15cbc21 | ||
|
|
30bc6d20b2 | ||
|
|
b39f99fde4 | ||
|
|
0e8aa9efa4 | ||
|
|
e1fcde36e3 | ||
|
|
7aafe077d3 | ||
|
|
5b8cab5e76 | ||
|
|
4ab56af40a | ||
|
|
e8cea86a76 | ||
|
|
d0a6e0b358 | ||
|
|
8831b3e970 | ||
|
|
f6db6f0914 | ||
|
|
ab8baeedd1 | ||
|
|
eccee5e72e | ||
|
|
4d93055bda | ||
|
|
c60c16e56a | ||
|
|
99b1de5647 | ||
|
|
7efe11a421 | ||
|
|
954108856a | ||
|
|
cbca745ec4 | ||
|
|
e15e7c8f8d | ||
|
|
65e8a520e5 | ||
|
|
3926af5a6d | ||
|
|
556fb33223 | ||
|
|
82295adeab | ||
|
|
efabf060c7 | ||
|
|
96e434ebf5 | ||
|
|
d81e2567cc | ||
|
|
f8d487639f | ||
|
|
cecfbb5375 | ||
|
|
8d35500e2f | ||
|
|
5dad5d8e03 | ||
|
|
9d7d4fad2e | ||
|
|
7be64bde02 | ||
|
|
48eb7552a9 | ||
|
|
5347b0060c | ||
|
|
b826c03226 | ||
|
|
1c211a8667 | ||
|
|
fd4d5b90e2 | ||
|
|
1ee9f26b34 | ||
|
|
da1762934b | ||
|
|
511457c761 | ||
|
|
29b4cedb81 | ||
|
|
585de15e6b | ||
|
|
74f7ad155d | ||
|
|
a9bf118f3a | ||
|
|
6d5a432bad | ||
|
|
f1f12abd16 | ||
|
|
09880a54e9 | ||
|
|
0f6847b16d | ||
|
|
ce82ed97f5 | ||
|
|
36b393dbde | ||
|
|
524c283a0d | ||
|
|
afda53a9bc | ||
|
|
1310d75012 | ||
|
|
80bbde549d | ||
|
|
2451487593 | ||
|
|
ecd626f105 | ||
|
|
123b312965 | ||
|
|
e94de8e629 | ||
|
|
956a5a04ca | ||
|
|
affeb7c624 | ||
|
|
e457d94df8 | ||
|
|
e9583c928e | ||
|
|
89c14628e1 | ||
|
|
ffba407eaf | ||
|
|
33f710127c | ||
|
|
63faa43c1d | ||
|
|
9f75ae6b03 | ||
|
|
a1f28cd245 | ||
|
|
90a04b517e | ||
|
|
9f6e6a333f | ||
|
|
7b9e2c4fd9 | ||
|
|
63f13711cc | ||
|
|
650170498a | ||
|
|
0f466c51ba | ||
|
|
a1a641bce3 | ||
|
|
4764c25eb1 | ||
|
|
d390455cf2 | ||
|
|
f58ebad0ec | ||
|
|
7ca4eb3b8f | ||
|
|
472dbaa68b | ||
|
|
679bd782a8 | ||
|
|
6a316e3906 | ||
|
|
c129db8474 | ||
|
|
10035b4c91 | ||
|
|
5839271de7 | ||
|
|
47db8ef709 | ||
|
|
2656491aaa | ||
|
|
a7637c9cae | ||
|
|
7b83ed8205 | ||
|
|
00cbc77f1d | ||
|
|
4d75b256c4 | ||
|
|
5aeff7c40c | ||
|
|
6a543bf644 | ||
|
|
dfb035525d | ||
|
|
798aec1b74 | ||
|
|
7914d7e151 | ||
|
|
8b579d6837 | ||
|
|
c0fd20dfff | ||
|
|
dd6b67c6e6 | ||
|
|
48ff2f4413 | ||
|
|
d261c6186b | ||
|
|
064a36fcbb |
13
.check.exs
13
.check.exs
@@ -13,8 +13,8 @@
|
||||
|
||||
## list of tools (see `mix check` docs for a list of default curated tools)
|
||||
tools: [
|
||||
## curated tools may be disabled (e.g. the check for compilation warnings)
|
||||
{:compiler, false},
|
||||
## Allow compilation warnings for now (error budget: unlimited warnings)
|
||||
{:compiler, "mix compile"},
|
||||
|
||||
## ...or have command & args adjusted (e.g. enable skip comments for sobelow)
|
||||
# {:sobelow, "mix sobelow --exit --skip"},
|
||||
@@ -22,10 +22,15 @@
|
||||
## ...or reordered (e.g. to see output from dialyzer before others)
|
||||
# {:dialyzer, order: -1},
|
||||
|
||||
## ...or reconfigured (e.g. disable parallel execution of ex_unit in umbrella)
|
||||
## Credo with relaxed error budget: max 200 issues
|
||||
{:credo, "mix credo --strict --max-issues 200"},
|
||||
|
||||
## Dialyzer but don't halt on exit (allow warnings)
|
||||
{:dialyzer, "mix dialyzer"},
|
||||
|
||||
## Tests without warnings-as-errors for now
|
||||
{:ex_unit, "mix test"},
|
||||
{:doctor, false},
|
||||
{:ex_unit, false},
|
||||
{:npm_test, false},
|
||||
{:sobelow, false}
|
||||
|
||||
|
||||
18
.credo.exs
18
.credo.exs
@@ -82,8 +82,6 @@
|
||||
# You can customize the priority of any check
|
||||
# Priority values are: `low, normal, high, higher`
|
||||
#
|
||||
{Credo.Check.Design.AliasUsage,
|
||||
[priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},
|
||||
# You can also customize the exit_status of each check.
|
||||
# If you don't want TODO comments to cause `mix credo` to fail, just
|
||||
# set this value to 0 (zero).
|
||||
@@ -99,10 +97,9 @@
|
||||
{Credo.Check.Readability.LargeNumbers, []},
|
||||
{Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
|
||||
{Credo.Check.Readability.ModuleAttributeNames, []},
|
||||
{Credo.Check.Readability.ModuleDoc, []},
|
||||
{Credo.Check.Readability.ModuleDoc, false},
|
||||
{Credo.Check.Readability.ModuleNames, []},
|
||||
{Credo.Check.Readability.ParenthesesInCondition, []},
|
||||
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
|
||||
{Credo.Check.Readability.PipeIntoAnonymousFunctions, []},
|
||||
{Credo.Check.Readability.PredicateFunctionNames, []},
|
||||
{Credo.Check.Readability.PreferImplicitTry, []},
|
||||
@@ -121,14 +118,12 @@
|
||||
#
|
||||
{Credo.Check.Refactor.Apply, []},
|
||||
{Credo.Check.Refactor.CondStatements, []},
|
||||
{Credo.Check.Refactor.CyclomaticComplexity, []},
|
||||
{Credo.Check.Refactor.FunctionArity, []},
|
||||
{Credo.Check.Refactor.LongQuoteBlocks, []},
|
||||
{Credo.Check.Refactor.MatchInCondition, []},
|
||||
{Credo.Check.Refactor.MapJoin, []},
|
||||
{Credo.Check.Refactor.NegatedConditionsInUnless, []},
|
||||
{Credo.Check.Refactor.NegatedConditionsWithElse, []},
|
||||
{Credo.Check.Refactor.Nesting, []},
|
||||
{Credo.Check.Refactor.UnlessWithElse, []},
|
||||
{Credo.Check.Refactor.WithClauses, []},
|
||||
{Credo.Check.Refactor.FilterFilter, []},
|
||||
@@ -196,10 +191,19 @@
|
||||
{Credo.Check.Warning.LeakyEnvironment, []},
|
||||
{Credo.Check.Warning.MapGetUnsafePass, []},
|
||||
{Credo.Check.Warning.MixEnv, []},
|
||||
{Credo.Check.Warning.UnsafeToAtom, []}
|
||||
{Credo.Check.Warning.UnsafeToAtom, []},
|
||||
|
||||
# {Credo.Check.Refactor.MapInto, []},
|
||||
|
||||
#
|
||||
# Temporarily disable checks that generate too many issues
|
||||
# to get under the 200 issue budget
|
||||
#
|
||||
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
|
||||
{Credo.Check.Design.AliasUsage, []},
|
||||
{Credo.Check.Refactor.Nesting, []},
|
||||
{Credo.Check.Refactor.CyclomaticComplexity, []}
|
||||
|
||||
#
|
||||
# Custom checks can be created using `mix credo.gen.check`.
|
||||
#
|
||||
|
||||
127
.credo.test.exs
Normal file
127
.credo.test.exs
Normal file
@@ -0,0 +1,127 @@
|
||||
# Credo configuration specific to test files
|
||||
# This enforces stricter quality standards for test code
|
||||
|
||||
%{
|
||||
configs: [
|
||||
%{
|
||||
name: "test",
|
||||
files: %{
|
||||
included: ["test/"],
|
||||
excluded: ["test/support/"]
|
||||
},
|
||||
requires: [],
|
||||
strict: true,
|
||||
color: true,
|
||||
checks: [
|
||||
# Consistency checks
|
||||
{Credo.Check.Consistency.ExceptionNames, []},
|
||||
{Credo.Check.Consistency.LineEndings, []},
|
||||
{Credo.Check.Consistency.MultiAliasImportRequireUse, []},
|
||||
{Credo.Check.Consistency.ParameterPatternMatching, []},
|
||||
{Credo.Check.Consistency.SpaceAroundOperators, []},
|
||||
{Credo.Check.Consistency.SpaceInParentheses, []},
|
||||
{Credo.Check.Consistency.TabsOrSpaces, []},
|
||||
|
||||
# Design checks - stricter for tests
|
||||
{Credo.Check.Design.AliasUsage, priority: :high},
|
||||
# Lower threshold for tests
|
||||
{Credo.Check.Design.DuplicatedCode, mass_threshold: 25},
|
||||
{Credo.Check.Design.TagTODO, []},
|
||||
{Credo.Check.Design.TagFIXME, []},
|
||||
|
||||
# Readability checks - very important for tests
|
||||
{Credo.Check.Readability.AliasOrder, []},
|
||||
{Credo.Check.Readability.FunctionNames, []},
|
||||
{Credo.Check.Readability.LargeNumbers, []},
|
||||
# Slightly longer for test descriptions
|
||||
{Credo.Check.Readability.MaxLineLength, max_length: 120},
|
||||
{Credo.Check.Readability.ModuleAttributeNames, []},
|
||||
# Not required for test modules
|
||||
{Credo.Check.Readability.ModuleDoc, false},
|
||||
{Credo.Check.Readability.ModuleNames, []},
|
||||
{Credo.Check.Readability.ParenthesesInCondition, []},
|
||||
{Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
|
||||
{Credo.Check.Readability.PredicateFunctionNames, []},
|
||||
{Credo.Check.Readability.PreferImplicitTry, []},
|
||||
{Credo.Check.Readability.RedundantBlankLines, []},
|
||||
{Credo.Check.Readability.Semicolons, []},
|
||||
{Credo.Check.Readability.SpaceAfterCommas, []},
|
||||
{Credo.Check.Readability.StringSigils, []},
|
||||
{Credo.Check.Readability.TrailingBlankLine, []},
|
||||
{Credo.Check.Readability.TrailingWhiteSpace, []},
|
||||
{Credo.Check.Readability.UnnecessaryAliasExpansion, []},
|
||||
{Credo.Check.Readability.VariableNames, []},
|
||||
{Credo.Check.Readability.WithSingleClause, []},
|
||||
|
||||
# Test-specific readability checks
|
||||
# Discourage single pipes in tests
|
||||
{Credo.Check.Readability.SinglePipe, []},
|
||||
# Specs not needed in tests
|
||||
{Credo.Check.Readability.Specs, false},
|
||||
{Credo.Check.Readability.StrictModuleLayout, []},
|
||||
|
||||
# Refactoring opportunities - important for test maintainability
|
||||
# Higher limit for complex test setups
|
||||
{Credo.Check.Refactor.ABCSize, max_size: 50},
|
||||
{Credo.Check.Refactor.AppendSingleItem, []},
|
||||
{Credo.Check.Refactor.CondStatements, []},
|
||||
{Credo.Check.Refactor.CyclomaticComplexity, max_complexity: 10},
|
||||
# Lower for test helpers
|
||||
{Credo.Check.Refactor.FunctionArity, max_arity: 4},
|
||||
{Credo.Check.Refactor.LongQuoteBlocks, []},
|
||||
{Credo.Check.Refactor.MapInto, []},
|
||||
{Credo.Check.Refactor.MatchInCondition, []},
|
||||
{Credo.Check.Refactor.NegatedConditionsInUnless, []},
|
||||
{Credo.Check.Refactor.NegatedConditionsWithElse, []},
|
||||
# Keep tests flat
|
||||
{Credo.Check.Refactor.Nesting, max_nesting: 3},
|
||||
{Credo.Check.Refactor.UnlessWithElse, []},
|
||||
{Credo.Check.Refactor.WithClauses, []},
|
||||
{Credo.Check.Refactor.FilterFilter, []},
|
||||
{Credo.Check.Refactor.RejectReject, []},
|
||||
{Credo.Check.Refactor.RedundantWithClauseResult, []},
|
||||
|
||||
# Warnings - all should be fixed
|
||||
{Credo.Check.Warning.ApplicationConfigInModuleAttribute, []},
|
||||
{Credo.Check.Warning.BoolOperationOnSameValues, []},
|
||||
{Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
|
||||
{Credo.Check.Warning.IExPry, []},
|
||||
{Credo.Check.Warning.IoInspect, []},
|
||||
{Credo.Check.Warning.OperationOnSameValues, []},
|
||||
{Credo.Check.Warning.OperationWithConstantResult, []},
|
||||
{Credo.Check.Warning.RaiseInsideRescue, []},
|
||||
{Credo.Check.Warning.UnusedEnumOperation, []},
|
||||
{Credo.Check.Warning.UnusedFileOperation, []},
|
||||
{Credo.Check.Warning.UnusedKeywordOperation, []},
|
||||
{Credo.Check.Warning.UnusedListOperation, []},
|
||||
{Credo.Check.Warning.UnusedPathOperation, []},
|
||||
{Credo.Check.Warning.UnusedRegexOperation, []},
|
||||
{Credo.Check.Warning.UnusedStringOperation, []},
|
||||
{Credo.Check.Warning.UnusedTupleOperation, []},
|
||||
{Credo.Check.Warning.UnsafeExec, []},
|
||||
|
||||
# Test-specific checks
|
||||
# Important for test isolation
|
||||
{Credo.Check.Warning.LeakyEnvironment, []},
|
||||
|
||||
# Custom checks for test patterns
|
||||
{
|
||||
Credo.Check.Refactor.PipeChainStart,
|
||||
# Factory functions
|
||||
excluded_functions: ["build", "create", "insert"],
|
||||
excluded_argument_types: [:atom, :number]
|
||||
}
|
||||
],
|
||||
|
||||
# Disable these checks for test files
|
||||
disabled: [
|
||||
# Tests don't need module docs
|
||||
{Credo.Check.Readability.ModuleDoc, []},
|
||||
# Tests don't need specs
|
||||
{Credo.Check.Readability.Specs, []},
|
||||
# Common in test setup
|
||||
{Credo.Check.Refactor.VariableRebinding, []}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
39
.devcontainer/setup.sh
Executable file
39
.devcontainer/setup.sh
Executable file
@@ -0,0 +1,39 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
echo "→ fetching & compiling deps"
|
||||
mix deps.get
|
||||
mix compile
|
||||
|
||||
# only run Ecto if the project actually has those tasks
|
||||
if mix help | grep -q "ecto.create"; then
|
||||
echo "→ waiting for database to be ready..."
|
||||
|
||||
# Wait for database to be ready
|
||||
DB_HOST=${DB_HOST:-db}
|
||||
timeout=60
|
||||
while ! nc -z $DB_HOST 5432 2>/dev/null; do
|
||||
if [ $timeout -eq 0 ]; then
|
||||
echo "❌ Database connection timeout"
|
||||
exit 1
|
||||
fi
|
||||
echo "Waiting for database... ($timeout seconds remaining)"
|
||||
sleep 1
|
||||
timeout=$((timeout - 1))
|
||||
done
|
||||
|
||||
# Give the database a bit more time to fully initialize
|
||||
echo "→ giving database 2 more seconds to fully initialize..."
|
||||
sleep 2
|
||||
|
||||
echo "→ database is ready, running ecto.create && ecto.migrate"
|
||||
mix ecto.create --quiet
|
||||
mix ecto.migrate
|
||||
fi
|
||||
|
||||
cd assets
|
||||
echo "→ installing JS & CSS dependencies"
|
||||
yarn install --frozen-lockfile
|
||||
echo "→ building assets"
|
||||
|
||||
echo "✅ setup complete"
|
||||
@@ -9,4 +9,8 @@ export WANDERER_INVITES="false"
|
||||
export WANDERER_PUBLIC_API_DISABLED="false"
|
||||
export WANDERER_CHARACTER_API_DISABLED="false"
|
||||
export WANDERER_KILLS_SERVICE_ENABLED="true"
|
||||
export WANDERER_KILLS_BASE_URL="ws://host.docker.internal:4004"
|
||||
export WANDERER_KILLS_BASE_URL="ws://host.docker.internal:4004"
|
||||
export WANDERER_SSE_ENABLED="true"
|
||||
export WANDERER_WEBHOOKS_ENABLED="true"
|
||||
export WANDERER_SSE_MAX_CONNECTIONS="1000"
|
||||
export WANDERER_WEBHOOK_TIMEOUT_MS="15000"
|
||||
109
.github/workflows/advanced-test.yml
vendored
Normal file
109
.github/workflows/advanced-test.yml
vendored
Normal file
@@ -0,0 +1,109 @@
|
||||
name: Build Test
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- develop
|
||||
|
||||
env:
|
||||
MIX_ENV: prod
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
REGISTRY_IMAGE: wandererltd/community-edition
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
deploy-test:
|
||||
name: 🚀 Deploy to test env (fly.io)
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ github.base_ref == 'develop' || (github.ref == 'refs/heads/develop' && github.event_name == 'push') }}
|
||||
steps:
|
||||
- name: ⬇️ Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
- uses: superfly/flyctl-actions/setup-flyctl@master
|
||||
|
||||
- name: 👀 Read app name
|
||||
uses: SebRollen/toml-action@v1.0.0
|
||||
id: app_name
|
||||
with:
|
||||
file: "fly.toml"
|
||||
field: "app"
|
||||
|
||||
- name: 🚀 Deploy Test
|
||||
run: flyctl deploy --remote-only --wait-timeout=300 --ha=false
|
||||
env:
|
||||
FLY_API_TOKEN: ${{ secrets.FLY_API_TOKEN }}
|
||||
|
||||
build:
|
||||
name: 🛠 Build
|
||||
runs-on: ubuntu-22.04
|
||||
if: ${{ (github.ref == 'refs/heads/develop') && github.event_name == 'push' }}
|
||||
permissions:
|
||||
checks: write
|
||||
contents: write
|
||||
packages: write
|
||||
attestations: write
|
||||
id-token: write
|
||||
pull-requests: write
|
||||
repository-projects: write
|
||||
strategy:
|
||||
matrix:
|
||||
otp: ["27"]
|
||||
elixir: ["1.17"]
|
||||
node-version: ["18.x"]
|
||||
outputs:
|
||||
commit_hash: ${{ steps.generate-changelog.outputs.commit_hash }}
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup Elixir
|
||||
uses: erlef/setup-beam@v1
|
||||
with:
|
||||
otp-version: ${{matrix.otp}}
|
||||
elixir-version: ${{matrix.elixir}}
|
||||
# nix build would also work here because `todos` is the default package
|
||||
- name: ⬇️ Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: 😅 Cache deps
|
||||
id: cache-deps
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
cache-name: cache-elixir-deps
|
||||
with:
|
||||
path: |
|
||||
deps
|
||||
key: ${{ runner.os }}-mix-${{ matrix.elixir }}-${{ matrix.otp }}-${{ hashFiles('**/mix.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-mix-${{ matrix.elixir }}-${{ matrix.otp }}-
|
||||
- name: 😅 Cache compiled build
|
||||
id: cache-build
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
cache-name: cache-compiled-build
|
||||
with:
|
||||
path: |
|
||||
_build
|
||||
key: ${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}-${{ hashFiles( '**/lib/**/*.{ex,eex}', '**/config/*.exs', '**/mix.exs' ) }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}-
|
||||
${{ runner.os }}-build-
|
||||
# Step: Download project dependencies. If unchanged, uses
|
||||
# the cached version.
|
||||
- name: 🌐 Install dependencies
|
||||
run: mix deps.get --only "prod"
|
||||
|
||||
# Step: Compile the project treating any warnings as errors.
|
||||
# Customize this step if a different behavior is desired.
|
||||
- name: 🛠 Compiles without warnings
|
||||
if: steps.cache-build.outputs.cache-hit != 'true'
|
||||
run: mix compile
|
||||
72
.github/workflows/build.yml
vendored
72
.github/workflows/build.yml
vendored
@@ -4,7 +4,8 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- "releases/*"
|
||||
- develop
|
||||
|
||||
env:
|
||||
MIX_ENV: prod
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
@@ -21,7 +22,7 @@ jobs:
|
||||
build:
|
||||
name: 🛠 Build
|
||||
runs-on: ubuntu-22.04
|
||||
if: ${{ (github.ref == 'refs/heads/main') && github.event_name == 'push' }}
|
||||
if: ${{ (github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop') && github.event_name == 'push' }}
|
||||
permissions:
|
||||
checks: write
|
||||
contents: write
|
||||
@@ -36,7 +37,7 @@ jobs:
|
||||
elixir: ["1.17"]
|
||||
node-version: ["18.x"]
|
||||
outputs:
|
||||
commit_hash: ${{ steps.generate-changelog.outputs.commit_hash }}
|
||||
commit_hash: ${{ steps.generate-changelog.outputs.commit_hash || steps.set-commit-develop.outputs.commit_hash }}
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: |
|
||||
@@ -52,6 +53,7 @@ jobs:
|
||||
- name: ⬇️ Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ssh-key: "${{ secrets.COMMIT_KEY }}"
|
||||
fetch-depth: 0
|
||||
- name: 😅 Cache deps
|
||||
id: cache-deps
|
||||
@@ -89,20 +91,26 @@ jobs:
|
||||
|
||||
- name: Generate Changelog & Update Tag Version
|
||||
id: generate-changelog
|
||||
if: github.ref == 'refs/heads/main'
|
||||
run: |
|
||||
git config --global user.name 'CI'
|
||||
git config --global user.email 'ci@users.noreply.github.com'
|
||||
mix git_ops.release --force-patch --yes
|
||||
git commit --allow-empty -m 'chore: [skip ci]'
|
||||
git push --follow-tags
|
||||
echo "commit_hash=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set commit hash for develop
|
||||
id: set-commit-develop
|
||||
if: github.ref == 'refs/heads/develop'
|
||||
run: |
|
||||
echo "commit_hash=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT
|
||||
|
||||
docker:
|
||||
name: 🛠 Build Docker Images
|
||||
if: github.ref == 'refs/heads/develop'
|
||||
needs: build
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
release-tag: ${{ steps.get-latest-tag.outputs.tag }}
|
||||
release-notes: ${{ steps.get-content.outputs.string }}
|
||||
permissions:
|
||||
checks: write
|
||||
contents: write
|
||||
@@ -129,17 +137,6 @@ jobs:
|
||||
ref: ${{ needs.build.outputs.commit_hash }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Prepare Changelog
|
||||
run: |
|
||||
yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
|
||||
sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md
|
||||
|
||||
- name: Get Release Tag
|
||||
id: get-latest-tag
|
||||
uses: "WyriHaximus/github-action-get-previous-tag@v1"
|
||||
with:
|
||||
fallback: 1.0.0
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
@@ -188,24 +185,6 @@ jobs:
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- uses: markpatterson27/markdown-to-output@v1
|
||||
id: extract-changelog
|
||||
with:
|
||||
filepath: CHANGELOG.md
|
||||
|
||||
- name: Get content
|
||||
uses: 2428392/gh-truncate-string-action@v1.3.0
|
||||
id: get-content
|
||||
with:
|
||||
stringToTruncate: |
|
||||
📣 Wanderer new release available 🎉
|
||||
|
||||
**Version**: ${{ steps.get-latest-tag.outputs.tag }}
|
||||
|
||||
${{ steps.extract-changelog.outputs.body }}
|
||||
maxLength: 500
|
||||
truncationSymbol: "…"
|
||||
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
@@ -236,9 +215,8 @@ jobs:
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }}
|
||||
type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
|
||||
type=raw,value=develop-{{sha}},enable=${{ github.ref == 'refs/heads/develop' }}
|
||||
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
@@ -253,19 +231,25 @@ jobs:
|
||||
create-release:
|
||||
name: 🏷 Create Release
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [docker, merge]
|
||||
if: ${{ github.ref == 'refs/heads/main' && github.event_name == 'push' }}
|
||||
needs: build
|
||||
steps:
|
||||
- name: ⬇️ Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get Release Tag
|
||||
id: get-latest-tag
|
||||
uses: "WyriHaximus/github-action-get-previous-tag@v1"
|
||||
with:
|
||||
fallback: 1.0.0
|
||||
|
||||
- name: 🏷 Create Draft Release
|
||||
uses: softprops/action-gh-release@v1
|
||||
with:
|
||||
tag_name: ${{ needs.docker.outputs.release-tag }}
|
||||
name: Release ${{ needs.docker.outputs.release-tag }}
|
||||
tag_name: ${{ steps.get-latest-tag.outputs.tag }}
|
||||
name: Release ${{ steps.get-latest-tag.outputs.tag }}
|
||||
body: |
|
||||
## Info
|
||||
Commit ${{ github.sha }} was deployed to `staging`. [See code diff](${{ github.event.compare }}).
|
||||
@@ -275,9 +259,3 @@ jobs:
|
||||
## How to Promote?
|
||||
In order to promote this to prod, edit the draft and press **"Publish release"**.
|
||||
draft: true
|
||||
|
||||
- name: Discord Webhook Action
|
||||
uses: tsickert/discord-webhook@v5.3.0
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
content: ${{ needs.docker.outputs.release-notes }}
|
||||
|
||||
187
.github/workflows/docker-arm.yml
vendored
Normal file
187
.github/workflows/docker-arm.yml
vendored
Normal file
@@ -0,0 +1,187 @@
|
||||
name: Build Docker ARM Image
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '**'
|
||||
|
||||
env:
|
||||
MIX_ENV: prod
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
REGISTRY_IMAGE: wandererltd/community-edition-arm
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
name: 🛠 Build Docker Images
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
release-tag: ${{ steps.get-latest-tag.outputs.tag }}
|
||||
release-notes: ${{ steps.get-content.outputs.string }}
|
||||
permissions:
|
||||
checks: write
|
||||
contents: write
|
||||
packages: write
|
||||
attestations: write
|
||||
id-token: write
|
||||
pull-requests: write
|
||||
repository-projects: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux/arm64
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
|
||||
- name: ⬇️ Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get Release Tag
|
||||
id: get-latest-tag
|
||||
uses: "WyriHaximus/github-action-get-previous-tag@v1"
|
||||
with:
|
||||
fallback: 1.0.0
|
||||
|
||||
- name: ⬇️ Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ steps.get-latest-tag.outputs.tag }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Prepare Changelog
|
||||
run: |
|
||||
yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
|
||||
sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY_IMAGE }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.WANDERER_DOCKER_USER }}
|
||||
password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}
|
||||
|
||||
- name: Build and push
|
||||
id: build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
push: true
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: ${{ matrix.platform }}
|
||||
outputs: type=image,"name=${{ env.REGISTRY_IMAGE }}",push-by-digest=true,name-canonical=true,push=true
|
||||
build-args: |
|
||||
MIX_ENV=prod
|
||||
BUILD_METADATA=${{ steps.meta.outputs.json }}
|
||||
|
||||
- name: Export digest
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digests-${{ env.PLATFORM_PAIR }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- uses: markpatterson27/markdown-to-output@v1
|
||||
id: extract-changelog
|
||||
with:
|
||||
filepath: CHANGELOG.md
|
||||
|
||||
- name: Get content
|
||||
uses: 2428392/gh-truncate-string-action@v1.3.0
|
||||
id: get-content
|
||||
with:
|
||||
stringToTruncate: |
|
||||
📣 Wanderer **ARM** release available 🎉
|
||||
|
||||
**Version**: :${{ steps.get-latest-tag.outputs.tag }}
|
||||
|
||||
${{ steps.extract-changelog.outputs.body }}
|
||||
maxLength: 500
|
||||
truncationSymbol: "…"
|
||||
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- docker
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.WANDERER_DOCKER_USER }}
|
||||
password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.REGISTRY_IMAGE }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }}
|
||||
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
|
||||
|
||||
- name: Inspect image
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
|
||||
|
||||
notify:
|
||||
name: 🏷 Notify about release
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [docker, merge]
|
||||
steps:
|
||||
- name: Discord Webhook Action
|
||||
uses: tsickert/discord-webhook@v5.3.0
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
content: ${{ needs.docker.outputs.release-notes }}
|
||||
187
.github/workflows/docker.yml
vendored
Normal file
187
.github/workflows/docker.yml
vendored
Normal file
@@ -0,0 +1,187 @@
|
||||
name: Build Docker Image
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '**'
|
||||
|
||||
env:
|
||||
MIX_ENV: prod
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
REGISTRY_IMAGE: wandererltd/community-edition
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
|
||||
jobs:
|
||||
docker:
|
||||
name: 🛠 Build Docker Images
|
||||
runs-on: ubuntu-22.04
|
||||
outputs:
|
||||
release-tag: ${{ steps.get-latest-tag.outputs.tag }}
|
||||
release-notes: ${{ steps.get-content.outputs.string }}
|
||||
permissions:
|
||||
checks: write
|
||||
contents: write
|
||||
packages: write
|
||||
attestations: write
|
||||
id-token: write
|
||||
pull-requests: write
|
||||
repository-projects: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
platform:
|
||||
- linux/amd64
|
||||
steps:
|
||||
- name: Prepare
|
||||
run: |
|
||||
platform=${{ matrix.platform }}
|
||||
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
|
||||
|
||||
- name: ⬇️ Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Get Release Tag
|
||||
id: get-latest-tag
|
||||
uses: "WyriHaximus/github-action-get-previous-tag@v1"
|
||||
with:
|
||||
fallback: 1.0.0
|
||||
|
||||
- name: ⬇️ Checkout repo
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
ref: ${{ steps.get-latest-tag.outputs.tag }}
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Prepare Changelog
|
||||
run: |
|
||||
yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
|
||||
sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md
|
||||
|
||||
- name: Extract metadata (tags, labels) for Docker
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ${{ env.REGISTRY_IMAGE }}
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Login to DockerHub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.WANDERER_DOCKER_USER }}
|
||||
password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}
|
||||
|
||||
- name: Build and push
|
||||
id: build
|
||||
uses: docker/build-push-action@v6
|
||||
with:
|
||||
push: true
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
cache-from: type=gha
|
||||
cache-to: type=gha,mode=max
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
platforms: ${{ matrix.platform }}
|
||||
outputs: type=image,"name=${{ env.REGISTRY_IMAGE }}",push-by-digest=true,name-canonical=true,push=true
|
||||
build-args: |
|
||||
MIX_ENV=prod
|
||||
BUILD_METADATA=${{ steps.meta.outputs.json }}
|
||||
|
||||
- name: Export digest
|
||||
run: |
|
||||
mkdir -p /tmp/digests
|
||||
digest="${{ steps.build.outputs.digest }}"
|
||||
touch "/tmp/digests/${digest#sha256:}"
|
||||
|
||||
- name: Upload digest
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: digests-${{ env.PLATFORM_PAIR }}
|
||||
path: /tmp/digests/*
|
||||
if-no-files-found: error
|
||||
retention-days: 1
|
||||
|
||||
- uses: markpatterson27/markdown-to-output@v1
|
||||
id: extract-changelog
|
||||
with:
|
||||
filepath: CHANGELOG.md
|
||||
|
||||
- name: Get content
|
||||
uses: 2428392/gh-truncate-string-action@v1.3.0
|
||||
id: get-content
|
||||
with:
|
||||
stringToTruncate: |
|
||||
📣 Wanderer new release available 🎉
|
||||
|
||||
**Version**: ${{ steps.get-latest-tag.outputs.tag }}
|
||||
|
||||
${{ steps.extract-changelog.outputs.body }}
|
||||
maxLength: 500
|
||||
truncationSymbol: "…"
|
||||
|
||||
merge:
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- docker
|
||||
steps:
|
||||
- name: Download digests
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
path: /tmp/digests
|
||||
pattern: digests-*
|
||||
merge-multiple: true
|
||||
|
||||
- name: Login to Docker Hub
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
username: ${{ secrets.WANDERER_DOCKER_USER }}
|
||||
password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: |
|
||||
${{ env.REGISTRY_IMAGE }}
|
||||
tags: |
|
||||
type=ref,event=branch
|
||||
type=ref,event=pr
|
||||
type=semver,pattern={{version}}
|
||||
type=semver,pattern={{major}}.{{minor}}
|
||||
type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }}
|
||||
|
||||
- name: Create manifest list and push
|
||||
working-directory: /tmp/digests
|
||||
run: |
|
||||
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
|
||||
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
|
||||
|
||||
- name: Inspect image
|
||||
run: |
|
||||
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
|
||||
|
||||
notify:
|
||||
name: 🏷 Notify about release
|
||||
runs-on: ubuntu-22.04
|
||||
needs: [docker, merge]
|
||||
steps:
|
||||
- name: Discord Webhook Action
|
||||
uses: tsickert/discord-webhook@v5.3.0
|
||||
with:
|
||||
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
|
||||
content: ${{ needs.docker.outputs.release-notes }}
|
||||
300
.github/workflows/flaky-test-detection.yml
vendored
Normal file
300
.github/workflows/flaky-test-detection.yml
vendored
Normal file
@@ -0,0 +1,300 @@
|
||||
name: Flaky Test Detection
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run nightly at 2 AM UTC
|
||||
- cron: '0 2 * * *'
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
test_file:
|
||||
description: 'Specific test file to check (optional)'
|
||||
required: false
|
||||
type: string
|
||||
iterations:
|
||||
description: 'Number of test iterations'
|
||||
required: false
|
||||
default: '10'
|
||||
type: string
|
||||
|
||||
env:
|
||||
MIX_ENV: test
|
||||
ELIXIR_VERSION: "1.17"
|
||||
OTP_VERSION: "27"
|
||||
|
||||
jobs:
|
||||
detect-flaky-tests:
|
||||
name: 🔍 Detect Flaky Tests
|
||||
runs-on: ubuntu-22.04
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16
|
||||
env:
|
||||
POSTGRES_USER: postgres
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_DB: wanderer_test
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
steps:
|
||||
- name: ⬇️ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: 🏗️ Setup Elixir & Erlang
|
||||
uses: erlef/setup-beam@v1
|
||||
with:
|
||||
elixir-version: ${{ env.ELIXIR_VERSION }}
|
||||
otp-version: ${{ env.OTP_VERSION }}
|
||||
|
||||
- name: 📦 Restore dependencies cache
|
||||
uses: actions/cache@v4
|
||||
id: deps-cache
|
||||
with:
|
||||
path: |
|
||||
deps
|
||||
_build
|
||||
key: ${{ runner.os }}-mix-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-${{ hashFiles('**/mix.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-mix-${{ env.ELIXIR_VERSION }}-${{ env.OTP_VERSION }}-
|
||||
|
||||
- name: 📦 Install dependencies
|
||||
if: steps.deps-cache.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
mix deps.get
|
||||
mix deps.compile
|
||||
|
||||
- name: 🏗️ Compile project
|
||||
run: mix compile --warnings-as-errors
|
||||
|
||||
- name: 🏗️ Setup test database
|
||||
run: |
|
||||
mix ecto.create
|
||||
mix ecto.migrate
|
||||
env:
|
||||
DATABASE_URL: postgres://postgres:postgres@localhost:5432/wanderer_test
|
||||
|
||||
- name: 🔍 Run flaky test detection
|
||||
id: flaky-detection
|
||||
run: |
|
||||
# Determine test target
|
||||
TEST_FILE="${{ github.event.inputs.test_file }}"
|
||||
ITERATIONS="${{ github.event.inputs.iterations || '10' }}"
|
||||
|
||||
if [ -n "$TEST_FILE" ]; then
|
||||
echo "Checking specific file: $TEST_FILE"
|
||||
mix test.stability --runs $ITERATIONS --file "$TEST_FILE" --detect --report flaky_report.json
|
||||
else
|
||||
echo "Checking all tests"
|
||||
mix test.stability --runs $ITERATIONS --detect --report flaky_report.json
|
||||
fi
|
||||
env:
|
||||
DATABASE_URL: postgres://postgres:postgres@localhost:5432/wanderer_test
|
||||
continue-on-error: true
|
||||
|
||||
- name: 📊 Upload flaky test report
|
||||
if: always()
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: flaky-test-report
|
||||
path: flaky_report.json
|
||||
retention-days: 30
|
||||
|
||||
- name: 💬 Comment on flaky tests
|
||||
if: always()
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const fs = require('fs');
|
||||
|
||||
// Read the report
|
||||
let report;
|
||||
try {
|
||||
const reportContent = fs.readFileSync('flaky_report.json', 'utf8');
|
||||
report = JSON.parse(reportContent);
|
||||
} catch (error) {
|
||||
console.log('No flaky test report found');
|
||||
return;
|
||||
}
|
||||
|
||||
if (!report.flaky_tests || report.flaky_tests.length === 0) {
|
||||
console.log('No flaky tests detected!');
|
||||
return;
|
||||
}
|
||||
|
||||
// Create issue body
|
||||
const issueBody = `## 🔍 Flaky Tests Detected
|
||||
|
||||
The automated flaky test detection found ${report.flaky_tests.length} potentially flaky test(s).
|
||||
|
||||
### Summary
|
||||
- **Total test runs**: ${report.summary.total_runs}
|
||||
- **Success rate**: ${(report.summary.success_rate * 100).toFixed(1)}%
|
||||
- **Average duration**: ${(report.summary.avg_duration_ms / 1000).toFixed(2)}s
|
||||
|
||||
### Flaky Tests
|
||||
|
||||
| Test | Failure Rate | Details |
|
||||
|------|--------------|---------|
|
||||
${report.flaky_tests.map(test =>
|
||||
`| ${test.test} | ${(test.failure_rate * 100).toFixed(1)}% | Failed ${test.failures}/${report.summary.total_runs} runs |`
|
||||
).join('\n')}
|
||||
|
||||
### Recommended Actions
|
||||
|
||||
1. Review the identified tests for race conditions
|
||||
2. Check for timing dependencies or async issues
|
||||
3. Ensure proper test isolation and cleanup
|
||||
4. Consider adding explicit waits or synchronization
|
||||
5. Use \`async: false\` if tests share resources
|
||||
|
||||
---
|
||||
*This issue was automatically created by the flaky test detection workflow.*
|
||||
*Run time: ${new Date().toISOString()}*
|
||||
`;
|
||||
|
||||
try {
|
||||
// Check if there's already an open issue
|
||||
const issues = await github.rest.issues.listForRepo({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: 'flaky-test',
|
||||
state: 'open'
|
||||
});
|
||||
|
||||
if (issues.data.length > 0) {
|
||||
// Update existing issue
|
||||
const issue = issues.data[0];
|
||||
try {
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: issue.number,
|
||||
body: issueBody
|
||||
});
|
||||
console.log(`Updated existing issue #${issue.number}`);
|
||||
} catch (commentError) {
|
||||
console.error('Failed to create comment:', commentError.message);
|
||||
throw commentError;
|
||||
}
|
||||
} else {
|
||||
// Create new issue
|
||||
try {
|
||||
const newIssue = await github.rest.issues.create({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
title: '🔍 Flaky Tests Detected',
|
||||
body: issueBody,
|
||||
labels: ['flaky-test', 'test-quality', 'automated']
|
||||
});
|
||||
console.log(`Created new issue #${newIssue.data.number}`);
|
||||
} catch (createError) {
|
||||
console.error('Failed to create issue:', createError.message);
|
||||
throw createError;
|
||||
}
|
||||
}
|
||||
} catch (listError) {
|
||||
console.error('Failed to list issues:', listError.message);
|
||||
console.error('API error details:', listError.response?.data || 'No response data');
|
||||
throw listError;
|
||||
}
|
||||
|
||||
- name: 📈 Update metrics
|
||||
if: always()
|
||||
run: |
|
||||
# Parse and store metrics for tracking
|
||||
if [ -f flaky_report.json ]; then
|
||||
FLAKY_COUNT=$(jq '.flaky_tests | length' flaky_report.json)
|
||||
SUCCESS_RATE=$(jq '.summary.success_rate' flaky_report.json)
|
||||
|
||||
echo "FLAKY_TEST_COUNT=$FLAKY_COUNT" >> $GITHUB_ENV
|
||||
echo "TEST_SUCCESS_RATE=$SUCCESS_RATE" >> $GITHUB_ENV
|
||||
|
||||
# Log metrics (could be sent to monitoring service)
|
||||
echo "::notice title=Flaky Test Metrics::Found $FLAKY_COUNT flaky tests with ${SUCCESS_RATE}% success rate"
|
||||
fi
|
||||
|
||||
analyze-test-history:
|
||||
name: 📊 Analyze Test History
|
||||
runs-on: ubuntu-22.04
|
||||
needs: detect-flaky-tests
|
||||
if: always()
|
||||
|
||||
steps:
|
||||
- name: ⬇️ Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: 📥 Download previous reports
|
||||
uses: dawidd6/action-download-artifact@v3
|
||||
with:
|
||||
workflow: flaky-test-detection.yml
|
||||
workflow_conclusion: completed
|
||||
name: flaky-test-report
|
||||
path: historical-reports
|
||||
if_no_artifact_found: warn
|
||||
|
||||
- name: 📊 Generate trend analysis
|
||||
run: |
|
||||
# Analyze historical trends
|
||||
python3 <<'EOF'
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime
|
||||
import glob
|
||||
|
||||
reports = []
|
||||
for report_file in glob.glob('historical-reports/*/flaky_report.json'):
|
||||
try:
|
||||
with open(report_file, 'r') as f:
|
||||
data = json.load(f)
|
||||
reports.append(data)
|
||||
except:
|
||||
pass
|
||||
|
||||
if not reports:
|
||||
print("No historical data found")
|
||||
exit(0)
|
||||
|
||||
# Sort by timestamp
|
||||
reports.sort(key=lambda x: x.get('timestamp', ''), reverse=True)
|
||||
|
||||
# Analyze trends
|
||||
print("## Test Stability Trend Analysis")
|
||||
print(f"\nAnalyzed {len(reports)} historical reports")
|
||||
print("\n### Flaky Test Counts Over Time")
|
||||
|
||||
for report in reports[:10]: # Last 10 reports
|
||||
timestamp = report.get('timestamp', 'Unknown')
|
||||
flaky_count = len(report.get('flaky_tests', []))
|
||||
success_rate = report.get('summary', {}).get('success_rate', 0) * 100
|
||||
print(f"- {timestamp[:10]}: {flaky_count} flaky tests ({success_rate:.1f}% success rate)")
|
||||
|
||||
# Identify persistently flaky tests
|
||||
all_flaky = {}
|
||||
for report in reports:
|
||||
for test in report.get('flaky_tests', []):
|
||||
test_name = test.get('test', '')
|
||||
if test_name not in all_flaky:
|
||||
all_flaky[test_name] = 0
|
||||
all_flaky[test_name] += 1
|
||||
|
||||
if all_flaky:
|
||||
print("\n### Persistently Flaky Tests")
|
||||
sorted_flaky = sorted(all_flaky.items(), key=lambda x: x[1], reverse=True)
|
||||
for test_name, count in sorted_flaky[:5]:
|
||||
percentage = (count / len(reports)) * 100
|
||||
print(f"- {test_name}: Flaky in {count}/{len(reports)} runs ({percentage:.1f}%)")
|
||||
EOF
|
||||
|
||||
- name: 💾 Save analysis
|
||||
uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: test-stability-analysis
|
||||
path: |
|
||||
flaky_report.json
|
||||
historical-reports/
|
||||
retention-days: 90
|
||||
333
.github/workflows/test.yml
vendored
Normal file
333
.github/workflows/test.yml
vendored
Normal file
@@ -0,0 +1,333 @@
|
||||
name: 🧪 Test Suite
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [main, develop]
|
||||
push:
|
||||
branches: [main, develop]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
issues: write
|
||||
|
||||
env:
|
||||
MIX_ENV: test
|
||||
ELIXIR_VERSION: '1.16'
|
||||
OTP_VERSION: '26'
|
||||
NODE_VERSION: '18'
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Test Suite
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:15
|
||||
env:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
POSTGRES_DB: wanderer_test
|
||||
options: >-
|
||||
--health-cmd pg_isready
|
||||
--health-interval 10s
|
||||
--health-timeout 5s
|
||||
--health-retries 5
|
||||
ports:
|
||||
- 5432:5432
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Setup Elixir/OTP
|
||||
uses: erlef/setup-beam@v1
|
||||
with:
|
||||
elixir-version: ${{ env.ELIXIR_VERSION }}
|
||||
otp-version: ${{ env.OTP_VERSION }}
|
||||
|
||||
- name: Cache Elixir dependencies
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
deps
|
||||
_build
|
||||
key: ${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }}
|
||||
restore-keys: ${{ runner.os }}-mix-
|
||||
|
||||
- name: Install Elixir dependencies
|
||||
run: |
|
||||
mix deps.get
|
||||
mix deps.compile
|
||||
|
||||
- name: Check code formatting
|
||||
id: format
|
||||
run: |
|
||||
if mix format --check-formatted; then
|
||||
echo "status=✅ Passed" >> $GITHUB_OUTPUT
|
||||
echo "count=0" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "status=❌ Failed" >> $GITHUB_OUTPUT
|
||||
echo "count=1" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
continue-on-error: true
|
||||
|
||||
- name: Compile code and capture warnings
|
||||
id: compile
|
||||
run: |
|
||||
# Capture compilation output
|
||||
output=$(mix compile 2>&1 || true)
|
||||
echo "$output" > compile_output.txt
|
||||
|
||||
# Count warnings
|
||||
warning_count=$(echo "$output" | grep -c "warning:" || echo "0")
|
||||
|
||||
# Check if compilation succeeded
|
||||
if mix compile > /dev/null 2>&1; then
|
||||
echo "status=✅ Success" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "status=❌ Failed" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
echo "warnings=$warning_count" >> $GITHUB_OUTPUT
|
||||
echo "output<<EOF" >> $GITHUB_OUTPUT
|
||||
echo "$output" >> $GITHUB_OUTPUT
|
||||
echo "EOF" >> $GITHUB_OUTPUT
|
||||
continue-on-error: true
|
||||
|
||||
- name: Setup database
|
||||
run: |
|
||||
mix ecto.create
|
||||
mix ecto.migrate
|
||||
|
||||
- name: Run tests with coverage
|
||||
id: tests
|
||||
run: |
|
||||
# Run tests with coverage
|
||||
output=$(mix test --cover 2>&1 || true)
|
||||
echo "$output" > test_output.txt
|
||||
|
||||
# Parse test results
|
||||
if echo "$output" | grep -q "0 failures"; then
|
||||
echo "status=✅ All Passed" >> $GITHUB_OUTPUT
|
||||
test_status="success"
|
||||
else
|
||||
echo "status=❌ Some Failed" >> $GITHUB_OUTPUT
|
||||
test_status="failed"
|
||||
fi
|
||||
|
||||
# Extract test counts
|
||||
test_line=$(echo "$output" | grep -E "[0-9]+ tests?, [0-9]+ failures?" | head -1 || echo "0 tests, 0 failures")
|
||||
total_tests=$(echo "$test_line" | grep -o '[0-9]\+ tests\?' | grep -o '[0-9]\+' | head -1 || echo "0")
|
||||
failures=$(echo "$test_line" | grep -o '[0-9]\+ failures\?' | grep -o '[0-9]\+' | head -1 || echo "0")
|
||||
|
||||
echo "total=$total_tests" >> $GITHUB_OUTPUT
|
||||
echo "failures=$failures" >> $GITHUB_OUTPUT
|
||||
echo "passed=$((total_tests - failures))" >> $GITHUB_OUTPUT
|
||||
|
||||
# Calculate success rate
|
||||
if [ "$total_tests" -gt 0 ]; then
|
||||
success_rate=$(echo "scale=1; ($total_tests - $failures) * 100 / $total_tests" | bc)
|
||||
else
|
||||
success_rate="0"
|
||||
fi
|
||||
echo "success_rate=$success_rate" >> $GITHUB_OUTPUT
|
||||
|
||||
exit_code=$?
|
||||
echo "exit_code=$exit_code" >> $GITHUB_OUTPUT
|
||||
continue-on-error: true
|
||||
|
||||
- name: Generate coverage report
|
||||
id: coverage
|
||||
run: |
|
||||
# Generate coverage report with GitHub format
|
||||
output=$(mix coveralls.github 2>&1 || true)
|
||||
echo "$output" > coverage_output.txt
|
||||
|
||||
# Extract coverage percentage
|
||||
coverage=$(echo "$output" | grep -o '[0-9]\+\.[0-9]\+%' | head -1 | sed 's/%//' || echo "0")
|
||||
if [ -z "$coverage" ]; then
|
||||
coverage="0"
|
||||
fi
|
||||
|
||||
echo "percentage=$coverage" >> $GITHUB_OUTPUT
|
||||
|
||||
# Determine status
|
||||
if (( $(echo "$coverage >= 80" | bc -l) )); then
|
||||
echo "status=✅ Excellent" >> $GITHUB_OUTPUT
|
||||
elif (( $(echo "$coverage >= 60" | bc -l) )); then
|
||||
echo "status=⚠️ Good" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "status=❌ Needs Improvement" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run Credo analysis
|
||||
id: credo
|
||||
run: |
|
||||
# Run Credo and capture output
|
||||
output=$(mix credo --strict --format=json 2>&1 || true)
|
||||
echo "$output" > credo_output.txt
|
||||
|
||||
# Try to parse JSON output
|
||||
if echo "$output" | jq . > /dev/null 2>&1; then
|
||||
issues=$(echo "$output" | jq '.issues | length' 2>/dev/null || echo "0")
|
||||
high_issues=$(echo "$output" | jq '.issues | map(select(.priority == "high")) | length' 2>/dev/null || echo "0")
|
||||
normal_issues=$(echo "$output" | jq '.issues | map(select(.priority == "normal")) | length' 2>/dev/null || echo "0")
|
||||
low_issues=$(echo "$output" | jq '.issues | map(select(.priority == "low")) | length' 2>/dev/null || echo "0")
|
||||
else
|
||||
# Fallback: try to count issues from regular output
|
||||
regular_output=$(mix credo --strict 2>&1 || true)
|
||||
issues=$(echo "$regular_output" | grep -c "┃" || echo "0")
|
||||
high_issues="0"
|
||||
normal_issues="0"
|
||||
low_issues="0"
|
||||
fi
|
||||
|
||||
echo "total_issues=$issues" >> $GITHUB_OUTPUT
|
||||
echo "high_issues=$high_issues" >> $GITHUB_OUTPUT
|
||||
echo "normal_issues=$normal_issues" >> $GITHUB_OUTPUT
|
||||
echo "low_issues=$low_issues" >> $GITHUB_OUTPUT
|
||||
|
||||
# Determine status
|
||||
if [ "$issues" -eq 0 ]; then
|
||||
echo "status=✅ Clean" >> $GITHUB_OUTPUT
|
||||
elif [ "$issues" -lt 10 ]; then
|
||||
echo "status=⚠️ Minor Issues" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "status=❌ Needs Attention" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
continue-on-error: true
|
||||
|
||||
- name: Run Dialyzer analysis
|
||||
id: dialyzer
|
||||
run: |
|
||||
# Ensure PLT is built
|
||||
mix dialyzer --plt
|
||||
|
||||
# Run Dialyzer and capture output
|
||||
output=$(mix dialyzer --format=github 2>&1 || true)
|
||||
echo "$output" > dialyzer_output.txt
|
||||
|
||||
# Count warnings and errors
|
||||
warnings=$(echo "$output" | grep -c "warning:" || echo "0")
|
||||
errors=$(echo "$output" | grep -c "error:" || echo "0")
|
||||
|
||||
echo "warnings=$warnings" >> $GITHUB_OUTPUT
|
||||
echo "errors=$errors" >> $GITHUB_OUTPUT
|
||||
|
||||
# Determine status
|
||||
if [ "$errors" -eq 0 ] && [ "$warnings" -eq 0 ]; then
|
||||
echo "status=✅ Clean" >> $GITHUB_OUTPUT
|
||||
elif [ "$errors" -eq 0 ]; then
|
||||
echo "status=⚠️ Warnings Only" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "status=❌ Has Errors" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
continue-on-error: true
|
||||
|
||||
- name: Create test results summary
|
||||
id: summary
|
||||
run: |
|
||||
# Calculate overall score
|
||||
format_score=${{ steps.format.outputs.count == '0' && '100' || '0' }}
|
||||
compile_score=${{ steps.compile.outputs.warnings == '0' && '100' || '80' }}
|
||||
test_score=${{ steps.tests.outputs.success_rate }}
|
||||
coverage_score=${{ steps.coverage.outputs.percentage }}
|
||||
credo_score=$(echo "scale=0; (100 - ${{ steps.credo.outputs.total_issues }} * 2)" | bc | sed 's/^-.*$/0/')
|
||||
dialyzer_score=$(echo "scale=0; (100 - ${{ steps.dialyzer.outputs.warnings }} * 2 - ${{ steps.dialyzer.outputs.errors }} * 10)" | bc | sed 's/^-.*$/0/')
|
||||
|
||||
overall_score=$(echo "scale=1; ($format_score + $compile_score + $test_score + $coverage_score + $credo_score + $dialyzer_score) / 6" | bc)
|
||||
|
||||
echo "overall_score=$overall_score" >> $GITHUB_OUTPUT
|
||||
|
||||
# Determine overall status
|
||||
if (( $(echo "$overall_score >= 90" | bc -l) )); then
|
||||
echo "overall_status=🌟 Excellent" >> $GITHUB_OUTPUT
|
||||
elif (( $(echo "$overall_score >= 80" | bc -l) )); then
|
||||
echo "overall_status=✅ Good" >> $GITHUB_OUTPUT
|
||||
elif (( $(echo "$overall_score >= 70" | bc -l) )); then
|
||||
echo "overall_status=⚠️ Needs Improvement" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "overall_status=❌ Poor" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
continue-on-error: true
|
||||
|
||||
- name: Find existing PR comment
|
||||
if: github.event_name == 'pull_request'
|
||||
id: find_comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: '## 🧪 Test Results Summary'
|
||||
|
||||
- name: Create or update PR comment
|
||||
if: github.event_name == 'pull_request'
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
with:
|
||||
comment-id: ${{ steps.find_comment.outputs.comment-id }}
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
edit-mode: replace
|
||||
body: |
|
||||
## 🧪 Test Results Summary
|
||||
|
||||
**Overall Quality Score: ${{ steps.summary.outputs.overall_score }}%** ${{ steps.summary.outputs.overall_status }}
|
||||
|
||||
### 📊 Metrics Dashboard
|
||||
|
||||
| Category | Status | Count | Details |
|
||||
|----------|---------|-------|---------|
|
||||
| 📝 **Code Formatting** | ${{ steps.format.outputs.status }} | ${{ steps.format.outputs.count }} issues | `mix format --check-formatted` |
|
||||
| 🔨 **Compilation** | ${{ steps.compile.outputs.status }} | ${{ steps.compile.outputs.warnings }} warnings | `mix compile` |
|
||||
| 🧪 **Tests** | ${{ steps.tests.outputs.status }} | ${{ steps.tests.outputs.failures }}/${{ steps.tests.outputs.total }} failed | Success rate: ${{ steps.tests.outputs.success_rate }}% |
|
||||
| 📊 **Coverage** | ${{ steps.coverage.outputs.status }} | ${{ steps.coverage.outputs.percentage }}% | `mix coveralls` |
|
||||
| 🎯 **Credo** | ${{ steps.credo.outputs.status }} | ${{ steps.credo.outputs.total_issues }} issues | High: ${{ steps.credo.outputs.high_issues }}, Normal: ${{ steps.credo.outputs.normal_issues }}, Low: ${{ steps.credo.outputs.low_issues }} |
|
||||
| 🔍 **Dialyzer** | ${{ steps.dialyzer.outputs.status }} | ${{ steps.dialyzer.outputs.errors }} errors, ${{ steps.dialyzer.outputs.warnings }} warnings | `mix dialyzer` |
|
||||
|
||||
### 🎯 Quality Gates
|
||||
|
||||
Based on the project's quality thresholds:
|
||||
- **Compilation Warnings**: ${{ steps.compile.outputs.warnings }}/148 (limit: 148)
|
||||
- **Credo Issues**: ${{ steps.credo.outputs.total_issues }}/87 (limit: 87)
|
||||
- **Dialyzer Warnings**: ${{ steps.dialyzer.outputs.warnings }}/161 (limit: 161)
|
||||
- **Test Coverage**: ${{ steps.coverage.outputs.percentage }}%/50% (minimum: 50%)
|
||||
- **Test Failures**: ${{ steps.tests.outputs.failures }}/0 (limit: 0)
|
||||
|
||||
<details>
|
||||
<summary>📈 Progress Toward Goals</summary>
|
||||
|
||||
Target goals for the project:
|
||||
- ✨ **Zero compilation warnings** (currently: ${{ steps.compile.outputs.warnings }})
|
||||
- ✨ **≤10 Credo issues** (currently: ${{ steps.credo.outputs.total_issues }})
|
||||
- ✨ **Zero Dialyzer warnings** (currently: ${{ steps.dialyzer.outputs.warnings }})
|
||||
- ✨ **≥85% test coverage** (currently: ${{ steps.coverage.outputs.percentage }}%)
|
||||
- ✅ **Zero test failures** (currently: ${{ steps.tests.outputs.failures }})
|
||||
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>🔧 Quick Actions</summary>
|
||||
|
||||
To improve code quality:
|
||||
```bash
|
||||
# Fix formatting issues
|
||||
mix format
|
||||
|
||||
# View detailed Credo analysis
|
||||
mix credo --strict
|
||||
|
||||
# Check Dialyzer warnings
|
||||
mix dialyzer
|
||||
|
||||
# Generate detailed coverage report
|
||||
mix coveralls.html
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
🤖 *Auto-generated by GitHub Actions* • Updated: ${{ github.event.head_commit.timestamp }}
|
||||
|
||||
> **Note**: This comment will be updated automatically when new commits are pushed to this PR.
|
||||
.gitignore (vendored, 6 changed lines)
@@ -4,7 +4,8 @@
*.iml

*.key

.repomixignore
repomix*
/.idea/
/node_modules/
/assets/node_modules/
@@ -17,6 +18,9 @@
/priv/static/*.js
/priv/static/*.css

# Dialyzer PLT files
/priv/plts/

.DS_Store
**/.DS_Store

CHANGELOG.md (1878 changed lines)
File diff suppressed because it is too large
@@ -21,21 +21,17 @@ RUN mkdir config
# to ensure any relevant config change will trigger the dependencies
# to be re-compiled.
COPY config/config.exs config/${MIX_ENV}.exs config/

COPY priv priv

COPY lib lib

COPY assets assets

RUN mix compile

RUN mix assets.deploy
RUN mix compile

# Changes to config/runtime.exs don't require recompiling the code
COPY config/runtime.exs config/

COPY rel rel

RUN mix release

# start a new build stage so that the final image will only contain

assets/jest.config.js (new file, 14 lines)
@@ -0,0 +1,14 @@
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'jsdom',
  roots: ['<rootDir>'],
  moduleDirectories: ['node_modules', 'js'],
  moduleNameMapper: {
    '^@/(.*)$': '<rootDir>/js/$1',
    '\.scss$': 'identity-obj-proxy', // Mock SCSS files
  },
  transform: {
    '^.+\.(ts|tsx)$': 'ts-jest',
    '^.+\.(js|jsx)$': 'babel-jest', // Add babel-jest for JS/JSX files if needed
  },
};
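Editor's note: with the config above, any spec file under the assets tree runs in a jsdom environment through ts-jest. A minimal sketch of such a test is shown below; the file path is an assumption for illustration and is not part of this changeset.

```ts
// assets/js/__tests__/jsdom-env.spec.ts — illustrative only, not from this diff.
// Confirms the jsdom test environment configured in jest.config.js is active.

describe('jest environment', () => {
  it('provides a DOM via jsdom', () => {
    const el = document.createElement('div');
    el.textContent = 'hello';
    expect(el.textContent).toBe('hello');
  });
});
```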
@@ -1,18 +1,13 @@
// import './tailwind.css';
//@import 'primereact/resources/themes/bootstrap4-dark-blue/theme.css';
//@import 'primereact/resources/themes/lara-dark-purple/theme.css';
//@import "prime-fixes";
@import 'primereact/resources/primereact.min.css';
//@import 'primeflex/primeflex.css';
@import 'primeicons/primeicons.css';
//@import 'primereact/resources/primereact.css';
@use 'primereact/resources/primereact.min.css';
@use 'primeicons/primeicons.css';


@import "fixes";
@import "prime-fixes";
@import "custom-scrollbar";
@import "tooltip";
@import "context-menu";
@use "fixes";
@use "prime-fixes";
@use "custom-scrollbar";
@use "tooltip";
@use "context-menu";


.fixedImportant {

@@ -1,7 +1,7 @@
|
||||
.vertical-tabs-container {
|
||||
display: flex;
|
||||
width: 100%;
|
||||
min-height: 300px;
|
||||
min-height: 400px;
|
||||
|
||||
.p-tabview {
|
||||
width: 100%;
|
||||
@@ -68,6 +68,28 @@
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
&.color-warn {
|
||||
@apply bg-yellow-600/5 border-r-yellow-600/20;
|
||||
|
||||
&:hover {
|
||||
@apply bg-yellow-600/10 border-r-yellow-600/40;
|
||||
}
|
||||
|
||||
|
||||
&.p-tabview-selected {
|
||||
@apply bg-yellow-600/10 border-r-yellow-600;
|
||||
|
||||
.p-tabview-nav-link {
|
||||
@apply text-yellow-600;
|
||||
}
|
||||
|
||||
&:hover {
|
||||
@apply bg-yellow-600/10 border-r-yellow-600;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
@import "fix-dialog";
|
||||
@import "fix-popup";
|
||||
@import "fix-tabs";
|
||||
//@import "fix-input";
|
||||
|
||||
//@import "theme";
|
||||
@use "fix-dialog";
|
||||
@use "fix-popup";
|
||||
@use "fix-tabs";
|
||||
|
||||
@@ -19,7 +19,7 @@ export interface ContextMenuSystemProps {
|
||||
onSystemStatus(val: number): void;
|
||||
onSystemLabels(val: string): void;
|
||||
onCustomLabelDialog(): void;
|
||||
onTogglePing(type: PingType, solar_system_id: string, hasPing: boolean): void;
|
||||
onTogglePing(type: PingType, solar_system_id: string, ping_id: string | undefined, hasPing: boolean): void;
|
||||
onWaypointSet: WaypointSetContextHandler;
|
||||
}
|
||||
|
||||
|
||||
@@ -109,7 +109,7 @@ export const useContextMenuSystemItems = ({
|
||||
|
||||
{ separator: true },
|
||||
{
|
||||
command: () => onTogglePing(PingType.Rally, systemId, hasPing),
|
||||
command: () => onTogglePing(PingType.Rally, systemId, ping?.id, hasPing),
|
||||
disabled: !isShowPingBtn,
|
||||
template: () => {
|
||||
const iconClasses = clsx({
|
||||
|
||||
@@ -1,17 +1,24 @@
|
||||
import { Node } from 'reactflow';
|
||||
import { useCallback, useRef, useState } from 'react';
|
||||
import { useCallback, useMemo, useRef, useState } from 'react';
|
||||
import { ContextMenu } from 'primereact/contextmenu';
|
||||
import { SolarSystemRawType } from '@/hooks/Mapper/types';
|
||||
import { ctxManager } from '@/hooks/Mapper/utils/contextManager.ts';
|
||||
import { NodeSelectionMouseHandler } from '@/hooks/Mapper/components/contexts/types.ts';
|
||||
import { useDeleteSystems } from '@/hooks/Mapper/components/contexts/hooks';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
|
||||
export const useContextMenuSystemMultipleHandlers = () => {
|
||||
const {
|
||||
data: { pings },
|
||||
} = useMapRootState();
|
||||
|
||||
const contextMenuRef = useRef<ContextMenu | null>(null);
|
||||
const [systems, setSystems] = useState<Node<SolarSystemRawType>[]>();
|
||||
|
||||
const { deleteSystems } = useDeleteSystems();
|
||||
|
||||
const ping = useMemo(() => (pings.length === 1 ? pings[0] : undefined), [pings]);
|
||||
|
||||
const handleSystemMultipleContext: NodeSelectionMouseHandler = (ev, systems_) => {
|
||||
setSystems(systems_);
|
||||
ev.preventDefault();
|
||||
@@ -24,13 +31,17 @@ export const useContextMenuSystemMultipleHandlers = () => {
|
||||
return;
|
||||
}
|
||||
|
||||
const sysToDel = systems.filter(x => !x.data.locked).map(x => x.id);
|
||||
const sysToDel = systems
|
||||
.filter(x => !x.data.locked)
|
||||
.filter(x => x.id !== ping?.solar_system_id)
|
||||
.map(x => x.id);
|
||||
|
||||
if (sysToDel.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
deleteSystems(sysToDel);
|
||||
}, [deleteSystems, systems]);
|
||||
}, [deleteSystems, systems, ping]);
|
||||
|
||||
return {
|
||||
handleSystemMultipleContext,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { MapUserSettings, SettingsWithVersion } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
|
||||
const REQUIRED_KEYS = [
|
||||
export const REQUIRED_KEYS = [
|
||||
'widgets',
|
||||
'interface',
|
||||
'onTheMap',
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
export * from './useSystemInfo';
|
||||
export * from './useGetOwnOnlineCharacters';
|
||||
export * from './useElementWidth';
|
||||
export * from './useDetectSettingsChanged';
|
||||
|
||||
@@ -0,0 +1,23 @@
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { useEffect, useState } from 'react';

export const useDetectSettingsChanged = () => {
  const {
    storedSettings: {
      interfaceSettings,
      settingsRoutes,
      settingsLocal,
      settingsSignatures,
      settingsOnTheMap,
      settingsKills,
    },
  } = useMapRootState();
  const [counter, setCounter] = useState(0);

  useEffect(
    () => setCounter(x => x + 1),
    [interfaceSettings, settingsRoutes, settingsLocal, settingsSignatures, settingsOnTheMap, settingsKills],
  );

  return counter;
};
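Editor's note: the hook above simply bumps a counter whenever any stored settings slice changes, so a consumer can depend on a single value instead of listing every slice (the AdminSettings component later in this changeset uses it exactly this way for its dirty check). A minimal usage sketch — the component name and the logging side effect are illustrative, not from this diff:

```tsx
import { useEffect } from 'react';
import { useDetectSettingsChanged } from '@/hooks/Mapper/components/hooks';

// Illustrative consumer: re-run an effect whenever any settings slice changes,
// without enumerating the slices in the dependency array.
export const SettingsChangeLogger = () => {
  const settingsChanged = useDetectSettingsChanged();

  useEffect(() => {
    if (settingsChanged === 0) return; // skip the initial render
    console.log('map settings changed', settingsChanged);
  }, [settingsChanged]);

  return null;
};
```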
@@ -1,4 +1,4 @@
|
||||
@import '@/hooks/Mapper/components/map/styles/eve-common-variables';
|
||||
@use '@/hooks/Mapper/components/map/styles/eve-common-variables';
|
||||
|
||||
.ConnectionTimeEOL {
|
||||
background-image: linear-gradient(207deg, transparent, var(--conn-time-eol));
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
@import '@/hooks/Mapper/components/map/styles/eve-common-variables';
|
||||
@use '@/hooks/Mapper/components/map/styles/eve-common-variables';
|
||||
|
||||
.EdgePathBack {
|
||||
fill: none;
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
@import '@/hooks/Mapper/components/map/styles/eve-common-variables';
|
||||
@use "sass:color";
|
||||
@use '@/hooks/Mapper/components/map/styles/eve-common-variables';
|
||||
|
||||
$pastel-blue: #5a7d9a;
|
||||
$pastel-pink: rgb(30, 161, 255);
|
||||
@@ -34,7 +35,7 @@ $neon-color-3: rgba(27, 132, 236, 0.40);
|
||||
color: var(--rf-text-color, #ffffff);
|
||||
|
||||
box-shadow: 0 0 5px rgba($dark-bg, 0.5);
|
||||
border: 1px solid darken($pastel-blue, 10%);
|
||||
border: 1px solid color.adjust($pastel-blue, $lightness: -10%);
|
||||
border-radius: 5px;
|
||||
position: relative;
|
||||
z-index: 3;
|
||||
|
||||
@@ -22,7 +22,9 @@ import { KillsCounter } from '@/hooks/Mapper/components/map/components/KillsCoun
|
||||
export const SolarSystemNodeDefault = memo((props: NodeProps<MapSolarSystemType>) => {
|
||||
const nodeVars = useSolarSystemNode(props);
|
||||
const { localCounterCharacters } = useLocalCounter(nodeVars);
|
||||
const localKillsCount = useNodeKillsCount(nodeVars.solarSystemId, nodeVars.killsCount);
|
||||
const { killsCount: localKillsCount, killsActivityType: localKillsActivityType } = useNodeKillsCount(
|
||||
nodeVars.solarSystemId,
|
||||
);
|
||||
|
||||
// console.log('JOipP', `render ${nodeVars.id}`, render++);
|
||||
|
||||
@@ -38,13 +40,13 @@ export const SolarSystemNodeDefault = memo((props: NodeProps<MapSolarSystemType>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{localKillsCount != null && localKillsCount > 0 && nodeVars.solarSystemId && (
|
||||
{localKillsCount != null && localKillsCount > 0 && nodeVars.solarSystemId && localKillsActivityType && (
|
||||
<KillsCounter
|
||||
killsCount={localKillsCount}
|
||||
systemId={nodeVars.solarSystemId}
|
||||
size={TooltipSize.lg}
|
||||
killsActivityType={nodeVars.killsActivityType}
|
||||
className={clsx(classes.Bookmark, MARKER_BOOKMARK_BG_STYLES[nodeVars.killsActivityType!])}
|
||||
killsActivityType={localKillsActivityType}
|
||||
className={clsx(classes.Bookmark, MARKER_BOOKMARK_BG_STYLES[localKillsActivityType])}
|
||||
>
|
||||
<div className={clsx(classes.BookmarkWithIcon)}>
|
||||
<span className={clsx(PrimeIcons.BOLT, classes.icon)} />
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
@import './SolarSystemNodeDefault.module.scss';
|
||||
@use './SolarSystemNodeDefault.module.scss';
|
||||
|
||||
/* ---------------------------------------------
|
||||
Only override what's different from the base
|
||||
|
||||
@@ -21,7 +21,9 @@ import { KillsCounter } from '@/hooks/Mapper/components/map/components/KillsCoun
|
||||
export const SolarSystemNodeTheme = memo((props: NodeProps<MapSolarSystemType>) => {
|
||||
const nodeVars = useSolarSystemNode(props);
|
||||
const { localCounterCharacters } = useLocalCounter(nodeVars);
|
||||
const localKillsCount = useNodeKillsCount(nodeVars.solarSystemId, nodeVars.killsCount);
|
||||
const { killsCount: localKillsCount, killsActivityType: localKillsActivityType } = useNodeKillsCount(
|
||||
nodeVars.solarSystemId,
|
||||
);
|
||||
|
||||
// console.log('JOipP', `render ${nodeVars.id}`, render++);
|
||||
|
||||
@@ -37,13 +39,13 @@ export const SolarSystemNodeTheme = memo((props: NodeProps<MapSolarSystemType>)
|
||||
</div>
|
||||
)}
|
||||
|
||||
{localKillsCount && localKillsCount > 0 && nodeVars.solarSystemId && (
|
||||
{localKillsCount && localKillsCount > 0 && nodeVars.solarSystemId && localKillsActivityType && (
|
||||
<KillsCounter
|
||||
killsCount={localKillsCount}
|
||||
systemId={nodeVars.solarSystemId}
|
||||
size={TooltipSize.lg}
|
||||
killsActivityType={nodeVars.killsActivityType}
|
||||
className={clsx(classes.Bookmark, MARKER_BOOKMARK_BG_STYLES[nodeVars.killsActivityType!])}
|
||||
killsActivityType={localKillsActivityType}
|
||||
className={clsx(classes.Bookmark, MARKER_BOOKMARK_BG_STYLES[localKillsActivityType])}
|
||||
>
|
||||
<div className={clsx(classes.BookmarkWithIcon)}>
|
||||
<span className={clsx(PrimeIcons.BOLT, classes.icon)} />
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
@import '@/hooks/Mapper/components/map/styles/eve-common-variables';
|
||||
@use '@/hooks/Mapper/components/map/styles/eve-common-variables';
|
||||
|
||||
.Signature {
|
||||
position: relative;
|
||||
|
||||
@@ -6,5 +6,5 @@ export * from './useCommandsCharacters';
|
||||
export * from './useCommandsConnections';
|
||||
export * from './useCommandsConnections';
|
||||
export * from './useCenterSystem';
|
||||
export * from './useSelectSystem';
|
||||
export * from './useSelectSystems';
|
||||
export * from './useMapCommands';
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
import { useReactFlow } from 'reactflow';
|
||||
import { useCallback, useRef } from 'react';
|
||||
import { CommandSelectSystem } from '@/hooks/Mapper/types';
|
||||
|
||||
export const useSelectSystem = () => {
|
||||
const rf = useReactFlow();
|
||||
|
||||
const ref = useRef({ rf });
|
||||
ref.current = { rf };
|
||||
|
||||
return useCallback((systemId: CommandSelectSystem) => {
|
||||
ref.current.rf.setNodes(nds =>
|
||||
nds.map(node => {
|
||||
return {
|
||||
...node,
|
||||
selected: node.id === systemId,
|
||||
};
|
||||
}),
|
||||
);
|
||||
}, []);
|
||||
};
|
||||
@@ -0,0 +1,31 @@
import { useReactFlow } from 'reactflow';
import { useCallback, useRef } from 'react';
import { CommandSelectSystems } from '@/hooks/Mapper/types';
import { OnMapSelectionChange } from '@/hooks/Mapper/components/map/map.types.ts';

export const useSelectSystems = (onSelectionChange: OnMapSelectionChange) => {
  const rf = useReactFlow();

  const ref = useRef({ rf, onSelectionChange });
  ref.current = { rf, onSelectionChange };

  return useCallback(({ systems, delay }: CommandSelectSystems) => {
    const run = () => {
      ref.current.rf.setNodes(nds =>
        nds.map(node => {
          return {
            ...node,
            selected: systems.includes(node.id),
          };
        }),
      );
    };

    if (delay == null || delay === 0) {
      run();
      return;
    }

    setTimeout(run, delay);
  }, []);
};
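Editor's note: the only subtlety in the new hook is the optional `delay` — when it is null or 0 the selection is applied synchronously, otherwise it is deferred with `setTimeout` (the `Commands.selectSystem` handler below passes `delay: 500`). A standalone illustration of that branch in plain TypeScript; this is an extracted sketch, not code from the changeset:

```ts
// Standalone illustration of the delay handling used by useSelectSystems above.
function runWithOptionalDelay(run: () => void, delay?: number | null): void {
  if (delay == null || delay === 0) {
    run(); // apply immediately
    return;
  }
  setTimeout(run, delay); // defer by `delay` milliseconds
}

runWithOptionalDelay(() => console.log('selected now'));
runWithOptionalDelay(() => console.log('selected after 500 ms'), 500);
```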
@@ -14,6 +14,7 @@ import {
|
||||
CommandRemoveSystems,
|
||||
Commands,
|
||||
CommandSelectSystem,
|
||||
CommandSelectSystems,
|
||||
CommandUpdateConnection,
|
||||
CommandUpdateSystems,
|
||||
MapHandlers,
|
||||
@@ -28,7 +29,7 @@ import {
|
||||
useMapRemoveSystems,
|
||||
useMapUpdateSystems,
|
||||
useCenterSystem,
|
||||
useSelectSystem,
|
||||
useSelectSystems,
|
||||
} from './api';
|
||||
import { OnMapSelectionChange } from '@/hooks/Mapper/components/map/map.types.ts';
|
||||
|
||||
@@ -38,7 +39,7 @@ export const useMapHandlers = (ref: ForwardedRef<MapHandlers>, onSelectionChange
|
||||
const mapUpdateSystems = useMapUpdateSystems();
|
||||
const removeSystems = useMapRemoveSystems(onSelectionChange);
|
||||
const centerSystem = useCenterSystem();
|
||||
const selectSystem = useSelectSystem();
|
||||
const selectSystems = useSelectSystems(onSelectionChange);
|
||||
|
||||
const selectRef = useRef({ onSelectionChange });
|
||||
selectRef.current = { onSelectionChange };
|
||||
@@ -105,14 +106,11 @@ export const useMapHandlers = (ref: ForwardedRef<MapHandlers>, onSelectionChange
|
||||
break;
|
||||
|
||||
case Commands.selectSystem:
|
||||
setTimeout(() => {
|
||||
const systemId = `${data}`;
|
||||
selectRef.current.onSelectionChange({
|
||||
systems: [systemId],
|
||||
connections: [],
|
||||
});
|
||||
selectSystem(systemId as CommandSelectSystem);
|
||||
}, 500);
|
||||
selectSystems({ systems: [data as string], delay: 500 });
|
||||
break;
|
||||
|
||||
case Commands.selectSystems:
|
||||
selectSystems(data as CommandSelectSystems);
|
||||
break;
|
||||
|
||||
case Commands.pingAdded:
|
||||
|
||||
@@ -14,7 +14,13 @@ interface MapEvent {
  payload?: Kill[];
}

export function useNodeKillsCount(systemId: number | string, initialKillsCount: number | null): number | null {
function getActivityType(count: number): string {
  if (count <= 5) return 'activityNormal';
  if (count <= 30) return 'activityWarn';
  return 'activityDanger';
}

export function useNodeKillsCount(systemId: number | string, initialKillsCount: number | null = null): { killsCount: number | null; killsActivityType: string | null } {
  const [killsCount, setKillsCount] = useState<number | null>(initialKillsCount);
  const { data: mapData } = useMapRootState();
  const { detailedKills = {} } = mapData;
@@ -73,5 +79,9 @@ export function useNodeKillsCount(systemId: number | string, initialKillsCount:

  useMapEventListener(handleEvent);

  return killsCount;
  const killsActivityType = useMemo(() => {
    return killsCount !== null && killsCount > 0 ? getActivityType(killsCount) : null;
  }, [killsCount]);

  return { killsCount, killsActivityType };
}

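Editor's note: the thresholds moved into the hook above map a kill count to an activity bucket, and the hook now returns that bucket alongside the count so the node components below no longer derive it themselves. A standalone check of the bucketing (the function body is copied from the hunk above; the calls are illustrative):

```ts
// Copied from the hunk above for illustration; thresholds: <=5 normal, <=30 warn, else danger.
function getActivityType(count: number): string {
  if (count <= 5) return 'activityNormal';
  if (count <= 30) return 'activityWarn';
  return 'activityDanger';
}

console.log(getActivityType(3));  // 'activityNormal'
console.log(getActivityType(12)); // 'activityWarn'
console.log(getActivityType(45)); // 'activityDanger'
```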
@@ -15,20 +15,12 @@ import { useSystemName } from './useSystemName';
|
||||
import { LabelInfo, useLabelsInfo } from './useLabelsInfo';
|
||||
import { getSystemStaticInfo } from '@/hooks/Mapper/mapRootProvider/hooks/useLoadSystemStatic';
|
||||
|
||||
function getActivityType(count: number): string {
|
||||
if (count <= 5) return 'activityNormal';
|
||||
if (count <= 30) return 'activityWarn';
|
||||
return 'activityDanger';
|
||||
}
|
||||
|
||||
export interface SolarSystemNodeVars {
|
||||
id: string;
|
||||
selected: boolean;
|
||||
visible: boolean;
|
||||
isWormhole: boolean;
|
||||
classTitleColor: string | null;
|
||||
killsCount: number | null;
|
||||
killsActivityType: string | null;
|
||||
hasUserCharacters: boolean;
|
||||
showHandlers: boolean;
|
||||
regionClass: string | null;
|
||||
@@ -126,7 +118,6 @@ export const useSolarSystemNode = (props: NodeProps<MapSolarSystemType>): SolarS
|
||||
characters,
|
||||
wormholesData,
|
||||
hubs,
|
||||
kills,
|
||||
userCharacters,
|
||||
isConnecting,
|
||||
hoverNodeId,
|
||||
@@ -163,9 +154,6 @@ export const useSolarSystemNode = (props: NodeProps<MapSolarSystemType>): SolarS
|
||||
isShowLinkedSigId,
|
||||
});
|
||||
|
||||
const killsCount = useMemo(() => kills[parseInt(solar_system_id)] ?? null, [kills, solar_system_id]);
|
||||
const killsActivityType = killsCount ? getActivityType(killsCount) : null;
|
||||
|
||||
const hasUserCharacters = useMemo(
|
||||
() => charactersInSystem.some(x => userCharacters.includes(x.eve_id)),
|
||||
[charactersInSystem, userCharacters],
|
||||
@@ -215,8 +203,6 @@ export const useSolarSystemNode = (props: NodeProps<MapSolarSystemType>): SolarS
|
||||
visible,
|
||||
isWormhole,
|
||||
classTitleColor,
|
||||
killsCount,
|
||||
killsActivityType,
|
||||
hasUserCharacters,
|
||||
userCharacters,
|
||||
showHandlers,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
@import './eve-common-variables';
|
||||
@import './eve-common';
|
||||
@use './eve-common-variables';
|
||||
@use './eve-common';
|
||||
|
||||
.default-theme {
|
||||
--rf-bg-color: #0C0A09;
|
||||
|
||||
@@ -1,18 +1,19 @@
|
||||
@use "sass:color";
|
||||
|
||||
$friendlyBase: #3bbd39;
|
||||
$friendlyAlpha: #3bbd3952;
|
||||
$friendlyDark20: darken($friendlyBase, 20%);
|
||||
$friendlyDark30: darken($friendlyBase, 30%);
|
||||
$friendlyDark5: darken($friendlyBase, 5%);
|
||||
$friendlyDark20: color.adjust($friendlyBase, $lightness: -20%);
|
||||
$friendlyDark30: color.adjust($friendlyBase, $lightness: -30%);
|
||||
$friendlyDark5: color.adjust($friendlyBase, $lightness: -5%);
|
||||
|
||||
$lookingForBase: #43c2fd;
|
||||
$lookingForAlpha: rgba(67, 176, 253, 0.48);
|
||||
$lookingForDark15: darken($lookingForBase, 15%);
|
||||
$lookingForDark15: color.adjust($lookingForBase, $lightness: -15%);
|
||||
|
||||
$homeBase: rgb(179, 253, 67);
|
||||
$homeAlpha: rgba(186, 248, 48, 0.32);
|
||||
$homeBackground: #a0fa5636;
|
||||
$homeDark30: darken($homeBase, 30%);
|
||||
$homeDark30: color.adjust($homeBase, $lightness: -30%);
|
||||
|
||||
:root {
|
||||
--pastel-blue: #5a7d9a;
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
@import './eve-common-variables';
|
||||
@use './eve-common-variables';
|
||||
|
||||
|
||||
.eve-wh-effect-color-pulsar {
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
@import './default-theme.scss';
|
||||
@import './pathfinder-theme.scss';
|
||||
@use './default-theme.scss';
|
||||
@use './pathfinder-theme.scss';
|
||||
@@ -1,10 +1,11 @@
|
||||
@import './eve-common-variables';
|
||||
@import './eve-common';
|
||||
@use "sass:color";
|
||||
@use './eve-common-variables';
|
||||
@use './eve-common';
|
||||
@import url('https://fonts.googleapis.com/css2?family=Oxygen:wght@300;400;700&display=swap');
|
||||
|
||||
$homeBase: rgb(197, 253, 67);
|
||||
$homeAlpha: rgba(197, 253, 67, 0.32);
|
||||
$homeDark30: darken($homeBase, 30%);
|
||||
$homeDark30: color.adjust($homeBase, $lightness: -30%);
|
||||
|
||||
.pathfinder-theme {
|
||||
/* -- Override values from the default theme -- */
|
||||
|
||||
@@ -14,6 +14,7 @@ import { PrimeIcons } from 'primereact/api';
|
||||
import { ConfirmPopup } from 'primereact/confirmpopup';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { OutCommand } from '@/hooks/Mapper/types';
|
||||
import { useConfirmPopup } from '@/hooks/Mapper/hooks';
|
||||
|
||||
const TOOLTIP_PROPS = { content: 'Remove comment', position: TooltipPosition.top };
|
||||
|
||||
@@ -28,8 +29,7 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
|
||||
const char = useGetCacheCharacter(characterEveId);
|
||||
const [hovered, setHovered] = useState(false);
|
||||
|
||||
const cpRemoveBtnRef = useRef<HTMLElement>();
|
||||
const [cpRemoveVisible, setCpRemoveVisible] = useState(false);
|
||||
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
|
||||
|
||||
const { outCommand } = useMapRootState();
|
||||
const ref = useRef({ outCommand, id });
|
||||
@@ -45,9 +45,6 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
|
||||
const handleMouseEnter = useCallback(() => setHovered(true), []);
|
||||
const handleMouseLeave = useCallback(() => setHovered(false), []);
|
||||
|
||||
const handleShowCP = useCallback(() => setCpRemoveVisible(true), []);
|
||||
const handleHideCP = useCallback(() => setCpRemoveVisible(false), []);
|
||||
|
||||
return (
|
||||
<>
|
||||
<InfoDrawer
|
||||
@@ -68,11 +65,11 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
|
||||
{!hovered && <TimeAgo timestamp={time} />}
|
||||
{hovered && (
|
||||
// @ts-ignore
|
||||
<div ref={cpRemoveBtnRef}>
|
||||
<div ref={cfRef}>
|
||||
<WdImgButton
|
||||
className={clsx(PrimeIcons.TRASH, 'hover:text-red-400')}
|
||||
tooltip={TOOLTIP_PROPS}
|
||||
onClick={handleShowCP}
|
||||
onClick={cfShow}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
@@ -85,9 +82,9 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
|
||||
</InfoDrawer>
|
||||
|
||||
<ConfirmPopup
|
||||
target={cpRemoveBtnRef.current}
|
||||
visible={cpRemoveVisible}
|
||||
onHide={handleHideCP}
|
||||
target={cfRef.current}
|
||||
visible={cfVisible}
|
||||
onHide={cfHide}
|
||||
message="Are you sure you want to delete?"
|
||||
icon="pi pi-exclamation-triangle"
|
||||
accept={handleDelete}
|
||||
|
||||
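Editor's note: several components in this changeset (MarkdownComment above, and PingsInterface, AdminSettings, CommonSettings, ServerSettings and OldSettingsDialog below) replace the ad-hoc `cpRemoveBtnRef` / `cpRemoveVisible` state pair with a shared `useConfirmPopup()` hook returning `{ cfShow, cfHide, cfVisible, cfRef }`. The hook's implementation is not part of the visible diff; a minimal sketch of what such a hook could look like, assuming it only wraps a target ref and a visibility flag:

```ts
import { useCallback, useRef, useState } from 'react';

// Sketch only — the real hook lives in '@/hooks/Mapper/hooks' and its source is not shown in this diff.
export const useConfirmPopup = () => {
  const cfRef = useRef<HTMLElement>(); // target element the ConfirmPopup anchors to
  const [cfVisible, setCfVisible] = useState(false);

  const cfShow = useCallback(() => setCfVisible(true), []);
  const cfHide = useCallback(() => setCfVisible(false), []);

  return { cfRef, cfVisible, cfShow, cfHide };
};
```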
@@ -1,9 +1,4 @@
|
||||
import { Button } from 'primereact/button';
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { Toast } from 'primereact/toast';
|
||||
import clsx from 'clsx';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { Commands, OutCommand, PingType } from '@/hooks/Mapper/types';
|
||||
import { PingRoute } from '@/hooks/Mapper/components/mapInterface/components/PingsInterface/PingRoute.tsx';
|
||||
import {
|
||||
CharacterCardById,
|
||||
SystemView,
|
||||
@@ -12,12 +7,18 @@ import {
|
||||
WdImgButton,
|
||||
WdImgButtonTooltip,
|
||||
} from '@/hooks/Mapper/components/ui-kit';
|
||||
import useRefState from 'react-usestateref';
|
||||
import { PrimeIcons } from 'primereact/api';
|
||||
import { emitMapEvent } from '@/hooks/Mapper/events';
|
||||
import { ConfirmPopup } from 'primereact/confirmpopup';
|
||||
import { PingRoute } from '@/hooks/Mapper/components/mapInterface/components/PingsInterface/PingRoute.tsx';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { PingsPlacement } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import { Commands, OutCommand, PingType } from '@/hooks/Mapper/types';
|
||||
import clsx from 'clsx';
|
||||
import { PrimeIcons } from 'primereact/api';
|
||||
import { Button } from 'primereact/button';
|
||||
import { ConfirmPopup } from 'primereact/confirmpopup';
|
||||
import { Toast } from 'primereact/toast';
|
||||
import { useCallback, useEffect, useMemo, useRef } from 'react';
|
||||
import useRefState from 'react-usestateref';
|
||||
import { useConfirmPopup } from '@/hooks/Mapper/hooks';
|
||||
|
||||
const PING_PLACEMENT_MAP = {
|
||||
[PingsPlacement.rightTop]: 'top-right',
|
||||
@@ -78,9 +79,7 @@ export interface PingsInterfaceProps {
|
||||
export const PingsInterface = ({ hasLeftOffset }: PingsInterfaceProps) => {
|
||||
const toast = useRef<Toast>(null);
|
||||
const [isShow, setIsShow, isShowRef] = useRefState(false);
|
||||
|
||||
const cpRemoveBtnRef = useRef<HTMLElement>();
|
||||
const [cpRemoveVisible, setCpRemoveVisible] = useState(false);
|
||||
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
|
||||
|
||||
const {
|
||||
storedSettings: { interfaceSettings },
|
||||
@@ -98,9 +97,6 @@ export const PingsInterface = ({ hasLeftOffset }: PingsInterfaceProps) => {
|
||||
|
||||
const ping = useMemo(() => (pings.length === 1 ? pings[0] : null), [pings]);
|
||||
|
||||
const handleShowCP = useCallback(() => setCpRemoveVisible(true), []);
|
||||
const handleHideCP = useCallback(() => setCpRemoveVisible(false), []);
|
||||
|
||||
const navigateTo = useCallback(() => {
|
||||
if (!ping) {
|
||||
return;
|
||||
@@ -119,7 +115,7 @@ export const PingsInterface = ({ hasLeftOffset }: PingsInterfaceProps) => {
|
||||
|
||||
await outCommand({
|
||||
type: OutCommand.cancelPing,
|
||||
data: { type: ping.type, solar_system_id: ping.solar_system_id },
|
||||
data: { type: ping.type, id: ping.id },
|
||||
});
|
||||
}, [outCommand, ping]);
|
||||
|
||||
@@ -242,11 +238,11 @@ export const PingsInterface = ({ hasLeftOffset }: PingsInterfaceProps) => {
|
||||
/>
|
||||
|
||||
{/*@ts-ignore*/}
|
||||
<div ref={cpRemoveBtnRef}>
|
||||
<div ref={cfRef}>
|
||||
<WdImgButton
|
||||
className={clsx('pi-trash', 'text-red-400 hover:text-red-300')}
|
||||
tooltip={DELETE_TOOLTIP_PROPS}
|
||||
onClick={handleShowCP}
|
||||
onClick={cfShow}
|
||||
/>
|
||||
</div>
|
||||
{/* TODO ADD solar system menu*/}
|
||||
@@ -272,9 +268,9 @@ export const PingsInterface = ({ hasLeftOffset }: PingsInterfaceProps) => {
|
||||
/>
|
||||
|
||||
<ConfirmPopup
|
||||
target={cpRemoveBtnRef.current}
|
||||
visible={cpRemoveVisible}
|
||||
onHide={handleHideCP}
|
||||
target={cfRef.current}
|
||||
visible={cfVisible}
|
||||
onHide={cfHide}
|
||||
message="Are you sure you want to delete ping?"
|
||||
icon="pi pi-exclamation-triangle text-orange-400"
|
||||
accept={removePing}
|
||||
|
||||
@@ -62,8 +62,11 @@ function useSignatureUndo(
|
||||
|
||||
// determine timeout from settings
|
||||
const timeoutMs = getDeletionTimeoutMs(settings);
|
||||
|
||||
// Ensure a minimum of 1 second for immediate deletion so the UI shows
|
||||
const effectiveTimeoutMs = timeoutMs === 0 ? 1000 : timeoutMs;
|
||||
|
||||
setCountdown(Math.ceil(timeoutMs / 1000));
|
||||
setCountdown(Math.ceil(effectiveTimeoutMs / 1000));
|
||||
|
||||
// start new interval
|
||||
intervalRef.current = window.setInterval(() => {
|
||||
|
||||
@@ -28,12 +28,12 @@ import {
|
||||
renderInfoColumn,
|
||||
renderUpdatedTimeLeft,
|
||||
} from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/renders';
|
||||
import { SETTINGS_KEYS, SIGNATURE_WINDOW_ID, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures.ts';
|
||||
import { useClipboard, useHotkey } from '@/hooks/Mapper/hooks';
|
||||
import useMaxWidth from '@/hooks/Mapper/hooks/useMaxWidth';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { getSignatureRowClass } from '../helpers/rowStyles';
|
||||
import { useSystemSignaturesData } from '../hooks/useSystemSignaturesData';
|
||||
import { SETTINGS_KEYS, SIGNATURE_WINDOW_ID, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures.ts';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
|
||||
const renderColIcon = (sig: SystemSignature) => renderIcon(sig);
|
||||
|
||||
@@ -157,9 +157,18 @@ export const SystemSignaturesContent = ({
|
||||
[onSelect, selectable, setSelectedSignatures, deletedSignatures],
|
||||
);
|
||||
|
||||
const { showDescriptionColumn, showUpdatedColumn, showCharacterColumn, showCharacterPortrait } = useMemo(
|
||||
const {
|
||||
showGroupColumn,
|
||||
showDescriptionColumn,
|
||||
showAddedColumn,
|
||||
showUpdatedColumn,
|
||||
showCharacterColumn,
|
||||
showCharacterPortrait,
|
||||
} = useMemo(
|
||||
() => ({
|
||||
showGroupColumn: settings[SETTINGS_KEYS.SHOW_GROUP_COLUMN] as boolean,
|
||||
showDescriptionColumn: settings[SETTINGS_KEYS.SHOW_DESCRIPTION_COLUMN] as boolean,
|
||||
showAddedColumn: settings[SETTINGS_KEYS.SHOW_ADDED_COLUMN] as boolean,
|
||||
showUpdatedColumn: settings[SETTINGS_KEYS.SHOW_UPDATED_COLUMN] as boolean,
|
||||
showCharacterColumn: settings[SETTINGS_KEYS.SHOW_CHARACTER_COLUMN] as boolean,
|
||||
showCharacterPortrait: settings[SETTINGS_KEYS.SHOW_CHARACTER_PORTRAIT] as boolean,
|
||||
@@ -309,15 +318,17 @@ export const SystemSignaturesContent = ({
|
||||
style={{ maxWidth: 72, minWidth: 72, width: 72 }}
|
||||
sortable
|
||||
/>
|
||||
<Column
|
||||
field="group"
|
||||
header="Group"
|
||||
bodyClassName="text-ellipsis overflow-hidden whitespace-nowrap"
|
||||
style={{ maxWidth: 110, minWidth: 110, width: 110 }}
|
||||
body={sig => sig.group ?? ''}
|
||||
hidden={isCompact}
|
||||
sortable
|
||||
/>
|
||||
{showGroupColumn && (
|
||||
<Column
|
||||
field="group"
|
||||
header="Group"
|
||||
bodyClassName="text-ellipsis overflow-hidden whitespace-nowrap"
|
||||
style={{ maxWidth: 110, minWidth: 110, width: 110 }}
|
||||
body={sig => sig.group ?? ''}
|
||||
hidden={isCompact}
|
||||
sortable
|
||||
/>
|
||||
)}
|
||||
<Column
|
||||
field="info"
|
||||
header="Info"
|
||||
@@ -336,15 +347,17 @@ export const SystemSignaturesContent = ({
|
||||
sortable
|
||||
/>
|
||||
)}
|
||||
<Column
|
||||
field="inserted_at"
|
||||
header="Added"
|
||||
dataType="date"
|
||||
body={renderAddedTimeLeft}
|
||||
style={{ minWidth: 70, maxWidth: 80 }}
|
||||
bodyClassName="ssc-header text-ellipsis overflow-hidden whitespace-nowrap"
|
||||
sortable
|
||||
/>
|
||||
{showAddedColumn && (
|
||||
<Column
|
||||
field="inserted_at"
|
||||
header="Added"
|
||||
dataType="date"
|
||||
body={renderAddedTimeLeft}
|
||||
style={{ minWidth: 70, maxWidth: 80 }}
|
||||
bodyClassName="ssc-header text-ellipsis overflow-hidden whitespace-nowrap"
|
||||
sortable
|
||||
/>
|
||||
)}
|
||||
{showUpdatedColumn && (
|
||||
<Column
|
||||
field="updated_at"
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { SETTINGS_KEYS, SIGNATURES_DELETION_TIMING, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';
|
||||
import {
|
||||
GroupType,
|
||||
SignatureGroup,
|
||||
@@ -11,7 +12,6 @@ import {
|
||||
SignatureKindFR,
|
||||
SignatureKindRU,
|
||||
} from '@/hooks/Mapper/types';
|
||||
import { SETTINGS_KEYS, SIGNATURES_DELETION_TIMING, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';
|
||||
|
||||
export const TIME_ONE_MINUTE = 1000 * 60;
|
||||
export const TIME_TEN_MINUTES = TIME_ONE_MINUTE * 10;
|
||||
@@ -130,6 +130,8 @@ export const SIGNATURE_SETTINGS = {
|
||||
{ type: SettingsTypes.flag, key: SETTINGS_KEYS.COMBAT_SITE, name: 'Show Combat Sites' },
|
||||
],
|
||||
uiFlags: [
|
||||
{ type: SettingsTypes.flag, key: SETTINGS_KEYS.SHOW_GROUP_COLUMN, name: 'Show Group Column' },
|
||||
{ type: SettingsTypes.flag, key: SETTINGS_KEYS.SHOW_ADDED_COLUMN, name: 'Show Added Column' },
|
||||
{ type: SettingsTypes.flag, key: SETTINGS_KEYS.SHOW_UPDATED_COLUMN, name: 'Show Updated Column' },
|
||||
{ type: SettingsTypes.flag, key: SETTINGS_KEYS.SHOW_DESCRIPTION_COLUMN, name: 'Show Description Column' },
|
||||
{ type: SettingsTypes.flag, key: SETTINGS_KEYS.SHOW_CHARACTER_COLUMN, name: 'Show Character Column' },
|
||||
|
||||
@@ -0,0 +1,52 @@
|
||||
import { getState } from './getState';
|
||||
import { UNKNOWN_SIGNATURE_NAME } from '@/hooks/Mapper/helpers';
|
||||
import { SignatureGroup, SystemSignature } from '@/hooks/Mapper/types';
|
||||
|
||||
describe('getState', () => {
|
||||
const mockSignaturesMatch: string[] = []; // This parameter is not used in the function
|
||||
|
||||
it('should return 0 if group is undefined', () => {
|
||||
const newSig: SystemSignature = { id: '1', name: 'Test Sig', group: undefined } as SystemSignature;
|
||||
expect(getState(mockSignaturesMatch, newSig)).toBe(0);
|
||||
});
|
||||
|
||||
it('should return 0 if group is CosmicSignature', () => {
|
||||
const newSig: SystemSignature = { id: '1', name: 'Test Sig', group: SignatureGroup.CosmicSignature } as SystemSignature;
|
||||
expect(getState(mockSignaturesMatch, newSig)).toBe(0);
|
||||
});
|
||||
|
||||
it('should return 1 if group is not CosmicSignature and name is undefined', () => {
|
||||
const newSig: SystemSignature = { id: '1', name: undefined, group: SignatureGroup.Wormhole } as SystemSignature;
|
||||
expect(getState(mockSignaturesMatch, newSig)).toBe(1);
|
||||
});
|
||||
|
||||
it('should return 1 if group is not CosmicSignature and name is empty', () => {
|
||||
const newSig: SystemSignature = { id: '1', name: '', group: SignatureGroup.Wormhole } as SystemSignature;
|
||||
expect(getState(mockSignaturesMatch, newSig)).toBe(1);
|
||||
});
|
||||
|
||||
it('should return 1 if group is not CosmicSignature and name is UNKNOWN_SIGNATURE_NAME', () => {
|
||||
const newSig: SystemSignature = { id: '1', name: UNKNOWN_SIGNATURE_NAME, group: SignatureGroup.Wormhole } as SystemSignature;
|
||||
expect(getState(mockSignaturesMatch, newSig)).toBe(1);
|
||||
});
|
||||
|
||||
it('should return 2 if group is not CosmicSignature and name is a non-empty string', () => {
|
||||
const newSig: SystemSignature = { id: '1', name: 'Custom Name', group: SignatureGroup.Wormhole } as SystemSignature;
|
||||
expect(getState(mockSignaturesMatch, newSig)).toBe(2);
|
||||
});
|
||||
|
||||
// According to the current implementation, state = -1 is unreachable
|
||||
// because the conditions for 0, 1, and 2 cover all possibilities for the given inputs.
|
||||
// If the logic of getState were to change to make -1 possible, a test case should be added here.
|
||||
// For now, we can test a scenario that should lead to one of the valid states,
|
||||
// for example, if group is something other than CosmicSignature and name is valid.
|
||||
it('should handle other valid signature groups correctly, leading to state 2 with a valid name', () => {
|
||||
const newSig: SystemSignature = { id: '1', name: 'Combat Site', group: SignatureGroup.CombatSite } as SystemSignature;
|
||||
expect(getState(mockSignaturesMatch, newSig)).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle other valid signature groups correctly, leading to state 1 with an empty name', () => {
|
||||
const newSig: SystemSignature = { id: '1', name: '', group: SignatureGroup.DataSite } as SystemSignature;
|
||||
expect(getState(mockSignaturesMatch, newSig)).toBe(1);
|
||||
});
|
||||
});
|
||||
@@ -76,15 +76,10 @@ export const useSystemSignaturesData = ({
|
||||
if (removed.length > 0) {
|
||||
await processRemovedSignatures(removed, added, updated);
|
||||
|
||||
// Only show pending deletions if:
|
||||
// 1. Lazy deletion is enabled AND
|
||||
// 2. Deletion timing is not immediate (> 0)
|
||||
// Show pending deletions if lazy deletion is enabled
|
||||
// The deletion timing controls how long the countdown lasts, not whether lazy delete is active
|
||||
if (onSignatureDeleted && lazyDeleteValue) {
|
||||
const timeoutMs = getDeletionTimeoutMs(settings);
|
||||
|
||||
if (timeoutMs > 0) {
|
||||
onSignatureDeleted(removed);
|
||||
}
|
||||
onSignatureDeleted(removed);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { Dialog } from 'primereact/dialog';
|
||||
import { useCallback, useRef, useState } from 'react';
|
||||
import { TabPanel, TabView } from 'primereact/tabview';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { OutCommand } from '@/hooks/Mapper/types';
|
||||
import { OutCommand, UserPermission } from '@/hooks/Mapper/types';
|
||||
import { CONNECTIONS_CHECKBOXES_PROPS, SIGNATURES_CHECKBOXES_PROPS, SYSTEMS_CHECKBOXES_PROPS } from './constants.ts';
|
||||
import {
|
||||
MapSettingsProvider,
|
||||
@@ -12,7 +12,10 @@ import {
|
||||
import { WidgetsSettings } from './components/WidgetsSettings';
|
||||
import { CommonSettings } from './components/CommonSettings';
|
||||
import { SettingsListItem } from './types.ts';
|
||||
import { ImportExport } from '@/hooks/Mapper/components/mapRootContent/components/MapSettings/components/ImportExport.tsx';
|
||||
import { ImportExport } from './components/ImportExport.tsx';
|
||||
import { ServerSettings } from './components/ServerSettings.tsx';
|
||||
import { AdminSettings } from './components/AdminSettings.tsx';
|
||||
import { useMapCheckPermissions } from '@/hooks/Mapper/mapRootProvider/hooks/api';
|
||||
|
||||
export interface MapSettingsProps {
|
||||
visible: boolean;
|
||||
@@ -24,6 +27,7 @@ export const MapSettingsComp = ({ visible, onHide }: MapSettingsProps) => {
|
||||
const { outCommand } = useMapRootState();
|
||||
|
||||
const { renderSettingItem, setUserRemoteSettings } = useMapSettings();
|
||||
const isAdmin = useMapCheckPermissions([UserPermission.ADMIN_MAP]);
|
||||
|
||||
const refVars = useRef({ outCommand, onHide, visible });
|
||||
refVars.current = { outCommand, onHide, visible };
|
||||
@@ -58,7 +62,7 @@ export const MapSettingsComp = ({ visible, onHide }: MapSettingsProps) => {
|
||||
header="Map user settings"
|
||||
visible
|
||||
draggable={false}
|
||||
style={{ width: '550px' }}
|
||||
style={{ width: '600px' }}
|
||||
onShow={handleShow}
|
||||
onHide={handleHide}
|
||||
>
|
||||
@@ -92,6 +96,16 @@ export const MapSettingsComp = ({ visible, onHide }: MapSettingsProps) => {
|
||||
<TabPanel header="Import/Export" className="h-full" headerClassName={styles.verticalTabHeader}>
|
||||
<ImportExport />
|
||||
</TabPanel>
|
||||
|
||||
<TabPanel header="Server Settings" className="h-full" headerClassName="color-warn">
|
||||
<ServerSettings />
|
||||
</TabPanel>
|
||||
|
||||
{isAdmin && (
|
||||
<TabPanel header="Admin Settings" className="h-full" headerClassName="color-warn">
|
||||
<AdminSettings />
|
||||
</TabPanel>
|
||||
)}
|
||||
</TabView>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,128 @@
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { Toast } from 'primereact/toast';
|
||||
import { Button } from 'primereact/button';
|
||||
import { callToastError, callToastSuccess, callToastWarn } from '@/hooks/Mapper/helpers';
|
||||
import { OutCommand } from '@/hooks/Mapper/types';
|
||||
import { ConfirmPopup } from 'primereact/confirmpopup';
|
||||
import { useConfirmPopup } from '@/hooks/Mapper/hooks';
|
||||
import { MapUserSettings, RemoteAdminSettingsResponse } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import { parseMapUserSettings } from '@/hooks/Mapper/components/helpers';
|
||||
import fastDeepEqual from 'fast-deep-equal';
|
||||
import { useDetectSettingsChanged } from '@/hooks/Mapper/components/hooks';
|
||||
|
||||
export const AdminSettings = () => {
|
||||
const {
|
||||
storedSettings: { getSettingsForExport },
|
||||
outCommand,
|
||||
} = useMapRootState();
|
||||
|
||||
const settingsChanged = useDetectSettingsChanged();
|
||||
|
||||
const [currentRemoteSettings, setCurrentRemoteSettings] = useState<MapUserSettings | null>(null);
|
||||
|
||||
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
|
||||
const toast = useRef<Toast | null>(null);
|
||||
|
||||
const hasSettingsForExport = useMemo(() => !!getSettingsForExport(), [getSettingsForExport]);
|
||||
|
||||
const refVars = useRef({ currentRemoteSettings, getSettingsForExport });
|
||||
refVars.current = { currentRemoteSettings, getSettingsForExport };
|
||||
|
||||
useEffect(() => {
|
||||
const load = async () => {
|
||||
let res: RemoteAdminSettingsResponse | undefined;
|
||||
try {
|
||||
res = await outCommand({ type: OutCommand.getDefaultSettings, data: null });
|
||||
} catch (error) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
if (!res || res.default_settings == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
setCurrentRemoteSettings(parseMapUserSettings(res.default_settings));
|
||||
};
|
||||
|
||||
load();
|
||||
}, [outCommand]);
|
||||
|
||||
const isDirty = useMemo(() => {
|
||||
const { currentRemoteSettings, getSettingsForExport } = refVars.current;
|
||||
const localCurrent = parseMapUserSettings(getSettingsForExport());
|
||||
|
||||
return !fastDeepEqual(currentRemoteSettings, localCurrent);
|
||||
// eslint-disable-next-line
|
||||
}, [settingsChanged, currentRemoteSettings]);
|
||||
|
||||
const handleSync = useCallback(async () => {
|
||||
const settings = getSettingsForExport();
|
||||
|
||||
if (!settings) {
|
||||
callToastWarn(toast.current, 'No settings to save');
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
let response: { success: boolean } | undefined;
|
||||
|
||||
try {
|
||||
response = await outCommand({
|
||||
type: OutCommand.saveDefaultSettings,
|
||||
data: { settings },
|
||||
});
|
||||
} catch (err) {
|
||||
callToastError(toast.current, 'Something went wrong while saving settings');
|
||||
console.error('ERROR: ', err);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!response || !response.success) {
|
||||
callToastError(toast.current, 'Settings not saved - dont not why it');
|
||||
return;
|
||||
}
|
||||
|
||||
setCurrentRemoteSettings(parseMapUserSettings(settings));
|
||||
|
||||
callToastSuccess(toast.current, 'Settings saved successfully');
|
||||
}, [getSettingsForExport, outCommand]);
|
||||
|
||||
return (
|
||||
<div className="w-full h-full flex flex-col gap-5">
|
||||
<div className="flex flex-col gap-1">
|
||||
<div>
|
||||
<Button
|
||||
// @ts-ignore
|
||||
ref={cfRef}
|
||||
onClick={cfShow}
|
||||
icon="pi pi-save"
|
||||
size="small"
|
||||
severity="danger"
|
||||
label="Save as Map Default"
|
||||
className="py-[4px]"
|
||||
disabled={!hasSettingsForExport || !isDirty}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{!isDirty && <span className="text-red-500/70 text-[12px]">*Local and remote are identical.</span>}
|
||||
|
||||
<span className="text-stone-500 text-[12px]">
|
||||
*Will save your current settings as the default for all new users of this map. This action will overwrite any
|
||||
existing default settings.
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<Toast ref={toast} />
|
||||
|
||||
<ConfirmPopup
|
||||
target={cfRef.current}
|
||||
visible={cfVisible}
|
||||
onHide={cfHide}
|
||||
message="Your settings will overwrite default. Sure?."
|
||||
icon="pi pi-exclamation-triangle"
|
||||
accept={handleSync}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -7,9 +7,14 @@ import {
|
||||
import { useMapSettings } from '@/hooks/Mapper/components/mapRootContent/components/MapSettings/MapSettingsProvider.tsx';
|
||||
import { SettingsListItem } from '@/hooks/Mapper/components/mapRootContent/components/MapSettings/types.ts';
|
||||
import { useCallback } from 'react';
|
||||
import { Button } from 'primereact/button';
|
||||
import { TooltipPosition, WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit';
|
||||
import { ConfirmPopup } from 'primereact/confirmpopup';
|
||||
import { useConfirmPopup } from '@/hooks/Mapper/hooks';
|
||||
|
||||
export const CommonSettings = () => {
|
||||
const { renderSettingItem } = useMapSettings();
|
||||
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
|
||||
|
||||
const renderSettingsList = useCallback(
|
||||
(list: SettingsListItem[]) => {
|
||||
@@ -18,6 +23,8 @@ export const CommonSettings = () => {
|
||||
[renderSettingItem],
|
||||
);
|
||||
|
||||
const handleResetSettings = () => {};
|
||||
|
||||
return (
|
||||
<div className="flex flex-col h-full gap-1">
|
||||
<div>
|
||||
@@ -29,6 +36,33 @@ export const CommonSettings = () => {
|
||||
<div className="grid grid-cols-[1fr_auto]">{renderSettingItem(MINI_MAP_PLACEMENT)}</div>
|
||||
<div className="grid grid-cols-[1fr_auto]">{renderSettingItem(PINGS_PLACEMENT)}</div>
|
||||
<div className="grid grid-cols-[1fr_auto]">{renderSettingItem(THEME_SETTING)}</div>
|
||||
|
||||
<div className="border-b-2 border-dotted border-stone-700/50 h-px my-3" />
|
||||
|
||||
<div className="grid grid-cols-[1fr_auto]">
|
||||
<div />
|
||||
<WdTooltipWrapper content="This dangerous action. And can not be undone" position={TooltipPosition.top}>
|
||||
<Button
|
||||
// @ts-ignore
|
||||
ref={cfRef}
|
||||
className="py-[4px]"
|
||||
onClick={cfShow}
|
||||
outlined
|
||||
size="small"
|
||||
severity="danger"
|
||||
label="Reset Settings"
|
||||
/>
|
||||
</WdTooltipWrapper>
|
||||
</div>
|
||||
|
||||
<ConfirmPopup
|
||||
target={cfRef.current}
|
||||
visible={cfVisible}
|
||||
onHide={cfHide}
|
||||
message="All settings for this map will be reset to default."
|
||||
icon="pi pi-exclamation-triangle"
|
||||
accept={handleResetSettings}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -0,0 +1,90 @@
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { useCallback, useRef, useState } from 'react';
|
||||
import { Toast } from 'primereact/toast';
|
||||
import { Button } from 'primereact/button';
|
||||
import { OutCommand } from '@/hooks/Mapper/types';
|
||||
import { Divider } from 'primereact/divider';
|
||||
import { callToastError, callToastSuccess, callToastWarn } from '@/hooks/Mapper/helpers';
|
||||
|
||||
type SaveDefaultSettingsReturn = { success: boolean; error: string };
|
||||
|
||||
export const DefaultSettings = () => {
|
||||
const {
|
||||
outCommand,
|
||||
storedSettings: { getSettingsForExport },
|
||||
data: { userPermissions },
|
||||
} = useMapRootState();
|
||||
|
||||
const [loading, setLoading] = useState(false);
|
||||
const toast = useRef<Toast | null>(null);
|
||||
|
||||
const refVars = useRef({ getSettingsForExport, outCommand });
|
||||
refVars.current = { getSettingsForExport, outCommand };
|
||||
|
||||
const handleSaveAsDefault = useCallback(async () => {
|
||||
const settings = refVars.current.getSettingsForExport();
|
||||
if (!settings) {
|
||||
callToastWarn(toast.current, 'No settings to save');
|
||||
return;
|
||||
}
|
||||
|
||||
setLoading(true);
|
||||
|
||||
let response: SaveDefaultSettingsReturn;
|
||||
try {
|
||||
response = await refVars.current.outCommand({
|
||||
type: OutCommand.saveDefaultSettings,
|
||||
data: { settings },
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('Save default settings error:', error);
|
||||
callToastError(toast.current, 'Failed to save default settings');
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
if (response.success) {
|
||||
callToastSuccess(toast.current, 'Default settings saved successfully');
|
||||
setLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
callToastError(toast.current, response.error || 'Failed to save default settings');
|
||||
setLoading(false);
|
||||
}, []);
|
||||
|
||||
if (!userPermissions?.admin_map) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<Divider />
|
||||
<div className="w-full h-full flex flex-col gap-5">
|
||||
<h3 className="text-lg font-semibold">Default Settings (Admin Only)</h3>
|
||||
|
||||
<div className="flex flex-col gap-1">
|
||||
<div>
|
||||
<Button
|
||||
onClick={handleSaveAsDefault}
|
||||
icon="pi pi-save"
|
||||
size="small"
|
||||
severity="danger"
|
||||
label="Save as Map Default"
|
||||
className="py-[4px]"
|
||||
loading={loading}
|
||||
disabled={loading}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<span className="text-stone-500 text-[12px]">
|
||||
*Will save your current settings as the default for all new users of this map. This action will overwrite
|
||||
any existing default settings.
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<Toast ref={toast} />
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,97 @@
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import { Toast } from 'primereact/toast';
|
||||
import { parseMapUserSettings } from '@/hooks/Mapper/components/helpers';
|
||||
import { Button } from 'primereact/button';
|
||||
import { OutCommand } from '@/hooks/Mapper/types';
|
||||
import { createDefaultWidgetSettings } from '@/hooks/Mapper/mapRootProvider/helpers/createDefaultWidgetSettings.ts';
|
||||
import { callToastSuccess } from '@/hooks/Mapper/helpers';
|
||||
import { ConfirmPopup } from 'primereact/confirmpopup';
|
||||
import { useConfirmPopup } from '@/hooks/Mapper/hooks';
|
||||
import { RemoteAdminSettingsResponse } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
|
||||
export const ServerSettings = () => {
|
||||
const {
|
||||
storedSettings: { applySettings },
|
||||
outCommand,
|
||||
} = useMapRootState();
|
||||
|
||||
const [hasSettings, setHasSettings] = useState(false);
|
||||
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
|
||||
const toast = useRef<Toast | null>(null);
|
||||
|
||||
const handleSync = useCallback(async () => {
|
||||
let res: RemoteAdminSettingsResponse | undefined;
|
||||
try {
|
||||
res = await outCommand({ type: OutCommand.getDefaultSettings, data: null });
|
||||
} catch (error) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
if (res?.default_settings == null) {
|
||||
applySettings(createDefaultWidgetSettings());
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
applySettings(parseMapUserSettings(res.default_settings));
|
||||
callToastSuccess(toast.current, 'Settings synchronized successfully');
|
||||
} catch (error) {
|
||||
applySettings(createDefaultWidgetSettings());
|
||||
}
|
||||
}, [applySettings, outCommand]);
|
||||
|
||||
useEffect(() => {
|
||||
const load = async () => {
|
||||
let res: RemoteAdminSettingsResponse | undefined;
|
||||
try {
|
||||
res = await outCommand({ type: OutCommand.getDefaultSettings, data: null });
|
||||
} catch (error) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
if (res?.default_settings == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
setHasSettings(true);
|
||||
};
|
||||
|
||||
load();
|
||||
}, [outCommand]);
|
||||
|
||||
return (
|
||||
<div className="w-full h-full flex flex-col gap-5">
|
||||
<div className="flex flex-col gap-1">
|
||||
<div>
|
||||
<Button
|
||||
// @ts-ignore
|
||||
ref={cfRef}
|
||||
onClick={cfShow}
|
||||
icon="pi pi-file-import"
|
||||
size="small"
|
||||
severity="warning"
|
||||
label="Sync with Default Settings"
|
||||
className="py-[4px]"
|
||||
disabled={!hasSettings}
|
||||
/>
|
||||
</div>
|
||||
{!hasSettings && (
|
||||
<span className="text-red-500/70 text-[12px]">*Default settings was not set by map administrator.</span>
|
||||
)}
|
||||
<span className="text-stone-500 text-[12px]">*Will apply admin settings which set as Default for map.</span>
|
||||
</div>
|
||||
|
||||
<Toast ref={toast} />
|
||||
|
||||
<ConfirmPopup
|
||||
target={cfRef.current}
|
||||
visible={cfVisible}
|
||||
onHide={cfHide}
|
||||
message="You lost your current settings. Sure?."
|
||||
icon="pi pi-exclamation-triangle"
|
||||
accept={handleSync}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -28,6 +28,9 @@ export const WidgetsSettings = ({}: WidgetsSettingsProps) => {
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<div className="border-b-2 border-dotted border-stone-700/50 h-px my-3" />
|
||||
|
||||
<div className="grid grid-cols-[1fr_auto]">
|
||||
<div />
|
||||
<Button className="py-[4px]" onClick={resetWidgets} outlined size="small" label="Reset Widgets"></Button>
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import { Dialog } from 'primereact/dialog';
|
||||
import { Button } from 'primereact/button';
|
||||
import { ConfirmPopup } from 'primereact/confirmpopup';
|
||||
import { useCallback, useRef, useState } from 'react';
|
||||
import { MapUserSettings } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import { DEFAULT_SIGNATURE_SETTINGS } from '@/hooks/Mapper/constants/signatures.ts';
|
||||
import { useConfirmPopup } from '@/hooks/Mapper/hooks';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import {
|
||||
DEFAULT_KILLS_WIDGET_SETTINGS,
|
||||
DEFAULT_ON_THE_MAP_SETTINGS,
|
||||
@@ -11,10 +9,13 @@ import {
|
||||
getDefaultWidgetProps,
|
||||
STORED_INTERFACE_DEFAULT_VALUES,
|
||||
} from '@/hooks/Mapper/mapRootProvider/constants.ts';
|
||||
import { DEFAULT_SIGNATURE_SETTINGS } from '@/hooks/Mapper/constants/signatures.ts';
|
||||
import { Toast } from 'primereact/toast';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { MapUserSettings } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import { saveTextFile } from '@/hooks/Mapper/utils';
|
||||
import { Button } from 'primereact/button';
|
||||
import { ConfirmPopup } from 'primereact/confirmpopup';
|
||||
import { Dialog } from 'primereact/dialog';
|
||||
import { Toast } from 'primereact/toast';
|
||||
import { useCallback, useRef } from 'react';
|
||||
|
||||
const createSettings = function <T>(lsSettings: string | null, defaultValues: T) {
|
||||
return {
|
||||
@@ -24,10 +25,7 @@ const createSettings = function <T>(lsSettings: string | null, defaultValues: T)
|
||||
};
|
||||
|
||||
export const OldSettingsDialog = () => {
|
||||
const cpRemoveBtnRef = useRef<HTMLElement>();
|
||||
const [cpRemoveVisible, setCpRemoveVisible] = useState(false);
|
||||
const handleShowCP = useCallback(() => setCpRemoveVisible(true), []);
|
||||
const handleHideCP = useCallback(() => setCpRemoveVisible(false), []);
|
||||
const { cfShow, cfHide, cfVisible, cfRef } = useConfirmPopup();
|
||||
const toast = useRef<Toast | null>(null);
|
||||
|
||||
const {
|
||||
@@ -43,7 +41,7 @@ export const OldSettingsDialog = () => {
|
||||
const widgetKills = localStorage.getItem('kills:widget:settings');
|
||||
const onTheMapOld = localStorage.getItem('window:onTheMap:settings');
|
||||
const widgetsOld = localStorage.getItem('windows:settings:v2');
|
||||
const signatures = localStorage.getItem('wanderer_system_signature_settings_v6_5');
|
||||
const signatures = localStorage.getItem('wanderer_system_signature_settings_v6_6');
|
||||
|
||||
const out: MapUserSettings = {
|
||||
killsWidget: createSettings(widgetKills, DEFAULT_KILLS_WIDGET_SETTINGS),
|
||||
@@ -120,7 +118,7 @@ export const OldSettingsDialog = () => {
|
||||
localStorage.removeItem('kills:widget:settings');
|
||||
localStorage.removeItem('window:onTheMap:settings');
|
||||
localStorage.removeItem('windows:settings:v2');
|
||||
localStorage.removeItem('wanderer_system_signature_settings_v6_5');
|
||||
localStorage.removeItem('wanderer_system_signature_settings_v6_6');
|
||||
|
||||
checkOldSettings();
|
||||
}, [checkOldSettings]);
|
||||
@@ -143,8 +141,8 @@ export const OldSettingsDialog = () => {
|
||||
<div className="flex items-center justify-end">
|
||||
<Button
|
||||
// @ts-ignore
|
||||
ref={cpRemoveBtnRef}
|
||||
onClick={handleShowCP}
|
||||
ref={cfRef}
|
||||
onClick={cfShow}
|
||||
icon="pi pi-exclamation-triangle"
|
||||
size="small"
|
||||
severity="warning"
|
||||
@@ -192,9 +190,9 @@ export const OldSettingsDialog = () => {
|
||||
</Dialog>
|
||||
|
||||
<ConfirmPopup
|
||||
target={cpRemoveBtnRef.current}
|
||||
visible={cpRemoveVisible}
|
||||
onHide={handleHideCP}
|
||||
target={cfRef.current}
|
||||
visible={cfVisible}
|
||||
onHide={cfHide}
|
||||
message="After click dialog will disappear. Ready?"
|
||||
icon="pi pi-exclamation-triangle"
|
||||
accept={handleProceed}
|
||||
|
||||
@@ -13,6 +13,8 @@ import { InputText } from 'primereact/inputtext';
|
||||
import { IconField } from 'primereact/iconfield';
|
||||
|
||||
const itemTemplate = (item: CharacterTypeRaw & WithIsOwnCharacter, options: VirtualScrollerTemplateOptions) => {
|
||||
const showAllyLogoPlaceholder = options.props.items?.some(x => x.alliance_id != null);
|
||||
|
||||
return (
|
||||
<div
|
||||
className={clsx(classes.CharacterRow, 'w-full box-border px-2 py-1', {
|
||||
@@ -22,7 +24,15 @@ const itemTemplate = (item: CharacterTypeRaw & WithIsOwnCharacter, options: Virt
|
||||
})}
|
||||
style={{ height: options.props.itemSize + 'px' }}
|
||||
>
|
||||
<CharacterCard showCorporationLogo showAllyLogo showSystem showTicker showShip {...item} />
|
||||
<CharacterCard
|
||||
showCorporationLogo
|
||||
showAllyLogo
|
||||
showAllyLogoPlaceholder={showAllyLogoPlaceholder}
|
||||
showSystem
|
||||
showTicker
|
||||
showShip
|
||||
{...item}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -94,6 +94,10 @@ export const SignatureSettings = ({ systemId, show, onHide, signatureData }: Map
|
||||
out = { ...out, type: values.type };
|
||||
}
|
||||
|
||||
if (values.temporary_name != null) {
|
||||
out = { ...out, temporary_name: values.temporary_name };
|
||||
}
|
||||
|
||||
if (signatureData.group !== SignatureGroup.Wormhole) {
|
||||
out = { ...out, name: '' };
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ import { SignatureWormholeTypeSelect } from '@/hooks/Mapper/components/mapRootCo
|
||||
import { SignatureK162TypeSelect } from '@/hooks/Mapper/components/mapRootContent/components/SignatureSettings/components/SignatureK162TypeSelect';
|
||||
import { SignatureLeadsToSelect } from '@/hooks/Mapper/components/mapRootContent/components/SignatureSettings/components/SignatureLeadsToSelect';
|
||||
import { SignatureEOLCheckbox } from '@/hooks/Mapper/components/mapRootContent/components/SignatureSettings/components/SignatureEOLCheckbox';
|
||||
import { SignatureTempName } from '@/hooks/Mapper/components/mapRootContent/components/SignatureSettings/components/SignatureTempName.tsx';
|
||||
|
||||
export const SignatureGroupContentWormholes = () => {
|
||||
const { watch } = useFormContext<SystemSignature>();
|
||||
@@ -32,6 +33,11 @@ export const SignatureGroupContentWormholes = () => {
|
||||
<span>EOL:</span>
|
||||
<SignatureEOLCheckbox name="isEOL" />
|
||||
</label>
|
||||
|
||||
<label className="grid grid-cols-[100px_250px_1fr] gap-2 items-center text-[14px]">
|
||||
<span>Temp. Name:</span>
|
||||
<SignatureTempName />
|
||||
</label>
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
import { Controller, useFormContext } from 'react-hook-form';
import { InputText } from 'primereact/inputtext';
import { SystemSignature } from '@/hooks/Mapper/types';

export const SignatureTempName = () => {
  const { control } = useFormContext<SystemSignature>();

  return (
    <Controller
      name="temporary_name"
      control={control}
      render={({ field }) => <InputText placeholder="Temporary Name" value={field.value} onChange={field.onChange} />}
    />
  );
};
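A minimal usage sketch for the new field, assuming a react-hook-form provider around it; the wrapper component and default values below are illustrative and not part of this change:

```tsx
import { FormProvider, useForm } from 'react-hook-form';
import { SystemSignature } from '@/hooks/Mapper/types';
import { SignatureTempName } from '@/hooks/Mapper/components/mapRootContent/components/SignatureSettings/components/SignatureTempName.tsx';

// Hypothetical host component: SignatureTempName only needs a SystemSignature form context.
export const TempNameFieldExample = ({ signature }: { signature: SystemSignature }) => {
  const methods = useForm<SystemSignature>({ defaultValues: signature });

  return (
    <FormProvider {...methods}>
      {/* Registers itself under the "temporary_name" key via Controller */}
      <SignatureTempName />
    </FormProvider>
  );
};
```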
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Map, MAP_ROOT_ID } from '@/hooks/Mapper/components/map/Map.tsx';
|
||||
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
|
||||
import { OutCommand, OutCommandHandler, SolarSystemConnection } from '@/hooks/Mapper/types';
|
||||
import { CommandSelectSystems, OutCommand, OutCommandHandler, SolarSystemConnection } from '@/hooks/Mapper/types';
|
||||
import { MapRootData, useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { OnMapAddSystemCallback, OnMapSelectionChange } from '@/hooks/Mapper/components/map/map.types.ts';
|
||||
import isEqual from 'lodash.isequal';
|
||||
@@ -88,6 +88,18 @@ export const MapWrapper = () => {
|
||||
|
||||
useMapEventListener(event => {
|
||||
runCommand(event);
|
||||
|
||||
if (event.name === Commands.init) {
|
||||
const { selectedSystems } = ref.current;
|
||||
if (selectedSystems.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
runCommand({
|
||||
name: Commands.selectSystems,
|
||||
data: { systems: selectedSystems } as CommandSelectSystems,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
const onSelectionChange: OnMapSelectionChange = useCallback(
|
||||
@@ -181,17 +193,20 @@ export const MapWrapper = () => {
|
||||
ref.current.systemContextProps.systemId && setOpenSettings(ref.current.systemContextProps.systemId);
|
||||
}, []);
|
||||
|
||||
const handleTogglePing = useCallback(async (type: PingType, solar_system_id: string, hasPing: boolean) => {
|
||||
if (hasPing) {
|
||||
await outCommand({
|
||||
type: OutCommand.cancelPing,
|
||||
data: { type, solar_system_id: solar_system_id },
|
||||
});
|
||||
return;
|
||||
}
|
||||
const handleTogglePing = useCallback(
|
||||
async (type: PingType, solar_system_id: string, ping_id: string | undefined, hasPing: boolean) => {
|
||||
if (hasPing) {
|
||||
await outCommand({
|
||||
type: OutCommand.cancelPing,
|
||||
data: { type, id: ping_id },
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
setOpenPing({ type, solar_system_id });
|
||||
}, []);
|
||||
setOpenPing({ type, solar_system_id });
|
||||
},
|
||||
[],
|
||||
);
|
||||
|
||||
const handleCustomLabelDialog = useCallback(() => {
|
||||
const { systemContextProps } = ref.current;
|
||||
|
||||
@@ -24,6 +24,7 @@ export type CharacterCardProps = {
|
||||
useSystemsCache?: boolean;
|
||||
showCorporationLogo?: boolean;
|
||||
showAllyLogo?: boolean;
|
||||
showAllyLogoPlaceholder?: boolean;
|
||||
simpleMode?: boolean;
|
||||
} & WithIsOwnCharacter &
|
||||
WithClassName;
|
||||
@@ -47,6 +48,7 @@ export const CharacterCard = ({
|
||||
showShipName,
|
||||
showCorporationLogo,
|
||||
showAllyLogo,
|
||||
showAllyLogoPlaceholder,
|
||||
showTicker,
|
||||
useSystemsCache,
|
||||
className,
|
||||
@@ -217,6 +219,18 @@ export const CharacterCard = ({
|
||||
/>
|
||||
</WdTooltipWrapper>
|
||||
)}
|
||||
|
||||
{showAllyLogo && showAllyLogoPlaceholder && !char.alliance_id && (
|
||||
<WdTooltipWrapper position={TooltipPosition.top} content="No alliance">
|
||||
<span
|
||||
className={clsx(
|
||||
'min-w-[33px] min-h-[33px] w-[33px] h-[33px]',
|
||||
'flex transition-[border-color,opacity] duration-250 rounded-none',
|
||||
'wd-bg-default',
|
||||
)}
|
||||
/>
|
||||
</WdTooltipWrapper>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<div className="flex flex-col flex-grow overflow-hidden w-[50px]">
|
||||
|
||||
@@ -12,14 +12,16 @@ export enum SETTINGS_KEYS {
|
||||
SORT_FIELD = 'sortField',
|
||||
SORT_ORDER = 'sortOrder',
|
||||
|
||||
SHOW_DESCRIPTION_COLUMN = 'show_description_column',
|
||||
SHOW_UPDATED_COLUMN = 'show_updated_column',
|
||||
SHOW_ADDED_COLUMN = 'show_added_column',
|
||||
SHOW_CHARACTER_COLUMN = 'show_character_column',
|
||||
SHOW_CHARACTER_PORTRAIT = 'show_character_portrait',
|
||||
SHOW_DESCRIPTION_COLUMN = 'show_description_column',
|
||||
SHOW_GROUP_COLUMN = 'show_group_column',
|
||||
SHOW_UPDATED_COLUMN = 'show_updated_column',
|
||||
LAZY_DELETE_SIGNATURES = 'lazy_delete_signatures',
|
||||
KEEP_LAZY_DELETE = 'keep_lazy_delete_enabled',
|
||||
DELETION_TIMING = 'deletion_timing',
|
||||
COLOR_BY_TYPE = 'color_by_type',
|
||||
SHOW_CHARACTER_PORTRAIT = 'show_character_portrait',
|
||||
|
||||
// From SignatureKind
|
||||
COSMIC_ANOMALY = SignatureKind.CosmicAnomaly,
|
||||
@@ -45,6 +47,8 @@ export const DEFAULT_SIGNATURE_SETTINGS: SignatureSettingsType = {
|
||||
[SETTINGS_KEYS.SORT_FIELD]: 'inserted_at',
|
||||
[SETTINGS_KEYS.SORT_ORDER]: -1,
|
||||
|
||||
[SETTINGS_KEYS.SHOW_GROUP_COLUMN]: true,
|
||||
[SETTINGS_KEYS.SHOW_ADDED_COLUMN]: true,
|
||||
[SETTINGS_KEYS.SHOW_UPDATED_COLUMN]: true,
|
||||
[SETTINGS_KEYS.SHOW_DESCRIPTION_COLUMN]: true,
|
||||
[SETTINGS_KEYS.SHOW_CHARACTER_COLUMN]: true,
|
||||
|
||||
@@ -2,3 +2,4 @@ export * from './sortWHClasses';
|
||||
export * from './parseSignatures';
|
||||
export * from './getSystemById';
|
||||
export * from './getEveImageUrl';
|
||||
export * from './toastHelpers';
|
||||
|
||||
28
assets/js/hooks/Mapper/helpers/toastHelpers.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { Toast } from 'primereact/toast';

export const callToastWarn = (toast: Toast | null, msg: string, life = 3000) => {
  toast?.show({
    severity: 'warn',
    summary: 'Warning',
    detail: msg,
    life,
  });
};

export const callToastError = (toast: Toast | null, msg: string, life = 3000) => {
  toast?.show({
    severity: 'error',
    summary: 'Error',
    detail: msg,
    life,
  });
};

export const callToastSuccess = (toast: Toast | null, msg: string, life = 3000) => {
  toast?.show({
    severity: 'success',
    summary: 'Success',
    detail: msg,
    life,
  });
};
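These helpers are thin wrappers over PrimeReact's Toast show API. A minimal usage sketch, assuming a Toast rendered nearby and the barrel import added above; the component and handler names are illustrative:

```tsx
import { useRef } from 'react';
import { Toast } from 'primereact/toast';
import { callToastError, callToastSuccess } from '@/hooks/Mapper/helpers';

// Hypothetical button that reports the outcome of an async action via the toast helpers.
export const ImportSettingsButton = ({ onImport }: { onImport: () => Promise<void> }) => {
  const toastRef = useRef<Toast>(null);

  const handleClick = async () => {
    try {
      await onImport();
      callToastSuccess(toastRef.current, 'Settings imported');
    } catch {
      callToastError(toastRef.current, 'Import failed', 5000);
    }
  };

  return (
    <>
      <Toast ref={toastRef} />
      <button onClick={handleClick}>Import</button>
    </>
  );
};
```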
|
||||
@@ -3,3 +3,4 @@ export * from './useHotkey';
|
||||
export * from './usePageVisibility';
|
||||
export * from './useSkipContextMenu';
|
||||
export * from './useThrottle';
|
||||
export * from './useConfirmPopup';
|
||||
|
||||
10
assets/js/hooks/Mapper/hooks/useConfirmPopup.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
import { useCallback, useRef, useState } from 'react';

export const useConfirmPopup = () => {
  const cfRef = useRef<HTMLElement>();
  const [cfVisible, setCfVisible] = useState(false);
  const cfShow = useCallback(() => setCfVisible(true), []);
  const cfHide = useCallback(() => setCfVisible(false), []);

  return { cfRef, cfVisible, cfShow, cfHide };
};
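A minimal sketch of the hook in use, mirroring the OldSettingsDialog wiring earlier in this diff; the button label, message, and accept handler are illustrative:

```tsx
import { Button } from 'primereact/button';
import { ConfirmPopup } from 'primereact/confirmpopup';
import { useConfirmPopup } from '@/hooks/Mapper/hooks';

// Hypothetical destructive action guarded by a ConfirmPopup.
export const RemoveOldSettingsButton = ({ onConfirm }: { onConfirm: () => void }) => {
  const { cfRef, cfVisible, cfShow, cfHide } = useConfirmPopup();

  return (
    <>
      <Button
        // @ts-ignore -- same ref-typing workaround as in OldSettingsDialog
        ref={cfRef}
        onClick={cfShow}
        icon="pi pi-exclamation-triangle"
        size="small"
        severity="warning"
      />
      <ConfirmPopup
        target={cfRef.current}
        visible={cfVisible}
        onHide={cfHide}
        message="Remove the old settings?"
        icon="pi pi-exclamation-triangle"
        accept={onConfirm}
      />
    </>
  );
};
```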
|
||||
@@ -131,6 +131,7 @@ export interface MapRootContextProps {
|
||||
hasOldSettings: boolean;
|
||||
getSettingsForExport(): string | undefined;
|
||||
applySettings(settings: MapUserSettings): boolean;
|
||||
resetSettings(settings: MapUserSettings): void;
|
||||
checkOldSettings(): void;
|
||||
};
|
||||
}
|
||||
@@ -175,6 +176,7 @@ const MapRootContext = createContext<MapRootContextProps>({
|
||||
hasOldSettings: false,
|
||||
getSettingsForExport: () => '',
|
||||
applySettings: () => false,
|
||||
resetSettings: () => null,
|
||||
checkOldSettings: () => null,
|
||||
},
|
||||
});
|
||||
@@ -196,7 +198,7 @@ const MapRootHandlers = forwardRef(({ children }: WithChildren, fwdRef: Forwarde
|
||||
export const MapRootProvider = ({ children, fwdRef, outCommand }: MapRootProviderProps) => {
|
||||
const { update, ref } = useContextStore<MapRootData>({ ...INITIAL_DATA });
|
||||
|
||||
const storedSettings = useMapUserSettings(ref);
|
||||
const storedSettings = useMapUserSettings(ref, outCommand);
|
||||
|
||||
const { windowsSettings, toggleWidgetVisibility, updateWidgetSettings, resetWidgets } =
|
||||
useStoreWidgets(storedSettings);
|
||||
|
||||
@@ -0,0 +1,30 @@
|
||||
import { MapUserSettings } from '@/hooks/Mapper/mapRootProvider/types.ts';
import {
  DEFAULT_KILLS_WIDGET_SETTINGS,
  DEFAULT_ON_THE_MAP_SETTINGS,
  DEFAULT_ROUTES_SETTINGS,
  DEFAULT_WIDGET_LOCAL_SETTINGS,
  getDefaultWidgetProps,
  STORED_INTERFACE_DEFAULT_VALUES,
} from '@/hooks/Mapper/mapRootProvider/constants.ts';
import { DEFAULT_SIGNATURE_SETTINGS } from '@/hooks/Mapper/constants/signatures.ts';

// TODO - we need to provide and compare the settings version
const createWidgetSettingsWithVersion = <T>(settings: T) => {
  return {
    version: 0,
    settings,
  };
};

export const createDefaultWidgetSettings = (): MapUserSettings => {
  return {
    killsWidget: createWidgetSettingsWithVersion(DEFAULT_KILLS_WIDGET_SETTINGS),
    localWidget: createWidgetSettingsWithVersion(DEFAULT_WIDGET_LOCAL_SETTINGS),
    widgets: createWidgetSettingsWithVersion(getDefaultWidgetProps()),
    routes: createWidgetSettingsWithVersion(DEFAULT_ROUTES_SETTINGS),
    onTheMap: createWidgetSettingsWithVersion(DEFAULT_ON_THE_MAP_SETTINGS),
    signaturesWidget: createWidgetSettingsWithVersion(DEFAULT_SIGNATURE_SETTINGS),
    interface: createWidgetSettingsWithVersion(STORED_INTERFACE_DEFAULT_VALUES),
  };
};
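A short sketch of how this factory is typically consumed: fall back to the defaults when a map slug has no stored settings yet. The helper name below is illustrative; the real fallback logic lives in useActualizeRemoteMapSettings and useMapUserSettings further down in this diff.

```ts
import { createDefaultWidgetSettings } from '@/hooks/Mapper/mapRootProvider/helpers/createDefaultWidgetSettings.ts';
import { MapUserSettings, MapUserSettingsStructure } from '@/hooks/Mapper/mapRootProvider/types.ts';

// Hypothetical helper: pick the stored settings for a map, or build fresh defaults.
const resolveSettings = (stored: MapUserSettingsStructure, mapSlug: string): MapUserSettings =>
  stored[mapSlug] ?? createDefaultWidgetSettings();
```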
|
||||
@@ -14,8 +14,8 @@ export const useCommandPings = () => {
|
||||
ref.current.update({ pings });
|
||||
}, []);
|
||||
|
||||
const pingCancelled = useCallback(({ type, solar_system_id }: CommandPingCancelled) => {
|
||||
const newPings = ref.current.pings.filter(x => x.solar_system_id !== solar_system_id && x.type !== type);
|
||||
const pingCancelled = useCallback(({ type, id }: CommandPingCancelled) => {
|
||||
const newPings = ref.current.pings.filter(x => x.id !== id && x.type !== type);
|
||||
ref.current.update({ pings: newPings });
|
||||
}, []);
|
||||
|
||||
|
||||
@@ -0,0 +1,66 @@
|
||||
import { OutCommand, OutCommandHandler } from '@/hooks/Mapper/types';
|
||||
import { Dispatch, SetStateAction, useCallback, useEffect, useRef } from 'react';
|
||||
import {
|
||||
MapUserSettings,
|
||||
MapUserSettingsStructure,
|
||||
RemoteAdminSettingsResponse,
|
||||
} from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import { createDefaultWidgetSettings } from '@/hooks/Mapper/mapRootProvider/helpers/createDefaultWidgetSettings.ts';
|
||||
import { parseMapUserSettings } from '@/hooks/Mapper/components/helpers';
|
||||
|
||||
interface UseActualizeRemoteMapSettingsProps {
|
||||
outCommand: OutCommandHandler;
|
||||
mapUserSettings: MapUserSettingsStructure;
|
||||
applySettings: (val: MapUserSettings) => void;
|
||||
setMapUserSettings: Dispatch<SetStateAction<MapUserSettingsStructure>>;
|
||||
map_slug: string | null;
|
||||
}
|
||||
|
||||
export const useActualizeRemoteMapSettings = ({
|
||||
outCommand,
|
||||
mapUserSettings,
|
||||
setMapUserSettings,
|
||||
applySettings,
|
||||
map_slug,
|
||||
}: UseActualizeRemoteMapSettingsProps) => {
|
||||
const refVars = useRef({ applySettings, mapUserSettings, setMapUserSettings, map_slug });
|
||||
refVars.current = { applySettings, mapUserSettings, setMapUserSettings, map_slug };
|
||||
|
||||
const actualizeRemoteMapSettings = useCallback(async () => {
|
||||
const { applySettings } = refVars.current;
|
||||
|
||||
let res: RemoteAdminSettingsResponse | undefined;
|
||||
try {
|
||||
res = await outCommand({ type: OutCommand.getDefaultSettings, data: null });
|
||||
} catch (error) {
|
||||
// do nothing
|
||||
}
|
||||
|
||||
if (res?.default_settings == null) {
|
||||
applySettings(createDefaultWidgetSettings());
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
applySettings(parseMapUserSettings(res.default_settings));
|
||||
} catch (error) {
|
||||
applySettings(createDefaultWidgetSettings());
|
||||
}
|
||||
}, [outCommand]);
|
||||
|
||||
useEffect(() => {
|
||||
const { mapUserSettings } = refVars.current;
|
||||
|
||||
// INFO: Do nothing if slug is not set
|
||||
if (map_slug == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
// INFO: Do nothing if the user already has data
|
||||
if (map_slug in mapUserSettings) {
|
||||
return;
|
||||
}
|
||||
|
||||
actualizeRemoteMapSettings();
|
||||
}, [actualizeRemoteMapSettings, map_slug]);
|
||||
};
|
||||
@@ -1,44 +1,16 @@
|
||||
import useLocalStorageState from 'use-local-storage-state';
|
||||
import { MapUserSettings, MapUserSettingsStructure } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import {
|
||||
DEFAULT_KILLS_WIDGET_SETTINGS,
|
||||
DEFAULT_ON_THE_MAP_SETTINGS,
|
||||
DEFAULT_ROUTES_SETTINGS,
|
||||
DEFAULT_WIDGET_LOCAL_SETTINGS,
|
||||
getDefaultWidgetProps,
|
||||
STORED_INTERFACE_DEFAULT_VALUES,
|
||||
} from '@/hooks/Mapper/mapRootProvider/constants.ts';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import { DEFAULT_SIGNATURE_SETTINGS } from '@/hooks/Mapper/constants/signatures';
|
||||
import { MapRootData } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { useSettingsValueAndSetter } from '@/hooks/Mapper/mapRootProvider/hooks/useSettingsValueAndSetter.ts';
|
||||
import fastDeepEqual from 'fast-deep-equal';
|
||||
|
||||
// import { actualizeSettings } from '@/hooks/Mapper/mapRootProvider/helpers';
|
||||
|
||||
// TODO - we need provide and compare version
|
||||
const createWidgetSettingsWithVersion = <T>(settings: T) => {
|
||||
return {
|
||||
version: 0,
|
||||
settings,
|
||||
};
|
||||
};
|
||||
|
||||
const createDefaultWidgetSettings = (): MapUserSettings => {
|
||||
return {
|
||||
killsWidget: createWidgetSettingsWithVersion(DEFAULT_KILLS_WIDGET_SETTINGS),
|
||||
localWidget: createWidgetSettingsWithVersion(DEFAULT_WIDGET_LOCAL_SETTINGS),
|
||||
widgets: createWidgetSettingsWithVersion(getDefaultWidgetProps()),
|
||||
routes: createWidgetSettingsWithVersion(DEFAULT_ROUTES_SETTINGS),
|
||||
onTheMap: createWidgetSettingsWithVersion(DEFAULT_ON_THE_MAP_SETTINGS),
|
||||
signaturesWidget: createWidgetSettingsWithVersion(DEFAULT_SIGNATURE_SETTINGS),
|
||||
interface: createWidgetSettingsWithVersion(STORED_INTERFACE_DEFAULT_VALUES),
|
||||
};
|
||||
};
|
||||
import { OutCommandHandler } from '@/hooks/Mapper/types';
|
||||
import { useActualizeRemoteMapSettings } from '@/hooks/Mapper/mapRootProvider/hooks/useActualizeRemoteMapSettings.ts';
|
||||
import { createDefaultWidgetSettings } from '@/hooks/Mapper/mapRootProvider/helpers/createDefaultWidgetSettings.ts';
|
||||
|
||||
const EMPTY_OBJ = {};
|
||||
|
||||
export const useMapUserSettings = ({ map_slug }: MapRootData) => {
|
||||
export const useMapUserSettings = ({ map_slug }: MapRootData, outCommand: OutCommandHandler) => {
|
||||
const [isReady, setIsReady] = useState(false);
|
||||
const [hasOldSettings, setHasOldSettings] = useState(false);
|
||||
|
||||
@@ -49,19 +21,25 @@ export const useMapUserSettings = ({ map_slug }: MapRootData) => {
|
||||
const ref = useRef({ mapUserSettings, setMapUserSettings, map_slug });
|
||||
ref.current = { mapUserSettings, setMapUserSettings, map_slug };
|
||||
|
||||
useEffect(() => {
|
||||
const { mapUserSettings, setMapUserSettings } = ref.current;
|
||||
if (map_slug === null) {
|
||||
return;
|
||||
const applySettings = useCallback((settings: MapUserSettings) => {
|
||||
const { map_slug, mapUserSettings, setMapUserSettings } = ref.current;
|
||||
|
||||
if (map_slug == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!(map_slug in mapUserSettings)) {
|
||||
setMapUserSettings({
|
||||
...mapUserSettings,
|
||||
[map_slug]: createDefaultWidgetSettings(),
|
||||
});
|
||||
if (fastDeepEqual(settings, mapUserSettings[map_slug])) {
|
||||
return false;
|
||||
}
|
||||
}, [map_slug]);
|
||||
|
||||
setMapUserSettings(old => ({
|
||||
...old,
|
||||
[map_slug]: settings,
|
||||
}));
|
||||
return true;
|
||||
}, []);
|
||||
|
||||
useActualizeRemoteMapSettings({ outCommand, applySettings, mapUserSettings, setMapUserSettings, map_slug });
|
||||
|
||||
const [interfaceSettings, setInterfaceSettings] = useSettingsValueAndSetter(
|
||||
mapUserSettings,
|
||||
@@ -178,23 +156,9 @@ export const useMapUserSettings = ({ map_slug }: MapRootData) => {
|
||||
return JSON.stringify(ref.current.mapUserSettings[map_slug]);
|
||||
}, []);
|
||||
|
||||
const applySettings = useCallback((settings: MapUserSettings) => {
|
||||
const { map_slug, mapUserSettings, setMapUserSettings } = ref.current;
|
||||
|
||||
if (map_slug == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (fastDeepEqual(settings, mapUserSettings[map_slug])) {
|
||||
return false;
|
||||
}
|
||||
|
||||
setMapUserSettings(old => ({
|
||||
...old,
|
||||
[map_slug]: settings,
|
||||
}));
|
||||
return true;
|
||||
}, []);
|
||||
const resetSettings = useCallback(() => {
|
||||
applySettings(createDefaultWidgetSettings());
|
||||
}, [applySettings]);
|
||||
|
||||
return {
|
||||
isReady,
|
||||
@@ -217,6 +181,7 @@ export const useMapUserSettings = ({ map_slug }: MapRootData) => {
|
||||
|
||||
getSettingsForExport,
|
||||
applySettings,
|
||||
resetSettings,
|
||||
checkOldSettings,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -85,3 +85,7 @@ export type MapUserSettings = {
|
||||
export type MapUserSettingsStructure = {
|
||||
[mapId: string]: MapUserSettings;
|
||||
};
|
||||
|
||||
export type WdResponse<T> = T;
|
||||
|
||||
export type RemoteAdminSettingsResponse = { default_settings?: string };
|
||||
|
||||
@@ -27,6 +27,7 @@ export enum Commands {
|
||||
userRoutes = 'user_routes',
|
||||
centerSystem = 'center_system',
|
||||
selectSystem = 'select_system',
|
||||
selectSystems = 'select_systems',
|
||||
linkSignatureToSystem = 'link_signature_to_system',
|
||||
signaturesUpdated = 'signatures_updated',
|
||||
systemCommentAdded = 'system_comment_added',
|
||||
@@ -60,6 +61,7 @@ export type Command =
|
||||
| Commands.routes
|
||||
| Commands.userRoutes
|
||||
| Commands.selectSystem
|
||||
| Commands.selectSystems
|
||||
| Commands.centerSystem
|
||||
| Commands.linkSignatureToSystem
|
||||
| Commands.signaturesUpdated
|
||||
@@ -118,6 +120,10 @@ export type CommandUserRoutes = RoutesList;
|
||||
export type CommandKillsUpdated = Kill[];
|
||||
export type CommandDetailedKillsUpdated = Record<string, DetailedKill[]>;
|
||||
export type CommandSelectSystem = string | undefined;
|
||||
export type CommandSelectSystems = {
|
||||
systems: string[];
|
||||
delay?: number;
|
||||
};
|
||||
export type CommandCenterSystem = string | undefined;
|
||||
export type CommandLinkSignatureToSystem = {
|
||||
solar_system_source: number;
|
||||
@@ -151,7 +157,7 @@ export type CommandUpdateTracking = {
|
||||
follow: boolean;
|
||||
};
|
||||
export type CommandPingAdded = PingData[];
|
||||
export type CommandPingCancelled = Pick<PingData, 'type' | 'solar_system_id'>;
|
||||
export type CommandPingCancelled = Pick<PingData, 'type' | 'id'>;
|
||||
|
||||
export interface UserSettings {
|
||||
primaryCharacterId?: string;
|
||||
@@ -187,6 +193,7 @@ export interface CommandData {
|
||||
[Commands.killsUpdated]: CommandKillsUpdated;
|
||||
[Commands.detailedKillsUpdated]: CommandDetailedKillsUpdated;
|
||||
[Commands.selectSystem]: CommandSelectSystem;
|
||||
[Commands.selectSystems]: CommandSelectSystems;
|
||||
[Commands.centerSystem]: CommandCenterSystem;
|
||||
[Commands.linkSignatureToSystem]: CommandLinkSignatureToSystem;
|
||||
[Commands.signaturesUpdated]: CommandLinkSignaturesUpdated;
|
||||
@@ -269,6 +276,8 @@ export enum OutCommand {
|
||||
showTracking = 'show_tracking',
|
||||
getUserSettings = 'get_user_settings',
|
||||
updateUserSettings = 'update_user_settings',
|
||||
saveDefaultSettings = 'save_default_settings',
|
||||
getDefaultSettings = 'get_default_settings',
|
||||
unlinkSignature = 'unlink_signature',
|
||||
searchSystems = 'search_systems',
|
||||
undoDeleteSignatures = 'undo_delete_signatures',
|
||||
|
||||
@@ -4,6 +4,7 @@ export enum PingType {
|
||||
}
|
||||
|
||||
export type PingData = {
|
||||
id: string;
|
||||
inserted_at: number;
|
||||
character_eve_id: string;
|
||||
solar_system_id: string;
|
||||
|
||||
@@ -48,6 +48,7 @@ export type SystemSignature = {
|
||||
inserted_at?: string;
|
||||
updated_at?: string;
|
||||
deleted?: boolean;
|
||||
temporary_name?: string;
|
||||
};
|
||||
|
||||
export interface ExtendedSystemSignature extends SystemSignature {
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"scripts": {
|
||||
"build": "vite build --emptyOutDir false",
|
||||
"watch": "vite build --watch --minify false --emptyOutDir false --clearScreen true --mode development",
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
"test": "jest"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 18.0.0"
|
||||
@@ -50,6 +50,7 @@
|
||||
"@tailwindcss/aspect-ratio": "^0.4.2",
|
||||
"@tailwindcss/forms": "^0.5.7",
|
||||
"@tailwindcss/typography": "^0.5.13",
|
||||
"@types/jest": "^29.5.12",
|
||||
"@types/lodash.debounce": "^4.0.9",
|
||||
"@types/lodash.isequal": "^4.5.8",
|
||||
"@types/react": "^18.3.12",
|
||||
@@ -59,6 +60,7 @@
|
||||
"@vitejs/plugin-react": "^4.3.3",
|
||||
"@vitejs/plugin-react-refresh": "^1.3.6",
|
||||
"autoprefixer": "^10.4.19",
|
||||
"babel-jest": "^29.7.0",
|
||||
"child_process": "^1.0.2",
|
||||
"eslint": "^8.57.0",
|
||||
"eslint-config-prettier": "^9.1.0",
|
||||
@@ -67,6 +69,7 @@
|
||||
"eslint-plugin-react-hooks": "^4.6.0",
|
||||
"eslint-plugin-react-refresh": "^0.4.6",
|
||||
"heroicons": "^2.0.18",
|
||||
"jest": "^29.7.0",
|
||||
"merge-options": "^3.0.4",
|
||||
"postcss": "^8.4.38",
|
||||
"postcss-cli": "^11.0.0",
|
||||
@@ -74,8 +77,9 @@
|
||||
"prettier": "^3.2.5",
|
||||
"sass": "^1.77.2",
|
||||
"sass-loader": "^14.2.1",
|
||||
"ts-jest": "^29.1.2",
|
||||
"typescript": "^5.2.2",
|
||||
"vite": "^5.0.5",
|
||||
"vite": "^6.3.5",
|
||||
"vite-plugin-cdn-import": "^1.0.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
|
||||
BIN
assets/static/images/news/06-21-webhooks/webhooks-hero.png
Executable file (binary file not shown; after: 1.7 MiB)
BIN
assets/static/images/news/07-13-map-duplication/duplicate-map.png
Executable file (binary file not shown; after: 42 KiB)
BIN
assets/static/images/news/07-15-api-modernization/api-hero.png
Executable file (binary file not shown; after: 94 KiB)
BIN
assets/static/images/news/2025/07-27-settings/admin_settings.png
Normal file (binary file not shown; after: 97 KiB)
Binary file not shown (after: 112 KiB)
Binary file not shown (after: 95 KiB)
5377
assets/yarn.lock
File diff suppressed because it is too large
82
clean_changelog.py
Normal file
@@ -0,0 +1,82 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Script to clean up CHANGELOG.md by removing empty version entries.
|
||||
An empty version entry has only a version header followed by empty lines,
|
||||
without any actual content (### Bug Fixes: or ### Features: sections).
|
||||
"""
|
||||
|
||||
import re
|
||||
|
||||
def clean_changelog():
|
||||
with open('./CHANGELOG.md', 'r') as f:
|
||||
content = f.read()
|
||||
|
||||
# Split content into sections based on version headers
|
||||
version_pattern = r'^## \[v\d+\.\d+\.\d+\].*?\([^)]+\)$'
|
||||
|
||||
# Find all version headers with their positions
|
||||
matches = list(re.finditer(version_pattern, content, re.MULTILINE))
|
||||
|
||||
# Build new content by keeping only non-empty versions
|
||||
new_content = ""
|
||||
|
||||
# Keep the header (everything before first version)
|
||||
if matches:
|
||||
new_content += content[:matches[0].start()]
|
||||
else:
|
||||
# No versions found, keep original
|
||||
return content
|
||||
|
||||
for i, match in enumerate(matches):
|
||||
version_start = match.start()
|
||||
|
||||
# Find the end of this version section (start of next version or end of file)
|
||||
if i + 1 < len(matches):
|
||||
version_end = matches[i + 1].start()
|
||||
else:
|
||||
version_end = len(content)
|
||||
|
||||
version_section = content[version_start:version_end]
|
||||
|
||||
# Check if this version has actual content
|
||||
# Look for ### Bug Fixes: or ### Features: followed by actual content
|
||||
has_content = False
|
||||
|
||||
# Split the section into lines
|
||||
lines = version_section.split('\n')
|
||||
|
||||
# Look for content sections
|
||||
in_content_section = False
|
||||
for line in lines:
|
||||
line_stripped = line.strip()
|
||||
|
||||
# Check if we're entering a content section
|
||||
if line_stripped.startswith('### Bug Fixes:') or line_stripped.startswith('### Features:'):
|
||||
in_content_section = True
|
||||
continue
|
||||
|
||||
# If we're in a content section and find non-empty content
|
||||
if in_content_section:
|
||||
if line_stripped and not line_stripped.startswith('###') and not line_stripped.startswith('##'):
|
||||
# This is actual content (not just another header)
|
||||
if line_stripped.startswith('*') or len(line_stripped) > 0:
|
||||
has_content = True
|
||||
break
|
||||
elif line_stripped.startswith('##'):
|
||||
# We've reached the next version, stop looking
|
||||
break
|
||||
|
||||
# Only keep versions with actual content
|
||||
if has_content:
|
||||
new_content += version_section
|
||||
|
||||
return new_content
|
||||
|
||||
if __name__ == "__main__":
|
||||
cleaned_content = clean_changelog()
|
||||
|
||||
# Write the cleaned content back to the file
|
||||
with open('./CHANGELOG.md', 'w') as f:
|
||||
f.write(cleaned_content)
|
||||
|
||||
print("CHANGELOG.md has been cleaned up successfully!")
|
||||
@@ -102,6 +102,23 @@ config :error_tracker,
|
||||
repo: WandererApp.Repo,
|
||||
otp_app: :wanderer_app
|
||||
|
||||
# Security Audit Configuration
|
||||
config :wanderer_app, WandererApp.SecurityAudit,
|
||||
enabled: true,
|
||||
# Set to true in production for better performance
|
||||
async: false,
|
||||
batch_size: 100,
|
||||
flush_interval: 5000,
|
||||
log_level: :info,
|
||||
threat_detection: %{
|
||||
enabled: true,
|
||||
max_failed_attempts: 5,
|
||||
max_permission_denials: 10,
|
||||
window_seconds: 300,
|
||||
bulk_operation_threshold: 10000
|
||||
},
|
||||
retention_days: 90
|
||||
|
||||
config :git_ops,
|
||||
mix_project: Mix.Project.get!(),
|
||||
changelog_file: "CHANGELOG.md",
|
||||
|
||||
@@ -11,11 +11,13 @@ config :wanderer_app, WandererAppWeb.Endpoint,
|
||||
config :wanderer_app, WandererApp.Repo,
|
||||
ssl: false,
|
||||
stacktrace: true,
|
||||
show_sensitive_data_on_connection_error: true,
|
||||
show_sensitive_data_on_connection_error: false,
|
||||
pool_size: 15,
|
||||
migration_timestamps: [type: :utc_datetime_usec],
|
||||
migration_lock: nil,
|
||||
queue_target: 5000
|
||||
queue_target: 5000,
|
||||
queue_interval: 1000,
|
||||
checkout_timeout: 15000
|
||||
|
||||
# Configures Swoosh API Client
|
||||
config :swoosh, api_client: Swoosh.ApiClient.Finch, finch_name: WandererApp.Finch
|
||||
@@ -27,5 +29,8 @@ config :swoosh, local: false
|
||||
config :logger,
|
||||
level: :info
|
||||
|
||||
# Enable async security audit processing in production
|
||||
config :wanderer_app, WandererApp.SecurityAudit, async: true
|
||||
|
||||
# Runtime production configuration, including reading
|
||||
# of environment variables, is done on config/runtime.exs.
|
||||
|
||||
275
config/quality_gates.exs
Normal file
@@ -0,0 +1,275 @@
|
||||
# Quality Gates Configuration
|
||||
#
|
||||
# This file defines the error budget thresholds for the project.
|
||||
# These are intentionally set high initially to avoid blocking development
|
||||
# while we work on improving code quality.
|
||||
|
||||
defmodule WandererApp.QualityGates do
|
||||
@moduledoc """
|
||||
Central configuration for all quality gate thresholds.
|
||||
|
||||
## Error Budget Philosophy
|
||||
|
||||
We use error budgets to:
|
||||
1. Allow gradual improvement of code quality
|
||||
2. Avoid blocking development on legacy issues
|
||||
3. Provide clear targets for improvement
|
||||
4. Track progress over time
|
||||
|
||||
## Threshold Levels
|
||||
|
||||
- **Current**: What we enforce today (relaxed)
|
||||
- **Target**: Where we want to be (strict)
|
||||
- **Timeline**: When we plan to tighten the thresholds
|
||||
"""
|
||||
|
||||
@doc """
|
||||
Returns the current error budget configuration.
|
||||
"""
|
||||
def current_thresholds do
|
||||
%{
|
||||
# Compilation warnings
|
||||
compilation: %{
|
||||
# Increased from 100 to accommodate current state
|
||||
max_warnings: 500,
|
||||
target: 0,
|
||||
# Extended timeline
|
||||
timeline: "Q3 2025",
|
||||
description: "Allow existing warnings while we fix them gradually"
|
||||
},
|
||||
|
||||
# Credo code quality issues
|
||||
credo: %{
|
||||
# Increased from 50 to accommodate current state
|
||||
max_issues: 200,
|
||||
# Increased from 10
|
||||
max_high_priority: 50,
|
||||
target_issues: 10,
|
||||
target_high_priority: 0,
|
||||
# Extended timeline
|
||||
timeline: "Q2 2025",
|
||||
description: "Focus on high-priority issues first"
|
||||
},
|
||||
|
||||
# Dialyzer static analysis
|
||||
dialyzer: %{
|
||||
# Allow some errors for now (was 0)
|
||||
max_errors: 20,
|
||||
max_warnings: :unlimited,
|
||||
target_errors: 0,
|
||||
target_warnings: 0,
|
||||
# Extended timeline
|
||||
timeline: "Q4 2025",
|
||||
description: "Temporarily allow some errors during codebase improvement"
|
||||
},
|
||||
|
||||
# Test coverage
|
||||
coverage: %{
|
||||
# Reduced from 70% to accommodate current state
|
||||
minimum: 50,
|
||||
target: 90,
|
||||
# Extended timeline
|
||||
timeline: "Q3 2025",
|
||||
description: "Start with 50% coverage, gradually improve to 90%"
|
||||
},
|
||||
|
||||
# Test execution
|
||||
tests: %{
|
||||
# Increased from 10 to accommodate current state
|
||||
max_failures: 50,
|
||||
# 10% flaky tests allowed (increased)
|
||||
max_flaky_rate: 0.10,
|
||||
# 10 minutes (increased from 5)
|
||||
max_duration_seconds: 600,
|
||||
target_failures: 0,
|
||||
# 5 minutes
|
||||
target_duration_seconds: 300,
|
||||
# Extended timeline
|
||||
timeline: "Q2 2025",
|
||||
description: "Allow more test failures during stabilization phase"
|
||||
},
|
||||
|
||||
# Code formatting
|
||||
formatting: %{
|
||||
enforced: true,
|
||||
auto_fix_in_ci: false,
|
||||
description: "Strict formatting enforcement from day one"
|
||||
},
|
||||
|
||||
# Documentation
|
||||
documentation: %{
|
||||
# 50% of modules documented
|
||||
min_module_doc_coverage: 0.5,
|
||||
# 30% of public functions documented
|
||||
min_function_doc_coverage: 0.3,
|
||||
target_module_coverage: 0.9,
|
||||
target_function_coverage: 0.8,
|
||||
timeline: "Q3 2025",
|
||||
description: "Gradually improve documentation coverage"
|
||||
},
|
||||
|
||||
# Security
|
||||
security: %{
|
||||
sobelow_enabled: false,
|
||||
max_high_risk: 0,
|
||||
max_medium_risk: 5,
|
||||
target_enabled: true,
|
||||
timeline: "Q2 2025",
|
||||
description: "Security scanning to be enabled after initial cleanup"
|
||||
},
|
||||
|
||||
# Dependencies
|
||||
dependencies: %{
|
||||
max_outdated_major: 10,
|
||||
max_outdated_minor: 20,
|
||||
max_vulnerable: 0,
|
||||
audit_enabled: true,
|
||||
description: "Keep dependencies reasonably up to date"
|
||||
},
|
||||
|
||||
# Performance
|
||||
performance: %{
|
||||
max_slow_tests_seconds: 5,
|
||||
max_memory_usage_mb: 500,
|
||||
profiling_enabled: false,
|
||||
timeline: "Q4 2025",
|
||||
description: "Performance monitoring to be added later"
|
||||
}
|
||||
}
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns the configuration for GitHub Actions.
|
||||
"""
|
||||
def github_actions_config do
|
||||
thresholds = current_thresholds()
|
||||
|
||||
%{
|
||||
compilation_warnings: thresholds.compilation.max_warnings,
|
||||
credo_issues: thresholds.credo.max_issues,
|
||||
dialyzer_errors: thresholds.dialyzer.max_errors,
|
||||
coverage_minimum: thresholds.coverage.minimum,
|
||||
test_max_failures: thresholds.tests.max_failures,
|
||||
test_timeout_minutes: div(thresholds.tests.max_duration_seconds, 60)
|
||||
}
|
||||
end
|
||||
|
||||
@doc """
|
||||
Returns the configuration for mix check.
|
||||
"""
|
||||
def mix_check_config do
|
||||
thresholds = current_thresholds()
|
||||
|
||||
[
|
||||
# Compiler with warnings allowed
|
||||
{:compiler, "mix compile --warnings-as-errors false"},
|
||||
|
||||
# Credo with issue budget
|
||||
{:credo, "mix credo --strict --max-issues #{thresholds.credo.max_issues}"},
|
||||
|
||||
# Dialyzer without halt on warnings
|
||||
{:dialyzer, "mix dialyzer", exit_status: 0},
|
||||
|
||||
# Tests with failure allowance
|
||||
{:ex_unit, "mix test --max-failures #{thresholds.tests.max_failures}"},
|
||||
|
||||
# Formatting is strict
|
||||
{:formatter, "mix format --check-formatted"},
|
||||
|
||||
# Coverage check
|
||||
{:coverage, "mix coveralls --minimum-coverage #{thresholds.coverage.minimum}"},
|
||||
|
||||
# Documentation coverage (optional for now)
|
||||
{:docs_coverage, false},
|
||||
|
||||
# Security scanning (disabled for now)
|
||||
{:sobelow, false},
|
||||
|
||||
# Dependency audit
|
||||
{:audit, "mix deps.audit", exit_status: 0},
|
||||
|
||||
# Doctor check (disabled)
|
||||
{:doctor, false}
|
||||
]
|
||||
end
|
||||
|
||||
@doc """
|
||||
Generates a quality report showing current vs target thresholds.
|
||||
"""
|
||||
def quality_report do
|
||||
thresholds = current_thresholds()
|
||||
|
||||
"""
|
||||
# WandererApp Quality Gates Report
|
||||
|
||||
Generated: #{DateTime.utc_now() |> DateTime.to_string()}
|
||||
|
||||
## Current Error Budgets vs Targets
|
||||
|
||||
| Category | Current Budget | Target Goal | Timeline | Status |
|
||||
|----------|----------------|-------------|----------|--------|
|
||||
| Compilation Warnings | ≤#{thresholds.compilation.max_warnings} | #{thresholds.compilation.target} | #{thresholds.compilation.timeline} | 🟡 Relaxed |
|
||||
| Credo Issues | ≤#{thresholds.credo.max_issues} | #{thresholds.credo.target_issues} | #{thresholds.credo.timeline} | 🟡 Relaxed |
|
||||
| Dialyzer Errors | ≤#{thresholds.dialyzer.max_errors} | #{thresholds.dialyzer.target_errors} | #{thresholds.dialyzer.timeline} | 🟡 Relaxed |
|
||||
| Test Coverage | ≥#{thresholds.coverage.minimum}% | #{thresholds.coverage.target}% | #{thresholds.coverage.timeline} | 🟡 Relaxed |
|
||||
| Test Failures | ≤#{thresholds.tests.max_failures} | #{thresholds.tests.target_failures} | #{thresholds.tests.timeline} | 🟡 Relaxed |
|
||||
| Code Formatting | Required | Required | - | ✅ Strict |
|
||||
|
||||
## Improvement Roadmap
|
||||
|
||||
### Q1 2025
|
||||
- Reduce Credo issues from #{thresholds.credo.max_issues} to #{thresholds.credo.target_issues}
|
||||
- Achieve zero test failures
|
||||
- Reduce test execution time to under 3 minutes
|
||||
|
||||
### Q2 2025
|
||||
- Eliminate all compilation warnings
|
||||
- Increase test coverage to #{thresholds.coverage.target}%
|
||||
- Enable security scanning with Sobelow
|
||||
|
||||
### Q3 2025
|
||||
- Clean up all Dialyzer warnings
|
||||
- Achieve 90% documentation coverage
|
||||
|
||||
### Q4 2025
|
||||
- Implement performance monitoring
|
||||
- Add memory usage tracking
|
||||
|
||||
## Quick Commands
|
||||
|
||||
```bash
|
||||
# Check current quality status
|
||||
mix check
|
||||
|
||||
# Run with auto-fix where possible
|
||||
mix check --fix
|
||||
|
||||
# Generate detailed quality report
|
||||
mix quality.report
|
||||
|
||||
# Check specific category
|
||||
mix credo --strict
|
||||
mix test --cover
|
||||
mix dialyzer
|
||||
```
|
||||
"""
|
||||
end
|
||||
|
||||
@doc """
|
||||
Checks if a metric passes the current threshold.
|
||||
"""
|
||||
def passes_threshold?(category, metric, value) do
|
||||
thresholds = current_thresholds()
|
||||
|
||||
case {category, metric} do
|
||||
{:compilation, :warnings} -> value <= thresholds.compilation.max_warnings
|
||||
{:credo, :issues} -> value <= thresholds.credo.max_issues
|
||||
{:credo, :high_priority} -> value <= thresholds.credo.max_high_priority
|
||||
{:dialyzer, :errors} -> value <= thresholds.dialyzer.max_errors
|
||||
{:coverage, :percentage} -> value >= thresholds.coverage.minimum
|
||||
{:tests, :failures} -> value <= thresholds.tests.max_failures
|
||||
{:tests, :duration} -> value <= thresholds.tests.max_duration_seconds
|
||||
_ -> true
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -390,3 +390,26 @@ end
|
||||
config :wanderer_app, :license_manager,
|
||||
api_url: System.get_env("LM_API_URL", "http://localhost:4000"),
|
||||
auth_key: System.get_env("LM_AUTH_KEY")
|
||||
|
||||
# SSE Configuration
|
||||
config :wanderer_app, :sse,
|
||||
enabled:
|
||||
config_dir
|
||||
|> get_var_from_path_or_env("WANDERER_SSE_ENABLED", "true")
|
||||
|> String.to_existing_atom(),
|
||||
max_connections_total:
|
||||
config_dir |> get_int_from_path_or_env("WANDERER_SSE_MAX_CONNECTIONS", 1000),
|
||||
max_connections_per_map:
|
||||
config_dir |> get_int_from_path_or_env("SSE_MAX_CONNECTIONS_PER_MAP", 50),
|
||||
max_connections_per_api_key:
|
||||
config_dir |> get_int_from_path_or_env("SSE_MAX_CONNECTIONS_PER_API_KEY", 10),
|
||||
keepalive_interval: config_dir |> get_int_from_path_or_env("SSE_KEEPALIVE_INTERVAL", 30000),
|
||||
connection_timeout: config_dir |> get_int_from_path_or_env("SSE_CONNECTION_TIMEOUT", 300_000)
|
||||
|
||||
# External Events Configuration
|
||||
config :wanderer_app, :external_events,
|
||||
webhooks_enabled:
|
||||
config_dir
|
||||
|> get_var_from_path_or_env("WANDERER_WEBHOOKS_ENABLED", "true")
|
||||
|> String.to_existing_atom(),
|
||||
webhook_timeout_ms: config_dir |> get_int_from_path_or_env("WANDERER_WEBHOOK_TIMEOUT_MS", 15000)
|
||||
|
||||
@@ -8,15 +8,23 @@ import Config
|
||||
config :wanderer_app, WandererApp.Repo,
|
||||
username: "postgres",
|
||||
password: "postgres",
|
||||
hostname: "localhost",
|
||||
hostname: System.get_env("DB_HOST", "localhost"),
|
||||
database: "wanderer_test#{System.get_env("MIX_TEST_PARTITION")}",
|
||||
pool: Ecto.Adapters.SQL.Sandbox,
|
||||
pool_size: 10
|
||||
pool_size: 20,
|
||||
ownership_timeout: 60_000,
|
||||
timeout: 60_000
|
||||
|
||||
# Set environment variable before config runs to ensure character API is enabled in tests
|
||||
System.put_env("WANDERER_CHARACTER_API_DISABLED", "false")
|
||||
|
||||
config :wanderer_app,
|
||||
ddrt: Test.DDRTMock,
|
||||
logger: Test.LoggerMock,
|
||||
pubsub_client: Test.PubSubMock
|
||||
pubsub_client: Test.PubSubMock,
|
||||
cached_info: WandererApp.CachedInfo.Mock,
|
||||
character_api_disabled: false,
|
||||
environment: :test
|
||||
|
||||
# We don't run a server during test. If one is required,
|
||||
# you can enable the server option below.
|
||||
@@ -36,3 +44,8 @@ config :logger, level: :warning
|
||||
|
||||
# Initialize plugs at runtime for faster test compilation
|
||||
config :phoenix, :plug_init_mode, :runtime
|
||||
|
||||
# Configure MIME types for testing, including XML for error response contract tests
|
||||
config :mime, :types, %{
|
||||
"application/xml" => ["xml"]
|
||||
}
|
||||
|
||||
25
coveralls.json
Normal file
@@ -0,0 +1,25 @@
|
||||
{
|
||||
"coverage_options": {
|
||||
"treat_no_relevant_lines_as_covered": true,
|
||||
"output_dir": "cover/",
|
||||
"template_path": "cover/coverage.html.eex",
|
||||
"minimum_coverage": 70
|
||||
},
|
||||
"terminal_options": {
|
||||
"file_column_width": 40
|
||||
},
|
||||
"html_options": {
|
||||
"output_dir": "cover/"
|
||||
},
|
||||
"skip_files": [
|
||||
"test/",
|
||||
"lib/wanderer_app_web.ex",
|
||||
"lib/wanderer_app.ex",
|
||||
"lib/wanderer_app/application.ex",
|
||||
"lib/wanderer_app/release.ex",
|
||||
"lib/wanderer_app_web/endpoint.ex",
|
||||
"lib/wanderer_app_web/telemetry.ex",
|
||||
"lib/wanderer_app_web/gettext.ex",
|
||||
"priv/"
|
||||
]
|
||||
}
|
||||
126
lib/mix/tasks/test.setup.ex
Normal file
@@ -0,0 +1,126 @@
|
||||
defmodule Mix.Tasks.Test.Setup do
|
||||
@moduledoc """
|
||||
Sets up the test database environment.
|
||||
|
||||
This task will:
|
||||
- Create the test database if it doesn't exist
|
||||
- Run all migrations
|
||||
- Verify the setup is correct
|
||||
|
||||
## Usage
|
||||
|
||||
mix test.setup
|
||||
|
||||
## Options
|
||||
|
||||
--force Drop the existing test database and recreate it
|
||||
--quiet Reduce output verbosity
|
||||
--seed Seed the database with test fixtures after setup
|
||||
|
||||
## Examples
|
||||
|
||||
mix test.setup
|
||||
mix test.setup --force
|
||||
mix test.setup --seed
|
||||
mix test.setup --force --seed --quiet
|
||||
|
||||
"""
|
||||
|
||||
use Mix.Task
|
||||
|
||||
alias WandererApp.DatabaseSetup
|
||||
|
||||
@shortdoc "Sets up the test database environment"
|
||||
|
||||
@impl Mix.Task
|
||||
def run(args) do
|
||||
# Parse options
|
||||
{opts, _} =
|
||||
OptionParser.parse!(args,
|
||||
strict: [force: :boolean, quiet: :boolean, seed: :boolean],
|
||||
aliases: [f: :force, q: :quiet, s: :seed]
|
||||
)
|
||||
|
||||
# Configure logger level based on quiet option
|
||||
if opts[:quiet] do
|
||||
Logger.configure(level: :warning)
|
||||
else
|
||||
Logger.configure(level: :info)
|
||||
end
|
||||
|
||||
# Set the environment to test
|
||||
Mix.env(:test)
|
||||
|
||||
try do
|
||||
# Load the application configuration
|
||||
Mix.Task.run("loadconfig")
|
||||
|
||||
# Start the application
|
||||
{:ok, _} = Application.ensure_all_started(:wanderer_app)
|
||||
|
||||
if opts[:force] do
|
||||
Mix.shell().info("🔄 Forcing database recreation...")
|
||||
_ = DatabaseSetup.drop_database()
|
||||
end
|
||||
|
||||
case DatabaseSetup.setup_test_database() do
|
||||
:ok ->
|
||||
if opts[:seed] do
|
||||
Mix.shell().info("🌱 Seeding test data...")
|
||||
|
||||
case DatabaseSetup.seed_test_data() do
|
||||
:ok ->
|
||||
Mix.shell().info("✅ Test database setup and seeding completed successfully!")
|
||||
|
||||
{:error, reason} ->
|
||||
Mix.shell().error("❌ Test data seeding failed: #{inspect(reason)}")
|
||||
System.halt(1)
|
||||
end
|
||||
else
|
||||
Mix.shell().info("✅ Test database setup completed successfully!")
|
||||
end
|
||||
|
||||
{:error, reason} ->
|
||||
Mix.shell().error("❌ Test database setup failed: #{inspect(reason)}")
|
||||
print_troubleshooting_help()
|
||||
System.halt(1)
|
||||
end
|
||||
rescue
|
||||
error ->
|
||||
Mix.shell().error("❌ Unexpected error during database setup: #{inspect(error)}")
|
||||
print_troubleshooting_help()
|
||||
System.halt(1)
|
||||
end
|
||||
end
|
||||
|
||||
defp print_troubleshooting_help do
|
||||
Mix.shell().info("""
|
||||
|
||||
🔧 Troubleshooting Tips:
|
||||
|
||||
1. Ensure PostgreSQL is running:
|
||||
• On macOS: brew services start postgresql
|
||||
• On Ubuntu: sudo service postgresql start
|
||||
• Using Docker: docker run --name postgres -e POSTGRES_PASSWORD=postgres -p 5432:5432 -d postgres
|
||||
|
||||
2. Check database configuration in config/test.exs:
|
||||
• Username: postgres
|
||||
• Password: postgres
|
||||
• Host: localhost
|
||||
• Port: 5432
|
||||
|
||||
3. Verify database permissions:
|
||||
• Ensure the postgres user can create databases
|
||||
• Try connecting manually: psql -U postgres -h localhost
|
||||
|
||||
4. For connection refused errors:
|
||||
• Check if PostgreSQL is listening on the correct port
|
||||
• Verify firewall settings
|
||||
|
||||
5. Force recreation if corrupted:
|
||||
• Run: mix test.setup --force
|
||||
|
||||
📚 For more help, see: https://hexdocs.pm/ecto/Ecto.Adapters.Postgres.html
|
||||
""")
|
||||
end
|
||||
end
|
||||
331
lib/mix/tasks/test_stability.ex
Normal file
@@ -0,0 +1,331 @@
|
||||
defmodule Mix.Tasks.Test.Stability do
|
||||
@moduledoc """
|
||||
Runs tests multiple times to detect flaky tests.
|
||||
|
||||
## Usage
|
||||
|
||||
mix test.stability
|
||||
mix test.stability --runs 10
|
||||
mix test.stability --runs 5 --file test/specific_test.exs
|
||||
mix test.stability --tag flaky
|
||||
mix test.stability --detect --threshold 0.95
|
||||
|
||||
## Options
|
||||
|
||||
* `--runs` - Number of times to run tests (default: 5)
|
||||
* `--file` - Specific test file to check
|
||||
* `--tag` - Only run tests with specific tag
|
||||
* `--detect` - Detection mode, identifies flaky tests
|
||||
* `--threshold` - Success rate threshold for detection (default: 0.95)
|
||||
* `--parallel` - Run iterations in parallel
|
||||
* `--report` - Generate detailed report file
|
||||
"""
|
||||
|
||||
use Mix.Task
|
||||
|
||||
@shortdoc "Detect flaky tests by running them multiple times"
|
||||
|
||||
@default_runs 5
|
||||
@default_threshold 0.95
|
||||
|
||||
def run(args) do
|
||||
{opts, test_args, _} =
|
||||
OptionParser.parse(args,
|
||||
switches: [
|
||||
runs: :integer,
|
||||
file: :string,
|
||||
tag: :string,
|
||||
detect: :boolean,
|
||||
threshold: :float,
|
||||
parallel: :boolean,
|
||||
report: :string
|
||||
],
|
||||
aliases: [
|
||||
r: :runs,
|
||||
f: :file,
|
||||
t: :tag,
|
||||
d: :detect,
|
||||
p: :parallel
|
||||
]
|
||||
)
|
||||
|
||||
runs = Keyword.get(opts, :runs, @default_runs)
|
||||
threshold = Keyword.get(opts, :threshold, @default_threshold)
|
||||
detect_mode = Keyword.get(opts, :detect, false)
|
||||
parallel = Keyword.get(opts, :parallel, false)
|
||||
report_file = Keyword.get(opts, :report)
|
||||
|
||||
Mix.shell().info("🔍 Running test stability check...")
|
||||
Mix.shell().info(" Iterations: #{runs}")
|
||||
Mix.shell().info(" Threshold: #{Float.round(threshold * 100, 1)}%")
|
||||
Mix.shell().info("")
|
||||
|
||||
# Build test command
|
||||
test_cmd = build_test_command(opts, test_args)
|
||||
|
||||
# Run tests multiple times
|
||||
results =
|
||||
if parallel do
|
||||
run_tests_parallel(test_cmd, runs)
|
||||
else
|
||||
run_tests_sequential(test_cmd, runs)
|
||||
end
|
||||
|
||||
# Analyze results
|
||||
analysis = analyze_results(results, threshold)
|
||||
|
||||
# Display results
|
||||
display_results(analysis, detect_mode)
|
||||
|
||||
# Generate report if requested
|
||||
if report_file do
|
||||
generate_report(analysis, report_file)
|
||||
end
|
||||
|
||||
# Exit with appropriate code
|
||||
if analysis.flaky_count > 0 and detect_mode do
|
||||
Mix.shell().error("\n❌ Found #{analysis.flaky_count} flaky tests!")
|
||||
exit({:shutdown, 1})
|
||||
else
|
||||
Mix.shell().info("\n✅ Test stability check complete")
|
||||
end
|
||||
end
|
||||
|
||||
defp build_test_command(opts, test_args) do
|
||||
cmd_parts = ["test"]
|
||||
|
||||
cmd_parts =
|
||||
if file = Keyword.get(opts, :file) do
|
||||
cmd_parts ++ [file]
|
||||
else
|
||||
cmd_parts
|
||||
end
|
||||
|
||||
cmd_parts =
|
||||
if tag = Keyword.get(opts, :tag) do
|
||||
cmd_parts ++ ["--only", tag]
|
||||
else
|
||||
cmd_parts
|
||||
end
|
||||
|
||||
cmd_parts ++ test_args
|
||||
end
|
||||
|
||||
defp run_tests_sequential(test_cmd, runs) do
|
||||
for i <- 1..runs do
|
||||
Mix.shell().info("Running iteration #{i}/#{runs}...")
|
||||
|
||||
start_time = System.monotonic_time(:millisecond)
|
||||
|
||||
# Capture test output
|
||||
{output, exit_code} =
|
||||
System.cmd("mix", test_cmd,
|
||||
stderr_to_stdout: true,
|
||||
env: [{"MIX_ENV", "test"}]
|
||||
)
|
||||
|
||||
duration = System.monotonic_time(:millisecond) - start_time
|
||||
|
||||
# Parse test results
|
||||
test_results = parse_test_output(output)
|
||||
|
||||
%{
|
||||
iteration: i,
|
||||
exit_code: exit_code,
|
||||
duration: duration,
|
||||
output: output,
|
||||
tests: test_results.tests,
|
||||
failures: test_results.failures,
|
||||
failed_tests: test_results.failed_tests
|
||||
}
|
||||
end
|
||||
end
|
||||
|
||||
defp run_tests_parallel(test_cmd, runs) do
|
||||
Mix.shell().info("Running #{runs} iterations in parallel...")
|
||||
|
||||
tasks =
|
||||
for i <- 1..runs do
|
||||
Task.async(fn ->
|
||||
start_time = System.monotonic_time(:millisecond)
|
||||
|
||||
{output, exit_code} =
|
||||
System.cmd("mix", test_cmd,
|
||||
stderr_to_stdout: true,
|
||||
env: [{"MIX_ENV", "test"}]
|
||||
)
|
||||
|
||||
duration = System.monotonic_time(:millisecond) - start_time
|
||||
test_results = parse_test_output(output)
|
||||
|
||||
%{
|
||||
iteration: i,
|
||||
exit_code: exit_code,
|
||||
duration: duration,
|
||||
output: output,
|
||||
tests: test_results.tests,
|
||||
failures: test_results.failures,
|
||||
failed_tests: test_results.failed_tests
|
||||
}
|
||||
end)
|
||||
end
|
||||
|
||||
Task.await_many(tasks, :infinity)
|
||||
end
|
||||
|
||||
defp parse_test_output(output) do
|
||||
lines = String.split(output, "\n")
|
||||
|
||||
# Extract test count and failures
|
||||
test_summary = Enum.find(lines, &String.contains?(&1, "test"))
|
||||
|
||||
{tests, failures} =
|
||||
case Regex.run(~r/(\d+) tests?, (\d+) failures?/, test_summary || "") do
|
||||
[_, tests, failures] ->
|
||||
{String.to_integer(tests), String.to_integer(failures)}
|
||||
|
||||
_ ->
|
||||
{0, 0}
|
||||
end
|
||||
|
||||
# Extract failed test names
|
||||
failed_tests = extract_failed_tests(output)
|
||||
|
||||
%{
|
||||
tests: tests,
|
||||
failures: failures,
|
||||
failed_tests: failed_tests
|
||||
}
|
||||
end
|
||||
|
||||
defp extract_failed_tests(output) do
|
||||
output
|
||||
|> String.split("\n")
|
||||
# More precise filtering for actual test failures
|
||||
|> Enum.filter(
|
||||
&(String.contains?(&1, "test ") and
|
||||
(String.contains?(&1, "FAILED") or String.contains?(&1, "ERROR") or
|
||||
Regex.match?(~r/^\s*\d+\)\s+test/, &1)))
|
||||
)
|
||||
|> Enum.map(&extract_test_name/1)
|
||||
|> Enum.reject(&is_nil/1)
|
||||
end
|
||||
|
||||
defp extract_test_name(line) do
|
||||
case Regex.run(~r/test (.+) \((.+)\)/, line) do
|
||||
[_, name, module] -> "#{module}: #{name}"
|
||||
_ -> nil
|
||||
end
|
||||
end
|
||||
|
||||
  defp analyze_results(results, threshold) do
    total_runs = length(results)

    # Group failures by test name
    all_failures =
      results
      |> Enum.flat_map(& &1.failed_tests)
      |> Enum.frequencies()

    # Identify flaky tests
    flaky_tests =
      all_failures
      |> Enum.filter(fn {_test, fail_count} ->
        success_rate = (total_runs - fail_count) / total_runs
        success_rate < threshold and success_rate > 0
      end)
      |> Enum.map(fn {test, fail_count} ->
        success_rate = (total_runs - fail_count) / total_runs

        %{
          test: test,
          failures: fail_count,
          success_rate: success_rate,
          failure_rate: fail_count / total_runs
        }
      end)
      |> Enum.sort_by(& &1.failure_rate, :desc)

    # Calculate statistics
    total_tests = results |> Enum.map(& &1.tests) |> Enum.max(fn -> 0 end)
    avg_duration = results |> Enum.map(& &1.duration) |> average()
    success_runs = Enum.count(results, &(&1.exit_code == 0))

    %{
      total_runs: total_runs,
      total_tests: total_tests,
      success_runs: success_runs,
      failed_runs: total_runs - success_runs,
      success_rate: success_runs / total_runs,
      avg_duration: avg_duration,
      flaky_tests: flaky_tests,
      flaky_count: length(flaky_tests),
      all_failures: all_failures
    }
  end

  defp average([]), do: 0
  defp average(list), do: Enum.sum(list) / length(list)

  defp display_results(analysis, detect_mode) do
    Mix.shell().info("\n📊 Test Stability Results")
    Mix.shell().info("=" |> String.duplicate(50))

    Mix.shell().info("\nSummary:")
    Mix.shell().info("  Total test runs: #{analysis.total_runs}")
    Mix.shell().info("  Successful runs: #{analysis.success_runs}")
    Mix.shell().info("  Failed runs: #{analysis.failed_runs}")
    Mix.shell().info("  Overall success rate: #{format_percentage(analysis.success_rate)}")
    Mix.shell().info("  Average duration: #{Float.round(analysis.avg_duration / 1000, 2)}s")

    if analysis.flaky_count > 0 do
      Mix.shell().info("\n⚠️  Flaky Tests Detected:")
      Mix.shell().info("-" |> String.duplicate(50))

      for test <- analysis.flaky_tests do
        Mix.shell().info("\n  #{test.test}")
        Mix.shell().info("    Failure rate: #{format_percentage(test.failure_rate)}")
        Mix.shell().info("    Failed #{test.failures} out of #{analysis.total_runs} runs")
      end
    else
      Mix.shell().info("\n✅ No flaky tests detected!")
    end

    if not detect_mode and map_size(analysis.all_failures) > 0 do
      Mix.shell().info("\n📝 All Test Failures:")
      Mix.shell().info("-" |> String.duplicate(50))

      for {test, count} <- analysis.all_failures do
        percentage = count / analysis.total_runs
        Mix.shell().info("  #{test}: #{count} failures (#{format_percentage(percentage)})")
      end
    end
  end

  defp format_percentage(rate) do
    "#{Float.round(rate * 100, 1)}%"
  end

  defp generate_report(analysis, report_file) do
    timestamp = DateTime.utc_now() |> DateTime.to_string()

    report = %{
      timestamp: timestamp,
      summary: %{
        total_runs: analysis.total_runs,
        total_tests: analysis.total_tests,
        success_runs: analysis.success_runs,
        failed_runs: analysis.failed_runs,
        success_rate: analysis.success_rate,
        avg_duration_ms: analysis.avg_duration
      },
      flaky_tests: analysis.flaky_tests,
      all_failures: analysis.all_failures
    }

    json = Jason.encode!(report, pretty: true)
    File.write!(report_file, json)

    Mix.shell().info("\n📄 Report written to: #{report_file}")
  end
end
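For context, not something in the diff itself: analyze_results/2 only treats a test as flaky when it failed in some runs but not in all of them. A worked example of that arithmetic, using invented numbers (20 runs, 3 failures for one test, a threshold of 0.95):

    # Illustrative values only; none of these numbers come from the diff.
    total_runs = 20
    fail_count = 3
    success_rate = (total_runs - fail_count) / total_runs
    # => 0.85
    success_rate < 0.95 and success_rate > 0
    # => true, so the test is reported with failure_rate: 0.15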
@@ -1,7 +1,13 @@
defmodule WandererApp.Api do
  @moduledoc false

  use Ash.Domain
  use Ash.Domain,
    extensions: [AshJsonApi.Domain]

  json_api do
    prefix "/api/v1"
    log_errors?(true)
  end

  resources do
    resource WandererApp.Api.AccessList
@@ -22,6 +28,7 @@ defmodule WandererApp.Api do
    resource WandererApp.Api.MapSubscription
    resource WandererApp.Api.MapTransaction
    resource WandererApp.Api.MapUserSettings
    resource WandererApp.Api.MapDefaultSettings
    resource WandererApp.Api.User
    resource WandererApp.Api.ShipTypeInfo
    resource WandererApp.Api.UserActivity
@@ -30,5 +37,6 @@ defmodule WandererApp.Api do
    resource WandererApp.Api.License
    resource WandererApp.Api.MapPing
    resource WandererApp.Api.MapInvite
    resource WandererApp.Api.MapWebhookSubscription
  end
end
@@ -3,13 +3,32 @@ defmodule WandererApp.Api.AccessList do

  use Ash.Resource,
    domain: WandererApp.Api,
    data_layer: AshPostgres.DataLayer
    data_layer: AshPostgres.DataLayer,
    extensions: [AshJsonApi.Resource]

  postgres do
    repo(WandererApp.Repo)
    table("access_lists_v1")
  end

  json_api do
    type "access_lists"

    includes([:owner, :members])

    derive_filter?(true)
    derive_sort?(true)

    routes do
      base("/access_lists")
      get(:read)
      index :read
      post(:new)
      patch(:update)
      delete(:destroy)
    end
  end

  code_interface do
    define(:create, action: :create)
    define(:available, action: :available)
@@ -79,8 +98,11 @@ defmodule WandererApp.Api.AccessList do
  relationships do
    belongs_to :owner, WandererApp.Api.Character do
      attribute_writable? true
      public? true
    end

    has_many :members, WandererApp.Api.AccessListMember
    has_many :members, WandererApp.Api.AccessListMember do
      public? true
    end
  end
end
@@ -3,13 +3,32 @@ defmodule WandererApp.Api.AccessListMember do

  use Ash.Resource,
    domain: WandererApp.Api,
    data_layer: AshPostgres.DataLayer
    data_layer: AshPostgres.DataLayer,
    extensions: [AshJsonApi.Resource]

  postgres do
    repo(WandererApp.Repo)
    table("access_list_members_v1")
  end

  json_api do
    type "access_list_members"

    includes([:access_list])

    derive_filter?(true)
    derive_sort?(true)

    routes do
      base("/access_list_members")
      get(:read)
      index :read
      post(:create)
      patch(:update_role)
      delete(:destroy)
    end
  end

  code_interface do
    define(:create, action: :create)
    define(:update_role, action: :update_role)
@@ -101,6 +120,7 @@ defmodule WandererApp.Api.AccessListMember do
  relationships do
    belongs_to :access_list, WandererApp.Api.AccessList do
      attribute_writable? true
      public? true
    end
  end
@@ -12,7 +12,7 @@ defmodule WandererApp.Api.Changes.SlugifyName do
  defp maybe_slugify_name(changeset) do
    case Changeset.get_attribute(changeset, :slug) do
      slug when is_binary(slug) ->
        Changeset.change_attribute(changeset, :slug, Slug.slugify(slug))
        Changeset.force_change_attribute(changeset, :slug, Slug.slugify(slug))

      _ ->
        changeset
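For context, not part of the diff: switching to Changeset.force_change_attribute/3 writes the slugified value even when the :slug attribute would not otherwise be writable in the current action, so the stored slug always ends up normalized. A rough sketch of the normalization itself, assuming the Slug dependency's slugify/1 and an invented input:

    # Hypothetical input/output; the exact rules come from the Slug library.
    Slug.slugify("My Cool Map!")
    # => "my-cool-map"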
@@ -124,7 +124,7 @@ defmodule WandererApp.Api.Character do
    update :update_corporation do
      require_atomic? false

      accept([:corporation_id, :corporation_name, :corporation_ticker, :alliance_id])
      accept([:corporation_id, :corporation_name, :corporation_ticker])
    end

    update :update_alliance do
@@ -3,13 +3,44 @@ defmodule WandererApp.Api.Map do

  use Ash.Resource,
    domain: WandererApp.Api,
    data_layer: AshPostgres.DataLayer
    data_layer: AshPostgres.DataLayer,
    extensions: [AshJsonApi.Resource]

  alias Ash.Resource.Change.Builtins

  postgres do
    repo(WandererApp.Repo)
    table("maps_v1")
  end

  json_api do
    type "maps"

    # Include relationships for compound documents
    includes([
      :owner,
      :characters,
      :acls
    ])

    # Enable filtering and sorting
    derive_filter?(true)
    derive_sort?(true)

    # Routes configuration
    routes do
      base("/maps")
      get(:read)
      index :read
      post(:new)
      patch(:update)
      delete(:destroy)

      # Custom action for map duplication
      post(:duplicate, route: "/:id/duplicate")
    end
  end

  code_interface do
    define(:available, action: :available)
    define(:get_map_by_slug, action: :by_slug, args: [:slug])
@@ -22,11 +53,14 @@ defmodule WandererApp.Api.Map do
    define(:assign_owner, action: :assign_owner)
    define(:mark_as_deleted, action: :mark_as_deleted)
    define(:update_api_key, action: :update_api_key)
    define(:toggle_webhooks, action: :toggle_webhooks)

    define(:by_id,
      get_by: [:id],
      action: :read
    )

    define(:duplicate, action: :duplicate)
  end

  calculations do
@@ -127,6 +161,86 @@ defmodule WandererApp.Api.Map do
    update :update_api_key do
      accept [:public_api_key]
    end

    update :toggle_webhooks do
      accept [:webhooks_enabled]
    end

    create :duplicate do
      accept [:name, :description, :scope, :only_tracked_characters]

      argument :source_map_id, :uuid, allow_nil?: false
      argument :copy_acls, :boolean, default: true
      argument :copy_user_settings, :boolean, default: true
      argument :copy_signatures, :boolean, default: true

      # Set defaults from source map before creation
      change fn changeset, context ->
        source_map_id = Ash.Changeset.get_argument(changeset, :source_map_id)

        case WandererApp.Api.Map.by_id(source_map_id) do
          {:ok, source_map} ->
            # Use provided description or fall back to source map description
            description =
              Ash.Changeset.get_attribute(changeset, :description) || source_map.description

            changeset
            |> Ash.Changeset.change_attribute(:description, description)
            |> Ash.Changeset.change_attribute(:scope, source_map.scope)
            |> Ash.Changeset.change_attribute(
              :only_tracked_characters,
              source_map.only_tracked_characters
            )
            |> Ash.Changeset.change_attribute(:owner_id, context.actor.id)
            |> Ash.Changeset.change_attribute(
              :slug,
              generate_unique_slug(Ash.Changeset.get_attribute(changeset, :name))
            )

          {:error, _} ->
            Ash.Changeset.add_error(changeset,
              field: :source_map_id,
              message: "Source map not found"
            )
        end
      end

      # Copy related data after creation
      change Builtins.after_action(fn changeset, new_map, context ->
        source_map_id = Ash.Changeset.get_argument(changeset, :source_map_id)
        copy_acls = Ash.Changeset.get_argument(changeset, :copy_acls)
        copy_user_settings = Ash.Changeset.get_argument(changeset, :copy_user_settings)
        copy_signatures = Ash.Changeset.get_argument(changeset, :copy_signatures)

        case WandererApp.Map.Operations.Duplication.duplicate_map(
               source_map_id,
               new_map,
               copy_acls: copy_acls,
               copy_user_settings: copy_user_settings,
               copy_signatures: copy_signatures
             ) do
          {:ok, _result} ->
            {:ok, new_map}

          {:error, error} ->
            {:error, error}
        end
      end)
    end
  end

  # Generate a unique slug from map name
  defp generate_unique_slug(name) do
    base_slug =
      name
      |> String.downcase()
      |> String.replace(~r/[^a-z0-9\s-]/, "")
      |> String.replace(~r/\s+/, "-")
      |> String.trim("-")

    # Add timestamp to ensure uniqueness
    timestamp = System.system_time(:millisecond) |> Integer.to_string()
    "#{base_slug}-#{timestamp}"
  end

  attributes do
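A minimal sketch of calling the new :duplicate action through the code interface defined earlier; the map name, the source_map variable, and the current_character actor are invented for illustration, and the copy_* arguments keep their declared defaults unless overridden:

    # Hypothetical call; an actor is required because the change reads context.actor.id.
    {:ok, copy} =
      WandererApp.Api.Map.duplicate(
        %{
          name: "Staging Copy",
          source_map_id: source_map.id,
          copy_signatures: false
        },
        actor: current_character
      )

The copy takes its scope and only_tracked_characters from the source map, and its slug is derived from the name plus a millisecond timestamp via generate_unique_slug/1.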
@@ -134,6 +248,7 @@ defmodule WandererApp.Api.Map do

    attribute :name, :string do
      allow_nil? false
      public? true
      constraints trim?: false, max_length: 20, min_length: 3, allow_empty?: false
    end
@@ -143,8 +258,13 @@ defmodule WandererApp.Api.Map do
      constraints trim?: false, max_length: 40, min_length: 3, allow_empty?: false
    end

    attribute :description, :string
    attribute :personal_note, :string
    attribute :description, :string do
      public? true
    end

    attribute :personal_note, :string do
      public? true
    end

    attribute :public_api_key, :string do
      allow_nil? true
@@ -158,6 +278,7 @@ defmodule WandererApp.Api.Map do

    attribute :scope, :atom do
      default "wormholes"
      public? true

      constraints(
        one_of: [
@@ -185,6 +306,12 @@ defmodule WandererApp.Api.Map do
      allow_nil? true
    end

    attribute :webhooks_enabled, :boolean do
      default(false)
      allow_nil?(false)
      public?(true)
    end

    create_timestamp(:inserted_at)
    update_timestamp(:updated_at)
  end
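For illustration only, not part of the diff: the new webhooks_enabled attribute pairs with the toggle_webhooks update action shown earlier, so enabling webhooks for an already-loaded map record could look roughly like this:

    # Hypothetical usage; `map` is assumed to be a loaded WandererApp.Api.Map record.
    {:ok, map} = WandererApp.Api.Map.toggle_webhooks(map, %{webhooks_enabled: true})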
@@ -196,20 +323,25 @@ defmodule WandererApp.Api.Map do
  relationships do
    belongs_to :owner, WandererApp.Api.Character do
      attribute_writable? true
      public? true
    end

    many_to_many :characters, WandererApp.Api.Character do
      through WandererApp.Api.MapCharacterSettings
      source_attribute_on_join_resource :map_id
      destination_attribute_on_join_resource :character_id
      public? true
    end

    many_to_many :acls, WandererApp.Api.AccessList do
      through WandererApp.Api.MapAccessList
      source_attribute_on_join_resource :map_id
      destination_attribute_on_join_resource :access_list_id
      public? true
    end

    has_many :transactions, WandererApp.Api.MapTransaction
    has_many :transactions, WandererApp.Api.MapTransaction do
      public? false
    end
  end
end
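For context, not part of the diff: marking these relationships public? and listing them in includes([...]) is what lets JSON:API clients ask for compound documents. A hedged sketch of such a request, using the Req client purely for illustration; the host and whether the Phoenix router actually forwards /api/v1 to the generated JSON:API router are assumptions:

    # Illustrative request only.
    Req.get!(
      "https://wanderer.example.com/api/v1/maps?include=owner,characters",
      headers: [{"accept", "application/vnd.api+json"}]
    )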
@@ -3,19 +3,56 @@ defmodule WandererApp.Api.MapAccessList do

  use Ash.Resource,
    domain: WandererApp.Api,
    data_layer: AshPostgres.DataLayer
    data_layer: AshPostgres.DataLayer,
    extensions: [AshJsonApi.Resource]

  postgres do
    repo(WandererApp.Repo)
    table("map_access_lists_v1")
  end

  json_api do
    type "map_access_lists"

    # Handle composite primary key
    primary_key do
      keys([:id])
    end

    includes([
      :map,
      :access_list
    ])

    # Enable automatic filtering and sorting
    derive_filter?(true)
    derive_sort?(true)

    routes do
      base("/map_access_lists")

      get(:read)
      index :read
      post(:create)
      patch(:update)
      delete(:destroy)

      # Custom routes for specific queries
      get(:read_by_map, route: "/by_map/:map_id")
      get(:read_by_acl, route: "/by_acl/:acl_id")
    end
  end

  code_interface do
    define(:create, action: :create)

    define(:read_by_map,
      action: :read_by_map
    )

    define(:read_by_acl,
      action: :read_by_acl
    )
  end

  actions do
@@ -30,6 +67,11 @@ defmodule WandererApp.Api.MapAccessList do
      argument(:map_id, :string, allow_nil?: false)
      filter(expr(map_id == ^arg(:map_id)))
    end

    read :read_by_acl do
      argument(:acl_id, :string, allow_nil?: false)
      filter(expr(access_list_id == ^arg(:acl_id)))
    end
  end

  attributes do
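For context, not part of the diff: the two custom reads above back both the code interface and the extra routes declared in the json_api block, so the same query is reachable in-process or over HTTP. A rough sketch with an invented map_id; the shape of the generated read_by_map call and the Req client are assumptions:

    # In-process, via the code interface (argument passed in the input map).
    {:ok, entries} = WandererApp.Api.MapAccessList.read_by_map(%{map_id: map_id})

    # Over HTTP, via the custom route "/by_map/:map_id" under the "/api/v1" prefix.
    Req.get!("https://wanderer.example.com/api/v1/map_access_lists/by_map/#{map_id}",
      headers: [{"accept", "application/vnd.api+json"}]
    )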
@@ -40,8 +82,12 @@ defmodule WandererApp.Api.MapAccessList do
  end

  relationships do
    belongs_to :map, WandererApp.Api.Map, primary_key?: true, allow_nil?: false
    belongs_to :access_list, WandererApp.Api.AccessList, primary_key?: true, allow_nil?: false
    belongs_to :map, WandererApp.Api.Map, primary_key?: true, allow_nil?: false, public?: true

    belongs_to :access_list, WandererApp.Api.AccessList,
      primary_key?: true,
      allow_nil?: false,
      public?: true
  end

  postgres do
Some files were not shown because too many files have changed in this diff.