Mirror of https://github.com/wanderer-industries/wanderer (synced 2025-12-03 22:35:41 +00:00)
Compare commits
23 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 02b5239220 | |
| | 0ed3bdfcb0 | |
| | bdeb89011f | |
| | 1523b625bc | |
| | fb91eeb692 | |
| | 601d2e02cb | |
| | 0a662d34eb | |
| | 5cd4693e9d | |
| | f3f0f860e3 | |
| | 93a5cf8a79 | |
| | 7cf15cbc21 | |
| | 30bc6d20b2 | |
| | b39f99fde4 | |
| | 0e8aa9efa4 | |
| | e1fcde36e3 | |
| | 7aafe077d3 | |
| | 5b8cab5e76 | |
| | 4ab56af40a | |
| | e8cea86a76 | |
| | d0a6e0b358 | |
| | 8831b3e970 | |
| | f6db6f0914 | |
| | ab8baeedd1 | |
.github/workflows/build.yml (vendored, 64 lines changed)
@@ -5,7 +5,7 @@ on:
    branches:
      - main
      - develop
      - "releases/*"

env:
  MIX_ENV: prod
  GH_TOKEN: ${{ github.token }}
@@ -53,6 +53,7 @@ jobs:
      - name: ⬇️ Checkout repo
        uses: actions/checkout@v3
        with:
+         ssh-key: "${{ secrets.COMMIT_KEY }}"
          fetch-depth: 0
      - name: 😅 Cache deps
        id: cache-deps
@@ -97,7 +98,7 @@ jobs:
          mix git_ops.release --force-patch --yes
          git push --follow-tags
          echo "commit_hash=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT

      - name: Set commit hash for develop
        id: set-commit-develop
        if: github.ref == 'refs/heads/develop'
@@ -106,11 +107,9 @@ jobs:

  docker:
    name: 🛠 Build Docker Images
+   if: github.ref == 'refs/heads/develop'
    needs: build
    runs-on: ubuntu-22.04
-   outputs:
-     release-tag: ${{ steps.get-latest-tag.outputs.tag }}
-     release-notes: ${{ steps.get-content.outputs.string }}
    permissions:
      checks: write
      contents: write
@@ -137,19 +136,6 @@ jobs:
          ref: ${{ needs.build.outputs.commit_hash }}
          fetch-depth: 0

-     - name: Prepare Changelog
-       if: github.ref == 'refs/heads/main'
-       run: |
-         yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
-         sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md
-
-     - name: Get Release Tag
-       id: get-latest-tag
-       if: github.ref == 'refs/heads/main'
-       uses: "WyriHaximus/github-action-get-previous-tag@v1"
-       with:
-         fallback: 1.0.0
-
      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
@@ -198,26 +184,6 @@ jobs:
          if-no-files-found: error
          retention-days: 1

-     - uses: markpatterson27/markdown-to-output@v1
-       id: extract-changelog
-       if: github.ref == 'refs/heads/main'
-       with:
-         filepath: CHANGELOG.md
-
-     - name: Get content
-       uses: 2428392/gh-truncate-string-action@v1.3.0
-       id: get-content
-       if: github.ref == 'refs/heads/main'
-       with:
-         stringToTruncate: |
-           📣 Wanderer new release available 🎉
-
-           **Version**: ${{ steps.get-latest-tag.outputs.tag }}
-
-           ${{ steps.extract-changelog.outputs.body }}
-         maxLength: 500
-         truncationSymbol: "…"
-
  merge:
    runs-on: ubuntu-latest
    needs:
@@ -248,9 +214,6 @@ jobs:
          tags: |
            type=ref,event=branch
            type=ref,event=pr
-           type=semver,pattern={{version}},enable=${{ github.ref == 'refs/heads/main' }}
-           type=semver,pattern={{major}}.{{minor}},enable=${{ github.ref == 'refs/heads/main' }}
-           type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }},enable=${{ github.ref == 'refs/heads/main' }}
            type=raw,value=develop,enable=${{ github.ref == 'refs/heads/develop' }}
            type=raw,value=develop-{{sha}},enable=${{ github.ref == 'refs/heads/develop' }}
@@ -267,19 +230,25 @@ jobs:
  create-release:
    name: 🏷 Create Release
    runs-on: ubuntu-22.04
-   needs: [docker, merge]
    if: ${{ github.ref == 'refs/heads/main' && github.event_name == 'push' }}
+   needs: build
    steps:
      - name: ⬇️ Checkout repo
        uses: actions/checkout@v3
        with:
          fetch-depth: 0

+     - name: Get Release Tag
+       id: get-latest-tag
+       uses: "WyriHaximus/github-action-get-previous-tag@v1"
+       with:
+         fallback: 1.0.0
+
      - name: 🏷 Create Draft Release
        uses: softprops/action-gh-release@v1
        with:
-         tag_name: ${{ needs.docker.outputs.release-tag }}
-         name: Release ${{ needs.docker.outputs.release-tag }}
+         tag_name: ${{ steps.get-latest-tag.outputs.tag }}
+         name: Release ${{ steps.get-latest-tag.outputs.tag }}
          body: |
            ## Info
            Commit ${{ github.sha }} was deployed to `staging`. [See code diff](${{ github.event.compare }}).
@@ -289,10 +258,3 @@ jobs:
            ## How to Promote?
            In order to promote this to prod, edit the draft and press **"Publish release"**.
          draft: true
-
-     - name: Discord Webhook Action
-       uses: tsickert/discord-webhook@v5.3.0
-       if: github.ref == 'refs/heads/main'
-       with:
-         webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
-         content: ${{ needs.docker.outputs.release-notes }}
.github/workflows/docker-arm.yml (vendored, new file, 184 lines)
@@ -0,0 +1,184 @@
name: Build Docker ARM Image

on:
  push:
    tags:
      - '**'

env:
  MIX_ENV: prod
  GH_TOKEN: ${{ github.token }}
  REGISTRY_IMAGE: wandererltd/community-edition-arm

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

jobs:
  docker:
    name: 🛠 Build Docker Images
    runs-on: ubuntu-22.04
    outputs:
      release-tag: ${{ steps.get-latest-tag.outputs.tag }}
      release-notes: ${{ steps.get-content.outputs.string }}
    permissions:
      checks: write
      contents: write
      packages: write
      attestations: write
      id-token: write
      pull-requests: write
      repository-projects: write
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/arm64
    steps:
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV

      - name: Get Release Tag
        id: get-latest-tag
        uses: "WyriHaximus/github-action-get-previous-tag@v1"
        with:
          fallback: 1.0.0

      - name: ⬇️ Checkout repo
        uses: actions/checkout@v3
        with:
          ref: ${{ steps.get-latest-tag.outputs.tag }}
          fetch-depth: 0

      - name: Prepare Changelog
        run: |
          yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
          sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY_IMAGE }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.WANDERER_DOCKER_USER }}
          password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}

      - name: Build and push
        id: build
        uses: docker/build-push-action@v6
        with:
          push: true
          context: .
          file: ./Dockerfile
          cache-from: type=gha
          cache-to: type=gha,mode=max
          labels: ${{ steps.meta.outputs.labels }}
          platforms: ${{ matrix.platform }}
          outputs: type=image,"name=${{ env.REGISTRY_IMAGE }}",push-by-digest=true,name-canonical=true,push=true
          build-args: |
            MIX_ENV=prod
            BUILD_METADATA=${{ steps.meta.outputs.json }}

      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

      - uses: markpatterson27/markdown-to-output@v1
        id: extract-changelog
        with:
          filepath: CHANGELOG.md

      - name: Get content
        uses: 2428392/gh-truncate-string-action@v1.3.0
        id: get-content
        with:
          stringToTruncate: |
            📣 Wanderer **ARM** release available 🎉

            [wandererltd/community-edition-arm:${{ steps.get-latest-tag.outputs.tag }}](https://hub.docker.com/r/wandererltd/community-edition-arm/tags)

            **Version**: ${{ steps.get-latest-tag.outputs.tag }}

            ${{ steps.extract-changelog.outputs.body }}
          maxLength: 500
          truncationSymbol: "…"

  merge:
    runs-on: ubuntu-latest
    needs:
      - docker
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.WANDERER_DOCKER_USER }}
          password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.REGISTRY_IMAGE }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }}

      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)

      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}

  notify:
    name: 🏷 Notify about release
    runs-on: ubuntu-22.04
    needs: [docker, merge]
    steps:
      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v5.3.0
        with:
          webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
          content: ${{ needs.docker.outputs.release-notes }}
.github/workflows/docker.yml (vendored, new file, 184 lines)
@@ -0,0 +1,184 @@
name: Build Docker Image

on:
  push:
    tags:
      - '**'

env:
  MIX_ENV: prod
  GH_TOKEN: ${{ github.token }}
  REGISTRY_IMAGE: wandererltd/community-edition

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

permissions:
  contents: write

jobs:
  docker:
    name: 🛠 Build Docker Images
    runs-on: ubuntu-22.04
    outputs:
      release-tag: ${{ steps.get-latest-tag.outputs.tag }}
      release-notes: ${{ steps.get-content.outputs.string }}
    permissions:
      checks: write
      contents: write
      packages: write
      attestations: write
      id-token: write
      pull-requests: write
      repository-projects: write
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
    steps:
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV

      - name: Get Release Tag
        id: get-latest-tag
        uses: "WyriHaximus/github-action-get-previous-tag@v1"
        with:
          fallback: 1.0.0

      - name: ⬇️ Checkout repo
        uses: actions/checkout@v3
        with:
          ref: ${{ steps.get-latest-tag.outputs.tag }}
          fetch-depth: 0

      - name: Prepare Changelog
        run: |
          yes | cp -rf CHANGELOG.md priv/changelog/CHANGELOG.md
          sed -i '1i%{title: "Change Log"}\n\n---\n' priv/changelog/CHANGELOG.md

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY_IMAGE }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to DockerHub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.WANDERER_DOCKER_USER }}
          password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}

      - name: Build and push
        id: build
        uses: docker/build-push-action@v6
        with:
          push: true
          context: .
          file: ./Dockerfile
          cache-from: type=gha
          cache-to: type=gha,mode=max
          labels: ${{ steps.meta.outputs.labels }}
          platforms: ${{ matrix.platform }}
          outputs: type=image,"name=${{ env.REGISTRY_IMAGE }}",push-by-digest=true,name-canonical=true,push=true
          build-args: |
            MIX_ENV=prod
            BUILD_METADATA=${{ steps.meta.outputs.json }}

      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

      - uses: markpatterson27/markdown-to-output@v1
        id: extract-changelog
        with:
          filepath: CHANGELOG.md

      - name: Get content
        uses: 2428392/gh-truncate-string-action@v1.3.0
        id: get-content
        with:
          stringToTruncate: |
            📣 Wanderer new release available 🎉

            [wandererltd/community-edition:${{ steps.get-latest-tag.outputs.tag }}](https://hub.docker.com/r/wandererltd/community-edition/tags)

            **Version**: ${{ steps.get-latest-tag.outputs.tag }}

            ${{ steps.extract-changelog.outputs.body }}
          maxLength: 500
          truncationSymbol: "…"

  merge:
    runs-on: ubuntu-latest
    needs:
      - docker
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          pattern: digests-*
          merge-multiple: true

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.WANDERER_DOCKER_USER }}
          password: ${{ secrets.WANDERER_DOCKER_PASSWORD }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.REGISTRY_IMAGE }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=semver,pattern={{version}},value=${{ needs.docker.outputs.release-tag }}

      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)

      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}

  notify:
    name: 🏷 Notify about release
    runs-on: ubuntu-22.04
    needs: [docker, merge]
    steps:
      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@v5.3.0
        with:
          webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
          content: ${{ needs.docker.outputs.release-notes }}
CHANGELOG.md (68 lines changed)
@@ -2,6 +2,74 @@

<!-- changelog -->

## [v1.75.14](https://github.com/wanderer-industries/wanderer/compare/v1.75.13...v1.75.14) (2025-08-11)

## [v1.75.13](https://github.com/wanderer-industries/wanderer/compare/v1.75.12...v1.75.13) (2025-08-11)

## [v1.75.12](https://github.com/wanderer-industries/wanderer/compare/v1.75.11...v1.75.12) (2025-08-11)

## [v1.75.11](https://github.com/wanderer-industries/wanderer/compare/v1.75.10...v1.75.11) (2025-08-11)

## [v1.75.10](https://github.com/wanderer-industries/wanderer/compare/v1.75.9...v1.75.10) (2025-08-11)

## [v1.75.9](https://github.com/wanderer-industries/wanderer/compare/v1.75.8...v1.75.9) (2025-08-11)

## [v1.75.8](https://github.com/wanderer-industries/wanderer/compare/v1.75.7...v1.75.8) (2025-08-11)

## [v1.75.7](https://github.com/wanderer-industries/wanderer/compare/v1.75.6...v1.75.7) (2025-08-11)

## [v1.75.6](https://github.com/wanderer-industries/wanderer/compare/v1.75.5...v1.75.6) (2025-08-11)

## [v1.75.5](https://github.com/wanderer-industries/wanderer/compare/v1.75.4...v1.75.5) (2025-08-11)

## [v1.75.4](https://github.com/wanderer-industries/wanderer/compare/v1.75.3...v1.75.4) (2025-08-11)

### Bug Fixes:

* restore security audit

## [v1.75.3](https://github.com/wanderer-industries/wanderer/compare/v1.75.2...v1.75.3) (2025-08-10)

### Bug Fixes:

* core: Fixed character tracking issues

## [v1.75.2](https://github.com/wanderer-industries/wanderer/compare/v1.75.1...v1.75.2) (2025-08-10)
Dockerfile
@@ -21,21 +21,17 @@ RUN mkdir config
# to ensure any relevant config change will trigger the dependencies
# to be re-compiled.
COPY config/config.exs config/${MIX_ENV}.exs config/

COPY priv priv

COPY lib lib

COPY assets assets

-RUN mix compile

RUN mix assets.deploy
+RUN mix compile

# Changes to config/runtime.exs don't require recompiling the code
COPY config/runtime.exs config/

COPY rel rel

RUN mix release

# start a new build stage so that the final image will only contain
config/config.exs
@@ -102,6 +102,23 @@ config :error_tracker,
  repo: WandererApp.Repo,
  otp_app: :wanderer_app

# Security Audit Configuration
config :wanderer_app, WandererApp.SecurityAudit,
  enabled: true,
  # Set to true in production for better performance
  async: false,
  batch_size: 100,
  flush_interval: 5000,
  log_level: :info,
  threat_detection: %{
    enabled: true,
    max_failed_attempts: 5,
    max_permission_denials: 10,
    window_seconds: 300,
    bulk_operation_threshold: 10000
  },
  retention_days: 90

config :git_ops,
  mix_project: Mix.Project.get!(),
  changelog_file: "CHANGELOG.md",
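The SecurityAudit block added above is ordinary application config, so any module can read it at runtime. Below is a minimal sketch of how a caller might pull these values; the helper module name is illustrative and not part of this diff, but the keys and defaults mirror the config block (and the `async_enabled?/0` helper that appears later in `security_audit.ex`).

```elixir
# Hypothetical helper: reads the SecurityAudit config added above.
# Keys and defaults mirror the config block; the module name is illustrative.
defmodule WandererApp.SecurityAudit.Config do
  @spec threat_detection() :: map()
  def threat_detection do
    :wanderer_app
    |> Application.get_env(WandererApp.SecurityAudit, [])
    |> Keyword.get(:threat_detection, %{enabled: false})
  end

  @spec async?() :: boolean()
  def async? do
    :wanderer_app
    |> Application.get_env(WandererApp.SecurityAudit, [])
    |> Keyword.get(:async, false)
  end
end
```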
config/prod.exs
@@ -27,5 +27,8 @@ config :swoosh, local: false
config :logger,
  level: :info

# Enable async security audit processing in production
config :wanderer_app, WandererApp.SecurityAudit, async: true

# Runtime production configuration, including reading
# of environment variables, is done on config/runtime.exs.
lib/wanderer_app/api/user_activity.ex
@@ -145,7 +145,12 @@ defmodule WandererApp.Api.UserActivity do
      :admin_action,
      :config_change,
      :bulk_operation,
-     :security_alert
+     :security_alert,
+     # Subscription events
+     :subscription_created,
+     :subscription_updated,
+     :subscription_deleted,
+     :subscription_unknown
    ]
  )
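With the enum extended, subscription events persist through the same resource as other activity. A hedged sketch of a call site, using the `UserActivity.new/1` attribute shape that appears elsewhere in this diff; the IDs are placeholders:

```elixir
# Illustrative only: records a subscription event with one of the new enum values.
# UserActivity.new/1 and the attribute names come from this diff; the IDs are fake.
{:ok, _activity} =
  WandererApp.Api.UserActivity.new(%{
    user_id: "00000000-0000-0000-0000-000000000001",
    entity_type: :map,
    entity_id: "00000000-0000-0000-0000-000000000002",
    event_type: :subscription_created,
    event_data: Jason.encode!(%{plan: "active"})
  })
```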
lib/wanderer_app/application.ex
@@ -45,6 +45,9 @@ defmodule WandererApp.Application do
      Supervisor.child_spec({Cachex, name: :tracked_characters},
        id: :tracked_characters_cache_worker
      ),
+     Supervisor.child_spec({Cachex, name: :wanderer_app_cache},
+       id: :wanderer_app_cache_worker
+     ),
      {Registry, keys: :unique, name: WandererApp.MapRegistry},
      {Registry, keys: :unique, name: WandererApp.Character.TrackerRegistry},
      {PartitionSupervisor,
@@ -60,6 +63,14 @@ defmodule WandererApp.Application do
    if Application.get_env(:wanderer_app, :environment) == :test do
      []
    else
+     security_audit_children =
+       if Application.get_env(:wanderer_app, WandererApp.SecurityAudit, [])
+          |> Keyword.get(:async, false) do
+         [WandererApp.SecurityAudit.AsyncProcessor]
+       else
+         []
+       end
+
      [
        WandererApp.Esi.InitClientsTask,
        WandererApp.Scheduler,
@@ -68,7 +79,7 @@ defmodule WandererApp.Application do
        {WandererApp.Character.TrackerPoolSupervisor, []},
        WandererApp.Character.TrackerManager,
        WandererApp.Map.Manager
-     ]
+     ] ++ security_audit_children
    end

    children =
lib/wanderer_app/audit/request_context.ex (new file, 150 lines)
@@ -0,0 +1,150 @@
defmodule WandererApp.Audit.RequestContext do
  @moduledoc """
  Provides utilities for extracting request context information
  for audit logging purposes.
  """

  require Logger

  @doc """
  Extract the client's IP address from the connection.

  Simply returns the remote_ip from the connection.
  """
  def get_ip_address(conn) do
    conn.remote_ip
    |> :inet.ntoa()
    |> to_string()
  rescue
    error ->
      Logger.warning("Failed to get IP address: #{inspect(error)}",
        error: error,
        stacktrace: __STACKTRACE__
      )

      "unknown"
  end

  @doc """
  Extract the user agent from the request headers.
  """
  def get_user_agent(conn) do
    get_header(conn, "user-agent") || "unknown"
  end

  @doc """
  Extract or generate a session ID for the request.
  """
  def get_session_id(conn) do
    # Try to get from session
    session_id = get_session(conn, :session_id)

    # Fall back to request ID
    session_id || get_request_id(conn)
  end

  @doc """
  Extract or generate a request ID for correlation.
  """
  def get_request_id(conn) do
    # Try standard request ID headers
    get_header(conn, "x-request-id") ||
      get_header(conn, "x-correlation-id") ||
      Logger.metadata()[:request_id] ||
      generate_request_id()
  end

  @doc """
  Build a complete request metadata map for audit logging.
  """
  def build_request_metadata(conn) do
    %{
      ip_address: get_ip_address(conn),
      user_agent: get_user_agent(conn),
      session_id: get_session_id(conn),
      request_id: get_request_id(conn),
      request_path: conn.request_path,
      method: conn.method |> to_string() |> String.upcase(),
      host: conn.host,
      port: conn.port,
      scheme: conn.scheme |> to_string()
    }
  end

  @doc """
  Extract user information from the connection.

  Returns a map with user_id and any additional user context.
  """
  def get_user_info(conn) do
    case conn.assigns[:current_user] do
      %{id: user_id} = user ->
        %{
          user_id: user_id,
          username: Map.get(user, :username),
          email: Map.get(user, :email)
        }

      nil ->
        %{user_id: nil}
    end
  end

  @doc """
  Build a minimal request details map for audit events.

  This is used by existing audit calls that expect specific fields.
  """
  def build_request_details(conn) do
    metadata = build_request_metadata(conn)

    %{
      ip_address: metadata.ip_address,
      user_agent: metadata.user_agent,
      session_id: metadata.session_id,
      request_path: metadata.request_path,
      method: metadata.method
    }
  end

  @doc """
  Set request context in the process dictionary for async logging.
  """
  def set_request_context(conn) do
    context = %{
      metadata: build_request_metadata(conn),
      user_info: get_user_info(conn),
      timestamp: DateTime.utc_now()
    }

    Process.put(:audit_request_context, context)
    conn
  end

  @doc """
  Get request context from the process dictionary.
  """
  def get_request_context do
    Process.get(:audit_request_context)
  end

  # Private functions

  defp get_header(conn, header) do
    case Plug.Conn.get_req_header(conn, header) do
      [value | _] -> value
      [] -> nil
    end
  end

  defp get_session(conn, key) do
    conn
    |> Plug.Conn.get_session(key)
  rescue
    _ -> nil
  end

  defp generate_request_id do
    "req_#{:crypto.strong_rand_bytes(16) |> Base.url_encode64(padding: false)}"
  end
end
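In a Phoenix app these helpers would typically run once per request from a plug. A minimal sketch under that assumption; the plug module itself is not part of this diff, only `set_request_context/1` is:

```elixir
# Hypothetical plug: captures audit context early in the pipeline so later
# (possibly async) audit calls can read it from the process dictionary.
defmodule WandererAppWeb.Plugs.AuditContext do
  @behaviour Plug

  alias WandererApp.Audit.RequestContext

  @impl true
  def init(opts), do: opts

  @impl true
  def call(conn, _opts) do
    # Stores metadata + user info in the process dictionary and returns conn.
    RequestContext.set_request_context(conn)
  end
end
```

`RequestContext.build_request_details/1` then gives audit calls the `ip_address`, `user_agent`, and `session_id` fields that `SecurityAudit` expects.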
lib/wanderer_app/character.ex
@@ -28,7 +28,7 @@ defmodule WandererApp.Character do
        Cachex.put(:character_cache, character_id, character)
        {:ok, character}

-     _ ->
+     error ->
        {:error, :not_found}
    end

@@ -283,39 +283,44 @@ defmodule WandererApp.Character do
    |> case do
      {:ok, settings} when not is_nil(settings) ->
        character
        |> Map.put(:online, false)
        |> Map.merge(settings)
        |> Map.merge(%{
          solar_system_id: settings.solar_system_id,
          structure_id: settings.structure_id,
          station_id: settings.station_id,
          ship: settings.ship,
          ship_name: settings.ship_name,
          ship_item_id: settings.ship_item_id
        })

      _ ->
        character
        |> Map.put(:online, false)
        |> Map.merge(@default_character_tracking_data)
    end
-   |> Map.merge(%{tracking_paused: tracking_paused})
+   |> Map.merge(%{online: false, tracking_paused: tracking_paused})
  end

  defp prepare_search_results(result) do
    {:ok, characters} =
-     _load_eve_info(Map.get(result, "character"), :get_character_info, &_map_character_info/1)
+     load_eve_info(Map.get(result, "character"), :get_character_info, &map_character_info/1)

    {:ok, corporations} =
-     _load_eve_info(
+     load_eve_info(
        Map.get(result, "corporation"),
        :get_corporation_info,
-       &_map_corporation_info/1
+       &map_corporation_info/1
      )

    {:ok, alliances} =
-     _load_eve_info(Map.get(result, "alliance"), :get_alliance_info, &_map_alliance_info/1)
+     load_eve_info(Map.get(result, "alliance"), :get_alliance_info, &map_alliance_info/1)

    [[characters | corporations] | alliances] |> List.flatten()
  end

- defp _load_eve_info(nil, _, _), do: {:ok, []}
+ defp load_eve_info(nil, _, _), do: {:ok, []}

- defp _load_eve_info([], _, _), do: {:ok, []}
+ defp load_eve_info([], _, _), do: {:ok, []}

- defp _load_eve_info(eve_ids, method, map_function),
+ defp load_eve_info(eve_ids, method, map_function),
    do:
      {:ok,
       Enum.map(eve_ids, fn eve_id ->
@@ -331,7 +336,7 @@ defmodule WandererApp.Character do
       end)
       |> Enum.filter(fn result -> not is_nil(result) end)}

- defp _map_alliance_info(info) do
+ defp map_alliance_info(info) do
    %{
      label: info["name"],
      value: info["eve_id"] |> to_string(),
@@ -339,7 +344,7 @@ defmodule WandererApp.Character do
    }
  end

- defp _map_character_info(info) do
+ defp map_character_info(info) do
    %{
      label: info["name"],
      value: info["eve_id"] |> to_string(),
@@ -347,7 +352,7 @@ defmodule WandererApp.Character do
    }
  end

- defp _map_corporation_info(info) do
+ defp map_corporation_info(info) do
    %{
      label: info["name"],
      value: info["eve_id"] |> to_string(),
lib/wanderer_app/map/audit.ex
@@ -1,6 +1,9 @@
defmodule WandererApp.Map.Audit do
  @moduledoc """
  Manager map subscription plans

+ This module now delegates to SecurityAudit for consistency.
+ It maintains backward compatibility while using the centralized audit system.
  """

  require Ash.Query
@@ -13,19 +16,15 @@ defmodule WandererApp.Map.Audit do
  @audit_expired_seconds @month_seconds * 3

  def track_map_subscription_event(event_type, metadata) do
-   case event_type do
-     "subscription.created" ->
-       track_map_event(event_type, metadata)
+   mapped_type =
+     case event_type do
+       "subscription.created" -> :subscription_created
+       "subscription.updated" -> :subscription_updated
+       "subscription.deleted" -> :subscription_deleted
+       _ -> :subscription_unknown
+     end

-     "subscription.updated" ->
-       track_map_event(event_type, metadata)
-
-     "subscription.deleted" ->
-       track_map_event(event_type, metadata)
-
-     _ ->
-       {:ok, nil}
-   end
+   track_map_event(mapped_type, metadata)
  end

  def archive() do
@@ -40,191 +39,16 @@ defmodule WandererApp.Map.Audit do
  end

  def get_activity_query(map_id, period, activity) do
-   {from, to} = period |> get_period()
-
-   query =
-     WandererApp.Api.UserActivity
-     |> Ash.Query.filter(
-       and: [
-         [entity_id: map_id],
-         [inserted_at: [greater_than_or_equal: from]],
-         [inserted_at: [less_than_or_equal: to]]
-       ]
-     )
-
-   query =
-     activity
-     |> case do
-       "all" ->
-         query
-
-       activity ->
-         query
-         |> Ash.Query.filter(event_type: activity)
-     end
-
-   query
-   |> Ash.Query.sort(inserted_at: :desc)
+   SecurityAudit.get_map_activity_query(map_id, period, activity)
  end

- @doc """
- Get combined activity including security events for a map.
- """
- def get_combined_activity_query(map_id, period, activity) do
-   {from, to} = period |> get_period()
-
-   # Get regular map activity
-   map_query = get_activity_query(map_id, period, activity)
-
-   # Get security events related to this map
-   security_query =
-     WandererApp.Api.UserActivity
-     |> Ash.Query.filter(entity_type: :security_event)
-     |> Ash.Query.filter(inserted_at: [greater_than_or_equal: from])
-     |> Ash.Query.filter(inserted_at: [less_than_or_equal: to])
-     |> Ash.Query.sort(inserted_at: :desc)
-
-   # Execute both queries and combine results
-   case {Ash.read(map_query), Ash.read(security_query)} do
-     {{:ok, map_activities}, {:ok, security_activities}} ->
-       # Combine and sort by timestamp
-       combined =
-         (map_activities ++ security_activities)
-         |> Enum.sort_by(& &1.inserted_at, {:desc, DateTime})
-
-       {:ok, combined}
-
-     {{:error, _} = error, _} ->
-       error
-
-     {_, {:error, _} = error} ->
-       error
-   end
- end
+ def track_acl_event(event_type, metadata) do
+   SecurityAudit.track_acl_event(event_type, metadata)
+ end

- @doc """
- Get security events for a specific map.
- """
- def get_security_events_for_map(map_id, period \\ "1D") do
-   {from, to} = period |> get_period()
-
-   # Get security events that might be related to this map
-   # This could include data access events, permission denied events, etc.
-   SecurityAudit.get_events_in_range(from, to)
-   |> Enum.filter(fn event ->
-     case Jason.decode(event.event_data || "{}") do
-       {:ok, data} ->
-         # Check if the event data contains references to this map
-         data["resource_id"] == map_id ||
-           data["entity_id"] == map_id ||
-           data["map_id"] == map_id
-
-       _ ->
-         false
-     end
-   end)
- end
+ def track_map_event(event_type, metadata) do
+   SecurityAudit.track_map_event(event_type, metadata)
+ end

- def track_acl_event(
-       event_type,
-       %{user_id: user_id, acl_id: acl_id} = metadata
-     )
-     when not is_nil(user_id) and not is_nil(acl_id),
-     do:
-       WandererApp.Api.UserActivity.new(%{
-         user_id: user_id,
-         entity_type: :access_list,
-         entity_id: acl_id,
-         event_type: event_type,
-         event_data: metadata |> Map.drop([:user_id, :acl_id]) |> Jason.encode!()
-       })
-
- def track_acl_event(_event_type, _metadata), do: {:ok, nil}
-
- def track_map_event(
-       event_type,
-       %{character_id: character_id, user_id: user_id, map_id: map_id} = metadata
-     )
-     when not is_nil(character_id) and not is_nil(user_id) and not is_nil(map_id) do
-   # Log regular map activity
-   result =
-     WandererApp.Api.UserActivity.new(%{
-       character_id: character_id,
-       user_id: user_id,
-       entity_type: :map,
-       entity_id: map_id,
-       event_type: event_type,
-       event_data: metadata |> Map.drop([:character_id, :user_id, :map_id]) |> Jason.encode!()
-     })
-
-   # Also log security-relevant map events
-   if security_relevant_event?(event_type) do
-     SecurityAudit.log_data_access(
-       "map",
-       map_id,
-       user_id,
-       event_type,
-       metadata
-     )
-   end
-
-   result
- end
-
- def track_map_event(_event_type, _metadata), do: {:ok, nil}
-
- defp get_period("1H") do
-   now = DateTime.utc_now()
-   start_date = now |> DateTime.add(-1 * 3600, :second)
-   {start_date, now}
- end
-
- defp get_period("1D") do
-   now = DateTime.utc_now()
-   start_date = now |> DateTime.add(-24 * 3600, :second)
-   {start_date, now}
- end
-
- defp get_period("1W") do
-   now = DateTime.utc_now()
-   start_date = now |> DateTime.add(-24 * 3600 * 7, :second)
-   {start_date, now}
- end
-
- defp get_period("1M") do
-   now = DateTime.utc_now()
-   start_date = now |> DateTime.add(-24 * 3600 * 31, :second)
-   {start_date, now}
- end
-
- defp get_period("2M") do
-   now = DateTime.utc_now()
-   start_date = now |> DateTime.add(-24 * 3600 * 31 * 2, :second)
-   {start_date, now}
- end
-
- defp get_period("3M") do
-   now = DateTime.utc_now()
-   start_date = now |> DateTime.add(-24 * 3600 * 31 * 3, :second)
-   {start_date, now}
- end
-
- defp get_period(_), do: get_period("1H")
-
- defp get_expired_at(), do: DateTime.utc_now() |> DateTime.add(-@audit_expired_seconds, :second)
-
- defp security_relevant_event?(event_type) do
-   # Define which map events should also be logged as security events
-   event_type in [
-     :map_acl_added,
-     :map_acl_removed,
-     :map_acl_updated,
-     :map_acl_member_added,
-     :map_acl_member_removed,
-     :map_acl_member_updated,
-     :map_removed,
-     :character_added,
-     :character_removed
-   ]
- end
end
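After this rework, the string event names are mapped to the new enum atoms before delegation. For illustration, a call like the following now resolves to `:subscription_created` and lands in `SecurityAudit.track_map_event/2`; the IDs are placeholders:

```elixir
# Illustrative call: the string is mapped to :subscription_created internally,
# and unknown strings fall back to :subscription_unknown.
WandererApp.Map.Audit.track_map_subscription_event("subscription.created", %{
  character_id: "char-id",
  user_id: "user-id",
  map_id: "map-id",
  plan: "active"
})
```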
lib/wanderer_app/security_audit.ex (new file, 898 lines)
@@ -0,0 +1,898 @@
defmodule WandererApp.SecurityAudit do
  @moduledoc """
  Comprehensive security audit logging system.

  This module provides centralized logging for security-related events including:
  - Authentication events (login, logout, failures)
  - Authorization events (permission denied, privilege escalation)
  - Data access events (sensitive queries, bulk exports)
  - Configuration changes and admin actions
  """

  require Logger
  require Ash.Query

  alias WandererApp.Api.UserActivity

  @doc """
  Log a security event with structured data.

  ## Examples

      iex> WandererApp.SecurityAudit.log_event(:auth_success, user_id, %{
      ...>   ip_address: "192.168.1.100",
      ...>   user_agent: "Mozilla/5.0...",
      ...>   auth_method: "session"
      ...> })
      :ok
  """
  def log_event(event_type, user_id, details \\ %{}) do
    audit_entry = %{
      event_type: event_type,
      user_id: user_id,
      timestamp: DateTime.utc_now(),
      details: details,
      severity: determine_severity(event_type),
      session_id: details[:session_id],
      ip_address: details[:ip_address],
      user_agent: details[:user_agent]
    }

    # Store in database
    store_audit_entry(audit_entry)

    # Send to telemetry for monitoring
    emit_telemetry_event(audit_entry)

    # Log to application logs
    log_to_application_log(audit_entry)

    # Check for security alerts
    check_security_alerts(audit_entry)

    :ok
  end

  @doc """
  Log authentication events.
  """
  def log_auth_event(event_type, user_id, request_details) do
    # Start with the basic required fields
    details = %{
      ip_address: request_details[:ip_address],
      user_agent: request_details[:user_agent],
      auth_method: request_details[:auth_method],
      session_id: request_details[:session_id]
    }

    # Merge any additional fields from request_details
    details = Map.merge(details, request_details)

    log_event(event_type, user_id, details)
  end

  @doc """
  Log data access events.
  """
  def log_data_access(resource_type, resource_id, user_id, action, request_details \\ %{}) do
    details = %{
      resource_type: resource_type,
      resource_id: resource_id,
      action: action,
      ip_address: request_details[:ip_address],
      user_agent: request_details[:user_agent],
      session_id: request_details[:session_id]
    }

    log_event(:data_access, user_id, details)
  end

  @doc """
  Log permission denied events.
  """
  def log_permission_denied(
        resource_type,
        resource_id,
        user_id,
        attempted_action,
        request_details \\ %{}
      ) do
    details = %{
      resource_type: resource_type,
      resource_id: resource_id,
      attempted_action: attempted_action,
      ip_address: request_details[:ip_address],
      user_agent: request_details[:user_agent],
      session_id: request_details[:session_id]
    }

    log_event(:permission_denied, user_id, details)
  end

  @doc """
  Log admin actions.
  """
  def log_admin_action(action, user_id, target_resource, request_details \\ %{}) do
    details = %{
      action: action,
      target_resource: target_resource,
      ip_address: request_details[:ip_address],
      user_agent: request_details[:user_agent],
      session_id: request_details[:session_id]
    }

    log_event(:admin_action, user_id, details)
  end

  @doc """
  Log configuration changes.
  """
  def log_config_change(config_key, old_value, new_value, user_id, request_details \\ %{}) do
    details = %{
      config_key: config_key,
      old_value: sanitize_sensitive_data(old_value),
      new_value: sanitize_sensitive_data(new_value),
      ip_address: request_details[:ip_address],
      user_agent: request_details[:user_agent],
      session_id: request_details[:session_id]
    }

    log_event(:config_change, user_id, details)
  end

  @doc """
  Log bulk data operations.
  """
  def log_bulk_operation(operation_type, record_count, user_id, request_details \\ %{}) do
    details = %{
      operation_type: operation_type,
      record_count: record_count,
      ip_address: request_details[:ip_address],
      user_agent: request_details[:user_agent],
      session_id: request_details[:session_id]
    }

    log_event(:bulk_operation, user_id, details)
  end

  @doc """
  Get audit events for a specific user.
  """
  def get_user_audit_events(user_id, limit \\ 100) do
    UserActivity
    |> Ash.Query.filter(user_id: user_id)
    |> Ash.Query.filter(entity_type: :security_event)
    |> Ash.Query.sort(inserted_at: :desc)
    |> Ash.Query.limit(limit)
    |> Ash.read!()
  end

  @doc """
  Get recent security events.
  """
  def get_recent_events(limit \\ 50) do
    UserActivity
    |> Ash.Query.filter(entity_type: :security_event)
    |> Ash.Query.sort(inserted_at: :desc)
    |> Ash.Query.limit(limit)
    |> Ash.read!()
  end

  @doc """
  Get security events by type.
  """
  def get_events_by_type(event_type, limit \\ 50) do
    UserActivity
    |> Ash.Query.filter(entity_type: :security_event)
    |> Ash.Query.filter(event_type: event_type)
    |> Ash.Query.sort(inserted_at: :desc)
    |> Ash.Query.limit(limit)
    |> Ash.read!()
  end

  @doc """
  Get security events within a time range.
  """
  def get_events_in_range(from_datetime, to_datetime, limit \\ 100) do
    UserActivity
    |> Ash.Query.filter(entity_type: :security_event)
    |> Ash.Query.filter(inserted_at: [greater_than_or_equal: from_datetime])
    |> Ash.Query.filter(inserted_at: [less_than_or_equal: to_datetime])
    |> Ash.Query.sort(inserted_at: :desc)
    |> Ash.Query.limit(limit)
    |> Ash.read!()
  end

  @doc """
  Track map-related events (compatibility with Map.Audit).
  """
  def track_map_event(
        event_type,
        %{character_id: character_id, user_id: user_id, map_id: map_id} = metadata
      )
      when not is_nil(character_id) and not is_nil(user_id) and not is_nil(map_id) do
    # Sanitize and prepare metadata
    sanitized_metadata =
      metadata
      |> Map.drop([:character_id, :user_id, :map_id])
      |> sanitize_metadata()

    attrs = %{
      character_id: character_id,
      user_id: user_id,
      entity_type: :map,
      entity_id: map_id,
      event_type: normalize_event_type(event_type),
      event_data: Jason.encode!(sanitized_metadata)
    }

    case UserActivity.new(attrs) do
      {:ok, activity} ->
        {:ok, activity}

      {:error, error} ->
        Logger.error("Failed to track map event",
          error: inspect(error),
          event_type: event_type,
          map_id: map_id
        )

        {:error, error}
    end
  end

  def track_map_event(_event_type, _metadata), do: {:ok, nil}

  @doc """
  Track ACL-related events (compatibility with Map.Audit).
  """
  def track_acl_event(
        event_type,
        %{user_id: user_id, acl_id: acl_id} = metadata
      )
      when not is_nil(user_id) and not is_nil(acl_id) do
    # Sanitize and prepare metadata
    sanitized_metadata =
      metadata
      |> Map.drop([:user_id, :acl_id])
      |> sanitize_metadata()

    attrs = %{
      user_id: user_id,
      entity_type: :access_list,
      entity_id: acl_id,
      event_type: normalize_event_type(event_type),
      event_data: Jason.encode!(sanitized_metadata)
    }

    case UserActivity.new(attrs) do
      {:ok, activity} ->
        {:ok, activity}

      {:error, error} ->
        Logger.error("Failed to track ACL event",
          error: inspect(error),
          event_type: event_type,
          acl_id: acl_id
        )

        {:error, error}
    end
  end

  def track_acl_event(_event_type, _metadata), do: {:ok, nil}

  @doc """
  Get activity query for maps (compatibility with Map.Audit).
  """
  def get_map_activity_query(map_id, period, activity \\ "all") do
    {from, to} = get_period(period)

    query =
      UserActivity
      |> Ash.Query.filter(
        and: [
          [entity_id: map_id],
          [inserted_at: [greater_than_or_equal: from]],
          [inserted_at: [less_than_or_equal: to]]
        ]
      )

    query =
      case activity do
        "all" ->
          query

        activity ->
          query
          |> Ash.Query.filter(event_type: normalize_event_type(activity))
      end

    query
    |> Ash.Query.sort(inserted_at: :desc)
  end

  defp get_period("1H") do
    now = DateTime.utc_now()
    start_date = DateTime.add(now, -1 * 3600, :second)
    {start_date, now}
  end

  defp get_period("1D") do
    now = DateTime.utc_now()
    start_date = DateTime.add(now, -24 * 3600, :second)
    {start_date, now}
  end

  defp get_period("1W") do
    now = DateTime.utc_now()
    start_date = DateTime.add(now, -24 * 3600 * 7, :second)
    {start_date, now}
  end

  defp get_period("1M") do
    now = DateTime.utc_now()
    start_date = DateTime.add(now, -24 * 3600 * 31, :second)
    {start_date, now}
  end

  defp get_period("2M") do
    now = DateTime.utc_now()
    start_date = DateTime.add(now, -24 * 3600 * 31 * 2, :second)
    {start_date, now}
  end

  defp get_period("3M") do
    now = DateTime.utc_now()
    start_date = DateTime.add(now, -24 * 3600 * 31 * 3, :second)
    {start_date, now}
  end

  defp get_period(_), do: get_period("1H")

  @doc """
  Check for suspicious patterns in user activity.
  """
  def analyze_user_behavior(user_id, time_window \\ 3600) do
    now = DateTime.utc_now()
    from_time = DateTime.add(now, -time_window, :second)

    # Get recent activities
    activities =
      UserActivity
      |> Ash.Query.filter(user_id: user_id)
      |> Ash.Query.filter(entity_type: :security_event)
      |> Ash.Query.filter(inserted_at: [greater_than_or_equal: from_time])
      |> Ash.Query.sort(inserted_at: :desc)
      |> Ash.read!()

    # Analyze patterns
    patterns = analyze_patterns(activities)
    risk_score = calculate_risk_score(patterns)
    recommendations = generate_recommendations(patterns, risk_score)

    %{
      risk_score: risk_score,
      suspicious_patterns: patterns,
      recommendations: recommendations,
      activities_analyzed: length(activities),
      time_window_seconds: time_window
    }
  end

  defp analyze_patterns(activities) do
    patterns = []

    # Count by event type
    event_counts = Enum.frequencies_by(activities, & &1.event_type)

    # Check for multiple auth failures
    auth_failures = Map.get(event_counts, :auth_failure, 0)

    patterns =
      if auth_failures >= 3 do
        [{:multiple_auth_failures, auth_failures} | patterns]
      else
        patterns
      end

    # Check for permission denied spikes
    permission_denied = Map.get(event_counts, :permission_denied, 0)

    patterns =
      if permission_denied >= 5 do
        [{:excessive_permission_denials, permission_denied} | patterns]
      else
        patterns
      end

    # Check for rapid activity (more than 100 events in time window)
    patterns =
      if length(activities) > 100 do
        [{:high_activity_volume, length(activities)} | patterns]
      else
        patterns
      end

    # Check for geographic anomalies by analyzing unique IPs
    unique_ips =
      activities
      |> Enum.map(fn activity ->
        case Jason.decode(activity.event_data || "{}") do
          {:ok, data} -> data["ip_address"]
          _ -> nil
        end
      end)
      |> Enum.reject(&is_nil/1)
      |> Enum.uniq()
      |> length()

    patterns =
      if unique_ips > 5 do
        [{:multiple_ip_addresses, unique_ips} | patterns]
      else
        patterns
      end

    patterns
  end

  defp calculate_risk_score(patterns) do
    score =
      Enum.reduce(patterns, 0, fn
        {:multiple_auth_failures, count}, acc -> acc + count * 2
        {:excessive_permission_denials, count}, acc -> acc + count * 1.5
        {:high_activity_volume, _}, acc -> acc + 5
        {:multiple_ip_addresses, count}, acc -> acc + count * 3
        _, acc -> acc
      end)

    cond do
      score >= 20 -> :critical
      score >= 10 -> :high
      score >= 5 -> :medium
      true -> :low
    end
  end

  defp generate_recommendations(patterns, risk_score) do
    base_recommendations =
      case risk_score do
        :critical -> ["Immediate review required", "Consider blocking user temporarily"]
        :high -> ["Monitor user activity closely", "Review recent actions"]
        :medium -> ["Keep user under observation"]
        :low -> []
      end

    pattern_recommendations =
      Enum.flat_map(patterns, fn
        {:multiple_auth_failures, _} ->
          ["Reset user password", "Enable MFA"]

        {:excessive_permission_denials, _} ->
          ["Review user permissions", "Check for compromised account"]

        {:high_activity_volume, _} ->
          ["Check for automated activity", "Review API usage"]

        {:multiple_ip_addresses, _} ->
          ["Verify user location changes", "Check for account sharing"]

        _ ->
          []
      end)

    Enum.uniq(base_recommendations ++ pattern_recommendations)
  end
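A quick usage sketch for the behavior analysis above; the result fields come straight from the map `analyze_user_behavior/2` builds, while the user ID and the reactions are illustrative:

```elixir
# Illustrative: analyze the last 15 minutes of security events for a user
# and react to the composite risk score. Field names match the map built above.
require Logger

case WandererApp.SecurityAudit.analyze_user_behavior("user-id", 900) do
  %{risk_score: :critical, suspicious_patterns: patterns} ->
    # e.g. page an operator; this reaction is hypothetical
    Logger.error("Critical risk user", patterns: inspect(patterns))

  %{risk_score: score, activities_analyzed: n} ->
    Logger.debug("Risk #{score} across #{n} events")
end
```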
  # Private functions

  defp store_audit_entry(audit_entry) do
    # Handle async processing if enabled
    if async_enabled?() do
      WandererApp.SecurityAudit.AsyncProcessor.log_event(audit_entry)
    else
      do_store_audit_entry(audit_entry)
    end
  end

  @doc false
  def do_store_audit_entry(audit_entry) do
    # Ensure event_type is properly formatted
    event_type = normalize_event_type(audit_entry.event_type)

    attrs = %{
      user_id: audit_entry.user_id,
      character_id: nil,
      entity_id: hash_identifier(audit_entry.session_id),
      entity_type: :security_event,
      event_type: event_type,
      event_data: encode_event_data(audit_entry)
    }

    case UserActivity.new(attrs) do
      {:ok, _activity} ->
        :ok

      {:error, error} ->
        Logger.error("Failed to store security audit entry",
          error: inspect(error),
          event_type: event_type,
          user_id: audit_entry.user_id
        )

        # Emit telemetry for monitoring
        :telemetry.execute(
          [:wanderer_app, :security_audit, :storage_error],
          %{count: 1},
          %{event_type: event_type, error: error}
        )

        # Don't block the request, but track the failure
        {:error, :storage_failed}
    end
  end
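`store_audit_entry/1` hands entries to `WandererApp.SecurityAudit.AsyncProcessor` when `async: true`, but that module is not shown in this diff. A minimal sketch of what a batching processor could look like, assuming it honors the `batch_size` and `flush_interval` config keys and reuses the public `do_store_audit_entry/1` above; everything else here is hypothetical:

```elixir
# Hypothetical sketch of the AsyncProcessor referenced above. Only the module
# name, log_event/1 entry point, config keys, and do_store_audit_entry/1 are
# taken from this diff; the batching strategy is an assumption.
defmodule WandererApp.SecurityAudit.AsyncProcessor do
  use GenServer

  def start_link(opts \\ []), do: GenServer.start_link(__MODULE__, opts, name: __MODULE__)

  # Called from store_audit_entry/1; never blocks the request process.
  def log_event(audit_entry), do: GenServer.cast(__MODULE__, {:log, audit_entry})

  @impl true
  def init(_opts) do
    config = Application.get_env(:wanderer_app, WandererApp.SecurityAudit, [])

    state = %{
      buffer: [],
      batch_size: Keyword.get(config, :batch_size, 100),
      flush_interval: Keyword.get(config, :flush_interval, 5000)
    }

    schedule_flush(state.flush_interval)
    {:ok, state}
  end

  @impl true
  def handle_cast({:log, entry}, state) do
    buffer = [entry | state.buffer]

    if length(buffer) >= state.batch_size do
      flush(buffer)
      {:noreply, %{state | buffer: []}}
    else
      {:noreply, %{state | buffer: buffer}}
    end
  end

  @impl true
  def handle_info(:flush, state) do
    flush(state.buffer)
    schedule_flush(state.flush_interval)
    {:noreply, %{state | buffer: []}}
  end

  # Oldest-first, synchronous writes through the public storage function.
  defp flush(entries),
    do: entries |> Enum.reverse() |> Enum.each(&WandererApp.SecurityAudit.do_store_audit_entry/1)

  defp schedule_flush(interval), do: Process.send_after(self(), :flush, interval)
end
```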
defp hash_identifier(identifier) when is_binary(identifier) do
|
||||
secret_salt =
|
||||
Application.get_env(:wanderer_app, :secret_key_base) ||
|
||||
raise "SECRET_KEY_BASE not configured"
|
||||
|
||||
:crypto.hash(:sha256, secret_salt <> identifier)
|
||||
|> Base.encode16(case: :lower)
|
||||
end
|
||||
|
||||
defp hash_identifier(nil), do: generate_entity_id()
|
||||
|
||||
defp normalize_event_type(event_type) when is_atom(event_type), do: event_type
|
||||
|
||||
defp normalize_event_type(event_type) when is_binary(event_type) do
|
||||
try do
|
||||
String.to_existing_atom(event_type)
|
||||
rescue
|
||||
ArgumentError -> :security_alert
|
||||
end
|
||||
end
|
||||
|
||||
defp normalize_event_type(_), do: :security_alert
|
||||
|
||||
defp encode_event_data(audit_entry) do
|
||||
sanitized_details = sanitize_for_json(audit_entry.details)
|
||||
|
||||
data =
|
||||
Map.merge(sanitized_details, %{
|
||||
timestamp: convert_datetime(audit_entry.timestamp),
|
||||
severity: to_string(audit_entry.severity),
|
||||
ip_address: audit_entry.ip_address,
|
||||
user_agent: audit_entry.user_agent
|
||||
})
|
||||
|
||||
case Jason.encode(data) do
|
||||
{:ok, json} -> json
|
||||
{:error, _} -> Jason.encode!(%{error: "Failed to encode audit data"})
|
||||
end
|
||||
end
|
||||
|
||||
defp sanitize_for_json(data) when is_map(data) do
|
||||
data
|
||||
|> Enum.reduce(%{}, fn {key, value}, acc ->
|
||||
sanitized_key = to_string(key)
|
||||
|
||||
# Skip sensitive fields
|
||||
if sanitized_key in ~w(password secret token private_key api_key) do
|
||||
acc
|
||||
else
|
||||
Map.put(acc, sanitized_key, sanitize_value(value))
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
defp sanitize_for_json(data), do: sanitize_value(data)
|
||||
|
||||
defp sanitize_metadata(metadata) do
|
||||
# List of sensitive keys to remove from metadata
|
||||
sensitive_keys = [:password, :token, :secret, :api_key, :private_key, :auth_token]
|
||||
|
||||
metadata
|
||||
|> Map.drop(sensitive_keys)
|
||||
|> Enum.map(fn {k, v} ->
|
||||
# Ensure keys are strings or atoms
|
||||
key = if is_binary(k), do: k, else: to_string(k)
|
||||
{key, sanitize_value(v)}
|
||||
end)
|
||||
|> Enum.into(%{})
|
||||
end
|
||||
|
||||
defp sanitize_value(%DateTime{} = dt), do: DateTime.to_iso8601(dt)
|
||||
defp sanitize_value(%NaiveDateTime{} = dt), do: NaiveDateTime.to_iso8601(dt)
|
||||
defp sanitize_value(%Date{} = date), do: Date.to_iso8601(date)
|
||||
defp sanitize_value(%Time{} = time), do: Time.to_iso8601(time)
|
||||
|
||||
defp sanitize_value(atom) when is_atom(atom) and not is_nil(atom) and not is_boolean(atom),
|
||||
do: to_string(atom)
|
||||
|
||||
defp sanitize_value(list) when is_list(list), do: Enum.map(list, &sanitize_value/1)
|
||||
defp sanitize_value(map) when is_map(map), do: sanitize_for_json(map)
|
||||
defp sanitize_value(value), do: value
|
||||
|
||||
defp convert_datetime(%DateTime{} = dt), do: DateTime.to_iso8601(dt)
|
||||
defp convert_datetime(%NaiveDateTime{} = dt), do: NaiveDateTime.to_iso8601(dt)
|
||||
defp convert_datetime(value), do: value
|
||||
|
||||
defp generate_entity_id do
|
||||
"audit_#{DateTime.utc_now() |> DateTime.to_unix(:microsecond)}_#{System.unique_integer([:positive])}"
|
||||
end
|
||||
|
||||
defp async_enabled? do
|
||||
Application.get_env(:wanderer_app, __MODULE__, [])
|
||||
|> Keyword.get(:async, false)
|
||||
end
|
||||
|
||||
defp emit_telemetry_event(audit_entry) do
|
||||
:telemetry.execute(
|
||||
[:wanderer_app, :security_audit],
|
||||
%{count: 1},
|
||||
%{
|
||||
event_type: audit_entry.event_type,
|
||||
severity: audit_entry.severity,
|
||||
user_id: audit_entry.user_id
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
defp log_to_application_log(audit_entry) do
|
||||
log_level =
|
||||
case audit_entry.severity do
|
||||
:critical -> :error
|
||||
:high -> :warning
|
||||
:medium -> :info
|
||||
:low -> :debug
|
||||
end
|
||||
|
||||
Logger.log(log_level, "Security audit: #{audit_entry.event_type}",
|
||||
user_id: audit_entry.user_id,
|
||||
timestamp: audit_entry.timestamp,
|
||||
details: audit_entry.details
|
||||
)
|
||||
end
|
||||
|
||||
defp check_security_alerts(audit_entry) do
|
||||
case audit_entry.event_type do
|
||||
:auth_failure ->
|
||||
check_failed_login_attempts(audit_entry)
|
||||
|
||||
:permission_denied ->
|
||||
check_privilege_escalation_attempts(audit_entry)
|
||||
|
||||
:bulk_operation ->
|
||||
check_bulk_data_access(audit_entry)
|
||||
|
||||
:security_alert ->
|
||||
# Already a security alert, don't double-check
|
||||
:ok
|
||||
|
||||
_ ->
|
||||
:ok
|
||||
end
|
||||
end

  defp check_failed_login_attempts(audit_entry) do
    config = threat_detection_config()

    if config[:enabled] do
      ip_address = audit_entry.ip_address || "unknown"
      cache_key = "auth_failures:#{ip_address}"
      window = config[:window_seconds] || 300
      max_attempts = config[:max_failed_attempts] || 5

      # Increment counter in Cachex with TTL
      count =
        case Cachex.incr(:wanderer_app_cache, cache_key) do
          {:ok, count} ->
            # Set TTL on first increment
            if count == 1 do
              Cachex.expire(:wanderer_app_cache, cache_key, :timer.seconds(window))
            end

            count

          {:error, :no_key} ->
            # Key doesn't exist, initialize it with TTL
            case Cachex.put(:wanderer_app_cache, cache_key, 1, ttl: :timer.seconds(window)) do
              {:ok, _} ->
                1

              {:error, error} ->
                Logger.error("Failed to initialize auth failure counter",
                  error: inspect(error),
                  cache_key: cache_key
                )

                1
            end

          {:error, error} ->
            # Other errors - log and return safe default
            Logger.error("Failed to increment auth failure counter",
              error: inspect(error),
              cache_key: cache_key
            )

            1
        end

      if count >= max_attempts do
        Logger.warning("Potential brute force attack detected",
          ip_address: ip_address,
          attempts: count,
          user_id: audit_entry.user_id
        )

        # Emit security alert
        :telemetry.execute(
          [:wanderer_app, :security_audit, :threat_detected],
          %{count: 1},
          %{threat_type: :brute_force, ip_address: ip_address}
        )

        # Log a security alert event
        log_event(:security_alert, audit_entry.user_id, %{
          threat_type: "brute_force",
          ip_address: ip_address,
          failed_attempts: count,
          window_seconds: window
        })
      end
    end

    :ok
  end

  defp check_privilege_escalation_attempts(audit_entry) do
    config = threat_detection_config()

    if config[:enabled] && audit_entry.user_id do
      cache_key = "privilege_escalation:#{audit_entry.user_id}"
      window = config[:window_seconds] || 300
      max_denials = config[:max_permission_denials] || 10

      count =
        case Cachex.incr(:wanderer_app_cache, cache_key) do
          {:ok, count} ->
            if count == 1 do
              Cachex.expire(:wanderer_app_cache, cache_key, :timer.seconds(window))
            end

            count

          {:error, :no_key} ->
            # Key doesn't exist, initialize it with TTL
            case Cachex.put(:wanderer_app_cache, cache_key, 1, ttl: :timer.seconds(window)) do
              {:ok, _} ->
                1

              {:error, error} ->
                Logger.error("Failed to initialize privilege escalation counter",
                  error: inspect(error),
                  cache_key: cache_key
                )

                1
            end

          {:error, error} ->
            # Other errors - log and return safe default
            Logger.error("Failed to increment privilege escalation counter",
              error: inspect(error),
              cache_key: cache_key
            )

            1
        end

      if count >= max_denials do
        Logger.warning("Potential privilege escalation attempt detected",
          user_id: audit_entry.user_id,
          denials: count,
          resource_type: audit_entry.details[:resource_type]
        )

        :telemetry.execute(
          [:wanderer_app, :security_audit, :threat_detected],
          %{count: 1},
          %{threat_type: :privilege_escalation, user_id: audit_entry.user_id}
        )
      end
    end

    :ok
  end
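
  # The two checks above duplicate the same increment-with-TTL pattern; a
  # possible shared helper (a sketch, not part of this change; error logging
  # elided for brevity):
  #
  #     defp incr_with_ttl(cache_key, window) do
  #       case Cachex.incr(:wanderer_app_cache, cache_key) do
  #         {:ok, 1} = ok ->
  #           Cachex.expire(:wanderer_app_cache, cache_key, :timer.seconds(window))
  #           ok
  #
  #         {:ok, _count} = ok ->
  #           ok
  #
  #         {:error, :no_key} ->
  #           Cachex.put(:wanderer_app_cache, cache_key, 1, ttl: :timer.seconds(window))
  #           {:ok, 1}
  #       end
  #     end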

  defp check_bulk_data_access(audit_entry) do
    config = threat_detection_config()

    if config[:enabled] && audit_entry.user_id do
      record_count = audit_entry.details[:record_count] || 0
      threshold = config[:bulk_operation_threshold] || 10000

      if record_count > threshold do
        Logger.warning("Large bulk operation detected",
          user_id: audit_entry.user_id,
          operation_type: audit_entry.details[:operation_type],
          record_count: record_count
        )

        :telemetry.execute(
          [:wanderer_app, :security_audit, :bulk_operation],
          %{record_count: record_count},
          %{user_id: audit_entry.user_id, operation_type: audit_entry.details[:operation_type]}
        )
      end
    end

    :ok
  end

  defp threat_detection_config do
    Application.get_env(:wanderer_app, __MODULE__, [])
    |> Keyword.get(:threat_detection, %{})
  end
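
  # A configuration sketch using the keys and defaults read above (values
  # illustrative):
  #
  #     config :wanderer_app, WandererApp.SecurityAudit,
  #       threat_detection: %{
  #         enabled: true,
  #         window_seconds: 300,
  #         max_failed_attempts: 5,
  #         max_permission_denials: 10,
  #         bulk_operation_threshold: 10_000
  #       }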

  defp determine_severity(event_type) do
    case event_type do
      :auth_failure -> :medium
      :permission_denied -> :high
      :privilege_escalation -> :critical
      :config_change -> :high
      :admin_action -> :medium
      :bulk_operation -> :medium
      :data_access -> :low
      :auth_success -> :low
      _ -> :medium
    end
  end

  defp sanitize_sensitive_data(value) when is_binary(value) do
    # Patterns to detect sensitive data
    sensitive_patterns = [
      ~r/password/i,
      ~r/token/i,
      ~r/secret/i,
      ~r/api[_-]?key/i,
      ~r/private[_-]?key/i,
      ~r/access[_-]?key/i,
      ~r/auth/i,
      ~r/bearer\s+[a-zA-Z0-9\-_]+/i,
      # Long hex strings (potential tokens)
      ~r/[a-f0-9]{32,}/i
    ]

    # Check if value contains sensitive patterns
    is_sensitive = Enum.any?(sensitive_patterns, &Regex.match?(&1, value))

    cond do
      is_sensitive -> "[REDACTED]"
      String.length(value) > 200 -> String.slice(value, 0, 200) <> "..."
      true -> value
    end
  end

  defp sanitize_sensitive_data(value) when is_map(value) do
    # Recursively sanitize map values
    Map.new(value, fn {k, v} ->
      key_str = to_string(k)

      if Regex.match?(~r/password|token|secret|key|auth/i, key_str) do
        {k, "[REDACTED]"}
      else
        {k, sanitize_sensitive_data(v)}
      end
    end)
  end

  defp sanitize_sensitive_data(value) when is_list(value) do
    Enum.map(value, &sanitize_sensitive_data/1)
  end

  defp sanitize_sensitive_data(value), do: value
end
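
As an illustration of the redaction rules above (inputs invented): a string matching a sensitive pattern is replaced wholesale, and map keys matching the key pattern are redacted by key:

    sanitize_sensitive_data("Bearer abc123")            #=> "[REDACTED]"
    sanitize_sensitive_data(%{auth: "x", note: "ok"})   #=> %{auth: "[REDACTED]", note: "ok"}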

246
lib/wanderer_app/security_audit/async_processor.ex
Normal file
@@ -0,0 +1,246 @@
defmodule WandererApp.SecurityAudit.AsyncProcessor do
  @moduledoc """
  GenServer for asynchronous batch processing of security audit events.

  This server buffers audit events in memory and periodically flushes them
  to the database in batches for improved performance.
  """

  use GenServer
  require Logger

  alias WandererApp.SecurityAudit

  @default_batch_size 100
  # 5 seconds
  @default_flush_interval 5_000
  @max_buffer_size 1_000

  defstruct [
    :batch_size,
    :flush_interval,
    :buffer,
    :timer_ref,
    :stats
  ]

  # Client API

  @doc """
  Start the async processor.
  """
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @doc """
  Log an event asynchronously.
  """
  def log_event(audit_entry) do
    GenServer.cast(__MODULE__, {:log_event, audit_entry})
  end

  @doc """
  Force a flush of the buffer.
  """
  def flush do
    GenServer.call(__MODULE__, :flush)
  end

  @doc """
  Get current processor statistics.
  """
  def get_stats do
    GenServer.call(__MODULE__, :get_stats)
  end

  # Server callbacks

  @impl true
  def init(opts) do
    config = Application.get_env(:wanderer_app, WandererApp.SecurityAudit, [])

    batch_size = Keyword.get(opts, :batch_size, config[:batch_size] || @default_batch_size)

    flush_interval =
      Keyword.get(opts, :flush_interval, config[:flush_interval] || @default_flush_interval)

    state = %__MODULE__{
      batch_size: batch_size,
      flush_interval: flush_interval,
      buffer: [],
      timer_ref: nil,
      stats: %{
        events_processed: 0,
        batches_flushed: 0,
        errors: 0,
        last_flush: nil
      }
    }

    # Schedule first flush
    state = schedule_flush(state)

    {:ok, state}
  end

  @impl true
  def handle_cast({:log_event, audit_entry}, state) do
    # Add to buffer
    buffer = [audit_entry | state.buffer]

    # Update stats
    stats = Map.update!(state.stats, :events_processed, &(&1 + 1))

    # Check if we need to flush
    cond do
      length(buffer) >= state.batch_size ->
        # Flush immediately if batch size reached
        {:noreply, do_flush(%{state | buffer: buffer, stats: stats})}

      length(buffer) >= @max_buffer_size ->
        # Force flush if max buffer size reached
        Logger.warning("Security audit buffer overflow, forcing flush",
          buffer_size: length(buffer),
          max_size: @max_buffer_size
        )

        {:noreply, do_flush(%{state | buffer: buffer, stats: stats})}

      true ->
        # Just add to buffer
        {:noreply, %{state | buffer: buffer, stats: stats}}
    end
  end

  @impl true
  def handle_call(:flush, _from, state) do
    new_state = do_flush(state)
    {:reply, :ok, new_state}
  end

  @impl true
  def handle_call(:get_stats, _from, state) do
    stats = Map.put(state.stats, :current_buffer_size, length(state.buffer))
    {:reply, stats, state}
  end

  @impl true
  def handle_info(:flush_timer, state) do
    state =
      if length(state.buffer) > 0 do
        do_flush(state)
      else
        state
      end

    # Schedule next flush
    state = schedule_flush(state)

    {:noreply, state}
  end

  @impl true
  def terminate(_reason, state) do
    # Flush any remaining events on shutdown
    if length(state.buffer) > 0 do
      do_flush(state)
    end

    :ok
  end

  # Private functions

  defp schedule_flush(state) do
    # Cancel existing timer if any
    if state.timer_ref do
      Process.cancel_timer(state.timer_ref)
    end

    # Schedule new timer
    timer_ref = Process.send_after(self(), :flush_timer, state.flush_interval)

    %{state | timer_ref: timer_ref}
  end

  defp do_flush(state) when length(state.buffer) == 0 do
    state
  end

  defp do_flush(state) do
    # Take events to flush (reverse to maintain order)
    events = Enum.reverse(state.buffer)

    # Attempt to store events
    case bulk_store_events(events) do
      {:ok, count} ->
        Logger.debug("Flushed #{count} security audit events")

        # Update stats
        stats =
          state.stats
          |> Map.update!(:batches_flushed, &(&1 + 1))
          |> Map.put(:last_flush, DateTime.utc_now())

        # Clear buffer
        %{state | buffer: [], stats: stats}

      {:error, reason} ->
        Logger.error("Failed to flush security audit events",
          reason: inspect(reason),
          event_count: length(events)
        )

        # Update error stats
        stats = Map.update!(state.stats, :errors, &(&1 + 1))

        # Implement backoff - keep events in buffer but don't grow indefinitely
        buffer =
          if length(state.buffer) > @max_buffer_size do
            Logger.warning("Dropping oldest audit events due to repeated flush failures")
            Enum.take(state.buffer, @max_buffer_size)
          else
            state.buffer
          end

        %{state | buffer: buffer, stats: stats}
    end
  end

  defp bulk_store_events(events) do
    # Process events in smaller chunks if necessary
    events
    # Ash bulk operations work better with smaller chunks
    |> Enum.chunk_every(50)
    |> Enum.reduce_while({:ok, 0}, fn chunk, {:ok, count} ->
      case store_event_chunk(chunk) do
        {:ok, chunk_count} ->
          {:cont, {:ok, count + chunk_count}}

        {:error, _} = error ->
          {:halt, error}
      end
    end)
  end

  defp store_event_chunk(events) do
    # Transform events to Ash attributes
    records =
      Enum.map(events, fn event ->
        SecurityAudit.do_store_audit_entry(event)
      end)

    # Count successful stores
    successful =
      Enum.count(records, fn
        :ok -> true
        _ -> false
      end)

    {:ok, successful}
  rescue
    error ->
      {:error, error}
  end
end
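
A sketch of starting the processor under a supervisor (placement is assumed, not shown in this diff; the options map to start_link/1 above):

    children = [
      {WandererApp.SecurityAudit.AsyncProcessor, batch_size: 200, flush_interval: 10_000}
    ]

    Supervisor.start_link(children, strategy: :one_for_one)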

@@ -2,14 +2,20 @@ defmodule WandererAppWeb.Plugs.CheckJsonApiAuth do
  @moduledoc """
  Plug for authenticating JSON:API v1 endpoints.

  Supports both session-based authentication (for web clients) and
  Bearer token authentication (for API clients).

  Currently, Bearer token authentication only supports map API keys.
  When a valid map API key is provided, the map owner is set as the
  authenticated user and the map is made available in conn.assigns.
  """

  import Plug.Conn

  alias WandererApp.Api.User
  alias WandererApp.SecurityAudit
  alias WandererApp.Audit.RequestContext

  def init(opts), do: opts
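
A test-style sketch of the Bearer scheme the plug parses (Phoenix.ConnTest helpers; the path and key variable are illustrative):

    conn
    |> put_req_header("authorization", "Bearer " <> map_api_key)
    |> get("/api/v1/maps")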

@@ -57,7 +63,8 @@ defmodule WandererAppWeb.Plugs.CheckJsonApiAuth do
        |> assign(:current_user, user)
        |> assign(:current_user_role, get_user_role(user))

      {:error, reason} ->
      {:error, reason} when is_binary(reason) ->
        # Legacy error handling for simple string errors
        end_time = System.monotonic_time(:millisecond)
        duration = end_time - start_time

@@ -82,6 +89,36 @@ defmodule WandererAppWeb.Plugs.CheckJsonApiAuth do
        |> put_resp_content_type("application/json")
        |> send_resp(401, Jason.encode!(%{error: reason}))
        |> halt()

      {:error, external_message, internal_reason} ->
        # New error handling with separate internal and external messages
        end_time = System.monotonic_time(:millisecond)
        duration = end_time - start_time

        # Log failed authentication with detailed internal reason
        request_details = extract_request_details(conn)

        SecurityAudit.log_auth_event(
          :auth_failure,
          nil,
          Map.merge(request_details, %{
            failure_reason: internal_reason,
            external_message: external_message
          })
        )

        # Emit failed authentication event
        :telemetry.execute(
          [:wanderer_app, :json_api, :auth],
          %{count: 1, duration: duration},
          %{auth_type: get_auth_type(conn), result: "failure"}
        )

        conn
        |> put_status(:unauthorized)
        |> put_resp_content_type("application/json")
        |> send_resp(401, Jason.encode!(%{error: external_message}))
        |> halt()
    end
  end

@@ -103,8 +140,6 @@ defmodule WandererAppWeb.Plugs.CheckJsonApiAuth do
  defp authenticate_bearer_token(conn) do
    case get_req_header(conn, "authorization") do
      ["Bearer " <> token] ->
        # For now, use a simple approach - validate token format
        # In the future, this could be extended to support JWT or other token types
        validate_api_token(token)

      _ ->

@@ -113,48 +148,23 @@ defmodule WandererAppWeb.Plugs.CheckJsonApiAuth do
  end

  defp validate_api_token(token) do
    # For test environment, accept test API keys
    if Application.get_env(:wanderer_app, :env) == :test and
         (String.starts_with?(token, "test_") or String.starts_with?(token, "test_api_key_")) do
      # For test tokens, look up the actual map by API key
      case find_map_by_api_key(token) do
        {:ok, map} when not is_nil(map) ->
          # Use the actual map owner as the user
          user = %User{
            id: map.owner_id || Ecto.UUID.generate(),
            name: "Test User",
            hash: "test_hash_#{System.unique_integer([:positive])}"
          }
    # Look up the map by its public API key
    case find_map_by_api_key(token) do
      {:ok, map} when not is_nil(map) ->
        # Get the actual owner of the map
        case User.by_id(map.owner_id, load: :characters) do
          {:ok, user} ->
            # Return the map owner as the authenticated user
            {:ok, user, map}

          {:ok, user, map}
          {:error, _} ->
            # Return generic error with specific reason for internal logging
            {:error, "Authentication failed", :map_owner_not_found}
        end

      _ ->
        # If no map found with this test token, create a test user without a map
        user = %User{
          id: Ecto.UUID.generate(),
          name: "Test User",
          hash: "test_hash_#{System.unique_integer([:positive])}"
        }

        {:ok, user}
      end
    else
      # Look up the map by its public API key
      case find_map_by_api_key(token) do
        {:ok, map} when not is_nil(map) ->
          # Create a user representing API access for this map
          # In a real implementation, you might want to track the actual user who created the API key
          user = %User{
            id: map.owner_id || Ecto.UUID.generate(),
            name: "API User for #{map.name}",
            hash: "api_hash_#{map.id}"
          }

          {:ok, user, map}

        _ ->
          {:error, "Invalid API key"}
      end
      _ ->
        # Return generic error with specific reason for internal logging
        {:error, "Authentication failed", :invalid_api_key}
    end
  end

@@ -192,50 +202,8 @@ defmodule WandererAppWeb.Plugs.CheckJsonApiAuth do
  end

  defp extract_request_details(conn) do
    %{
      ip_address: get_peer_ip(conn),
      user_agent: get_user_agent(conn),
      auth_method: get_auth_type(conn),
      session_id: get_session_id(conn),
      request_path: conn.request_path,
      method: conn.method
    }
  end

  defp get_peer_ip(conn) do
    case get_req_header(conn, "x-forwarded-for") do
      [forwarded_for] ->
        forwarded_for
        |> String.split(",")
        |> List.first()
        |> String.trim()

      [] ->
        case get_req_header(conn, "x-real-ip") do
          [real_ip] ->
            real_ip

          [] ->
            case conn.remote_ip do
              {a, b, c, d} -> "#{a}.#{b}.#{c}.#{d}"
              _ -> "unknown"
            end
        end
    end
  end

  defp get_user_agent(conn) do
    case get_req_header(conn, "user-agent") do
      [user_agent] -> user_agent
      [] -> "unknown"
    end
  end

  defp get_session_id(conn) do
    case get_session(conn, :session_id) do
      nil -> conn.assigns[:request_id] || "unknown"
      session_id -> session_id
    end
    RequestContext.build_request_details(conn)
    |> Map.put(:auth_method, get_auth_type(conn))
  end

  defp maybe_assign_map(conn, nil), do: conn

@@ -13,6 +13,7 @@ defmodule WandererAppWeb.Plugs.ApiVersioning do
  import Plug.Conn

  alias WandererApp.SecurityAudit
  alias WandererApp.Audit.RequestContext

  @supported_versions ["1"]
  @default_version "1"

@@ -260,14 +261,13 @@ defmodule WandererAppWeb.Plugs.ApiVersioning do

  defp log_deprecation_usage(conn, version) do
    user_id = get_user_id(conn)
    request_details = RequestContext.build_request_details(conn)

    SecurityAudit.log_event(:deprecated_api_usage, user_id, %{
      version: version,
      path: conn.request_path,
      method: conn.method,
      user_agent: get_user_agent(conn),
      ip_address: get_peer_ip(conn)
    })
    SecurityAudit.log_event(
      :deprecated_api_usage,
      user_id,
      Map.put(request_details, :version, version)
    )

    conn
  end

@@ -316,12 +316,15 @@ defmodule WandererAppWeb.Plugs.ApiVersioning do

  # Error handling
  defp handle_version_error(conn, reason, _opts) do
    SecurityAudit.log_event(:api_version_error, get_user_id(conn), %{
      reason: reason,
      path: conn.request_path,
      method: conn.method,
      headers: get_version_headers(conn)
    })
    request_details = RequestContext.build_request_details(conn)

    SecurityAudit.log_event(
      :api_version_error,
      get_user_id(conn),
      request_details
      |> Map.put(:reason, reason)
      |> Map.put(:headers, get_version_headers(conn))
    )

    conn
    |> send_version_error(400, "Invalid API version", %{

@@ -376,35 +379,6 @@ defmodule WandererAppWeb.Plugs.ApiVersioning do
    end
  end

  defp get_user_agent(conn) do
    case get_req_header(conn, "user-agent") do
      [user_agent] -> user_agent
      [] -> "unknown"
    end
  end

  defp get_peer_ip(conn) do
    case get_req_header(conn, "x-forwarded-for") do
      [forwarded_for] ->
        forwarded_for
        |> String.split(",")
        |> List.first()
        |> String.trim()

      [] ->
        case get_req_header(conn, "x-real-ip") do
          [real_ip] ->
            real_ip

          [] ->
            case conn.remote_ip do
              {a, b, c, d} -> "#{a}.#{b}.#{c}.#{d}"
              _ -> "unknown"
            end
        end
    end
  end

  defp get_version_headers(conn) do
    %{
      "api-version" => get_req_header(conn, "api-version"),

@@ -429,8 +403,6 @@ defmodule WandererAppWeb.Plugs.ApiVersioning do
  end

  defp get_breaking_changes(from_version, to_version) do
    # Define breaking changes between versions
    # Since we've consolidated to v1, most legacy versions are no longer supported
    %{
      {"1.0", "1"} => [
        "All API endpoints now use /api/v1/ prefix",

@@ -14,6 +14,7 @@ defmodule WandererAppWeb.Plugs.RequestValidator do
  import Plug.Conn

  alias WandererApp.SecurityAudit
  alias WandererApp.Audit.RequestContext

  # 10MB
  @max_request_size 10 * 1024 * 1024

@@ -344,13 +345,13 @@ defmodule WandererAppWeb.Plugs.RequestValidator do
    # Log security threat
    user_id = get_user_id(conn)

    SecurityAudit.log_event(:security_alert, user_id, %{
      threats: threats,
      ip_address: get_peer_ip(conn),
      user_agent: get_user_agent(conn),
      request_path: conn.request_path,
      method: conn.method
    })
    request_details = RequestContext.build_request_details(conn)

    SecurityAudit.log_event(
      :security_alert,
      user_id,
      Map.put(request_details, :threats, threats)
    )

    conn
    |> send_validation_error(400, "Malicious content detected", %{

@@ -457,35 +458,6 @@ defmodule WandererAppWeb.Plugs.RequestValidator do
    end
  end

  defp get_peer_ip(conn) do
    case get_req_header(conn, "x-forwarded-for") do
      [forwarded_for] ->
        forwarded_for
        |> String.split(",")
        |> List.first()
        |> String.trim()

      [] ->
        case get_req_header(conn, "x-real-ip") do
          [real_ip] ->
            real_ip

          [] ->
            case conn.remote_ip do
              {a, b, c, d} -> "#{a}.#{b}.#{c}.#{d}"
              _ -> "unknown"
            end
        end
    end
  end

  defp get_user_agent(conn) do
    case get_req_header(conn, "user-agent") do
      [user_agent] -> user_agent
      [] -> "unknown"
    end
  end

  defp send_validation_error(conn, status, message, details) do
    error_response = %{
      error: message,

@@ -504,14 +476,15 @@ defmodule WandererAppWeb.Plugs.RequestValidator do
    # Log the validation error
    user_id = get_user_id(conn)

    SecurityAudit.log_event(:security_alert, user_id, %{
      error: "validation_error",
      message: Exception.message(error),
      ip_address: get_peer_ip(conn),
      user_agent: get_user_agent(conn),
      request_path: conn.request_path,
      method: conn.method
    })
    request_details = RequestContext.build_request_details(conn)

    SecurityAudit.log_event(
      :security_alert,
      user_id,
      request_details
      |> Map.put(:error, "validation_error")
      |> Map.put(:message, Exception.message(error))
    )

    conn
    |> send_validation_error(500, "Request validation failed", %{

@@ -31,8 +31,6 @@ defmodule WandererAppWeb.Presence do
      character_id
    end)

    WandererApp.Cache.insert("map_#{map_id}:presence_updated", true)

    WandererApp.Cache.insert(
      "map_#{map_id}:presence_character_ids",
      presence_tracked_character_ids

@@ -43,6 +41,8 @@ defmodule WandererAppWeb.Presence do
      presence_data
    )

    WandererApp.Cache.insert("map_#{map_id}:presence_updated", true)

    {:ok, state}
  end
end

2
mix.exs

@@ -3,7 +3,7 @@ defmodule WandererApp.MixProject do

  @source_url "https://github.com/wanderer-industries/wanderer"

  @version "1.75.2"
  @version "1.75.14"

  def project do
    [

@@ -0,0 +1,41 @@
defmodule WandererApp.Repo.Migrations.AddSecurityAuditIndexes do
  use Ecto.Migration
  @disable_ddl_transaction true
  @disable_migration_lock true

  def up do
    # Add indexes for security audit queries
    create_if_not_exists index(:user_activity_v1, [:entity_type, :event_type, :inserted_at],
      concurrently: true
    )

    create_if_not_exists index(:user_activity_v1, [:user_id, :inserted_at], concurrently: true)
    create_if_not_exists index(:user_activity_v1, [:event_type], concurrently: true)

    # Partial index for security events only - for better performance
    create_if_not_exists index(:user_activity_v1, [:user_id, :inserted_at],
      where: "entity_type = 'security_event'",
      name: :user_activity_v1_security_events_idx,
      concurrently: true
    )

    # Index for entity_id queries (used by Map.Audit)
    create_if_not_exists index(:user_activity_v1, [:entity_id, :inserted_at], concurrently: true)
  end

  def down do
    drop_if_exists index(:user_activity_v1, [:entity_id, :inserted_at], concurrently: true)

    drop_if_exists index(:user_activity_v1, [:user_id, :inserted_at],
      name: :user_activity_v1_security_events_idx,
      concurrently: true
    )

    drop_if_exists index(:user_activity_v1, [:event_type], concurrently: true)
    drop_if_exists index(:user_activity_v1, [:user_id, :inserted_at], concurrently: true)

    drop_if_exists index(:user_activity_v1, [:entity_type, :event_type, :inserted_at],
      concurrently: true
    )
  end
end
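
A sketch of the kind of lookup the partial index serves (schemaless Ecto.Query; table and column names come from the migration, the binding is illustrative):

    import Ecto.Query

    from(a in "user_activity_v1",
      where: a.entity_type == "security_event" and a.user_id == ^user_id,
      order_by: [desc: a.inserted_at],
      select: %{event_type: a.event_type, inserted_at: a.inserted_at}
    )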