Compare commits

...

17 Commits

Author SHA1 Message Date
Dmitry Popov
42e0f8f660 Merge branch 'main' into develop
2025-11-25 21:02:53 +01:00
Dmitry Popov
147dd5880e Merge pull request #559 from wanderer-industries/markdown-description
feat: Add support markdown for system description
2025-11-26 00:01:09 +04:00
DanSylvest
69991fff72 feat: Add support markdown for system description 2025-11-25 22:50:11 +03:00
Dmitry Popov
b881c84a52 Merge branch 'main' into develop 2025-11-25 20:11:53 +01:00
CI
de4e1f859f chore: [skip ci] 2025-11-25 19:07:31 +00:00
CI
8e2a19540c chore: release version v1.86.1 2025-11-25 19:07:31 +00:00
Dmitry Popov
855c596672 Merge pull request #558 from wanderer-industries/show-passage-direction
fix(Map): Add ability to see character passage direction in list of p…
2025-11-25 23:06:45 +04:00
DanSylvest
36d3c0937b chore: Add ability to see character passage direction in list of passages - remove unnecessary log 2025-11-25 22:04:12 +03:00
CI
d8fb1f78cf chore: [skip ci] 2025-11-25 19:03:24 +00:00
CI
98fa7e0235 chore: release version v1.86.0 2025-11-25 19:03:24 +00:00
Dmitry Popov
e4396fe2f9 Merge pull request #557 from guarzo/guarzo/filteractivity
feat: add date filter for character activity
2025-11-25 23:02:58 +04:00
DanSylvest
1c117903f6 fix(Map): Add ability to see character passage direction in list of passages 2025-11-25 21:51:01 +03:00
Dmitry Popov
9e9dc39200 Merge pull request #556 from guarzo/guarzo/ticker2andsse
fix: sse enable checkbox, and kills ticker
2025-11-25 15:33:05 +04:00
Dmitry Popov
abd7e4e15c chore: fix tests issues 2025-11-25 12:28:31 +01:00
Guarzo
88ed9cd39e feat: add date filter for character activity 2025-11-25 01:52:06 +00:00
Dmitry Popov
9666a8e78a chore: fix tests issues
2025-11-25 00:41:40 +01:00
Guarzo
7a74ae566b fix: sse enable checkbox, and kills ticker 2025-11-23 18:04:30 +00:00
52 changed files with 607 additions and 980 deletions

View File

@@ -2,6 +2,24 @@
<!-- changelog -->
## [v1.86.1](https://github.com/wanderer-industries/wanderer/compare/v1.86.0...v1.86.1) (2025-11-25)
### Bug Fixes:
* Map: Add ability to see character passage direction in list of passages
## [v1.86.0](https://github.com/wanderer-industries/wanderer/compare/v1.85.5...v1.86.0) (2025-11-25)
### Features:
* add date filter for character activity
## [v1.85.5](https://github.com/wanderer-industries/wanderer/compare/v1.85.4...v1.85.5) (2025-11-24)

View File

@@ -1,4 +1,3 @@
import classes from './MarkdownComment.module.scss';
import clsx from 'clsx';
import {
InfoDrawer,
@@ -49,7 +48,11 @@ export const MarkdownComment = ({ text, time, characterEveId, id }: MarkdownComm
<>
<InfoDrawer
labelClassName="mb-[3px]"
className={clsx(classes.MarkdownCommentRoot, 'p-1 bg-stone-700/20 ')}
className={clsx(
'p-1 bg-stone-700/20',
'text-[12px] leading-[1.2] text-stone-300 break-words',
'bg-gradient-to-r from-stone-600/40 via-stone-600/10 to-stone-600/0',
)}
onMouseEnter={handleMouseEnter}
onMouseLeave={handleMouseLeave}
title={

View File

@@ -0,0 +1,9 @@
.CERoot {
@apply border border-stone-400/30 rounded-[2px];
:global {
.cm-content {
@apply bg-stone-600/40;
}
}
}

View File

@@ -1,11 +1,12 @@
import { MarkdownEditor } from '@/hooks/Mapper/components/mapInterface/components/MarkdownEditor';
import { TooltipPosition, WdImageSize, WdImgButton } from '@/hooks/Mapper/components/ui-kit';
import { useHotkey } from '@/hooks/Mapper/hooks';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { OutCommand } from '@/hooks/Mapper/types';
import clsx from 'clsx';
import { PrimeIcons } from 'primereact/api';
import { MarkdownEditor } from '@/hooks/Mapper/components/mapInterface/components/MarkdownEditor';
import { useHotkey } from '@/hooks/Mapper/hooks';
import { useCallback, useMemo, useRef, useState } from 'react';
import { OutCommand } from '@/hooks/Mapper/types';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import classes from './CommentsEditor.module.scss';
export interface CommentsEditorProps {}
@@ -50,6 +51,7 @@ export const CommentsEditor = ({}: CommentsEditorProps) => {
return (
<MarkdownEditor
className={classes.CERoot}
value={textVal}
onChange={setTextVal}
overlayContent={

View File

@@ -1,9 +1,9 @@
.CERoot {
@apply border border-stone-400/30 rounded-[2px];
@apply border border-stone-500/30 rounded-[2px];
:global {
.cm-content {
@apply bg-stone-600/40;
@apply bg-stone-950/70;
}
.cm-scroller {

View File

@@ -44,9 +44,17 @@ export interface MarkdownEditorProps {
overlayContent?: ReactNode;
value: string;
onChange: (value: string) => void;
height?: string;
className?: string;
}
export const MarkdownEditor = ({ value, onChange, overlayContent }: MarkdownEditorProps) => {
export const MarkdownEditor = ({
value,
onChange,
overlayContent,
height = '70px',
className,
}: MarkdownEditorProps) => {
const [hasShift, setHasShift] = useState(false);
const refData = useRef({ onChange });
@@ -66,9 +74,9 @@ export const MarkdownEditor = ({ value, onChange, overlayContent }: MarkdownEdit
<div className={clsx(classes.MarkdownEditor, 'relative')}>
<CodeMirror
value={value}
height="70px"
height={height}
extensions={CODE_MIRROR_EXTENSIONS}
className={classes.CERoot}
className={clsx(classes.CERoot, className)}
theme={oneDark}
onChange={handleOnChange}
placeholder="Start typing..."

View File

@@ -8,8 +8,8 @@ import { LabelsManager } from '@/hooks/Mapper/utils/labelsManager.ts';
import { Dialog } from 'primereact/dialog';
import { IconField } from 'primereact/iconfield';
import { InputText } from 'primereact/inputtext';
import { InputTextarea } from 'primereact/inputtextarea';
import { useCallback, useEffect, useRef, useState } from 'react';
import { MarkdownEditor } from '@/hooks/Mapper/components/mapInterface/components/MarkdownEditor';
interface SystemSettingsDialog {
systemId: string;
@@ -214,13 +214,9 @@ export const SystemSettingsDialog = ({ systemId, visible, setVisible }: SystemSe
<div className="flex flex-col gap-1">
<label htmlFor="username">Description</label>
<InputTextarea
autoResize
rows={5}
cols={30}
value={description}
onChange={e => setDescription(e.target.value)}
/>
<div className="h-[200px]">
<MarkdownEditor value={description} onChange={e => setDescription(e)} height="180px" />
</div>
</div>
</div>

View File

@@ -2,7 +2,7 @@ import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { isWormholeSpace } from '@/hooks/Mapper/components/map/helpers/isWormholeSpace.ts';
import { useMemo } from 'react';
import { getSystemById, sortWHClasses } from '@/hooks/Mapper/helpers';
import { InfoDrawer, WHClassView, WHEffectView } from '@/hooks/Mapper/components/ui-kit';
import { InfoDrawer, MarkdownTextViewer, WHClassView, WHEffectView } from '@/hooks/Mapper/components/ui-kit';
import { getSystemStaticInfo } from '@/hooks/Mapper/mapRootProvider/hooks/useLoadSystemStatic';
interface SystemInfoContentProps {
@@ -51,7 +51,7 @@ export const SystemInfoContent = ({ systemId }: SystemInfoContentProps) => {
</div>
}
>
<div className="break-words">{description}</div>
<MarkdownTextViewer>{description}</MarkdownTextViewer>
</InfoDrawer>
)}
</div>

View File

@@ -1,4 +1,7 @@
import { Dialog } from 'primereact/dialog';
import { Menu } from 'primereact/menu';
import { MenuItem } from 'primereact/menuitem';
import { useState, useCallback, useRef, useMemo } from 'react';
import { CharacterActivityContent } from '@/hooks/Mapper/components/mapRootContent/components/CharacterActivity/CharacterActivityContent.tsx';
interface CharacterActivityProps {
@@ -6,17 +9,69 @@ interface CharacterActivityProps {
onHide: () => void;
}
const periodOptions = [
{ value: 30, label: '30 Days' },
{ value: 365, label: '1 Year' },
{ value: null, label: 'All Time' },
];
export const CharacterActivity = ({ visible, onHide }: CharacterActivityProps) => {
const [selectedPeriod, setSelectedPeriod] = useState<number | null>(30);
const menuRef = useRef<Menu>(null);
const handlePeriodChange = useCallback((days: number | null) => {
setSelectedPeriod(days);
}, []);
const menuItems: MenuItem[] = useMemo(
() => [
{
label: 'Period',
items: periodOptions.map(option => ({
label: option.label,
icon: selectedPeriod === option.value ? 'pi pi-check' : undefined,
command: () => handlePeriodChange(option.value),
})),
},
],
[selectedPeriod, handlePeriodChange],
);
const selectedPeriodLabel = useMemo(
() => periodOptions.find(opt => opt.value === selectedPeriod)?.label || 'All Time',
[selectedPeriod],
);
const headerIcons = (
<>
<button
type="button"
className="p-dialog-header-icon p-link"
onClick={e => menuRef.current?.toggle(e)}
aria-label="Filter options"
>
<span className="pi pi-bars" />
</button>
<Menu model={menuItems} popup ref={menuRef} />
</>
);
return (
<Dialog
header="Character Activity"
header={
<div className="flex items-center gap-2">
<span>Character Activity</span>
<span className="text-xs text-stone-400">({selectedPeriodLabel})</span>
</div>
}
visible={visible}
className="w-[550px] max-h-[90vh]"
onHide={onHide}
dismissableMask
contentClassName="p-0 h-full flex flex-col"
icons={headerIcons}
>
<CharacterActivityContent />
<CharacterActivityContent selectedPeriod={selectedPeriod} />
</Dialog>
);
};

View File

@@ -7,16 +7,28 @@ import {
} from '@/hooks/Mapper/components/mapRootContent/components/CharacterActivity/helpers.tsx';
import { Column } from 'primereact/column';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { useMemo } from 'react';
import { useMemo, useEffect } from 'react';
import { useCharacterActivityHandlers } from '@/hooks/Mapper/components/mapRootContent/hooks/useCharacterActivityHandlers';
export const CharacterActivityContent = () => {
interface CharacterActivityContentProps {
selectedPeriod: number | null;
}
export const CharacterActivityContent = ({ selectedPeriod }: CharacterActivityContentProps) => {
const {
data: { characterActivityData },
} = useMapRootState();
const { handleShowActivity } = useCharacterActivityHandlers();
const activity = useMemo(() => characterActivityData?.activity || [], [characterActivityData]);
const loading = useMemo(() => characterActivityData?.loading !== false, [characterActivityData]);
// Reload activity data when period changes
useEffect(() => {
handleShowActivity(selectedPeriod);
}, [selectedPeriod, handleShowActivity]);
if (loading) {
return (
<div className="flex flex-col items-center justify-center h-full w-full">

View File

@@ -3,7 +3,7 @@
}
.SidebarOnTheMap {
width: 400px;
width: 460px;
padding: 0 !important;
:global {

View File

@@ -5,6 +5,7 @@ import {
ConnectionType,
OutCommand,
Passage,
PassageWithSourceTarget,
SolarSystemConnection,
} from '@/hooks/Mapper/types';
import clsx from 'clsx';
@@ -19,7 +20,7 @@ import { PassageCard } from './PassageCard';
const sortByDate = (a: string, b: string) => new Date(a).getTime() - new Date(b).getTime();
const itemTemplate = (item: Passage, options: VirtualScrollerTemplateOptions) => {
const itemTemplate = (item: PassageWithSourceTarget, options: VirtualScrollerTemplateOptions) => {
return (
<div
className={clsx(classes.CharacterRow, 'w-full box-border', {
@@ -35,7 +36,7 @@ const itemTemplate = (item: Passage, options: VirtualScrollerTemplateOptions) =>
};
export interface ConnectionPassagesContentProps {
passages: Passage[];
passages: PassageWithSourceTarget[];
}
export const ConnectionPassages = ({ passages = [] }: ConnectionPassagesContentProps) => {
@@ -113,6 +114,20 @@ export const Connections = ({ selectedConnection, onHide }: OnTheMapProps) => {
[outCommand],
);
const preparedPassages = useMemo(() => {
if (!cnInfo) {
return [];
}
return passages
.sort((a, b) => sortByDate(b.inserted_at, a.inserted_at))
.map<PassageWithSourceTarget>(x => ({
...x,
source: x.from ? cnInfo.target : cnInfo.source,
target: x.from ? cnInfo.source : cnInfo.target,
}));
}, [cnInfo, passages]);
useEffect(() => {
if (!selectedConnection) {
return;
@@ -145,12 +160,14 @@ export const Connections = ({ selectedConnection, onHide }: OnTheMapProps) => {
<InfoDrawer title="Connection" rightSide>
<div className="flex justify-end gap-2 items-center">
<SystemView
showCustomName
systemId={cnInfo.source}
className={clsx(classes.InfoTextSize, 'select-none text-center')}
hideRegion
/>
<span className="pi pi-angle-double-right text-stone-500 text-[15px]"></span>
<SystemView
showCustomName
systemId={cnInfo.target}
className={clsx(classes.InfoTextSize, 'select-none text-center')}
hideRegion
@@ -184,7 +201,7 @@ export const Connections = ({ selectedConnection, onHide }: OnTheMapProps) => {
{/* separator */}
<div className="w-full h-px bg-neutral-800 px-0.5"></div>
<ConnectionPassages passages={passages} />
<ConnectionPassages passages={preparedPassages} />
</div>
</Sidebar>
);

View File

@@ -35,6 +35,10 @@
&.ThreeColumns {
grid-template-columns: auto 1fr auto;
}
&.FourColumns {
grid-template-columns: auto auto 1fr auto;
}
}
.CardBorderLeftIsOwn {

View File

@@ -1,7 +1,7 @@
import clsx from 'clsx';
import classes from './PassageCard.module.scss';
import { Passage } from '@/hooks/Mapper/types';
import { TimeAgo } from '@/hooks/Mapper/components/ui-kit';
import { PassageWithSourceTarget } from '@/hooks/Mapper/types';
import { SystemView, TimeAgo, TooltipPosition } from '@/hooks/Mapper/components/ui-kit';
import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper';
import { kgToTons } from '@/hooks/Mapper/utils/kgToTons.ts';
import { useMemo } from 'react';
@@ -11,7 +11,7 @@ type PassageCardType = {
showShipName?: boolean;
// showSystem?: boolean;
// useSystemsCache?: boolean;
} & Passage;
} & PassageWithSourceTarget;
const SHIP_NAME_RX = /u'|'/g;
export const getShipName = (name: string) => {
@@ -25,7 +25,7 @@ export const getShipName = (name: string) => {
});
};
export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardType) => {
export const PassageCard = ({ inserted_at, character: char, ship, source, target, from }: PassageCardType) => {
const isOwn = false;
const insertedAt = useMemo(() => {
@@ -37,7 +37,39 @@ export const PassageCard = ({ inserted_at, character: char, ship }: PassageCardT
<div className={clsx(classes.CharacterCard, 'w-full text-xs', 'flex flex-col box-border')}>
<div className="flex flex-col justify-between px-2 py-1 gap-1">
{/*here icon and other*/}
<div className={clsx(classes.CharRow, classes.ThreeColumns)}>
<div className={clsx(classes.CharRow, classes.FourColumns)}>
<WdTooltipWrapper
position={TooltipPosition.top}
content={
<div className="flex justify-between gap-2 items-center">
<SystemView
showCustomName
systemId={source}
className="select-none text-center !text-[12px]"
hideRegion
/>
<span className="pi pi-angle-double-right text-stone-500 text-[15px]"></span>
<SystemView
showCustomName
systemId={target}
className="select-none text-center !text-[12px]"
hideRegion
/>
</div>
}
>
<div
className={clsx(
'transition-all transform ease-in duration-200',
'pi text-stone-500 text-[15px] w-[35px] h-[33px] !flex items-center justify-center border rounded-[6px]',
{
['pi-angle-double-right !text-orange-400 border-orange-400 hover:bg-orange-400/30']: from,
['pi-angle-double-left !text-stone-500/70 border-stone-500/70 hover:bg-stone-500/30']: !from,
},
)}
/>
</WdTooltipWrapper>
{/*portrait*/}
<span
className={clsx(classes.EveIcon, classes.CharIcon, 'wd-bg-default')}

View File

@@ -23,17 +23,17 @@ export const useCharacterActivityHandlers = () => {
/**
* Handle showing the character activity dialog
*/
const handleShowActivity = useCallback(() => {
const handleShowActivity = useCallback((days?: number | null) => {
// Update local state to show the dialog
update(state => ({
...state,
showCharacterActivity: true,
}));
// Send the command to the server
// Send the command to the server with optional days parameter
outCommand({
type: OutCommand.showActivity,
data: {},
data: days !== undefined ? { days } : {},
});
}, [outCommand, update]);

View File

@@ -1,8 +1,5 @@
.MarkdownCommentRoot {
border-left-width: 3px;
.MarkdownTextViewer {
@apply text-[12px] leading-[1.2] text-stone-300 break-words;
@apply bg-gradient-to-r from-stone-600/40 via-stone-600/10 to-stone-600/0;
.h1 {
@apply text-[12px] font-normal m-0 p-0 border-none break-words whitespace-normal;
@@ -56,6 +53,10 @@
@apply font-bold text-green-400 break-words whitespace-normal;
}
strong {
font-weight: bold;
}
i, em {
@apply italic text-pink-400 break-words whitespace-normal;
}

View File

@@ -2,10 +2,16 @@ import Markdown from 'react-markdown';
import remarkGfm from 'remark-gfm';
import remarkBreaks from 'remark-breaks';
import classes from './MarkdownTextViewer.module.scss';
const REMARK_PLUGINS = [remarkGfm, remarkBreaks];
type MarkdownTextViewerProps = { children: string };
export const MarkdownTextViewer = ({ children }: MarkdownTextViewerProps) => {
return <Markdown remarkPlugins={REMARK_PLUGINS}>{children}</Markdown>;
return (
<div className={classes.MarkdownTextViewer}>
<Markdown remarkPlugins={REMARK_PLUGINS}>{children}</Markdown>
</div>
);
};

View File

@@ -68,4 +68,5 @@ export interface ActivitySummary {
passages: number;
connections: number;
signatures: number;
timestamp?: string;
}

View File

@@ -6,11 +6,17 @@ export type PassageLimitedCharacterType = Pick<
>;
export type Passage = {
from: boolean;
inserted_at: string; // Date
ship: ShipTypeRaw;
character: PassageLimitedCharacterType;
};
export type PassageWithSourceTarget = {
source: string;
target: string;
} & Passage;
export type ConnectionInfoOutput = {
marl_eol_time: string;
};

View File

@@ -8,6 +8,8 @@ defmodule WandererApp.Api.Map do
alias Ash.Resource.Change.Builtins
require Logger
postgres do
repo(WandererApp.Repo)
table("maps_v1")
@@ -55,6 +57,7 @@ defmodule WandererApp.Api.Map do
define(:mark_as_deleted, action: :mark_as_deleted)
define(:update_api_key, action: :update_api_key)
define(:toggle_webhooks, action: :toggle_webhooks)
define(:toggle_sse, action: :toggle_sse)
define(:by_id,
get_by: [:id],
@@ -103,7 +106,16 @@ defmodule WandererApp.Api.Map do
end
create :new do
accept [:name, :slug, :description, :scope, :only_tracked_characters, :owner_id, :sse_enabled]
accept [
:name,
:slug,
:description,
:scope,
:only_tracked_characters,
:owner_id,
:sse_enabled
]
primary?(true)
argument :create_default_acl, :boolean, allow_nil?: true
argument :acls, {:array, :uuid}, allow_nil?: true
@@ -188,6 +200,14 @@ defmodule WandererApp.Api.Map do
require_atomic? false
end
update :toggle_sse do
require_atomic? false
accept [:sse_enabled]
# Validate subscription when enabling SSE
validate &validate_sse_subscription/2
end
create :duplicate do
accept [:name, :description, :scope, :only_tracked_characters]
argument :source_map_id, :uuid, allow_nil?: false
@@ -373,19 +393,13 @@ defmodule WandererApp.Api.Map do
end
end
# Private validation functions
@doc false
# Validates that SSE can be enabled based on subscription status.
# SSE Subscription Validation
#
# Validation rules:
# 1. Skip if SSE not being enabled (no validation needed)
# 2. Skip during map creation (map_id is nil, subscription doesn't exist yet)
# 3. Skip in Community Edition mode (subscriptions disabled globally)
# 4. Require active subscription in Enterprise mode
#
# This ensures users cannot enable SSE without a valid subscription in Enterprise mode,
# while allowing SSE in Community Edition and during map creation.
# This validation ensures that SSE can only be enabled when:
# 1. SSE is being disabled (always allowed)
# 2. Map is being created (skip validation, will be checked on first update)
# 3. Community Edition mode (always allowed)
# 4. Enterprise mode with active subscription
defp validate_sse_subscription(changeset, _context) do
sse_enabled = Ash.Changeset.get_attribute(changeset, :sse_enabled)
map_id = changeset.data.id
@@ -397,7 +411,6 @@ defmodule WandererApp.Api.Map do
:ok
# Map creation (no ID yet) - skip validation
# Subscription check will happen on first update if they try to enable SSE
is_nil(map_id) ->
:ok
@@ -411,7 +424,6 @@ defmodule WandererApp.Api.Map do
end
end
# Helper to check if map has an active subscription
defp validate_active_subscription(map_id) do
case WandererApp.Map.is_subscription_active?(map_id) do
{:ok, true} ->
@@ -421,11 +433,8 @@ defmodule WandererApp.Api.Map do
{:error, field: :sse_enabled, message: "Active subscription required to enable SSE"}
{:error, reason} ->
require Logger
Logger.warning("Failed to check subscription for map #{map_id}: #{inspect(reason)}")
# Fail open - allow the operation but log the error
# This prevents database errors from blocking legitimate operations
:ok
Logger.error("Error checking subscription status: #{inspect(reason)}")
{:error, field: :sse_enabled, message: "Unable to verify subscription status"}
end
end
end
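For context on the stricter validation above, a minimal sketch (not part of the diff) of what callers of the new :toggle_sse action can expect; the error shape matches how MapsLive handles it later in this compare:

# Sketch only; assumes `map` is a loaded WandererApp.Api.Map record.
case WandererApp.Api.Map.toggle_sse(map, %{sse_enabled: true}) do
  {:ok, updated_map} ->
    # Community Edition, or Enterprise with an active subscription
    updated_map

  {:error, %Ash.Error.Invalid{errors: errors}} ->
    # Enterprise without an active subscription, or the subscription check
    # itself failed ("Unable to verify subscription status")
    errors
end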

View File

@@ -139,7 +139,6 @@ defmodule WandererApp.Api.MapCharacterSettings do
update :track do
accept [:map_id, :character_id]
argument :map_id, :string, allow_nil?: false
require_atomic? false
# Load the record first
@@ -153,7 +152,6 @@ defmodule WandererApp.Api.MapCharacterSettings do
update :untrack do
accept [:map_id, :character_id]
argument :map_id, :string, allow_nil?: false
require_atomic? false
# Load the record first
@@ -167,7 +165,6 @@ defmodule WandererApp.Api.MapCharacterSettings do
update :follow do
accept [:map_id, :character_id]
argument :map_id, :string, allow_nil?: false
require_atomic? false
# Load the record first
@@ -181,7 +178,6 @@ defmodule WandererApp.Api.MapCharacterSettings do
update :unfollow do
accept [:map_id, :character_id]
argument :map_id, :string, allow_nil?: false
require_atomic? false
# Load the record first

View File

@@ -58,6 +58,7 @@ defmodule WandererApp.Api.MapWebhookSubscription do
:consecutive_failures,
:secret
]
require_atomic? false
end

View File

@@ -93,6 +93,8 @@ defmodule WandererApp.CachedInfo do
end
end
def get_system_static_info(nil), do: {:ok, nil}
def get_system_static_info(solar_system_id) do
{:ok, solar_system_id} = APIUtils.parse_int(solar_system_id)

View File

@@ -43,13 +43,14 @@ defmodule WandererApp.Character.Activity do
## Parameters
- `map_id`: ID of the map
- `current_user`: Current user struct (used only to get user settings)
- `days`: Optional number of days to filter activity (nil for all time)
## Returns
- List of processed activity data
"""
def process_character_activity(map_id, current_user) do
def process_character_activity(map_id, current_user, days \\ nil) do
with {:ok, map_user_settings} <- get_map_user_settings(map_id, current_user.id),
{:ok, raw_activity} <- WandererApp.Map.get_character_activity(map_id),
{:ok, raw_activity} <- WandererApp.Map.get_character_activity(map_id, days),
{:ok, user_characters} <-
WandererApp.Api.Character.active_by_user(%{user_id: current_user.id}) do
process_activity_data(raw_activity, map_user_settings, user_characters)

View File

@@ -463,7 +463,8 @@ defmodule WandererApp.Esi.ApiClient do
{:error, reason} ->
# Check if this is a Finch pool error
if is_exception(reason) and Exception.message(reason) =~ "unable to provide a connection" do
if is_exception(reason) and
Exception.message(reason) =~ "unable to provide a connection" do
:telemetry.execute(
[:wanderer_app, :finch, :pool_exhausted],
%{count: 1},
@@ -677,7 +678,8 @@ defmodule WandererApp.Esi.ApiClient do
{:error, reason} ->
# Check if this is a Finch pool error
if is_exception(reason) and Exception.message(reason) =~ "unable to provide a connection" do
if is_exception(reason) and
Exception.message(reason) =~ "unable to provide a connection" do
:telemetry.execute(
[:wanderer_app, :finch, :pool_exhausted],
%{count: 1},

View File

@@ -403,10 +403,24 @@ defmodule WandererApp.Kills.MessageHandler do
defp extract_field(_data, _field_names), do: nil
# Specific field extractors using the generic function
# Generic nested field extraction - tries flat keys first, then nested object
@spec extract_nested_field(map(), list(String.t()), String.t(), String.t()) :: String.t() | nil
defp extract_nested_field(data, flat_keys, nested_key, field) when is_map(data) do
case extract_field(data, flat_keys) do
nil ->
case data[nested_key] do
%{^field => value} when is_binary(value) and value != "" -> value
_ -> nil
end
value ->
value
end
end
# Specific field extractors using the generic functions
@spec get_character_name(map() | any()) :: String.t() | nil
defp get_character_name(data) when is_map(data) do
# Try multiple possible field names
field_names = ["attacker_name", "victim_name", "character_name", "name"]
extract_field(data, field_names) ||
@@ -419,30 +433,26 @@ defmodule WandererApp.Kills.MessageHandler do
defp get_character_name(_), do: nil
@spec get_corp_ticker(map() | any()) :: String.t() | nil
defp get_corp_ticker(data) when is_map(data) do
extract_field(data, ["corporation_ticker", "corp_ticker"])
end
defp get_corp_ticker(data) when is_map(data),
do: extract_nested_field(data, ["corporation_ticker", "corp_ticker"], "corporation", "ticker")
defp get_corp_ticker(_), do: nil
@spec get_corp_name(map() | any()) :: String.t() | nil
defp get_corp_name(data) when is_map(data) do
extract_field(data, ["corporation_name", "corp_name"])
end
defp get_corp_name(data) when is_map(data),
do: extract_nested_field(data, ["corporation_name", "corp_name"], "corporation", "name")
defp get_corp_name(_), do: nil
@spec get_alliance_ticker(map() | any()) :: String.t() | nil
defp get_alliance_ticker(data) when is_map(data) do
extract_field(data, ["alliance_ticker"])
end
defp get_alliance_ticker(data) when is_map(data),
do: extract_nested_field(data, ["alliance_ticker"], "alliance", "ticker")
defp get_alliance_ticker(_), do: nil
@spec get_alliance_name(map() | any()) :: String.t() | nil
defp get_alliance_name(data) when is_map(data) do
extract_field(data, ["alliance_name"])
end
defp get_alliance_name(data) when is_map(data),
do: extract_nested_field(data, ["alliance_name"], "alliance", "name")
defp get_alliance_name(_), do: nil
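A short illustration (ticker and name values are invented) of the two payload shapes the new extract_nested_field/4 fallback covers:

# Flat key wins when present; otherwise the nested object is consulted.
get_corp_ticker(%{"corporation_ticker" => "TCKR"})                #=> "TCKR"
get_corp_ticker(%{"corporation" => %{"ticker" => "TCKR"}})        #=> "TCKR"
get_corp_ticker(%{"corporation" => %{"ticker" => ""}})            #=> nil (empty strings are rejected)
get_alliance_name(%{"alliance" => %{"name" => "Some Alliance"}})  #=> "Some Alliance"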

View File

@@ -18,10 +18,20 @@ defmodule WandererApp.Map.MapPool do
@map_pool_limit 10
@garbage_collection_interval :timer.hours(4)
@systems_cleanup_timeout :timer.minutes(30)
@characters_cleanup_timeout :timer.minutes(5)
@connections_cleanup_timeout :timer.minutes(5)
@backup_state_timeout :timer.minutes(1)
# Use very long timeouts in test environment to prevent background tasks from running during tests
# This avoids database connection ownership errors when tests finish before async tasks complete
@systems_cleanup_timeout if Mix.env() == :test,
do: :timer.hours(24),
else: :timer.minutes(30)
@characters_cleanup_timeout if Mix.env() == :test,
do: :timer.hours(24),
else: :timer.minutes(5)
@connections_cleanup_timeout if Mix.env() == :test,
do: :timer.hours(24),
else: :timer.minutes(5)
@backup_state_timeout if Mix.env() == :test,
do: :timer.hours(24),
else: :timer.minutes(1)
def new(), do: __struct__()
def new(args), do: __struct__(args)
@@ -187,7 +197,7 @@ defmodule WandererApp.Map.MapPool do
# Schedule periodic tasks
Process.send_after(self(), :backup_state, @backup_state_timeout)
Process.send_after(self(), :cleanup_systems, 15_000)
Process.send_after(self(), :cleanup_systems, @systems_cleanup_timeout)
Process.send_after(self(), :cleanup_characters, @characters_cleanup_timeout)
Process.send_after(self(), :cleanup_connections, @connections_cleanup_timeout)
Process.send_after(self(), :garbage_collect, @garbage_collection_interval)

View File

@@ -203,10 +203,7 @@ defmodule WandererApp.Maps do
is_member_corp = to_string(c.corporation_id) in map_member_corporation_ids
is_member_alliance = to_string(c.alliance_id) in map_member_alliance_ids
has_access =
is_owner or is_acl_owner or is_member_eve or is_member_corp or is_member_alliance
has_access
is_owner || is_acl_owner || is_member_eve || is_member_corp || is_member_alliance
end)
end
@@ -250,11 +247,11 @@ defmodule WandererApp.Maps do
members ->
members
|> Enum.any?(fn member ->
(member.role == :blocked and
(member.role == :blocked &&
member.eve_character_id in user_character_eve_ids) or
(member.role == :blocked and
(member.role == :blocked &&
member.eve_corporation_id in user_character_corporation_ids) or
(member.role == :blocked and
(member.role == :blocked &&
member.eve_alliance_id in user_character_alliance_ids)
end)
end

View File

@@ -53,22 +53,40 @@ defmodule WandererApp.MapCharacterSettingsRepo do
def get_tracked_by_map_all(map_id),
do: WandererApp.Api.MapCharacterSettings.tracked_by_map_all(%{map_id: map_id})
def track(settings) do
{:ok, _} = get(settings.map_id, settings.character_id)
def track(%{map_id: map_id, character_id: character_id}) do
# Only update the tracked field, preserving other fields
WandererApp.Api.MapCharacterSettings.track(%{
map_id: settings.map_id,
character_id: settings.character_id
})
case WandererApp.Api.MapCharacterSettings.track(%{
map_id: map_id,
character_id: character_id
}) do
{:ok, _} ->
:ok
error ->
Logger.error(
"Failed to track character: #{character_id} on map: #{map_id}, #{inspect(error)}"
)
{:error, error}
end
end
def untrack(settings) do
{:ok, _} = get(settings.map_id, settings.character_id)
def untrack(%{map_id: map_id, character_id: character_id}) do
# Only update the tracked field, preserving other fields
WandererApp.Api.MapCharacterSettings.untrack(%{
map_id: settings.map_id,
character_id: settings.character_id
})
case WandererApp.Api.MapCharacterSettings.untrack(%{
map_id: map_id,
character_id: character_id
}) do
{:ok, _} ->
:ok
error ->
Logger.error(
"Failed to untrack character: #{character_id} on map: #{map_id}, #{inspect(error)}"
)
{:error, error}
end
end
def track!(settings) do

View File

@@ -336,8 +336,8 @@
label="Valid"
options={Enum.map(@valid_types, fn valid_type -> {valid_type.label, valid_type.id} end)}
/>
<!-- Modal action buttons -->
<!-- Modal action buttons -->
<div class="modal-action">
<.button class="mt-2" type="submit" phx-disable-with="Saving...">
{(@live_action == :add_invite_link && "Add") || "Save"}

View File

@@ -30,14 +30,17 @@ defmodule WandererAppWeb.MapActivityEventHandler do
def handle_ui_event(
"show_activity",
_,
params,
%{assigns: %{map_id: map_id, current_user: current_user}} = socket
) do
Task.async(fn ->
try do
# Extract days parameter (nil if not provided)
days = Map.get(params, "days")
# Get raw activity data from the domain logic
result =
WandererApp.Character.Activity.process_character_activity(map_id, current_user)
WandererApp.Character.Activity.process_character_activity(map_id, current_user, days)
# Group activities by user_id and summarize
summarized_result =
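A minimal sketch (bindings assumed from the surrounding handler) of how the optional period now flows from the UI payload into the activity query; nil preserves the previous all-time behaviour:

days = Map.get(params, "days")   # 30, 365, or nil when "All Time" is selected / param omitted
WandererApp.Character.Activity.process_character_activity(map_id, current_user, days)
# ...which passes days through to WandererApp.Map.get_character_activity(map_id, days)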

View File

@@ -163,6 +163,7 @@ defmodule WandererAppWeb.MapsLive do
|> assign(:map_slug, map_slug)
|> assign(:map_id, map.id)
|> assign(:public_api_key, map.public_api_key)
|> assign(:sse_enabled, map.sse_enabled)
|> assign(:map, map)
|> assign(
export_settings: export_settings |> _get_export_map_data(),
@@ -232,6 +233,27 @@ defmodule WandererAppWeb.MapsLive do
{:noreply, assign(socket, public_api_key: new_api_key)}
end
def handle_event("toggle-sse", _params, socket) do
new_sse_enabled = not socket.assigns.sse_enabled
map = socket.assigns.map
case WandererApp.Api.Map.toggle_sse(map, %{sse_enabled: new_sse_enabled}) do
{:ok, updated_map} ->
{:noreply, assign(socket, sse_enabled: new_sse_enabled, map: updated_map)}
{:error, %Ash.Error.Invalid{errors: errors}} ->
error_message =
errors
|> Enum.map(fn error -> Map.get(error, :message, "Unknown error") end)
|> Enum.join(", ")
{:noreply, put_flash(socket, :error, error_message)}
{:error, _} ->
{:noreply, put_flash(socket, :error, "Failed to update SSE setting")}
end
end
@impl true
def handle_event(
"live_select_change",

View File

@@ -540,6 +540,24 @@
</.button>
</div>
</div>
<div class="border-t border-stone-700 mt-4 pt-4">
<h3 class="text-md font-semibold mb-3">Server-Sent Events (SSE)</h3>
<div class="flex items-center gap-3">
<label class="flex items-center gap-2 cursor-pointer">
<input
type="checkbox"
class="checkbox checkbox-primary"
checked={@sse_enabled}
phx-click="toggle-sse"
/>
<span>Enable SSE for this map</span>
</label>
</div>
<p class="text-sm text-stone-400 mt-2">
When enabled, external clients can subscribe to real-time map events via SSE.
</p>
</div>
</div>
<.live_component

View File

@@ -3,7 +3,7 @@ defmodule WandererApp.MixProject do
@source_url "https://github.com/wanderer-industries/wanderer"
@version "1.85.5"
@version "1.86.1"
def project do
[

View File

@@ -52,25 +52,24 @@ defmodule WandererApp.Repo.Migrations.AddPublicApiKeyUniqueIndex do
end
defp create_backup_table do
repo().query!("""
CREATE TABLE IF NOT EXISTS maps_v1_api_key_backup (
id UUID PRIMARY KEY,
map_id UUID NOT NULL,
old_public_api_key TEXT NOT NULL,
reason TEXT NOT NULL,
backed_up_at TIMESTAMP NOT NULL DEFAULT NOW()
repo().query!(
"""
CREATE TABLE IF NOT EXISTS maps_v1_api_key_backup (
id UUID PRIMARY KEY,
map_id UUID NOT NULL,
old_public_api_key TEXT NOT NULL,
reason TEXT NOT NULL,
backed_up_at TIMESTAMP NOT NULL DEFAULT NOW()
)
""",
[]
)
""", [])
IO.puts("Created backup table maps_v1_api_key_backup")
end
def down do
drop_if_exists(
index(:maps_v1, [:public_api_key],
name: :maps_v1_unique_public_api_key_index
)
)
drop_if_exists(index(:maps_v1, [:public_api_key], name: :maps_v1_unique_public_api_key_index))
IO.puts("Dropped unique index on maps_v1.public_api_key")
@@ -119,6 +118,7 @@ defmodule WandererApp.Repo.Migrations.AddPublicApiKeyUniqueIndex do
INSERT INTO maps_v1_api_key_backup (id, map_id, old_public_api_key, reason)
VALUES (gen_random_uuid(), $1::uuid, $2, 'duplicate_api_key_cleared_for_unique_index')
"""
repo().query!(backup_query, [id, api_key])
# Clear the duplicate

View File

@@ -104,7 +104,6 @@ defmodule WandererAppWeb.MapAuditAPIControllerIntegrationTest do
assert length(events) >= 0
end
@tag :skip
test "supports different period values", %{conn: conn, map: map} do
character = Factory.insert(:character, %{eve_id: "123456789"})
user = Factory.insert(:user)

View File

@@ -1,5 +1,5 @@
defmodule WandererAppWeb.MapSystemStructureAPIControllerTest do
use WandererAppWeb.ApiCase
use WandererAppWeb.ApiCase, async: false
alias WandererAppWeb.Factory

View File

@@ -247,9 +247,10 @@ defmodule WandererAppWeb.Api.V1.MapSystemApiV1Test do
payload = %{
"data" => %{
"type" => "map_systems",
"attributes" => %{
# Missing solar_system_id - JSON:API returns 400 for schema validation
}
"attributes" =>
%{
# Missing solar_system_id - JSON:API returns 400 for schema validation
}
}
}

View File

@@ -47,24 +47,26 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
user = create_user(%{name: "Test User", hash: "test_hash_#{:rand.uniform(1_000_000)}"})
# Create test character with location tracking scopes
character = create_character(%{
eve_id: "#{@test_character_eve_id}",
name: "Test Character",
user_id: user.id,
scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
tracking_pool: "default"
})
character =
create_character(%{
eve_id: "#{@test_character_eve_id}",
name: "Test Character",
user_id: user.id,
scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
tracking_pool: "default"
})
# Create test map
# Note: scope: :all is used because :none prevents system addition
# (is_connection_valid returns false for :none scope)
map = create_map(%{
name: "Test Char Track",
slug: "test-char-tracking-#{:rand.uniform(1_000_000)}",
owner_id: character.id,
scope: :all,
only_tracked_characters: false
})
map =
create_map(%{
name: "Test Char Track",
slug: "test-char-tracking-#{:rand.uniform(1_000_000)}",
owner_id: character.id,
scope: :all,
only_tracked_characters: false
})
on_exit(fn ->
cleanup_test_data(map.id)
@@ -150,6 +152,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Setup: Character starts at Jita
set_character_location(character.id, @system_jita)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character.id}:start_solar_system_id",
@system_jita
@@ -157,6 +160,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# First update - start system is intentionally NOT added yet
CharactersImpl.update_characters(map.id)
refute system_on_map?(map.id, @system_jita),
"Start system should not be added until character moves"
@@ -167,8 +171,11 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
CharactersImpl.update_characters(map.id)
# Verify: Both systems should be on map after character moves
assert wait_for_system_on_map(map.id, @system_jita), "Jita should be added after character moves"
assert wait_for_system_on_map(map.id, @system_amarr), "Amarr should be added as the new location"
assert wait_for_system_on_map(map.id, @system_jita),
"Jita should be added after character moves"
assert wait_for_system_on_map(map.id, @system_amarr),
"Amarr should be added as the new location"
end
end
@@ -185,6 +192,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Character starts at Jita
set_character_location(character.id, @system_jita)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character.id}:start_solar_system_id",
@system_jita
@@ -192,6 +200,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# First update - start system is intentionally NOT added yet
CharactersImpl.update_characters(map.id)
refute system_on_map?(map.id, @system_jita),
"Start system should not be added until character moves"
@@ -202,7 +211,9 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
CharactersImpl.update_characters(map.id)
# Verify both Jita and Amarr are now on map
assert wait_for_system_on_map(map.id, @system_jita), "Jita (start) should be on map after movement"
assert wait_for_system_on_map(map.id, @system_jita),
"Jita (start) should be on map after movement"
assert wait_for_system_on_map(map.id, @system_amarr), "Amarr should be on map"
# Rapid jump to Dodixie before next update cycle
@@ -213,7 +224,10 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Verify: All three systems should be on map
assert wait_for_system_on_map(map.id, @system_jita), "Jita (start) should still be on map"
assert wait_for_system_on_map(map.id, @system_amarr), "Amarr (intermediate) should still be on map - this is the critical test"
assert wait_for_system_on_map(map.id, @system_amarr),
"Amarr (intermediate) should still be on map - this is the critical test"
assert wait_for_system_on_map(map.id, @system_dodixie), "Dodixie (end) should be on map"
end
@@ -230,6 +244,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Start at Jita
set_character_location(character.id, @system_jita)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character.id}:start_solar_system_id",
@system_jita
@@ -284,9 +299,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Verify start_solar_system_id still exists after first update
{:ok, start_system} =
WandererApp.Cache.lookup(
"map:#{map.id}:character:#{character.id}:start_solar_system_id"
)
WandererApp.Cache.lookup("map:#{map.id}:character:#{character.id}:start_solar_system_id")
assert start_system == @system_jita,
"start_solar_system_id should persist after first update (not be taken/removed)"
@@ -369,6 +382,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Set character at Jita and set start location
set_character_location(character.id, @system_jita)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character.id}:start_solar_system_id",
@system_jita
@@ -401,6 +415,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Set up character location
set_character_location(character.id, @system_jita)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character.id}:start_solar_system_id",
@system_jita
@@ -424,19 +439,22 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Create a second character
user2 = create_user(%{name: "Test User 2", hash: "test_hash_#{:rand.uniform(1_000_000)}"})
character2 = create_character(%{
eve_id: "#{@test_character_eve_id + 1}",
name: "Test Character 2",
user_id: user2.id,
scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
tracking_pool: "default"
})
character2 =
create_character(%{
eve_id: "#{@test_character_eve_id + 1}",
name: "Test Character 2",
user_id: user2.id,
scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
tracking_pool: "default"
})
# Add both characters to map presence
add_character_to_map_presence(map.id, character2.id)
# Set locations for both characters
set_character_location(character2.id, @system_amarr)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character2.id}:start_solar_system_id",
@system_amarr
@@ -464,6 +482,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Set up character with location
set_character_location(character.id, @system_jita)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character.id}:start_solar_system_id",
@system_jita
@@ -491,32 +510,38 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
ensure_map_started(map.id)
# Create multiple characters for concurrent processing
characters = for i <- 1..5 do
user = create_user(%{
name: "Test User #{i}",
hash: "test_hash_#{:rand.uniform(1_000_000)}"
})
characters =
for i <- 1..5 do
user =
create_user(%{
name: "Test User #{i}",
hash: "test_hash_#{:rand.uniform(1_000_000)}"
})
character = create_character(%{
eve_id: "#{@test_character_eve_id + i}",
name: "Test Character #{i}",
user_id: user.id,
scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
tracking_pool: "default"
})
character =
create_character(%{
eve_id: "#{@test_character_eve_id + i}",
name: "Test Character #{i}",
user_id: user.id,
scopes: "esi-location.read_location.v1 esi-location.read_ship_type.v1",
tracking_pool: "default"
})
# Add character to presence and set location
add_character_to_map_presence(map.id, character.id)
# Add character to presence and set location
add_character_to_map_presence(map.id, character.id)
solar_system_id = Enum.at([@system_jita, @system_amarr, @system_dodixie, @system_rens], rem(i, 4))
set_character_location(character.id, solar_system_id)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character.id}:start_solar_system_id",
solar_system_id
)
solar_system_id =
Enum.at([@system_jita, @system_amarr, @system_dodixie, @system_rens], rem(i, 4))
character
end
set_character_location(character.id, solar_system_id)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character.id}:start_solar_system_id",
solar_system_id
)
character
end
# Run update_characters - should handle all characters concurrently
result = CharactersImpl.update_characters(map.id)
@@ -563,7 +588,8 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
CharactersImpl.update_characters(map.id)
# Should receive start and complete events (or error event if something failed)
assert_receive {:telemetry_event, [:wanderer_app, :map, :update_characters, :start], _, _}, 1000
assert_receive {:telemetry_event, [:wanderer_app, :map, :update_characters, :start], _, _},
1000
# Should receive either complete or error event
receive do
@@ -593,6 +619,7 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Set location in character cache
set_character_location(character.id, @system_jita)
WandererApp.Cache.insert(
"map:#{map.id}:character:#{character.id}:start_solar_system_id",
@system_jita
@@ -613,10 +640,12 @@ defmodule WandererApp.Map.CharacterLocationTrackingTest do
# Verify both caches updated
{:ok, character_data} = Cachex.get(:character_cache, character.id)
{:ok, map_cached_location} =
WandererApp.Cache.lookup("map:#{map.id}:character:#{character.id}:solar_system_id")
assert character_data.solar_system_id == @system_amarr
assert map_cached_location == @system_amarr,
"Both caches should be consistent after update"
end

View File

@@ -239,7 +239,6 @@ defmodule WandererAppWeb.MapConnectionAPIControllerSuccessTest do
{:ok, conn: conn, map: map, user: user, character: character}
end
@tag :skip
test "CREATE: fails with missing required parameters", %{conn: conn, map: map} do
invalid_params = %{
"type" => 0
@@ -252,7 +251,6 @@ defmodule WandererAppWeb.MapConnectionAPIControllerSuccessTest do
assert conn.status in [400, 422]
end
@tag :skip
test "UPDATE: fails for non-existent connection", %{conn: conn, map: map} do
non_existent_id = Ecto.UUID.generate()
@@ -267,7 +265,6 @@ defmodule WandererAppWeb.MapConnectionAPIControllerSuccessTest do
assert conn.status in [404, 422, 500]
end
@tag :skip
test "DELETE: handles non-existent connection gracefully", %{conn: conn, map: map} do
non_existent_id = Ecto.UUID.generate()
@@ -277,7 +274,6 @@ defmodule WandererAppWeb.MapConnectionAPIControllerSuccessTest do
assert conn.status in [200, 204, 404]
end
@tag :skip
test "READ: handles filtering with non-existent systems", %{conn: conn, map: map} do
params = %{
"solar_system_source" => "99999999",

View File

@@ -59,7 +59,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
{:ok, %{conn: conn, map: map, user: user, character: character}}
end
@tag :skip
test "READ: successfully retrieves systems for a map", %{conn: conn, map: map} do
# Create some systems for the map
system1 =
@@ -108,7 +107,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
assert amarr["status"] == 0
end
@tag :skip
test "CREATE: successfully creates a single system", %{conn: conn, map: map} do
# Start the map server
ensure_map_started(map.id)
@@ -133,7 +131,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
assert created_count >= 1
end
@tag :skip
test "UPDATE: successfully updates system position", %{conn: conn, map: map} do
system =
insert(:map_system, %{
@@ -165,7 +162,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
assert updated_system["position_y"] == 400.0
end
@tag :skip
test "UPDATE: successfully updates custom_name", %{conn: conn, map: map} do
system =
insert(:map_system, %{
@@ -194,7 +190,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
assert updated_system["custom_name"] == "My Trade Hub"
end
@tag :skip
test "DELETE: successfully deletes a system", %{conn: conn, map: map} do
system =
insert(:map_system, %{
@@ -222,7 +217,6 @@ defmodule WandererAppWeb.MapSystemAPIControllerSuccessTest do
end
end
@tag :skip
test "DELETE: successfully deletes multiple systems", %{conn: conn, map: map} do
system1 = insert(:map_system, %{map_id: map.id, solar_system_id: 30_000_142})
system2 = insert(:map_system, %{map_id: map.id, solar_system_id: 30_000_144})

View File

@@ -47,7 +47,9 @@ defmodule WandererAppWeb.ApiCase do
end
# Set up mocks for this test process
WandererApp.Test.Mocks.setup_test_mocks()
# Use global mode for integration tests so mocks work in spawned processes
mock_mode = if integration_test?, do: :global, else: :private
WandererApp.Test.Mocks.setup_test_mocks(mode: mock_mode)
# Set up integration test environment if needed
if integration_test? do

View File

@@ -63,13 +63,22 @@ defmodule WandererApp.DataCase do
# Use shared mode if requested or if running as a ConnCase test (to avoid ownership issues)
# Otherwise use non-shared mode for proper test isolation
shared = (tags[:shared] || tags[:conn_case] || not tags[:async]) and not tags[:async]
pid = Ecto.Adapters.SQL.Sandbox.start_owner!(WandererApp.Repo, shared: shared)
on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(pid) end)
# Store the sandbox owner pid for allowing background processes
# Start the sandbox owner and link it to the test process
pid = Ecto.Adapters.SQL.Sandbox.start_owner!(WandererApp.Repo, shared: shared)
# Store the sandbox owner pid BEFORE registering on_exit
# This ensures it's available for use in setup callbacks
Process.put(:sandbox_owner_pid, pid)
# Register cleanup - this will be called last (LIFO order)
on_exit(fn ->
# Only stop if the owner is still alive
if Process.alive?(pid) do
Ecto.Adapters.SQL.Sandbox.stop_owner(pid)
end
end)
# Allow critical system processes to access the database
allow_system_processes_database_access()
@@ -112,7 +121,9 @@ defmodule WandererApp.DataCase do
WandererApp.Server.TheraDataFetcher,
WandererApp.ExternalEvents.MapEventRelay,
WandererApp.ExternalEvents.WebhookDispatcher,
WandererApp.ExternalEvents.SseStreamManager
WandererApp.ExternalEvents.SseStreamManager,
# Task.Supervisor for Task.async_stream calls (e.g., from MapPool background tasks)
Task.Supervisor
]
Enum.each(system_processes, fn process_name ->

View File

@@ -108,6 +108,10 @@ defmodule WandererAppWeb.Factory do
create_map_transaction(map_id, attrs)
end
def insert(:solar_system, attrs) do
create_solar_system(attrs)
end
def insert(resource_type, _attrs) do
raise "Unknown factory resource type: #{resource_type}"
end
@@ -802,4 +806,45 @@ defmodule WandererAppWeb.Factory do
{:ok, webhook} = Ash.create(Api.MapWebhookSubscription, attrs)
webhook
end
@doc """
Creates a test solar system (static EVE Online system data) with reasonable defaults.
"""
def build_solar_system(attrs \\ %{}) do
unique_id = System.unique_integer([:positive])
solar_system_id = Map.get(attrs, :solar_system_id, 30_000_000 + rem(unique_id, 10_000))
default_attrs = %{
solar_system_id: solar_system_id,
solar_system_name: "System #{solar_system_id}",
solar_system_name_lc: "system #{solar_system_id}",
region_id: 10_000_000 + rem(unique_id, 1000),
region_name: "Test Region",
constellation_id: 20_000_000 + rem(unique_id, 1000),
constellation_name: "Test Constellation",
security: "0.5",
system_class: 0,
type_description: "HS",
class_title: "High Sec"
}
merged_attrs = Map.merge(default_attrs, attrs)
# Automatically compute solar_system_name_lc from solar_system_name if not provided
if Map.has_key?(attrs, :solar_system_name) and not Map.has_key?(attrs, :solar_system_name_lc) do
Map.put(merged_attrs, :solar_system_name_lc, String.downcase(merged_attrs.solar_system_name))
else
merged_attrs
end
end
def create_solar_system(attrs \\ %{}) do
attrs = build_solar_system(attrs)
# Use upsert to handle cases where the system might already exist
case Ash.create(Api.MapSolarSystem, attrs) do
{:ok, solar_system} -> solar_system
{:error, reason} -> raise "Failed to create solar system: #{inspect(reason)}"
end
end
end
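A hypothetical usage sketch for the new factory entry (attribute values invented; remaining fields come from the build_solar_system/1 defaults above):

jita = Factory.insert(:solar_system, %{solar_system_id: 30_000_142, solar_system_name: "Jita"})
# solar_system_name_lc is derived automatically ("jita"); region and constellation use the test defaults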

View File

@@ -48,8 +48,9 @@ defmodule WandererAppWeb.IntegrationConnCase do
setup tags do
WandererAppWeb.IntegrationConnCase.setup_sandbox(tags)
# Set up mocks for this test process
WandererApp.Test.Mocks.setup_test_mocks()
# Set up mocks for this test process in global mode
# Integration tests spawn processes (MapPool, etc.) that need mock access
WandererApp.Test.Mocks.setup_test_mocks(mode: :global)
# Set up integration test environment (including Map.Manager)
WandererApp.Test.IntegrationConfig.setup_integration_environment()
@@ -74,7 +75,7 @@ defmodule WandererAppWeb.IntegrationConnCase do
- Uses shared: false for better isolation
- Child processes require explicit allowance
"""
def setup_sandbox(tags) do
def setup_sandbox(_tags) do
# Ensure the repo is started before setting up sandbox
unless Process.whereis(WandererApp.Repo) do
{:ok, _} = WandererApp.Repo.start_link()
@@ -85,26 +86,22 @@ defmodule WandererAppWeb.IntegrationConnCase do
# - This requires tests to be synchronous (async: false) if they share the same case
shared_mode = true
# Set up sandbox mode based on test type
pid =
if shared_mode do
# For async tests with shared mode:
# Checkout the sandbox connection instead of starting an owner
# This allows multiple async tests to use the same connection pool
:ok = Ecto.Adapters.SQL.Sandbox.checkout(WandererApp.Repo)
# Put the connection in shared mode
Ecto.Adapters.SQL.Sandbox.mode(WandererApp.Repo, {:shared, self()})
self()
else
# For sync tests, start a dedicated owner
pid = Ecto.Adapters.SQL.Sandbox.start_owner!(WandererApp.Repo, shared: false)
on_exit(fn -> Ecto.Adapters.SQL.Sandbox.stop_owner(pid) end)
pid
end
# Set up sandbox mode - always use start_owner! for proper ownership setup
# This ensures that spawned processes (like Ash transactions) can access the database
pid = Ecto.Adapters.SQL.Sandbox.start_owner!(WandererApp.Repo, shared: shared_mode)
# Store the sandbox owner pid for allowing background processes
# Store the sandbox owner pid BEFORE registering on_exit
# This ensures it's available for use in setup callbacks
Process.put(:sandbox_owner_pid, pid)
# Register cleanup - this will be called last (LIFO order)
on_exit(fn ->
# Only stop if the owner is still alive
if Process.alive?(pid) do
Ecto.Adapters.SQL.Sandbox.stop_owner(pid)
end
end)
# Allow critical system processes to access the database
allow_system_processes_database_access()
@@ -136,7 +133,9 @@ defmodule WandererAppWeb.IntegrationConnCase do
WandererApp.Server.TheraDataFetcher,
WandererApp.ExternalEvents.MapEventRelay,
WandererApp.ExternalEvents.WebhookDispatcher,
WandererApp.ExternalEvents.SseStreamManager
WandererApp.ExternalEvents.SseStreamManager,
# Task.Supervisor for Task.async_stream calls
Task.Supervisor
]
Enum.each(system_processes, fn process_name ->
@@ -177,7 +176,7 @@ defmodule WandererAppWeb.IntegrationConnCase do
end
end
# Monitor for dynamically spawned children and grant them mock access
# Monitor for dynamically spawned children and grant them mock and database access
defp monitor_and_allow_children(supervisor_pid, owner_pid, interval \\ 50) do
if Process.alive?(supervisor_pid) do
:timer.sleep(interval)
@@ -191,7 +190,9 @@ defmodule WandererAppWeb.IntegrationConnCase do
|> Enum.filter(&is_pid/1)
|> Enum.filter(&Process.alive?/1)
|> Enum.each(fn child_pid ->
# Grant both mock and database access
WandererApp.Test.MockOwnership.allow_mocks_for_process(child_pid, owner_pid)
allow_database_access(child_pid)
end)
_ ->

View File

@@ -356,7 +356,8 @@ defmodule WandererApp.MapTestHelpers do
def set_character_location(character_id, solar_system_id, opts \\ []) do
structure_id = opts[:structure_id]
station_id = opts[:station_id]
ship = opts[:ship] || 670 # Capsule
# Capsule
ship = opts[:ship] || 670
# First get the existing character from cache or database to maintain all fields
{:ok, existing_character} = WandererApp.Character.get_character(character_id)

View File

@@ -60,7 +60,7 @@ defmodule WandererApp.Test.MockAllowance do
Mox.set_mox_global()
# Re-setup mocks to ensure they're available globally
WandererApp.Test.Mocks.setup_mocks()
WandererApp.Test.Mocks.setup_test_mocks(mode: :global)
end
end

View File

@@ -16,9 +16,15 @@ defmodule WandererApp.Test.Mocks do
:ok
end
"""
def setup_test_mocks do
# Claim ownership of all mocks for this test process
Mox.set_mox_private()
def setup_test_mocks(opts \\ []) do
# For integration tests that spawn processes (MapPool, etc.),
# we need global mode so mocks work across process boundaries
mode = Keyword.get(opts, :mode, :private)
case mode do
:global -> Mox.set_mox_global()
:private -> Mox.set_mox_private()
end
# Set up default stubs for this test
setup_default_stubs()
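For reference, a sketch of how the test cases in this compare now call the helper; the default stays private, so existing unit tests are unaffected:

WandererApp.Test.Mocks.setup_test_mocks()               # unit tests: Mox private mode (default)
WandererApp.Test.Mocks.setup_test_mocks(mode: :global)  # integration tests that spawn MapPool and friends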

View File

@@ -132,22 +132,6 @@ defmodule WandererAppWeb.AuthTest do
assert result.status == 400
end
test "rejects request for non-existent map" do
non_existent_id = "550e8400-e29b-41d4-a716-446655440000"
conn =
build_conn()
|> put_req_header("authorization", "Bearer test_api_key_123")
|> put_private(:phoenix_router, WandererAppWeb.Router)
|> Map.put(:params, %{"map_identifier" => non_existent_id})
|> Plug.Conn.fetch_query_params()
result = CheckMapApiKey.call(conn, CheckMapApiKey.init([]))
assert result.halted
assert result.status == 404
end
test "rejects request for map without API key configured", %{map: map} do
# Update map to have no API key using the proper action
{:ok, map_without_key} = Ash.update(map, %{public_api_key: nil}, action: :update_api_key)
@@ -166,6 +150,24 @@ defmodule WandererAppWeb.AuthTest do
end
end
describe "CheckMapApiKey plug without fixtures" do
test "rejects request for non-existent map" do
non_existent_id = "550e8400-e29b-41d4-a716-446655440000"
conn =
build_conn()
|> put_req_header("authorization", "Bearer test_api_key_123")
|> put_private(:phoenix_router, WandererAppWeb.Router)
|> Map.put(:params, %{"map_identifier" => non_existent_id})
|> Plug.Conn.fetch_query_params()
result = CheckMapApiKey.call(conn, CheckMapApiKey.init([]))
assert result.halted
assert result.status == 404
end
end
describe "CheckAclApiKey plug" do
setup do
user = Factory.insert(:user)
@@ -248,6 +250,25 @@ defmodule WandererAppWeb.AuthTest do
assert result.status == 401
end
test "rejects request for ACL without API key configured", %{acl: acl} do
# Update ACL to have no API key
{:ok, acl_without_key} = Ash.update(acl, %{api_key: nil})
conn =
build_conn()
|> put_req_header("authorization", "Bearer test_acl_key_456")
|> put_private(:phoenix_router, WandererAppWeb.Router)
|> Map.put(:params, %{"id" => acl_without_key.id})
|> Plug.Conn.fetch_query_params()
result = CheckAclApiKey.call(conn, CheckAclApiKey.init([]))
assert result.halted
assert result.status == 401
end
end
describe "CheckAclApiKey plug without fixtures" do
test "rejects request with missing ACL ID" do
conn =
build_conn()
@@ -277,23 +298,6 @@ defmodule WandererAppWeb.AuthTest do
assert result.halted
assert result.status == 404
end
test "rejects request for ACL without API key configured", %{acl: acl} do
# Update ACL to have no API key
{:ok, acl_without_key} = Ash.update(acl, %{api_key: nil})
conn =
build_conn()
|> put_req_header("authorization", "Bearer test_acl_key_456")
|> put_private(:phoenix_router, WandererAppWeb.Router)
|> Map.put(:params, %{"id" => acl_without_key.id})
|> Plug.Conn.fetch_query_params()
result = CheckAclApiKey.call(conn, CheckAclApiKey.init([]))
assert result.halted
assert result.status == 401
end
end
describe "BasicAuth" do


@@ -1,359 +0,0 @@
defmodule WandererApp.Map.MapPoolTest do
use ExUnit.Case, async: true
alias WandererApp.Map.{MapPool, MapPoolDynamicSupervisor, Reconciler}
@cache :map_pool_cache
@registry :map_pool_registry
@unique_registry :unique_map_pool_registry
setup do
# Clean up any existing test data
cleanup_test_data()
# Check if required infrastructure is running
registries_running? =
try do
Registry.keys(@registry, self()) != :error
rescue
_ -> false
end
reconciler_running? = Process.whereis(Reconciler) != nil
on_exit(fn ->
cleanup_test_data()
end)
{:ok, registries_running: registries_running?, reconciler_running: reconciler_running?}
end
defp cleanup_test_data do
# Clean up test caches
WandererApp.Cache.delete("started_maps")
Cachex.clear(@cache)
end
describe "garbage collection with synchronous stop" do
@tag :skip
test "garbage collector successfully stops map with synchronous call" do
# This test would require setting up a full map pool with a test map
# Skipping for now as it requires more complex setup with actual map data
:ok
end
@tag :skip
test "garbage collector handles stop failures gracefully" do
# This test would verify error handling when stop fails
:ok
end
end
describe "cache lookup with registry fallback" do
test "stop_map handles cache miss by scanning registry", %{
registries_running: registries_running?
} do
if registries_running? do
# Setup: Create a map_id that's not in cache but will be found in registry scan
map_id = "test_map_#{:rand.uniform(1_000_000)}"
# Verify cache is empty for this map
assert {:ok, nil} = Cachex.get(@cache, map_id)
# Call stop_map - should handle gracefully with fallback
assert :ok = MapPoolDynamicSupervisor.stop_map(map_id)
else
# Skip test if registries not running
:ok
end
end
test "stop_map handles non-existent pool_uuid in registry", %{
registries_running: registries_running?
} do
if registries_running? do
map_id = "test_map_#{:rand.uniform(1_000_000)}"
fake_uuid = "fake_uuid_#{:rand.uniform(1_000_000)}"
# Put fake uuid in cache that doesn't exist in registry
Cachex.put(@cache, map_id, fake_uuid)
# Call stop_map - should handle gracefully with fallback
assert :ok = MapPoolDynamicSupervisor.stop_map(map_id)
else
:ok
end
end
test "stop_map updates cache when found via registry scan", %{
registries_running: registries_running?
} do
if registries_running? do
# This test would require a running pool with registered maps
# For now, we verify the fallback logic doesn't crash
map_id = "test_map_#{:rand.uniform(1_000_000)}"
assert :ok = MapPoolDynamicSupervisor.stop_map(map_id)
else
:ok
end
end
end
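# A minimal sketch of the fallback these tests exercise, under assumptions: consult
# the cache first and scan the registry on a miss. It assumes the registry holds
# {pool_uuid, map_ids} entries, which may not match the real layout.
defp resolve_pool_uuid(map_id) do
  case Cachex.get(@cache, map_id) do
    {:ok, pool_uuid} when not is_nil(pool_uuid) ->
      {:ok, pool_uuid}

    _ ->
      # Fallback: enumerate the {key, value} pairs registered in the pool registry
      @registry
      |> Registry.select([{{:"$1", :_, :"$2"}, [], [{{:"$1", :"$2"}}]}])
      |> Enum.find_value(:not_found, fn {pool_uuid, map_ids} ->
        if map_id in map_ids, do: {:ok, pool_uuid}
      end)
  end
end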
describe "state cleanup atomicity" do
@tag :skip
test "rollback occurs when registry update fails" do
# This would require mocking Registry.update_value to fail
# Skipping for now as it requires more complex mocking setup
:ok
end
@tag :skip
test "rollback occurs when cache delete fails" do
# This would require mocking Cachex.del to fail
:ok
end
@tag :skip
test "successful cleanup updates all three state stores" do
# This would verify Registry, Cache, and GenServer state are all updated
:ok
end
end
describe "Reconciler - zombie map detection and cleanup" do
test "reconciler detects zombie maps in started_maps cache", %{
reconciler_running: reconciler_running?
} do
if reconciler_running? do
# Setup: Add maps to started_maps that aren't in any registry
zombie_map_id = "zombie_map_#{:rand.uniform(1_000_000)}"
WandererApp.Cache.insert_or_update(
"started_maps",
[zombie_map_id],
fn existing -> [zombie_map_id | existing] |> Enum.uniq() end
)
# Get started_maps
{:ok, started_maps} = WandererApp.Cache.lookup("started_maps", [])
assert zombie_map_id in started_maps
# Trigger reconciliation
send(Reconciler, :reconcile)
# Give it time to process
Process.sleep(200)
# Verify zombie was cleaned up
{:ok, started_maps_after} = WandererApp.Cache.lookup("started_maps", [])
refute zombie_map_id in started_maps_after
else
:ok
end
end
test "reconciler cleans up zombie map caches", %{reconciler_running: reconciler_running?} do
if reconciler_running? do
zombie_map_id = "zombie_map_#{:rand.uniform(1_000_000)}"
# Setup zombie state
WandererApp.Cache.insert_or_update(
"started_maps",
[zombie_map_id],
fn existing -> [zombie_map_id | existing] |> Enum.uniq() end
)
WandererApp.Cache.insert("map_#{zombie_map_id}:started", true)
Cachex.put(@cache, zombie_map_id, "fake_uuid")
# Trigger reconciliation
send(Reconciler, :reconcile)
Process.sleep(200)
# Verify all caches cleaned
{:ok, started_maps} = WandererApp.Cache.lookup("started_maps", [])
refute zombie_map_id in started_maps
{:ok, cache_entry} = Cachex.get(@cache, zombie_map_id)
assert cache_entry == nil
else
:ok
end
end
end
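# Hedged sketch of the cleanup path these tests rely on: a zombie is a map_id still
# listed in "started_maps" although no pool owns it. The cache keys mirror the
# assertions above; the real Reconciler may do more.
defp cleanup_zombie(zombie_map_id) do
  WandererApp.Cache.insert_or_update(
    "started_maps",
    [],
    fn existing -> List.delete(existing, zombie_map_id) end
  )

  WandererApp.Cache.delete("map_#{zombie_map_id}:started")
  Cachex.del(@cache, zombie_map_id)
  :ok
end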
describe "Reconciler - orphan map detection and fix" do
@tag :skip
test "reconciler detects orphan maps in registry" do
# This would require setting up a pool with maps in registry
# but not in started_maps cache
:ok
end
@tag :skip
test "reconciler adds orphan maps to started_maps cache" do
# This would verify orphan maps get added to the cache
:ok
end
end
describe "Reconciler - cache inconsistency detection and fix" do
test "reconciler detects map with missing cache entry", %{
reconciler_running: reconciler_running?
} do
if reconciler_running? do
# This test verifies the reconciler can detect when a map
# is in the registry but has no cache entry
# Since we can't easily set up a full pool, we test the detection logic
map_id = "test_map_#{:rand.uniform(1_000_000)}"
# Ensure no cache entry
Cachex.del(@cache, map_id)
# The reconciler would detect this if the map was in a registry
# For now, we just verify the logic doesn't crash
send(Reconciler, :reconcile)
Process.sleep(200)
# No assertions needed - just verifying no crashes
end
end
test "reconciler detects cache pointing to non-existent pool", %{
reconciler_running: reconciler_running?
} do
if reconciler_running? do
map_id = "test_map_#{:rand.uniform(1_000_000)}"
fake_uuid = "fake_uuid_#{:rand.uniform(1_000_000)}"
# Put fake uuid in cache
Cachex.put(@cache, map_id, fake_uuid)
# Trigger reconciliation
send(Reconciler, :reconcile)
Process.sleep(200)
# Cache entry should be removed since pool doesn't exist
{:ok, cache_entry} = Cachex.get(@cache, map_id)
assert cache_entry == nil
else
:ok
end
end
end
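# Hedged sketch of the stale-entry check exercised above: a cache entry is
# inconsistent when its pool_uuid no longer resolves to a live pool. Assumes pools
# register under their pool_uuid in the unique registry.
defp stale_cache_entry?(map_id) do
  case Cachex.get(@cache, map_id) do
    {:ok, nil} -> false
    {:ok, pool_uuid} -> Registry.lookup(@unique_registry, pool_uuid) == []
    _ -> false
  end
end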
describe "Reconciler - stats and telemetry" do
test "reconciler emits telemetry events", %{reconciler_running: reconciler_running?} do
if reconciler_running? do
# Setup telemetry handler
test_pid = self()
:telemetry.attach(
"test-reconciliation",
[:wanderer_app, :map, :reconciliation],
fn _event, measurements, _metadata, _config ->
send(test_pid, {:telemetry, measurements})
end,
nil
)
# Trigger reconciliation
send(Reconciler, :reconcile)
Process.sleep(200)
# Should receive telemetry event
assert_receive {:telemetry, measurements}, 500
assert is_integer(measurements.total_started_maps)
assert is_integer(measurements.total_registry_maps)
assert is_integer(measurements.zombie_maps)
assert is_integer(measurements.orphan_maps)
assert is_integer(measurements.cache_inconsistencies)
# Cleanup
:telemetry.detach("test-reconciliation")
else
:ok
end
end
end
describe "Reconciler - manual trigger" do
test "trigger_reconciliation runs reconciliation immediately", %{
reconciler_running: reconciler_running?
} do
if reconciler_running? do
zombie_map_id = "zombie_map_#{:rand.uniform(1_000_000)}"
# Setup zombie state
WandererApp.Cache.insert_or_update(
"started_maps",
[zombie_map_id],
fn existing -> [zombie_map_id | existing] |> Enum.uniq() end
)
# Verify it exists
{:ok, started_maps_before} = WandererApp.Cache.lookup("started_maps", [])
assert zombie_map_id in started_maps_before
# Trigger manual reconciliation
Reconciler.trigger_reconciliation()
Process.sleep(200)
# Verify zombie was cleaned up
{:ok, started_maps_after} = WandererApp.Cache.lookup("started_maps", [])
refute zombie_map_id in started_maps_after
else
:ok
end
end
end
describe "edge cases and error handling" do
test "stop_map with cache error returns ok", %{registries_running: registries_running?} do
if registries_running? do
map_id = "test_map_#{:rand.uniform(1_000_000)}"
# Even if cache operations fail, should return :ok
assert :ok = MapPoolDynamicSupervisor.stop_map(map_id)
else
:ok
end
end
test "reconciler handles empty registries gracefully", %{
reconciler_running: reconciler_running?
} do
if reconciler_running? do
# Clear everything
cleanup_test_data()
# Should not crash even with empty data
send(Reconciler, :reconcile)
Process.sleep(200)
# No assertions - just verifying no crash
assert true
else
:ok
end
end
test "reconciler handles nil values in caches", %{reconciler_running: reconciler_running?} do
if reconciler_running? do
map_id = "test_map_#{:rand.uniform(1_000_000)}"
# Explicitly set nil
Cachex.put(@cache, map_id, nil)
# Should handle gracefully
send(Reconciler, :reconcile)
Process.sleep(200)
assert true
else
:ok
end
end
end
end


@@ -1,361 +0,0 @@
defmodule WandererApp.Map.SlugUniquenessTest do
@moduledoc """
Tests for map slug uniqueness constraints and handling.
These tests verify that:
1. Database unique constraint is enforced
2. Application-level slug generation handles uniqueness
3. Concurrent map creation doesn't create duplicates
4. Error handling works correctly for slug conflicts
"""
use WandererApp.DataCase, async: true
alias WandererApp.Api.Map
require Logger
describe "slug uniqueness constraint" do
setup do
# Create a test character (which includes a user)
character = create_test_user()
%{character: character}
end
test "prevents duplicate slugs via database constraint", %{character: character} do
# Create first map with a specific slug
{:ok, map1} =
Map.new(%{
name: "Test Map",
slug: "test-map",
owner_id: character.id,
description: "First map",
scope: "wormholes"
})
assert map1.slug == "test-map"
# Attempt to create second map with same slug
# The updated logic now auto-increments the slug instead of failing
result =
Map.new(%{
name: "Different Name",
slug: "test-map",
owner_id: character.id,
description: "Second map",
scope: "wormholes"
})
# Should succeed with auto-incremented slug
assert {:ok, map2} = result
assert map2.slug == "test-map-2"
end
test "automatically increments slug when duplicate detected", %{character: character} do
# Create first map
{:ok, map1} =
Map.new(%{
name: "Test Map",
slug: "test-map",
owner_id: character.id,
description: "First map",
scope: "wormholes"
})
assert map1.slug == "test-map"
# Create second map with same name (should auto-increment slug)
{:ok, map2} =
Map.new(%{
name: "Test Map",
slug: "test-map",
owner_id: character.id,
description: "Second map",
scope: "wormholes"
})
# Slug should be automatically incremented
assert map2.slug == "test-map-2"
# Create third map with same name
{:ok, map3} =
Map.new(%{
name: "Test Map",
slug: "test-map",
owner_id: character.id,
description: "Third map",
scope: "wormholes"
})
assert map3.slug == "test-map-3"
end
test "handles many maps with similar names", %{character: character} do
# Create 10 maps with the same base slug
maps =
for i <- 1..10 do
{:ok, map} =
Map.new(%{
name: "Popular Name",
slug: "popular-name",
owner_id: character.id,
description: "Map #{i}",
scope: "wormholes"
})
map
end
# Verify all slugs are unique
slugs = Enum.map(maps, & &1.slug)
assert length(Enum.uniq(slugs)) == 10
# First should keep the base slug
assert List.first(maps).slug == "popular-name"
# Others should be numbered
assert "popular-name-2" in slugs
assert "popular-name-10" in slugs
end
end
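# A minimal sketch (not the application's implementation) of the auto-increment
# behavior verified above: append -2, -3, ... until a free slug is found. The
# uniqueness check is passed in as a predicate to avoid guessing at the real API.
defp next_available_slug(base_slug, taken?) do
  if taken?.(base_slug) do
    Enum.find_value(2..1_000, fn n ->
      candidate = "#{base_slug}-#{n}"
      if not taken?.(candidate), do: candidate
    end)
  else
    base_slug
  end
end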
describe "concurrent slug creation (race condition)" do
setup do
character = create_test_user()
%{character: character}
end
@tag :slow
test "handles concurrent map creation with identical slugs", %{character: character} do
# Create 5 concurrent map creation requests with the same slug
tasks =
for i <- 1..5 do
Task.async(fn ->
Map.new(%{
name: "Concurrent Test",
slug: "concurrent-test",
owner_id: character.id,
description: "Concurrent map #{i}",
scope: "wormholes"
})
end)
end
# Wait for all tasks to complete
results = Task.await_many(tasks, 10_000)
# All should either succeed or fail gracefully (no crashes)
assert length(results) == 5
# Get successful results
successful = Enum.filter(results, &match?({:ok, _}, &1))
failed = Enum.filter(results, &match?({:error, _}, &1))
# At least some should succeed
assert length(successful) > 0
# Extract maps from successful results
maps = Enum.map(successful, fn {:ok, map} -> map end)
# Verify all successful maps have unique slugs
slugs = Enum.map(maps, & &1.slug)
assert length(Enum.uniq(slugs)) == length(slugs),
"All successful maps should have unique slugs"
# Log results for visibility
Logger.info("Concurrent test: #{length(successful)} succeeded, #{length(failed)} failed")
Logger.info("Unique slugs created: #{inspect(slugs)}")
end
@tag :slow
test "concurrent creation with different names creates different base slugs", %{character: character} do
# Create concurrent requests with different names (should all succeed)
tasks =
for i <- 1..5 do
Task.async(fn ->
Map.new(%{
name: "Concurrent Map #{i}",
slug: "concurrent-map-#{i}",
owner_id: character.id,
description: "Map #{i}",
scope: "wormholes"
})
end)
end
results = Task.await_many(tasks, 10_000)
# All should succeed
assert Enum.all?(results, &match?({:ok, _}, &1))
# All should have different slugs
slugs = Enum.map(results, fn {:ok, map} -> map.slug end)
assert length(Enum.uniq(slugs)) == 5
end
end
describe "slug generation edge cases" do
setup do
character = create_test_user()
%{character: character}
end
test "handles very long slugs", %{character: character} do
# Create map with name within limits but slug that's very long
# Note: name max is 20 chars, slug max is 40 chars
long_slug = String.duplicate("a", 50)
# Attempting to create a map with a slug that's too long should fail validation
result =
Map.new(%{
name: "Long Slug Test",
slug: long_slug,
owner_id: character.id,
description: "Long slug test",
scope: "wormholes"
})
# Should fail because slug exceeds max length
assert {:error, _error} = result
# But creating with a slug exactly at max length should work
max_length_slug = String.duplicate("a", 40)
{:ok, map} =
Map.new(%{
name: "Long Slug Test",
slug: max_length_slug,
owner_id: character.id,
description: "Long slug test",
scope: "wormholes"
})
assert String.length(map.slug) == 40
end
test "handles special characters in slugs", %{character: character} do
# Test that special characters are properly slugified
{:ok, map} =
Map.new(%{
name: "Test: Map & Name!",
slug: "test-map-name",
owner_id: character.id,
description: "Special chars test",
scope: "wormholes"
})
# Slug should only contain allowed characters
assert map.slug =~ ~r/^[a-z0-9-]+$/
end
end
describe "slug update operations" do
setup do
character = create_test_user()
{:ok, map} =
Map.new(%{
name: "Original Map",
slug: "original-map",
owner_id: character.id,
description: "Original",
scope: "wormholes"
})
%{character: character, map: map}
end
test "updating map with same slug succeeds", %{map: map} do
# Update other fields, keep same slug
result =
Map.update(map, %{
description: "Updated description",
slug: "original-map"
})
assert {:ok, updated_map} = result
assert updated_map.slug == "original-map"
assert updated_map.description == "Updated description"
end
test "updating to conflicting slug is handled", %{character: character, map: map} do
# Create another map
{:ok, _other_map} =
Map.new(%{
name: "Other Map",
slug: "other-map",
owner_id: character.id,
description: "Other",
scope: "wormholes"
})
# Try to update first map to use other map's slug
result =
Map.update(map, %{
slug: "other-map"
})
# Should either fail or auto-increment
case result do
{:ok, updated_map} ->
# If successful, slug should be different
assert updated_map.slug != "other-map"
assert updated_map.slug =~ ~r/^other-map-\d+$/
{:error, _} ->
# Or it can fail with validation error
:ok
end
end
end
describe "get_map_by_slug with duplicates" do
setup do
character = create_test_user()
%{character: character}
end
test "get_map_by_slug! raises on duplicates if they exist" do
# Note: This test documents the behavior when duplicates somehow exist
# In production, this should be prevented by our fixes
# If duplicates exist (data integrity issue), the query should fail
# This is a documentation test - we can't easily create duplicates
# due to the database constraint, but we document expected behavior
assert true
end
end
# Helper functions
defp create_test_user do
# Create a test user with necessary attributes
user =
case Ash.create(WandererApp.Api.User, %{
name: "Test User #{:rand.uniform(10_000)}",
hash: "test_hash_#{:rand.uniform(100_000_000)}"
}) do
{:ok, user} -> user
{:error, reason} -> raise "Failed to create user: #{inspect(reason)}"
end
# Create a character for the user (maps need character as owner)
unique_id = System.unique_integer([:positive])
character =
case Ash.create(
WandererApp.Api.Character,
%{
eve_id: "#{2_000_000_000 + unique_id}",
name: "Test Character #{unique_id}",
user_id: user.id
},
action: :link
) do
{:ok, character} -> character
{:error, reason} -> raise "Failed to create character: #{inspect(reason)}"
end
character
end
end


@@ -82,7 +82,6 @@ defmodule WandererApp.MapDuplicationServiceTest do
assert {:error, {:not_found, _message}} = result
end
@tag :skip
test "preserves original map unchanged", %{owner: owner, source_map: source_map} do
original_name = source_map.name
original_description = source_map.description
@@ -114,7 +113,7 @@ defmodule WandererApp.MapDuplicationServiceTest do
{:ok, duplicate1} = Duplication.duplicate_map(source_map.id, target_map1, [])
# Create second duplicate
# Create second duplicate
target_map2 =
insert(:map, %{
name: "Unique Copy 2",