Mirror of https://github.com/wanderer-industries/wanderer, synced 2025-12-04 14:55:34 +00:00

Compare commits
77 Commits

| SHA1 |
|---|
| 7a82b2c102 |
| 2db2a47186 |
| eabb0e8470 |
| c65b8e5ebd |
| bfed1480ae |
| 5ff902f185 |
| 8d38345c7f |
| 14be9dbb8a |
| 720c26db94 |
| 6d0b8b845d |
| b2767e000e |
| 169f23c2ca |
| 81f70eafff |
| 8b6f600989 |
| fe3617b39f |
| 7fb8d24d73 |
| f03448007d |
| c317a8bff9 |
| 618cca39a4 |
| fe7a98098f |
| df49939990 |
| f23f2776f4 |
| 4419c86164 |
| 9848f49b49 |
| 4c23069a0a |
| 4a1d7be44c |
| 26d0392da1 |
| 83b1406cce |
| fa83185cf5 |
| 97d5010d41 |
| e73ad93920 |
| 425af246fb |
| a2912ba0ff |
| 61cd281a18 |
| 6e28134282 |
| d1377f44d2 |
| 2a72a2612d |
| 66bb4f87d4 |
| 977b1ad083 |
| 94db18d42b |
| 7e0375108d |
| 094a5d7b62 |
| 8f947a5f04 |
| 5580ad62f9 |
| c0953dc954 |
| 1df93da564 |
| e2252a9d72 |
| 7cdba4b507 |
| b110d5afec |
| 6112b3e399 |
| af0869a39b |
| d44c339990 |
| 0304f92ad9 |
| 4a41d6e5d5 |
| 30893ca68e |
| 1edd02fa5c |
| 10a957ff0d |
| 35b1b3619d |
| 777ebd0c41 |
| 9e7d8c08e1 |
| ad3f4cda09 |
| 98502cc6ae |
| 0bac671eb0 |
| 09c9a1e752 |
| 41e77e8336 |
| a6e9fee2a0 |
| c403a1cee5 |
| 02d25b370a |
| e5a3eec8a1 |
| 910352d66c |
| c4f02e7d55 |
| 6a1197ad83 |
| 84c31bbb88 |
| 33f6c32306 |
| 5c71304d41 |
| bbaf04e977 |
| ad5b2d2eb3 |
@@ -8,4 +8,5 @@ export GIT_SHA="1111"
```sh
export WANDERER_INVITES="false"
export WANDERER_PUBLIC_API_DISABLED="false"
export WANDERER_CHARACTER_API_DISABLED="false"
export WANDERER_ZKILL_PRELOAD_DISABLED="false"
export WANDERER_KILLS_SERVICE_ENABLED="true"
export WANDERER_KILLS_BASE_URL="ws://host.docker.internal:4004"
```
CHANGELOG.md (209 changed lines)
@@ -2,6 +2,215 @@

<!-- changelog -->

## [v1.74.9](https://github.com/wanderer-industries/wanderer/compare/v1.74.8...v1.74.9) (2025-07-13)

### Bug Fixes:

* Map: Trying to fix a problem with fast-forwarding after the page has been inactive for some time.

## [v1.74.8](https://github.com/wanderer-industries/wanderer/compare/v1.74.7...v1.74.8) (2025-07-11)

### Bug Fixes:

* Map: Removed comments
* Map: Fixed conflict
* Map: Unified settings. Second part: Import/Export
* Map: Unified settings. First part: add one place for storing settings

## [v1.74.7](https://github.com/wanderer-industries/wanderer/compare/v1.74.6...v1.74.7) (2025-07-09)

## [v1.74.6](https://github.com/wanderer-industries/wanderer/compare/v1.74.5...v1.74.6) (2025-07-09)

## [v1.74.5](https://github.com/wanderer-industries/wanderer/compare/v1.74.4...v1.74.5) (2025-07-09)

### Bug Fixes:

* Map: Added a background for Pochven systems. Changed from region name to constellation name for Pochven systems. Changed connection style for gates (displayed like a common connection). Changed connection behaviour.

## [v1.74.4](https://github.com/wanderer-industries/wanderer/compare/v1.74.3...v1.74.4) (2025-07-07)

### Bug Fixes:

* Core: Fixed issue with updating system positions

## [v1.74.3](https://github.com/wanderer-industries/wanderer/compare/v1.74.2...v1.74.3) (2025-07-06)

### Bug Fixes:

* Core: Fixed issues with the map subscription component

## [v1.74.2](https://github.com/wanderer-industries/wanderer/compare/v1.74.1...v1.74.2) (2025-06-30)

### Bug Fixes:

* Core: Fixed map loading for non-existing maps

## [v1.74.1](https://github.com/wanderer-industries/wanderer/compare/v1.74.0...v1.74.1) (2025-06-28)

### Bug Fixes:

* Core: Mark connections between Pochven systems as known.

## [v1.74.0](https://github.com/wanderer-industries/wanderer/compare/v1.73.0...v1.74.0) (2025-06-25)

### Features:

* Core: Reverted showing the linked signature ID as part of temporary names

## [v1.73.0](https://github.com/wanderer-industries/wanderer/compare/v1.72.1...v1.73.0) (2025-06-25)

### Features:

* Core: Allowed system temp names up to 12 characters. Deprecated showing the linked signature ID as part of the temporary name.

## [v1.72.1](https://github.com/wanderer-industries/wanderer/compare/v1.72.0...v1.72.1) (2025-06-23)

### Bug Fixes:

* Fixed an issue with tracking signature activity

## [v1.72.0](https://github.com/wanderer-industries/wanderer/compare/v1.71.3...v1.72.0) (2025-06-21)

### Features:

* Core: Added the ability to see & top up map balance and map subscription info (on public)

## [v1.71.3](https://github.com/wanderer-industries/wanderer/compare/v1.71.2...v1.71.3) (2025-06-21)

### Bug Fixes:

* Map: Fixed incorrect placement of labels

## [v1.71.2](https://github.com/wanderer-industries/wanderer/compare/v1.71.1...v1.71.2) (2025-06-20)

### Bug Fixes:

* Fixed an issue with kill-service disconnects

## [v1.71.1](https://github.com/wanderer-industries/wanderer/compare/v1.71.0...v1.71.1) (2025-06-19)

### Bug Fixes:

* Updated system kills widget timing

## [v1.71.0](https://github.com/wanderer-industries/wanderer/compare/v1.70.7...v1.71.0) (2025-06-19)

### Features:

* Use external services for kill data

### Bug Fixes:

* Removed duplicate kills connections
* Fixed kills client init & map start/update logic
* Avoid duplicate subs, and remove subs on inactive maps

## [v1.70.7](https://github.com/wanderer-industries/wanderer/compare/v1.70.6...v1.70.7) (2025-06-18)

### Bug Fixes:

* Subscriptions: Added option to top up using ALL available user balance

## [v1.70.6](https://github.com/wanderer-industries/wanderer/compare/v1.70.5...v1.70.6) (2025-06-18)

## [v1.70.5](https://github.com/wanderer-industries/wanderer/compare/v1.70.4...v1.70.5) (2025-06-17)

### Bug Fixes:

* Core: Fixed character caching issues

## [v1.70.4](https://github.com/wanderer-industries/wanderer/compare/v1.70.3...v1.70.4) (2025-06-16)

### Bug Fixes:

* Core: Distribute tracking to the minimal pool first

## [v1.70.3](https://github.com/wanderer-industries/wanderer/compare/v1.70.2...v1.70.3) (2025-06-16)

### Bug Fixes:

* Core: Don't pause tracking for new pools

## [v1.70.2](https://github.com/wanderer-industries/wanderer/compare/v1.70.1...v1.70.2) (2025-06-15)

### Bug Fixes:

* Core: Invalidate character corp and alliance data on map server start

## [v1.70.1](https://github.com/wanderer-industries/wanderer/compare/v1.70.0...v1.70.1) (2025-06-14)
@@ -212,3 +212,75 @@
```scss
.p-inputtext:enabled:hover {
  border-color: #335c7e;
}

// --------------- TOAST
.p-toast .p-toast-message {
  background-color: #1a1a1a;
  color: #e0e0e0;
  border-left: 4px solid transparent;
  border-radius: 4px;
  box-shadow: 0 2px 8px rgba(0, 0, 0, 0.7);
}

.p-toast .p-toast-message .p-toast-summary {
  color: #ffffff;
  font-weight: 600;
}

.p-toast .p-toast-message .p-toast-detail {
  color: #c0c0c0;
  font-size: 13px;
}

.p-toast .p-toast-icon-close {
  color: #ffaa00;
  transition: background 0.2s;
}
.p-toast .p-toast-icon-close:hover {
  background: #333;
  color: #fff;
}

.p-toast-message-success {
  border-left-color: #f1c40f;
}
.p-toast-message-error {
  border-left-color: #e74c3c;
}
.p-toast-message-info {
  border-left-color: #3498db;
}
.p-toast-message-warn {
  border-left-color: #e67e22;
}

.p-toast-message-success .p-toast-message-icon {
  color: #f1c40f;
}
.p-toast-message-error .p-toast-message-icon {
  color: #e74c3c;
}
.p-toast-message-info .p-toast-message-icon {
  color: #3498db;
}
.p-toast-message-warn .p-toast-message-icon {
  color: #e67e22;
}

.p-toast-message-success .p-toast-message-content {
  border-left-color: #f1c40f;
}

.p-toast-message-error .p-toast-message-content {
  border-left-color: #e74c3c;
}

.p-toast-message-info .p-toast-message-content {
  border-left-color: #3498db;
}

.p-toast-message-warn .p-toast-message-content {
  border-left-color: #e67e22;
}
```
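These selectors skin PrimeReact's Toast component. As a rough sketch only (the diff does not show where toasts are raised; the component and messages below are illustrative), a success toast fired through a standard PrimeReact `Toast` ref would pick up the `#f1c40f` accent defined above:

```tsx
import { useRef } from 'react';
import { Toast } from 'primereact/toast';

// Hypothetical component, only to show which classes the toast ends up with.
export const SaveSettingsButton = () => {
  const toast = useRef<Toast>(null);

  const notify = () =>
    // Rendered with .p-toast-message-success, so it receives the yellow left border and icon color.
    toast.current?.show({ severity: 'success', summary: 'Saved', detail: 'Settings stored', life: 3000 });

  return (
    <>
      <Toast ref={toast} />
      <button onClick={notify}>Save</button>
    </>
  );
};
```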
@@ -64,9 +64,9 @@ body .p-dialog {
```scss
}

.p-dialog-footer {
  padding: 1rem;
  border-top: 1px solid #ddd;
  background: #f4f4f4;
  padding: .75rem 1rem;
  border-top: none !important;
  //background: #f4f4f4;
}

.p-dialog-header-close {
```
assets/js/hooks/Mapper/components/helpers/index.ts (new file, 1 line)

@@ -0,0 +1 @@
```typescript
export * from './parseMapUserSettings.ts';
```
@@ -0,0 +1,67 @@
```typescript
import { MapUserSettings, SettingsWithVersion } from '@/hooks/Mapper/mapRootProvider/types.ts';

const REQUIRED_KEYS = [
  'widgets',
  'interface',
  'onTheMap',
  'routes',
  'localWidget',
  'signaturesWidget',
  'killsWidget',
] as const;

type RequiredKeys = (typeof REQUIRED_KEYS)[number];

/** Custom error for any parsing / validation issue */
export class MapUserSettingsParseError extends Error {
  constructor(msg: string) {
    super(`MapUserSettings parse error: ${msg}`);
  }
}

const isNumber = (v: unknown): v is number => typeof v === 'number' && !Number.isNaN(v);

/** Minimal check that an object matches SettingsWithVersion<*> */
const isSettingsWithVersion = (v: unknown): v is SettingsWithVersion<unknown> =>
  typeof v === 'object' && v !== null && isNumber((v as any).version) && 'settings' in (v as any);

/** Ensure every required key is present */
const hasAllRequiredKeys = (v: unknown): v is Record<RequiredKeys, unknown> =>
  typeof v === 'object' && v !== null && REQUIRED_KEYS.every(k => k in v);

/* ------------------------------ Main parser ------------------------------- */

/**
 * Parses and validates a JSON string as `MapUserSettings`.
 *
 * @throws `MapUserSettingsParseError` when the string is not valid JSON or the structure is invalid
 */
export const parseMapUserSettings = (json: unknown): MapUserSettings => {
  if (typeof json !== 'string') throw new MapUserSettingsParseError('Input must be a JSON string');

  let data: unknown;
  try {
    data = JSON.parse(json);
  } catch (e) {
    throw new MapUserSettingsParseError(`Invalid JSON: ${(e as Error).message}`);
  }

  if (!hasAllRequiredKeys(data)) {
    const missing = REQUIRED_KEYS.filter(k => !(k in (data as any)));
    throw new MapUserSettingsParseError(`Missing top-level field(s): ${missing.join(', ')}`);
  }

  for (const key of REQUIRED_KEYS) {
    if (!isSettingsWithVersion((data as any)[key])) {
      throw new MapUserSettingsParseError(`"${key}" must match SettingsWithVersion<T>`);
    }
  }

  // Everything passes, so cast is safe
  return data as MapUserSettings;
};

/* ------------------------------ Usage example ----------------------------- */

// const raw = fetchFromServer(); // string
// const settings = parseMapUserSettings(raw);
```
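A minimal consumption sketch for the parser above, assuming the settings payload arrives as a raw JSON string; the storage key and the fields read from the result are illustrative, not taken from this diff:

```typescript
import { parseMapUserSettings, MapUserSettingsParseError } from './parseMapUserSettings.ts';

const raw = localStorage.getItem('map:user:settings'); // hypothetical storage key

try {
  // Throws MapUserSettingsParseError on malformed JSON or a missing/invalid section.
  const settings = parseMapUserSettings(raw);
  console.log('killsWidget settings version:', settings.killsWidget.version);
} catch (e) {
  if (e instanceof MapUserSettingsParseError) {
    // Fall back to defaults when the stored payload is missing or malformed.
    console.warn('Falling back to default settings:', e.message);
  } else {
    throw e;
  }
}
```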
@@ -98,6 +98,7 @@ interface MapCompProps {
```typescript
  theme?: string;
  pings: PingData[];
  minimapPlacement?: PanelPosition;
  localShowShipName?: boolean;
}

const MapComp = ({
```
@@ -117,6 +118,7 @@ const MapComp = ({
```typescript
  onAddSystem,
  pings,
  minimapPlacement = 'bottom-right',
  localShowShipName = false,
}: MapCompProps) => {
  const { getNodes } = useReactFlow();
  const [nodes, , onNodesChange] = useNodesState<Node<SolarSystemRawType>>(initialNodes);
```
@@ -212,8 +214,9 @@ const MapComp = ({
```tsx
      showKSpaceBG: showKSpaceBG,
      isThickConnections: isThickConnections,
      pings,
      localShowShipName,
    }));
  }, [showKSpaceBG, isThickConnections, pings, update]);
  }, [showKSpaceBG, isThickConnections, pings, update, localShowShipName]);

  return (
    <>
```
@@ -10,6 +10,7 @@ export type MapData = MapUnionTypes & {
```typescript
  showKSpaceBG: boolean;
  isThickConnections: boolean;
  linkedSigEveId: string;
  localShowShipName: boolean;
};

interface MapProviderProps {
```
@@ -42,6 +43,7 @@ const INITIAL_DATA: MapData = {
```typescript
  followingCharacterEveId: null,
  userHubs: [],
  pings: [],
  localShowShipName: false,
};

export interface MapContextProps {
```
@@ -1,7 +1,7 @@
```typescript
import { useMemo } from 'react';
import { useKillsCounter } from '../../hooks/useKillsCounter';
import { useKillsCounter } from '../../hooks/useKillsCounter.ts';
import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper';
import { WithChildren, WithClassName } from '@/hooks/Mapper/types/common';
import { WithChildren, WithClassName } from '@/hooks/Mapper/types/common.ts';
import {
  KILLS_ROW_HEIGHT,
  SystemKillsList,
```
@@ -49,7 +49,7 @@ export const KillsCounter = ({
```tsx
      content={
        <div className="overflow-hidden flex w-[450px] flex-col" style={{ height: `${tooltipHeight}px` }}>
          <div className="flex-1 h-full">
            <SystemKillsList kills={limitedKills} onlyOneSystem />
            <SystemKillsList kills={limitedKills} onlyOneSystem timeRange={1} />
          </div>
        </div>
      }
```
@@ -0,0 +1 @@
```typescript
export * from './KillsCounter.tsx';
```
@@ -3,11 +3,11 @@ import clsx from 'clsx';
```typescript
import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper';
import { TooltipPosition } from '@/hooks/Mapper/components/ui-kit/WdTooltip';
import { CharItemProps, LocalCharactersList } from '../../../mapInterface/widgets/LocalCharacters/components';
import { useLocalCharactersItemTemplate } from '../../../mapInterface/widgets/LocalCharacters/hooks/useLocalCharacters';
import { useLocalCharacterWidgetSettings } from '../../../mapInterface/widgets/LocalCharacters/hooks/useLocalWidgetSettings';
import classes from './SolarSystemLocalCounter.module.scss';
import { useTheme } from '@/hooks/Mapper/hooks/useTheme.ts';
import { AvailableThemes } from '@/hooks/Mapper/mapRootProvider/types.ts';
import classes from './LocalCounter.module.scss';
import { useMapState } from '@/hooks/Mapper/components/map/MapProvider.tsx';
import { useLocalCharactersItemTemplate } from '@/hooks/Mapper/components/mapInterface/widgets/LocalCharacters/hooks/useLocalCharacters.tsx';

interface LocalCounterProps {
  localCounterCharacters: Array<CharItemProps>;
```
@@ -16,8 +16,10 @@ interface LocalCounterProps {
```typescript
}

export const LocalCounter = ({ localCounterCharacters, hasUserCharacters, showIcon = true }: LocalCounterProps) => {
  const [settings] = useLocalCharacterWidgetSettings();
  const itemTemplate = useLocalCharactersItemTemplate(settings.showShipName);
  const {
    data: { localShowShipName },
  } = useMapState();
  const itemTemplate = useLocalCharactersItemTemplate(localShowShipName);
  const theme = useTheme();

  const pilotTooltipContent = useMemo(() => {
```
@@ -0,0 +1 @@
```typescript
export * from './LocalCounter';
```
@@ -1,7 +1,7 @@
```typescript
import { useCallback, useMemo, useState } from 'react';

import classes from './SolarSystemEdge.module.scss';
import { EdgeLabelRenderer, EdgeProps, getBezierPath, getSmoothStepPath, Position, useStore } from 'reactflow';
import { EdgeLabelRenderer, EdgeProps, getBezierPath, Position, useStore } from 'reactflow';
import { getEdgeParams } from '@/hooks/Mapper/components/map/utils.ts';
import clsx from 'clsx';
import { ConnectionType, MassState, ShipSizeStatus, SolarSystemConnection, TimeStatus } from '@/hooks/Mapper/types';
```
@@ -51,11 +51,11 @@ export const SolarSystemEdge = ({ id, source, target, markerEnd, style, data }:
```typescript
  const [hovered, setHovered] = useState(false);

  const [path, labelX, labelY, sx, sy, tx, ty, sourcePos, targetPos] = useMemo(() => {
    const { sx, sy, tx, ty, sourcePos, targetPos } = getEdgeParams(sourceNode, targetNode);
    const { sx, sy, tx, ty, sourcePos, targetPos } = getEdgeParams(sourceNode!, targetNode!);

    const offset = isThickConnections ? MAP_OFFSETS_TICK[targetPos] : MAP_OFFSETS[targetPos];

    const method = isWormhole ? getBezierPath : getSmoothStepPath;
    const method = isWormhole ? getBezierPath : getBezierPath;

    const [edgePath, labelX, labelY] = method({
      sourceX: sx - offset.x,
```
@@ -40,6 +40,7 @@ $neon-color-3: rgba(27, 132, 236, 0.40);
```scss
  z-index: 3;
  overflow: hidden;

  &.Pochven,
  &.Mataria,
  &.Amarria,
  &.Gallente,
```
@@ -95,6 +96,15 @@ $neon-color-3: rgba(27, 132, 236, 0.40);
```scss
    }
  }

  &.Pochven {
    &::after {
      opacity: 0.8;
      background-image: url('/images/pochven.webp');
      background-position-x: 0;
      background-position-y: -13px;
    }
  }

  &.selected {
    border-color: $pastel-pink;
    box-shadow: 0 0 10px #9a1af1c2;
```
@@ -12,11 +12,11 @@ import {
```typescript
} from '@/hooks/Mapper/components/map/constants';
import { WormholeClassComp } from '@/hooks/Mapper/components/map/components/WormholeClassComp';
import { UnsplashedSignature } from '@/hooks/Mapper/components/map/components/UnsplashedSignature';
import { LocalCounter } from './SolarSystemLocalCounter';
import { KillsCounter } from './SolarSystemKillsCounter';
import { TooltipSize } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper/utils.ts';
import { TooltipPosition, WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit';
import { Tag } from 'primereact/tag';
import { LocalCounter } from '@/hooks/Mapper/components/map/components/LocalCounter';
import { KillsCounter } from '@/hooks/Mapper/components/map/components/KillsCounter';

// let render = 0;
export const SolarSystemNodeDefault = memo((props: NodeProps<MapSolarSystemType>) => {
```
@@ -38,7 +38,7 @@ export const SolarSystemNodeDefault = memo((props: NodeProps<MapSolarSystemType>
```tsx
        </div>
      )}

      {localKillsCount && localKillsCount > 0 && nodeVars.solarSystemId && (
      {localKillsCount != null && localKillsCount > 0 && nodeVars.solarSystemId && (
        <KillsCounter
          killsCount={localKillsCount}
          systemId={nodeVars.solarSystemId}
```
@@ -48,7 +48,7 @@ export const SolarSystemNodeDefault = memo((props: NodeProps<MapSolarSystemType>
```tsx
        >
          <div className={clsx(classes.BookmarkWithIcon)}>
            <span className={clsx(PrimeIcons.BOLT, classes.icon)} />
            <span className={clsx(classes.text)}>{nodeVars.killsCount}</span>
            <span className={clsx(classes.text)}>{localKillsCount}</span>
          </div>
        </KillsCounter>
      )}
```
@@ -12,10 +12,10 @@ import {
```typescript
} from '@/hooks/Mapper/components/map/constants';
import { WormholeClassComp } from '@/hooks/Mapper/components/map/components/WormholeClassComp';
import { UnsplashedSignature } from '@/hooks/Mapper/components/map/components/UnsplashedSignature';
import { LocalCounter } from './SolarSystemLocalCounter';
import { KillsCounter } from './SolarSystemKillsCounter';
import { TooltipPosition, WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit';
import { TooltipSize } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper/utils.ts';
import { LocalCounter } from '@/hooks/Mapper/components/map/components/LocalCounter';
import { KillsCounter } from '@/hooks/Mapper/components/map/components/KillsCounter';

// let render = 0;
export const SolarSystemNodeTheme = memo((props: NodeProps<MapSolarSystemType>) => {
```
@@ -47,7 +47,7 @@ export const SolarSystemNodeTheme = memo((props: NodeProps<MapSolarSystemType>)
```tsx
        >
          <div className={clsx(classes.BookmarkWithIcon)}>
            <span className={clsx(PrimeIcons.BOLT, classes.icon)} />
            <span className={clsx(classes.text)}>{nodeVars.killsCount}</span>
            <span className={clsx(classes.text)}>{localKillsCount}</span>
          </div>
        </KillsCounter>
      )}
```
@@ -22,6 +22,7 @@ export function useKillsCounter({ realSystemId }: UseKillsCounterProps) {
```typescript
    systemId: realSystemId,
    outCommand,
    showAllVisible: false,
    sinceHours: 1,
  });

  const filteredKills = useMemo(() => {
```
@@ -1,6 +1,7 @@
```typescript
import { useEffect, useState, useCallback } from 'react';
import { useEffect, useState, useCallback, useMemo } from 'react';
import { useMapEventListener } from '@/hooks/Mapper/events';
import { Commands } from '@/hooks/Mapper/types';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';

interface Kill {
  solar_system_id: number | string;
```
@@ -9,29 +10,65 @@ interface Kill {
```typescript
interface MapEvent {
  name: Commands;
  data?: any;
  data?: unknown;
  payload?: Kill[];
}

export function useNodeKillsCount(systemId: number | string, initialKillsCount: number | null): number | null {
  const [killsCount, setKillsCount] = useState<number | null>(initialKillsCount);
  const { data: mapData } = useMapRootState();
  const { detailedKills = {} } = mapData;

  // Calculate 1-hour kill count from detailed kills
  const oneHourKillCount = useMemo(() => {
    const systemKills = detailedKills[systemId] || [];

    // If we have detailed kills data (even if empty), use it for counting
    if (Object.prototype.hasOwnProperty.call(detailedKills, systemId)) {
      const oneHourAgo = Date.now() - 60 * 60 * 1000; // 1 hour in milliseconds
      const recentKills = systemKills.filter(kill => {
        if (!kill.kill_time) return false;
        const killTime = new Date(kill.kill_time).getTime();
        if (isNaN(killTime)) return false;
        return killTime >= oneHourAgo;
      });

      return recentKills.length; // Return 0 if no recent kills, not null
    }

    // Return null only if we don't have detailed kills data for this system
    return null;
  }, [detailedKills, systemId]);

  useEffect(() => {
    setKillsCount(initialKillsCount);
  }, [initialKillsCount]);
    // Always prefer the calculated 1-hour count over initial count
    // This ensures we properly expire old kills
    if (oneHourKillCount !== null) {
      setKillsCount(oneHourKillCount);
    } else if (detailedKills[systemId] && detailedKills[systemId].length === 0) {
      // If we have detailed kills data but it's empty, set to 0
      setKillsCount(0);
    } else {
      // Only fall back to initial count if we have no detailed kills data at all
      setKillsCount(initialKillsCount);
    }
  }, [oneHourKillCount, initialKillsCount, detailedKills, systemId]);

  const handleEvent = useCallback(
    (event: MapEvent): boolean => {
      if (event.name === Commands.killsUpdated && Array.isArray(event.payload)) {
        const killForSystem = event.payload.find(kill => kill.solar_system_id.toString() === systemId.toString());
        if (killForSystem && typeof killForSystem.kills === 'number') {
          setKillsCount(killForSystem.kills);
          // Only update if we don't have detailed kills data
          if (!detailedKills[systemId] || detailedKills[systemId].length === 0) {
            setKillsCount(killForSystem.kills);
          }
        }
        return true;
      }
      return false;
    },
    [systemId],
    [systemId, detailedKills],
  );

  useMapEventListener(handleEvent);
```
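The core of this change is the one-hour window applied to `detailedKills`. A standalone sketch of that filter is shown below; the `kill_time` field name is taken from the hook above, everything else is illustrative:

```typescript
interface DetailedKillLike {
  kill_time?: string | null; // ISO timestamp, same field the hook reads
}

// Count kills whose timestamp falls within the last hour; missing or invalid times are skipped.
export function countKillsInLastHour(kills: DetailedKillLike[], now: number = Date.now()): number {
  const oneHourAgo = now - 60 * 60 * 1000;
  return kills.filter(k => {
    if (!k.kill_time) return false;
    const t = new Date(k.kill_time).getTime();
    return !Number.isNaN(t) && t >= oneHourAgo;
  }).length;
}

// Example: only the fresh kill counts.
// countKillsInLastHour([{ kill_time: new Date().toISOString() }, { kill_time: '2020-01-01T00:00:00Z' }]) === 1
```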
@@ -5,7 +5,7 @@ import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
```typescript
import { useMapGetOption } from '@/hooks/Mapper/mapRootProvider/hooks/api';
import { useMapState } from '@/hooks/Mapper/components/map/MapProvider';
import { useDoubleClick } from '@/hooks/Mapper/hooks/useDoubleClick';
import { REGIONS_MAP, Spaces } from '@/hooks/Mapper/constants';
import { Regions, REGIONS_MAP, Spaces } from '@/hooks/Mapper/constants';
import { isWormholeSpace } from '@/hooks/Mapper/components/map/helpers/isWormholeSpace';
import { getSystemClassStyles } from '@/hooks/Mapper/components/map/helpers';
import { sortWHClasses } from '@/hooks/Mapper/helpers';
```
@@ -65,6 +65,7 @@ const SpaceToClass: Record<string, string> = {
```typescript
  [Spaces.Matar]: 'Mataria',
  [Spaces.Amarr]: 'Amarria',
  [Spaces.Gallente]: 'Gallente',
  [Spaces.Pochven]: 'Pochven',
};

export function useLocalCounter(nodeVars: SolarSystemNodeVars) {
```
@@ -112,6 +113,7 @@ export const useSolarSystemNode = (props: NodeProps<MapSolarSystemType>): SolarS
```typescript
    region_id,
    is_shattered,
    solar_system_name,
    constellation_name,
  } = systemStaticInfo;

  const { isShowUnsplashedSignatures } = interfaceSettings;
```
@@ -195,10 +197,18 @@ export const useSolarSystemNode = (props: NodeProps<MapSolarSystemType>): SolarS
```typescript
  const hubsAsStrings = useMemo(() => hubs.map(item => item.toString()), [hubs]);

  const isRally = useMemo(
    () => pings.find(x => x.solar_system_id === solar_system_id && x.type === PingType.Rally),
    () => !!pings.find(x => x.solar_system_id === solar_system_id && x.type === PingType.Rally),
    [pings, solar_system_id],
  );

  const regionName = useMemo(() => {
    if (region_id === Regions.Pochven) {
      return constellation_name;
    }

    return region_name;
  }, [constellation_name, region_id, region_name]);

  const nodeVars: SolarSystemNodeVars = {
    id,
    selected,
```
@@ -233,7 +243,7 @@ export const useSolarSystemNode = (props: NodeProps<MapSolarSystemType>): SolarS
```typescript
    isThickConnections,
    classTitle: class_title,
    temporaryName: computedTemporaryName,
    regionName: region_name,
    regionName,
    solarSystemName: solar_system_name,
    isRally,
  };
```
@@ -1,5 +1,5 @@
```typescript
import { useMemo } from 'react';
import { SolarSystemStaticInfoRaw } from '@/hooks/Mapper/types';
import { useMemo } from 'react';

interface UseSystemNameParams {
  isTempSystemNameEnabled: boolean;
```
@@ -26,7 +26,7 @@ export const useSystemName = ({
```typescript
  }

  if (isShowLinkedSigIdTempName && linkedSigPrefix) {
    return temporary_name ? `${linkedSigPrefix}・${temporary_name}` : `${linkedSigPrefix}・${solar_system_name}`;
    return temporary_name ? `${linkedSigPrefix}:${temporary_name}` : `${linkedSigPrefix}:${solar_system_name}`;
  }

  return temporary_name ?? '';
```
@@ -1,37 +1,48 @@
```typescript
import { Position, internalsSymbol } from 'reactflow';
import { Position, internalsSymbol, Node } from 'reactflow';

// returns the position (top,right,bottom or right) passed node compared to
function getParams(nodeA, nodeB) {
type Coords = [number, number];
type CoordsWithPosition = [number, number, Position];

function segmentsIntersect(a1: number, a2: number, b1: number, b2: number): boolean {
  const [minA, maxA] = a1 < a2 ? [a1, a2] : [a2, a1];
  const [minB, maxB] = b1 < b2 ? [b1, b2] : [b2, b1];

  return maxA >= minB && maxB >= minA;
}

function getParams(nodeA: Node, nodeB: Node): CoordsWithPosition {
  const centerA = getNodeCenter(nodeA);
  const centerB = getNodeCenter(nodeB);

  const horizontalDiff = Math.abs(centerA.x - centerB.x);
  const verticalDiff = Math.abs(centerA.y - centerB.y);

  let position: Position;

  // when the horizontal difference between the nodes is bigger, we use Position.Left or Position.Right for the handle
  if (horizontalDiff > verticalDiff) {
    position = centerA.x > centerB.x ? Position.Left : Position.Right;
  } else {
    // here the vertical difference between the nodes is bigger, so we use Position.Top or Position.Bottom for the handle
  if (
    segmentsIntersect(
      nodeA.positionAbsolute!.x - 10,
      nodeA.positionAbsolute!.x - 10 + nodeA.width! + 20,
      nodeB.positionAbsolute!.x,
      nodeB.positionAbsolute!.x + nodeB.width!,
    )
  ) {
    position = centerA.y > centerB.y ? Position.Top : Position.Bottom;
  } else {
    position = centerA.x > centerB.x ? Position.Left : Position.Right;
  }

  const [x, y] = getHandleCoordsByPosition(nodeA, position);
  return [x, y, position];
}

function getHandleCoordsByPosition(node, handlePosition) {
  // all handles are from type source, that's why we use handleBounds.source here
  const handle = node[internalsSymbol].handleBounds.source.find(h => h.position === handlePosition);
function getHandleCoordsByPosition(node: Node, handlePosition: Position): Coords {
  const handle = node[internalsSymbol]!.handleBounds!.source!.find(h => h.position === handlePosition);

  if (!handle) {
    throw new Error(`Handle with position ${handlePosition} not found on node ${node.id}`);
  }

  let offsetX = handle.width / 2;
  let offsetY = handle.height / 2;

  // this is a tiny detail to make the markerEnd of an edge visible.
  // The handle position that gets calculated has the origin top-left, so depending which side we are using, we add a little offset
  // when the handlePosition is Position.Right for example, we need to add an offset as big as the handle itself in order to get the correct position
  switch (handlePosition) {
    case Position.Left:
      offsetX = 0;
```
@@ -47,21 +58,20 @@ function getHandleCoordsByPosition(node, handlePosition) {
```typescript
      break;
  }

  const x = node.positionAbsolute.x + handle.x + offsetX;
  const y = node.positionAbsolute.y + handle.y + offsetY;
  const x = node.positionAbsolute!.x + handle.x + offsetX;
  const y = node.positionAbsolute!.y + handle.y + offsetY;

  return [x, y];
}

function getNodeCenter(node) {
function getNodeCenter(node: Node): { x: number; y: number } {
  return {
    x: node.positionAbsolute.x + node.width / 2,
    y: node.positionAbsolute.y + node.height / 2,
    x: node.positionAbsolute!.x + node.width! / 2,
    y: node.positionAbsolute!.y + node.height! / 2,
  };
}

// returns the parameters (sx, sy, tx, ty, sourcePos, targetPos) you need to create an edge
export function getEdgeParams(source, target) {
export function getEdgeParams(source: Node, target: Node) {
  const [sx, sy, sourcePos] = getParams(source, target);
  const [tx, ty, targetPos] = getParams(target, source);
```
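A quick illustration of the interval-overlap test that now drives the handle-side choice; the numbers below are illustrative, and the helper is a restated equivalent of `segmentsIntersect` above so the snippet runs on its own:

```typescript
// Same 1-D interval-overlap test as segmentsIntersect above, shown with concrete values.
const overlaps = (a1: number, a2: number, b1: number, b2: number) =>
  Math.max(Math.min(a1, a2), Math.min(b1, b2)) <= Math.min(Math.max(a1, a2), Math.max(b1, b2));

console.log(overlaps(0, 100, 80, 180));  // true  -> x-extents overlap, edge gets Top/Bottom handles
console.log(overlaps(0, 100, 150, 250)); // false -> no overlap, edge gets Left/Right handles
```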
@@ -7,10 +7,6 @@ import {
```typescript
  SOLAR_SYSTEM_CLASSES_TO_CLASS_GROUPS,
  WORMHOLES_ADDITIONAL_INFO_BY_SHORT_NAME,
} from '@/hooks/Mapper/components/map/constants.ts';
import {
  SETTINGS_KEYS,
  SignatureSettingsType,
} from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants.ts';
import { SystemSignaturesContent } from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/SystemSignaturesContent';
import { K162_TYPES_MAP } from '@/hooks/Mapper/constants.ts';
import { getWhSize } from '@/hooks/Mapper/helpers/getWhSize';
```
@@ -18,6 +14,7 @@ import { parseSignatureCustomInfo } from '@/hooks/Mapper/helpers/parseSignatureC
```typescript
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { CommandLinkSignatureToSystem, SignatureGroup, SystemSignature, TimeStatus } from '@/hooks/Mapper/types';
import { OutCommand } from '@/hooks/Mapper/types/mapHandlers.ts';
import { SETTINGS_KEYS, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';

const K162_SIGNATURE_TYPE = WORMHOLES_ADDITIONAL_INFO_BY_SHORT_NAME['K162'].shortName;
```
@@ -206,7 +206,7 @@ export const SystemSettingsDialog = ({ systemId, visible, setVisible }: SystemSe
```tsx
              aria-describedby="temporaryName"
              autoComplete="off"
              value={temporaryName}
              maxLength={10}
              maxLength={12}
              onChange={e => setTemporaryName(e.target.value)}
            />
          </IconField>
```
@@ -6,7 +6,6 @@ import { useMapCheckPermissions, useMapGetOption } from '@/hooks/Mapper/mapRootP
```typescript
import { UserPermission } from '@/hooks/Mapper/types/permissions';
import { LocalCharactersList } from './components/LocalCharactersList';
import { useLocalCharactersItemTemplate } from './hooks/useLocalCharacters';
import { useLocalCharacterWidgetSettings } from './hooks/useLocalWidgetSettings';
import { LocalCharactersHeader } from './components/LocalCharactersHeader';
import classes from './LocalCharacters.module.scss';
import clsx from 'clsx';
```
@@ -14,9 +13,9 @@ import clsx from 'clsx';
```typescript
export const LocalCharacters = () => {
  const {
    data: { characters, userCharacters, selectedSystems },
    storedSettings: { settingsLocal, settingsLocalUpdate },
  } = useMapRootState();

  const [settings, setSettings] = useLocalCharacterWidgetSettings();
  const [systemId] = selectedSystems;
  const restrictOfflineShowing = useMapGetOption('restrict_offline_showing');
  const isAdminOrManager = useMapCheckPermissions([UserPermission.MANAGE_MAP]);
```
@@ -31,12 +30,12 @@ export const LocalCharacters = () => {
```typescript
      .map(x => ({
        ...x,
        isOwn: userCharacters.includes(x.eve_id),
        compact: settings.compact,
        showShipName: settings.showShipName,
        compact: settingsLocal.compact,
        showShipName: settingsLocal.showShipName,
      }))
      .sort(sortCharacters);

    if (!showOffline || !settings.showOffline) {
    if (!showOffline || !settingsLocal.showOffline) {
      return filtered.filter(c => c.online);
    }
    return filtered;
```
@@ -44,9 +43,9 @@ export const LocalCharacters = () => {
```typescript
    characters,
    systemId,
    userCharacters,
    settings.compact,
    settings.showOffline,
    settings.showShipName,
    settingsLocal.compact,
    settingsLocal.showOffline,
    settingsLocal.showShipName,
    showOffline,
  ]);
```
@@ -54,7 +53,7 @@ export const LocalCharacters = () => {
```typescript
  const isNotSelectedSystem = selectedSystems.length !== 1;
  const showList = sorted.length > 0 && selectedSystems.length === 1;

  const itemTemplate = useLocalCharactersItemTemplate(settings.showShipName);
  const itemTemplate = useLocalCharactersItemTemplate(settingsLocal.showShipName);

  return (
    <Widget
```
@@ -63,8 +62,8 @@ export const LocalCharacters = () => {
```tsx
          sortedCount={sorted.length}
          showList={showList}
          showOffline={showOffline}
          settings={settings}
          setSettings={setSettings}
          settings={settingsLocal}
          setSettings={settingsLocalUpdate}
        />
      }
    >
```
@@ -81,7 +80,7 @@ export const LocalCharacters = () => {
```tsx
        {showList && (
          <LocalCharactersList
            items={sorted}
            itemSize={settings.compact ? 26 : 41}
            itemSize={settingsLocal.compact ? 26 : 41}
            itemTemplate={itemTemplate}
            containerClassName={clsx(
              'w-full h-full overflow-x-hidden overflow-y-auto custom-scrollbar select-none',
```
@@ -1,21 +0,0 @@
```typescript
import useLocalStorageState from 'use-local-storage-state';

export interface LocalCharacterWidgetSettings {
  compact: boolean;
  showOffline: boolean;
  version: number;
  showShipName: boolean;
}

export const LOCAL_CHARACTER_WIDGET_DEFAULT: LocalCharacterWidgetSettings = {
  compact: true,
  showOffline: false,
  version: 0,
  showShipName: false,
};

export function useLocalCharacterWidgetSettings() {
  return useLocalStorageState<LocalCharacterWidgetSettings>('kills:widget:settings', {
    defaultValue: LOCAL_CHARACTER_WIDGET_DEFAULT,
  });
}
```
@@ -8,8 +8,8 @@ import {
```typescript
  Setting,
  SettingsTypes,
  SIGNATURE_SETTINGS,
  SignatureSettingsType,
} from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants.ts';
import { SignatureSettingsType } from '@/hooks/Mapper/constants/signatures.ts';

interface SystemSignatureSettingsDialogProps {
  settings: SignatureSettingsType;
```
@@ -1,21 +1,14 @@
```typescript
import { useCallback, useState, useEffect, useRef, useMemo } from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { Widget } from '@/hooks/Mapper/components/mapInterface/components';
import { SystemSignaturesContent } from './SystemSignaturesContent';
import { SystemSignatureSettingsDialog } from './SystemSignatureSettingsDialog';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { SystemSignaturesHeader } from './SystemSignatureHeader';
import useLocalStorageState from 'use-local-storage-state';
import { useHotkey } from '@/hooks/Mapper/hooks/useHotkey';
import {
  SETTINGS_KEYS,
  SETTINGS_VALUES,
  SIGNATURE_SETTING_STORE_KEY,
  SIGNATURE_WINDOW_ID,
  SignatureSettingsType,
  getDeletionTimeoutMs,
} from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants.ts';
import { getDeletionTimeoutMs } from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants.ts';
import { OutCommand, OutCommandHandler } from '@/hooks/Mapper/types/mapHandlers';
import { ExtendedSystemSignature } from '@/hooks/Mapper/types';
import { SETTINGS_KEYS, SIGNATURE_WINDOW_ID, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';

/**
 * Custom hook for managing pending signature deletions and undo countdown.
```
@@ -126,20 +119,14 @@ export const SystemSignatures = () => {
```typescript
  const {
    data: { selectedSystems },
    outCommand,
    storedSettings: { settingsSignatures, settingsSignaturesUpdate },
  } = useMapRootState();

  const [currentSettings, setCurrentSettings] = useLocalStorageState<SignatureSettingsType>(
    SIGNATURE_SETTING_STORE_KEY,
    {
      defaultValue: SETTINGS_VALUES,
    },
  );

  const [systemId] = selectedSystems;
  const isSystemSelected = useMemo(() => selectedSystems.length === 1, [selectedSystems.length]);
  const { pendingIds, countdown, deletedSignatures, addDeleted, handleUndo } = useSignatureUndo(
    systemId,
    currentSettings,
    settingsSignatures,
    outCommand,
  );
```
@@ -157,20 +144,20 @@ export const SystemSignatures = () => {
```typescript
  const handleSettingsSave = useCallback(
    (newSettings: SignatureSettingsType) => {
      setCurrentSettings(newSettings);
      settingsSignaturesUpdate(newSettings);
      setVisible(false);
    },
    [setCurrentSettings],
    [settingsSignaturesUpdate],
  );

  const handleLazyDeleteToggle = useCallback(
    (value: boolean) => {
      setCurrentSettings(prev => ({
      settingsSignaturesUpdate(prev => ({
        ...prev,
        [SETTINGS_KEYS.LAZY_DELETE_SIGNATURES]: value,
      }));
    },
    [setCurrentSettings],
    [settingsSignaturesUpdate],
  );

  const openSettings = useCallback(() => setVisible(true), []);
```
@@ -180,7 +167,7 @@ export const SystemSignatures = () => {
```tsx
        label={
          <SystemSignaturesHeader
            sigCount={sigCount}
            lazyDeleteValue={currentSettings[SETTINGS_KEYS.LAZY_DELETE_SIGNATURES] as boolean}
            lazyDeleteValue={settingsSignatures[SETTINGS_KEYS.LAZY_DELETE_SIGNATURES] as boolean}
            pendingCount={pendingIds.size}
            undoCountdown={countdown}
            onLazyDeleteChange={handleLazyDeleteToggle}
```
@@ -197,7 +184,7 @@ export const SystemSignatures = () => {
```tsx
      ) : (
        <SystemSignaturesContent
          systemId={systemId}
          settings={currentSettings}
          settings={settingsSignatures}
          deletedSignatures={deletedSignatures}
          onLazyDeleteChange={handleLazyDeleteToggle}
          onCountChange={handleCountChange}
```
@@ -207,7 +194,7 @@ export const SystemSignatures = () => {
```tsx
      {visible && (
        <SystemSignatureSettingsDialog
          settings={currentSettings}
          settings={settingsSignatures}
          onCancel={() => setVisible(false)}
          onSave={handleSettingsSave}
        />
```
@@ -8,7 +8,6 @@ import {
```typescript
  SortOrder,
} from 'primereact/datatable';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import useLocalStorageState from 'use-local-storage-state';

import { SignatureView } from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/SignatureView';
import {
```
@@ -17,9 +16,6 @@ import {
```typescript
  GROUPS_LIST,
  MEDIUM_MAX_WIDTH,
  OTHER_COLUMNS_WIDTH,
  SETTINGS_KEYS,
  SIGNATURE_WINDOW_ID,
  SignatureSettingsType,
} from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants';
import { SignatureSettings } from '@/hooks/Mapper/components/mapRootContent/components/SignatureSettings';
import { TooltipPosition, WdTooltip, WdTooltipHandlers, WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit';
```
@@ -36,19 +32,11 @@ import { useClipboard, useHotkey } from '@/hooks/Mapper/hooks';
```typescript
import useMaxWidth from '@/hooks/Mapper/hooks/useMaxWidth';
import { getSignatureRowClass } from '../helpers/rowStyles';
import { useSystemSignaturesData } from '../hooks/useSystemSignaturesData';
import { SETTINGS_KEYS, SIGNATURE_WINDOW_ID, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures.ts';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';

const renderColIcon = (sig: SystemSignature) => renderIcon(sig);

type SystemSignaturesSortSettings = {
  sortField: string;
  sortOrder: SortOrder;
};

const SORT_DEFAULT_VALUES: SystemSignaturesSortSettings = {
  sortField: 'inserted_at',
  sortOrder: -1,
};

interface SystemSignaturesContentProps {
  systemId: string;
  settings: SignatureSettingsType;
```
@@ -79,6 +67,10 @@ export const SystemSignaturesContent = ({
```typescript
  const [nameColumnWidth, setNameColumnWidth] = useState('auto');
  const [hoveredSignature, setHoveredSignature] = useState<SystemSignature | null>(null);

  const {
    storedSettings: { settingsSignatures, settingsSignaturesUpdate },
  } = useMapRootState();

  const tableRef = useRef<HTMLDivElement>(null);
  const tooltipRef = useRef<WdTooltipHandlers>(null);
```
@@ -87,11 +79,6 @@ export const SystemSignaturesContent = ({
```typescript
  const { clipboardContent, setClipboardContent } = useClipboard();

  const [sortSettings, setSortSettings] = useLocalStorageState<{ sortField: string; sortOrder: SortOrder }>(
    'window:signatures:sort',
    { defaultValue: SORT_DEFAULT_VALUES },
  );

  const {
    signatures,
    selectedSignatures,
```
@@ -246,8 +233,8 @@ export const SystemSignaturesContent = ({
```typescript
    tooltipRef.current?.hide();
  }, []);

  const refVars = useRef({ settings, selectedSignatures, setSortSettings });
  refVars.current = { settings, selectedSignatures, setSortSettings };
  const refVars = useRef({ settings, selectedSignatures, settingsSignatures, settingsSignaturesUpdate });
  refVars.current = { settings, selectedSignatures, settingsSignatures, settingsSignaturesUpdate };

  // @ts-ignore
  const getRowClassName = useCallback(rowData => {
```
@@ -263,7 +250,12 @@ export const SystemSignaturesContent = ({
```typescript
  }, []);

  const handleSortSettings = useCallback(
    (e: DataTableStateEvent) => refVars.current.setSortSettings({ sortField: e.sortField, sortOrder: e.sortOrder }),
    (e: DataTableStateEvent) =>
      refVars.current.settingsSignaturesUpdate({
        ...refVars.current.settingsSignatures,
        [SETTINGS_KEYS.SORT_FIELD]: e.sortField,
        [SETTINGS_KEYS.SORT_ORDER]: e.sortOrder,
      }),
    [],
  );
```
@@ -295,8 +287,8 @@ export const SystemSignaturesContent = ({
```tsx
          rowHover
          selectAll
          onRowDoubleClick={handleRowClick}
          sortField={sortSettings.sortField}
          sortOrder={sortSettings.sortOrder}
          sortField={settingsSignatures[SETTINGS_KEYS.SORT_FIELD] as string}
          sortOrder={settingsSignatures[SETTINGS_KEYS.SORT_ORDER] as SortOrder}
          onSort={handleSortSettings}
          onRowMouseEnter={onRowMouseEnter}
          onRowMouseLeave={onRowMouseLeave}
```
@@ -11,6 +11,7 @@ import {
```typescript
  SignatureKindFR,
  SignatureKindRU,
} from '@/hooks/Mapper/types';
import { SETTINGS_KEYS, SIGNATURES_DELETION_TIMING, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';

export const TIME_ONE_MINUTE = 1000 * 60;
export const TIME_TEN_MINUTES = TIME_ONE_MINUTE * 10;
```
@@ -96,44 +97,11 @@ export const getGroupIdByRawGroup = (val: string): SignatureGroup | undefined =>
```typescript
  return MAPPING_GROUP_TO_ENG[val] || undefined;
};

export const SIGNATURE_WINDOW_ID = 'system_signatures_window';
export const SIGNATURE_SETTING_STORE_KEY = 'wanderer_system_signature_settings_v6_5';

export enum SETTINGS_KEYS {
  SHOW_DESCRIPTION_COLUMN = 'show_description_column',
  SHOW_UPDATED_COLUMN = 'show_updated_column',
  SHOW_CHARACTER_COLUMN = 'show_character_column',
  LAZY_DELETE_SIGNATURES = 'lazy_delete_signatures',
  KEEP_LAZY_DELETE = 'keep_lazy_delete_enabled',
  DELETION_TIMING = 'deletion_timing',
  COLOR_BY_TYPE = 'color_by_type',
  SHOW_CHARACTER_PORTRAIT = 'show_character_portrait',

  // From SignatureKind
  COSMIC_ANOMALY = SignatureKind.CosmicAnomaly,
  COSMIC_SIGNATURE = SignatureKind.CosmicSignature,
  DEPLOYABLE = SignatureKind.Deployable,
  STRUCTURE = SignatureKind.Structure,
  STARBASE = SignatureKind.Starbase,
  SHIP = SignatureKind.Ship,
  DRONE = SignatureKind.Drone,

  // From SignatureGroup
  WORMHOLE = SignatureGroup.Wormhole,
  RELIC_SITE = SignatureGroup.RelicSite,
  DATA_SITE = SignatureGroup.DataSite,
  ORE_SITE = SignatureGroup.OreSite,
  GAS_SITE = SignatureGroup.GasSite,
  COMBAT_SITE = SignatureGroup.CombatSite,
}

export enum SettingsTypes {
  flag,
  dropdown,
}

export type SignatureSettingsType = { [key in SETTINGS_KEYS]?: unknown };

export type Setting = {
  key: SETTINGS_KEYS;
  name: string;
```
@@ -142,12 +110,6 @@ export type Setting = {
```typescript
  options?: { label: string; value: number | string | boolean }[];
};

export enum SIGNATURES_DELETION_TIMING {
  IMMEDIATE,
  DEFAULT,
  EXTENDED,
}

// Now use a stricter type: every timing key maps to a number
export type SignatureDeletionTimingType = Record<SIGNATURES_DELETION_TIMING, number>;
```
@@ -194,32 +156,6 @@ export const SIGNATURE_SETTINGS = {
```typescript
  ],
};

export const SETTINGS_VALUES: SignatureSettingsType = {
  [SETTINGS_KEYS.SHOW_UPDATED_COLUMN]: true,
  [SETTINGS_KEYS.SHOW_DESCRIPTION_COLUMN]: true,
  [SETTINGS_KEYS.SHOW_CHARACTER_COLUMN]: true,
  [SETTINGS_KEYS.LAZY_DELETE_SIGNATURES]: true,
  [SETTINGS_KEYS.KEEP_LAZY_DELETE]: false,
  [SETTINGS_KEYS.DELETION_TIMING]: SIGNATURES_DELETION_TIMING.DEFAULT,
  [SETTINGS_KEYS.COLOR_BY_TYPE]: true,
  [SETTINGS_KEYS.SHOW_CHARACTER_PORTRAIT]: true,

  [SETTINGS_KEYS.COSMIC_ANOMALY]: true,
  [SETTINGS_KEYS.COSMIC_SIGNATURE]: true,
  [SETTINGS_KEYS.DEPLOYABLE]: true,
  [SETTINGS_KEYS.STRUCTURE]: true,
  [SETTINGS_KEYS.STARBASE]: true,
  [SETTINGS_KEYS.SHIP]: true,
  [SETTINGS_KEYS.DRONE]: true,

  [SETTINGS_KEYS.WORMHOLE]: true,
  [SETTINGS_KEYS.RELIC_SITE]: true,
  [SETTINGS_KEYS.DATA_SITE]: true,
  [SETTINGS_KEYS.ORE_SITE]: true,
  [SETTINGS_KEYS.GAS_SITE]: true,
  [SETTINGS_KEYS.COMBAT_SITE]: true,
};

// Now this map is strongly typed as "number" for each timing enum
export const SIGNATURE_DELETION_TIMEOUTS: SignatureDeletionTimingType = {
  [SIGNATURES_DELETION_TIMING.IMMEDIATE]: 0,
```
@@ -1,5 +1,5 @@
```typescript
import { SignatureSettingsType } from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants.ts';
import { ExtendedSystemSignature } from '@/hooks/Mapper/types';
import { SignatureSettingsType } from '@/hooks/Mapper/constants/signatures.ts';

export interface UseSystemSignaturesDataProps {
  systemId: string;
```
@@ -5,15 +5,13 @@ import { OutCommand } from '@/hooks/Mapper/types/mapHandlers';
```typescript
import { useCallback, useEffect, useState } from 'react';
import useRefState from 'react-usestateref';

import {
  SETTINGS_KEYS,
  getDeletionTimeoutMs,
} from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants.ts';
import { getDeletionTimeoutMs } from '@/hooks/Mapper/components/mapInterface/widgets/SystemSignatures/constants.ts';
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
import { getActualSigs } from '../helpers';
import { UseSystemSignaturesDataProps } from './types';
import { usePendingDeletions } from './usePendingDeletions';
import { useSignatureFetching } from './useSignatureFetching';
import { SETTINGS_KEYS } from '@/hooks/Mapper/constants/signatures.ts';

export const useSystemSignaturesData = ({
  systemId,
```
@@ -3,7 +3,6 @@ import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
```typescript
import { Widget } from '@/hooks/Mapper/components/mapInterface/components';
import { SystemKillsList } from './SystemKillsList';
import { KillsHeader } from './components/SystemKillsHeader';
import { useKillsWidgetSettings } from './hooks/useKillsWidgetSettings';
import { useSystemKills } from './hooks/useSystemKills';
import { KillsSettingsDialog } from './components/SystemKillsSettingsDialog';
import { isWormholeSpace } from '@/hooks/Mapper/components/map/helpers/isWormholeSpace';
```
@@ -13,27 +12,25 @@ const SystemKillsContent = () => {
```typescript
  const {
    data: { selectedSystems, isSubscriptionActive },
    outCommand,
    storedSettings: { settingsKills },
  } = useMapRootState();

  const [systemId] = selectedSystems || [];

  const systemStaticInfo = getSystemStaticInfo(systemId)!;

  const [settings] = useKillsWidgetSettings();
  const visible = settings.showAll;

  const { kills, isLoading, error } = useSystemKills({
    systemId,
    outCommand,
    showAllVisible: visible,
    sinceHours: settings.timeRange,
    showAllVisible: settingsKills.showAll,
    sinceHours: settingsKills.timeRange,
  });

  const isNothingSelected = !systemId && !visible;
  const isNothingSelected = !systemId && !settingsKills.showAll;
  const showLoading = isLoading && kills.length === 0;

  const filteredKills = useMemo(() => {
    if (!settings.whOnly || !visible) return kills;
    if (!settingsKills.whOnly || !settingsKills.showAll) return kills;
    return kills.filter(kill => {
      if (!systemStaticInfo) {
        console.warn(`System with id ${kill.solar_system_id} not found.`);
```
@@ -41,7 +38,7 @@ const SystemKillsContent = () => {
```typescript
      }
      return isWormholeSpace(systemStaticInfo.system_class);
    });
  }, [kills, settings.whOnly, systemStaticInfo, visible]);
  }, [kills, settingsKills.whOnly, systemStaticInfo, settingsKills.showAll]);

  if (!isSubscriptionActive) {
    return (
```
@@ -87,7 +84,9 @@ const SystemKillsContent = () => {
```tsx
    );
  }

  return <SystemKillsList kills={filteredKills} onlyOneSystem={!visible} timeRange={settings.timeRange} />;
  return (
    <SystemKillsList kills={filteredKills} onlyOneSystem={!settingsKills.showAll} timeRange={settingsKills.timeRange} />
  );
};

export const WSystemKills = () => {
```
@@ -17,67 +17,91 @@ import { TooltipPosition } from '@/hooks/Mapper/components/ui-kit';
|
||||
import { WithClassName } from '@/hooks/Mapper/types/common.ts';
|
||||
|
||||
export type CompactKillRowProps = {
|
||||
killDetails: DetailedKill;
|
||||
killDetails?: DetailedKill | null;
|
||||
systemName: string;
|
||||
onlyOneSystem: boolean;
|
||||
} & WithClassName;
|
||||
|
||||
export const KillRowDetail = ({ killDetails, systemName, onlyOneSystem, className }: CompactKillRowProps) => {
|
||||
const {
|
||||
killmail_id = 0,
|
||||
killmail_id,
|
||||
// Victim data
|
||||
victim_char_name = 'Unknown Pilot',
|
||||
victim_alliance_ticker = '',
|
||||
victim_corp_ticker = '',
|
||||
victim_ship_name = 'Unknown Ship',
|
||||
victim_corp_name = '',
|
||||
victim_alliance_name = '',
|
||||
victim_char_id = 0,
|
||||
victim_corp_id = 0,
|
||||
victim_alliance_id = 0,
|
||||
victim_ship_type_id = 0,
|
||||
// Attacker data
|
||||
final_blow_char_id = 0,
|
||||
final_blow_char_name = '',
|
||||
final_blow_alliance_ticker = '',
|
||||
final_blow_alliance_name = '',
|
||||
final_blow_alliance_id = 0,
|
||||
final_blow_corp_ticker = '',
|
||||
final_blow_corp_id = 0,
|
||||
final_blow_corp_name = '',
|
||||
final_blow_ship_type_id = 0,
|
||||
kill_time = '',
|
||||
total_value = 0,
|
||||
} = killDetails || {};
|
||||
|
||||
const attackerIsNpc = final_blow_char_id === 0;
|
||||
|
||||
// Define victim affiliation ticker.
|
||||
const victimAffiliationTicker = victim_alliance_ticker || victim_corp_ticker || 'No Ticker';
|
||||
|
||||
const killValueFormatted = total_value != null && total_value > 0 ? `${formatISK(total_value)} ISK` : null;
|
||||
const killTimeAgo = kill_time ? formatTimeMixed(kill_time) : '0h ago';
|
||||
|
||||
const attackerSubscript = getAttackerSubscript(killDetails);
|
||||
|
||||
const { victimCorpLogoUrl, victimAllianceLogoUrl, victimShipUrl } = buildVictimImageUrls({
|
||||
victim_char_name,
|
||||
victim_alliance_ticker,
|
||||
victim_corp_ticker,
|
||||
victim_ship_name,
|
||||
victim_corp_name,
|
||||
victim_alliance_name,
|
||||
victim_char_id,
|
||||
victim_ship_type_id,
|
||||
victim_corp_id,
|
||||
victim_alliance_id,
|
||||
victim_ship_type_id,
|
||||
// Attacker data
|
||||
final_blow_char_id,
|
||||
final_blow_char_name,
|
||||
final_blow_alliance_ticker,
|
||||
final_blow_alliance_name,
|
||||
final_blow_alliance_id,
|
||||
final_blow_corp_ticker,
|
||||
final_blow_corp_id,
|
||||
final_blow_corp_name,
|
||||
final_blow_ship_type_id,
|
||||
kill_time,
|
||||
total_value,
|
||||
} = killDetails || {};
|
||||
|
||||
// Apply fallback values using nullish coalescing to handle both null and undefined
|
||||
const safeKillmailId = killmail_id ?? 0;
|
||||
const safeVictimCharName = victim_char_name ?? 'Unknown Pilot';
|
||||
const safeVictimAllianceTicker = victim_alliance_ticker ?? '';
|
||||
const safeVictimCorpTicker = victim_corp_ticker ?? '';
|
||||
const safeVictimShipName = victim_ship_name ?? 'Unknown Ship';
|
||||
const safeVictimCorpName = victim_corp_name ?? '';
|
||||
const safeVictimAllianceName = victim_alliance_name ?? '';
|
||||
const safeVictimCharId = victim_char_id ?? 0;
|
||||
const safeVictimCorpId = victim_corp_id ?? 0;
|
||||
const safeVictimAllianceId = victim_alliance_id ?? 0;
|
||||
const safeVictimShipTypeId = victim_ship_type_id ?? 0;
|
||||
const safeFinalBlowCharId = final_blow_char_id ?? 0;
|
||||
const safeFinalBlowCharName = final_blow_char_name ?? '';
|
||||
const safeFinalBlowAllianceTicker = final_blow_alliance_ticker ?? '';
|
||||
const safeFinalBlowAllianceName = final_blow_alliance_name ?? '';
|
||||
const safeFinalBlowAllianceId = final_blow_alliance_id ?? 0;
|
||||
const safeFinalBlowCorpTicker = final_blow_corp_ticker ?? '';
|
||||
const safeFinalBlowCorpId = final_blow_corp_id ?? 0;
|
||||
const safeFinalBlowCorpName = final_blow_corp_name ?? '';
|
||||
const safeFinalBlowShipTypeId = final_blow_ship_type_id ?? 0;
|
||||
const safeKillTime = kill_time ?? '';
|
||||
const safeTotalValue = total_value ?? 0;
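The move to explicit nullish-coalescing fallbacks matters because destructuring defaults only apply to undefined, while kill payloads may carry explicit null fields. A minimal illustration (the values here are hypothetical, not from this change):
// Destructuring default does NOT replace null:
const { total_value = 0 } = { total_value: null as number | null }; // total_value === null
// Nullish coalescing replaces both null and undefined:
const safeValue = (null as number | null) ?? 0; // safeValue === 0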
|
||||
|
||||
const attackerIsNpc = safeFinalBlowCharId === 0;
|
||||
|
||||
// Define victim affiliation ticker.
|
||||
const victimAffiliationTicker = safeVictimAllianceTicker || safeVictimCorpTicker || 'No Ticker';
|
||||
|
||||
const killValueFormatted = safeTotalValue != null && safeTotalValue > 0 ? `${formatISK(safeTotalValue)} ISK` : null;
|
||||
const killTimeAgo = safeKillTime ? formatTimeMixed(safeKillTime) : '0h ago';
|
||||
|
||||
const attackerSubscript = killDetails ? getAttackerSubscript(killDetails) : undefined;
|
||||
|
||||
const { victimCorpLogoUrl, victimAllianceLogoUrl, victimShipUrl } = buildVictimImageUrls({
|
||||
victim_char_id: safeVictimCharId,
|
||||
victim_ship_type_id: safeVictimShipTypeId,
|
||||
victim_corp_id: safeVictimCorpId,
|
||||
victim_alliance_id: safeVictimAllianceId,
|
||||
});
|
||||
|
||||
const { attackerCorpLogoUrl, attackerAllianceLogoUrl } = buildAttackerImageUrls({
|
||||
final_blow_char_id,
|
||||
final_blow_corp_id,
|
||||
final_blow_alliance_id,
|
||||
final_blow_char_id: safeFinalBlowCharId,
|
||||
final_blow_corp_id: safeFinalBlowCorpId,
|
||||
final_blow_alliance_id: safeFinalBlowAllianceId,
|
||||
});
|
||||
|
||||
const { url: victimPrimaryLogoUrl, tooltip: victimPrimaryTooltip } = getPrimaryLogoAndTooltip(
|
||||
victimAllianceLogoUrl,
|
||||
victimCorpLogoUrl,
|
||||
victim_alliance_name,
|
||||
victim_corp_name,
|
||||
safeVictimAllianceName,
|
||||
safeVictimCorpName,
|
||||
'Victim',
|
||||
);
|
||||
|
||||
@@ -87,25 +111,25 @@ export const KillRowDetail = ({ killDetails, systemName, onlyOneSystem, classNam
|
||||
attackerIsNpc,
|
||||
attackerAllianceLogoUrl,
|
||||
attackerCorpLogoUrl,
|
||||
final_blow_alliance_name,
|
||||
final_blow_corp_name,
|
||||
final_blow_ship_type_id,
|
||||
safeFinalBlowAllianceName,
|
||||
safeFinalBlowCorpName,
|
||||
safeFinalBlowShipTypeId,
|
||||
),
|
||||
[
|
||||
attackerAllianceLogoUrl,
|
||||
attackerCorpLogoUrl,
|
||||
attackerIsNpc,
|
||||
final_blow_alliance_name,
|
||||
final_blow_corp_name,
|
||||
final_blow_ship_type_id,
|
||||
safeFinalBlowAllianceName,
|
||||
safeFinalBlowCorpName,
|
||||
safeFinalBlowShipTypeId,
|
||||
],
|
||||
);
|
||||
|
||||
// Define attackerTicker to use the alliance ticker if available, otherwise the corp ticker.
|
||||
const attackerTicker = attackerIsNpc ? '' : final_blow_alliance_ticker || final_blow_corp_ticker || '';
|
||||
const attackerTicker = attackerIsNpc ? '' : safeFinalBlowAllianceTicker || safeFinalBlowCorpTicker || '';
|
||||
|
||||
// For the attacker image link: if the attacker is not an NPC, link to the character page; otherwise, link to the kill page.
|
||||
const attackerLink = attackerIsNpc ? zkillLink('kill', killmail_id) : zkillLink('character', final_blow_char_id);
|
||||
const attackerLink = attackerIsNpc ? zkillLink('kill', safeKillmailId) : zkillLink('character', safeFinalBlowCharId);
|
||||
|
||||
return (
|
||||
<div
|
||||
@@ -121,7 +145,7 @@ export const KillRowDetail = ({ killDetails, systemName, onlyOneSystem, classNam
|
||||
{victimShipUrl && (
|
||||
<div className="relative shrink-0 w-8 h-8 overflow-hidden">
|
||||
<a
|
||||
href={zkillLink('kill', killmail_id)}
|
||||
href={zkillLink('kill', safeKillmailId)}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="block w-full h-full"
|
||||
@@ -137,7 +161,7 @@ export const KillRowDetail = ({ killDetails, systemName, onlyOneSystem, classNam
|
||||
{victimPrimaryLogoUrl && (
|
||||
<WdTooltipWrapper content={victimPrimaryTooltip} position={TooltipPosition.top}>
|
||||
<a
|
||||
href={zkillLink('kill', killmail_id)}
|
||||
href={zkillLink('kill', safeKillmailId)}
|
||||
target="_blank"
|
||||
rel="noopener noreferrer"
|
||||
className="relative block shrink-0 w-8 h-8 overflow-hidden"
|
||||
@@ -153,12 +177,12 @@ export const KillRowDetail = ({ killDetails, systemName, onlyOneSystem, classNam
|
||||
</div>
|
||||
<div className="flex flex-col ml-2 flex-1 min-w-0 overflow-hidden leading-[1rem]">
|
||||
<div className="truncate text-stone-200">
|
||||
{victim_char_name}
|
||||
{safeVictimCharName}
|
||||
<span className="text-stone-400"> / {victimAffiliationTicker}</span>
|
||||
</div>
|
||||
<div className="truncate text-stone-300 flex items-center gap-1">
|
||||
<span className="text-stone-400 overflow-hidden text-ellipsis whitespace-nowrap max-w-[140px]">
|
||||
{victim_ship_name}
|
||||
{safeVictimShipName}
|
||||
</span>
|
||||
{killValueFormatted && (
|
||||
<>
|
||||
@@ -170,9 +194,9 @@ export const KillRowDetail = ({ killDetails, systemName, onlyOneSystem, classNam
|
||||
</div>
|
||||
<div className="flex items-center ml-auto gap-2">
|
||||
<div className="flex flex-col items-end flex-1 min-w-0 overflow-hidden text-right leading-[1rem]">
|
||||
{!attackerIsNpc && (final_blow_char_name || attackerTicker) && (
|
||||
{!attackerIsNpc && (safeFinalBlowCharName || attackerTicker) && (
|
||||
<div className="truncate text-stone-200">
|
||||
{final_blow_char_name}
|
||||
{safeFinalBlowCharName}
|
||||
{!attackerIsNpc && attackerTicker && <span className="ml-1 text-stone-400">/ {attackerTicker}</span>}
|
||||
</div>
|
||||
)}
|
||||
|
||||
@@ -7,9 +7,9 @@ import {
|
||||
WdImgButton,
|
||||
WdTooltipWrapper,
|
||||
} from '@/hooks/Mapper/components/ui-kit';
|
||||
import { useKillsWidgetSettings } from '../hooks/useKillsWidgetSettings';
|
||||
import { PrimeIcons } from 'primereact/api';
|
||||
import useMaxWidth from '@/hooks/Mapper/hooks/useMaxWidth.ts';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
|
||||
interface KillsHeaderProps {
|
||||
systemId?: string;
|
||||
@@ -17,11 +17,14 @@ interface KillsHeaderProps {
|
||||
}
|
||||
|
||||
export const KillsHeader: React.FC<KillsHeaderProps> = ({ systemId, onOpenSettings }) => {
|
||||
const [settings, setSettings] = useKillsWidgetSettings();
|
||||
const { showAll } = settings;
|
||||
const {
|
||||
storedSettings: { settingsKills, settingsKillsUpdate },
|
||||
} = useMapRootState();
|
||||
|
||||
const { showAll } = settingsKills;
|
||||
|
||||
const onToggleShowAllVisible = () => {
|
||||
setSettings(prev => ({ ...prev, showAll: !prev.showAll }));
|
||||
settingsKillsUpdate(prev => ({ ...prev, showAll: !prev.showAll }));
|
||||
};
|
||||
|
||||
const headerRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
@@ -3,12 +3,12 @@ import { Dialog } from 'primereact/dialog';
|
||||
import { Button } from 'primereact/button';
|
||||
import { WdImgButton } from '@/hooks/Mapper/components/ui-kit';
|
||||
import { PrimeIcons } from 'primereact/api';
|
||||
import { useKillsWidgetSettings } from '../hooks/useKillsWidgetSettings';
|
||||
import {
|
||||
AddSystemDialog,
|
||||
SearchOnSubmitCallback,
|
||||
} from '@/hooks/Mapper/components/mapInterface/components/AddSystemDialog';
|
||||
import { SystemView, TooltipPosition } from '@/hooks/Mapper/components/ui-kit';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
|
||||
interface KillsSettingsDialogProps {
|
||||
visible: boolean;
|
||||
@@ -16,12 +16,15 @@ interface KillsSettingsDialogProps {
|
||||
}
|
||||
|
||||
export const KillsSettingsDialog: React.FC<KillsSettingsDialogProps> = ({ visible, setVisible }) => {
|
||||
const [globalSettings, setGlobalSettings] = useKillsWidgetSettings();
|
||||
const {
|
||||
storedSettings: { settingsKills, settingsKillsUpdate },
|
||||
} = useMapRootState();
|
||||
|
||||
const localRef = useRef({
|
||||
showAll: globalSettings.showAll,
|
||||
whOnly: globalSettings.whOnly,
|
||||
excludedSystems: globalSettings.excludedSystems || [],
|
||||
timeRange: globalSettings.timeRange,
|
||||
showAll: settingsKills.showAll,
|
||||
whOnly: settingsKills.whOnly,
|
||||
excludedSystems: settingsKills.excludedSystems || [],
|
||||
timeRange: settingsKills.timeRange,
|
||||
});
|
||||
|
||||
const [, forceRender] = useState(0);
|
||||
@@ -30,14 +33,14 @@ export const KillsSettingsDialog: React.FC<KillsSettingsDialogProps> = ({ visibl
|
||||
useEffect(() => {
|
||||
if (visible) {
|
||||
localRef.current = {
|
||||
showAll: globalSettings.showAll,
|
||||
whOnly: globalSettings.whOnly,
|
||||
excludedSystems: globalSettings.excludedSystems || [],
|
||||
timeRange: globalSettings.timeRange,
|
||||
showAll: settingsKills.showAll,
|
||||
whOnly: settingsKills.whOnly,
|
||||
excludedSystems: settingsKills.excludedSystems || [],
|
||||
timeRange: settingsKills.timeRange,
|
||||
};
|
||||
forceRender(n => n + 1);
|
||||
}
|
||||
}, [visible, globalSettings]);
|
||||
}, [visible, settingsKills]);
|
||||
|
||||
const handleWHChange = useCallback((checked: boolean) => {
|
||||
localRef.current = {
|
||||
@@ -75,12 +78,12 @@ export const KillsSettingsDialog: React.FC<KillsSettingsDialogProps> = ({ visibl
|
||||
}, []);
|
||||
|
||||
const handleApply = useCallback(() => {
|
||||
setGlobalSettings(prev => ({
|
||||
settingsKillsUpdate(prev => ({
|
||||
...prev,
|
||||
...localRef.current,
|
||||
}));
|
||||
setVisible(false);
|
||||
}, [setGlobalSettings, setVisible]);
|
||||
}, [settingsKillsUpdate, setVisible]);
|
||||
|
||||
const handleHide = useCallback(() => {
|
||||
setVisible(false);
|
||||
|
||||
@@ -33,7 +33,10 @@ export function formatISK(value: number): string {
|
||||
return Math.round(value).toString();
|
||||
}
|
||||
|
||||
export function getAttackerSubscript(kill: DetailedKill) {
|
||||
export function getAttackerSubscript(kill: DetailedKill | undefined) {
|
||||
if (!kill) {
|
||||
return null;
|
||||
}
|
||||
if (kill.npc) {
|
||||
return { label: 'npc', cssClass: 'text-purple-400' };
|
||||
}
|
||||
|
||||
@@ -1,53 +0,0 @@
|
||||
import { useMemo, useCallback } from 'react';
|
||||
import useLocalStorageState from 'use-local-storage-state';
|
||||
|
||||
export interface KillsWidgetSettings {
|
||||
showAll: boolean;
|
||||
whOnly: boolean;
|
||||
excludedSystems: number[];
|
||||
version: number;
|
||||
timeRange: number;
|
||||
}
|
||||
|
||||
export const DEFAULT_KILLS_WIDGET_SETTINGS: KillsWidgetSettings = {
|
||||
showAll: false,
|
||||
whOnly: true,
|
||||
excludedSystems: [],
|
||||
version: 2,
|
||||
timeRange: 4,
|
||||
};
|
||||
|
||||
function mergeWithDefaults(settings?: Partial<KillsWidgetSettings>): KillsWidgetSettings {
|
||||
if (!settings) {
|
||||
return DEFAULT_KILLS_WIDGET_SETTINGS;
|
||||
}
|
||||
|
||||
return {
|
||||
...DEFAULT_KILLS_WIDGET_SETTINGS,
|
||||
...settings,
|
||||
excludedSystems: Array.isArray(settings.excludedSystems) ? settings.excludedSystems : [],
|
||||
};
|
||||
}
|
||||
|
||||
export function useKillsWidgetSettings() {
|
||||
const [rawValue, setRawValue] = useLocalStorageState<KillsWidgetSettings | undefined>('kills:widget:settings');
|
||||
|
||||
const value = useMemo<KillsWidgetSettings>(() => {
|
||||
return mergeWithDefaults(rawValue);
|
||||
}, [rawValue]);
|
||||
|
||||
const setValue = useCallback(
|
||||
(newVal: KillsWidgetSettings | ((prev: KillsWidgetSettings) => KillsWidgetSettings)) => {
|
||||
setRawValue(prev => {
|
||||
const mergedPrev = mergeWithDefaults(prev);
|
||||
|
||||
const nextUnmerged = typeof newVal === 'function' ? newVal(mergedPrev) : newVal;
|
||||
|
||||
return mergeWithDefaults(nextUnmerged);
|
||||
});
|
||||
},
|
||||
[setRawValue],
|
||||
);
|
||||
|
||||
return [value, setValue] as const;
|
||||
}
|
||||
@@ -3,7 +3,6 @@ import debounce from 'lodash.debounce';
|
||||
import { OutCommand } from '@/hooks/Mapper/types/mapHandlers';
|
||||
import { DetailedKill } from '@/hooks/Mapper/types/kills';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { useKillsWidgetSettings } from './useKillsWidgetSettings';
|
||||
|
||||
interface UseSystemKillsProps {
|
||||
systemId?: string;
|
||||
@@ -13,26 +12,25 @@ interface UseSystemKillsProps {
|
||||
sinceHours?: number;
|
||||
}
|
||||
|
||||
function combineKills(existing: DetailedKill[], incoming: DetailedKill[], sinceHours: number): DetailedKill[] {
|
||||
const cutoff = Date.now() - sinceHours * 60 * 60 * 1000;
|
||||
function combineKills(existing: DetailedKill[], incoming: DetailedKill[]): DetailedKill[] {
|
||||
// Don't filter by time when storing - let components filter when displaying
|
||||
const byId: Record<string, DetailedKill> = {};
|
||||
|
||||
for (const kill of [...existing, ...incoming]) {
|
||||
if (!kill.kill_time) continue;
|
||||
const killTimeMs = new Date(kill.kill_time).valueOf();
|
||||
if (killTimeMs >= cutoff) {
|
||||
byId[kill.killmail_id] = kill;
|
||||
}
|
||||
byId[kill.killmail_id] = kill;
|
||||
}
|
||||
|
||||
return Object.values(byId);
|
||||
}
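With the cutoff gone, combineKills now only deduplicates by killmail_id and keeps every kill; any age limit has to be applied where the kills are rendered. A minimal display-time filter under that assumption (illustrative sketch, not part of this change):
const killsWithin = (kills: DetailedKill[], sinceHours: number): DetailedKill[] => {
  const cutoff = Date.now() - sinceHours * 60 * 60 * 1000;
  return kills.filter(k => k.kill_time != null && new Date(k.kill_time).valueOf() >= cutoff);
};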
|
||||
|
||||
export function useSystemKills({ systemId, outCommand, showAllVisible = false, sinceHours = 24 }: UseSystemKillsProps) {
|
||||
const { data, update } = useMapRootState();
|
||||
const { detailedKills = {}, systems = [] } = data;
|
||||
const [settings] = useKillsWidgetSettings();
|
||||
const excludedSystems = settings.excludedSystems;
|
||||
const {
|
||||
data: { detailedKills = {}, systems = [] },
|
||||
update,
|
||||
storedSettings: { settingsKills },
|
||||
} = useMapRootState();
|
||||
const { excludedSystems } = settingsKills;
|
||||
|
||||
const effectiveSinceHours = sinceHours;
|
||||
|
||||
@@ -55,14 +53,14 @@ export function useSystemKills({ systemId, outCommand, showAllVisible = false, s
|
||||
|
||||
for (const [sid, newKills] of Object.entries(killsMap)) {
|
||||
const existing = updated[sid] ?? [];
|
||||
const combined = combineKills(existing, newKills, effectiveSinceHours);
|
||||
const combined = combineKills(existing, newKills);
|
||||
updated[sid] = combined;
|
||||
}
|
||||
|
||||
return { ...prev, detailedKills: updated };
|
||||
});
|
||||
},
|
||||
[update, effectiveSinceHours],
|
||||
[update],
|
||||
);
|
||||
|
||||
const fetchKills = useCallback(
|
||||
|
||||
@@ -14,13 +14,14 @@ import { TrackingDialog } from '@/hooks/Mapper/components/mapRootContent/compone
|
||||
import { useMapEventListener } from '@/hooks/Mapper/events';
|
||||
import { Commands } from '@/hooks/Mapper/types';
|
||||
import { PingsInterface } from '@/hooks/Mapper/components/mapInterface/components';
|
||||
import { OldSettingsDialog } from '@/hooks/Mapper/components/mapRootContent/components/OldSettingsDialog.tsx';
|
||||
|
||||
export interface MapRootContentProps {}
|
||||
|
||||
// eslint-disable-next-line no-empty-pattern
|
||||
export const MapRootContent = ({}: MapRootContentProps) => {
|
||||
const {
|
||||
storedSettings: { interfaceSettings },
|
||||
storedSettings: { interfaceSettings, isReady, hasOldSettings },
|
||||
data,
|
||||
} = useMapRootState();
|
||||
const { isShowMenu } = interfaceSettings;
|
||||
@@ -34,7 +35,7 @@ export const MapRootContent = ({}: MapRootContentProps) => {
|
||||
const [showTrackingDialog, setShowTrackingDialog] = useState(false);
|
||||
|
||||
/* Important Notice - this solution is needed so that only one instance of MapInterface is used */
|
||||
const mapInterface = <MapInterface />;
|
||||
const mapInterface = isReady ? <MapInterface /> : null;
|
||||
|
||||
const handleShowOnTheMap = useCallback(() => setShowOnTheMap(true), []);
|
||||
const handleShowMapSettings = useCallback(() => setShowMapSettings(true), []);
|
||||
@@ -90,6 +91,8 @@ export const MapRootContent = ({}: MapRootContentProps) => {
|
||||
{showTrackingDialog && (
|
||||
<TrackingDialog visible={showTrackingDialog} onHide={() => setShowTrackingDialog(false)} />
|
||||
)}
|
||||
|
||||
{hasOldSettings && <OldSettingsDialog />}
|
||||
</Layout>
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -12,6 +12,7 @@ import {
|
||||
import { WidgetsSettings } from './components/WidgetsSettings';
|
||||
import { CommonSettings } from './components/CommonSettings';
|
||||
import { SettingsListItem } from './types.ts';
|
||||
import { ImportExport } from '@/hooks/Mapper/components/mapRootContent/components/MapSettings/components/ImportExport.tsx';
|
||||
|
||||
export interface MapSettingsProps {
|
||||
visible: boolean;
|
||||
@@ -87,6 +88,10 @@ export const MapSettingsComp = ({ visible, onHide }: MapSettingsProps) => {
|
||||
<TabPanel header="Widgets" className="h-full" headerClassName={styles.verticalTabHeader}>
|
||||
<WidgetsSettings />
|
||||
</TabPanel>
|
||||
|
||||
<TabPanel header="Import/Export" className="h-full" headerClassName={styles.verticalTabHeader}>
|
||||
<ImportExport />
|
||||
</TabPanel>
|
||||
</TabView>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -22,6 +22,7 @@ import { OutCommand } from '@/hooks/Mapper/types';
|
||||
import { PrettySwitchbox } from '@/hooks/Mapper/components/mapRootContent/components/MapSettings/components';
|
||||
import { Dropdown } from 'primereact/dropdown';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { WithChildren } from '@/hooks/Mapper/types/common.ts';
|
||||
|
||||
type MapSettingsContextType = {
|
||||
renderSettingItem: (item: SettingsListItem) => ReactNode;
|
||||
@@ -30,7 +31,7 @@ type MapSettingsContextType = {
|
||||
|
||||
const MapSettingsContext = createContext<MapSettingsContextType | undefined>(undefined);
|
||||
|
||||
export const MapSettingsProvider = ({ children }: { children: ReactNode }) => {
|
||||
export const MapSettingsProvider = ({ children }: WithChildren) => {
|
||||
const {
|
||||
outCommand,
|
||||
storedSettings: { interfaceSettings, setInterfaceSettings },
|
||||
|
||||
@@ -0,0 +1,202 @@
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { useCallback, useMemo, useRef } from 'react';
|
||||
import { Toast } from 'primereact/toast';
|
||||
import { parseMapUserSettings } from '@/hooks/Mapper/components/helpers';
|
||||
import { saveTextFile } from '@/hooks/Mapper/utils/saveToFile.ts';
|
||||
import { SplitButton } from 'primereact/splitbutton';
|
||||
import { loadTextFile } from '@/hooks/Mapper/utils';
|
||||
|
||||
export const ImportExport = () => {
|
||||
const {
|
||||
storedSettings: { getSettingsForExport, applySettings },
|
||||
data: { map_slug },
|
||||
} = useMapRootState();
|
||||
|
||||
const toast = useRef<Toast | null>(null);
|
||||
|
||||
const handleImportFromClipboard = useCallback(async () => {
|
||||
const text = await navigator.clipboard.readText();
|
||||
|
||||
if (text == null || text == '') {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = parseMapUserSettings(text);
|
||||
if (applySettings(parsed)) {
|
||||
toast.current?.show({
|
||||
severity: 'success',
|
||||
summary: 'Import',
|
||||
detail: 'Map settings were imported successfully.',
|
||||
life: 3000,
|
||||
});
|
||||
|
||||
setTimeout(() => {
|
||||
window.dispatchEvent(new Event('resize'));
|
||||
}, 100);
|
||||
return;
|
||||
}
|
||||
|
||||
toast.current?.show({
|
||||
severity: 'warn',
|
||||
summary: 'Warning',
|
||||
detail: 'Settings already imported. Or something went wrong.',
|
||||
life: 3000,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Import from clipboard Error: `, error);
|
||||
|
||||
toast.current?.show({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: 'Some error occurred on import from Clipboard, check console log.',
|
||||
life: 3000,
|
||||
});
|
||||
}
|
||||
}, [applySettings]);
|
||||
|
||||
const handleImportFromFile = useCallback(async () => {
|
||||
try {
|
||||
const text = await loadTextFile();
|
||||
|
||||
const parsed = parseMapUserSettings(text);
|
||||
if (applySettings(parsed)) {
|
||||
toast.current?.show({
|
||||
severity: 'success',
|
||||
summary: 'Import',
|
||||
detail: 'Map settings were imported successfully.',
|
||||
life: 3000,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
toast.current?.show({
|
||||
severity: 'warn',
|
||||
summary: 'Warning',
|
||||
detail: 'Settings already imported. Or something went wrong.',
|
||||
life: 3000,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Import from file Error: `, error);
|
||||
|
||||
toast.current?.show({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: 'Some error occurred on import from File, check console log.',
|
||||
life: 3000,
|
||||
});
|
||||
}
|
||||
}, [applySettings]);
|
||||
|
||||
const handleExportToClipboard = useCallback(async () => {
|
||||
const settings = getSettingsForExport();
|
||||
if (!settings) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await navigator.clipboard.writeText(settings);
|
||||
toast.current?.show({
|
||||
severity: 'success',
|
||||
summary: 'Export',
|
||||
detail: 'Map settings copied into clipboard',
|
||||
life: 3000,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Export to clipboard Error: `, error);
|
||||
toast.current?.show({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: 'Some error occurred on copying to clipboard, check console log.',
|
||||
life: 3000,
|
||||
});
|
||||
}
|
||||
}, [getSettingsForExport]);
|
||||
|
||||
const handleExportToFile = useCallback(async () => {
|
||||
const settings = getSettingsForExport();
|
||||
if (!settings) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
saveTextFile(`map_settings_${map_slug}.json`, settings);
|
||||
|
||||
toast.current?.show({
|
||||
severity: 'success',
|
||||
summary: 'Export to File',
|
||||
detail: 'Map settings successfully saved to file',
|
||||
life: 3000,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Export to file Error: `, error);
|
||||
toast.current?.show({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: 'Some error occurred on saving to file, check console log.',
|
||||
life: 3000,
|
||||
});
|
||||
}
|
||||
}, [getSettingsForExport, map_slug]);
|
||||
|
||||
const importItems = useMemo(
|
||||
() => [
|
||||
{
|
||||
label: 'Import from File',
|
||||
icon: 'pi pi-file-import',
|
||||
command: handleImportFromFile,
|
||||
},
|
||||
],
|
||||
[handleImportFromFile],
|
||||
);
|
||||
|
||||
const exportItems = useMemo(
|
||||
() => [
|
||||
{
|
||||
label: 'Export as File',
|
||||
icon: 'pi pi-file-export',
|
||||
command: handleExportToFile,
|
||||
},
|
||||
],
|
||||
[handleExportToFile],
|
||||
);
|
||||
|
||||
return (
|
||||
<div className="w-full h-full flex flex-col gap-5">
|
||||
<div className="flex flex-col gap-1">
|
||||
<div>
|
||||
<SplitButton
|
||||
onClick={handleImportFromClipboard}
|
||||
icon="pi pi-download"
|
||||
size="small"
|
||||
severity="warning"
|
||||
label="Import from Clipboard"
|
||||
className="py-[4px]"
|
||||
model={importItems}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<span className="text-stone-500 text-[12px]">
|
||||
*Will read map settings from the clipboard. Be careful: it could overwrite your current settings.
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div className="flex flex-col gap-1">
|
||||
<div>
|
||||
<SplitButton
|
||||
onClick={handleExportToClipboard}
|
||||
icon="pi pi-upload"
|
||||
size="small"
|
||||
label="Export to Clipboard"
|
||||
className="py-[4px]"
|
||||
model={exportItems}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<span className="text-stone-500 text-[12px]">*Will save map settings to clipboard.</span>
|
||||
</div>
|
||||
|
||||
<Toast ref={toast} />
|
||||
</div>
|
||||
);
|
||||
};
|
||||
@@ -0,0 +1,206 @@
|
||||
import { Dialog } from 'primereact/dialog';
|
||||
import { Button } from 'primereact/button';
|
||||
import { ConfirmPopup } from 'primereact/confirmpopup';
|
||||
import { useCallback, useRef, useState } from 'react';
|
||||
import { MapUserSettings } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import {
|
||||
DEFAULT_KILLS_WIDGET_SETTINGS,
|
||||
DEFAULT_ON_THE_MAP_SETTINGS,
|
||||
DEFAULT_ROUTES_SETTINGS,
|
||||
DEFAULT_WIDGET_LOCAL_SETTINGS,
|
||||
getDefaultWidgetProps,
|
||||
STORED_INTERFACE_DEFAULT_VALUES,
|
||||
} from '@/hooks/Mapper/mapRootProvider/constants.ts';
|
||||
import { DEFAULT_SIGNATURE_SETTINGS } from '@/hooks/Mapper/constants/signatures.ts';
|
||||
import { Toast } from 'primereact/toast';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { saveTextFile } from '@/hooks/Mapper/utils';
|
||||
|
||||
const createSettings = function <T>(lsSettings: string | null, defaultValues: T) {
|
||||
return {
|
||||
version: -1,
|
||||
settings: lsSettings ? JSON.parse(lsSettings) : defaultValues,
|
||||
};
|
||||
};
|
||||
|
||||
export const OldSettingsDialog = () => {
|
||||
const cpRemoveBtnRef = useRef<HTMLElement>();
|
||||
const [cpRemoveVisible, setCpRemoveVisible] = useState(false);
|
||||
const handleShowCP = useCallback(() => setCpRemoveVisible(true), []);
|
||||
const handleHideCP = useCallback(() => setCpRemoveVisible(false), []);
|
||||
const toast = useRef<Toast | null>(null);
|
||||
|
||||
const {
|
||||
storedSettings: { checkOldSettings },
|
||||
data: { map_slug },
|
||||
} = useMapRootState();
|
||||
|
||||
const handleExport = useCallback(
|
||||
async (asFile?: boolean) => {
|
||||
const interfaceSettings = localStorage.getItem('window:interface:settings');
|
||||
const widgetRoutes = localStorage.getItem('window:interface:routes');
|
||||
const widgetLocal = localStorage.getItem('window:interface:local');
|
||||
const widgetKills = localStorage.getItem('kills:widget:settings');
|
||||
const onTheMapOld = localStorage.getItem('window:onTheMap:settings');
|
||||
const widgetsOld = localStorage.getItem('windows:settings:v2');
|
||||
const signatures = localStorage.getItem('wanderer_system_signature_settings_v6_5');
|
||||
|
||||
const out: MapUserSettings = {
|
||||
killsWidget: createSettings(widgetKills, DEFAULT_KILLS_WIDGET_SETTINGS),
|
||||
localWidget: createSettings(widgetLocal, DEFAULT_WIDGET_LOCAL_SETTINGS),
|
||||
widgets: createSettings(widgetsOld, getDefaultWidgetProps()),
|
||||
routes: createSettings(widgetRoutes, DEFAULT_ROUTES_SETTINGS),
|
||||
onTheMap: createSettings(onTheMapOld, DEFAULT_ON_THE_MAP_SETTINGS),
|
||||
signaturesWidget: createSettings(signatures, DEFAULT_SIGNATURE_SETTINGS),
|
||||
interface: createSettings(interfaceSettings, STORED_INTERFACE_DEFAULT_VALUES),
|
||||
};
|
||||
|
||||
if (asFile) {
|
||||
if (!out) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
saveTextFile(`map_settings_${map_slug}.json`, JSON.stringify(out));
|
||||
|
||||
toast.current?.show({
|
||||
severity: 'success',
|
||||
summary: 'Export to File',
|
||||
detail: 'Map settings successfully saved to file',
|
||||
life: 3000,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Export to file Error: `, error);
|
||||
toast.current?.show({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: 'Some error occurred on saving to file, check console log.',
|
||||
life: 3000,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await navigator.clipboard.writeText(JSON.stringify(out));
|
||||
|
||||
toast.current?.show({
|
||||
severity: 'success',
|
||||
summary: 'Export to clipboard',
|
||||
detail: 'Map settings were exported successfully.',
|
||||
life: 3000,
|
||||
});
|
||||
} catch (error) {
|
||||
console.error(`Export to clipboard Error: `, error);
|
||||
toast.current?.show({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: 'Some error occurred on copying to clipboard, check console log.',
|
||||
life: 3000,
|
||||
});
|
||||
}
|
||||
},
|
||||
[map_slug],
|
||||
);
|
||||
|
||||
const handleExportClipboard = useCallback(async () => {
|
||||
await handleExport();
|
||||
}, [handleExport]);
|
||||
|
||||
const handleExportAsFile = useCallback(async () => {
|
||||
await handleExport(true);
|
||||
}, [handleExport]);
|
||||
|
||||
const handleProceed = useCallback(() => {
|
||||
localStorage.removeItem('window:interface:settings');
|
||||
localStorage.removeItem('window:interface:routes');
|
||||
localStorage.removeItem('window:interface:local');
|
||||
localStorage.removeItem('kills:widget:settings');
|
||||
localStorage.removeItem('window:onTheMap:settings');
|
||||
localStorage.removeItem('windows:settings:v2');
|
||||
localStorage.removeItem('wanderer_system_signature_settings_v6_5');
|
||||
|
||||
checkOldSettings();
|
||||
}, [checkOldSettings]);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Dialog
|
||||
header={
|
||||
<div className="dialog-header">
|
||||
<span className="pointer-events-none">Old settings detected!</span>
|
||||
</div>
|
||||
}
|
||||
draggable={false}
|
||||
resizable={false}
|
||||
closable={false}
|
||||
visible
|
||||
onHide={() => null}
|
||||
className="w-[640px] h-[400px] text-text-color min-h-0"
|
||||
footer={
|
||||
<div className="flex items-center justify-end">
|
||||
<Button
|
||||
// @ts-ignore
|
||||
ref={cpRemoveBtnRef}
|
||||
onClick={handleShowCP}
|
||||
icon="pi pi-exclamation-triangle"
|
||||
size="small"
|
||||
severity="warning"
|
||||
label="Proceed"
|
||||
/>
|
||||
</div>
|
||||
}
|
||||
>
|
||||
<div className="w-full h-full flex flex-col gap-1 items-center justify-center text-stone-400 text-[15px]">
|
||||
<span>
|
||||
We detected <span className="text-orange-400">deprecated</span> settings saved in your browser.
|
||||
</span>
|
||||
<span>
|
||||
Now you can <span className="text-orange-400">export</span> your old settings.
|
||||
</span>
|
||||
<span>
|
||||
After you click, all settings will be saved to your <span className="text-orange-400">clipboard</span>.
|
||||
</span>
|
||||
<span>
|
||||
Then you need to go into <span className="text-orange-400">Map Settings</span> and click{' '}
|
||||
<span className="text-orange-400">Import from clipboard</span>
|
||||
</span>
|
||||
<div className="h-[30px]"></div>
|
||||
|
||||
<div className="flex items-center gap-3">
|
||||
<Button
|
||||
onClick={handleExportClipboard}
|
||||
icon="pi pi-copy"
|
||||
size="small"
|
||||
severity="info"
|
||||
label="Export to Clipboard"
|
||||
/>
|
||||
|
||||
<Button
|
||||
onClick={handleExportAsFile}
|
||||
icon="pi pi-download"
|
||||
size="small"
|
||||
severity="info"
|
||||
label="Export as File"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<span className="text-stone-600 text-[12px]">*You will see this dialog until click Export.</span>
|
||||
</div>
|
||||
</Dialog>
|
||||
|
||||
<ConfirmPopup
|
||||
target={cpRemoveBtnRef.current}
|
||||
visible={cpRemoveVisible}
|
||||
onHide={handleHideCP}
|
||||
message="After click dialog will disappear. Ready?"
|
||||
icon="pi pi-exclamation-triangle"
|
||||
accept={handleProceed}
|
||||
/>
|
||||
|
||||
<Toast ref={toast} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -7,24 +7,11 @@ import { VirtualScroller, VirtualScrollerTemplateOptions } from 'primereact/virt
|
||||
import clsx from 'clsx';
|
||||
import { CharacterTypeRaw, WithIsOwnCharacter } from '@/hooks/Mapper/types';
|
||||
import { CharacterCard, TooltipPosition, WdCheckbox, WdImageSize, WdImgButton } from '@/hooks/Mapper/components/ui-kit';
|
||||
import useLocalStorageState from 'use-local-storage-state';
|
||||
import { useMapCheckPermissions, useMapGetOption } from '@/hooks/Mapper/mapRootProvider/hooks/api';
|
||||
import { UserPermission } from '@/hooks/Mapper/types/permissions.ts';
|
||||
import { InputText } from 'primereact/inputtext';
|
||||
import { IconField } from 'primereact/iconfield';
|
||||
|
||||
type WindowLocalSettingsType = {
|
||||
compact: boolean;
|
||||
hideOffline: boolean;
|
||||
version: number;
|
||||
};
|
||||
|
||||
const STORED_DEFAULT_VALUES: WindowLocalSettingsType = {
|
||||
compact: true,
|
||||
hideOffline: false,
|
||||
version: 0,
|
||||
};
|
||||
|
||||
const itemTemplate = (item: CharacterTypeRaw & WithIsOwnCharacter, options: VirtualScrollerTemplateOptions) => {
|
||||
return (
|
||||
<div
|
||||
@@ -48,14 +35,11 @@ export interface OnTheMapProps {
|
||||
export const OnTheMap = ({ show, onHide }: OnTheMapProps) => {
|
||||
const {
|
||||
data: { characters, userCharacters },
|
||||
storedSettings: { settingsOnTheMap, settingsOnTheMapUpdate },
|
||||
} = useMapRootState();
|
||||
|
||||
const [searchVal, setSearchVal] = useState('');
|
||||
|
||||
const [settings, setSettings] = useLocalStorageState<WindowLocalSettingsType>('window:onTheMap:settings', {
|
||||
defaultValue: STORED_DEFAULT_VALUES,
|
||||
});
|
||||
|
||||
const restrictOfflineShowing = useMapGetOption('restrict_offline_showing');
|
||||
const isAdminOrManager = useMapCheckPermissions([UserPermission.MANAGE_MAP]);
|
||||
|
||||
@@ -107,12 +91,12 @@ export const OnTheMap = ({ show, onHide }: OnTheMapProps) => {
|
||||
});
|
||||
}
|
||||
|
||||
if (showOffline && !settings.hideOffline) {
|
||||
if (showOffline && !settingsOnTheMap.hideOffline) {
|
||||
return out;
|
||||
}
|
||||
|
||||
return out.filter(x => x.online);
|
||||
}, [showOffline, searchVal, characters, settings.hideOffline, userCharacters]);
|
||||
}, [showOffline, searchVal, characters, settingsOnTheMap.hideOffline, userCharacters]);
|
||||
|
||||
return (
|
||||
<Sidebar
|
||||
@@ -153,9 +137,11 @@ export const OnTheMap = ({ show, onHide }: OnTheMapProps) => {
|
||||
size="m"
|
||||
labelSide="left"
|
||||
label={'Hide offline'}
|
||||
value={settings.hideOffline}
|
||||
value={settingsOnTheMap.hideOffline}
|
||||
classNameLabel="text-stone-400 hover:text-stone-200 transition duration-300"
|
||||
onChange={() => setSettings(() => ({ ...settings, hideOffline: !settings.hideOffline }))}
|
||||
onChange={() =>
|
||||
settingsOnTheMapUpdate(() => ({ ...settingsOnTheMap, hideOffline: !settingsOnTheMap.hideOffline }))
|
||||
}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
|
||||
@@ -0,0 +1,49 @@
|
||||
import { TooltipPosition, WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit';
|
||||
import useLocalStorageState from 'use-local-storage-state';
|
||||
import { useMapRootState } from '@/hooks/Mapper/mapRootProvider';
|
||||
|
||||
export const DebugComponent = () => {
|
||||
const { outCommand } = useMapRootState();
|
||||
|
||||
const [record, setRecord] = useLocalStorageState<boolean>('record', {
|
||||
defaultValue: false,
|
||||
});
|
||||
|
||||
// @ts-ignore
|
||||
const [recordsList] = useLocalStorageState<{ type; data }[]>('recordsList', {
|
||||
defaultValue: [],
|
||||
});
|
||||
|
||||
const handleRunSavedEvents = () => {
|
||||
recordsList.forEach(record => outCommand(record));
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
<WdTooltipWrapper content="Run saved events" position={TooltipPosition.left}>
|
||||
<button
|
||||
className="btn bg-transparent text-gray-400 hover:text-white border-transparent hover:bg-transparent py-2 h-auto min-h-auto"
|
||||
type="button"
|
||||
onClick={handleRunSavedEvents}
|
||||
disabled={recordsList.length === 0 || record}
|
||||
>
|
||||
<i className="pi pi-forward"></i>
|
||||
</button>
|
||||
</WdTooltipWrapper>
|
||||
|
||||
<WdTooltipWrapper content="Record" position={TooltipPosition.left}>
|
||||
<button
|
||||
className="btn bg-transparent text-gray-400 hover:text-white border-transparent hover:bg-transparent py-2 h-auto min-h-auto"
|
||||
type="button"
|
||||
onClick={() => setRecord(x => !x)}
|
||||
>
|
||||
{!record ? (
|
||||
<i className="pi pi-play-circle text-green-500"></i>
|
||||
) : (
|
||||
<i className="pi pi-stop-circle text-red-500"></i>
|
||||
)}
|
||||
</button>
|
||||
</WdTooltipWrapper>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -7,6 +7,7 @@ import { TooltipPosition } from '@/hooks/Mapper/components/ui-kit';
|
||||
|
||||
import { useMapCheckPermissions } from '@/hooks/Mapper/mapRootProvider/hooks/api';
|
||||
import { UserPermission } from '@/hooks/Mapper/types/permissions.ts';
|
||||
// import { DebugComponent } from '@/hooks/Mapper/components/mapRootContent/components/RightBar/DebugComponent.tsx';
|
||||
|
||||
interface RightBarProps {
|
||||
onShowOnTheMap?: () => void;
|
||||
@@ -79,6 +80,9 @@ export const RightBar = ({
|
||||
</div>
|
||||
|
||||
<div className="flex flex-col items-center mb-2 gap-1">
|
||||
{/* TODO - do not delete; this code is needed for debugging */}
|
||||
{/*<DebugComponent />*/}
|
||||
|
||||
<WdTooltipWrapper content="Map user settings" position={TooltipPosition.left}>
|
||||
<button
|
||||
className="btn bg-transparent text-gray-400 hover:text-white border-transparent hover:bg-transparent py-2 h-auto min-h-auto"
|
||||
|
||||
@@ -48,7 +48,7 @@ export const MapWrapper = () => {
|
||||
linkSignatureToSystem,
|
||||
systemSignatures,
|
||||
},
|
||||
storedSettings: { interfaceSettings },
|
||||
storedSettings: { interfaceSettings, settingsLocal },
|
||||
} = useMapRootState();
|
||||
|
||||
const {
|
||||
@@ -254,6 +254,7 @@ export const MapWrapper = () => {
|
||||
pings={pings}
|
||||
onAddSystem={onAddSystem}
|
||||
minimapPlacement={minimapPosition}
|
||||
localShowShipName={settingsLocal.showShipName}
|
||||
/>
|
||||
|
||||
{openSettings != null && (
|
||||
|
||||
@@ -33,6 +33,7 @@ export enum Regions {
|
||||
Solitude = 10000044,
|
||||
TashMurkon = 10000020,
|
||||
VergeVendor = 10000068,
|
||||
Pochven = 10000070,
|
||||
}
|
||||
|
||||
export enum Spaces {
|
||||
@@ -40,6 +41,7 @@ export enum Spaces {
|
||||
'Gallente' = 'Gallente',
|
||||
'Matar' = 'Matar',
|
||||
'Amarr' = 'Amarr',
|
||||
'Pochven' = 'Pochven',
|
||||
}
|
||||
|
||||
export const REGIONS_MAP: Record<number, Spaces> = {
|
||||
@@ -66,6 +68,7 @@ export const REGIONS_MAP: Record<number, Spaces> = {
|
||||
[Regions.Solitude]: Spaces.Gallente,
|
||||
[Regions.TashMurkon]: Spaces.Amarr,
|
||||
[Regions.VergeVendor]: Spaces.Gallente,
|
||||
[Regions.Pochven]: Spaces.Pochven,
|
||||
};
|
||||
|
||||
export type K162Type = {
|
||||
|
||||
assets/js/hooks/Mapper/constants/signatures.ts (new file, 71 lines)
@@ -0,0 +1,71 @@
|
||||
import { SignatureGroup, SignatureKind } from '@/hooks/Mapper/types';
|
||||
|
||||
export const SIGNATURE_WINDOW_ID = 'system_signatures_window';
|
||||
|
||||
export enum SIGNATURES_DELETION_TIMING {
|
||||
IMMEDIATE,
|
||||
DEFAULT,
|
||||
EXTENDED,
|
||||
}
|
||||
|
||||
export enum SETTINGS_KEYS {
|
||||
SORT_FIELD = 'sortField',
|
||||
SORT_ORDER = 'sortOrder',
|
||||
|
||||
SHOW_DESCRIPTION_COLUMN = 'show_description_column',
|
||||
SHOW_UPDATED_COLUMN = 'show_updated_column',
|
||||
SHOW_CHARACTER_COLUMN = 'show_character_column',
|
||||
LAZY_DELETE_SIGNATURES = 'lazy_delete_signatures',
|
||||
KEEP_LAZY_DELETE = 'keep_lazy_delete_enabled',
|
||||
DELETION_TIMING = 'deletion_timing',
|
||||
COLOR_BY_TYPE = 'color_by_type',
|
||||
SHOW_CHARACTER_PORTRAIT = 'show_character_portrait',
|
||||
|
||||
// From SignatureKind
|
||||
COSMIC_ANOMALY = SignatureKind.CosmicAnomaly,
|
||||
COSMIC_SIGNATURE = SignatureKind.CosmicSignature,
|
||||
DEPLOYABLE = SignatureKind.Deployable,
|
||||
STRUCTURE = SignatureKind.Structure,
|
||||
STARBASE = SignatureKind.Starbase,
|
||||
SHIP = SignatureKind.Ship,
|
||||
DRONE = SignatureKind.Drone,
|
||||
|
||||
// From SignatureGroup
|
||||
WORMHOLE = SignatureGroup.Wormhole,
|
||||
RELIC_SITE = SignatureGroup.RelicSite,
|
||||
DATA_SITE = SignatureGroup.DataSite,
|
||||
ORE_SITE = SignatureGroup.OreSite,
|
||||
GAS_SITE = SignatureGroup.GasSite,
|
||||
COMBAT_SITE = SignatureGroup.CombatSite,
|
||||
}
|
||||
|
||||
export type SignatureSettingsType = { [key in SETTINGS_KEYS]?: unknown };
|
||||
|
||||
export const DEFAULT_SIGNATURE_SETTINGS: SignatureSettingsType = {
|
||||
[SETTINGS_KEYS.SORT_FIELD]: 'inserted_at',
|
||||
[SETTINGS_KEYS.SORT_ORDER]: -1,
|
||||
|
||||
[SETTINGS_KEYS.SHOW_UPDATED_COLUMN]: true,
|
||||
[SETTINGS_KEYS.SHOW_DESCRIPTION_COLUMN]: true,
|
||||
[SETTINGS_KEYS.SHOW_CHARACTER_COLUMN]: true,
|
||||
[SETTINGS_KEYS.LAZY_DELETE_SIGNATURES]: true,
|
||||
[SETTINGS_KEYS.KEEP_LAZY_DELETE]: false,
|
||||
[SETTINGS_KEYS.DELETION_TIMING]: SIGNATURES_DELETION_TIMING.DEFAULT,
|
||||
[SETTINGS_KEYS.COLOR_BY_TYPE]: true,
|
||||
[SETTINGS_KEYS.SHOW_CHARACTER_PORTRAIT]: true,
|
||||
|
||||
[SETTINGS_KEYS.COSMIC_ANOMALY]: true,
|
||||
[SETTINGS_KEYS.COSMIC_SIGNATURE]: true,
|
||||
[SETTINGS_KEYS.DEPLOYABLE]: true,
|
||||
[SETTINGS_KEYS.STRUCTURE]: true,
|
||||
[SETTINGS_KEYS.STARBASE]: true,
|
||||
[SETTINGS_KEYS.SHIP]: true,
|
||||
[SETTINGS_KEYS.DRONE]: true,
|
||||
|
||||
[SETTINGS_KEYS.WORMHOLE]: true,
|
||||
[SETTINGS_KEYS.RELIC_SITE]: true,
|
||||
[SETTINGS_KEYS.DATA_SITE]: true,
|
||||
[SETTINGS_KEYS.ORE_SITE]: true,
|
||||
[SETTINGS_KEYS.GAS_SITE]: true,
|
||||
[SETTINGS_KEYS.COMBAT_SITE]: true,
|
||||
};
|
||||
assets/js/hooks/Mapper/helpers/getFormattedTime.ts (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
export function getFormattedTime() {
|
||||
const now = new Date();
|
||||
|
||||
const hours = String(now.getHours()).padStart(2, '0');
|
||||
const minutes = String(now.getMinutes()).padStart(2, '0');
|
||||
const seconds = String(now.getSeconds()).padStart(2, '0');
|
||||
|
||||
const ms = String(now.getMilliseconds() + 1000).slice(1);
|
||||
|
||||
return `${hours}:${minutes}:${seconds} ${ms}`;
|
||||
}
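The String(now.getMilliseconds() + 1000).slice(1) expression above left-pads milliseconds to three digits (7 becomes '007'), so a call like getFormattedTime() returns a string of the form '14:03:09 007'. An equivalent padStart form, shown here for comparison only:
const paddedMs = String(new Date().getMilliseconds()).padStart(3, '0');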
|
||||
@@ -1,4 +1,3 @@
|
||||
export * from './useActualizeSettings';
|
||||
export * from './useClipboard';
|
||||
export * from './useHotkey';
|
||||
export * from './usePageVisibility';
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
import { useEffect } from 'react';
|
||||
|
||||
type Settings = Record<string, unknown>;
|
||||
export const useActualizeSettings = <T extends Settings>(defaultVals: T, vals: T, setVals: (newVals: T) => void) => {
|
||||
useEffect(() => {
|
||||
let foundNew = false;
|
||||
const newVals = Object.keys(defaultVals).reduce((acc, x) => {
|
||||
if (Object.keys(acc).includes(x)) {
|
||||
return acc;
|
||||
}
|
||||
|
||||
foundNew = true;
|
||||
|
||||
// @ts-ignore
|
||||
return { ...acc, [x]: defaultVals[x] };
|
||||
}, vals);
|
||||
|
||||
if (foundNew) {
|
||||
setVals(newVals);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
};
|
||||
@@ -1,11 +1,12 @@
|
||||
import { useState, useEffect } from 'react';
|
||||
|
||||
function usePageVisibility() {
|
||||
const [isVisible, setIsVisible] = useState(!document.hidden);
|
||||
const getIsVisible = () => !document.hidden;
|
||||
const [isVisible, setIsVisible] = useState(getIsVisible());
|
||||
|
||||
useEffect(() => {
|
||||
const handleVisibilityChange = () => {
|
||||
setIsVisible(!document.hidden);
|
||||
setIsVisible(getIsVisible());
|
||||
};
|
||||
|
||||
document.addEventListener('visibilitychange', handleVisibilityChange);
|
||||
|
||||
@@ -19,10 +19,24 @@ import {
|
||||
} from '@/hooks/Mapper/mapRootProvider/hooks/useStoreWidgets.ts';
|
||||
import { WindowsManagerOnChange } from '@/hooks/Mapper/components/ui-kit/WindowManager';
|
||||
import { DetailedKill } from '../types/kills';
|
||||
import { InterfaceStoredSettings, RoutesType } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import { DEFAULT_ROUTES_SETTINGS, STORED_INTERFACE_DEFAULT_VALUES } from '@/hooks/Mapper/mapRootProvider/constants.ts';
|
||||
import {
|
||||
InterfaceStoredSettings,
|
||||
KillsWidgetSettings,
|
||||
LocalWidgetSettings,
|
||||
MapUserSettings,
|
||||
OnTheMapSettingsType,
|
||||
RoutesType,
|
||||
} from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import {
|
||||
DEFAULT_KILLS_WIDGET_SETTINGS,
|
||||
DEFAULT_ON_THE_MAP_SETTINGS,
|
||||
DEFAULT_ROUTES_SETTINGS,
|
||||
DEFAULT_WIDGET_LOCAL_SETTINGS,
|
||||
STORED_INTERFACE_DEFAULT_VALUES,
|
||||
} from '@/hooks/Mapper/mapRootProvider/constants.ts';
|
||||
import { useMapUserSettings } from '@/hooks/Mapper/mapRootProvider/hooks/useMapUserSettings.ts';
|
||||
import { useGlobalHooks } from '@/hooks/Mapper/mapRootProvider/hooks/useGlobalHooks.ts';
|
||||
import { DEFAULT_SIGNATURE_SETTINGS, SignatureSettingsType } from '@/hooks/Mapper/constants/signatures';
|
||||
|
||||
export type MapRootData = MapUnionTypes & {
|
||||
selectedSystems: string[];
|
||||
@@ -36,6 +50,7 @@ export type MapRootData = MapUnionTypes & {
|
||||
};
|
||||
trackingCharactersData: TrackingCharacter[];
|
||||
loadingPublicRoutes: boolean;
|
||||
map_slug: string | null;
|
||||
};
|
||||
|
||||
const INITIAL_DATA: MapRootData = {
|
||||
@@ -70,6 +85,7 @@ const INITIAL_DATA: MapRootData = {
|
||||
followingCharacterEveId: null,
|
||||
pings: [],
|
||||
loadingPublicRoutes: false,
|
||||
map_slug: null,
|
||||
};
|
||||
|
||||
export enum InterfaceStoredSettingsProps {
|
||||
@@ -103,6 +119,19 @@ export interface MapRootContextProps {
|
||||
setInterfaceSettings: Dispatch<SetStateAction<InterfaceStoredSettings>>;
|
||||
settingsRoutes: RoutesType;
|
||||
settingsRoutesUpdate: Dispatch<SetStateAction<RoutesType>>;
|
||||
settingsLocal: LocalWidgetSettings;
|
||||
settingsLocalUpdate: Dispatch<SetStateAction<LocalWidgetSettings>>;
|
||||
settingsSignatures: SignatureSettingsType;
|
||||
settingsSignaturesUpdate: Dispatch<SetStateAction<SignatureSettingsType>>;
|
||||
settingsOnTheMap: OnTheMapSettingsType;
|
||||
settingsOnTheMapUpdate: Dispatch<SetStateAction<OnTheMapSettingsType>>;
|
||||
settingsKills: KillsWidgetSettings;
|
||||
settingsKillsUpdate: Dispatch<SetStateAction<KillsWidgetSettings>>;
|
||||
isReady: boolean;
|
||||
hasOldSettings: boolean;
|
||||
getSettingsForExport(): string | undefined;
|
||||
applySettings(settings: MapUserSettings): boolean;
|
||||
checkOldSettings(): void;
|
||||
};
|
||||
}
|
||||
|
||||
@@ -134,6 +163,19 @@ const MapRootContext = createContext<MapRootContextProps>({
|
||||
setInterfaceSettings: () => null,
|
||||
settingsRoutes: DEFAULT_ROUTES_SETTINGS,
|
||||
settingsRoutesUpdate: () => null,
|
||||
settingsLocal: DEFAULT_WIDGET_LOCAL_SETTINGS,
|
||||
settingsLocalUpdate: () => null,
|
||||
settingsSignatures: DEFAULT_SIGNATURE_SETTINGS,
|
||||
settingsSignaturesUpdate: () => null,
|
||||
settingsOnTheMap: DEFAULT_ON_THE_MAP_SETTINGS,
|
||||
settingsOnTheMapUpdate: () => null,
|
||||
settingsKills: DEFAULT_KILLS_WIDGET_SETTINGS,
|
||||
settingsKillsUpdate: () => null,
|
||||
isReady: false,
|
||||
hasOldSettings: false,
|
||||
getSettingsForExport: () => '',
|
||||
applySettings: () => false,
|
||||
checkOldSettings: () => null,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -154,9 +196,11 @@ const MapRootHandlers = forwardRef(({ children }: WithChildren, fwdRef: Forwarde
|
||||
export const MapRootProvider = ({ children, fwdRef, outCommand }: MapRootProviderProps) => {
|
||||
const { update, ref } = useContextStore<MapRootData>({ ...INITIAL_DATA });
|
||||
|
||||
const storedSettings = useMapUserSettings();
|
||||
const storedSettings = useMapUserSettings(ref);
|
||||
|
||||
const { windowsSettings, toggleWidgetVisibility, updateWidgetSettings, resetWidgets } =
|
||||
useStoreWidgets(storedSettings);
|
||||
|
||||
const { windowsSettings, toggleWidgetVisibility, updateWidgetSettings, resetWidgets } = useStoreWidgets();
|
||||
const comments = useComments({ outCommand });
|
||||
const charactersCache = useCharactersCache({ outCommand });
|
||||
|
||||
|
||||
@@ -1,10 +1,18 @@
|
||||
import {
|
||||
AvailableThemes,
|
||||
InterfaceStoredSettings,
|
||||
KillsWidgetSettings,
|
||||
LocalWidgetSettings,
|
||||
MiniMapPlacement,
|
||||
OnTheMapSettingsType,
|
||||
PingsPlacement,
|
||||
RoutesType,
|
||||
} from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import {
|
||||
CURRENT_WINDOWS_VERSION,
|
||||
DEFAULT_WIDGETS,
|
||||
STORED_VISIBLE_WIDGETS_DEFAULT,
|
||||
} from '@/hooks/Mapper/components/mapInterface/constants.tsx';
|
||||
|
||||
export const STORED_INTERFACE_DEFAULT_VALUES: InterfaceStoredSettings = {
|
||||
isShowMenu: false,
|
||||
@@ -31,3 +39,29 @@ export const DEFAULT_ROUTES_SETTINGS: RoutesType = {
|
||||
avoid_triglavian: false,
|
||||
avoid: [],
|
||||
};
|
||||
|
||||
export const DEFAULT_WIDGET_LOCAL_SETTINGS: LocalWidgetSettings = {
|
||||
compact: true,
|
||||
showOffline: false,
|
||||
version: 0,
|
||||
showShipName: false,
|
||||
};
|
||||
|
||||
export const DEFAULT_ON_THE_MAP_SETTINGS: OnTheMapSettingsType = {
|
||||
hideOffline: false,
|
||||
version: 0,
|
||||
};
|
||||
|
||||
export const DEFAULT_KILLS_WIDGET_SETTINGS: KillsWidgetSettings = {
|
||||
showAll: false,
|
||||
whOnly: true,
|
||||
excludedSystems: [],
|
||||
version: 2,
|
||||
timeRange: 4,
|
||||
};
|
||||
|
||||
export const getDefaultWidgetProps = () => ({
|
||||
version: CURRENT_WINDOWS_VERSION,
|
||||
visible: STORED_VISIBLE_WIDGETS_DEFAULT,
|
||||
windows: DEFAULT_WIDGETS,
|
||||
});
|
||||
|
||||
@@ -0,0 +1,22 @@
|
||||
type Settings = Record<string, unknown>;
|
||||
|
||||
export const actualizeSettings = <T extends Settings>(defaultVals: T, vals: T, setVals: (newVals: T) => void) => {
|
||||
let foundNew = false;
|
||||
|
||||
const newVals = Object.keys(defaultVals).reduce((acc, key) => {
|
||||
if (key in acc) {
|
||||
return acc;
|
||||
}
|
||||
|
||||
foundNew = true;
|
||||
|
||||
return {
|
||||
...acc,
|
||||
[key]: defaultVals[key],
|
||||
};
|
||||
}, vals);
|
||||
|
||||
if (foundNew) {
|
||||
setVals(newVals);
|
||||
}
|
||||
};
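actualizeSettings backfills keys that exist in the defaults but are missing from the stored value, and calls setVals only when something new was added. A small usage sketch (the settings shape is just an example):
type ExampleSettings = Record<string, unknown>;
const defaults: ExampleSettings = { compact: true, hideOffline: false };
const stored: ExampleSettings = { compact: true };
// hideOffline is missing from stored, so the setter receives the merged object.
actualizeSettings(defaults, stored, next => console.log(next)); // { compact: true, hideOffline: false }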
|
||||
assets/js/hooks/Mapper/mapRootProvider/helpers/index.ts (new file, 1 line)
@@ -0,0 +1 @@
|
||||
export * from './actualizeSettings';
|
||||
@@ -27,6 +27,7 @@ export const useMapInit = () => {
|
||||
main_character_eve_id,
|
||||
following_character_eve_id,
|
||||
user_hubs,
|
||||
map_slug,
|
||||
} = props;
|
||||
|
||||
const updateData: Partial<MapRootData> = {};
|
||||
@@ -98,6 +99,10 @@ export const useMapInit = () => {
|
||||
updateData.followingCharacterEveId = following_character_eve_id;
|
||||
}
|
||||
|
||||
if ('map_slug' in props) {
|
||||
updateData.map_slug = map_slug;
|
||||
}
|
||||
|
||||
update(updateData);
|
||||
},
|
||||
[update, addSystemStatic],
|
||||
|
||||
@@ -1,39 +1,222 @@
|
||||
import useLocalStorageState from 'use-local-storage-state';
|
||||
import { InterfaceStoredSettings, RoutesType } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import { DEFAULT_ROUTES_SETTINGS, STORED_INTERFACE_DEFAULT_VALUES } from '@/hooks/Mapper/mapRootProvider/constants.ts';
|
||||
import { useActualizeSettings } from '@/hooks/Mapper/hooks';
|
||||
import { useEffect } from 'react';
|
||||
import { SESSION_KEY } from '@/hooks/Mapper/constants.ts';
|
||||
import { MapUserSettings, MapUserSettingsStructure } from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
import {
|
||||
DEFAULT_KILLS_WIDGET_SETTINGS,
|
||||
DEFAULT_ON_THE_MAP_SETTINGS,
|
||||
DEFAULT_ROUTES_SETTINGS,
|
||||
DEFAULT_WIDGET_LOCAL_SETTINGS,
|
||||
getDefaultWidgetProps,
|
||||
STORED_INTERFACE_DEFAULT_VALUES,
|
||||
} from '@/hooks/Mapper/mapRootProvider/constants.ts';
|
||||
import { useCallback, useEffect, useRef, useState } from 'react';
|
||||
import { DEFAULT_SIGNATURE_SETTINGS } from '@/hooks/Mapper/constants/signatures';
|
||||
import { MapRootData } from '@/hooks/Mapper/mapRootProvider';
|
||||
import { useSettingsValueAndSetter } from '@/hooks/Mapper/mapRootProvider/hooks/useSettingsValueAndSetter.ts';
|
||||
import fastDeepEqual from 'fast-deep-equal';
|
||||
|
||||
export const useMigrationRoutesSettingsV1 = (update: (upd: RoutesType) => void) => {
|
||||
//TODO if current Date is more than 01.01.2026 - remove this hook.
|
||||
// import { actualizeSettings } from '@/hooks/Mapper/mapRootProvider/helpers';
|
||||
|
||||
useEffect(() => {
|
||||
const items = localStorage.getItem(SESSION_KEY.routes);
|
||||
if (items) {
|
||||
update(JSON.parse(items));
|
||||
localStorage.removeItem(SESSION_KEY.routes);
|
||||
}
|
||||
// eslint-disable-next-line react-hooks/exhaustive-deps
|
||||
}, []);
|
||||
// TODO - we need provide and compare version
|
||||
const createWidgetSettingsWithVersion = <T>(settings: T) => {
|
||||
return {
|
||||
version: 0,
|
||||
settings,
|
||||
};
|
||||
};
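For reference, this helper simply pairs a settings object with a version stamp; for example (the argument here is only an illustration):
const routesEntry = createWidgetSettingsWithVersion(DEFAULT_ROUTES_SETTINGS);
// routesEntry => { version: 0, settings: DEFAULT_ROUTES_SETTINGS }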
|
||||
|
||||
export const useMapUserSettings = () => {
|
||||
const [interfaceSettings, setInterfaceSettings] = useLocalStorageState<InterfaceStoredSettings>(
|
||||
'window:interface:settings',
|
||||
{
|
||||
defaultValue: STORED_INTERFACE_DEFAULT_VALUES,
|
||||
},
|
||||
);
|
||||
const createDefaultWidgetSettings = (): MapUserSettings => {
|
||||
return {
|
||||
killsWidget: createWidgetSettingsWithVersion(DEFAULT_KILLS_WIDGET_SETTINGS),
|
||||
localWidget: createWidgetSettingsWithVersion(DEFAULT_WIDGET_LOCAL_SETTINGS),
|
||||
widgets: createWidgetSettingsWithVersion(getDefaultWidgetProps()),
|
||||
routes: createWidgetSettingsWithVersion(DEFAULT_ROUTES_SETTINGS),
|
||||
onTheMap: createWidgetSettingsWithVersion(DEFAULT_ON_THE_MAP_SETTINGS),
|
||||
signaturesWidget: createWidgetSettingsWithVersion(DEFAULT_SIGNATURE_SETTINGS),
|
||||
interface: createWidgetSettingsWithVersion(STORED_INTERFACE_DEFAULT_VALUES),
|
||||
};
|
||||
};
|
||||
|
||||
const [settingsRoutes, settingsRoutesUpdate] = useLocalStorageState<RoutesType>('window:interface:routes', {
|
||||
defaultValue: DEFAULT_ROUTES_SETTINGS,
|
||||
const EMPTY_OBJ = {};
|
||||
|
||||
export const useMapUserSettings = ({ map_slug }: MapRootData) => {
|
||||
const [isReady, setIsReady] = useState(false);
|
||||
const [hasOldSettings, setHasOldSettings] = useState(false);
|
||||
|
||||
const [mapUserSettings, setMapUserSettings] = useLocalStorageState<MapUserSettingsStructure>('map-user-settings', {
|
||||
defaultValue: EMPTY_OBJ,
|
||||
});
|
||||
|
||||
useActualizeSettings(STORED_INTERFACE_DEFAULT_VALUES, interfaceSettings, setInterfaceSettings);
|
||||
useActualizeSettings(DEFAULT_ROUTES_SETTINGS, settingsRoutes, settingsRoutesUpdate);
|
||||
const ref = useRef({ mapUserSettings, setMapUserSettings, map_slug });
|
||||
ref.current = { mapUserSettings, setMapUserSettings, map_slug };
|
||||
|
||||
useMigrationRoutesSettingsV1(settingsRoutesUpdate);
|
||||
useEffect(() => {
|
||||
const { mapUserSettings, setMapUserSettings } = ref.current;
|
||||
if (map_slug === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
return { interfaceSettings, setInterfaceSettings, settingsRoutes, settingsRoutesUpdate };
|
||||
if (!(map_slug in mapUserSettings)) {
|
||||
setMapUserSettings({
|
||||
...mapUserSettings,
|
||||
[map_slug]: createDefaultWidgetSettings(),
|
||||
});
|
||||
}
|
||||
}, [map_slug]);
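Since settings are now keyed by map_slug inside a single 'map-user-settings' localStorage entry, the persisted structure ends up roughly as follows (the slug value is only an example):
// localStorage['map-user-settings'] looks like:
// {
//   "my-map-slug": {
//     killsWidget: { version: 0, settings: { ...DEFAULT_KILLS_WIDGET_SETTINGS } },
//     routes: { version: 0, settings: { ...DEFAULT_ROUTES_SETTINGS } },
//     interface: { version: 0, settings: { ...STORED_INTERFACE_DEFAULT_VALUES } },
//     ...
//   }
// }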
|
||||
|
||||
const [interfaceSettings, setInterfaceSettings] = useSettingsValueAndSetter(
|
||||
mapUserSettings,
|
||||
setMapUserSettings,
|
||||
map_slug,
|
||||
'interface',
|
||||
);
|
||||
|
||||
const [settingsRoutes, settingsRoutesUpdate] = useSettingsValueAndSetter(
|
||||
mapUserSettings,
|
||||
setMapUserSettings,
|
||||
map_slug,
|
||||
'routes',
|
||||
);
|
||||
|
||||
const [settingsLocal, settingsLocalUpdate] = useSettingsValueAndSetter(
|
||||
mapUserSettings,
|
||||
setMapUserSettings,
|
||||
map_slug,
|
||||
'localWidget',
|
||||
);
|
||||
|
||||
const [settingsSignatures, settingsSignaturesUpdate] = useSettingsValueAndSetter(
|
||||
mapUserSettings,
|
||||
setMapUserSettings,
|
||||
map_slug,
|
||||
'signaturesWidget',
|
||||
);
|
||||
|
||||
const [settingsOnTheMap, settingsOnTheMapUpdate] = useSettingsValueAndSetter(
|
||||
mapUserSettings,
|
||||
setMapUserSettings,
|
||||
map_slug,
|
||||
'onTheMap',
|
||||
);
|
||||
|
||||
const [settingsKills, settingsKillsUpdate] = useSettingsValueAndSetter(
|
||||
mapUserSettings,
|
||||
setMapUserSettings,
|
||||
map_slug,
|
||||
'killsWidget',
|
||||
);
|
||||
|
||||
const [windowsSettings, setWindowsSettings] = useSettingsValueAndSetter(
|
||||
mapUserSettings,
|
||||
setMapUserSettings,
|
||||
map_slug,
|
||||
'widgets',
|
||||
);
|
||||
|
||||
// NOTE: this is where migrations MUST be handled.
|
||||
useEffect(() => {
|
||||
if (isReady) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (map_slug === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (mapUserSettings[map_slug] == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: from 06.07.2025 onwards we must work only with migrations.
|
||||
// actualizeSettings(STORED_INTERFACE_DEFAULT_VALUES, interfaceSettings, setInterfaceSettings);
|
||||
// actualizeSettings(DEFAULT_ROUTES_SETTINGS, settingsRoutes, settingsRoutesUpdate);
|
||||
// actualizeSettings(DEFAULT_WIDGET_LOCAL_SETTINGS, settingsLocal, settingsLocalUpdate);
|
||||
// actualizeSettings(DEFAULT_SIGNATURE_SETTINGS, settingsSignatures, settingsSignaturesUpdate);
|
||||
// actualizeSettings(DEFAULT_ON_THE_MAP_SETTINGS, settingsOnTheMap, settingsOnTheMapUpdate);
|
||||
// actualizeSettings(DEFAULT_KILLS_WIDGET_SETTINGS, settingsKills, settingsKillsUpdate);
|
||||
|
||||
setIsReady(true);
|
||||
}, [
|
||||
map_slug,
|
||||
mapUserSettings,
|
||||
interfaceSettings,
|
||||
setInterfaceSettings,
|
||||
settingsRoutes,
|
||||
settingsRoutesUpdate,
|
||||
settingsLocal,
|
||||
settingsLocalUpdate,
|
||||
settingsSignatures,
|
||||
settingsSignaturesUpdate,
|
||||
settingsOnTheMap,
|
||||
settingsOnTheMapUpdate,
|
||||
settingsKills,
|
||||
settingsKillsUpdate,
|
||||
isReady,
|
||||
]);
|
||||
|
||||
const checkOldSettings = useCallback(() => {
|
||||
const interfaceSettings = localStorage.getItem('window:interface:settings');
|
||||
const widgetRoutes = localStorage.getItem('window:interface:routes');
|
||||
const widgetLocal = localStorage.getItem('window:interface:local');
|
||||
const widgetKills = localStorage.getItem('kills:widget:settings');
|
||||
const onTheMapOld = localStorage.getItem('window:onTheMap:settings');
|
||||
const widgetsOld = localStorage.getItem('windows:settings:v2');
|
||||
|
||||
setHasOldSettings(!!(widgetsOld || interfaceSettings || widgetRoutes || widgetLocal || widgetKills || onTheMapOld));
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
checkOldSettings();
|
||||
}, [checkOldSettings]);
|
||||
|
||||
const getSettingsForExport = useCallback(() => {
|
||||
const { map_slug } = ref.current;
|
||||
|
||||
if (map_slug == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
return JSON.stringify(ref.current.mapUserSettings[map_slug]);
|
||||
}, []);
|
||||
|
||||
const applySettings = useCallback((settings: MapUserSettings) => {
|
||||
const { map_slug, mapUserSettings, setMapUserSettings } = ref.current;
|
||||
|
||||
if (map_slug == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (fastDeepEqual(settings, mapUserSettings[map_slug])) {
|
||||
return false;
|
||||
}
|
||||
|
||||
setMapUserSettings(old => ({
|
||||
...old,
|
||||
[map_slug]: settings,
|
||||
}));
|
||||
return true;
|
||||
}, []);
|
||||
|
||||
return {
|
||||
isReady,
|
||||
hasOldSettings,
|
||||
|
||||
interfaceSettings,
|
||||
setInterfaceSettings,
|
||||
settingsRoutes,
|
||||
settingsRoutesUpdate,
|
||||
settingsLocal,
|
||||
settingsLocalUpdate,
|
||||
settingsSignatures,
|
||||
settingsSignaturesUpdate,
|
||||
settingsOnTheMap,
|
||||
settingsOnTheMapUpdate,
|
||||
settingsKills,
|
||||
settingsKillsUpdate,
|
||||
windowsSettings,
|
||||
setWindowsSettings,
|
||||
|
||||
getSettingsForExport,
|
||||
applySettings,
|
||||
checkOldSettings,
|
||||
};
|
||||
};
|
||||
|
||||
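The hook above returns getSettingsForExport and applySettings, which back the new Import/Export part of the unified settings. A minimal sketch of how a caller could wire them to the loadTextFile/saveTextFile helpers added later in this change; the module, the function names and the file name are illustrative, not part of the diff:

import { loadTextFile, saveTextFile } from '@/hooks/Mapper/utils';
import { MapUserSettings } from '@/hooks/Mapper/mapRootProvider/types.ts';

// Shape of the subset of useMapUserSettings' return value relied on here.
type MapSettingsApi = {
  getSettingsForExport: () => string | undefined;
  applySettings: (settings: MapUserSettings) => boolean;
};

// Serialize the current map's settings and trigger a download.
export const exportMapSettings = (api: MapSettingsApi) => {
  const json = api.getSettingsForExport();
  if (json != null) {
    saveTextFile('wanderer-map-settings.json', json);
  }
};

// Ask the user for a previously exported JSON file and apply it.
// Returns true when the stored settings actually changed.
export const importMapSettings = async (api: MapSettingsApi): Promise<boolean> => {
  const raw = await loadTextFile();
  return api.applySettings(JSON.parse(raw) as MapUserSettings);
};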
@@ -0,0 +1,60 @@
|
||||
import { Dispatch, SetStateAction, useCallback, useMemo, useRef } from 'react';
|
||||
import {
|
||||
MapUserSettings,
|
||||
MapUserSettingsStructure,
|
||||
SettingsWithVersion,
|
||||
} from '@/hooks/Mapper/mapRootProvider/types.ts';
|
||||
|
||||
type ExtractSettings<S extends keyof MapUserSettings> =
|
||||
MapUserSettings[S] extends SettingsWithVersion<infer U> ? U : never;
|
||||
|
||||
type Setter<S extends keyof MapUserSettings> = (
|
||||
value: Partial<ExtractSettings<S>> | ((prev: ExtractSettings<S>) => Partial<ExtractSettings<S>>),
|
||||
) => void;
|
||||
|
||||
type GenerateSettingsReturn<S extends keyof MapUserSettings> = [ExtractSettings<S>, Setter<S>];
|
||||
|
||||
export const useSettingsValueAndSetter = <S extends keyof MapUserSettings>(
|
||||
settings: MapUserSettingsStructure,
|
||||
setSettings: Dispatch<SetStateAction<MapUserSettingsStructure>>,
|
||||
mapId: string | null,
|
||||
setting: S,
|
||||
): GenerateSettingsReturn<S> => {
|
||||
const data = useMemo<ExtractSettings<S>>(() => {
|
||||
if (!mapId) return {} as ExtractSettings<S>;
|
||||
|
||||
const mapSettings = settings[mapId];
|
||||
return (mapSettings?.[setting]?.settings ?? ({} as ExtractSettings<S>)) as ExtractSettings<S>;
|
||||
}, [mapId, setting, settings]);
|
||||
|
||||
const refData = useRef({ mapId, setting, setSettings });
|
||||
refData.current = { mapId, setting, setSettings };
|
||||
|
||||
const setter = useCallback<Setter<S>>(value => {
|
||||
const { mapId, setting, setSettings } = refData.current;
|
||||
|
||||
if (!mapId) return;
|
||||
|
||||
setSettings(all => {
|
||||
const currentMap = all[mapId];
|
||||
const prev = currentMap[setting].settings as ExtractSettings<S>;
|
||||
const version = currentMap[setting].version;
|
||||
|
||||
const patch =
|
||||
typeof value === 'function' ? (value as (p: ExtractSettings<S>) => Partial<ExtractSettings<S>>)(prev) : value;
|
||||
|
||||
return {
|
||||
...all,
|
||||
[mapId]: {
|
||||
...currentMap,
|
||||
[setting]: {
|
||||
version,
|
||||
settings: { ...(prev as any), ...patch } as ExtractSettings<S>,
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
||||
}, []);
|
||||
|
||||
return [data, setter];
|
||||
};
|
||||
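A short usage sketch for the hook above, as it is called from useMapUserSettings earlier in this diff; the concrete values passed to the setter are illustrative:

// Bind the 'routes' slice of the current map's settings.
const [settingsRoutes, settingsRoutesUpdate] = useSettingsValueAndSetter(
  mapUserSettings,    // MapUserSettingsStructure kept in local storage
  setMapUserSettings, // its setter
  map_slug,           // current map id; the setter is a no-op while this is null
  'routes',
);

// Partial updates are shallow-merged into the stored slice (its version field is kept):
settingsRoutesUpdate({ avoid_triglavian: true });

// Functional updates receive the previous slice value:
settingsRoutesUpdate(prev => ({ avoid: [...prev.avoid, 30000142] }));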
@@ -1,14 +1,8 @@
|
||||
import useLocalStorageState from 'use-local-storage-state';
|
||||
import {
|
||||
CURRENT_WINDOWS_VERSION,
|
||||
DEFAULT_WIDGETS,
|
||||
STORED_VISIBLE_WIDGETS_DEFAULT,
|
||||
WidgetsIds,
|
||||
WINDOWS_LOCAL_STORE_KEY,
|
||||
} from '@/hooks/Mapper/components/mapInterface/constants.tsx';
|
||||
import { DEFAULT_WIDGETS, WidgetsIds } from '@/hooks/Mapper/components/mapInterface/constants.tsx';
|
||||
import { WindowProps } from '@/hooks/Mapper/components/ui-kit/WindowManager/types.ts';
|
||||
import { useCallback, useEffect, useRef } from 'react';
|
||||
import { /*SNAP_GAP,*/ WindowsManagerOnChange } from '@/hooks/Mapper/components/ui-kit/WindowManager';
|
||||
import { Dispatch, SetStateAction, useCallback, useRef } from 'react';
|
||||
import { WindowsManagerOnChange } from '@/hooks/Mapper/components/ui-kit/WindowManager';
|
||||
import { getDefaultWidgetProps } from '@/hooks/Mapper/mapRootProvider/constants.ts';
|
||||
|
||||
export type StoredWindowProps = Omit<WindowProps, 'content'>;
|
||||
export type WindowStoreInfo = {
|
||||
@@ -20,17 +14,12 @@ export type WindowStoreInfo = {
|
||||
// export type UpdateWidgetSettingsFunc = (widgets: WindowProps[]) => void;
|
||||
export type ToggleWidgetVisibility = (widgetId: WidgetsIds) => void;
|
||||
|
||||
export const getDefaultWidgetProps = () => ({
|
||||
version: CURRENT_WINDOWS_VERSION,
|
||||
visible: STORED_VISIBLE_WIDGETS_DEFAULT,
|
||||
windows: DEFAULT_WIDGETS,
|
||||
});
|
||||
|
||||
export const useStoreWidgets = () => {
|
||||
const [windowsSettings, setWindowsSettings] = useLocalStorageState<WindowStoreInfo>(WINDOWS_LOCAL_STORE_KEY, {
|
||||
defaultValue: getDefaultWidgetProps(),
|
||||
});
|
||||
interface UseStoreWidgetsProps {
|
||||
windowsSettings: WindowStoreInfo;
|
||||
setWindowsSettings: Dispatch<SetStateAction<WindowStoreInfo>>;
|
||||
}
|
||||
|
||||
export const useStoreWidgets = ({ windowsSettings, setWindowsSettings }: UseStoreWidgetsProps) => {
|
||||
const ref = useRef({ windowsSettings, setWindowsSettings });
|
||||
ref.current = { windowsSettings, setWindowsSettings };
|
||||
|
||||
@@ -83,33 +72,6 @@ export const useStoreWidgets = () => {
|
||||
});
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
const { setWindowsSettings } = ref.current;
|
||||
|
||||
const raw = localStorage.getItem(WINDOWS_LOCAL_STORE_KEY);
|
||||
if (!raw) {
|
||||
console.warn('No windows found in local storage!!');
|
||||
|
||||
setWindowsSettings(getDefaultWidgetProps());
|
||||
return;
|
||||
}
|
||||
|
||||
const { version, windows, visible, viewPort } = JSON.parse(raw) as WindowStoreInfo;
|
||||
if (!version || CURRENT_WINDOWS_VERSION > version) {
|
||||
setWindowsSettings(getDefaultWidgetProps());
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-debugger
|
||||
const out = windows.filter(x => DEFAULT_WIDGETS.find(def => def.id === x.id));
|
||||
|
||||
setWindowsSettings({
|
||||
version: CURRENT_WINDOWS_VERSION,
|
||||
windows: out as WindowProps[],
|
||||
visible,
|
||||
viewPort,
|
||||
});
|
||||
}, []);
|
||||
|
||||
const resetWidgets = useCallback(() => ref.current.setWindowsSettings(getDefaultWidgetProps()), []);
|
||||
|
||||
return {
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
import { WindowStoreInfo } from '@/hooks/Mapper/mapRootProvider/hooks/useStoreWidgets.ts';
|
||||
import { SignatureSettingsType } from '@/hooks/Mapper/constants/signatures.ts';
|
||||
|
||||
export enum AvailableThemes {
|
||||
default = 'default',
|
||||
pathfinder = 'pathfinder',
|
||||
@@ -43,3 +46,42 @@ export type RoutesType = {
|
||||
avoid_triglavian: boolean;
|
||||
avoid: number[];
|
||||
};
|
||||
|
||||
export type LocalWidgetSettings = {
|
||||
compact: boolean;
|
||||
showOffline: boolean;
|
||||
version: number;
|
||||
showShipName: boolean;
|
||||
};
|
||||
|
||||
export type OnTheMapSettingsType = {
|
||||
hideOffline: boolean;
|
||||
version: number;
|
||||
};
|
||||
|
||||
export type KillsWidgetSettings = {
|
||||
showAll: boolean;
|
||||
whOnly: boolean;
|
||||
excludedSystems: number[];
|
||||
version: number;
|
||||
timeRange: number;
|
||||
};
|
||||
|
||||
export type SettingsWithVersion<T> = {
|
||||
version: number;
|
||||
settings: T;
|
||||
};
|
||||
|
||||
export type MapUserSettings = {
|
||||
widgets: SettingsWithVersion<WindowStoreInfo>;
|
||||
interface: SettingsWithVersion<InterfaceStoredSettings>;
|
||||
onTheMap: SettingsWithVersion<OnTheMapSettingsType>;
|
||||
routes: SettingsWithVersion<RoutesType>;
|
||||
localWidget: SettingsWithVersion<LocalWidgetSettings>;
|
||||
signaturesWidget: SettingsWithVersion<SignatureSettingsType>;
|
||||
killsWidget: SettingsWithVersion<KillsWidgetSettings>;
|
||||
};
|
||||
|
||||
export type MapUserSettingsStructure = {
|
||||
[mapId: string]: MapUserSettings;
|
||||
};
|
||||
|
||||
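Illustratively, one entry of MapUserSettingsStructure as persisted in local storage under the 'map-user-settings' key; only the kills-widget slice is spelled out and its values are examples, not defaults from this diff:

export const example: MapUserSettingsStructure = {
  // Keyed by map slug; each map carries its own versioned settings slices.
  'my-home-map': {
    killsWidget: {
      version: 0,
      settings: { showAll: false, whOnly: true, excludedSystems: [], version: 0, timeRange: 4 },
    },
    // widgets, interface, onTheMap, routes, localWidget and signaturesWidget
    // follow the same SettingsWithVersion<T> shape and are omitted here.
  } as MapUserSettings,
};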
@@ -97,6 +97,7 @@ export type CommandInit = {
|
||||
is_subscription_active?: boolean;
|
||||
main_character_eve_id?: string | null;
|
||||
following_character_eve_id?: string | null;
|
||||
map_slug?: string;
|
||||
};
|
||||
|
||||
export type CommandAddSystems = SolarSystemRawType[];
|
||||
|
||||
@@ -1,27 +1,121 @@
|
||||
import { MapHandlers } from '@/hooks/Mapper/types/mapHandlers.ts';
|
||||
import { RefObject, useCallback } from 'react';
|
||||
import { RefObject, useCallback, useEffect, useRef } from 'react';
|
||||
import debounce from 'lodash.debounce';
|
||||
import usePageVisibility from '@/hooks/Mapper/hooks/usePageVisibility.ts';
|
||||
|
||||
// const inIndex = 0;
|
||||
// const prevEventTime = +new Date();
|
||||
const LAST_VERSION_KEY = 'wandererLastVersion';
|
||||
|
||||
// @ts-ignore
|
||||
export const useMapperHandlers = (handlerRefs: RefObject<MapHandlers>[], hooksRef: RefObject<any>) => {
|
||||
const visible = usePageVisibility();
|
||||
const wasHiddenOnce = useRef(false);
|
||||
const visibleRef = useRef(visible);
|
||||
visibleRef.current = visible;
|
||||
|
||||
// TODO: do not delete this code; it is needed for debugging.
|
||||
// const [record, setRecord] = useLocalStorageState<boolean>('record', {
|
||||
// defaultValue: false,
|
||||
// });
|
||||
// const [recordsList, setRecordsList] = useLocalStorageState<{ type; data }[]>('recordsList', {
|
||||
// defaultValue: [],
|
||||
// });
|
||||
//
|
||||
// const ref = useRef({ record, setRecord, recordsList, setRecordsList });
|
||||
// ref.current = { record, setRecord, recordsList, setRecordsList };
|
||||
//
|
||||
// const recordBufferRef = useRef<{ type; data }[]>([]);
|
||||
// useEffect(() => {
|
||||
// if (record || recordBufferRef.current.length === 0) {
|
||||
// return;
|
||||
// }
|
||||
//
|
||||
// ref.current.setRecordsList([...recordBufferRef.current]);
|
||||
// recordBufferRef.current = [];
|
||||
// }, [record]);
|
||||
|
||||
const handleCommand = useCallback(
|
||||
// @ts-ignore
|
||||
async ({ type, data }) => {
|
||||
if (!hooksRef.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
// TODO: do not delete this code; it is needed for debugging.
|
||||
// console.log('JOipP', `OUT`, ref.current.record, { type, data });
|
||||
// if (ref.current.record) {
|
||||
// recordBufferRef.current.push({ type, data });
|
||||
// }
|
||||
|
||||
// 'ui_loaded'
|
||||
return await hooksRef.current.pushEventAsync(type, data);
|
||||
},
|
||||
[hooksRef.current],
|
||||
);
|
||||
|
||||
const handleMapEvent = useCallback(({ type, body }) => {
|
||||
handlerRefs.forEach(ref => {
|
||||
if (!ref.current) {
|
||||
// @ts-ignore
|
||||
const eventsBufferRef = useRef<{ type; body }[]>([]);
|
||||
|
||||
const eventTick = useCallback(
|
||||
debounce(() => {
|
||||
if (eventsBufferRef.current.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
ref.current?.command(type, body);
|
||||
});
|
||||
const { type, body } = eventsBufferRef.current.shift()!;
|
||||
handlerRefs.forEach(ref => {
|
||||
if (!ref.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
ref.current?.command(type, body);
|
||||
});
|
||||
|
||||
// TODO: do not delete this code; it is needed for debugging.
|
||||
// console.log('JOipP', `Tick Buff`, eventsBufferRef.current.length);
|
||||
|
||||
if (eventsBufferRef.current.length > 0) {
|
||||
eventTick();
|
||||
}
|
||||
}, 10),
|
||||
[],
|
||||
);
|
||||
const eventTickRef = useRef(eventTick);
|
||||
eventTickRef.current = eventTick;
|
||||
|
||||
// @ts-ignore
|
||||
const handleMapEvent = useCallback(({ type, body }) => {
|
||||
// TODO: do not delete this code; it is needed for debugging.
|
||||
// const currentTime = +new Date();
|
||||
// const timeDiff = currentTime - prevEventTime;
|
||||
// prevEventTime = currentTime;
|
||||
// console.log('JOipP', `IN [${inIndex++}] [${timeDiff}] ${getFormattedTime()}`, { type, body });
|
||||
|
||||
if (!eventTickRef.current || !visibleRef.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
eventsBufferRef.current.push({ type, body });
|
||||
eventTickRef.current();
|
||||
}, []);
|
||||
|
||||
useEffect(() => {
|
||||
if (!visible && !wasHiddenOnce.current) {
|
||||
wasHiddenOnce.current = true;
|
||||
return;
|
||||
}
|
||||
|
||||
if (!wasHiddenOnce.current) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!visible) {
|
||||
return;
|
||||
}
|
||||
|
||||
hooksRef.current.pushEventAsync('ui_loaded', { version: localStorage.getItem(LAST_VERSION_KEY) });
|
||||
}, [hooksRef.current, visible]);
|
||||
|
||||
return { handleCommand, handleMapEvent };
|
||||
};
|
||||
|
||||
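The buffering above only drains events while the page is visible, via the usePageVisibility hook imported at the top of the file; that hook is not part of this diff, so here is a minimal sketch of what it is assumed to look like (a boolean mirror of document.visibilityState):

import { useEffect, useState } from 'react';

// Assumed implementation for illustration: returns true while the tab is visible.
export default function usePageVisibility(): boolean {
  const [visible, setVisible] = useState(() => document.visibilityState === 'visible');

  useEffect(() => {
    const onChange = () => setVisible(document.visibilityState === 'visible');
    document.addEventListener('visibilitychange', onChange);
    return () => document.removeEventListener('visibilitychange', onChange);
  }, []);

  return visible;
}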
@@ -1,2 +1,4 @@
|
||||
export * from './contextStore';
|
||||
export * from './getQueryVariable';
|
||||
export * from './loadTextFile';
|
||||
export * from './saveToFile';
|
||||
|
||||
27 assets/js/hooks/Mapper/utils/loadTextFile.ts (new file)
@@ -0,0 +1,27 @@
|
||||
export function loadTextFile(): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const input = document.createElement('input');
|
||||
input.type = 'file';
|
||||
input.accept = 'application/json,.json';
|
||||
|
||||
input.onchange = () => {
|
||||
const file = input.files?.[0];
|
||||
if (!file) {
|
||||
reject(new Error('No file selected'));
|
||||
return;
|
||||
}
|
||||
|
||||
const reader = new FileReader();
|
||||
reader.onload = () => {
|
||||
resolve(reader.result as string);
|
||||
};
|
||||
reader.onerror = () => {
|
||||
reject(reader.error);
|
||||
};
|
||||
|
||||
reader.readAsText(file);
|
||||
};
|
||||
|
||||
input.click();
|
||||
});
|
||||
}
|
||||
33 assets/js/hooks/Mapper/utils/saveToFile.ts (new file)
@@ -0,0 +1,33 @@
|
||||
export function saveTextFile(filename: string, content: string) {
|
||||
const blob = new Blob([content], { type: 'text/plain;charset=utf-8' });
|
||||
const url = URL.createObjectURL(blob);
|
||||
|
||||
const a = document.createElement('a');
|
||||
a.href = url;
|
||||
a.download = filename;
|
||||
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
document.body.removeChild(a);
|
||||
URL.revokeObjectURL(url);
|
||||
}
|
||||
|
||||
export async function saveTextFileInteractive(filename: string, content: string) {
|
||||
if (!('showSaveFilePicker' in window)) {
|
||||
throw new Error('File System Access API is not supported in this browser.');
|
||||
}
|
||||
|
||||
const handle = await (window as any).showSaveFilePicker({
|
||||
suggestedName: filename,
|
||||
types: [
|
||||
{
|
||||
description: 'Text Files',
|
||||
accept: { 'text/plain': ['.txt', '.json'] },
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const writable = await handle.createWritable();
|
||||
await writable.write(content);
|
||||
await writable.close();
|
||||
}
|
||||
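saveTextFileInteractive relies on the File System Access API, which is not available in every browser, while saveTextFile always works by triggering a download. A small sketch of a combined helper that prefers the native picker and falls back; the helper name is illustrative:

import { saveTextFile, saveTextFileInteractive } from '@/hooks/Mapper/utils/saveToFile';

// Prefer the native save dialog where supported; otherwise fall back to a plain download.
export async function saveTextFileWithFallback(filename: string, content: string) {
  if ('showSaveFilePicker' in window) {
    try {
      await saveTextFileInteractive(filename, content);
      return;
    } catch {
      // Picker failed or was cancelled; fall through to the download path.
    }
  }
  saveTextFile(filename, content);
}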
BIN assets/static/images/pochven.webp (new binary file; 131 KiB, not shown)
@@ -27,7 +27,11 @@ config :wanderer_app,
|
||||
generators: [timestamp_type: :utc_datetime],
|
||||
ddrt: DDRT,
|
||||
logger: Logger,
|
||||
pubsub_client: Phoenix.PubSub
|
||||
pubsub_client: Phoenix.PubSub,
|
||||
wanderer_kills_base_url:
|
||||
System.get_env("WANDERER_KILLS_BASE_URL", "ws://host.docker.internal:4004"),
|
||||
wanderer_kills_service_enabled:
|
||||
System.get_env("WANDERER_KILLS_SERVICE_ENABLED", "false") == "true"
|
||||
|
||||
config :wanderer_app, WandererAppWeb.Endpoint,
|
||||
adapter: Bandit.PhoenixAdapter,
|
||||
|
||||
@@ -84,3 +84,12 @@ config :swoosh, :api_client, false
|
||||
config :logger, :console,
|
||||
level: :info,
|
||||
format: "$time $metadata[$level] $message\n"
|
||||
|
||||
# WandererKills service configuration (WebSocket-based)
|
||||
config :wanderer_app,
|
||||
# Enable WandererKills service integration
|
||||
wanderer_kills_service_enabled: true,
|
||||
|
||||
# WebSocket connection URL for WandererKills service
|
||||
wanderer_kills_base_url:
|
||||
System.get_env("WANDERER_KILLS_BASE_URL", "ws://host.docker.internal:4004")
|
||||
|
||||
@@ -53,6 +53,20 @@ public_api_disabled =
|
||||
|> get_var_from_path_or_env("WANDERER_PUBLIC_API_DISABLED", "false")
|
||||
|> String.to_existing_atom()
|
||||
|
||||
character_api_disabled =
|
||||
config_dir
|
||||
|> get_var_from_path_or_env("WANDERER_CHARACTER_API_DISABLED", "true")
|
||||
|> String.to_existing_atom()
|
||||
|
||||
wanderer_kills_service_enabled =
|
||||
config_dir
|
||||
|> get_var_from_path_or_env("WANDERER_KILLS_SERVICE_ENABLED", "false")
|
||||
|> String.to_existing_atom()
|
||||
|
||||
wanderer_kills_base_url =
|
||||
config_dir
|
||||
|> get_var_from_path_or_env("WANDERER_KILLS_BASE_URL", "ws://wanderer-kills:4004")
|
||||
|
||||
map_subscriptions_enabled =
|
||||
config_dir
|
||||
|> get_var_from_path_or_env("WANDERER_MAP_SUBSCRIPTIONS_ENABLED", "false")
|
||||
@@ -70,9 +84,9 @@ map_subscription_base_price =
|
||||
config_dir
|
||||
|> get_int_from_path_or_env("WANDERER_MAP_SUBSCRIPTION_BASE_PRICE", 100_000_000)
|
||||
|
||||
map_subscription_extra_characters_100_price =
|
||||
map_subscription_extra_characters_50_price =
|
||||
config_dir
|
||||
|> get_int_from_path_or_env("WANDERER_MAP_SUBSCRIPTION_EXTRA_CHARACTERS_100_PRICE", 50_000_000)
|
||||
|> get_int_from_path_or_env("WANDERER_MAP_SUBSCRIPTION_EXTRA_CHARACTERS_50_PRICE", 50_000_000)
|
||||
|
||||
map_subscription_extra_hubs_10_price =
|
||||
config_dir
|
||||
@@ -120,13 +134,14 @@ config :wanderer_app,
|
||||
corp_wallet_eve_id: System.get_env("WANDERER_CORP_WALLET_EVE_ID", "-1"),
|
||||
public_api_disabled: public_api_disabled,
|
||||
active_tracking_pool: System.get_env("WANDERER_ACTIVE_TRACKING_POOL", "default"),
|
||||
tracking_pool_max_size:
|
||||
System.get_env("WANDERER_TRACKING_POOL_MAX_SIZE", "300") |> String.to_integer(),
|
||||
character_tracking_pause_disabled:
|
||||
System.get_env("WANDERER_CHARACTER_TRACKING_PAUSE_DISABLED", "true")
|
||||
|> String.to_existing_atom(),
|
||||
character_api_disabled:
|
||||
System.get_env("WANDERER_CHARACTER_API_DISABLED", "true") |> String.to_existing_atom(),
|
||||
zkill_preload_disabled:
|
||||
System.get_env("WANDERER_ZKILL_PRELOAD_DISABLED", "false") |> String.to_existing_atom(),
|
||||
character_api_disabled: character_api_disabled,
|
||||
wanderer_kills_service_enabled: wanderer_kills_service_enabled,
|
||||
wanderer_kills_base_url: wanderer_kills_base_url,
|
||||
map_subscriptions_enabled: map_subscriptions_enabled,
|
||||
map_connection_auto_expire_hours: map_connection_auto_expire_hours,
|
||||
map_connection_auto_eol_hours: map_connection_auto_eol_hours,
|
||||
@@ -152,7 +167,7 @@ config :wanderer_app,
|
||||
month_12_discount: 0.5
|
||||
}
|
||||
],
|
||||
extra_characters_100: map_subscription_extra_characters_100_price,
|
||||
extra_characters_50: map_subscription_extra_characters_50_price,
|
||||
extra_hubs_10: map_subscription_extra_hubs_10_price
|
||||
}
|
||||
|
||||
|
||||
@@ -27,8 +27,10 @@ defmodule WandererApp.Application do
|
||||
}
|
||||
},
|
||||
WandererApp.Cache,
|
||||
Supervisor.child_spec({Cachex, name: :api_cache, default_ttl: :timer.hours(1)},
|
||||
id: :api_cache_worker
|
||||
),
|
||||
Supervisor.child_spec({Cachex, name: :esi_auth_cache}, id: :esi_auth_cache_worker),
|
||||
Supervisor.child_spec({Cachex, name: :api_cache}, id: :api_cache_worker),
|
||||
Supervisor.child_spec({Cachex, name: :system_static_info_cache},
|
||||
id: :system_static_info_cache_worker
|
||||
),
|
||||
@@ -58,7 +60,7 @@ defmodule WandererApp.Application do
|
||||
WandererAppWeb.Endpoint
|
||||
] ++
|
||||
maybe_start_corp_wallet_tracker(WandererApp.Env.map_subscriptions_enabled?()) ++
|
||||
maybe_start_zkb(WandererApp.Env.zkill_preload_disabled?())
|
||||
maybe_start_kills_services()
|
||||
|
||||
opts = [strategy: :one_for_one, name: WandererApp.Supervisor]
|
||||
|
||||
@@ -79,12 +81,6 @@ defmodule WandererApp.Application do
|
||||
:ok
|
||||
end
|
||||
|
||||
defp maybe_start_zkb(false),
|
||||
do: [WandererApp.Zkb.Supervisor, WandererApp.Map.ZkbDataFetcher]
|
||||
|
||||
defp maybe_start_zkb(_),
|
||||
do: []
|
||||
|
||||
defp maybe_start_corp_wallet_tracker(true),
|
||||
do: [
|
||||
WandererApp.StartCorpWalletTrackerTask
|
||||
@@ -92,4 +88,20 @@ defmodule WandererApp.Application do
|
||||
|
||||
defp maybe_start_corp_wallet_tracker(_),
|
||||
do: []
|
||||
|
||||
defp maybe_start_kills_services do
|
||||
wanderer_kills_enabled =
|
||||
Application.get_env(:wanderer_app, :wanderer_kills_service_enabled, false)
|
||||
|
||||
if wanderer_kills_enabled in [true, :true, "true"] do
|
||||
Logger.info("Starting WandererKills service integration...")
|
||||
|
||||
[
|
||||
WandererApp.Kills.Supervisor,
|
||||
WandererApp.Map.ZkbDataFetcher
|
||||
]
|
||||
else
|
||||
[]
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
@@ -16,10 +16,6 @@ defmodule WandererApp.Character do
|
||||
ship_item_id: nil
|
||||
}
|
||||
|
||||
@decorate cacheable(
|
||||
cache: WandererApp.Cache,
|
||||
key: "characters-#{character_eve_id}"
|
||||
)
|
||||
def get_by_eve_id(character_eve_id) when is_binary(character_eve_id) do
|
||||
WandererApp.Api.Character.by_eve_id(character_eve_id)
|
||||
end
|
||||
@@ -201,9 +197,9 @@ defmodule WandererApp.Character do
|
||||
end
|
||||
end
|
||||
|
||||
def can_track_wallet?(%{scopes: scopes} = _character) when not is_nil(scopes) do
|
||||
scopes |> String.split(" ") |> Enum.member?(@read_character_wallet_scope)
|
||||
end
|
||||
def can_track_wallet?(%{scopes: scopes, id: character_id} = _character)
|
||||
when is_binary(scopes) and is_binary(character_id),
|
||||
do: scopes |> String.split(" ") |> Enum.member?(@read_character_wallet_scope)
|
||||
|
||||
def can_track_wallet?(_), do: false
|
||||
|
||||
@@ -213,15 +209,12 @@ defmodule WandererApp.Character do
|
||||
|
||||
def can_track_corp_wallet?(_), do: false
|
||||
|
||||
@decorate cacheable(
|
||||
cache: WandererApp.Cache,
|
||||
key: "can_pause_tracking-#{character_id}"
|
||||
)
|
||||
def can_pause_tracking?(character_id) do
|
||||
case get_character(character_id) do
|
||||
{:ok, character} when not is_nil(character) ->
|
||||
{:ok, %{tracking_pool: tracking_pool} = character} when not is_nil(character) ->
|
||||
not WandererApp.Env.character_tracking_pause_disabled?() &&
|
||||
not can_track_corp_wallet?(character)
|
||||
not can_track_wallet?(character) &&
|
||||
(is_nil(tracking_pool) || tracking_pool == "default")
|
||||
|
||||
_ ->
|
||||
true
|
||||
|
||||
@@ -166,7 +166,7 @@ defmodule WandererApp.Character.Tracker do
|
||||
|
||||
def update_online(%{track_online: true, character_id: character_id} = character_state) do
|
||||
case WandererApp.Character.get_character(character_id) do
|
||||
{:ok, %{eve_id: eve_id, access_token: access_token}}
|
||||
{:ok, %{eve_id: eve_id, access_token: access_token, tracking_pool: tracking_pool}}
|
||||
when not is_nil(access_token) ->
|
||||
(WandererApp.Cache.has_key?("character:#{character_id}:online_forbidden") ||
|
||||
WandererApp.Cache.has_key?("character:#{character_id}:tracking_paused"))
|
||||
@@ -233,7 +233,7 @@ defmodule WandererApp.Character.Tracker do
|
||||
{:error, :error_limited, headers} ->
|
||||
reset_timeout = get_reset_timeout(headers)
|
||||
|
||||
Logger.warning(".")
|
||||
Logger.warning("#{inspect(tracking_pool)} ..")
|
||||
|
||||
WandererApp.Cache.put(
|
||||
"character:#{character_id}:online_forbidden",
|
||||
@@ -287,15 +287,15 @@ defmodule WandererApp.Character.Tracker do
|
||||
defp get_reset_timeout(_headers, default_timeout), do: default_timeout
|
||||
|
||||
def update_info(character_id) do
|
||||
(WandererApp.Cache.has_key?("character:#{character_id}:online_forbidden") ||
|
||||
WandererApp.Cache.has_key?("character:#{character_id}:info_forbidden") ||
|
||||
(WandererApp.Cache.has_key?("character:#{character_id}:info_forbidden") ||
|
||||
WandererApp.Cache.has_key?("character:#{character_id}:tracking_paused"))
|
||||
|> case do
|
||||
true ->
|
||||
{:error, :skipped}
|
||||
|
||||
false ->
|
||||
{:ok, %{eve_id: eve_id}} = WandererApp.Character.get_character(character_id)
|
||||
{:ok, %{eve_id: eve_id, tracking_pool: tracking_pool}} =
|
||||
WandererApp.Character.get_character(character_id)
|
||||
|
||||
case WandererApp.Esi.get_character_info(eve_id) do
|
||||
{:ok, _info} ->
|
||||
@@ -320,7 +320,7 @@ defmodule WandererApp.Character.Tracker do
|
||||
{:error, :error_limited, headers} ->
|
||||
reset_timeout = get_reset_timeout(headers)
|
||||
|
||||
Logger.warning(".")
|
||||
Logger.warning("#{inspect(tracking_pool)} ..")
|
||||
|
||||
WandererApp.Cache.put(
|
||||
"character:#{character_id}:info_forbidden",
|
||||
@@ -358,7 +358,8 @@ defmodule WandererApp.Character.Tracker do
|
||||
character_id
|
||||
|> WandererApp.Character.get_character()
|
||||
|> case do
|
||||
{:ok, %{eve_id: eve_id, access_token: access_token}} when not is_nil(access_token) ->
|
||||
{:ok, %{eve_id: eve_id, access_token: access_token, tracking_pool: tracking_pool}}
|
||||
when not is_nil(access_token) ->
|
||||
(WandererApp.Cache.has_key?("character:#{character_id}:online_forbidden") ||
|
||||
WandererApp.Cache.has_key?("character:#{character_id}:ship_forbidden") ||
|
||||
WandererApp.Cache.has_key?("character:#{character_id}:tracking_paused"))
|
||||
@@ -397,7 +398,7 @@ defmodule WandererApp.Character.Tracker do
|
||||
{:error, :error_limited, headers} ->
|
||||
reset_timeout = get_reset_timeout(headers)
|
||||
|
||||
Logger.warning(".")
|
||||
Logger.warning("#{inspect(tracking_pool)} ..")
|
||||
|
||||
WandererApp.Cache.put(
|
||||
"character:#{character_id}:ship_forbidden",
|
||||
@@ -462,7 +463,8 @@ defmodule WandererApp.Character.Tracker do
|
||||
%{track_location: true, is_online: true, character_id: character_id} = character_state
|
||||
) do
|
||||
case WandererApp.Character.get_character(character_id) do
|
||||
{:ok, %{eve_id: eve_id, access_token: access_token}} when not is_nil(access_token) ->
|
||||
{:ok, %{eve_id: eve_id, access_token: access_token, tracking_pool: tracking_pool}}
|
||||
when not is_nil(access_token) ->
|
||||
WandererApp.Cache.has_key?("character:#{character_id}:tracking_paused")
|
||||
|> case do
|
||||
true ->
|
||||
@@ -494,7 +496,7 @@ defmodule WandererApp.Character.Tracker do
|
||||
{:error, :skipped}
|
||||
|
||||
{:error, :error_limited, headers} ->
|
||||
Logger.warning(".")
|
||||
Logger.warning("#{inspect(tracking_pool)} ..")
|
||||
|
||||
reset_timeout = get_reset_timeout(headers, @location_limit_ttl)
|
||||
|
||||
@@ -550,7 +552,8 @@ defmodule WandererApp.Character.Tracker do
|
||||
character_id
|
||||
|> WandererApp.Character.get_character()
|
||||
|> case do
|
||||
{:ok, %{eve_id: eve_id, access_token: access_token} = character}
|
||||
{:ok,
|
||||
%{eve_id: eve_id, access_token: access_token, tracking_pool: tracking_pool} = character}
|
||||
when not is_nil(access_token) ->
|
||||
character
|
||||
|> WandererApp.Character.can_track_wallet?()
|
||||
@@ -589,7 +592,7 @@ defmodule WandererApp.Character.Tracker do
|
||||
{:error, :error_limited, headers} ->
|
||||
reset_timeout = get_reset_timeout(headers)
|
||||
|
||||
Logger.warning(".")
|
||||
Logger.warning("#{inspect(tracking_pool)} ..")
|
||||
|
||||
WandererApp.Cache.put(
|
||||
"character:#{character_id}:wallet_forbidden",
|
||||
|
||||
@@ -25,9 +25,6 @@ defmodule WandererApp.Character.TrackerPool do
|
||||
@update_ship_interval :timer.seconds(2)
|
||||
@update_info_interval :timer.minutes(1)
|
||||
@update_wallet_interval :timer.minutes(1)
|
||||
@inactive_character_timeout :timer.minutes(5)
|
||||
|
||||
@pause_tracking_timeout :timer.minutes(60 * 24)
|
||||
|
||||
@logger Application.compile_env(:wanderer_app, :logger)
|
||||
|
||||
@@ -55,12 +52,6 @@ defmodule WandererApp.Character.TrackerPool do
|
||||
|
||||
tracked_ids
|
||||
|> Enum.each(fn id ->
|
||||
# WandererApp.Cache.put(
|
||||
# "character:#{id}:tracking_paused",
|
||||
# true,
|
||||
# ttl: @pause_tracking_timeout
|
||||
# )
|
||||
|
||||
Cachex.put(@cache, id, uuid)
|
||||
end)
|
||||
|
||||
@@ -88,12 +79,6 @@ defmodule WandererApp.Character.TrackerPool do
|
||||
[tracked_id | r_tracked_ids]
|
||||
end)
|
||||
|
||||
# WandererApp.Cache.put(
|
||||
# "character:#{tracked_id}:tracking_paused",
|
||||
# true,
|
||||
# ttl: @pause_tracking_timeout
|
||||
# )
|
||||
|
||||
# Cachex.get_and_update(@cache, :tracked_characters, fn ids ->
|
||||
# {:commit, ids ++ [tracked_id]}
|
||||
# end)
|
||||
|
||||
142 lib/wanderer_app/character/tracking_config_utils.ex (new file)
@@ -0,0 +1,142 @@
|
||||
defmodule WandererApp.Character.TrackingConfigUtils do
|
||||
use Nebulex.Caching
|
||||
@moduledoc false
|
||||
|
||||
@ttl :timer.minutes(5)
|
||||
@last_active_character_minutes -1 * 60 * 24 * 7
|
||||
|
||||
@decorate cacheable(
|
||||
cache: WandererApp.Cache,
|
||||
key: "tracker-stats",
|
||||
opts: [ttl: @ttl]
|
||||
)
|
||||
def load_tracker_stats() do
|
||||
{:ok, characters} = get_active_characters()
|
||||
|
||||
admins_count =
|
||||
characters |> Enum.filter(&WandererApp.Character.can_track_corp_wallet?/1) |> Enum.count()
|
||||
|
||||
with_wallets_count =
|
||||
characters
|
||||
|> Enum.filter(
|
||||
&(WandererApp.Character.can_track_wallet?(&1) and
|
||||
not WandererApp.Character.can_track_corp_wallet?(&1))
|
||||
)
|
||||
|> Enum.count()
|
||||
|
||||
default_count =
|
||||
characters
|
||||
|> Enum.filter(
|
||||
&(is_nil(&1.tracking_pool) and not WandererApp.Character.can_track_wallet?(&1) and
|
||||
not WandererApp.Character.can_track_corp_wallet?(&1))
|
||||
)
|
||||
|> Enum.count()
|
||||
|
||||
result = [
|
||||
%{id: "admins", title: "Admins", value: admins_count},
|
||||
%{id: "wallet", title: "With Wallet", value: with_wallets_count},
|
||||
%{id: "default", title: "Default", value: default_count}
|
||||
]
|
||||
|
||||
{:ok, pools_count} =
|
||||
Cachex.get(
|
||||
:esi_auth_cache,
|
||||
"configs_total_count"
|
||||
)
|
||||
|
||||
{:ok, pools} = get_pools_info(characters)
|
||||
|
||||
{:ok, result ++ pools}
|
||||
end
|
||||
|
||||
def update_active_tracking_pool() do
|
||||
{:ok, pools_count} =
|
||||
Cachex.get(
|
||||
:esi_auth_cache,
|
||||
"configs_total_count"
|
||||
)
|
||||
|
||||
active_pool =
|
||||
if not is_nil(pools_count) && pools_count != 0 do
|
||||
tracking_pool_max_size = WandererApp.Env.tracking_pool_max_size()
|
||||
{:ok, characters} = get_active_characters()
|
||||
{:ok, pools} = get_pools_info(characters)
|
||||
|
||||
minimal_pool_id =
|
||||
pools
|
||||
|> Enum.filter(&(&1.value < tracking_pool_max_size))
|
||||
|> Enum.min_by(& &1.value)
|
||||
|> Map.get(:id)
|
||||
|
||||
if not is_nil(minimal_pool_id) do
|
||||
minimal_pool_id
|
||||
else
|
||||
"default"
|
||||
end
|
||||
else
|
||||
"default"
|
||||
end
|
||||
|
||||
Cachex.put(
|
||||
:esi_auth_cache,
|
||||
"active_pool",
|
||||
active_pool
|
||||
)
|
||||
end
|
||||
|
||||
def get_active_pool!() do
|
||||
Cachex.get(
|
||||
:esi_auth_cache,
|
||||
"active_pool"
|
||||
)
|
||||
|> case do
|
||||
{:ok, active_pool} when not is_nil(active_pool) ->
|
||||
active_pool
|
||||
|
||||
_ ->
|
||||
"default"
|
||||
end
|
||||
end
|
||||
|
||||
defp get_active_characters() do
|
||||
WandererApp.Api.Character.last_active(%{
|
||||
from:
|
||||
DateTime.utc_now()
|
||||
|> DateTime.add(@last_active_character_minutes, :minute)
|
||||
})
|
||||
end
|
||||
|
||||
@decorate cacheable(
|
||||
cache: WandererApp.Cache,
|
||||
key: "character-pools-info",
|
||||
opts: [ttl: @ttl]
|
||||
)
|
||||
defp get_pools_info(characters) do
|
||||
{:ok, pools_count} =
|
||||
Cachex.get(
|
||||
:esi_auth_cache,
|
||||
"configs_total_count"
|
||||
)
|
||||
|
||||
if not is_nil(pools_count) && pools_count != 0 do
|
||||
pools =
|
||||
1..pools_count
|
||||
|> Enum.map(fn pool_id ->
|
||||
pools_character_count =
|
||||
characters
|
||||
|> Enum.filter(
|
||||
&(&1.tracking_pool == "#{pool_id}" and
|
||||
not WandererApp.Character.can_track_wallet?(&1) and
|
||||
not WandererApp.Character.can_track_corp_wallet?(&1))
|
||||
)
|
||||
|> Enum.count()
|
||||
|
||||
%{id: "#{pool_id}", title: "Pool #{pool_id}", value: pools_character_count}
|
||||
end)
|
||||
|
||||
{:ok, pools}
|
||||
else
|
||||
{:ok, []}
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -146,7 +146,12 @@ defmodule WandererApp.Character.TransactionsTracker.Impl do
|
||||
end
|
||||
|
||||
defp get_wallet_journal(
|
||||
%{id: character_id, corporation_id: corporation_id, access_token: access_token} =
|
||||
%{
|
||||
id: character_id,
|
||||
corporation_id: corporation_id,
|
||||
access_token: access_token,
|
||||
tracking_pool: tracking_pool
|
||||
} =
|
||||
_character,
|
||||
division
|
||||
)
|
||||
@@ -164,7 +169,7 @@ defmodule WandererApp.Character.TransactionsTracker.Impl do
|
||||
{:error, :forbidden}
|
||||
|
||||
{:error, :error_limited, _headers} ->
|
||||
Logger.warning(".")
|
||||
Logger.warning("#{inspect(tracking_pool)} ..")
|
||||
{:error, :error_limited}
|
||||
|
||||
{:error, error} ->
|
||||
@@ -176,7 +181,12 @@ defmodule WandererApp.Character.TransactionsTracker.Impl do
|
||||
defp get_wallet_journal(_character, _division), do: {:error, :skipped}
|
||||
|
||||
defp update_corp_wallets(
|
||||
%{id: character_id, corporation_id: corporation_id, access_token: access_token} =
|
||||
%{
|
||||
id: character_id,
|
||||
corporation_id: corporation_id,
|
||||
access_token: access_token,
|
||||
tracking_pool: tracking_pool
|
||||
} =
|
||||
_character
|
||||
)
|
||||
when not is_nil(access_token) do
|
||||
@@ -193,7 +203,7 @@ defmodule WandererApp.Character.TransactionsTracker.Impl do
|
||||
{:error, :forbidden}
|
||||
|
||||
{:error, :error_limited, _headers} ->
|
||||
Logger.warning(".")
|
||||
Logger.warning("#{inspect(tracking_pool)} ..")
|
||||
{:error, :error_limited}
|
||||
|
||||
{:error, error} ->
|
||||
|
||||
@@ -14,6 +14,7 @@ defmodule WandererApp.Env do
|
||||
def base_url, do: get_key(:web_app_url, "<BASE_URL>")
|
||||
def custom_route_base_url, do: get_key(:custom_route_base_url, "<CUSTOM_ROUTE_BASE_URL>")
|
||||
def invites, do: get_key(:invites, false)
|
||||
|
||||
def map_subscriptions_enabled?, do: get_key(:map_subscriptions_enabled, false)
|
||||
def public_api_disabled?, do: get_key(:public_api_disabled, false)
|
||||
|
||||
@@ -22,9 +23,15 @@ defmodule WandererApp.Env do
|
||||
key: "active_tracking_pool"
|
||||
)
|
||||
def active_tracking_pool, do: get_key(:active_tracking_pool, "default")
|
||||
|
||||
@decorate cacheable(
|
||||
cache: WandererApp.Cache,
|
||||
key: "tracking_pool_max_size"
|
||||
)
|
||||
def tracking_pool_max_size, do: get_key(:tracking_pool_max_size, 300)
|
||||
def character_tracking_pause_disabled?, do: get_key(:character_tracking_pause_disabled, true)
|
||||
def character_api_disabled?, do: get_key(:character_api_disabled, false)
|
||||
def zkill_preload_disabled?, do: get_key(:zkill_preload_disabled, false)
|
||||
def wanderer_kills_service_enabled?, do: get_key(:wanderer_kills_service_enabled, false)
|
||||
def wallet_tracking_enabled?, do: get_key(:wallet_tracking_enabled, false)
|
||||
def admins, do: get_key(:admins, [])
|
||||
def admin_username, do: get_key(:admin_username)
|
||||
@@ -66,6 +73,6 @@ defmodule WandererApp.Env do
|
||||
made available to react
|
||||
"""
|
||||
def to_client_env do
|
||||
%{detailedKillsDisabled: zkill_preload_disabled?()}
|
||||
%{detailedKillsDisabled: not wanderer_kills_service_enabled?()}
|
||||
end
|
||||
end
|
||||
|
||||
55 lib/wanderer_app/kills.ex (new file)
@@ -0,0 +1,55 @@
|
||||
defmodule WandererApp.Kills do
|
||||
@moduledoc """
|
||||
Main interface for the WandererKills integration subsystem.
|
||||
|
||||
Provides high-level functions for monitoring and managing the kills
|
||||
data pipeline, including connection status, health metrics, and
|
||||
system subscriptions.
|
||||
"""
|
||||
|
||||
alias WandererApp.Kills.{Client, Storage}
|
||||
|
||||
@doc """
|
||||
Gets comprehensive status of the kills subsystem.
|
||||
"""
|
||||
@spec get_status() :: {:ok, map()} | {:error, term()}
|
||||
def get_status do
|
||||
with {:ok, client_status} <- Client.get_status() do
|
||||
{:ok, %{
|
||||
enabled: Application.get_env(:wanderer_app, :wanderer_kills_service_enabled, false),
|
||||
client: client_status,
|
||||
websocket_url: Application.get_env(:wanderer_app, :wanderer_kills_base_url, "ws://wanderer-kills:4004")
|
||||
}}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Subscribes to killmail updates for specified systems.
|
||||
"""
|
||||
@spec subscribe_systems([integer()]) :: :ok | {:error, term()}
|
||||
defdelegate subscribe_systems(system_ids), to: Client, as: :subscribe_to_systems
|
||||
|
||||
@doc """
|
||||
Unsubscribes from killmail updates for specified systems.
|
||||
"""
|
||||
@spec unsubscribe_systems([integer()]) :: :ok | {:error, term()}
|
||||
defdelegate unsubscribe_systems(system_ids), to: Client, as: :unsubscribe_from_systems
|
||||
|
||||
@doc """
|
||||
Gets kill count for a specific system.
|
||||
"""
|
||||
@spec get_system_kill_count(integer()) :: {:ok, non_neg_integer()} | {:error, :not_found}
|
||||
defdelegate get_system_kill_count(system_id), to: Storage, as: :get_kill_count
|
||||
|
||||
@doc """
|
||||
Gets recent kills for a specific system.
|
||||
"""
|
||||
@spec get_system_kills(integer()) :: {:ok, list(map())} | {:error, :not_found}
|
||||
defdelegate get_system_kills(system_id), to: Storage
|
||||
|
||||
@doc """
|
||||
Manually triggers a reconnection attempt.
|
||||
"""
|
||||
@spec reconnect() :: :ok | {:error, term()}
|
||||
defdelegate reconnect(), to: Client
|
||||
end
|
||||
788 lib/wanderer_app/kills/client.ex (new file)
@@ -0,0 +1,788 @@
|
||||
defmodule WandererApp.Kills.Client do
|
||||
@moduledoc """
|
||||
WebSocket client for WandererKills service.
|
||||
|
||||
Follows patterns established in the character and map modules.
|
||||
"""
|
||||
|
||||
use GenServer
|
||||
require Logger
|
||||
|
||||
alias WandererApp.Kills.{MessageHandler, Config}
|
||||
alias WandererApp.Kills.Subscription.{Manager, MapIntegration}
|
||||
alias Phoenix.Channels.GenSocketClient
|
||||
|
||||
# Simple retry configuration - inline like character module
|
||||
@retry_delays [5_000, 10_000, 30_000, 60_000]
|
||||
@max_retries 10
|
||||
@health_check_interval :timer.seconds(30) # Check every 30 seconds
|
||||
@message_timeout :timer.minutes(15) # Reconnect if no messages are received for this long
|
||||
|
||||
defstruct [
|
||||
:socket_pid,
|
||||
:retry_timer_ref,
|
||||
:connection_timeout_ref,
|
||||
:last_message_time,
|
||||
:last_retry_cycle_end,
|
||||
:last_health_reconnect_attempt,
|
||||
connected: false,
|
||||
connecting: false,
|
||||
subscribed_systems: MapSet.new(),
|
||||
retry_count: 0,
|
||||
last_error: nil
|
||||
]
|
||||
|
||||
# Client API
|
||||
|
||||
@spec start_link(keyword()) :: GenServer.on_start()
|
||||
def start_link(opts) do
|
||||
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
|
||||
end
|
||||
|
||||
@spec subscribe_to_systems([integer()]) :: :ok | {:error, atom()}
|
||||
def subscribe_to_systems(system_ids) do
|
||||
case validate_system_ids(system_ids) do
|
||||
{:ok, valid_ids} ->
|
||||
GenServer.cast(__MODULE__, {:subscribe_systems, valid_ids})
|
||||
|
||||
{:error, _} = error ->
|
||||
Logger.error("[Client] Invalid system IDs: #{inspect(system_ids)}")
|
||||
error
|
||||
end
|
||||
end
|
||||
|
||||
@spec unsubscribe_from_systems([integer()]) :: :ok | {:error, atom()}
|
||||
def unsubscribe_from_systems(system_ids) do
|
||||
case validate_system_ids(system_ids) do
|
||||
{:ok, valid_ids} ->
|
||||
GenServer.cast(__MODULE__, {:unsubscribe_systems, valid_ids})
|
||||
|
||||
{:error, _} = error ->
|
||||
Logger.error("[Client] Invalid system IDs: #{inspect(system_ids)}")
|
||||
error
|
||||
end
|
||||
end
|
||||
|
||||
@spec get_status() :: {:ok, map()} | {:error, term()}
|
||||
def get_status do
|
||||
GenServer.call(__MODULE__, :get_status)
|
||||
catch
|
||||
:exit, _ -> {:error, :not_running}
|
||||
end
|
||||
|
||||
@spec reconnect() :: :ok | {:error, term()}
|
||||
def reconnect do
|
||||
GenServer.call(__MODULE__, :reconnect)
|
||||
catch
|
||||
:exit, _ -> {:error, :not_running}
|
||||
end
|
||||
|
||||
@spec force_health_check() :: :ok
|
||||
def force_health_check do
|
||||
send(__MODULE__, :health_check)
|
||||
:ok
|
||||
end
|
||||
|
||||
# Server callbacks
|
||||
@impl true
|
||||
def init(_opts) do
|
||||
if Config.enabled?() do
|
||||
# Start connection attempt immediately
|
||||
send(self(), :connect)
|
||||
|
||||
# Schedule first health check after a reasonable delay
|
||||
Process.send_after(self(), :health_check, @health_check_interval)
|
||||
|
||||
{:ok, %__MODULE__{}}
|
||||
else
|
||||
Logger.info("[Client] Kills integration disabled")
|
||||
:ignore
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_info(:connect, %{connecting: true} = state) do
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_info(:connect, %{connected: true} = state) do
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_info(:connect, state) do
|
||||
Logger.info("[Client] Initiating connection attempt (retry count: #{state.retry_count})")
|
||||
state = cancel_retry(state)
|
||||
new_state = attempt_connection(%{state | connecting: true})
|
||||
{:noreply, new_state}
|
||||
end
|
||||
|
||||
def handle_info(:retry_connection, %{connecting: true} = state) do
|
||||
{:noreply, %{state | retry_timer_ref: nil}}
|
||||
end
|
||||
|
||||
def handle_info(:retry_connection, %{connected: true} = state) do
|
||||
{:noreply, %{state | retry_timer_ref: nil}}
|
||||
end
|
||||
|
||||
def handle_info(:retry_connection, state) do
|
||||
state = %{state | retry_timer_ref: nil, connecting: true}
|
||||
new_state = attempt_connection(state)
|
||||
{:noreply, new_state}
|
||||
end
|
||||
|
||||
def handle_info(:refresh_subscriptions, %{connected: true} = state) do
|
||||
case MapIntegration.get_tracked_system_ids() do
|
||||
{:ok, system_list} ->
|
||||
if system_list != [] do
|
||||
subscribe_to_systems(system_list)
|
||||
end
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.error(
|
||||
"[Client] Failed to refresh subscriptions: #{inspect(reason)}, scheduling retry"
|
||||
)
|
||||
|
||||
Process.send_after(self(), :refresh_subscriptions, 5000)
|
||||
end
|
||||
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_info(:refresh_subscriptions, state) do
|
||||
# Not connected yet, retry later
|
||||
Process.send_after(self(), :refresh_subscriptions, 5000)
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_info({:connected, socket_pid}, state) do
|
||||
Logger.info("[Client] WebSocket connected, socket_pid: #{inspect(socket_pid)}")
|
||||
# Monitor the socket process so we know if it dies
|
||||
Process.monitor(socket_pid)
|
||||
|
||||
new_state =
|
||||
%{
|
||||
state
|
||||
| connected: true,
|
||||
connecting: false,
|
||||
socket_pid: socket_pid,
|
||||
retry_count: 0, # Reset retry count only on successful connection
|
||||
last_error: nil,
|
||||
last_message_time: System.system_time(:millisecond)
|
||||
}
|
||||
|> cancel_retry()
|
||||
|> cancel_connection_timeout()
|
||||
|
||||
{:noreply, new_state}
|
||||
end
|
||||
|
||||
# Guard against duplicate disconnection events
|
||||
def handle_info({:disconnected, reason}, %{connected: false, connecting: false} = state) do
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_info({:disconnected, reason}, state) do
|
||||
Logger.warning("[Client] WebSocket disconnected: #{inspect(reason)} (was connected: #{state.connected}, was connecting: #{state.connecting})")
|
||||
|
||||
# Cancel connection timeout if pending
|
||||
state = cancel_connection_timeout(state)
|
||||
|
||||
state =
|
||||
%{state |
|
||||
connected: false,
|
||||
connecting: false,
|
||||
socket_pid: nil,
|
||||
last_error: reason
|
||||
}
|
||||
|
||||
if should_retry?(state) do
|
||||
{:noreply, schedule_retry(state)}
|
||||
else
|
||||
Logger.error("[Client] Max retry attempts (#{@max_retries}) reached. Will not retry automatically.")
|
||||
{:noreply, state}
|
||||
end
|
||||
end
|
||||
|
||||
def handle_info(:health_check, state) do
|
||||
health_status = check_health(state)
|
||||
new_state = case health_status do
|
||||
:healthy ->
|
||||
state
|
||||
|
||||
:needs_reconnect ->
|
||||
Logger.warning("[Client] Connection unhealthy, triggering reconnect (retry count: #{state.retry_count})")
|
||||
# Don't reset retry count during health check failures
|
||||
if state.connected or state.connecting do
|
||||
send(self(), {:disconnected, :health_check_failed})
|
||||
%{state | connected: false, connecting: false, socket_pid: nil}
|
||||
else
|
||||
# Already disconnected, just maintain state
|
||||
state
|
||||
end
|
||||
|
||||
:needs_reconnect_with_timestamp ->
|
||||
Logger.warning("[Client] Health check triggering reconnect (retry count: #{state.retry_count})")
|
||||
new_state = %{state | last_health_reconnect_attempt: System.system_time(:millisecond)}
|
||||
if state.connected or state.connecting do
|
||||
send(self(), {:disconnected, :health_check_failed})
|
||||
%{new_state | connected: false, connecting: false, socket_pid: nil}
|
||||
else
|
||||
# Already disconnected, trigger reconnect
|
||||
send(self(), :connect)
|
||||
new_state
|
||||
end
|
||||
|
||||
:needs_reconnect_reset_retries ->
|
||||
Logger.warning("[Client] Health check resetting retry count and triggering reconnect")
|
||||
new_state = %{state | retry_count: 0, last_retry_cycle_end: nil}
|
||||
if state.connected or state.connecting do
|
||||
send(self(), {:disconnected, :health_check_failed})
|
||||
%{new_state | connected: false, connecting: false, socket_pid: nil}
|
||||
else
|
||||
# Already disconnected, trigger immediate reconnect with reset count
|
||||
send(self(), :connect)
|
||||
new_state
|
||||
end
|
||||
end
|
||||
|
||||
schedule_health_check()
|
||||
{:noreply, new_state}
|
||||
end
|
||||
|
||||
# Handle process DOWN messages for socket monitoring
|
||||
def handle_info({:DOWN, _ref, :process, pid, reason}, %{socket_pid: pid} = state) do
|
||||
Logger.error("[Client] Socket process died: #{inspect(reason)}")
|
||||
send(self(), {:disconnected, {:socket_died, reason}})
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_info({:DOWN, _ref, :process, _pid, _reason}, state) do
|
||||
# Ignore DOWN messages for other processes
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_info({:connection_timeout, socket_pid}, %{socket_pid: socket_pid} = state) do
|
||||
Logger.error("[Client] Connection timeout - socket process failed to connect within 10s (retry #{state.retry_count}/#{@max_retries})")
|
||||
|
||||
# Kill the socket process if it's still alive
|
||||
if socket_alive?(socket_pid) do
|
||||
try do
|
||||
GenServer.stop(socket_pid, :normal, 5000)
|
||||
catch
|
||||
:exit, _ -> :ok
|
||||
end
|
||||
end
|
||||
|
||||
# Clear connection timeout ref
|
||||
state = %{state | connection_timeout_ref: nil}
|
||||
|
||||
# Treat this as a disconnection
|
||||
send(self(), {:disconnected, :connection_timeout})
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_info({:connection_timeout, _old_pid}, state) do
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_info({:message_received, _type}, state) do
|
||||
# Update last message time when we receive a kill message
|
||||
{:noreply, %{state | last_message_time: System.system_time(:millisecond)}}
|
||||
end
|
||||
|
||||
def handle_info(_msg, state), do: {:noreply, state}
|
||||
|
||||
@impl true
|
||||
def handle_cast({:subscribe_systems, system_ids}, state) do
|
||||
{updated_systems, to_subscribe} =
|
||||
Manager.subscribe_systems(state.subscribed_systems, system_ids)
|
||||
|
||||
# Log subscription details
|
||||
if length(to_subscribe) > 0 do
|
||||
# Get map information for the systems
|
||||
map_info = get_system_map_info(to_subscribe)
|
||||
|
||||
Logger.debug(fn ->
  "[Client] Subscribing to #{length(to_subscribe)} new systems. " <>
    "Total subscribed: #{MapSet.size(updated_systems)}. " <>
    "Map breakdown: #{inspect(map_info)}"
end)
|
||||
end
|
||||
|
||||
if length(to_subscribe) > 0 and state.socket_pid do
|
||||
Manager.sync_with_server(state.socket_pid, to_subscribe, [])
|
||||
end
|
||||
|
||||
{:noreply, %{state | subscribed_systems: updated_systems}}
|
||||
end
|
||||
|
||||
def handle_cast({:unsubscribe_systems, system_ids}, state) do
|
||||
{updated_systems, to_unsubscribe} =
|
||||
Manager.unsubscribe_systems(state.subscribed_systems, system_ids)
|
||||
if length(to_unsubscribe) > 0 and state.socket_pid do
|
||||
Manager.sync_with_server(state.socket_pid, [], to_unsubscribe)
|
||||
end
|
||||
|
||||
{:noreply, %{state | subscribed_systems: updated_systems}}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_call(:get_status, _from, state) do
|
||||
status = %{
|
||||
connected: state.connected,
|
||||
connecting: state.connecting,
|
||||
retry_count: state.retry_count,
|
||||
last_error: state.last_error,
|
||||
subscribed_systems: MapSet.size(state.subscribed_systems),
|
||||
socket_alive: socket_alive?(state.socket_pid),
|
||||
subscriptions: %{
|
||||
subscribed_systems: MapSet.to_list(state.subscribed_systems)
|
||||
}
|
||||
}
|
||||
|
||||
{:reply, {:ok, status}, state}
|
||||
end
|
||||
|
||||
def handle_call(:reconnect, _from, state) do
|
||||
state = cancel_retry(state)
|
||||
|
||||
if state.socket_pid do
|
||||
disconnect_socket(state.socket_pid)
|
||||
end
|
||||
|
||||
new_state = %{
|
||||
state
|
||||
| connected: false,
|
||||
connecting: false,
|
||||
socket_pid: nil,
|
||||
retry_count: 0, # Manual reconnect resets retry count
|
||||
last_error: nil
|
||||
}
|
||||
|
||||
send(self(), :connect)
|
||||
{:reply, :ok, new_state}
|
||||
end
|
||||
|
||||
# Private functions
|
||||
|
||||
defp attempt_connection(state) do
|
||||
case connect_to_server() do
|
||||
{:ok, socket_pid} ->
|
||||
timeout_ref = Process.send_after(self(), {:connection_timeout, socket_pid}, 10_000)
|
||||
%{state | socket_pid: socket_pid, connecting: true, connection_timeout_ref: timeout_ref}
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.error("[Client] Connection failed: #{inspect(reason)}")
|
||||
schedule_retry(%{state | connecting: false, last_error: reason})
|
||||
end
|
||||
end
|
||||
|
||||
defp connect_to_server do
|
||||
url = Config.server_url()
|
||||
systems =
|
||||
case MapIntegration.get_tracked_system_ids() do
|
||||
{:ok, system_list} ->
|
||||
system_list
|
||||
{:error, reason} ->
|
||||
Logger.warning(
|
||||
"[Client] Failed to get tracked system IDs for initial subscription: #{inspect(reason)}, will retry after connection"
|
||||
)
|
||||
|
||||
# Return empty list but schedule immediate refresh after connection
|
||||
Process.send_after(self(), :refresh_subscriptions, 1000)
|
||||
[]
|
||||
end
|
||||
|
||||
handler_state = %{
|
||||
server_url: url,
|
||||
parent: self(),
|
||||
subscribed_systems: systems,
|
||||
disconnected: false
|
||||
}
|
||||
|
||||
# GenSocketClient expects transport_opts to be wrapped in a specific format
|
||||
opts = [
|
||||
transport_opts: [
|
||||
timeout: 10_000, # 10 second connection timeout
|
||||
tcp_opts: [
|
||||
connect_timeout: 10_000, # TCP connection timeout
|
||||
send_timeout: 5_000,
|
||||
recv_timeout: 5_000
|
||||
]
|
||||
]
|
||||
]
|
||||
|
||||
case GenSocketClient.start_link(
|
||||
__MODULE__.Handler,
|
||||
Phoenix.Channels.GenSocketClient.Transport.WebSocketClient,
|
||||
handler_state,
|
||||
opts
|
||||
) do
|
||||
{:ok, socket_pid} ->
|
||||
{:ok, socket_pid}
|
||||
|
||||
error ->
|
||||
Logger.error("[Client] Failed to start WebSocket client: #{inspect(error)}")
|
||||
error
|
||||
end
|
||||
end
|
||||
|
||||
defp should_retry?(%{retry_count: count}) when count >= @max_retries, do: false
|
||||
defp should_retry?(_), do: true
|
||||
|
||||
defp should_start_new_retry_cycle?(%{last_retry_cycle_end: nil}), do: true
|
||||
defp should_start_new_retry_cycle?(%{last_retry_cycle_end: end_time}) do
|
||||
System.system_time(:millisecond) - end_time >= @message_timeout
|
||||
end
|
||||
|
||||
# Prevent health check from triggering reconnects too frequently
|
||||
# Allow health check reconnects only every 2 minutes to avoid spam
|
||||
@health_check_reconnect_cooldown :timer.minutes(2)
|
||||
|
||||
defp should_health_check_reconnect?(%{last_health_reconnect_attempt: nil}), do: true
|
||||
defp should_health_check_reconnect?(%{last_health_reconnect_attempt: last_attempt}) do
|
||||
System.system_time(:millisecond) - last_attempt >= @health_check_reconnect_cooldown
|
||||
end
|
||||
|
||||
defp schedule_retry(state) do
|
||||
# Cancel any existing retry timer first
|
||||
state = cancel_retry(state)
|
||||
|
||||
# Increment retry count first
|
||||
new_retry_count = state.retry_count + 1
|
||||
|
||||
# If we've hit max retries, mark the end of this retry cycle
|
||||
state = if new_retry_count >= @max_retries do
|
||||
%{state | last_retry_cycle_end: System.system_time(:millisecond)}
|
||||
else
|
||||
state
|
||||
end
|
||||
|
||||
delay = Enum.at(@retry_delays, min(state.retry_count, length(@retry_delays) - 1))
|
||||
|
||||
timer_ref = Process.send_after(self(), :retry_connection, delay)
|
||||
%{state | retry_timer_ref: timer_ref, retry_count: new_retry_count}
|
||||
end
|
||||
|
||||
defp cancel_retry(%{retry_timer_ref: nil} = state), do: state
|
||||
|
||||
defp cancel_retry(%{retry_timer_ref: timer_ref} = state) do
|
||||
Process.cancel_timer(timer_ref)
|
||||
%{state | retry_timer_ref: nil}
|
||||
end
|
||||
|
||||
defp cancel_connection_timeout(%{connection_timeout_ref: nil} = state), do: state
|
||||
|
||||
defp cancel_connection_timeout(%{connection_timeout_ref: ref} = state) do
|
||||
Process.cancel_timer(ref)
|
||||
%{state | connection_timeout_ref: nil}
|
||||
end
|
||||
|
||||
defp check_health(%{connecting: true} = _state) do
|
||||
:healthy # Don't interfere with ongoing connection attempts
|
||||
end
|
||||
|
||||
defp check_health(%{connected: false, retry_timer_ref: ref} = _state) when not is_nil(ref) do
|
||||
:healthy # Don't interfere with scheduled retries
|
||||
end
|
||||
|
||||
defp check_health(%{connected: false} = state) do
|
||||
if should_retry?(state) do
|
||||
# Don't trigger reconnect too frequently from health checks
|
||||
if should_health_check_reconnect?(state) do
|
||||
:needs_reconnect_with_timestamp
|
||||
else
|
||||
:healthy # Recent health check reconnect attempt
|
||||
end
|
||||
else
|
||||
# Max retries reached, check if 15 minutes have passed since last retry cycle
|
||||
if should_start_new_retry_cycle?(state) do
|
||||
Logger.info("[Client] 15 minutes elapsed since max retries, starting new retry cycle")
|
||||
:needs_reconnect_reset_retries
|
||||
else
|
||||
:healthy # Still within 15-minute cooldown period
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp check_health(%{socket_pid: nil} = state) do
|
||||
# Don't trigger reconnect too frequently from health checks
|
||||
if should_health_check_reconnect?(state) do
|
||||
Logger.debug("[Client] Health check: no socket pid, triggering reconnect")
|
||||
:needs_reconnect_with_timestamp
|
||||
else
|
||||
Logger.debug("[Client] Health check: no socket pid, but recent reconnect attempt - waiting")
|
||||
:healthy
|
||||
end
|
||||
end
|
||||
|
||||
defp check_health(%{socket_pid: pid, last_message_time: last_msg_time} = _state) when not is_nil(pid) and not is_nil(last_msg_time) do
|
||||
cond do
|
||||
not socket_alive?(pid) ->
|
||||
Logger.warning("[Client] Health check: Socket process #{inspect(pid)} is dead")
|
||||
:needs_reconnect
|
||||
|
||||
# Check if we haven't received a message in the configured timeout
|
||||
System.system_time(:millisecond) - last_msg_time > @message_timeout ->
|
||||
Logger.warning("[Client] Health check: No messages received for 15+ minutes, reconnecting")
|
||||
:needs_reconnect
|
||||
|
||||
true ->
|
||||
:healthy
|
||||
end
|
||||
end
|
||||
|
||||
defp check_health(%{socket_pid: pid} = _state) do
|
||||
if socket_alive?(pid) do
|
||||
:healthy
|
||||
else
|
||||
Logger.warning("[Client] Health check: Socket process #{inspect(pid)} is dead")
|
||||
:needs_reconnect
|
||||
end
|
||||
end
|
||||
|
||||
defp socket_alive?(nil), do: false
|
||||
defp socket_alive?(pid), do: Process.alive?(pid)
|
||||
|
||||
defp disconnect_socket(nil), do: :ok
|
||||
|
||||
defp disconnect_socket(pid) when is_pid(pid) do
|
||||
if Process.alive?(pid) do
|
||||
GenServer.stop(pid, :normal)
|
||||
end
|
||||
end
|
||||
|
||||
defp schedule_health_check do
|
||||
Process.send_after(self(), :health_check, @health_check_interval)
|
||||
end
|
||||
|
||||
defp handle_connection_lost(%{connected: false} = _state) do
|
||||
Logger.debug("[Client] Connection already lost, skipping cleanup")
|
||||
end
|
||||
|
||||
defp handle_connection_lost(state) do
|
||||
Logger.warning("[Client] Connection lost, cleaning up and reconnecting")
|
||||
|
||||
# Clean up existing socket
|
||||
if state.socket_pid do
|
||||
disconnect_socket(state.socket_pid)
|
||||
end
|
||||
|
||||
# Reset state and trigger reconnection
|
||||
send(self(), {:disconnected, :connection_lost})
|
||||
end
|
||||
|
||||
|
||||
# Handler module for WebSocket events
|
||||
defmodule Handler do
|
||||
@moduledoc """
|
||||
WebSocket handler for the kills client.
|
||||
|
||||
Handles Phoenix Channel callbacks for WebSocket communication.
|
||||
"""
|
||||
|
||||
@behaviour Phoenix.Channels.GenSocketClient
|
||||
require Logger
|
||||
|
||||
alias WandererApp.Kills.MessageHandler
|
||||
|
||||
@impl true
|
||||
def init(state) do
|
||||
ws_url = "#{state.server_url}/socket/websocket"
|
||||
# Configure with heartbeat interval (Phoenix default is 30s)
|
||||
params = [
|
||||
{"vsn", "2.0.0"},
|
||||
{"heartbeat", "30000"} # 30 second heartbeat
|
||||
]
|
||||
{:connect, ws_url, params, state}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_connected(transport, state) do
|
||||
join_params = %{
|
||||
systems: state.subscribed_systems,
|
||||
client_identifier: "wanderer_app"
|
||||
}
|
||||
|
||||
case GenSocketClient.join(transport, "killmails:lobby", join_params) do
|
||||
{:ok, _response} ->
|
||||
send(state.parent, {:connected, self()})
|
||||
# Reset disconnected flag on successful connection
|
||||
{:ok, %{state | disconnected: false}}
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.error("[Handler] Failed to join channel: #{inspect(reason)}")
|
||||
send(state.parent, {:disconnected, {:join_error, reason}})
|
||||
{:ok, %{state | disconnected: true}}
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_disconnected(reason, state) do
|
||||
if state.disconnected do
|
||||
{:ok, state}
|
||||
else
|
||||
Logger.warning("[Handler] Disconnected from server: #{inspect(reason)}")
|
||||
send(state.parent, {:disconnected, reason})
|
||||
{:ok, %{state | disconnected: true}}
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_channel_closed(topic, payload, _transport, state) do
|
||||
if state.disconnected do
|
||||
{:ok, state}
|
||||
else
|
||||
Logger.warning("[Handler] Channel #{topic} closed with payload: #{inspect(payload)}")
|
||||
send(state.parent, {:disconnected, {:channel_closed, topic}})
|
||||
{:ok, %{state | disconnected: true}}
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_message(topic, event, payload, _transport, state) do
|
||||
case {topic, event} do
|
||||
{"killmails:lobby", "killmail_update"} ->
|
||||
# Notify parent that we received a message
|
||||
send(state.parent, {:message_received, :killmail_update})
|
||||
|
||||
# Use supervised task to handle failures gracefully
|
||||
Task.Supervisor.start_child(
|
||||
WandererApp.Kills.TaskSupervisor,
|
||||
fn -> MessageHandler.process_killmail_update(payload) end
|
||||
)
|
||||
|
||||
{"killmails:lobby", "kill_count_update"} ->
|
||||
# Notify parent that we received a message
|
||||
send(state.parent, {:message_received, :kill_count_update})
|
||||
|
||||
# Use supervised task to handle failures gracefully
|
||||
Task.Supervisor.start_child(
|
||||
WandererApp.Kills.TaskSupervisor,
|
||||
fn -> MessageHandler.process_kill_count_update(payload) end
|
||||
)
|
||||
|
||||
_ ->
|
||||
Logger.debug("[Handler] Unhandled message: #{topic} - #{event}")
|
||||
:ok
|
||||
end
|
||||
|
||||
{:ok, state}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_reply(_topic, _ref, _payload, _transport, state), do: {:ok, state}
|
||||
|
||||
@impl true
|
||||
def handle_info({:subscribe_systems, system_ids}, transport, state) do
|
||||
case push_to_channel(transport, "subscribe_systems", %{"systems" => system_ids}) do
|
||||
:ok ->
|
||||
Logger.debug(fn -> "[Handler] Successfully pushed subscribe_systems event" end)
|
||||
error ->
|
||||
Logger.error("[Handler] Failed to push subscribe_systems: #{inspect(error)}")
|
||||
end
|
||||
|
||||
{:ok, state}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_info({:unsubscribe_systems, system_ids}, transport, state) do
|
||||
case push_to_channel(transport, "unsubscribe_systems", %{"systems" => system_ids}) do
|
||||
:ok ->
|
||||
Logger.debug(fn -> "[Handler] Successfully pushed unsubscribe_systems event" end)
|
||||
error ->
|
||||
Logger.error("[Handler] Failed to push unsubscribe_systems: #{inspect(error)}")
|
||||
end
|
||||
|
||||
{:ok, state}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_info(_msg, _transport, state) do
|
||||
{:ok, state}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_call(_msg, _from, _transport, state),
|
||||
do: {:reply, {:error, :not_implemented}, state}
|
||||
|
||||
@impl true
|
||||
def handle_joined(_topic, _payload, _transport, state), do: {:ok, state}
|
||||
|
||||
@impl true
|
||||
def handle_join_error(topic, payload, _transport, state) do
|
||||
if state.disconnected do
|
||||
{:ok, state}
|
||||
else
|
||||
Logger.error("[Handler] Join error on #{topic}: #{inspect(payload)}")
|
||||
send(state.parent, {:disconnected, {:join_error, {topic, payload}}})
|
||||
{:ok, %{state | disconnected: true}}
|
||||
end
|
||||
end
|
||||
|
||||
defp push_to_channel(transport, event, payload) do
|
||||
Logger.debug(fn -> "[Handler] Pushing event '#{event}' with payload: #{inspect(payload)}" end)
|
||||
|
||||
case GenSocketClient.push(transport, "killmails:lobby", event, payload) do
|
||||
{:ok, ref} ->
|
||||
Logger.debug(fn -> "[Handler] Push successful, ref: #{inspect(ref)}" end)
|
||||
:ok
|
||||
error ->
|
||||
Logger.error("[Handler] Push failed: #{inspect(error)}")
|
||||
error
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
# Validation functions (inlined from Validation module)
|
||||
|
||||
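# Accept only IDs in the valid EVE Online solar system range (roughly 30M-33M); anything outside it is rejected.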
@spec validate_system_id(any()) :: {:ok, integer()} | {:error, :invalid_system_id}
|
||||
defp validate_system_id(system_id)
|
||||
when is_integer(system_id) and system_id > 30_000_000 and system_id < 33_000_000 do
|
||||
{:ok, system_id}
|
||||
end
|
||||
|
||||
defp validate_system_id(system_id) when is_binary(system_id) do
|
||||
case Integer.parse(system_id) do
|
||||
{id, ""} when id > 30_000_000 and id < 33_000_000 ->
|
||||
{:ok, id}
|
||||
|
||||
_ ->
|
||||
{:error, :invalid_system_id}
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_system_id(_), do: {:error, :invalid_system_id}
|
||||
|
||||
@spec validate_system_ids(list()) :: {:ok, [integer()]} | {:error, :invalid_system_ids}
|
||||
defp validate_system_ids(system_ids) when is_list(system_ids) do
|
||||
results = Enum.map(system_ids, &validate_system_id/1)
|
||||
|
||||
case Enum.all?(results, &match?({:ok, _}, &1)) do
|
||||
true ->
|
||||
valid_ids = Enum.map(results, fn {:ok, id} -> id end)
|
||||
{:ok, valid_ids}
|
||||
|
||||
false ->
|
||||
{:error, :invalid_system_ids}
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_system_ids(_), do: {:error, :invalid_system_ids}
|
||||
|
||||
# Helper function to get map information for systems
|
||||
defp get_system_map_info(system_ids) do
|
||||
# Use the SystemMapIndex to get map associations
|
||||
system_ids
|
||||
|> Enum.reduce(%{}, fn system_id, acc ->
|
||||
maps = WandererApp.Kills.Subscription.SystemMapIndex.get_maps_for_system(system_id)
|
||||
Enum.reduce(maps, acc, fn map_id, inner_acc ->
|
||||
Map.update(inner_acc, map_id, 1, &(&1 + 1))
|
||||
end)
|
||||
end)
|
||||
|> Enum.map_join(", ", fn {map_id, count} -> "#{map_id}: #{count} systems" end)
|
||||
|> case do
|
||||
"" -> "no map associations found"
|
||||
info -> info
|
||||
end
|
||||
end
|
||||
end
|
||||
lib/wanderer_app/kills/config.ex (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
defmodule WandererApp.Kills.Config do
|
||||
@moduledoc """
|
||||
Simple configuration helpers for the kills subsystem.
|
||||
Following the pattern of other modules that use Application.get_env directly.
|
||||
"""
|
||||
|
||||
def enabled? do
|
||||
Application.get_env(:wanderer_app, :wanderer_kills_service_enabled, false)
|
||||
end
|
||||
|
||||
def websocket_url do
|
||||
Application.get_env(:wanderer_app, :wanderer_kills_base_url, "ws://wanderer-kills:4004")
|
||||
end
|
||||
|
||||
def server_url do
|
||||
# Remove /socket/websocket suffix if present for backward compatibility
|
||||
websocket_url()
|
||||
|> String.replace(~r/\/socket\/websocket$/, "")
|
||||
end
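# e.g. "ws://wanderer-kills:4004/socket/websocket" -> "ws://wanderer-kills:4004"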
|
||||
|
||||
def kill_list_limit do
|
||||
Application.get_env(:wanderer_app, :kill_list_limit, 100)
|
||||
|> to_integer()
|
||||
end
|
||||
|
||||
def max_concurrent_tasks do
|
||||
:wanderer_app
|
||||
|> Application.get_env(:kills_max_concurrent_tasks, 50)
|
||||
|> ensure_integer()
|
||||
end
|
||||
|
||||
def max_task_queue_size do
|
||||
:wanderer_app
|
||||
|> Application.get_env(:kills_max_task_queue_size, 5000)
|
||||
|> ensure_integer()
|
||||
end
|
||||
|
||||
def killmail_ttl do
|
||||
:timer.hours(24)
|
||||
end
|
||||
|
||||
def kill_count_ttl do
|
||||
:timer.hours(24)
|
||||
end
|
||||
|
||||
# Simple conversion helper
|
||||
defp to_integer(value) when is_binary(value), do: String.to_integer(value)
|
||||
defp to_integer(value) when is_integer(value), do: value
|
||||
defp to_integer(_), do: 100
|
||||
|
||||
defp ensure_integer(value) when is_integer(value), do: value
|
||||
|
||||
defp ensure_integer(value) when is_binary(value) do
|
||||
case Integer.parse(value) do
|
||||
{int, ""} -> int
|
||||
# Default fallback
|
||||
_ -> 50
|
||||
end
|
||||
end
|
||||
|
||||
defp ensure_integer(_), do: 50
|
||||
end
|
||||
lib/wanderer_app/kills/map_event_listener.ex (new file, 278 lines)
@@ -0,0 +1,278 @@
|
||||
defmodule WandererApp.Kills.MapEventListener do
|
||||
@moduledoc """
|
||||
Listens for map events and updates kill subscriptions accordingly.
|
||||
|
||||
This module bridges the gap between map system changes and the kills
|
||||
WebSocket subscription system.
|
||||
"""
|
||||
|
||||
use GenServer
|
||||
require Logger
|
||||
|
||||
alias WandererApp.Kills.Client
|
||||
alias WandererApp.Kills.Subscription.MapIntegration
|
||||
|
||||
def start_link(opts \\ []) do
|
||||
GenServer.start_link(__MODULE__, opts, name: __MODULE__)
|
||||
end
|
||||
|
||||
@impl true
|
||||
def init(_opts) do
|
||||
# Subscribe to map lifecycle events
|
||||
Phoenix.PubSub.subscribe(WandererApp.PubSub, "maps")
|
||||
|
||||
# Defer subscription update to avoid blocking init
|
||||
Process.send_after(self(), :initial_subscription_update, 30_000)
|
||||
|
||||
# Also schedule a re-subscription after a delay in case maps start after us
|
||||
Process.send_after(self(), :resubscribe_to_maps, 60_000)
|
||||
|
||||
{:ok,
|
||||
%{
|
||||
last_update: nil,
|
||||
pending_update: nil,
|
||||
pending_removals: MapSet.new(),
|
||||
subscribed_maps: MapSet.new(),
|
||||
retry_count: 0,
|
||||
retry_timer: nil
|
||||
}}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_info(:initial_subscription_update, state) do
|
||||
{:noreply, do_update_subscriptions(state)}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_info(%{event: :map_server_started, payload: _map_info}, state) do
|
||||
{:noreply, schedule_subscription_update(state)}
|
||||
end
|
||||
|
||||
def handle_info(:map_server_started, state) do
|
||||
Process.send_after(self(), :resubscribe_to_maps, 1000)
|
||||
{:noreply, schedule_subscription_update(state)}
|
||||
end
|
||||
|
||||
def handle_info(%{event: :add_system, payload: system}, state) do
|
||||
Logger.debug(fn -> "[MapEventListener] System added: #{inspect(system)}" end)
|
||||
{:noreply, schedule_subscription_update(state)}
|
||||
end
|
||||
|
||||
def handle_info({:add_system, system}, state) do
|
||||
Logger.debug(fn -> "[MapEventListener] System added (alt format): #{inspect(system)}" end)
|
||||
{:noreply, schedule_subscription_update(state)}
|
||||
end
|
||||
|
||||
def handle_info(%{event: :systems_removed, payload: system_ids}, state) do
|
||||
Logger.debug(fn -> "[MapEventListener] Systems removed: #{length(system_ids)} systems" end)
|
||||
# Track pending removals so we can handle them immediately
|
||||
new_pending_removals = MapSet.union(state.pending_removals, MapSet.new(system_ids))
|
||||
{:noreply, schedule_subscription_update(%{state | pending_removals: new_pending_removals})}
|
||||
end
|
||||
|
||||
def handle_info({:systems_removed, system_ids}, state) do
|
||||
Logger.debug(fn -> "[MapEventListener] Systems removed (alt format): #{length(system_ids)} systems" end)
|
||||
# Track pending removals so we can handle them immediately
|
||||
new_pending_removals = MapSet.union(state.pending_removals, MapSet.new(system_ids))
|
||||
{:noreply, schedule_subscription_update(%{state | pending_removals: new_pending_removals})}
|
||||
end
|
||||
|
||||
def handle_info(%{event: :update_system, payload: _system}, state) do
|
||||
# System updates might change visibility or other properties
|
||||
{:noreply, schedule_subscription_update(state)}
|
||||
end
|
||||
|
||||
def handle_info({:update_system, _system}, state) do
|
||||
{:noreply, schedule_subscription_update(state)}
|
||||
end
|
||||
|
||||
def handle_info(%{event: :map_server_stopped}, state) do
|
||||
{:noreply, schedule_subscription_update(state)}
|
||||
end
|
||||
|
||||
def handle_info(:map_server_stopped, state) do
|
||||
{:noreply, schedule_subscription_update(state)}
|
||||
end
|
||||
|
||||
# Handle scheduled update
|
||||
def handle_info(:perform_subscription_update, state) do
|
||||
Logger.debug(fn -> "[MapEventListener] Performing scheduled subscription update" end)
|
||||
# Clear the debounce timer ref; pending removals are cleared inside do_update_subscriptions once the update succeeds
|
||||
new_state = do_update_subscriptions(%{state | pending_update: nil})
|
||||
{:noreply, new_state}
|
||||
end
|
||||
|
||||
# Handle re-subscription attempt
|
||||
def handle_info(:resubscribe_to_maps, state) do
|
||||
running_maps = WandererApp.Map.RegistryHelper.list_all_maps()
|
||||
current_running_map_ids = MapSet.new(Enum.map(running_maps, & &1.id))
|
||||
Logger.debug(fn ->
|
||||
"[MapEventListener] Resubscribing to maps. Running maps: #{MapSet.size(current_running_map_ids)}"
|
||||
end)
|
||||
|
||||
# Unsubscribe from maps no longer running
|
||||
maps_to_unsubscribe = MapSet.difference(state.subscribed_maps, current_running_map_ids)
|
||||
Enum.each(maps_to_unsubscribe, fn map_id ->
|
||||
Phoenix.PubSub.unsubscribe(WandererApp.PubSub, map_id)
|
||||
end)
|
||||
|
||||
# Subscribe to new running maps
|
||||
maps_to_subscribe = MapSet.difference(current_running_map_ids, state.subscribed_maps)
|
||||
Enum.each(maps_to_subscribe, fn map_id ->
|
||||
Phoenix.PubSub.subscribe(WandererApp.PubSub, map_id)
|
||||
end)
|
||||
|
||||
{:noreply, %{state | subscribed_maps: current_running_map_ids}}
|
||||
end
|
||||
|
||||
# Handle map creation - subscribe to new map
|
||||
def handle_info({:map_created, map_id}, state) do
|
||||
Logger.debug(fn -> "[MapEventListener] Map created: #{map_id}" end)
|
||||
Phoenix.PubSub.subscribe(WandererApp.PubSub, map_id)
|
||||
updated_subscribed_maps = MapSet.put(state.subscribed_maps, map_id)
|
||||
{:noreply, schedule_subscription_update(%{state | subscribed_maps: updated_subscribed_maps})}
|
||||
end
|
||||
|
||||
def handle_info(_msg, state) do
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
@impl true
|
||||
def terminate(_reason, state) do
|
||||
# Unsubscribe from all maps
|
||||
Enum.each(state.subscribed_maps, fn map_id ->
|
||||
Phoenix.PubSub.unsubscribe(WandererApp.PubSub, map_id)
|
||||
end)
|
||||
|
||||
# Unsubscribe from general maps channel
|
||||
Phoenix.PubSub.unsubscribe(WandererApp.PubSub, "maps")
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
# Debounce delay in milliseconds
|
||||
@debounce_delay 1000
|
||||
# Backoff delays for retries when client is not connected
|
||||
@retry_delays [5_000, 10_000, 30_000, 60_000]
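# i.e. back off 5s, 10s, 30s, then cap at 60s between attempts.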
|
||||
|
||||
defp schedule_subscription_update(state) do
|
||||
# Cancel pending update if exists
|
||||
if state.pending_update do
|
||||
Process.cancel_timer(state.pending_update)
|
||||
end
|
||||
|
||||
# Schedule new update
|
||||
timer_ref = Process.send_after(self(), :perform_subscription_update, @debounce_delay)
|
||||
%{state | pending_update: timer_ref}
|
||||
end
|
||||
|
||||
defp do_update_subscriptions(state) do
|
||||
state =
|
||||
try do
|
||||
case perform_subscription_update(state.pending_removals) do
|
||||
:ok ->
|
||||
# Also refresh the system->map index
|
||||
WandererApp.Kills.Subscription.SystemMapIndex.refresh()
|
||||
%{state | pending_removals: MapSet.new(), retry_count: 0}
|
||||
|
||||
{:error, :connecting} ->
|
||||
# Client is connecting, retry with backoff
|
||||
schedule_retry_update(state)
|
||||
|
||||
{:error, :not_connected} ->
|
||||
# Client is not connected, retry with backoff
|
||||
schedule_retry_update(state)
|
||||
|
||||
error ->
Logger.warning("[MapEventListener] Subscription update failed: #{inspect(error)}")
schedule_retry_update(state)
|
||||
end
|
||||
rescue
|
||||
e ->
|
||||
Logger.error("[MapEventListener] Error updating subscriptions: #{inspect(e)}")
|
||||
schedule_retry_update(state)
|
||||
end
|
||||
|
||||
%{state | last_update: System.monotonic_time(:millisecond)}
|
||||
end
|
||||
|
||||
defp schedule_retry_update(state) do
|
||||
# Cancel any existing retry timer
|
||||
if state.retry_timer do
|
||||
Process.cancel_timer(state.retry_timer)
|
||||
end
|
||||
|
||||
retry_count = state.retry_count
|
||||
delay = Enum.at(@retry_delays, min(retry_count, length(@retry_delays) - 1))
|
||||
|
||||
timer_ref = Process.send_after(self(), :perform_subscription_update, delay)
|
||||
|
||||
%{state | retry_timer: timer_ref, retry_count: retry_count + 1}
|
||||
end
|
||||
|
||||
defp perform_subscription_update(pending_removals) do
|
||||
case Client.get_status() do
|
||||
{:ok, %{connected: true, subscriptions: %{subscribed_systems: current_systems}}} ->
|
||||
apply_subscription_changes(current_systems, pending_removals)
|
||||
:ok
|
||||
|
||||
{:ok, %{connecting: true}} ->
|
||||
{:error, :connecting}
|
||||
|
||||
{:error, :not_running} ->
|
||||
{:error, :not_running}
|
||||
|
||||
{:ok, _status} ->
|
||||
{:error, :not_connected}
|
||||
|
||||
error ->
|
||||
Logger.error("[MapEventListener] Failed to get client status: #{inspect(error)}")
|
||||
{:error, :client_error}
|
||||
end
|
||||
end
|
||||
|
||||
defp apply_subscription_changes(current_systems, pending_removals) do
|
||||
current_set = MapSet.new(current_systems)
|
||||
Logger.debug(fn ->
|
||||
"[MapEventListener] Current subscriptions: #{MapSet.size(current_set)} systems, " <>
|
||||
"Pending removals: #{MapSet.size(pending_removals)} systems"
|
||||
end)
|
||||
|
||||
# Use get_tracked_system_ids to get only systems from running maps
|
||||
case MapIntegration.get_tracked_system_ids() do
|
||||
{:ok, tracked_systems} ->
|
||||
handle_tracked_systems(tracked_systems, current_set, pending_removals)
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.error("[MapEventListener] Failed to get tracked system IDs: #{inspect(reason)}")
|
||||
end
|
||||
end
|
||||
|
||||
defp handle_tracked_systems(tracked_systems, current_set, pending_removals) do
|
||||
tracked_systems_set = MapSet.new(tracked_systems)
|
||||
|
||||
# Remove pending removals from tracked_systems since DB might not be updated yet
|
||||
tracked_systems_adjusted = MapSet.difference(tracked_systems_set, pending_removals)
|
||||
Logger.debug(fn ->
|
||||
"[MapEventListener] Tracked systems from maps: #{MapSet.size(tracked_systems_set)}, " <>
|
||||
"After removing pending: #{MapSet.size(tracked_systems_adjusted)}"
|
||||
end)
|
||||
|
||||
# Use the existing MapIntegration logic to determine changes
|
||||
{:ok, to_subscribe, to_unsubscribe} =
|
||||
MapIntegration.handle_map_systems_updated(
|
||||
MapSet.to_list(tracked_systems_adjusted),
|
||||
current_set
|
||||
)
|
||||
|
||||
# Apply the changes
|
||||
if to_subscribe != [] do
|
||||
Logger.debug(fn -> "[MapEventListener] Triggering subscription for #{length(to_subscribe)} systems" end)
|
||||
Client.subscribe_to_systems(to_subscribe)
|
||||
end
|
||||
|
||||
if to_unsubscribe != [] do
|
||||
Logger.debug(fn -> "[MapEventListener] Triggering unsubscription for #{length(to_unsubscribe)} systems" end)
|
||||
Client.unsubscribe_from_systems(to_unsubscribe)
|
||||
end
|
||||
end
|
||||
end
|
||||
lib/wanderer_app/kills/message_handler.ex (new file, 549 lines)
@@ -0,0 +1,549 @@
|
||||
defmodule WandererApp.Kills.MessageHandler do
|
||||
@moduledoc """
|
||||
Handles killmail message processing and broadcasting.
|
||||
"""
|
||||
|
||||
require Logger
|
||||
|
||||
alias WandererApp.Kills.{Config, Storage}
|
||||
alias WandererApp.Kills.Subscription.MapIntegration
|
||||
|
||||
@spec process_killmail_update(map()) :: :ok
|
||||
def process_killmail_update(payload) do
|
||||
case validate_killmail_payload(payload) do
|
||||
{:ok, %{"system_id" => system_id, "killmails" => killmails}} ->
|
||||
# Log each kill received
|
||||
log_received_killmails(killmails, system_id)
|
||||
|
||||
process_valid_killmail_update(system_id, killmails, payload)
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.error("[MessageHandler] Invalid killmail payload: #{inspect(reason)}")
|
||||
:ok
|
||||
end
|
||||
end
|
||||
|
||||
defp process_valid_killmail_update(system_id, killmails, payload) do
|
||||
{valid_killmails, failed_adaptations} =
|
||||
killmails
|
||||
|> Enum.filter(&is_map/1)
|
||||
|> Enum.with_index()
|
||||
|> Enum.reduce({[], []}, &process_killmail_for_adaptation/2)
|
||||
|
||||
# Reverse to maintain original order
|
||||
valid_killmails = Enum.reverse(valid_killmails)
|
||||
failed_adaptations = Enum.reverse(failed_adaptations)
|
||||
|
||||
# Store failed adaptations for potential retry
|
||||
if failed_adaptations != [] do
|
||||
store_failed_adaptations(system_id, failed_adaptations)
|
||||
end
|
||||
|
||||
Logger.debug(fn ->
|
||||
"[MessageHandler] Valid killmails after adaptation: #{length(valid_killmails)}"
|
||||
end)
|
||||
|
||||
if valid_killmails != [] do
|
||||
store_and_broadcast_killmails(system_id, valid_killmails, payload)
|
||||
else
|
||||
:ok
|
||||
end
|
||||
end
|
||||
|
||||
defp store_and_broadcast_killmails(system_id, valid_killmails, payload) do
|
||||
killmail_ttl = Config.killmail_ttl()
|
||||
kill_count_ttl = Config.kill_count_ttl()
|
||||
|
||||
case Storage.store_killmails(system_id, valid_killmails, killmail_ttl) do
|
||||
:ok ->
|
||||
handle_stored_killmails(system_id, valid_killmails, kill_count_ttl, payload)
|
||||
|
||||
error ->
|
||||
Logger.error(
|
||||
"[MessageHandler] Failed to store killmails for system #{system_id}: #{inspect(error)}"
|
||||
)
|
||||
|
||||
error
|
||||
end
|
||||
end
|
||||
|
||||
defp handle_stored_killmails(system_id, valid_killmails, kill_count_ttl, payload) do
|
||||
case Storage.update_kill_count(system_id, length(valid_killmails), kill_count_ttl) do
|
||||
:ok ->
|
||||
broadcast_killmails(system_id, valid_killmails, payload)
|
||||
:ok
|
||||
|
||||
error ->
|
||||
Logger.error(
|
||||
"[MessageHandler] Failed to update kill count for system #{system_id}: #{inspect(error)}"
|
||||
)
|
||||
|
||||
error
|
||||
end
|
||||
end
|
||||
|
||||
@spec process_kill_count_update(map()) :: :ok | {:error, atom()} | {:error, term()}
|
||||
def process_kill_count_update(payload) do
|
||||
case validate_kill_count_payload(payload) do
|
||||
{:ok, %{"system_id" => system_id, "count" => count}} ->
|
||||
case Storage.store_kill_count(system_id, count) do
|
||||
:ok ->
|
||||
broadcast_kill_count(system_id, payload)
|
||||
:ok
|
||||
|
||||
error ->
|
||||
Logger.error(
|
||||
"[MessageHandler] Failed to store kill count for system #{system_id}: #{inspect(error)}"
|
||||
)
|
||||
|
||||
error
|
||||
end
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.warning(
|
||||
"[MessageHandler] Invalid kill count payload: #{inspect(reason)}, payload: #{inspect(payload)}"
|
||||
)
|
||||
|
||||
{:error, :invalid_payload}
|
||||
end
|
||||
end
|
||||
|
||||
defp broadcast_kill_count(system_id, payload) do
|
||||
case MapIntegration.broadcast_kill_to_maps(%{
|
||||
"solar_system_id" => system_id,
|
||||
"count" => payload["count"],
|
||||
"type" => :kill_count
|
||||
}) do
|
||||
:ok ->
|
||||
:ok
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.warning("[MessageHandler] Failed to broadcast kill count: #{inspect(reason)}")
|
||||
:ok
|
||||
end
|
||||
end
|
||||
|
||||
defp broadcast_killmails(system_id, killmails, payload) do
|
||||
case MapIntegration.broadcast_kill_to_maps(%{
|
||||
"solar_system_id" => system_id,
|
||||
"killmails" => killmails,
|
||||
"timestamp" => payload["timestamp"],
|
||||
"type" => :killmail_update
|
||||
}) do
|
||||
:ok ->
|
||||
:ok
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.warning("[MessageHandler] Failed to broadcast killmails: #{inspect(reason)}")
|
||||
:ok
|
||||
end
|
||||
end
|
||||
|
||||
defp store_failed_adaptations(system_id, failed_kills) do
|
||||
# Store with a special key for retry processing
|
||||
key = "kills:failed_adaptations:#{system_id}"
|
||||
# Keep for 1 hour for potential retry
|
||||
ttl = :timer.hours(1)
|
||||
|
||||
case WandererApp.Cache.insert_or_update(
|
||||
key,
|
||||
failed_kills,
|
||||
fn existing ->
|
||||
# Merge with existing failed kills, keeping newest
|
||||
(failed_kills ++ existing)
|
||||
|> Enum.uniq_by(& &1["killmail_id"])
|
||||
# Limit to prevent unbounded growth
|
||||
|> Enum.take(100)
|
||||
end,
|
||||
ttl: ttl
|
||||
) do
|
||||
:ok ->
|
||||
Logger.debug(
|
||||
"[MessageHandler] Stored #{length(failed_kills)} failed adaptations for system #{system_id}"
|
||||
)
|
||||
|
||||
{:ok, _} ->
|
||||
Logger.debug(
|
||||
"[MessageHandler] Stored #{length(failed_kills)} failed adaptations for system #{system_id}"
|
||||
)
|
||||
|
||||
error ->
|
||||
Logger.error("[MessageHandler] Failed to store failed adaptations: #{inspect(error)}")
|
||||
end
|
||||
end
|
||||
|
||||
# Data adaptation functions (moved from DataAdapter module)
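# Kills arrive either pre-flattened (victim_*/final_blow_* keys) or in the nested format with "victim" and "attackers" maps; nested kills are flattened here into the shape the frontend expects.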
|
||||
|
||||
@type killmail :: map()
|
||||
@type adapter_result :: {:ok, killmail()} | {:error, term()}
|
||||
|
||||
@spec adapt_kill_data(any()) :: adapter_result()
|
||||
# Pattern match on zkillboard format - not supported
|
||||
defp adapt_kill_data(%{"killID" => kill_id}) do
|
||||
Logger.warning("[MessageHandler] Zkillboard format not supported: killID=#{kill_id}")
|
||||
{:error, :zkillboard_format_not_supported}
|
||||
end
|
||||
|
||||
# Pattern match on flat format - already adapted
|
||||
defp adapt_kill_data(%{"victim_char_id" => _} = kill) do
|
||||
validated_kill = validate_flat_format_kill(kill)
|
||||
|
||||
if map_size(validated_kill) > 0 do
|
||||
{:ok, validated_kill}
|
||||
else
|
||||
Logger.warning(
|
||||
"[MessageHandler] Invalid flat format kill: #{inspect(kill["killmail_id"])}"
|
||||
)
|
||||
{:error, :invalid_data}
|
||||
end
|
||||
end
|
||||
|
||||
# Pattern match on nested format with valid structure
|
||||
defp adapt_kill_data(
|
||||
%{
|
||||
"killmail_id" => killmail_id,
|
||||
"kill_time" => _kill_time,
|
||||
"victim" => victim
|
||||
} = kill
|
||||
)
|
||||
when is_map(victim) do
|
||||
# Validate and normalize IDs first
|
||||
with {:ok, valid_killmail_id} <- validate_killmail_id(killmail_id),
|
||||
{:ok, valid_system_id} <- get_and_validate_system_id(kill) do
|
||||
# Update kill with normalized IDs
|
||||
normalized_kill =
|
||||
kill
|
||||
|> Map.put("killmail_id", valid_killmail_id)
|
||||
|> Map.put("solar_system_id", valid_system_id)
|
||||
# Remove alternate key
|
||||
|> Map.delete("system_id")
|
||||
|
||||
adapted_kill = adapt_nested_format_kill(normalized_kill)
|
||||
|
||||
if map_size(adapted_kill) > 0 do
|
||||
{:ok, adapted_kill}
|
||||
else
|
||||
Logger.warning("[MessageHandler] Invalid nested format kill: #{valid_killmail_id}")
|
||||
{:error, :invalid_data}
|
||||
end
|
||||
else
|
||||
{:error, reason} ->
|
||||
Logger.warning("[MessageHandler] ID validation failed: #{inspect(reason)}")
|
||||
{:error, reason}
|
||||
end
|
||||
end
|
||||
|
||||
# Invalid data type
|
||||
defp adapt_kill_data(invalid_data) do
|
||||
data_type = if(is_nil(invalid_data), do: "nil", else: "#{inspect(invalid_data)}")
|
||||
Logger.warning("[MessageHandler] Invalid data type: #{data_type}")
|
||||
{:error, :invalid_format}
|
||||
end
|
||||
|
||||
# Validation and adaptation helper functions
|
||||
|
||||
@spec validate_flat_format_kill(map()) :: map()
|
||||
defp validate_flat_format_kill(kill) do
|
||||
required_fields = ["killmail_id", "kill_time", "solar_system_id"]
|
||||
|
||||
case validate_required_fields(kill, required_fields) do
|
||||
:ok ->
|
||||
kill
|
||||
|
||||
{:error, missing} ->
|
||||
Logger.warning(
|
||||
"[MessageHandler] Flat format kill missing required fields: #{inspect(missing)}"
|
||||
)
|
||||
|
||||
%{}
|
||||
end
|
||||
end
|
||||
|
||||
@spec adapt_nested_format_kill(map()) :: map()
|
||||
defp adapt_nested_format_kill(kill) do
|
||||
victim = kill["victim"]
|
||||
attackers = Map.get(kill, "attackers", [])
|
||||
zkb = Map.get(kill, "zkb", %{})
|
||||
|
||||
# Validate attackers is a list
|
||||
attackers_list = if is_list(attackers), do: attackers, else: []
|
||||
final_blow_attacker = find_final_blow_attacker(attackers_list)
|
||||
|
||||
adapted_kill =
|
||||
%{}
|
||||
|> add_core_kill_data(kill, zkb)
|
||||
|> add_victim_data(victim)
|
||||
|> add_final_blow_attacker_data(final_blow_attacker)
|
||||
|> add_kill_statistics(attackers_list, zkb)
|
||||
|
||||
# Validate that critical output fields are present
|
||||
case validate_required_output_fields(adapted_kill) do
|
||||
:ok ->
|
||||
adapted_kill
|
||||
|
||||
{:error, missing_fields} ->
|
||||
Logger.warning(
|
||||
"[MessageHandler] Kill adaptation failed - missing required fields: #{inspect(missing_fields)}, killmail_id: #{inspect(kill["killmail_id"])}"
|
||||
)
|
||||
|
||||
%{}
|
||||
end
|
||||
end
|
||||
|
||||
@spec add_core_kill_data(map(), map(), map()) :: map()
|
||||
defp add_core_kill_data(acc, kill, zkb) do
|
||||
# Handle both "solar_system_id" and "system_id"
|
||||
solar_system_id = kill["solar_system_id"] || kill["system_id"]
|
||||
|
||||
Map.merge(acc, %{
|
||||
"killmail_id" => kill["killmail_id"],
|
||||
"kill_time" => kill["kill_time"],
|
||||
"solar_system_id" => solar_system_id,
|
||||
"zkb" => zkb
|
||||
})
|
||||
end
|
||||
|
||||
@spec add_victim_data(map(), map()) :: map()
|
||||
defp add_victim_data(acc, victim) do
|
||||
victim_data = %{
|
||||
"victim_char_id" => victim["character_id"],
|
||||
"victim_char_name" => get_character_name(victim),
|
||||
"victim_corp_id" => victim["corporation_id"],
|
||||
"victim_corp_ticker" => get_corp_ticker(victim),
|
||||
"victim_corp_name" => get_corp_name(victim),
|
||||
"victim_alliance_id" => victim["alliance_id"],
|
||||
"victim_alliance_ticker" => get_alliance_ticker(victim),
|
||||
"victim_alliance_name" => get_alliance_name(victim),
|
||||
"victim_ship_type_id" => victim["ship_type_id"],
|
||||
"victim_ship_name" => get_ship_name(victim)
|
||||
}
|
||||
|
||||
Map.merge(acc, victim_data)
|
||||
end
|
||||
|
||||
@spec add_final_blow_attacker_data(map(), map()) :: map()
|
||||
defp add_final_blow_attacker_data(acc, attacker) do
|
||||
attacker_data = %{
|
||||
"final_blow_char_id" => attacker["character_id"],
|
||||
"final_blow_char_name" => get_character_name(attacker),
|
||||
"final_blow_corp_id" => attacker["corporation_id"],
|
||||
"final_blow_corp_ticker" => get_corp_ticker(attacker),
|
||||
"final_blow_corp_name" => get_corp_name(attacker),
|
||||
"final_blow_alliance_id" => attacker["alliance_id"],
|
||||
"final_blow_alliance_ticker" => get_alliance_ticker(attacker),
|
||||
"final_blow_alliance_name" => get_alliance_name(attacker),
|
||||
"final_blow_ship_type_id" => attacker["ship_type_id"],
|
||||
"final_blow_ship_name" => get_ship_name(attacker)
|
||||
}
|
||||
|
||||
Map.merge(acc, attacker_data)
|
||||
end
|
||||
|
||||
@spec add_kill_statistics(map(), list(), map()) :: map()
|
||||
defp add_kill_statistics(acc, attackers_list, zkb) do
|
||||
Map.merge(acc, %{
|
||||
"attacker_count" => length(attackers_list),
|
||||
"total_value" => zkb["total_value"] || zkb["totalValue"] || 0,
|
||||
"npc" => zkb["npc"] || false
|
||||
})
|
||||
end
|
||||
|
||||
# Critical fields that the frontend expects to be present in killmail data
|
||||
@required_output_fields [
|
||||
"killmail_id",
|
||||
"kill_time",
|
||||
"solar_system_id",
|
||||
"victim_ship_type_id",
|
||||
"attacker_count",
|
||||
"total_value"
|
||||
]
|
||||
|
||||
@spec validate_required_output_fields(map()) :: :ok | {:error, list(String.t())}
|
||||
defp validate_required_output_fields(adapted_kill) do
|
||||
validate_required_fields(adapted_kill, @required_output_fields)
|
||||
end
|
||||
|
||||
@spec validate_required_fields(map(), list(String.t())) :: :ok | {:error, list(String.t())}
|
||||
defp validate_required_fields(data, fields) do
|
||||
missing = Enum.filter(fields, &(not Map.has_key?(data, &1)))
|
||||
|
||||
case missing do
|
||||
[] -> :ok
|
||||
_ -> {:error, missing}
|
||||
end
|
||||
end
|
||||
|
||||
@spec find_final_blow_attacker(list(map()) | any()) :: map()
|
||||
defp find_final_blow_attacker(attackers) when is_list(attackers) do
|
||||
final_blow =
|
||||
Enum.find(attackers, %{}, fn
|
||||
%{"final_blow" => true} = attacker -> attacker
|
||||
_ -> false
|
||||
end)
|
||||
|
||||
if final_blow == %{} and length(attackers) > 0 do
|
||||
Logger.debug(fn ->
|
||||
"[MessageHandler] No final blow attacker found in #{length(attackers)} attackers"
|
||||
end)
|
||||
end
|
||||
|
||||
final_blow
|
||||
end
|
||||
|
||||
defp find_final_blow_attacker(_), do: %{}
|
||||
|
||||
# Generic field extraction with multiple possible field names
|
||||
@spec extract_field(map(), list(String.t())) :: String.t() | nil
|
||||
defp extract_field(data, field_names) when is_map(data) and is_list(field_names) do
|
||||
Enum.find_value(field_names, fn field_name ->
|
||||
case Map.get(data, field_name) do
|
||||
value when is_binary(value) and value != "" -> value
|
||||
_ -> nil
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
defp extract_field(_data, _field_names), do: nil
|
||||
|
||||
# Specific field extractors using the generic function
|
||||
@spec get_character_name(map() | any()) :: String.t() | nil
|
||||
defp get_character_name(data) when is_map(data) do
|
||||
# Try multiple possible field names
|
||||
field_names = ["attacker_name", "victim_name", "character_name", "name"]
|
||||
extract_field(data, field_names) ||
|
||||
case Map.get(data, "character") do
|
||||
%{"name" => name} when is_binary(name) -> name
|
||||
_ -> nil
|
||||
end
|
||||
end
|
||||
|
||||
defp get_character_name(_), do: nil
|
||||
|
||||
@spec get_corp_ticker(map() | any()) :: String.t() | nil
|
||||
defp get_corp_ticker(data) when is_map(data) do
|
||||
extract_field(data, ["corporation_ticker", "corp_ticker"])
|
||||
end
|
||||
defp get_corp_ticker(_), do: nil
|
||||
|
||||
@spec get_corp_name(map() | any()) :: String.t() | nil
|
||||
defp get_corp_name(data) when is_map(data) do
|
||||
extract_field(data, ["corporation_name", "corp_name"])
|
||||
end
|
||||
defp get_corp_name(_), do: nil
|
||||
|
||||
@spec get_alliance_ticker(map() | any()) :: String.t() | nil
|
||||
defp get_alliance_ticker(data) when is_map(data) do
|
||||
extract_field(data, ["alliance_ticker"])
|
||||
end
|
||||
defp get_alliance_ticker(_), do: nil
|
||||
|
||||
@spec get_alliance_name(map() | any()) :: String.t() | nil
|
||||
defp get_alliance_name(data) when is_map(data) do
|
||||
extract_field(data, ["alliance_name"])
|
||||
end
|
||||
defp get_alliance_name(_), do: nil
|
||||
|
||||
@spec get_ship_name(map() | any()) :: String.t() | nil
|
||||
defp get_ship_name(data) when is_map(data) do
|
||||
extract_field(data, ["ship_name", "ship_type_name"])
|
||||
end
|
||||
defp get_ship_name(_), do: nil
|
||||
|
||||
defp get_and_validate_system_id(kill) do
|
||||
system_id = kill["solar_system_id"] || kill["system_id"]
|
||||
validate_system_id(system_id)
|
||||
end
|
||||
|
||||
# Validation functions (inlined from Validation module)
|
||||
|
||||
@spec validate_system_id(any()) :: {:ok, integer()} | {:error, :invalid_system_id}
|
||||
defp validate_system_id(system_id)
|
||||
when is_integer(system_id) and system_id > 30_000_000 and system_id < 33_000_000 do
|
||||
{:ok, system_id}
|
||||
end
|
||||
|
||||
defp validate_system_id(system_id) when is_binary(system_id) do
|
||||
case Integer.parse(system_id) do
|
||||
{id, ""} when id > 30_000_000 and id < 33_000_000 ->
|
||||
{:ok, id}
|
||||
|
||||
_ ->
|
||||
{:error, :invalid_system_id}
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_system_id(_), do: {:error, :invalid_system_id}
|
||||
|
||||
@spec validate_killmail_id(any()) :: {:ok, integer()} | {:error, :invalid_killmail_id}
|
||||
defp validate_killmail_id(killmail_id) when is_integer(killmail_id) and killmail_id > 0 do
|
||||
{:ok, killmail_id}
|
||||
end
|
||||
|
||||
defp validate_killmail_id(killmail_id) when is_binary(killmail_id) do
|
||||
case Integer.parse(killmail_id) do
|
||||
{id, ""} when id > 0 ->
|
||||
{:ok, id}
|
||||
|
||||
_ ->
|
||||
{:error, :invalid_killmail_id}
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_killmail_id(_), do: {:error, :invalid_killmail_id}
|
||||
|
||||
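# Illustrative payload shape (values hypothetical): %{"system_id" => 30000142, "killmails" => [%{"killmail_id" => 123, ...}], "timestamp" => "..."}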
@spec validate_killmail_payload(map()) :: {:ok, map()} | {:error, atom()}
|
||||
defp validate_killmail_payload(%{"system_id" => system_id, "killmails" => killmails} = payload)
|
||||
when is_list(killmails) do
|
||||
with {:ok, valid_system_id} <- validate_system_id(system_id) do
|
||||
{:ok, %{payload | "system_id" => valid_system_id}}
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_killmail_payload(_), do: {:error, :invalid_payload}
|
||||
|
||||
@spec validate_kill_count_payload(map()) :: {:ok, map()} | {:error, atom()}
|
||||
defp validate_kill_count_payload(%{"system_id" => system_id, "count" => count} = payload)
|
||||
when is_integer(count) and count >= 0 do
|
||||
with {:ok, valid_system_id} <- validate_system_id(system_id) do
|
||||
{:ok, %{payload | "system_id" => valid_system_id}}
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_kill_count_payload(_), do: {:error, :invalid_kill_count_payload}
|
||||
|
||||
# Helper functions to reduce nesting
|
||||
|
||||
defp log_received_killmails(killmails, system_id) do
|
||||
Enum.each(killmails, fn kill ->
|
||||
killmail_id = kill["killmail_id"] || "unknown"
|
||||
kill_system_id = kill["solar_system_id"] || kill["system_id"] || system_id
|
||||
|
||||
Logger.debug(fn ->
|
||||
"[MessageHandler] Received kill: killmail_id=#{killmail_id}, system_id=#{kill_system_id}"
|
||||
end)
|
||||
end)
|
||||
end
|
||||
|
||||
defp process_killmail_for_adaptation({kill, index}, {valid, failed}) do
|
||||
# Log raw kill data
|
||||
Logger.debug(fn ->
|
||||
"[MessageHandler] Raw kill ##{index}: #{inspect(kill, pretty: true, limit: :infinity)}"
|
||||
end)
|
||||
|
||||
# Adapt and log result
|
||||
case adapt_kill_data(kill) do
|
||||
{:ok, adapted} ->
|
||||
Logger.debug(fn ->
|
||||
"[MessageHandler] Adapted kill ##{index}: #{inspect(adapted, pretty: true, limit: :infinity)}"
|
||||
end)
|
||||
|
||||
{[adapted | valid], failed}
|
||||
|
||||
{:error, reason} ->
|
||||
Logger.warning("[MessageHandler] Failed to adapt kill ##{index}: #{inspect(reason)}")
|
||||
# Store raw kill for potential retry
|
||||
failed_kill = Map.put(kill, "_adaptation_error", to_string(reason))
|
||||
{valid, [failed_kill | failed]}
|
||||
end
|
||||
end
|
||||
end
|
||||
lib/wanderer_app/kills/storage.ex (new file, 296 lines)
@@ -0,0 +1,296 @@
|
||||
defmodule WandererApp.Kills.Storage do
|
||||
@moduledoc """
|
||||
Manages caching and storage of killmail data.
|
||||
|
||||
Provides a centralized interface for storing and retrieving kill-related data
|
||||
using Cachex for distributed caching.
|
||||
"""
|
||||
|
||||
require Logger
|
||||
|
||||
alias WandererApp.Kills.Config
|
||||
|
||||
@doc """
|
||||
Stores killmails for a specific system.
|
||||
|
||||
Stores both individual killmails by ID and a list of kills for the system.
|
||||
"""
|
||||
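# Cache layout: "zkb:killmail:<killmail_id>" holds the full killmail map, while "zkb:kills:list:<system_id>" keeps the most recent killmail IDs for a system (capped at Config.kill_list_limit()).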
@spec store_killmails(integer(), list(map()), pos_integer()) :: :ok | {:error, term()}
|
||||
def store_killmails(system_id, killmails, ttl) do
|
||||
result1 = store_individual_killmails(killmails, ttl)
|
||||
|
||||
Logger.debug("[Storage] store_individual_killmails returned: #{inspect(result1)}")
|
||||
|
||||
result2 = update_system_kill_list(system_id, killmails, ttl)
|
||||
Logger.debug("[Storage] update_system_kill_list returned: #{inspect(result2)}")
|
||||
|
||||
case {result1, result2} do
|
||||
{:ok, :ok} ->
|
||||
:ok
|
||||
|
||||
{{:error, reason}, _} ->
|
||||
Logger.error("[Storage] Failed to store individual killmails: #{inspect(reason)}")
|
||||
{:error, reason}
|
||||
|
||||
{_, {:error, reason}} ->
|
||||
Logger.error("[Storage] Failed to update system kill list: #{inspect(reason)}")
|
||||
{:error, reason}
|
||||
|
||||
other ->
|
||||
Logger.error("[Storage] Unexpected results: #{inspect(other)}")
|
||||
{:error, {:unexpected_results, other}}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Stores or updates the kill count for a system.
|
||||
This should only be used for kill count updates from the WebSocket service.
|
||||
"""
|
||||
@spec store_kill_count(integer(), non_neg_integer()) :: :ok | {:error, any()}
|
||||
def store_kill_count(system_id, count) do
|
||||
key = "zkb:kills:#{system_id}"
|
||||
ttl = Config.kill_count_ttl()
|
||||
metadata_key = "zkb:kills:metadata:#{system_id}"
|
||||
|
||||
# Store both the count and metadata about when it was set
|
||||
# This helps detect if we should trust incremental updates or the absolute count
|
||||
timestamp = System.system_time(:millisecond)
|
||||
|
||||
with :ok <- WandererApp.Cache.insert(key, count, ttl: ttl),
|
||||
:ok <-
|
||||
WandererApp.Cache.insert(
|
||||
metadata_key,
|
||||
%{
|
||||
"source" => "websocket",
|
||||
"timestamp" => timestamp,
|
||||
"absolute_count" => count
|
||||
},
|
||||
ttl: ttl
|
||||
) do
|
||||
:ok
|
||||
else
|
||||
# Nebulex might return true instead of :ok
|
||||
true -> :ok
|
||||
error -> error
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Updates the kill count by adding to the existing count.
|
||||
This is used when processing incoming killmails.
|
||||
"""
|
||||
@spec update_kill_count(integer(), non_neg_integer(), pos_integer()) :: :ok | {:error, any()}
|
||||
def update_kill_count(system_id, additional_kills, ttl) do
|
||||
key = "zkb:kills:#{system_id}"
|
||||
metadata_key = "zkb:kills:metadata:#{system_id}"
|
||||
|
||||
# Check metadata to see if we should trust incremental updates
|
||||
metadata = WandererApp.Cache.get(metadata_key)
|
||||
current_time = System.system_time(:millisecond)
|
||||
|
||||
# If we have recent websocket data (within 5 seconds), don't increment
|
||||
# This prevents double counting when both killmail and count updates arrive
|
||||
should_increment =
|
||||
case metadata do
|
||||
%{"source" => "websocket", "timestamp" => ws_timestamp} ->
|
||||
current_time - ws_timestamp > 5000
|
||||
|
||||
_ ->
|
||||
true
|
||||
end
|
||||
|
||||
if should_increment do
|
||||
# Use atomic update operation
|
||||
result =
|
||||
WandererApp.Cache.insert_or_update(
|
||||
key,
|
||||
additional_kills,
|
||||
fn current_count -> current_count + additional_kills end,
|
||||
ttl: ttl
|
||||
)
|
||||
|
||||
case result do
|
||||
:ok ->
|
||||
# Update metadata to indicate this was an incremental update
|
||||
WandererApp.Cache.insert(
|
||||
metadata_key,
|
||||
%{
|
||||
"source" => "incremental",
|
||||
"timestamp" => current_time,
|
||||
"last_increment" => additional_kills
|
||||
},
|
||||
ttl: ttl
|
||||
)
|
||||
|
||||
:ok
|
||||
|
||||
{:ok, _} ->
|
||||
:ok
|
||||
|
||||
true ->
|
||||
:ok
|
||||
|
||||
error ->
|
||||
error
|
||||
end
|
||||
else
|
||||
# Skip increment as we have recent absolute count from websocket
|
||||
Logger.debug(
|
||||
"[Storage] Skipping kill count increment for system #{system_id} due to recent websocket update"
|
||||
)
|
||||
|
||||
:ok
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Retrieves the kill count for a system.
|
||||
"""
|
||||
@spec get_kill_count(integer()) :: {:ok, non_neg_integer()} | {:error, :not_found}
|
||||
def get_kill_count(system_id) do
|
||||
key = "zkb:kills:#{system_id}"
|
||||
|
||||
case WandererApp.Cache.get(key) do
|
||||
nil -> {:error, :not_found}
|
||||
count -> {:ok, count}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Retrieves a specific killmail by ID.
|
||||
"""
|
||||
@spec get_killmail(integer()) :: {:ok, map()} | {:error, :not_found}
|
||||
def get_killmail(killmail_id) do
|
||||
key = "zkb:killmail:#{killmail_id}"
|
||||
|
||||
case WandererApp.Cache.get(key) do
|
||||
nil -> {:error, :not_found}
|
||||
killmail -> {:ok, killmail}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Retrieves all kills for a specific system.
|
||||
"""
|
||||
@spec get_system_kills(integer()) :: {:ok, list(map())} | {:error, :not_found}
|
||||
def get_system_kills(system_id) do
|
||||
# Get the list of killmail IDs for this system
|
||||
kill_ids = WandererApp.Cache.get("zkb:kills:list:#{system_id}") || []
|
||||
|
||||
if kill_ids == [] do
|
||||
{:error, :not_found}
|
||||
else
|
||||
# Fetch details for each killmail
|
||||
kills =
|
||||
kill_ids
|
||||
|> Enum.map(&WandererApp.Cache.get("zkb:killmail:#{&1}"))
|
||||
|> Enum.reject(&is_nil/1)
|
||||
|
||||
{:ok, kills}
|
||||
end
|
||||
end
|
||||
|
||||
@doc """
|
||||
Reconciles kill count with actual kill list length.
|
||||
This can be called periodically to ensure consistency.
|
||||
"""
|
||||
@spec reconcile_kill_count(integer()) :: :ok | {:error, term()}
|
||||
def reconcile_kill_count(system_id) do
|
||||
key = "zkb:kills:#{system_id}"
|
||||
list_key = "zkb:kills:list:#{system_id}"
|
||||
metadata_key = "zkb:kills:metadata:#{system_id}"
|
||||
ttl = Config.kill_count_ttl()
|
||||
|
||||
# Get actual kill list length
|
||||
actual_count =
|
||||
case WandererApp.Cache.get(list_key) do
|
||||
nil -> 0
|
||||
list when is_list(list) -> length(list)
|
||||
_ -> 0
|
||||
end
|
||||
|
||||
# Update the count to match reality
|
||||
with :ok <- WandererApp.Cache.insert(key, actual_count, ttl: ttl),
|
||||
:ok <-
|
||||
WandererApp.Cache.insert(
|
||||
metadata_key,
|
||||
%{
|
||||
"source" => "reconciliation",
|
||||
"timestamp" => System.system_time(:millisecond),
|
||||
"actual_count" => actual_count
|
||||
},
|
||||
ttl: ttl
|
||||
) do
|
||||
:ok
|
||||
else
|
||||
true -> :ok
|
||||
error -> error
|
||||
end
|
||||
end
|
||||
|
||||
# Private functions
|
||||
|
||||
defp store_individual_killmails(killmails, ttl) do
|
||||
results =
|
||||
Enum.map(killmails, fn killmail ->
|
||||
killmail_id = Map.get(killmail, "killmail_id") || Map.get(killmail, :killmail_id)
|
||||
|
||||
if killmail_id do
|
||||
key = "zkb:killmail:#{killmail_id}"
|
||||
# Capture the result of cache insert
|
||||
WandererApp.Cache.insert(key, killmail, ttl: ttl)
|
||||
else
|
||||
{:error, :missing_killmail_id}
|
||||
end
|
||||
end)
|
||||
|
||||
# Check if any failed
|
||||
case Enum.find(results, &match?({:error, _}, &1)) do
|
||||
nil -> :ok
|
||||
error -> error
|
||||
end
|
||||
end
|
||||
|
||||
defp update_system_kill_list(system_id, new_killmails, ttl) do
|
||||
# Store as a list of killmail IDs for compatibility with ZkbDataFetcher
|
||||
key = "zkb:kills:list:#{system_id}"
|
||||
kill_list_limit = Config.kill_list_limit()
|
||||
|
||||
# Extract killmail IDs from new kills
|
||||
new_ids =
|
||||
new_killmails
|
||||
|> Enum.map(fn kill ->
|
||||
Map.get(kill, "killmail_id") || Map.get(kill, :killmail_id)
|
||||
end)
|
||||
|> Enum.reject(&is_nil/1)
|
||||
|
||||
# Use atomic update to prevent race conditions
|
||||
case WandererApp.Cache.insert_or_update(
|
||||
key,
|
||||
new_ids,
|
||||
fn existing_ids ->
|
||||
# Merge with existing, keeping unique IDs and newest first
|
||||
(new_ids ++ existing_ids)
|
||||
|> Enum.uniq()
|
||||
|> Enum.take(kill_list_limit)
|
||||
end,
|
||||
ttl: ttl
|
||||
) do
|
||||
:ok ->
|
||||
:ok
|
||||
|
||||
{:ok, _} ->
|
||||
:ok
|
||||
|
||||
true ->
|
||||
:ok
|
||||
|
||||
error ->
|
||||
Logger.error(
|
||||
"[Storage] Failed to update system kill list for system #{system_id}: #{inspect(error)}"
|
||||
)
|
||||
|
||||
{:error, :cache_update_failed}
|
||||
end
|
||||
end
|
||||
end
|
||||
lib/wanderer_app/kills/subscription/manager.ex (new file, 87 lines)
@@ -0,0 +1,87 @@
|
||||
defmodule WandererApp.Kills.Subscription.Manager do
|
||||
@moduledoc """
|
||||
Manages system subscriptions for kills WebSocket service.
|
||||
"""
|
||||
require Logger
|
||||
|
||||
@type subscriptions :: MapSet.t(integer())
|
||||
|
||||
@spec subscribe_systems(subscriptions(), [integer()]) :: {subscriptions(), [integer()]}
|
||||
def subscribe_systems(current_systems, system_ids) when is_list(system_ids) do
|
||||
system_set = MapSet.new(system_ids)
|
||||
new_systems = MapSet.difference(system_set, current_systems)
|
||||
new_list = MapSet.to_list(new_systems)
|
||||
{MapSet.union(current_systems, new_systems), new_list}
|
||||
end
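# Illustrative: subscribe_systems(MapSet.new([1, 2]), [2, 3]) returns {MapSet.new([1, 2, 3]), [3]} - only the genuinely new IDs are reported for the server sync.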
|
||||
|
||||
@spec unsubscribe_systems(subscriptions(), [integer()]) :: {subscriptions(), [integer()]}
|
||||
def unsubscribe_systems(current_systems, system_ids) when is_list(system_ids) do
|
||||
system_set = MapSet.new(system_ids)
|
||||
systems_to_remove = MapSet.intersection(current_systems, system_set)
|
||||
removed_list = MapSet.to_list(systems_to_remove)
|
||||
|
||||
{MapSet.difference(current_systems, systems_to_remove), removed_list}
|
||||
end
|
||||
|
||||
@spec sync_with_server(pid() | nil, [integer()], [integer()]) :: :ok
|
||||
def sync_with_server(nil, _to_subscribe, _to_unsubscribe) do
|
||||
Logger.warning("[Manager] Attempted to sync with server but socket_pid is nil")
|
||||
:ok
|
||||
end
|
||||
|
||||
def sync_with_server(socket_pid, to_subscribe, to_unsubscribe) do
|
||||
if to_unsubscribe != [] do
|
||||
send(socket_pid, {:unsubscribe_systems, to_unsubscribe})
|
||||
end
|
||||
|
||||
if to_subscribe != [] do
|
||||
send(socket_pid, {:subscribe_systems, to_subscribe})
|
||||
end
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
@spec resubscribe_all(pid(), subscriptions()) :: :ok
|
||||
def resubscribe_all(socket_pid, subscribed_systems) do
|
||||
system_list = MapSet.to_list(subscribed_systems)
|
||||
|
||||
if system_list != [] do
|
||||
Logger.info(
|
||||
"[Manager] Resubscribing to all #{length(system_list)} systems after reconnection"
|
||||
)
|
||||
|
||||
send(socket_pid, {:subscribe_systems, system_list})
|
||||
else
|
||||
Logger.debug(fn -> "[Manager] No systems to resubscribe after reconnection" end)
|
||||
end
|
||||
|
||||
:ok
|
||||
end
|
||||
|
||||
@spec get_stats(subscriptions()) :: map()
|
||||
def get_stats(subscribed_systems) do
|
||||
%{
|
||||
total_subscribed: MapSet.size(subscribed_systems),
|
||||
subscribed_systems: MapSet.to_list(subscribed_systems) |> Enum.sort()
|
||||
}
|
||||
end
|
||||
|
||||
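# Drops subscriptions for systems that no longer belong to any active map; returns {remaining_subscriptions, removed_system_ids}.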
@spec cleanup_subscriptions(subscriptions()) :: {subscriptions(), [integer()]}
|
||||
def cleanup_subscriptions(subscribed_systems) do
|
||||
systems_to_check = MapSet.to_list(subscribed_systems)
|
||||
# Use MapIntegration's system_in_active_map? to avoid duplication
|
||||
valid_systems =
|
||||
Enum.filter(
|
||||
systems_to_check,
|
||||
&WandererApp.Kills.Subscription.MapIntegration.system_in_active_map?/1
|
||||
)
|
||||
|
||||
invalid_systems = systems_to_check -- valid_systems
|
||||
|
||||
if invalid_systems != [] do
|
||||
{MapSet.new(valid_systems), invalid_systems}
|
||||
else
|
||||
{subscribed_systems, []}
|
||||
end
|
||||
end
|
||||
end
|
||||
lib/wanderer_app/kills/subscription/map_integration.ex (new file, 293 lines)
@@ -0,0 +1,293 @@
|
||||
defmodule WandererApp.Kills.Subscription.MapIntegration do
  @moduledoc """
  Handles integration between the kills WebSocket service and the map system.

  Manages automatic subscription updates when maps change and provides
  utilities for syncing kill data with map systems.
  """

  require Logger

  @doc """
  Handles updates when map systems change.

  Determines which systems to subscribe/unsubscribe based on the update.
  """
  @spec handle_map_systems_updated([integer()], MapSet.t(integer())) ::
          {:ok, [integer()], [integer()]}
  def handle_map_systems_updated(system_ids, current_subscriptions) when is_list(system_ids) do
    # Systems to subscribe: in the update and in active maps but not currently subscribed
    new_systems =
      system_ids
      |> Enum.reject(&MapSet.member?(current_subscriptions, &1))

    # Systems to unsubscribe: currently subscribed but no longer in any active map
    obsolete_systems =
      current_subscriptions
      |> MapSet.to_list()
      |> Enum.reject(&(&1 in system_ids))

    if new_systems != [] or obsolete_systems != [] do
      Logger.debug(fn ->
        "[MapIntegration] Map systems updated - " <>
          "New: #{length(new_systems)}, Obsolete: #{length(obsolete_systems)}, " <>
          "Total active: #{length(system_ids)}"
      end)
    end

    {:ok, new_systems, obsolete_systems}
  end

  @doc """
  Gets all unique system IDs across all active maps.

  This function queries the DATABASE for all persisted maps and their systems,
  regardless of whether those maps have active GenServer processes running.

  This is different from `get_tracked_system_ids/0` which only returns systems
  from maps with live processes in the Registry.

  Use this function when you need a complete view of all systems across all
  stored maps (e.g., for bulk operations or reporting).

  This replaces the duplicate functionality from SystemTracker.
  """
  @spec get_all_map_systems() :: MapSet.t(integer())
  def get_all_map_systems do
    {:ok, maps} = WandererApp.Maps.get_available_maps()

    # Get all map IDs
    map_ids = Enum.map(maps, & &1.id)

    # Batch query all systems for all maps at once
    all_systems = WandererApp.MapSystemRepo.get_all_by_maps(map_ids)

    # Handle direct list return from repo
    all_systems
    |> Enum.map(& &1.solar_system_id)
    |> MapSet.new()
  end

  @doc """
  Gets all system IDs that should be tracked for kills.

  Returns a list of unique system IDs from all active maps.

  This function returns systems from LIVE MAP PROCESSES only - maps that are currently
  running in the system. It uses the Registry to find active map GenServers.

  This is different from `get_all_map_systems/0` which queries the database for ALL
  persisted maps regardless of whether they have an active process.

  Use this function when you need to know which systems are actively being tracked
  by running map processes (e.g., for real-time updates).

  This consolidates functionality from SystemTracker.
  """
  @spec get_tracked_system_ids() :: {:ok, list(integer())} | {:error, term()}
  def get_tracked_system_ids do
    try do
      # Get systems from currently running maps
      active_maps = WandererApp.Map.RegistryHelper.list_all_maps()

      Logger.debug("[MapIntegration] Found #{length(active_maps)} active maps")

      map_systems =
        active_maps
        |> Enum.map(fn %{id: map_id} ->
          case WandererApp.MapSystemRepo.get_visible_by_map(map_id) do
            {:ok, systems} ->
              system_ids = Enum.map(systems, & &1.solar_system_id)
              Logger.debug("[MapIntegration] Map #{map_id} has #{length(system_ids)} systems")
              {map_id, system_ids}

            _ ->
              Logger.warning("[MapIntegration] Failed to get systems for map #{map_id}")
              {map_id, []}
          end
        end)

      system_ids =
        map_systems
        |> Enum.flat_map(fn {_map_id, systems} -> systems end)
        |> Enum.reject(&is_nil/1)
        |> Enum.uniq()

      Logger.debug(fn ->
        "[MapIntegration] Total tracked systems: #{length(system_ids)} across #{length(active_maps)} maps"
      end)

      {:ok, system_ids}
    rescue
      error ->
        Logger.error("[MapIntegration] Failed to get tracked systems: #{inspect(error)}")
        {:error, error}
    end
  end

  @doc """
  Gets all system IDs for a specific map.
  """
  @spec get_map_system_ids(String.t()) :: {:ok, [integer()]} | {:error, term()}
  def get_map_system_ids(map_id) do
    case WandererApp.MapSystemRepo.get_all_by_map(map_id) do
      {:ok, systems} ->
        system_ids = Enum.map(systems, & &1.solar_system_id)
        {:ok, system_ids}

      error ->
        Logger.error(
          "[MapIntegration] Failed to get systems for map #{map_id}: #{inspect(error)}"
        )

        error
    end
  end

  @doc """
  Checks if a system is in any active map.
  """
  @spec system_in_active_map?(integer()) :: boolean()
  def system_in_active_map?(system_id) do
    {:ok, maps} = WandererApp.Maps.get_available_maps()
    Enum.any?(maps, &system_in_map?(&1, system_id))
  end

  @doc """
  Broadcasts kill data to relevant map servers.
  """
  @spec broadcast_kill_to_maps(map()) :: :ok | {:error, term()}
  def broadcast_kill_to_maps(kill_data) when is_map(kill_data) do
    case Map.get(kill_data, "solar_system_id") do
      system_id when is_integer(system_id) ->
        # Use the index to find maps containing this system
        map_ids = WandererApp.Kills.Subscription.SystemMapIndex.get_maps_for_system(system_id)

        # Broadcast to each relevant map
        Enum.each(map_ids, fn map_id ->
          Phoenix.PubSub.broadcast(
            WandererApp.PubSub,
            "map:#{map_id}",
            {:map_kill, kill_data}
          )
        end)

        :ok

      system_id when is_binary(system_id) ->
        Logger.warning(
          "[MapIntegration] Invalid solar_system_id format (string): #{inspect(system_id)}"
        )

        {:error, {:invalid_system_id_format, system_id}}

      nil ->
        Logger.warning(
          "[MapIntegration] Missing solar_system_id in kill data: #{inspect(Map.keys(kill_data))}"
        )

        {:error, {:missing_solar_system_id, kill_data}}

      invalid_id ->
        Logger.warning("[MapIntegration] Invalid solar_system_id type: #{inspect(invalid_id)}")
        {:error, {:invalid_system_id_type, invalid_id}}
    end
  end

  def broadcast_kill_to_maps(invalid_data) do
    Logger.warning(
      "[MapIntegration] Invalid kill_data type (expected map): #{inspect(invalid_data)}"
    )

    {:error, {:invalid_kill_data_type, invalid_data}}
  end

  @doc """
  Gets subscription statistics grouped by map.
  """
  @spec get_map_subscription_stats(MapSet.t(integer())) :: map()
  def get_map_subscription_stats(subscribed_systems) do
    {:ok, maps} = WandererApp.Maps.get_available_maps()
    stats = Enum.map(maps, &get_map_stats(&1, subscribed_systems))

    %{
      maps: stats,
      total_subscribed: MapSet.size(subscribed_systems),
      total_maps: length(maps)
    }
  end

  @doc """
  Handles map deletion by returning systems to unsubscribe.
  """
  @spec handle_map_deleted(String.t(), MapSet.t(integer())) :: [integer()]
  def handle_map_deleted(map_id, current_subscriptions) do
    # Get systems from the deleted map
    case get_map_system_ids(map_id) do
      {:ok, deleted_systems} ->
        # Precompute all active systems to avoid O(N×M) queries
        active_systems = get_all_active_systems_set()

        # Only unsubscribe systems that aren't in other maps
        deleted_systems
        |> Enum.filter(&MapSet.member?(current_subscriptions, &1))
        |> Enum.reject(&MapSet.member?(active_systems, &1))

      _ ->
        []
    end
  end

  # Helper functions to reduce nesting

  defp get_all_active_systems_set do
    {:ok, maps} = WandererApp.Maps.get_available_maps()

    maps
    |> Enum.flat_map(&get_map_systems_or_empty/1)
    |> MapSet.new()
  end

  defp get_map_systems_or_empty(map) do
    case get_map_system_ids(map.id) do
      {:ok, system_ids} -> system_ids
      _ -> []
    end
  end

  defp system_in_map?(map, system_id) do
    case WandererApp.MapSystemRepo.get_by_map_and_solar_system_id(map.id, system_id) do
      {:ok, _system} -> true
      _ -> false
    end
  end

  defp get_map_stats(map, subscribed_systems) do
    case get_map_system_ids(map.id) do
      {:ok, system_ids} ->
        subscribed_count =
          system_ids
          |> Enum.filter(&MapSet.member?(subscribed_systems, &1))
          |> length()

        %{
          map_id: map.id,
          map_name: map.name,
          total_systems: length(system_ids),
          subscribed_systems: subscribed_count,
          subscription_rate:
            if(length(system_ids) > 0,
              do: subscribed_count / length(system_ids) * 100,
              else: 0
            )
        }

      _ ->
        %{
          map_id: map.id,
          map_name: map.name,
          error: "Failed to load systems"
        }
    end
  end
end
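A sketch of how this module composes with the subscription manager shown earlier, assuming the caller tracks current_subscriptions as a MapSet and holds the kills WebSocket socket_pid:

    {:ok, to_subscribe, to_unsubscribe} =
      WandererApp.Kills.Subscription.MapIntegration.handle_map_systems_updated(
        system_ids,
        current_subscriptions
      )

    # sync_with_server/3 from the manager module sends {:subscribe_systems, ...} /
    # {:unsubscribe_systems, ...} to the WebSocket process.
    :ok = sync_with_server(socket_pid, to_subscribe, to_unsubscribe)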
130 lib/wanderer_app/kills/subscription/system_map_index.ex Normal file
@@ -0,0 +1,130 @@
defmodule WandererApp.Kills.Subscription.SystemMapIndex do
  @moduledoc """
  Maintains an in-memory index of system_id -> [map_ids] for efficient kill broadcasting.

  This index prevents N+1 queries when broadcasting kills to relevant maps.
  """

  use GenServer
  require Logger

  @table_name :kills_system_map_index
  @refresh_interval :timer.minutes(5)

  # Client API

  def start_link(opts) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @doc """
  Gets all map IDs that contain the given system.
  """
  @spec get_maps_for_system(integer()) :: [String.t()]
  def get_maps_for_system(system_id) do
    case :ets.lookup(@table_name, system_id) do
      [{^system_id, map_ids}] -> map_ids
      [] -> []
    end
  end

  @doc """
  Refreshes the index immediately.
  """
  @spec refresh() :: :ok
  def refresh do
    GenServer.cast(__MODULE__, :refresh)
  end

  # Server callbacks

  @impl true
  def init(_opts) do
    # Create ETS table for fast lookups
    :ets.new(@table_name, [:set, :protected, :named_table, read_concurrency: true])

    # Initial build
    send(self(), :build_index)

    # Schedule periodic refresh
    schedule_refresh()

    {:ok, %{}}
  end

  @impl true
  def handle_info(:build_index, state) do
    build_index()
    {:noreply, state}
  end

  def handle_info(:refresh, state) do
    build_index()
    schedule_refresh()
    {:noreply, state}
  end

  @impl true
  def handle_cast(:refresh, state) do
    build_index()
    {:noreply, state}
  end

  # Private functions

  defp build_index do
    Logger.debug("[SystemMapIndex] Building system->maps index")

    case fetch_all_map_systems() do
      {:ok, index_data} ->
        # Clear and rebuild the table
        :ets.delete_all_objects(@table_name)

        # Insert all entries
        Enum.each(index_data, fn {system_id, map_ids} ->
          :ets.insert(@table_name, {system_id, map_ids})
        end)

        Logger.debug("[SystemMapIndex] Index built with #{map_size(index_data)} systems")

      {:error, reason} ->
        Logger.error("[SystemMapIndex] Failed to build index: #{inspect(reason)}")
    end
  end

  defp fetch_all_map_systems do
    try do
      {:ok, maps} = WandererApp.Maps.get_available_maps()

      # Build the index: system_id -> [map_ids]
      index =
        maps
        |> Enum.reduce(%{}, fn map, acc ->
          case WandererApp.MapSystemRepo.get_all_by_map(map.id) do
            {:ok, systems} ->
              # Add this map to each system's list
              Enum.reduce(systems, acc, fn system, acc2 ->
                Map.update(acc2, system.solar_system_id, [map.id], &[map.id | &1])
              end)

            _ ->
              acc
          end
        end)
        |> Enum.map(fn {system_id, map_ids} ->
          # Remove duplicates and convert to list
          {system_id, Enum.uniq(map_ids)}
        end)
        |> Map.new()

      {:ok, index}
    rescue
      e ->
        {:error, e}
    end
  end

  defp schedule_refresh do
    Process.send_after(self(), :refresh, @refresh_interval)
  end
end
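broadcast_kill_to_maps/1 publishes {:map_kill, kill_data} on the "map:#{map_id}" PubSub topic after a single :ets.lookup in this index, so the per-kill cost does not grow with the number of maps. A consumer sketch (the process wiring and the apply_kill/2 helper are illustrative assumptions, not part of the diff):

    # In the consuming process (e.g. a map server or LiveView), subscribe once:
    Phoenix.PubSub.subscribe(WandererApp.PubSub, "map:#{map_id}")

    # ...and handle the broadcast:
    def handle_info({:map_kill, kill_data}, state) do
      # kill_data carries "solar_system_id"; the index guarantees this map contains it.
      {:noreply, apply_kill(state, kill_data)}
    end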
23 lib/wanderer_app/kills/supervisor.ex Normal file
@@ -0,0 +1,23 @@
defmodule WandererApp.Kills.Supervisor do
  @moduledoc """
  Supervisor for the kills subsystem.
  """
  use Supervisor

  @spec start_link(keyword()) :: Supervisor.on_start()
  def start_link(opts) do
    Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @impl true
  def init(_opts) do
    children = [
      {Task.Supervisor, name: WandererApp.Kills.TaskSupervisor},
      {WandererApp.Kills.Subscription.SystemMapIndex, []},
      {WandererApp.Kills.Client, []},
      {WandererApp.Kills.MapEventListener, []}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end
end
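A sketch of how this supervisor might be attached to the application's supervision tree, gated on the same config key the kill fetcher below reads; the surrounding children list and supervisor name in application.ex are assumptions:

    kills_children =
      if Application.get_env(:wanderer_app, :wanderer_kills_service_enabled, true) do
        [WandererApp.Kills.Supervisor]
      else
        []
      end

    Supervisor.start_link(base_children ++ kills_children,
      strategy: :one_for_one,
      name: WandererApp.Supervisor
    )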
@@ -138,13 +138,8 @@ defmodule WandererApp.Map do
  def add_characters!(map, []), do: map

  def add_characters!(%{map_id: map_id} = map, [character | rest]) do
    case add_character(map_id, character) do
      :ok ->
        add_characters!(map, rest)

      {:error, :already_exists} ->
        add_characters!(map, rest)
    end
    add_character(map_id, character)
    add_characters!(map, rest)
  end

  def add_character(
@@ -172,15 +167,15 @@ defmodule WandererApp.Map do
    map_id
    |> update_map(%{characters: [character_id | characters]})

    WandererApp.Cache.insert(
      "map:#{map_id}:character:#{character_id}:alliance_id",
      alliance_id
    )
    # WandererApp.Cache.insert(
    #   "map:#{map_id}:character:#{character_id}:alliance_id",
    #   alliance_id
    # )

    WandererApp.Cache.insert(
      "map:#{map_id}:character:#{character_id}:corporation_id",
      corporation_id
    )
    # WandererApp.Cache.insert(
    #   "map:#{map_id}:character:#{character_id}:corporation_id",
    #   corporation_id
    # )

    # WandererApp.Cache.insert(
    #   "map:#{map_id}:character:#{character_id}:solar_system_id",

@@ -97,7 +97,7 @@ defmodule WandererApp.Map.SubscriptionManager do
       ) do
    %{
      plans: plans,
      extra_characters_100: extra_characters_100,
      extra_characters_50: extra_characters_50,
      extra_hubs_10: extra_hubs_10
    } = WandererApp.Env.subscription_settings()

@@ -113,7 +113,7 @@ defmodule WandererApp.Map.SubscriptionManager do
      case characters_limit > plan_characters_limit do
        true ->
          estimated_price +
            (characters_limit - plan_characters_limit) / 100 * extra_characters_100
            (characters_limit - plan_characters_limit) / 50 * extra_characters_50

        _ ->
          estimated_price
@@ -153,7 +153,7 @@ defmodule WandererApp.Map.SubscriptionManager do
       ) do
    %{
      plans: plans,
      extra_characters_100: extra_characters_100,
      extra_characters_50: extra_characters_50,
      extra_hubs_10: extra_hubs_10
    } = WandererApp.Env.subscription_settings()

@@ -170,7 +170,7 @@ defmodule WandererApp.Map.SubscriptionManager do
      case characters_limit > sub_characters_limit do
        true ->
          additional_price +
            (characters_limit - sub_characters_limit) / 100 * extra_characters_100
            (characters_limit - sub_characters_limit) / 50 * extra_characters_50

        _ ->
          additional_price
@@ -186,6 +186,7 @@ defmodule WandererApp.Map.SubscriptionManager do
    end

    period = get_active_months(selected_subscription)

    total_price = additional_price * period

    {:ok, discount} =
@@ -200,9 +201,16 @@ defmodule WandererApp.Map.SubscriptionManager do
  end

  defp get_active_months(subscription) do
    subscription.active_till
    |> Timex.shift(days: 5)
    |> Timex.diff(Timex.now(), :months)
    months =
      subscription.active_till
      |> Timex.shift(days: 5)
      |> Timex.diff(Timex.now(), :months)

    if months == 0 do
      1
    else
      months
    end
  end

  defp calc_discount(
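With illustrative numbers (not taken from the actual subscription settings), the revised formula bills extra characters in blocks of 50:

    # characters_limit = 300, plan_characters_limit = 100, extra_characters_50 = 10
    (300 - 100) / 50 * 10
    # => 40.0 added to the estimated price

get_active_months/1 now also clamps to a minimum of one month, so a subscription whose active_till falls within the current month is still billed for one period.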
@@ -1,20 +1,19 @@
defmodule WandererApp.Map.ZkbDataFetcher do
  @moduledoc """
  Refreshes the map zKillboard data every 15 seconds.
  Refreshes and broadcasts map kill data every 15 seconds.
  Works with cache data populated by the WandererKills WebSocket service.
  """
  use GenServer

  require Logger

  alias WandererApp.Zkb.KillsProvider.KillsCache
  alias WandererApp.Map.Server.Impl, as: MapServerImpl

  @interval :timer.seconds(15)
  @store_map_kills_timeout :timer.hours(1)
  @killmail_ttl_hours 24
  @logger Application.compile_env(:wanderer_app, :logger)

  # This means 120 “ticks” of 15s each → ~30 minutes
  @preload_cycle_ticks 120

  def start_link(_) do
    GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
  end
@@ -22,53 +21,40 @@ defmodule WandererApp.Map.ZkbDataFetcher do
  @impl true
  def init(_arg) do
    {:ok, _timer_ref} = :timer.send_interval(@interval, :fetch_data)
    {:ok, %{iteration: 0}}
    {:ok, %{}}
  end

  @impl true
  def handle_info(:fetch_data, %{iteration: iteration} = state) do
    zkill_preload_disabled = WandererApp.Env.zkill_preload_disabled?()
  def handle_info(:fetch_data, state) do
    kills_enabled = Application.get_env(:wanderer_app, :wanderer_kills_service_enabled, true)

    WandererApp.Map.RegistryHelper.list_all_maps()
    |> Task.async_stream(
      fn %{id: map_id, pid: _server_pid} ->
        try do
          if WandererApp.Map.Server.map_pid(map_id) do
            update_map_kills(map_id)
    if kills_enabled do
      WandererApp.Map.RegistryHelper.list_all_maps()
      |> Task.async_stream(
        fn %{id: map_id, pid: _server_pid} ->
          try do
            if WandererApp.Map.Server.map_pid(map_id) do
              # Always update kill counts
              update_map_kills(map_id)

              {:ok, is_subscription_active} = map_id |> WandererApp.Map.is_subscription_active?()

              can_preload_zkill = not zkill_preload_disabled && is_subscription_active

              if can_preload_zkill do
                update_detailed_map_kills(map_id)
              # Update detailed kills for maps with active subscriptions
              {:ok, is_subscription_active} = map_id |> WandererApp.Map.is_subscription_active?()
              if is_subscription_active do
                update_detailed_map_kills(map_id)
              end
            end
          rescue
            e ->
              @logger.error(Exception.message(e))
          end
        rescue
          e ->
            @logger.error(Exception.message(e))
        end
      end,
      max_concurrency: 10,
      on_timeout: :kill_task
    )
    |> Enum.each(fn _ -> :ok end)

    new_iteration = iteration + 1

    cond do
      zkill_preload_disabled ->
        # If preload is disabled, just update iteration
        {:noreply, %{state | iteration: new_iteration}}

      new_iteration >= @preload_cycle_ticks ->
        Logger.info("[ZkbDataFetcher] Triggering a fresh kill preload pass ...")
        WandererApp.Zkb.KillsPreloader.run_preload_now()
        {:noreply, %{state | iteration: 0}}

      true ->
        {:noreply, %{state | iteration: new_iteration}}
    end,
        max_concurrency: 10,
        on_timeout: :kill_task
      )
      |> Enum.each(fn _ -> :ok end)
    end

    {:noreply, state}
  end

  # Catch any async task results we aren't explicitly pattern-matching
@@ -84,7 +70,8 @@ defmodule WandererApp.Map.ZkbDataFetcher do
    |> WandererApp.Map.get_map!()
    |> Map.get(:systems, %{})
    |> Enum.into(%{}, fn {solar_system_id, _system} ->
      kills_count = WandererApp.Cache.get("zkb_kills_#{solar_system_id}") || 0
      # Read kill counts from cache (populated by WebSocket)
      kills_count = WandererApp.Cache.get("zkb:kills:#{solar_system_id}") || 0
      {solar_system_id, kills_count}
    end)
    |> maybe_broadcast_map_kills(map_id)
@@ -98,64 +85,70 @@ defmodule WandererApp.Map.ZkbDataFetcher do
      |> WandererApp.Map.get_map!()
      |> Map.get(:systems, %{})

    # Old cache data
    old_ids_map = WandererApp.Cache.get("map_#{map_id}:zkb_ids") || %{}
    old_details_map = WandererApp.Cache.get("map_#{map_id}:zkb_detailed_kills") || %{}
    # Get existing cached data - ensure it's a map
    cache_key_ids = "map:#{map_id}:zkb:ids"
    cache_key_details = "map:#{map_id}:zkb:detailed_kills"

    old_ids_map = case WandererApp.Cache.get(cache_key_ids) do
      map when is_map(map) -> map
      _ -> %{}
    end

    old_details_map = case WandererApp.Cache.get(cache_key_details) do
      map when is_map(map) -> map
      _ ->
        # Initialize with empty map and store it
        WandererApp.Cache.insert(cache_key_details, %{}, ttl: :timer.hours(@killmail_ttl_hours))
        %{}
    end

    # Build current killmail ID map from cache
    new_ids_map =
      Enum.into(systems, %{}, fn {solar_system_id, _} ->
        ids = KillsCache.get_system_killmail_ids(solar_system_id) |> MapSet.new()
        {solar_system_id, ids}
        # Get killmail IDs from cache (populated by WebSocket)
        ids = WandererApp.Cache.get("zkb:kills:list:#{solar_system_id}") || []
        {solar_system_id, MapSet.new(ids)}
      end)

    # Find systems with changed killmail lists or empty detailed kills
    changed_systems =
      new_ids_map
      |> Enum.filter(fn {system_id, new_ids_set} ->
        old_set = MapSet.new(Map.get(old_ids_map, system_id, []))
        not MapSet.equal?(new_ids_set, old_set)
        old_details = Map.get(old_details_map, system_id, [])
        # Update if IDs changed OR if we have IDs but no detailed kills
        not MapSet.equal?(new_ids_set, old_set) or
          (MapSet.size(new_ids_set) > 0 and old_details == [])
      end)
      |> Enum.map(&elem(&1, 0))

    if changed_systems == [] do
      Logger.debug(fn ->
        "[ZkbDataFetcher] No changes in detailed kills for map_id=#{map_id}"
      end)
      log_no_changes(map_id)

      # Don't overwrite existing cache data when there are no changes
      # Only initialize if cache doesn't exist
      maybe_initialize_empty_details_map(old_details_map, systems, cache_key_details)

      :ok
    else
      # Build new details for each changed system
      updated_details_map =
        Enum.reduce(changed_systems, old_details_map, fn system_id, acc ->
          kill_ids =
            new_ids_map
            |> Map.fetch!(system_id)
            |> MapSet.to_list()
      updated_details_map = build_updated_details_map(changed_systems, old_details_map, new_ids_map)

          kill_details =
            kill_ids
            |> Enum.map(&KillsCache.get_killmail/1)
            |> Enum.reject(&is_nil/1)
      # Update the ID map cache
      updated_ids_map = build_updated_ids_map(changed_systems, old_ids_map, new_ids_map)

          Map.put(acc, system_id, kill_details)
        end)

      updated_ids_map =
        Enum.reduce(changed_systems, old_ids_map, fn system_id, acc ->
          new_ids_list = new_ids_map[system_id] |> MapSet.to_list()
          Map.put(acc, system_id, new_ids_list)
        end)

      WandererApp.Cache.put("map_#{map_id}:zkb_ids", updated_ids_map,
        ttl: :timer.hours(KillsCache.killmail_ttl())
      # Store updated caches
      WandererApp.Cache.insert(cache_key_ids, updated_ids_map,
        ttl: :timer.hours(@killmail_ttl_hours)
      )

      WandererApp.Cache.put("map_#{map_id}:zkb_detailed_kills", updated_details_map,
        ttl: :timer.hours(KillsCache.killmail_ttl())
      WandererApp.Cache.insert(cache_key_details, updated_details_map,
        ttl: :timer.hours(@killmail_ttl_hours)
      )

      # Broadcast changes
      changed_data = Map.take(updated_details_map, changed_systems)

      WandererApp.Map.Server.Impl.broadcast!(map_id, :detailed_kills_updated, changed_data)
      MapServerImpl.broadcast!(map_id, :detailed_kills_updated, changed_data)

      :ok
    end
@@ -163,7 +156,7 @@ defmodule WandererApp.Map.ZkbDataFetcher do
  end

  defp maybe_broadcast_map_kills(new_kills_map, map_id) do
    {:ok, old_kills_map} = WandererApp.Cache.lookup("map_#{map_id}:zkb_kills", %{})
    {:ok, old_kills_map} = WandererApp.Cache.lookup("map:#{map_id}:zkb:kills", %{})

    # Use the union of keys from both the new and old maps
    all_system_ids = Map.keys(Map.merge(new_kills_map, old_kills_map))
@@ -181,13 +174,13 @@ defmodule WandererApp.Map.ZkbDataFetcher do
      :ok
    else
      :ok =
        WandererApp.Cache.put("map_#{map_id}:zkb_kills", new_kills_map,
        WandererApp.Cache.insert("map:#{map_id}:zkb:kills", new_kills_map,
          ttl: @store_map_kills_timeout
        )

      payload = Map.take(new_kills_map, changed_system_ids)

      WandererApp.Map.Server.Impl.broadcast!(map_id, :kills_updated, payload)
      MapServerImpl.broadcast!(map_id, :kills_updated, payload)

      :ok
    end
@@ -201,4 +194,40 @@ defmodule WandererApp.Map.ZkbDataFetcher do
      :ok
    end
  end

  defp maybe_initialize_empty_details_map(%{}, systems, cache_key_details) do
    # First time initialization - create empty structure
    initial_map = Enum.into(systems, %{}, fn {system_id, _} -> {system_id, []} end)
    WandererApp.Cache.insert(cache_key_details, initial_map, ttl: :timer.hours(@killmail_ttl_hours))
  end

  defp maybe_initialize_empty_details_map(_old_details_map, _systems, _cache_key_details), do: :ok

  defp build_updated_details_map(changed_systems, old_details_map, new_ids_map) do
    Enum.reduce(changed_systems, old_details_map, fn system_id, acc ->
      kill_details = get_kill_details_for_system(system_id, new_ids_map)
      Map.put(acc, system_id, kill_details)
    end)
  end

  defp get_kill_details_for_system(system_id, new_ids_map) do
    new_ids_map
    |> Map.fetch!(system_id)
    |> MapSet.to_list()
    |> Enum.map(&WandererApp.Cache.get("zkb:killmail:#{&1}"))
    |> Enum.reject(&is_nil/1)
  end

  defp build_updated_ids_map(changed_systems, old_ids_map, new_ids_map) do
    Enum.reduce(changed_systems, old_ids_map, fn system_id, acc ->
      new_ids_list = new_ids_map[system_id] |> MapSet.to_list()
      Map.put(acc, system_id, new_ids_list)
    end)
  end

  defp log_no_changes(map_id) do
    Logger.debug(fn ->
      "[ZkbDataFetcher] No changes in detailed kills for map_id=#{map_id}"
    end)
  end
end

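The fetcher only reads these caches; the WandererKills WebSocket client is expected to populate the per-system keys. The key scheme, as inferred from how this module uses it (an illustrative sketch, not an exhaustive list):

    # Per-system keys populated by the WebSocket client:
    WandererApp.Cache.get("zkb:kills:#{solar_system_id}")       # kill count
    WandererApp.Cache.get("zkb:kills:list:#{solar_system_id}")  # list of killmail IDs
    WandererApp.Cache.get("zkb:killmail:#{killmail_id}")        # full killmail payload

    # Per-map aggregates maintained by ZkbDataFetcher itself:
    # "map:#{map_id}:zkb:kills"           => %{system_id => count}
    # "map:#{map_id}:zkb:ids"             => %{system_id => [killmail_id]}
    # "map:#{map_id}:zkb:detailed_kills"  => %{system_id => [killmail]}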
@@ -30,7 +30,7 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
  @a4 22
  @a5 23
  @ccp4 24
  # @pochven 25
  @pochven 25
  # @zarzakh 10100

  @jita 30_000_142
@@ -51,7 +51,7 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do
    @redoubt
  ]

  @known_space [@hs, @ls, @ns]
  @known_space [@hs, @ls, @ns, @pochven]

  @prohibited_systems [@jita]
  @prohibited_system_classes [

@@ -98,7 +98,12 @@ defmodule WandererApp.Map.Server.Impl do

    WandererApp.Cache.insert("map_#{map_id}:started", true)

    # Initialize zkb cache structure to prevent timing issues
    cache_key = "map:#{map_id}:zkb:detailed_kills"
    WandererApp.Cache.insert(cache_key, %{}, ttl: :timer.hours(24))

    broadcast!(map_id, :map_server_started)
    @pubsub_client.broadcast!(WandererApp.PubSub, "maps", :map_server_started)

    :telemetry.execute([:wanderer_app, :map, :started], %{count: 1})

@@ -28,12 +28,11 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
           MapSystem.read_by_map_and_solar_system(%{
             map_id: map_id,
             solar_system_id: system_solar_id
           }),
         {:ok, %{eve_id: char_eve_id}} <- Character.get_character(char_id) do
           }) do
      do_update_signatures(
        state,
        system,
        char_eve_id,
        char_id,
        user_id,
        delete_conn?,
        added_params,
@@ -52,13 +51,24 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
  defp do_update_signatures(
         state,
         system,
         character_eve_id,
         character_id,
         user_id,
         delete_conn?,
         added_params,
         updated_params,
         removed_params
       ) do
    # Get character EVE ID for signature parsing
    character_eve_id =
      case Character.get_character(character_id) do
        {:ok, %{eve_id: eve_id}} ->
          eve_id

        _ ->
          Logger.warning("Could not get character EVE ID for character_id: #{character_id}")
          nil
      end

    # parse incoming DTOs
    added_sigs = parse_signatures(added_params, character_eve_id, system.id)
    updated_sigs = parse_signatures(updated_params, character_eve_id, system.id)
@@ -89,7 +99,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
    added_eve_ids = Enum.map(added_sigs, & &1.eve_id)

    existing_index =
      MapSystemSignature.by_system_id_all!(system.id)
      existing_all
      |> Enum.filter(&(&1.eve_id in added_eve_ids))
      |> Map.new(&{&1.eve_id, &1})

@@ -127,7 +137,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
        state.map_id,
        system.solar_system_id,
        user_id,
        character_eve_id,
        character_id,
        added_ids
      )
    end
@@ -138,7 +148,7 @@ defmodule WandererApp.Map.Server.SignaturesImpl do
        state.map_id,
        system.solar_system_id,
        user_id,
        character_eve_id,
        character_id,
        removed_ids
      )
    end

@@ -20,6 +20,18 @@ defmodule WandererApp.MapSystemRepo do
    WandererApp.Api.MapSystem.read_all_by_map(%{map_id: map_id})
  end

  def get_all_by_maps(map_ids) when is_list(map_ids) do
    # Since there's no bulk query, we need to query each map individually
    map_ids
    |> Enum.flat_map(fn map_id ->
      case get_all_by_map(map_id) do
        {:ok, systems} -> systems
        _ -> []
      end
    end)
    |> Enum.uniq_by(& &1.solar_system_id)
  end

  def get_visible_by_map(map_id) do
    WandererApp.Api.MapSystem.read_visible_by_map(%{map_id: map_id})
  end

@@ -192,7 +192,7 @@ defmodule WandererApp.Ueberauth.Strategy.Eve do
  end

  defp oauth_client_options_from_conn(conn, with_wallet, is_admin?) do
    tracking_pool = WandererApp.Env.active_tracking_pool()
    tracking_pool = WandererApp.Character.TrackingConfigUtils.get_active_pool!()

    base_options = [
      redirect_uri: callback_url(conn),

@@ -10,13 +10,8 @@ defmodule WandererApp.Esi.InitClientsTask do
  def run(_arg) do
    Logger.info("starting")

    Cachex.put(
      :esi_auth_cache,
      :active_config,
      "config_#{WandererApp.Env.active_tracking_pool()}"
    )

    cache_clients()
    WandererApp.Character.TrackingConfigUtils.update_active_tracking_pool()
  end

  defp cache_clients() do

@@ -1,291 +0,0 @@
defmodule WandererApp.Zkb.KillsPreloader do
  @moduledoc """
  On startup, kicks off two passes (quick and expanded) to preload kills data.

  There is also a `run_preload_now/0` function for manual triggering of the same logic.
  """

  use GenServer
  require Logger

  alias WandererApp.Zkb.KillsProvider
  alias WandererApp.Zkb.KillsProvider.KillsCache

  # ----------------
  # Configuration
  # ----------------

  # (1) Quick pass
  @quick_limit 1
  @quick_hours 1

  # (2) Expanded pass
  @expanded_limit 25
  @expanded_hours 24

  # How many minutes back we look for “last active” maps
  @last_active_cutoff 30

  # Default concurrency if not provided
  @default_max_concurrency 2

  @doc """
  Starts the GenServer with optional opts (like `max_concurrency`).
  """
  def start_link(opts \\ []) do
    GenServer.start_link(__MODULE__, opts, name: __MODULE__)
  end

  @doc """
  Public helper to explicitly request a fresh preload pass (both quick & expanded).
  """
  def run_preload_now() do
    send(__MODULE__, :start_preload)
  end

  @impl true
  def init(opts) do
    state = %{
      phase: :idle,
      calls_count: 0,
      max_concurrency: Keyword.get(opts, :max_concurrency, @default_max_concurrency)
    }

    # Kick off the preload passes once at startup
    send(self(), :start_preload)
    {:ok, state}
  end

  @impl true
  def handle_info(:start_preload, state) do
    # Gather last-active maps (or fallback).
    cutoff_time =
      DateTime.utc_now()
      |> DateTime.add(-@last_active_cutoff, :minute)

    last_active_maps_result = WandererApp.Api.MapState.get_last_active(cutoff_time)
    last_active_maps = resolve_last_active_maps(last_active_maps_result)
    active_maps_with_subscription = get_active_maps_with_subscription(last_active_maps)

    # Gather systems from those maps
    system_tuples = gather_visible_systems(active_maps_with_subscription)
    unique_systems = Enum.uniq(system_tuples)

    Logger.debug(fn -> "
    [KillsPreloader] Found #{length(unique_systems)} unique systems \
    across #{length(last_active_maps)} map(s)
    " end)

    # ---- QUICK PASS ----
    state_quick = %{state | phase: :quick_pass}

    {time_quick_ms, state_after_quick} =
      measure_execution_time(fn ->
        do_pass(unique_systems, :quick, @quick_hours, @quick_limit, state_quick)
      end)

    Logger.info(
      "[KillsPreloader] Phase 1 (quick) done => calls_count=#{state_after_quick.calls_count}, elapsed=#{time_quick_ms}ms"
    )

    # ---- EXPANDED PASS ----
    state_expanded = %{state_after_quick | phase: :expanded_pass}

    {time_expanded_ms, final_state} =
      measure_execution_time(fn ->
        do_pass(unique_systems, :expanded, @quick_hours, @expanded_limit, state_expanded)
      end)

    Logger.info(
      "[KillsPreloader] Phase 2 (expanded) done => calls_count=#{final_state.calls_count}, elapsed=#{time_expanded_ms}ms"
    )

    # Reset phase to :idle
    {:noreply, %{final_state | phase: :idle}}
  end

  @impl true
  def handle_info(_other, state), do: {:noreply, state}

  defp resolve_last_active_maps({:ok, []}) do
    Logger.warning("[KillsPreloader] No last-active maps found. Using fallback logic...")

    case WandererApp.Maps.get_available_maps() do
      {:ok, []} ->
        Logger.error("[KillsPreloader] Fallback: get_available_maps returned zero maps!")
        []

      {:ok, maps} ->
        # pick the newest map by updated_at
        fallback_map = Enum.max_by(maps, & &1.updated_at, fn -> nil end)
        if fallback_map, do: [fallback_map], else: []
    end
  end

  defp resolve_last_active_maps({:ok, maps}) when is_list(maps),
    do: maps

  defp resolve_last_active_maps({:error, reason}) do
    Logger.error("[KillsPreloader] Could not load last-active maps => #{inspect(reason)}")
    []
  end

  defp get_active_maps_with_subscription(maps) do
    maps
    |> Enum.filter(fn map ->
      {:ok, is_subscription_active} = map.id |> WandererApp.Map.is_subscription_active?()
      is_subscription_active
    end)
  end

  defp gather_visible_systems(maps) do
    maps
    |> Enum.flat_map(fn map_record ->
      the_map_id = Map.get(map_record, :map_id) || Map.get(map_record, :id)

      case WandererApp.MapSystemRepo.get_visible_by_map(the_map_id) do
        {:ok, systems} ->
          Enum.map(systems, fn sys -> {the_map_id, sys.solar_system_id} end)

        {:error, reason} ->
          Logger.warning(
            "[KillsPreloader] get_visible_by_map failed => map_id=#{inspect(the_map_id)}, reason=#{inspect(reason)}"
          )

          []
      end
    end)
  end

  defp do_pass(unique_systems, pass_type, hours, limit, state) do
    Logger.info(
      "[KillsPreloader] Starting #{pass_type} pass => #{length(unique_systems)} systems"
    )

    {final_state, _kills_map} =
      unique_systems
      |> Task.async_stream(
        fn {_map_id, system_id} ->
          fetch_kills_for_system(system_id, pass_type, hours, limit, state)
        end,
        max_concurrency: state.max_concurrency,
        timeout: pass_timeout_ms(pass_type)
      )
      |> Enum.reduce({state, %{}}, fn task_result, {acc_state, acc_map} ->
        reduce_task_result(pass_type, task_result, acc_state, acc_map)
      end)

    final_state
  end

  defp fetch_kills_for_system(system_id, :quick, hours, limit, state) do
    Logger.debug(fn -> "[KillsPreloader] Quick fetch => system=#{system_id}, hours=#{hours}, limit=#{limit}" end)

    case KillsProvider.Fetcher.fetch_kills_for_system(system_id, hours, state,
           limit: limit,
           force: false
         ) do
      {:ok, kills, updated_state} ->
        {:ok, system_id, kills, updated_state}

      {:error, reason, updated_state} ->
        Logger.warning(
          "[KillsPreloader] Quick fetch failed => system=#{system_id}, reason=#{inspect(reason)}"
        )

        {:error, reason, updated_state}
    end
  end

  defp fetch_kills_for_system(system_id, :expanded, hours, limit, state) do
    Logger.debug(fn -> "[KillsPreloader] Expanded fetch => system=#{system_id}, hours=#{hours}, limit=#{limit} (forcing refresh)" end)

    with {:ok, kills_1h, updated_state} <-
           KillsProvider.Fetcher.fetch_kills_for_system(system_id, hours, state,
             limit: limit,
             force: true
           ),
         {:ok, final_kills, final_state} <-
           maybe_fetch_more_if_needed(system_id, kills_1h, limit, updated_state) do
      {:ok, system_id, final_kills, final_state}
    else
      {:error, reason, updated_state} ->
        Logger.warning(
          "[KillsPreloader] Expanded fetch (#{hours}h) failed => system=#{system_id}, reason=#{inspect(reason)}"
        )

        {:error, reason, updated_state}
    end
  end

  # If we got fewer kills than `limit` from the 1h fetch, top up from 24h
  defp maybe_fetch_more_if_needed(system_id, kills_1h, limit, state) do
    if length(kills_1h) < limit do
      needed = limit - length(kills_1h)
      Logger.debug(fn -> "[KillsPreloader] Expanding to #{@expanded_hours}h => system=#{system_id}, need=#{needed} more kills" end)

      case KillsProvider.Fetcher.fetch_kills_for_system(system_id, @expanded_hours, state,
             limit: needed,
             force: true
           ) do
        {:ok, _kills_24h, updated_state2} ->
          final_kills =
            KillsCache.fetch_cached_kills(system_id)
            |> Enum.take(limit)

          {:ok, final_kills, updated_state2}

        {:error, reason2, updated_state2} ->
          Logger.warning(
            "[KillsPreloader] #{@expanded_hours}h fetch failed => system=#{system_id}, reason=#{inspect(reason2)}"
          )

          {:error, reason2, updated_state2}
      end
    else
      {:ok, kills_1h, state}
    end
  end

  defp reduce_task_result(pass_type, task_result, acc_state, acc_map) do
    case task_result do
      {:ok, {:ok, sys_id, kills, updated_state}} ->
        # Merge calls count from updated_state into acc_state
        new_state = merge_calls_count(acc_state, updated_state)
        new_map = Map.put(acc_map, sys_id, kills)
        {new_state, new_map}

      {:ok, {:error, reason, updated_state}} ->
        log_failed_task(pass_type, reason)
        new_state = merge_calls_count(acc_state, updated_state)
        {new_state, acc_map}

      {:error, reason} ->
        Logger.error("[KillsPreloader] #{pass_type} fetch task crashed => #{inspect(reason)}")
        {acc_state, acc_map}
    end
  end

  defp log_failed_task(:quick, reason),
    do: Logger.warning("[KillsPreloader] Quick fetch task failed => #{inspect(reason)}")

  defp log_failed_task(:expanded, reason),
    do: Logger.error("[KillsPreloader] Expanded fetch task failed => #{inspect(reason)}")

  defp merge_calls_count(%{calls_count: c1} = st1, %{calls_count: c2}),
    do: %{st1 | calls_count: c1 + c2}

  defp merge_calls_count(st1, _other),
    do: st1

  defp pass_timeout_ms(:quick), do: :timer.minutes(2)
  defp pass_timeout_ms(:expanded), do: :timer.minutes(5)

  defp measure_execution_time(fun) when is_function(fun, 0) do
    start = System.monotonic_time()
    result = fun.()
    finish = System.monotonic_time()
    ms = System.convert_time_unit(finish - start, :native, :millisecond)
    {ms, result}
  end
end
Some files were not shown because too many files have changed in this diff.