diff --git a/assets/js/hooks/Mapper/components/map/components/SolarSystemNode/SolarSystemKillsCounter.tsx b/assets/js/hooks/Mapper/components/map/components/SolarSystemNode/SolarSystemKillsCounter.tsx
index 57d14b1d..02e1594c 100644
--- a/assets/js/hooks/Mapper/components/map/components/SolarSystemNode/SolarSystemKillsCounter.tsx
+++ b/assets/js/hooks/Mapper/components/map/components/SolarSystemNode/SolarSystemKillsCounter.tsx
@@ -2,6 +2,7 @@ import { SystemKillsContent } from '../../../mapInterface/widgets/SystemKills/Sy
import { useKillsCounter } from '../../hooks/useKillsCounter';
import { WdTooltipWrapper } from '@/hooks/Mapper/components/ui-kit/WdTooltipWrapper';
import { WithChildren, WithClassName } from '@/hooks/Mapper/types/common';
+import { useMemo } from 'react';
type TooltipSize = 'xs' | 'sm' | 'md' | 'lg';
@@ -17,17 +18,44 @@ type KillsBookmarkTooltipProps = {
export const KillsCounter = ({ killsCount, systemId, className, children, size = 'xs' }: KillsBookmarkTooltipProps) => {
const { isLoading, kills: detailedKills, systemNameMap } = useKillsCounter({ realSystemId: systemId });
- if (!killsCount || detailedKills.length === 0 || !systemId || isLoading) return null;
+ // Limit the kills shown to match the killsCount parameter
+ const limitedKills = useMemo(() => {
+ if (!detailedKills || detailedKills.length === 0) return [];
+ return detailedKills.slice(0, killsCount);
+ }, [detailedKills, killsCount]);
+
+ if (!killsCount || limitedKills.length === 0 || !systemId || isLoading) return null;
+
+ // Calculate a reasonable height for the tooltip based on the number of kills
+ // but cap it to avoid excessively large tooltips
+ const maxKillsToShow = Math.min(limitedKills.length, 20);
+ const tooltipHeight = Math.max(200, Math.min(500, maxKillsToShow * 35));
const tooltipContent = (
-    <SystemKillsContent kills={detailedKills} systemNameMap={systemNameMap} />
+    <div style={{ maxHeight: tooltipHeight, overflowY: 'auto' }}>
+      <div>
+        System Kills ({limitedKills.length})
+      </div>
+      <SystemKillsContent kills={limitedKills} systemNameMap={systemNameMap} />
+    </div>
);
diff --git a/assets/js/hooks/Mapper/components/map/hooks/useKillsCounter.ts b/assets/js/hooks/Mapper/components/map/hooks/useKillsCounter.ts
index 4dbb8863..2a50fb59 100644
--- a/assets/js/hooks/Mapper/components/map/hooks/useKillsCounter.ts
+++ b/assets/js/hooks/Mapper/components/map/hooks/useKillsCounter.ts
@@ -27,13 +27,12 @@ export function useKillsCounter({ realSystemId }: UseKillsCounterProps) {
const filteredKills = useMemo(() => {
if (!allKills || allKills.length === 0) return [];
- return [...allKills]
- .sort((a, b) => {
- const aTime = a.kill_time ? new Date(a.kill_time).getTime() : 0;
- const bTime = b.kill_time ? new Date(b.kill_time).getTime() : 0;
- return bTime - aTime;
- })
- .slice(0, 10);
+ // Sort kills by time, most recent first, but don't limit the number of kills
+ return [...allKills].sort((a, b) => {
+ const aTime = a.kill_time ? new Date(a.kill_time).getTime() : 0;
+ const bTime = b.kill_time ? new Date(b.kill_time).getTime() : 0;
+ return bTime - aTime;
+ });
}, [allKills]);
return {
diff --git a/assets/js/hooks/Mapper/components/mapInterface/widgets/SystemKills/hooks/useSystemKills.ts b/assets/js/hooks/Mapper/components/mapInterface/widgets/SystemKills/hooks/useSystemKills.ts
index 06c4c7ae..7d751b6d 100644
--- a/assets/js/hooks/Mapper/components/mapInterface/widgets/SystemKills/hooks/useSystemKills.ts
+++ b/assets/js/hooks/Mapper/components/mapInterface/widgets/SystemKills/hooks/useSystemKills.ts
@@ -14,11 +14,7 @@ interface UseSystemKillsProps {
sinceHours?: number;
}
-function combineKills(
- existing: DetailedKill[],
- incoming: DetailedKill[],
- sinceHours: number
-): DetailedKill[] {
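+// Merges existing and incoming kills into one map keyed by kill id (deduping), dropping entries older than the sinceHours cutoff.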
+function combineKills(existing: DetailedKill[], incoming: DetailedKill[], sinceHours: number): DetailedKill[] {
const cutoff = Date.now() - sinceHours * 60 * 60 * 1000;
const byId: Record<string, DetailedKill> = {};
@@ -37,27 +33,25 @@ interface DetailedKillsEvent extends MapEvent {
payload: Record<string, DetailedKill[]>;
}
-export function useSystemKills({
- systemId,
- outCommand,
- showAllVisible = false,
- sinceHours = 24,
-}: UseSystemKillsProps) {
+export function useSystemKills({ systemId, outCommand, showAllVisible = false, sinceHours = 24 }: UseSystemKillsProps) {
const { data, update } = useMapRootState();
const { detailedKills = {}, systems = [] } = data;
const [settings] = useKillsWidgetSettings();
const excludedSystems = settings.excludedSystems;
- const updateDetailedKills = useCallback((newKillsMap: Record<string, DetailedKill[]>) => {
- update((prev) => {
- const oldKills = prev.detailedKills ?? {};
- const updated = { ...oldKills };
- for (const [sid, killsArr] of Object.entries(newKillsMap)) {
- updated[sid] = killsArr;
- }
- return { ...prev, detailedKills: updated };
- }, true);
- }, [update]);
+ const updateDetailedKills = useCallback(
+ (newKillsMap: Record<string, DetailedKill[]>) => {
+ update(prev => {
+ const oldKills = prev.detailedKills ?? {};
+ const updated = { ...oldKills };
+ for (const [sid, killsArr] of Object.entries(newKillsMap)) {
+ updated[sid] = killsArr;
+ }
+ return { ...prev, detailedKills: updated };
+ }, true);
+ },
+ [update],
+ );
useMapEventListener((event: MapEvent) => {
if (event.name === Commands.detailedKillsUpdated) {
@@ -73,77 +67,82 @@ export function useSystemKills({
const effectiveSystemIds = useMemo(() => {
if (showAllVisible) {
- return systems.map((s) => s.id).filter((id) => !excludedSystems.includes(Number(id)));
+ return systems.map(s => s.id).filter(id => !excludedSystems.includes(Number(id)));
}
- return systems.map((s) => s.id);
+ return systems.map(s => s.id);
}, [systems, excludedSystems, showAllVisible]);
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState<string | null>(null);
const didFallbackFetch = useRef(Object.keys(detailedKills).length !== 0);
- const mergeKillsIntoGlobal = useCallback((killsMap: Record<string, DetailedKill[]>) => {
- update((prev) => {
- const oldMap = prev.detailedKills ?? {};
- const updated: Record<string, DetailedKill[]> = { ...oldMap };
+ const mergeKillsIntoGlobal = useCallback(
+ (killsMap: Record<string, DetailedKill[]>) => {
+ update(prev => {
+ const oldMap = prev.detailedKills ?? {};
+ const updated: Record<string, DetailedKill[]> = { ...oldMap };
- for (const [sid, newKills] of Object.entries(killsMap)) {
- const existing = updated[sid] ?? [];
- const combined = combineKills(existing, newKills, sinceHours);
- updated[sid] = combined;
- }
+ for (const [sid, newKills] of Object.entries(killsMap)) {
+ const existing = updated[sid] ?? [];
+ const combined = combineKills(existing, newKills, sinceHours);
+ updated[sid] = combined;
+ }
- return { ...prev, detailedKills: updated };
- });
- }, [update, sinceHours]);
-
- const fetchKills = useCallback(async (forceFallback = false) => {
- setIsLoading(true);
- setError(null);
-
- try {
- let eventType: OutCommand;
- let requestData: Record<string, unknown>;
-
- if (showAllVisible || forceFallback) {
- eventType = OutCommand.getSystemsKills;
- requestData = {
- system_ids: effectiveSystemIds,
- since_hours: sinceHours,
- };
- } else if (systemId) {
- eventType = OutCommand.getSystemKills;
- requestData = {
- system_id: systemId,
- since_hours: sinceHours,
- };
- } else {
- setIsLoading(false);
- return;
- }
-
- const resp = await outCommand({
- type: eventType,
- data: requestData,
+ return { ...prev, detailedKills: updated };
});
+ },
+ [update, sinceHours],
+ );
- if (resp?.kills) {
- const arr = resp.kills as DetailedKill[];
- const sid = systemId ?? 'unknown';
- mergeKillsIntoGlobal({ [sid]: arr });
+ const fetchKills = useCallback(
+ async (forceFallback = false) => {
+ setIsLoading(true);
+ setError(null);
+
+ try {
+ let eventType: OutCommand;
+ let requestData: Record<string, unknown>;
+
+ if (showAllVisible || forceFallback) {
+ eventType = OutCommand.getSystemsKills;
+ requestData = {
+ system_ids: effectiveSystemIds,
+ since_hours: sinceHours,
+ };
+ } else if (systemId) {
+ eventType = OutCommand.getSystemKills;
+ requestData = {
+ system_id: systemId,
+ since_hours: sinceHours,
+ };
+ } else {
+ setIsLoading(false);
+ return;
+ }
+
+ const resp = await outCommand({
+ type: eventType,
+ data: requestData,
+ });
+
+ if (resp?.kills) {
+ const arr = resp.kills as DetailedKill[];
+ const sid = systemId ?? 'unknown';
+ mergeKillsIntoGlobal({ [sid]: arr });
+ } else if (resp?.systems_kills) {
+ mergeKillsIntoGlobal(resp.systems_kills as Record<string, DetailedKill[]>);
+ } else {
+ console.warn('[useSystemKills] Unexpected kills response =>', resp);
+ }
+ } catch (err) {
+ console.error('[useSystemKills] Failed to fetch kills:', err);
+ setError(err instanceof Error ? err.message : 'Error fetching kills');
+ } finally {
+ setIsLoading(false);
}
- else if (resp?.systems_kills) {
- mergeKillsIntoGlobal(resp.systems_kills as Record<string, DetailedKill[]>);
- } else {
- console.warn('[useSystemKills] Unexpected kills response =>', resp);
- }
- } catch (err) {
- console.error('[useSystemKills] Failed to fetch kills:', err);
- setError(err instanceof Error ? err.message : 'Error fetching kills');
- } finally {
- setIsLoading(false);
- }
- }, [showAllVisible, systemId, outCommand, effectiveSystemIds, sinceHours, mergeKillsIntoGlobal]);
+ },
+ [showAllVisible, systemId, outCommand, effectiveSystemIds, sinceHours, mergeKillsIntoGlobal],
+ );
const debouncedFetchKills = useMemo(
() =>
@@ -156,11 +155,11 @@ export function useSystemKills({
const finalKills = useMemo(() => {
if (showAllVisible) {
- return effectiveSystemIds.flatMap((sid) => detailedKills[sid] ?? []);
+ return effectiveSystemIds.flatMap(sid => detailedKills[sid] ?? []);
} else if (systemId) {
return detailedKills[systemId] ?? [];
} else if (didFallbackFetch.current) {
- return effectiveSystemIds.flatMap((sid) => detailedKills[sid] ?? []);
+ return effectiveSystemIds.flatMap(sid => detailedKills[sid] ?? []);
}
return [];
}, [showAllVisible, systemId, effectiveSystemIds, detailedKills]);
@@ -171,7 +170,7 @@ export function useSystemKills({
if (!systemId && !showAllVisible && !didFallbackFetch.current) {
didFallbackFetch.current = true;
debouncedFetchKills.cancel();
- fetchKills(true);
+ fetchKills(true);
}
}, [systemId, showAllVisible, debouncedFetchKills, fetchKills]);
diff --git a/lib/wanderer_app/utils/http_util.ex b/lib/wanderer_app/utils/http_util.ex
new file mode 100644
index 00000000..f3bce668
--- /dev/null
+++ b/lib/wanderer_app/utils/http_util.ex
@@ -0,0 +1,18 @@
+defmodule WandererApp.Utils.HttpUtil do
+ @moduledoc """
+ Utility functions for HTTP operations and error handling.
+ """
+
+ @doc """
+ Determines if an HTTP error is retriable.
+
+ Returns `true` for common transient errors like timeouts and server errors (500, 502, 503, 504).
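+
+  ## Examples
+
+      iex> WandererApp.Utils.HttpUtil.retriable_error?(:timeout)
+      true
+
+      iex> WandererApp.Utils.HttpUtil.retriable_error?("Unexpected status: 503")
+      true
+
+      iex> WandererApp.Utils.HttpUtil.retriable_error?(:not_found)
+      false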
+ """
+ def retriable_error?(:timeout), do: true
+ def retriable_error?("Unexpected status: 500"), do: true
+ def retriable_error?("Unexpected status: 502"), do: true
+ def retriable_error?("Unexpected status: 503"), do: true
+ def retriable_error?("Unexpected status: 504"), do: true
+ def retriable_error?("Request failed"), do: true
+ def retriable_error?(_), do: false
+end
diff --git a/lib/wanderer_app/zkb/zkills_provider/fetcher.ex b/lib/wanderer_app/zkb/zkills_provider/fetcher.ex
index 2eb9c2dd..ffbcda1f 100644
--- a/lib/wanderer_app/zkb/zkills_provider/fetcher.ex
+++ b/lib/wanderer_app/zkb/zkills_provider/fetcher.ex
@@ -7,6 +7,7 @@ defmodule WandererApp.Zkb.KillsProvider.Fetcher do
use Retry
alias WandererApp.Zkb.KillsProvider.{Parser, KillsCache, ZkbApi}
+ alias WandererApp.Utils.HttpUtil
@page_size 200
@max_pages 2
@@ -190,9 +191,28 @@ defmodule WandererApp.Zkb.KillsProvider.Fetcher do
defp parse_partial(_other, _cutoff_dt), do: :skip
defp fetch_full_killmail(k_id, k_hash) do
- case WandererApp.Esi.get_killmail(k_id, k_hash) do
- {:ok, full_km} -> {:ok, full_km}
- {:error, reason} -> {:error, reason}
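+ # Retry transient ESI failures (timeouts, 5xx) with jittered exponential backoff: 300ms base delay, 5s cap per delay, 30s total budget.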
+ retry with: exponential_backoff(300) |> randomize() |> cap(5_000) |> expiry(30_000),
+   rescue_only: [RuntimeError] do
+ case WandererApp.Esi.get_killmail(k_id, k_hash) do
+ {:ok, full_km} ->
+ {:ok, full_km}
+
+ {:error, :timeout} ->
+ Logger.warning("[Fetcher] ESI get_killmail timeout => kill_id=#{k_id}, retrying...")
+ raise "ESI timeout, will retry"
+
+ {:error, :not_found} ->
+ Logger.warning("[Fetcher] ESI get_killmail not_found => kill_id=#{k_id}")
+ {:error, :not_found}
+
+ {:error, reason} ->
+ if HttpUtil.retriable_error?(reason) do
+ Logger.warning("[Fetcher] ESI get_killmail retriable error => kill_id=#{k_id}, reason=#{inspect(reason)}")
+ raise "ESI error: #{inspect(reason)}, will retry"
+ else
+ Logger.warning("[Fetcher] ESI get_killmail failed => kill_id=#{k_id}, reason=#{inspect(reason)}")
+ {:error, reason}
+ end
+ end
end
end
diff --git a/lib/wanderer_app/zkb/zkills_provider/parser.ex b/lib/wanderer_app/zkb/zkills_provider/parser.ex
index 12458474..7c64ab2a 100644
--- a/lib/wanderer_app/zkb/zkills_provider/parser.ex
+++ b/lib/wanderer_app/zkb/zkills_provider/parser.ex
@@ -10,6 +10,11 @@ defmodule WandererApp.Zkb.KillsProvider.Parser do
require Logger
alias WandererApp.Zkb.KillsProvider.KillsCache
+ alias WandererApp.Utils.HttpUtil
+ use Retry
+
@doc """
Merges the 'partial' from zKB and the 'full' killmail from ESI, checks its time
@@ -254,12 +259,33 @@ defmodule WandererApp.Zkb.KillsProvider.Parser do
nil -> km
0 -> km
eve_id ->
- case WandererApp.Esi.get_character_info(eve_id) do
- {:ok, %{"name" => char_name}} ->
- Map.put(km, name_key, char_name)
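+ # Retry transient ESI character lookups with jittered backoff (200ms base, 2s cap, 10s budget); non-retriable failures skip enrichment.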
+ result =
+   retry with: exponential_backoff(200) |> randomize() |> cap(2_000) |> expiry(10_000),
+     rescue_only: [RuntimeError] do
+ case WandererApp.Esi.get_character_info(eve_id) do
+ {:ok, %{"name" => char_name}} ->
+ {:ok, char_name}
- _ ->
- km
+ {:error, :timeout} ->
+ Logger.debug(fn -> "[Parser] Character info timeout, retrying => id=#{eve_id}" end)
+ raise "Character info timeout, will retry"
+
+ {:error, :not_found} ->
+ Logger.debug(fn -> "[Parser] Character not found => id=#{eve_id}" end)
+ :skip
+
+ {:error, reason} ->
+ if HttpUtil.retriable_error?(reason) do
+ Logger.debug(fn -> "[Parser] Character info retriable error => id=#{eve_id}, reason=#{inspect(reason)}" end)
+ raise "Character info error: #{inspect(reason)}, will retry"
+ else
+ Logger.debug(fn -> "[Parser] Character info failed => id=#{eve_id}, reason=#{inspect(reason)}" end)
+ :skip
+ end
+ end
+ end
+
+ case result do
+ {:ok, char_name} -> Map.put(km, name_key, char_name)
+ _ -> km
end
end
end
@@ -269,18 +295,36 @@ defmodule WandererApp.Zkb.KillsProvider.Parser do
nil -> km
0 -> km
corp_id ->
- case WandererApp.Esi.get_corporation_info(corp_id) do
- {:ok, %{"ticker" => ticker, "name" => corp_name}} ->
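+ # Same jittered-backoff retry policy as character enrichment; :skip leaves the killmail without corp details.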
+ result =
+   retry with: exponential_backoff(200) |> randomize() |> cap(2_000) |> expiry(10_000),
+     rescue_only: [RuntimeError] do
+ case WandererApp.Esi.get_corporation_info(corp_id) do
+ {:ok, %{"ticker" => ticker, "name" => corp_name}} ->
+ {:ok, {ticker, corp_name}}
+
+ {:error, :timeout} ->
+ Logger.debug(fn -> "[Parser] Corporation info timeout, retrying => id=#{corp_id}" end)
+ raise "Corporation info timeout, will retry"
+
+ {:error, :not_found} ->
+ Logger.debug(fn -> "[Parser] Corporation not found => id=#{corp_id}" end)
+ :skip
+
+ {:error, reason} ->
+ if HttpUtil.retriable_error?(reason) do
+ Logger.debug(fn -> "[Parser] Corporation info retriable error => id=#{corp_id}, reason=#{inspect(reason)}" end)
+ raise "Corporation info error: #{inspect(reason)}, will retry"
+ else
+ Logger.warning("[Parser] Failed to fetch corp info: ID=#{corp_id}, reason=#{inspect(reason)}")
+ :skip
+ end
+ end
+ end
+
+ case result do
+ {:ok, {ticker, corp_name}} ->
km
|> Map.put(ticker_key, ticker)
|> Map.put(name_key, corp_name)
-
- {:error, reason} ->
- Logger.warning("[Parser] Failed to fetch corp info: ID=#{corp_id}, reason=#{inspect(reason)}")
- km
-
- _ ->
- km
+ _ -> km
end
end
end
@@ -290,14 +334,36 @@ defmodule WandererApp.Zkb.KillsProvider.Parser do
nil -> km
0 -> km
alliance_id ->
- case WandererApp.Esi.get_alliance_info(alliance_id) do
- {:ok, %{"ticker" => alliance_ticker, "name" => alliance_name}} ->
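+ # Same jittered-backoff retry policy; on :skip the killmail is returned unmodified.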
+ result =
+   retry with: exponential_backoff(200) |> randomize() |> cap(2_000) |> expiry(10_000),
+     rescue_only: [RuntimeError] do
+ case WandererApp.Esi.get_alliance_info(alliance_id) do
+ {:ok, %{"ticker" => alliance_ticker, "name" => alliance_name}} ->
+ {:ok, {alliance_ticker, alliance_name}}
+
+ {:error, :timeout} ->
+ Logger.debug(fn -> "[Parser] Alliance info timeout, retrying => id=#{alliance_id}" end)
+ raise "Alliance info timeout, will retry"
+
+ {:error, :not_found} ->
+ Logger.debug(fn -> "[Parser] Alliance not found => id=#{alliance_id}" end)
+ :skip
+
+ {:error, reason} ->
+ if HttpUtil.retriable_error?(reason) do
+ Logger.debug(fn -> "[Parser] Alliance info retriable error => id=#{alliance_id}, reason=#{inspect(reason)}" end)
+ raise "Alliance info error: #{inspect(reason)}, will retry"
+ else
+ Logger.debug(fn -> "[Parser] Alliance info failed => id=#{alliance_id}, reason=#{inspect(reason)}" end)
+ :skip
+ end
+ end
+ end
+
+ case result do
+ {:ok, {alliance_ticker, alliance_name}} ->
km
|> Map.put(ticker_key, alliance_ticker)
|> Map.put(name_key, alliance_name)
-
- _ ->
- km
+ _ -> km
end
end
end
@@ -307,13 +373,31 @@ defmodule WandererApp.Zkb.KillsProvider.Parser do
nil -> km
0 -> km
type_id ->
- case WandererApp.CachedInfo.get_ship_type(type_id) do
- {:ok, nil} -> km
- {:ok, %{name: ship_name}} -> Map.put(km, name_key, ship_name)
- {:error, reason} ->
- Logger.warning("[Parser] Failed to fetch ship type: ID=#{type_id}, reason=#{inspect(reason)}")
- km
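+ # Ship-type lookups use the same jittered-backoff retry policy as the other enrichment calls.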
+ result =
+   retry with: exponential_backoff(200) |> randomize() |> cap(2_000) |> expiry(10_000),
+     rescue_only: [RuntimeError] do
+ case WandererApp.CachedInfo.get_ship_type(type_id) do
+ {:ok, nil} -> :skip
+ {:ok, %{name: ship_name}} -> {:ok, ship_name}
+ {:error, :timeout} ->
+ Logger.debug(fn -> "[Parser] Ship type timeout, retrying => id=#{type_id}" end)
+ raise "Ship type timeout, will retry"
+ {:error, :not_found} ->
+ Logger.debug(fn -> "[Parser] Ship type not found => id=#{type_id}" end)
+ :skip
+
+ {:error, reason} ->
+ if HttpUtil.retriable_error?(reason) do
+ Logger.debug(fn -> "[Parser] Ship type retriable error => id=#{type_id}, reason=#{inspect(reason)}" end)
+ raise "Ship type error: #{inspect(reason)}, will retry"
+ else
+ Logger.warning("[Parser] Failed to fetch ship type: ID=#{type_id}, reason=#{inspect(reason)}")
+ :skip
+ end
+ end
+ end
+
+ case result do
+ {:ok, ship_name} -> Map.put(km, name_key, ship_name)
_ -> km
end
end
diff --git a/lib/wanderer_app/zkb/zkills_provider/websocket.ex b/lib/wanderer_app/zkb/zkills_provider/websocket.ex
index c5e3b979..0a8faa29 100644
--- a/lib/wanderer_app/zkb/zkills_provider/websocket.ex
+++ b/lib/wanderer_app/zkb/zkills_provider/websocket.ex
@@ -7,8 +7,11 @@ defmodule WandererApp.Zkb.KillsProvider.Websocket do
require Logger
alias WandererApp.Zkb.KillsProvider.Parser
alias WandererApp.Esi
+ alias WandererApp.Utils.HttpUtil
+ use Retry
@heartbeat_interval 1_000
# Called by `KillsProvider.handle_connect`
def handle_connect(_status, _headers, %{connected: _} = state) do
@@ -69,14 +72,39 @@ defmodule WandererApp.Zkb.KillsProvider.Websocket do
# The partial from zKillboard has killmail_id + zkb.hash, but no time/victim/attackers
defp parse_and_store_zkb_partial(%{"killmail_id" => kill_id, "zkb" => %{"hash" => kill_hash}} = partial) do
Logger.debug(fn -> "[KillsProvider.Websocket] parse_and_store_zkb_partial => kill_id=#{kill_id}" end)
- case Esi.get_killmail(kill_id, kill_hash) do
- {:ok, full_esi_data} ->
- # Merge partial zKB fields (like totalValue) onto ESI data
- enriched = Map.merge(full_esi_data, %{"zkb" => partial["zkb"]})
- Parser.parse_and_store_killmail(enriched)
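+ # Retry with the same policy as Fetcher.fetch_full_killmail: jittered exponential backoff, 300ms base, 5s cap, 30s budget.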
+ result =
+   retry with: exponential_backoff(300) |> randomize() |> cap(5_000) |> expiry(30_000),
+     rescue_only: [RuntimeError] do
+ case Esi.get_killmail(kill_id, kill_hash) do
+ {:ok, full_esi_data} ->
+ # Merge partial zKB fields (like totalValue) onto ESI data
+ enriched = Map.merge(full_esi_data, %{"zkb" => partial["zkb"]})
+ Parser.parse_and_store_killmail(enriched)
+ :ok
+
+ {:error, :timeout} ->
+ Logger.warning("[KillsProvider.Websocket] ESI get_killmail timeout => kill_id=#{kill_id}, retrying...")
+ raise "ESI timeout, will retry"
+
+ {:error, :not_found} ->
+ Logger.warning("[KillsProvider.Websocket] ESI get_killmail not_found => kill_id=#{kill_id}")
+ :skip
+
+ {:error, reason} ->
+ if HttpUtil.retriable_error?(reason) do
+ Logger.warning("[KillsProvider.Websocket] ESI get_killmail retriable error => kill_id=#{kill_id}, reason=#{inspect(reason)}")
+ raise "ESI error: #{inspect(reason)}, will retry"
+ else
+ Logger.warning("[KillsProvider.Websocket] ESI get_killmail failed => kill_id=#{kill_id}, reason=#{inspect(reason)}")
+ :skip
+ end
+ end
+ end
+
+ case result do
+ :ok -> :ok
+ :skip -> :skip
{:error, reason} ->
- Logger.warning("[KillsProvider.Websocket] ESI get_killmail failed => kill_id=#{kill_id}, reason=#{inspect(reason)}")
+ Logger.error("[KillsProvider.Websocket] ESI get_killmail exhausted retries => kill_id=#{kill_id}, reason=#{inspect(reason)}")
:skip
end
end