diff --git a/assets/static/images/news/06-21-webhooks/webhooks-hero.png b/assets/static/images/news/06-21-webhooks/webhooks-hero.png new file mode 100755 index 00000000..70419793 Binary files /dev/null and b/assets/static/images/news/06-21-webhooks/webhooks-hero.png differ diff --git a/lib/wanderer_app/api.ex b/lib/wanderer_app/api.ex index e3148daa..3687e321 100644 --- a/lib/wanderer_app/api.ex +++ b/lib/wanderer_app/api.ex @@ -30,5 +30,6 @@ defmodule WandererApp.Api do resource WandererApp.Api.License resource WandererApp.Api.MapPing resource WandererApp.Api.MapInvite + resource WandererApp.Api.MapWebhookSubscription end end diff --git a/lib/wanderer_app/api/map_webhook_subscription.ex b/lib/wanderer_app/api/map_webhook_subscription.ex new file mode 100644 index 00000000..3ee5a9e3 --- /dev/null +++ b/lib/wanderer_app/api/map_webhook_subscription.ex @@ -0,0 +1,258 @@ +defmodule WandererApp.Api.MapWebhookSubscription do + @moduledoc """ + Ash resource for managing webhook subscriptions for map events. + + Stores webhook endpoint configurations that receive HTTP POST notifications + when events occur on a specific map. + """ + + use Ash.Resource, + domain: WandererApp.Api, + data_layer: AshPostgres.DataLayer, + extensions: [AshCloak] + + postgres do + repo(WandererApp.Repo) + table("map_webhook_subscriptions_v1") + end + + cloak do + vault(WandererApp.Vault) + + attributes([:secret]) + end + + code_interface do + define(:create, action: :create) + define(:update, action: :update) + define(:destroy, action: :destroy) + + define(:by_id, + get_by: [:id], + action: :read + ) + + define(:by_map, action: :by_map) + define(:active_by_map, action: :active_by_map) + define(:rotate_secret, action: :rotate_secret) + end + + actions do + default_accept [ + :map_id, + :url, + :events, + :active? + ] + + defaults [:read, :update, :destroy] + + read :by_map do + argument :map_id, :uuid, allow_nil?: false + filter expr(map_id == ^arg(:map_id)) + prepare build(sort: [inserted_at: :desc]) + end + + read :active_by_map do + argument :map_id, :uuid, allow_nil?: false + filter expr(map_id == ^arg(:map_id) and active? == true) + prepare build(sort: [inserted_at: :desc]) + end + + create :create do + accept [ + :map_id, + :url, + :events, + :active? + ] + + # Validate webhook URL format + change fn changeset, _context -> + case Ash.Changeset.get_attribute(changeset, :url) do + nil -> + changeset + + url -> + case validate_webhook_url_format(url) do + :ok -> + changeset + + {:error, message} -> + Ash.Changeset.add_error(changeset, field: :url, message: message) + end + end + end + + # Validate events list + change fn changeset, _context -> + case Ash.Changeset.get_attribute(changeset, :events) do + nil -> + changeset + + events when is_list(events) -> + case validate_events_list(events) do + :ok -> + changeset + + {:error, message} -> + Ash.Changeset.add_error(changeset, field: :events, message: message) + end + + _ -> + changeset + end + end + + # Generate secret on creation + change fn changeset, _context -> + secret = generate_webhook_secret() + Ash.Changeset.change_attribute(changeset, :secret, secret) + end + end + + update :rotate_secret do + accept [] + require_atomic? 
false + + change fn changeset, _context -> + new_secret = generate_webhook_secret() + Ash.Changeset.change_attribute(changeset, :secret, new_secret) + end + end + end + + + validations do + validate present(:url), message: "URL is required" + validate present(:events), message: "Events array is required" + validate present(:map_id), message: "Map ID is required" + end + + attributes do + uuid_primary_key :id + + attribute :map_id, :uuid do + allow_nil? false + end + + attribute :url, :string do + allow_nil? false + constraints max_length: 2000 # 2KB limit as per security requirements + end + + attribute :events, {:array, :string} do + allow_nil? false + default [] + constraints [ + min_length: 1, + max_length: 50, # Reasonable limit on number of event types + items: [max_length: 100] # Max length per event type + ] + end + + attribute :secret, :string do + allow_nil? false + sensitive? true # Hide in logs and API responses + end + + attribute :active?, :boolean do + allow_nil? false + default true + end + + # Delivery tracking fields + attribute :last_delivery_at, :utc_datetime do + allow_nil? true + end + + attribute :last_error, :string do + allow_nil? true + constraints max_length: 1000 + end + + attribute :last_error_at, :utc_datetime do + allow_nil? true + end + + attribute :consecutive_failures, :integer do + allow_nil? false + default 0 + end + + create_timestamp(:inserted_at) + update_timestamp(:updated_at) + end + + relationships do + belongs_to :map, WandererApp.Api.Map do + source_attribute :map_id + destination_attribute :id + attribute_writable? true + end + end + + identities do + # Allow multiple webhooks per map, but prevent duplicate URLs per map + identity :unique_url_per_map, [:map_id, :url] + end + + # Private helper functions + + defp generate_webhook_secret do + :crypto.strong_rand_bytes(32) |> Base.encode64() + end + + defp validate_webhook_url_format(url) do + uri = URI.parse(url) + + cond do + uri.scheme != "https" -> + {:error, "Webhook URL must use HTTPS"} + + uri.host == nil -> + {:error, "Webhook URL must have a valid host"} + + uri.host in ["localhost", "127.0.0.1", "0.0.0.0"] -> + {:error, "Webhook URL cannot use localhost or loopback addresses"} + + String.starts_with?(uri.host, "192.168.") or String.starts_with?(uri.host, "10.") or is_private_ip_172_range?(uri.host) -> + {:error, "Webhook URL cannot use private network addresses"} + + byte_size(url) > 2000 -> + {:error, "Webhook URL cannot exceed 2000 characters"} + + true -> + :ok + end + end + + defp validate_events_list(events) do + alias WandererApp.ExternalEvents.Event + + # Get valid event types as strings + valid_event_strings = Event.supported_event_types() + |> Enum.map(&Atom.to_string/1) + + # Add wildcard as valid option + valid_events = ["*" | valid_event_strings] + + invalid_events = Enum.reject(events, fn event -> event in valid_events end) + + if Enum.empty?(invalid_events) do + :ok + else + {:error, "Invalid event types: #{Enum.join(invalid_events, ", ")}"} + end + end + + # Check if IP is in the 172.16.0.0/12 range (172.16.0.0 to 172.31.255.255) + defp is_private_ip_172_range?(host) do + case :inet.parse_address(String.to_charlist(host)) do + {:ok, {172, b, _, _}} when b >= 16 and b <= 31 -> + true + _ -> + false + end + end +end \ No newline at end of file diff --git a/lib/wanderer_app/application.ex b/lib/wanderer_app/application.ex index aa32ef63..25223926 100644 --- a/lib/wanderer_app/application.ex +++ b/lib/wanderer_app/application.ex @@ -56,6 +56,8 @@ defmodule 
WandererApp.Application do {WandererApp.Character.TrackerPoolSupervisor, []}, WandererApp.Character.TrackerManager, WandererApp.Map.Manager, + WandererApp.ExternalEvents.MapEventRelay, + WandererApp.ExternalEvents.WebhookDispatcher, WandererAppWeb.Presence, WandererAppWeb.Endpoint ] ++ diff --git a/lib/wanderer_app/external_events/event.ex b/lib/wanderer_app/external_events/event.ex new file mode 100644 index 00000000..888daf02 --- /dev/null +++ b/lib/wanderer_app/external_events/event.ex @@ -0,0 +1,170 @@ +defmodule WandererApp.ExternalEvents.Event do + @moduledoc """ + Event struct for external webhook and WebSocket delivery. + + This is completely separate from the internal PubSub event system + and is only used for external client notifications. + """ + + @type event_type :: + :add_system | + :deleted_system | + :system_renamed | + :system_metadata_changed | + :signatures_updated | + :signature_added | + :signature_removed | + :connection_added | + :connection_removed | + :connection_updated | + :character_added | + :character_removed | + :character_updated | + :map_kill + + @type t :: %__MODULE__{ + id: String.t(), # ULID for ordering + map_id: String.t(), # Map identifier + type: event_type(), # Event type + payload: map(), # Event-specific data + timestamp: DateTime.t() # When the event occurred + } + + defstruct [:id, :map_id, :type, :payload, :timestamp] + + @doc """ + Creates a new external event with ULID for ordering. + + Validates that the event_type is supported before creating the event. + """ + @spec new(String.t(), event_type(), map()) :: t() | {:error, :invalid_event_type} + def new(map_id, event_type, payload) when is_binary(map_id) and is_map(payload) do + if valid_event_type?(event_type) do + %__MODULE__{ + id: Ulid.generate(System.system_time(:millisecond)), + map_id: map_id, + type: event_type, + payload: payload, + timestamp: DateTime.utc_now() + } + else + raise ArgumentError, "Invalid event type: #{inspect(event_type)}. Must be one of: #{inspect(supported_event_types())}" + end + end + + @doc """ + Converts an event to JSON format for delivery. 
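+
+  All keys are strings and the timestamp is ISO8601. The exact payload fields
+  vary by event type; the map below is an illustrative sketch (the `id` is a
+  generated ULID):
+
+      %{
+        "id" => "01J...",
+        "type" => "add_system",
+        "map_id" => "map-uuid",
+        "timestamp" => "2025-01-20T12:34:56Z",
+        "payload" => %{"solar_system_id" => 31000199, "name" => "J123456"}
+      }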
+ """ + @spec to_json(t()) :: map() + def to_json(%__MODULE__{} = event) do + %{ + "id" => event.id, + "type" => to_string(event.type), + "map_id" => event.map_id, + "timestamp" => DateTime.to_iso8601(event.timestamp), + "payload" => serialize_payload(event.payload) + } + end + + # Convert Ash structs and other complex types to plain maps + defp serialize_payload(payload) when is_struct(payload) do + serialize_payload(payload, MapSet.new()) + end + + defp serialize_payload(payload) when is_map(payload) do + serialize_payload(payload, MapSet.new()) + end + + # Define allowlisted fields for different struct types + @system_fields [:id, :solar_system_id, :name, :position_x, :position_y, :visible, :locked] + @character_fields [:id, :character_id, :character_eve_id, :name, :corporation_id, :alliance_id, :ship_type_id, :online] + @connection_fields [:id, :source_id, :target_id, :connection_type, :time_status, :mass_status, :ship_size] + @signature_fields [:id, :signature_id, :name, :type, :group] + + # Overloaded versions with visited tracking + defp serialize_payload(payload, visited) when is_struct(payload) do + # Check for circular reference + ref = {payload.__struct__, Map.get(payload, :id)} + if MapSet.member?(visited, ref) do + # Return a reference indicator instead of recursing + %{"__ref__" => to_string(ref)} + else + visited = MapSet.put(visited, ref) + + # Get allowlisted fields based on struct type + allowed_fields = get_allowed_fields(payload.__struct__) + + payload + |> Map.from_struct() + |> Map.take(allowed_fields) + |> serialize_fields(visited) + end + end + + # Get allowed fields based on struct type + defp get_allowed_fields(module) do + module_name = module |> Module.split() |> List.last() + + case module_name do + "MapSystem" -> @system_fields + "MapCharacter" -> @character_fields + "MapConnection" -> @connection_fields + "MapSystemSignature" -> @signature_fields + _ -> [:id, :name] # Default minimal fields for unknown types + end + end + + defp serialize_payload(payload, visited) when is_map(payload) do + Map.new(payload, fn {k, v} -> {to_string(k), serialize_value(v, visited)} end) + end + + defp serialize_fields(fields, visited) do + Enum.reduce(fields, %{}, fn {k, v}, acc -> + if is_nil(v) do + acc + else + Map.put(acc, to_string(k), serialize_value(v, visited)) + end + end) + end + + defp serialize_value(%DateTime{} = dt, _visited), do: DateTime.to_iso8601(dt) + defp serialize_value(%NaiveDateTime{} = dt, _visited), do: NaiveDateTime.to_iso8601(dt) + defp serialize_value(v, visited) when is_struct(v), do: serialize_payload(v, visited) + defp serialize_value(v, visited) when is_map(v), do: serialize_payload(v, visited) + defp serialize_value(v, visited) when is_list(v), do: Enum.map(v, &serialize_value(&1, visited)) + defp serialize_value(v, _visited), do: v + + @doc """ + Returns all supported event types. + """ + @spec supported_event_types() :: [event_type()] + def supported_event_types do + [ + :add_system, + :deleted_system, + :system_renamed, + :system_metadata_changed, + :signatures_updated, + :signature_added, + :signature_removed, + :connection_added, + :connection_removed, + :connection_updated, + :character_added, + :character_removed, + :character_updated, + :map_kill + ] + end + + @doc """ + Validates an event type. 
+ """ + @spec valid_event_type?(atom()) :: boolean() + def valid_event_type?(event_type) when is_atom(event_type) do + event_type in supported_event_types() + end + + def valid_event_type?(_), do: false +end \ No newline at end of file diff --git a/lib/wanderer_app/external_events/external_events.ex b/lib/wanderer_app/external_events/external_events.ex new file mode 100644 index 00000000..59431c2c --- /dev/null +++ b/lib/wanderer_app/external_events/external_events.ex @@ -0,0 +1,111 @@ +defmodule WandererApp.ExternalEvents do + @moduledoc """ + External event system for webhook and WebSocket delivery. + + This system is completely separate from the internal Phoenix PubSub + event system and does NOT modify any existing event flows. + + External events are delivered to: + - WebSocket clients via MapEventsChannel + - HTTP webhooks via WebhookDispatcher + + ## Usage + + # From event producers, call this in ADDITION to existing broadcasts + WandererApp.ExternalEvents.broadcast("map_123", :add_system, %{ + solar_system_id: 31000199, + name: "J123456" + }) + + This is additive-only and does not replace any existing functionality. + """ + + alias WandererApp.ExternalEvents.{Event, MapEventRelay} + + require Logger + + @doc """ + Broadcasts an event to external clients only. + + This does NOT affect internal PubSub or LiveView handlers. + It only delivers events to: + - WebSocket clients connected to MapEventsChannel + - Configured webhook endpoints + + ## Parameters + + - `map_id`: The map identifier (string) + - `event_type`: The event type atom (see Event.event_type/0) + - `payload`: The event payload (map) + + ## Examples + + # System events + WandererApp.ExternalEvents.broadcast("map_123", :add_system, %{ + solar_system_id: 31000199, + name: "J123456" + }) + + # Kill events + WandererApp.ExternalEvents.broadcast("map_123", :map_kill, %{ + killmail_id: 98765, + victim_ship_type: "Rifter" + }) + """ + @spec broadcast(String.t(), Event.event_type(), map()) :: :ok + def broadcast(map_id, event_type, payload) when is_binary(map_id) and is_map(payload) do + Logger.debug(fn -> "ExternalEvents.broadcast called - map: #{map_id}, type: #{event_type}" end) + + # Validate event type + if Event.valid_event_type?(event_type) do + # Create normalized event + event = Event.new(map_id, event_type, payload) + + # Emit telemetry for monitoring + :telemetry.execute( + [:wanderer_app, :external_events, :broadcast], + %{count: 1}, + %{map_id: map_id, event_type: event_type} + ) + + # Check if MapEventRelay is alive before sending + if Process.whereis(MapEventRelay) do + try do + # Use call with timeout instead of cast for better error handling + GenServer.call(MapEventRelay, {:deliver_event, event}, 5000) + :ok + catch + :exit, {:timeout, _} -> + Logger.error("Timeout delivering event to MapEventRelay for map #{map_id}") + {:error, :timeout} + + :exit, reason -> + Logger.error("Failed to deliver event to MapEventRelay: #{inspect(reason)}") + {:error, reason} + end + else + Logger.error("MapEventRelay is not running, cannot deliver event for map #{map_id}") + {:error, :relay_not_available} + end + else + Logger.warning("Invalid external event type: #{inspect(event_type)}") + {:error, :invalid_event_type} + end + end + + @doc """ + Lists all supported event types. + """ + @spec supported_event_types() :: [Event.event_type()] + def supported_event_types do + Event.supported_event_types() + end + + @doc """ + Validates an event type atom. 
+ """ + @spec valid_event_type?(atom()) :: boolean() + def valid_event_type?(event_type) do + Event.valid_event_type?(event_type) + end +end \ No newline at end of file diff --git a/lib/wanderer_app/external_events/map_event_relay.ex b/lib/wanderer_app/external_events/map_event_relay.ex new file mode 100644 index 00000000..b5aa2f65 --- /dev/null +++ b/lib/wanderer_app/external_events/map_event_relay.ex @@ -0,0 +1,195 @@ +defmodule WandererApp.ExternalEvents.MapEventRelay do + @moduledoc """ + GenServer that handles delivery of external events to WebSocket and webhook clients. + + This system is completely separate from internal Phoenix PubSub and does NOT + modify any existing event flows. It only handles external client delivery. + + Responsibilities: + - Store events in ETS ring buffer for backfill + - Broadcast to external WebSocket clients (via separate topic) + - Dispatch to webhook endpoints + - Provide event history for reconnecting clients + + Events are stored in an ETS table per map with ULID ordering for backfill support. + Events older than 10 minutes are automatically cleaned up. + """ + + use GenServer + + alias WandererApp.ExternalEvents.Event + alias WandererApp.ExternalEvents.WebhookDispatcher + + require Logger + + @cleanup_interval :timer.minutes(2) + @event_retention_minutes 10 + + def start_link(opts) do + GenServer.start_link(__MODULE__, opts, name: __MODULE__) + end + + @doc """ + Retrieves events since a given timestamp for backfill. + """ + @spec get_events_since(String.t(), DateTime.t(), pos_integer()) :: [map()] + def get_events_since(map_id, since_datetime, limit \\ 100) do + GenServer.call(__MODULE__, {:get_events_since, map_id, since_datetime, limit}) + end + + @impl true + def init(_opts) do + # Create ETS table for event storage + # Using ordered_set for ULID sorting, public for read access + ets_table = :ets.new(:external_events, [ + :ordered_set, + :public, + :named_table, + {:read_concurrency, true} + ]) + + # Schedule periodic cleanup + schedule_cleanup() + + Logger.info("MapEventRelay started for external events") + + {:ok, %{ + ets_table: ets_table, + event_count: 0 + }} + end + + @impl true + def handle_cast({:deliver_event, %Event{} = event}, state) do + Logger.debug(fn -> "MapEventRelay received :deliver_event (cast) for map #{event.map_id}, type: #{event.type}" end) + new_state = deliver_single_event(event, state) + {:noreply, new_state} + end + + @impl true + def handle_call({:deliver_event, %Event{} = event}, _from, state) do + Logger.debug(fn -> "MapEventRelay received :deliver_event (call) for map #{event.map_id}, type: #{event.type}" end) + new_state = deliver_single_event(event, state) + {:reply, :ok, new_state} + end + + @impl true + def handle_call({:get_events_since, map_id, since_datetime, limit}, _from, state) do + events = get_events_from_ets(map_id, since_datetime, limit, state.ets_table) + {:reply, events, state} + end + + @impl true + def handle_info(:cleanup_events, state) do + cleanup_old_events(state.ets_table) + schedule_cleanup() + {:noreply, state} + end + + @impl true + def handle_info(msg, state) do + Logger.warning("MapEventRelay received unexpected message: #{inspect(msg)}") + {:noreply, state} + end + + defp deliver_single_event(%Event{} = event, state) do + Logger.debug(fn -> "MapEventRelay.deliver_single_event processing event for map #{event.map_id}" end) + + # Emit telemetry + :telemetry.execute( + [:wanderer_app, :external_events, :relay, :received], + %{count: 1}, + %{map_id: event.map_id, event_type: event.type} + ) 
+ + # 1. Store in ETS for backfill + store_event(event, state.ets_table) + + # 2. Broadcast to external WebSocket clients + # Use separate topic to avoid conflicts with internal PubSub + event_json = Event.to_json(event) + topic = "external_events:map:#{event.map_id}" + Logger.debug(fn -> "Broadcasting to PubSub topic: #{topic}" end) + + case Phoenix.PubSub.broadcast( + WandererApp.PubSub, + topic, + {:external_event, event_json} + ) do + :ok -> + Logger.debug(fn -> "Successfully broadcast event to topic: #{topic}" end) + + {:error, reason} -> + Logger.error("Failed to broadcast event to topic #{topic}: #{inspect(reason)}") + # Emit error telemetry + :telemetry.execute( + [:wanderer_app, :external_events, :relay, :broadcast_error], + %{count: 1}, + %{map_id: event.map_id, event_type: event.type, reason: reason} + ) + end + + # 3. Send to webhook subscriptions via WebhookDispatcher + WebhookDispatcher.dispatch_event(event.map_id, event) + + # Emit delivered telemetry + :telemetry.execute( + [:wanderer_app, :external_events, :relay, :delivered], + %{count: 1}, + %{map_id: event.map_id, event_type: event.type} + ) + + %{state | event_count: state.event_count + 1} + end + + defp store_event(%Event{} = event, ets_table) do + # Store with ULID as key for ordering + # Value includes map_id for efficient filtering + :ets.insert(ets_table, {event.id, event.map_id, Event.to_json(event)}) + end + + defp get_events_from_ets(map_id, since_datetime, limit, ets_table) do + # Convert datetime to ULID for comparison + # If no since_datetime, retrieve all events for the map + if since_datetime do + since_ulid = datetime_to_ulid(since_datetime) + + # Get all events since the ULID, filtered by map_id + :ets.select(ets_table, [ + {{:"$1", :"$2", :"$3"}, + [{:andalso, {:>=, :"$1", since_ulid}, {:==, :"$2", map_id}}], + [:"$3"]} + ]) + |> Enum.take(limit) + else + # Get all events for the map_id + :ets.select(ets_table, [ + {{:"$1", :"$2", :"$3"}, + [{:==, :"$2", map_id}], + [:"$3"]} + ]) + |> Enum.take(limit) + end + end + + defp cleanup_old_events(ets_table) do + cutoff_time = DateTime.add(DateTime.utc_now(), -@event_retention_minutes, :minute) + cutoff_ulid = datetime_to_ulid(cutoff_time) + + # Delete events older than cutoff + :ets.select_delete(ets_table, [ + {{:"$1", :_, :_}, [{:<, :"$1", cutoff_ulid}], [true]} + ]) + end + + defp schedule_cleanup do + Process.send_after(self(), :cleanup_events, @cleanup_interval) + end + + # Convert DateTime to ULID timestamp for comparison + defp datetime_to_ulid(datetime) do + timestamp = DateTime.to_unix(datetime, :millisecond) + # Create a ULID with the timestamp (rest will be zeros for comparison) + Ulid.generate(timestamp) + end +end \ No newline at end of file diff --git a/lib/wanderer_app/external_events/webhook_dispatcher.ex b/lib/wanderer_app/external_events/webhook_dispatcher.ex new file mode 100644 index 00000000..e018dca9 --- /dev/null +++ b/lib/wanderer_app/external_events/webhook_dispatcher.ex @@ -0,0 +1,352 @@ +defmodule WandererApp.ExternalEvents.WebhookDispatcher do + @moduledoc """ + GenServer that handles HTTP delivery of webhook events. + + This system processes webhook delivery requests asynchronously, + handles retry logic with exponential backoff, and tracks delivery status. 
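+
+  ## Verifying deliveries (receiver side)
+
+  Each request carries an `X-Wanderer-Signature` header ("sha256=<hex>") and an
+  `X-Wanderer-Timestamp` header. The signature is an HMAC-SHA256 of
+  "<timestamp>.<raw request body>" computed with the subscription secret (see
+  `generate_signature/3`). A receiver could verify it roughly as follows
+  (illustrative sketch, not part of this codebase; `secret`, `timestamp`,
+  `raw_body` and `signature_header` are assumed to come from the receiver's
+  own request handling):
+
+      expected =
+        :crypto.mac(:hmac, :sha256, secret, timestamp <> "." <> raw_body)
+        |> Base.encode16(case: :lower)
+
+      Plug.Crypto.secure_compare("sha256=" <> expected, signature_header)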
+ + Features: + - Async HTTP delivery using Task.Supervisor + - Exponential backoff retry logic (3 attempts max) + - HMAC-SHA256 signature generation for security + - Delivery status tracking and telemetry + - Payload size limits and filtering + """ + + use GenServer + + alias WandererApp.Api.MapWebhookSubscription + alias WandererApp.ExternalEvents.Event + + require Logger + + @max_payload_size 1_048_576 # 1MB + @max_retries 3 + @base_backoff_ms 1000 # 1 second + @max_backoff_ms 60_000 # 60 seconds + @jitter_range 0.25 # ±25% jitter + @max_consecutive_failures 10 + + def start_link(opts) do + GenServer.start_link(__MODULE__, opts, name: __MODULE__) + end + + @doc """ + Dispatches a single event to all matching webhook subscriptions. + """ + @spec dispatch_event(map_id :: String.t(), Event.t()) :: :ok + def dispatch_event(map_id, %Event{} = event) do + GenServer.cast(__MODULE__, {:dispatch_event, map_id, event}) + end + + @doc """ + Dispatches multiple events to all matching webhook subscriptions. + Optimized for batch processing. + """ + @spec dispatch_events(map_id :: String.t(), [Event.t()]) :: :ok + def dispatch_events(map_id, events) when is_list(events) do + GenServer.cast(__MODULE__, {:dispatch_events, map_id, events}) + end + + @impl true + def init(_opts) do + Logger.info("WebhookDispatcher started for HTTP event delivery") + + # Extract the pid from the tuple returned by start_link + {:ok, task_supervisor_pid} = Task.Supervisor.start_link(name: WebhookDispatcher.TaskSupervisor) + + {:ok, %{ + task_supervisor: task_supervisor_pid, + delivery_count: 0 + }} + end + + @impl true + def handle_cast({:dispatch_event, map_id, event}, state) do + Logger.debug(fn -> "WebhookDispatcher received single event for map #{map_id}, type: #{event.type}" end) + + # Emit telemetry for received event + :telemetry.execute( + [:wanderer_app, :webhook_dispatcher, :event_received], + %{count: 1}, + %{map_id: map_id, event_type: event.type} + ) + + new_state = process_webhook_delivery(map_id, [event], state) + {:noreply, new_state} + end + + @impl true + def handle_cast({:dispatch_events, map_id, events}, state) do + Logger.debug(fn -> "WebhookDispatcher received #{length(events)} events for map #{map_id}" end) + + # Emit telemetry for batch events + :telemetry.execute( + [:wanderer_app, :webhook_dispatcher, :batch_received], + %{count: length(events)}, + %{map_id: map_id} + ) + + new_state = process_webhook_delivery(map_id, events, state) + {:noreply, new_state} + end + + @impl true + def handle_info(msg, state) do + Logger.warning("WebhookDispatcher received unexpected message: #{inspect(msg)}") + {:noreply, state} + end + + defp process_webhook_delivery(map_id, events, state) do + # Get active webhook subscriptions for this map + case get_active_subscriptions(map_id) do + {:ok, [_ | _] = subscriptions} -> + Logger.debug(fn -> "Found #{length(subscriptions)} active webhook subscriptions for map #{map_id}" end) + process_active_subscriptions(subscriptions, events, state) + + {:ok, []} -> + Logger.debug(fn -> "No webhook subscriptions found for map #{map_id}" end) + + {:error, reason} -> + Logger.error("Failed to get webhook subscriptions for map #{map_id}: #{inspect(reason)}") + end + + %{state | delivery_count: state.delivery_count + length(events)} + end + + defp process_active_subscriptions(subscriptions, events, state) do + # Filter subscriptions based on event types + relevant_subscriptions = filter_subscriptions_by_events(subscriptions, events) + + if length(relevant_subscriptions) > 0 do + 
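+      # Each matching subscription gets its own supervised task (see
+      # start_delivery_task/3), so a slow or failing endpoint cannot hold up
+      # deliveries to the other subscriptions.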
Logger.debug(fn -> "#{length(relevant_subscriptions)} subscriptions match event types" end) + + # Start async delivery tasks for each subscription + Enum.each(relevant_subscriptions, fn subscription -> + start_delivery_task(subscription, events, state) + end) + end + end + + defp get_active_subscriptions(map_id) do + try do + subscriptions = MapWebhookSubscription.active_by_map!(map_id) + {:ok, subscriptions} + rescue + # Catch specific Ash errors + error in [Ash.Error.Query.NotFound] -> + {:ok, []} + + error in [Ash.Error.Invalid] -> + Logger.error("Invalid query for map #{map_id}: #{inspect(error)}") + {:error, error} + + # Only catch database/connection errors + error in [DBConnection.ConnectionError] -> + Logger.error("Database connection error getting subscriptions for map #{map_id}: #{inspect(error)}") + {:error, error} + end + end + + defp filter_subscriptions_by_events(subscriptions, events) do + event_types = Enum.map(events, & &1.type) |> Enum.uniq() + + Enum.filter(subscriptions, fn subscription -> + # Check if subscription matches any of the event types + "*" in subscription.events or + Enum.any?(event_types, fn event_type -> + to_string(event_type) in subscription.events + end) + end) + end + + defp start_delivery_task(subscription, events, _state) do + Task.Supervisor.start_child(WebhookDispatcher.TaskSupervisor, fn -> + deliver_webhook(subscription, events, 1) + end) + end + + defp deliver_webhook(subscription, events, attempt) do + Logger.debug(fn -> "Attempting webhook delivery to #{subscription.url} (attempt #{attempt}/#{@max_retries})" end) + + start_time = System.monotonic_time(:millisecond) + + # Prepare payload + case prepare_webhook_payload(events) do + {:ok, payload} -> + # Generate timestamp once for both signature and request + timestamp = System.os_time(:second) + + # Generate signature with the timestamp + signature = generate_signature(payload, subscription.secret, timestamp) + + # Make HTTP request with the same timestamp + case make_http_request(subscription.url, payload, signature, timestamp) do + {:ok, status_code} when status_code >= 200 and status_code < 300 -> + delivery_time = System.monotonic_time(:millisecond) - start_time + handle_delivery_success(subscription, delivery_time) + + {:ok, status_code} -> + handle_delivery_failure(subscription, events, attempt, "HTTP #{status_code}") + + {:error, reason} -> + handle_delivery_failure(subscription, events, attempt, inspect(reason)) + end + + {:error, reason} -> + Logger.error("Failed to prepare webhook payload: #{inspect(reason)}") + handle_delivery_failure(subscription, events, attempt, "Payload preparation failed") + end + end + + defp prepare_webhook_payload(events) do + try do + # Convert events to JSON + json_events = Enum.map(events, fn event -> + Event.to_json(event) + end) + + # Create webhook payload + payload = case length(json_events) do + 1 -> hd(json_events) # Single event + _ -> %{events: json_events} # Batch events + end + + json_payload = Jason.encode!(payload) + + # Check payload size + if byte_size(json_payload) > @max_payload_size do + {:error, :payload_too_large} + else + {:ok, json_payload} + end + + rescue + e -> {:error, e} + end + end + + defp generate_signature(payload, secret, timestamp) do + data_to_sign = "#{timestamp}.#{payload}" + + signature = :crypto.mac(:hmac, :sha256, secret, data_to_sign) + |> Base.encode16(case: :lower) + + "sha256=#{signature}" + end + + defp make_http_request(url, payload, signature, timestamp) do + + headers = [ + {"Content-Type", "application/json"}, 
+ {"User-Agent", "Wanderer-Webhook/1.0"}, + {"X-Wanderer-Signature", signature}, + {"X-Wanderer-Timestamp", to_string(timestamp)}, + {"X-Wanderer-Version", "1"} + ] + + request = Finch.build(:post, url, headers, payload) + + case Finch.request(request, WandererApp.Finch, timeout: 30_000) do + {:ok, %Finch.Response{status: status}} -> + {:ok, status} + + {:error, %Finch.Error{reason: reason}} -> + {:error, reason} + + {:error, reason} -> + {:error, reason} + end + end + + defp handle_delivery_success(subscription, delivery_time_ms) do + Logger.debug(fn -> "Webhook delivery successful to #{subscription.url} (#{delivery_time_ms}ms)" end) + + # Update subscription with successful delivery + try do + MapWebhookSubscription.update!(subscription, %{ + last_delivery_at: DateTime.utc_now(), + consecutive_failures: 0, + last_error: nil, + last_error_at: nil + }) + rescue + e -> + Logger.error("Failed to update webhook subscription after successful delivery: #{inspect(e)}") + end + + # Emit telemetry + :telemetry.execute( + [:wanderer_app, :webhook_dispatcher, :delivery_success], + %{delivery_time: delivery_time_ms}, + %{url: subscription.url, subscription_id: subscription.id} + ) + end + + defp handle_delivery_failure(subscription, events, attempt, error_reason) do + Logger.warning("Webhook delivery failed to #{subscription.url}: #{error_reason} (attempt #{attempt}/#{@max_retries})") + + if attempt < @max_retries do + # Calculate backoff delay with jitter + backoff_ms = calculate_backoff(attempt) + Logger.debug(fn -> "Retrying webhook delivery in #{backoff_ms}ms" end) + + # Schedule retry + Process.sleep(backoff_ms) + deliver_webhook(subscription, events, attempt + 1) + else + # All retries exhausted + Logger.error("Webhook delivery failed permanently to #{subscription.url} after #{@max_retries} attempts") + + new_consecutive_failures = subscription.consecutive_failures + 1 + + # Update subscription with failure + update_attrs = %{ + consecutive_failures: new_consecutive_failures, + last_error: String.slice(error_reason, 0, 1000), # Truncate to 1000 chars + last_error_at: DateTime.utc_now() + } + + # Disable subscription if too many consecutive failures + update_attrs = if new_consecutive_failures >= @max_consecutive_failures do + Logger.warning("Disabling webhook subscription #{subscription.id} due to #{@max_consecutive_failures} consecutive failures") + Map.put(update_attrs, :active?, false) + else + update_attrs + end + + try do + MapWebhookSubscription.update!(subscription, update_attrs) + rescue + e -> + Logger.error("Failed to update webhook subscription after failure: #{inspect(e)}") + end + + # Emit telemetry + :telemetry.execute( + [:wanderer_app, :webhook_dispatcher, :delivery_failure], + %{consecutive_failures: new_consecutive_failures}, + %{ + url: subscription.url, + subscription_id: subscription.id, + error: error_reason, + disabled: new_consecutive_failures >= @max_consecutive_failures + } + ) + end + end + + defp calculate_backoff(attempt) do + # Exponential backoff: base * 2^(attempt-1) + base_delay = @base_backoff_ms * :math.pow(2, attempt - 1) + + # Cap at max backoff + capped_delay = min(base_delay, @max_backoff_ms) + + # Add jitter (±25%) + jitter_amount = capped_delay * @jitter_range + jitter = :rand.uniform() * 2 * jitter_amount - jitter_amount + + round(capped_delay + jitter) + end +end \ No newline at end of file diff --git a/lib/wanderer_app/kills/subscription/map_integration.ex b/lib/wanderer_app/kills/subscription/map_integration.ex index 2cacdfa6..8a057b2e 100644 --- 
a/lib/wanderer_app/kills/subscription/map_integration.ex +++ b/lib/wanderer_app/kills/subscription/map_integration.ex @@ -172,6 +172,18 @@ defmodule WandererApp.Kills.Subscription.MapIntegration do ) end) + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + # This does NOT modify existing behavior, it's purely additive + Enum.each(map_ids, fn map_id -> + try do + WandererApp.ExternalEvents.broadcast(map_id, :map_kill, kill_data) + rescue + error -> + Logger.error("Failed to broadcast external event for map #{map_id}: #{inspect(error)}") + # Continue processing other maps even if one fails + end + end) + :ok system_id when is_binary(system_id) -> diff --git a/lib/wanderer_app/map/server/map_server_characters_impl.ex b/lib/wanderer_app/map/server/map_server_characters_impl.ex index 771340e8..20b992cc 100644 --- a/lib/wanderer_app/map/server/map_server_characters_impl.ex +++ b/lib/wanderer_app/map/server/map_server_characters_impl.ex @@ -16,6 +16,9 @@ defmodule WandererApp.Map.Server.CharactersImpl do }), {:ok, character} <- WandererApp.Character.get_character(character_id) do Impl.broadcast!(map_id, :character_added, character) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, :character_added, character) :telemetry.execute([:wanderer_app, :map, :character, :added], %{count: 1}) :ok else @@ -25,6 +28,9 @@ defmodule WandererApp.Map.Server.CharactersImpl do _error -> {:ok, character} = WandererApp.Character.get_character(character_id) Impl.broadcast!(map_id, :character_added, character) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, :character_added, character) :ok end end) @@ -37,6 +43,9 @@ defmodule WandererApp.Map.Server.CharactersImpl do with :ok <- WandererApp.Map.remove_character(map_id, character_id), {:ok, character} <- WandererApp.Character.get_map_character(map_id, character_id) do Impl.broadcast!(map_id, :character_removed, character) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, :character_removed, character) :telemetry.execute([:wanderer_app, :map, :character, :removed], %{count: 1}) @@ -300,6 +309,9 @@ defmodule WandererApp.Map.Server.CharactersImpl do defp update_character(map_id, character_id) do {:ok, character} = WandererApp.Character.get_map_character(map_id, character_id) Impl.broadcast!(map_id, :character_updated, character) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, :character_updated, character) end defp update_location( diff --git a/lib/wanderer_app/map/server/map_server_connections_impl.ex b/lib/wanderer_app/map/server/map_server_connections_impl.ex index 0df06ffd..be956691 100644 --- a/lib/wanderer_app/map/server/map_server_connections_impl.ex +++ b/lib/wanderer_app/map/server/map_server_connections_impl.ex @@ -302,8 +302,7 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do solar_system_target: solar_system_target_id }, state -> - state - |> delete_connection(%{ + delete_connection(state, %{ solar_system_source_id: solar_system_source_id, solar_system_target_id: solar_system_target_id }) @@ -388,6 +387,17 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do }) Impl.broadcast!(map_id, :add_connection, connection) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, 
:connection_added, %{ + connection_id: connection.id, + solar_system_source_id: old_location.solar_system_id, + solar_system_target_id: location.solar_system_id, + type: connection_type, + ship_size_type: ship_size_type, + mass_status: connection.mass_status, + time_status: connection.time_status + }) {:ok, character} = WandererApp.Character.get_character(character_id) @@ -560,6 +570,13 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do Impl.broadcast!(map_id, :remove_connections, [connection]) map_id |> WandererApp.Map.remove_connection(connection) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, :connection_removed, %{ + connection_id: connection.id, + solar_system_source_id: location.solar_system_id, + solar_system_target_id: old_location.solar_system_id + }) WandererApp.Cache.delete("map_#{map_id}:conn_#{connection.id}:start_time") @@ -602,6 +619,19 @@ defmodule WandererApp.Map.Server.ConnectionsImpl do end Impl.broadcast!(map_id, :update_connection, updated_connection) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, :connection_updated, %{ + connection_id: updated_connection.id, + solar_system_source_id: solar_system_source_id, + solar_system_target_id: solar_system_target_id, + type: updated_connection.type, + ship_size_type: updated_connection.ship_size_type, + mass_status: updated_connection.mass_status, + time_status: updated_connection.time_status, + locked: updated_connection.locked, + custom_info: updated_connection.custom_info + }) state else diff --git a/lib/wanderer_app/map/server/map_server_signatures_impl.ex b/lib/wanderer_app/map/server/map_server_signatures_impl.ex index 6a0b2b76..84113064 100644 --- a/lib/wanderer_app/map/server/map_server_signatures_impl.ex +++ b/lib/wanderer_app/map/server/map_server_signatures_impl.ex @@ -145,6 +145,34 @@ defmodule WandererApp.Map.Server.SignaturesImpl do # 5. 
Broadcast to any live subscribers Impl.broadcast!(state.map_id, :signatures_updated, system.solar_system_id) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + # Send individual signature events + Enum.each(added_sigs, fn sig -> + WandererApp.ExternalEvents.broadcast(state.map_id, :signature_added, %{ + solar_system_id: system.solar_system_id, + signature_id: sig.eve_id, + name: sig.name, + kind: sig.kind, + group: sig.group, + type: sig.type + }) + end) + + Enum.each(removed_ids, fn sig_eve_id -> + WandererApp.ExternalEvents.broadcast(state.map_id, :signature_removed, %{ + solar_system_id: system.solar_system_id, + signature_id: sig_eve_id + }) + end) + + # Also send the summary event for backwards compatibility + WandererApp.ExternalEvents.broadcast(state.map_id, :signatures_updated, %{ + solar_system_id: system.solar_system_id, + added_count: length(added_ids), + updated_count: length(updated_ids), + removed_count: length(removed_ids) + }) state end diff --git a/lib/wanderer_app/map/server/map_server_systems_impl.ex b/lib/wanderer_app/map/server/map_server_systems_impl.ex index c4cb78cc..4f73b910 100644 --- a/lib/wanderer_app/map/server/map_server_systems_impl.ex +++ b/lib/wanderer_app/map/server/map_server_systems_impl.ex @@ -278,6 +278,16 @@ defmodule WandererApp.Map.Server.SystemsImpl do :ok = WandererApp.Map.remove_system(map_id, solar_system_id) @ddrt.delete([solar_system_id], rtree_name) Impl.broadcast!(map_id, :systems_removed, [solar_system_id]) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + Logger.debug(fn -> "SystemsImpl.delete_systems calling ExternalEvents.broadcast for map #{map_id}, system: #{solar_system_id}" end) + # For consistency, include basic fields even for deleted systems + WandererApp.ExternalEvents.broadcast(map_id, :deleted_system, %{ + solar_system_id: solar_system_id, + name: nil, # System is deleted, name not available + position_x: nil, + position_y: nil + }) track_systems_removed(map_id, user_id, character_id, [solar_system_id]) remove_system_connections(map_id, [solar_system_id]) @@ -425,6 +435,14 @@ defmodule WandererApp.Map.Server.SystemsImpl do WandererApp.Map.add_system(map_id, updated_system) Impl.broadcast!(map_id, :add_system, updated_system) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, :add_system, %{ + solar_system_id: updated_system.solar_system_id, + name: updated_system.name, + position_x: updated_system.position_x, + position_y: updated_system.position_y + }) :ok _ -> @@ -454,6 +472,14 @@ defmodule WandererApp.Map.Server.SystemsImpl do WandererApp.Map.add_system(map_id, new_system) Impl.broadcast!(map_id, :add_system, new_system) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, :add_system, %{ + solar_system_id: new_system.solar_system_id, + name: new_system.name, + position_x: new_system.position_x, + position_y: new_system.position_y + }) :ok @@ -560,6 +586,15 @@ defmodule WandererApp.Map.Server.SystemsImpl do ) Impl.broadcast!(map_id, :add_system, system) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + Logger.debug(fn -> "SystemsImpl._add_system calling ExternalEvents.broadcast for map #{map_id}, system: #{solar_system_id}" end) + WandererApp.ExternalEvents.broadcast(map_id, :add_system, %{ + solar_system_id: system.solar_system_id, + name: system.name, + position_x: system.position_x, + position_y: 
system.position_y + }) {:ok, _} = WandererApp.User.ActivityTracker.track_map_event(:system_added, %{ @@ -624,5 +659,11 @@ defmodule WandererApp.Map.Server.SystemsImpl do ) Impl.broadcast!(map_id, :update_system, updated_system) + + # ADDITIVE: Also broadcast to external event system (webhooks/WebSocket) + WandererApp.ExternalEvents.broadcast(map_id, :system_metadata_changed, %{ + solar_system_id: updated_system.solar_system_id, + name: updated_system.name + }) end end diff --git a/lib/wanderer_app_web/channels/map_events_channel.ex b/lib/wanderer_app_web/channels/map_events_channel.ex new file mode 100644 index 00000000..5b098ed3 --- /dev/null +++ b/lib/wanderer_app_web/channels/map_events_channel.ex @@ -0,0 +1,247 @@ +defmodule WandererAppWeb.MapEventsChannel do + @moduledoc """ + WebSocket channel for external map events. + + This channel delivers events from the external event system to WebSocket clients. + It uses separate topics from the internal PubSub system to avoid conflicts. + + ## Topic Format + + Clients subscribe to: `external_events:map:MAP_ID` + + ## Usage + + ```javascript + // Connect with API key authentication + const socket = new Phoenix.Socket("/socket/websocket", { + params: { api_key: "your_map_api_key_here" } + }) + socket.connect() + + const channel = socket.channel("external_events:map:123", {}) + channel.join() + .receive("ok", resp => { console.log("Joined successfully", resp) }) + .receive("error", resp => { console.log("Unable to join", resp) }) + + channel.on("external_event", payload => { + console.log("Received event:", payload) + }) + ``` + """ + + use WandererAppWeb, :channel + + require Logger + + # Log when module is loaded + Logger.info("MapEventsChannel module loaded") + + @impl true + def join("external_events:map:" <> map_id, payload, socket) do + Logger.info("Attempting to join external events channel for map: #{map_id}") + + with {:ok, api_key} <- get_api_key(socket), + {:ok, map} <- validate_map_access(map_id, api_key) do + handle_successful_join(map_id, map, payload, socket) + else + {:error, :missing_api_key} -> + Logger.warning("WebSocket join failed: missing API key") + {:error, %{reason: "Authentication required. Provide api_key parameter."}} + + {:error, :map_not_found} -> + Logger.warning("WebSocket join failed: map not found - #{map_id}") + {:error, %{reason: "Map not found"}} + + {:error, :unauthorized} -> + Logger.warning("WebSocket join failed: unauthorized for map #{map_id}") + {:error, %{reason: "Unauthorized: Invalid API key for this map"}} + + error -> + Logger.error("WebSocket join failed: #{inspect(error)}") + {:error, %{reason: "Authentication failed"}} + end + end + + def join(topic, _payload, _socket) do + Logger.warning("Attempted to join invalid external events topic: #{topic}") + {:error, %{reason: "Invalid topic format. 
Use: external_events:map:MAP_ID"}} + end + + defp handle_successful_join(map_id, map, payload, socket) do + Logger.info("Client authenticated and joined external events for map #{map_id}") + + # Parse event filters from join payload + event_filter = parse_event_filter(payload) + Logger.debug(fn -> "Event filter: #{inspect(event_filter)}" end) + + # Subscribe to external events for this map + topic = "external_events:map:#{map_id}" + Phoenix.PubSub.subscribe(WandererApp.PubSub, topic) + Logger.debug(fn -> "Subscribed to PubSub topic: #{topic}" end) + + # Store map information and event filter in socket assigns + socket = socket + |> assign(:map_id, map_id) + |> assign(:map, map) + |> assign(:event_filter, event_filter) + + # Send initial connection acknowledgment + {:ok, %{status: "connected", map_id: map_id, map_name: map.name, event_filter: event_filter}, socket} + end + + @impl true + def handle_info({:external_event, event}, socket) do + # Check if this event should be sent based on the client's filter + if should_send_event?(event, socket.assigns[:event_filter]) do + # Forward external events to WebSocket clients + # The event is a map that needs to be sent directly + push(socket, "external_event", event) + end + {:noreply, socket} + end + + @impl true + def handle_info(_msg, socket) do + # Silently ignore other messages - this can happen when multiple + # channels are subscribed to the same PubSub topic + {:noreply, socket} + end + + @impl true + def handle_in("ping", payload, socket) do + # Simple ping/pong for client heartbeat testing + {:reply, {:ok, %{pong: payload}}, socket} + end + + @impl true + def handle_in(event, payload, socket) do + Logger.debug(fn -> "Unhandled incoming event: #{event} with payload: #{inspect(payload)}" end) + {:noreply, socket} + end + + @impl true + def terminate(reason, socket) do + map_id = socket.assigns[:map_id] + Logger.debug(fn -> "Client disconnected from external events for map #{map_id}, reason: #{inspect(reason)}" end) + :ok + end + + # Private helper functions for authentication + + defp get_api_key(socket) do + case socket.assigns[:api_key] do + api_key when is_binary(api_key) and api_key != "" -> + {:ok, api_key} + _ -> + {:error, :missing_api_key} + end + end + + defp validate_map_access(map_id, api_key) do + Logger.debug(fn -> "Validating map access for map_id: #{map_id}" end) + alias WandererApp.Api.Map, as: ApiMap + alias Plug.Crypto + + case resolve_map_identifier(map_id) do + {:ok, map} -> + Logger.info("Map found: #{map.name}, checking API key...") + Logger.info("Map public_api_key present: #{not is_nil(map.public_api_key)}") + Logger.info("Provided API key: #{String.slice(api_key, 0..7)}...") + + if is_binary(map.public_api_key) && + Crypto.secure_compare(map.public_api_key, api_key) do + Logger.info("API key matches, access granted") + {:ok, map} + else + Logger.info("API key mismatch or invalid") + Logger.info("Map has public_api_key: #{is_binary(map.public_api_key)}") + {:error, :unauthorized} + end + {:error, :not_found} -> + Logger.debug(fn -> "Map not found" end) + {:error, :map_not_found} + error -> + Logger.error("Map validation error: #{inspect(error)}") + {:error, :validation_failed} + end + end + + # Try to resolve map identifier - could be map_id or slug + defp resolve_map_identifier(identifier) do + Logger.debug(fn -> "Resolving map identifier: #{identifier}" end) + alias WandererApp.Api.Map, as: ApiMap + + # Try ID lookup first + Logger.debug(fn -> "Trying ID lookup..." 
end) + + case ApiMap.by_id(identifier) do + {:ok, map} -> + Logger.debug(fn -> "Found by ID: #{map.name}" end) + {:ok, map} + + error -> + Logger.debug(fn -> "ID lookup failed: #{inspect(error)}, trying slug lookup..." end) + resolve_by_slug(identifier) + end + end + + defp resolve_by_slug(identifier) do + alias WandererApp.Api.Map, as: ApiMap + + case ApiMap.get_map_by_slug(identifier) do + {:ok, map} -> + Logger.debug(fn -> "Found by slug: #{map.name}" end) + {:ok, map} + + error -> + Logger.debug(fn -> "Slug lookup failed: #{inspect(error)}" end) + {:error, :not_found} + end + end + + # Event filtering helper functions + + defp parse_event_filter(%{"events" => events}) when is_list(events) do + # Convert string event types to atoms and validate them + events + |> Enum.map(&parse_event_type/1) + |> Enum.filter(& &1) # Remove nil values from invalid event types + |> case do + [] -> :all # If no valid events specified, default to all + valid_events -> valid_events + end + end + + defp parse_event_filter(%{"events" => "*"}), do: :all + defp parse_event_filter(%{"events" => ["*"]}), do: :all + defp parse_event_filter(_), do: :all # Default to all events if no filter specified + + defp parse_event_type(event_type) when is_binary(event_type) do + alias WandererApp.ExternalEvents.Event + + # Convert string to atom if it's a valid event type + try do + atom = String.to_existing_atom(event_type) + if Event.valid_event_type?(atom), do: atom, else: nil + rescue + ArgumentError -> nil + end + end + + defp parse_event_type(_), do: nil + + defp should_send_event?(_event, :all), do: true + + defp should_send_event?(event, event_filter) when is_list(event_filter) do + # Extract event type from the event map + event_type = case event do + %{"type" => type} when is_binary(type) -> String.to_existing_atom(type) + %{"type" => type} when is_atom(type) -> type + _ -> nil + end + + event_type in event_filter + end + + defp should_send_event?(_event, _filter), do: true # Default to sending if filter format is unexpected +end \ No newline at end of file diff --git a/lib/wanderer_app_web/channels/user_socket.ex b/lib/wanderer_app_web/channels/user_socket.ex new file mode 100644 index 00000000..428464d4 --- /dev/null +++ b/lib/wanderer_app_web/channels/user_socket.ex @@ -0,0 +1,56 @@ +defmodule WandererAppWeb.UserSocket do + use Phoenix.Socket + + # External events channel for webhooks/WebSocket delivery + channel "external_events:map:*", WandererAppWeb.MapEventsChannel + + @impl true + def connect(params, socket, connect_info) do + # Extract API key from connection params + # Client should connect with: /socket/websocket?api_key= + require Logger + + # Log connection attempt for security auditing + remote_ip = get_remote_ip(connect_info) + Logger.info("WebSocket connection attempt from #{remote_ip}") + + case params["api_key"] do + api_key when is_binary(api_key) and api_key != "" -> + # Store the API key in socket assigns for channel authentication + # Full validation happens in channel join where we have the map context + socket = socket + |> assign(:api_key, api_key) + |> assign(:remote_ip, remote_ip) + + Logger.info("WebSocket connection accepted from #{remote_ip}, pending channel authentication") + {:ok, socket} + + _ -> + # Require API key for external events + Logger.warning("WebSocket connection rejected - missing API key from #{remote_ip}") + :error + end + end + + # Extract remote IP from connection info + defp get_remote_ip(connect_info) do + case connect_info do + %{peer_data: %{address: {a, b, c, 
d}}} -> + "#{a}.#{b}.#{c}.#{d}" + + %{x_headers: headers} -> + # Check for X-Forwarded-For or X-Real-IP headers (for proxied connections) + Enum.find_value(headers, "unknown", fn + {"x-forwarded-for", ip} -> String.split(ip, ",") |> List.first() |> String.trim() + {"x-real-ip", ip} -> ip + _ -> nil + end) + + _ -> + "unknown" + end + end + + @impl true + def id(_socket), do: nil +end \ No newline at end of file diff --git a/lib/wanderer_app_web/controllers/map_events_api_controller.ex b/lib/wanderer_app_web/controllers/map_events_api_controller.ex new file mode 100644 index 00000000..cbd12404 --- /dev/null +++ b/lib/wanderer_app_web/controllers/map_events_api_controller.ex @@ -0,0 +1,218 @@ +defmodule WandererAppWeb.MapEventsAPIController do + use WandererAppWeb, :controller + use OpenApiSpex.ControllerSpecs + + require Logger + + alias WandererApp.ExternalEvents.MapEventRelay + alias WandererAppWeb.Schemas.{ApiSchemas, ResponseSchemas} + + # ----------------------------------------------------------------- + # Schema Definitions + # ----------------------------------------------------------------- + + @event_schema %OpenApiSpex.Schema{ + type: :object, + properties: %{ + id: %OpenApiSpex.Schema{type: :string, description: "ULID event identifier"}, + map_id: %OpenApiSpex.Schema{type: :string, description: "Map UUID"}, + type: %OpenApiSpex.Schema{ + type: :string, + enum: [ + "add_system", + "deleted_system", + "system_metadata_changed", + "system_renamed", + "signature_added", + "signature_removed", + "signatures_updated", + "connection_added", + "connection_removed", + "connection_updated", + "character_added", + "character_removed", + "character_updated", + "map_kill" + ], + description: "Event type" + }, + payload: %OpenApiSpex.Schema{ + type: :object, + description: "Event-specific payload data", + additionalProperties: true + }, + ts: %OpenApiSpex.Schema{ + type: :string, + format: :date_time, + description: "Event timestamp (ISO8601)" + } + }, + required: [:id, :map_id, :type, :payload, :ts], + example: %{ + id: "01J7KZXYZ123456789ABCDEF", + map_id: "550e8400-e29b-41d4-a716-446655440000", + type: "add_system", + payload: %{ + solar_system_id: 30000142, + solar_system_name: "Jita" + }, + ts: "2025-01-20T12:34:56Z" + } + } + + @events_response_schema ApiSchemas.data_wrapper( + %OpenApiSpex.Schema{ + type: :array, + items: @event_schema + } + ) + + @events_list_params %OpenApiSpex.Schema{ + type: :object, + properties: %{ + since: %OpenApiSpex.Schema{ + type: :string, + format: :date_time, + description: "Return events after this timestamp (ISO8601)" + }, + limit: %OpenApiSpex.Schema{ + type: :integer, + minimum: 1, + maximum: 100, + default: 100, + description: "Maximum number of events to return" + } + } + } + + # ----------------------------------------------------------------- + # OpenApiSpex Operations + # ----------------------------------------------------------------- + + operation :list_events, + summary: "List recent events for a map", + description: """ + Retrieves recent events for the specified map. This endpoint provides a way to catch up on missed events + after a WebSocket disconnection. Events are retained for approximately 10 minutes. 
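+    Clients resuming after a WebSocket disconnect should pass the timestamp of
+    the last event they processed as the `since` query parameter to avoid gaps.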
+ """, + tags: ["Map Events"], + parameters: [ + map_identifier: [ + in: :path, + description: "Map UUID or slug", + type: :string, + required: true + ], + since: [ + in: :query, + description: "Return events after this timestamp (ISO8601)", + type: :string, + required: false, + example: "2025-01-20T12:00:00Z" + ], + limit: [ + in: :query, + description: "Maximum number of events to return (1-100)", + type: :integer, + required: false + ] + ], + responses: %{ + 200 => {"Success", "application/json", @events_response_schema}, + 400 => ResponseSchemas.bad_request("Invalid parameters"), + 401 => ResponseSchemas.bad_request("Unauthorized"), + 404 => ResponseSchemas.not_found("Map not found"), + 500 => ResponseSchemas.internal_server_error("Internal server error") + } + + # ----------------------------------------------------------------- + # Controller Actions + # ----------------------------------------------------------------- + + def list_events(conn, %{"map_identifier" => map_identifier} = params) do + with {:ok, map} <- get_map(conn, map_identifier), + {:ok, since} <- parse_since_param(params), + {:ok, limit} <- parse_limit_param(params) do + + # If no 'since' parameter provided, default to 10 minutes ago + since_datetime = since || DateTime.add(DateTime.utc_now(), -10, :minute) + + # Check if MapEventRelay is running before calling + events = if Process.whereis(MapEventRelay) do + try do + MapEventRelay.get_events_since(map.id, since_datetime, limit) + catch + :exit, {:noproc, _} -> + Logger.error("MapEventRelay process not available") + [] + + :exit, reason -> + Logger.error("Failed to get events from MapEventRelay: #{inspect(reason)}") + [] + end + else + Logger.error("MapEventRelay is not running") + [] + end + + # Events are already in JSON format from ETS + + json(conn, %{data: events}) + else + {:error, :map_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Map not found"}) + + {:error, :invalid_since} -> + conn + |> put_status(:bad_request) + |> json(%{error: "Invalid 'since' parameter. Must be ISO8601 datetime."}) + + {:error, :invalid_limit} -> + conn + |> put_status(:bad_request) + |> json(%{error: "Invalid 'limit' parameter. 
Must be between 1 and 100."}) + + {:error, reason} -> + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + end + + # ----------------------------------------------------------------- + # Private Functions + # ----------------------------------------------------------------- + + defp get_map(conn, map_identifier) do + # The map should already be loaded by the CheckMapApiKey plug + case conn.assigns[:map] do + nil -> {:error, :map_not_found} + map -> {:ok, map} + end + end + + defp parse_since_param(%{"since" => since_str}) when is_binary(since_str) do + case DateTime.from_iso8601(since_str) do + {:ok, datetime, _offset} -> {:ok, datetime} + {:error, _} -> {:error, :invalid_since} + end + end + defp parse_since_param(_), do: {:ok, nil} + + defp parse_limit_param(%{"limit" => limit_str}) when is_binary(limit_str) do + case Integer.parse(limit_str) do + {limit, ""} when limit >= 1 and limit <= 100 -> {:ok, limit} + _ -> {:error, :invalid_limit} + end + end + defp parse_limit_param(%{"limit" => limit}) when is_integer(limit) do + if limit >= 1 and limit <= 100 do + {:ok, limit} + else + {:error, :invalid_limit} + end + end + defp parse_limit_param(_), do: {:ok, 100} +end \ No newline at end of file diff --git a/lib/wanderer_app_web/controllers/map_webhooks_api_controller.ex b/lib/wanderer_app_web/controllers/map_webhooks_api_controller.ex new file mode 100644 index 00000000..546b1082 --- /dev/null +++ b/lib/wanderer_app_web/controllers/map_webhooks_api_controller.ex @@ -0,0 +1,574 @@ +defmodule WandererAppWeb.MapWebhooksAPIController do + use WandererAppWeb, :controller + use OpenApiSpex.ControllerSpecs + + alias WandererApp.Api.MapWebhookSubscription + alias WandererAppWeb.Schemas.{ApiSchemas, ResponseSchemas} + + require Logger + + # ----------------------------------------------------------------- + # Schema Definitions + # ----------------------------------------------------------------- + + @webhook_subscription_schema %OpenApiSpex.Schema{ + type: :object, + properties: %{ + id: %OpenApiSpex.Schema{type: :string, description: "Webhook subscription UUID"}, + map_id: %OpenApiSpex.Schema{type: :string, description: "Map UUID"}, + url: %OpenApiSpex.Schema{ + type: :string, + description: "HTTPS webhook endpoint URL", + example: "https://example.com/webhook" + }, + events: %OpenApiSpex.Schema{ + type: :array, + items: %OpenApiSpex.Schema{type: :string}, + description: "Array of event types to subscribe to, or ['*'] for all", + example: ["add_system", "map_kill", "*"] + }, + active: %OpenApiSpex.Schema{type: :boolean, description: "Whether webhook is active"}, + last_delivery_at: %OpenApiSpex.Schema{ + type: :string, + format: :date_time, + description: "Last successful delivery timestamp", + nullable: true + }, + last_error: %OpenApiSpex.Schema{ + type: :string, + description: "Last error message if delivery failed", + nullable: true + }, + consecutive_failures: %OpenApiSpex.Schema{ + type: :integer, + description: "Number of consecutive delivery failures" + }, + inserted_at: %OpenApiSpex.Schema{type: :string, format: :date_time}, + updated_at: %OpenApiSpex.Schema{type: :string, format: :date_time} + }, + required: [:id, :map_id, :url, :events, :active, :consecutive_failures], + example: %{ + id: "550e8400-e29b-41d4-a716-446655440000", + map_id: "550e8400-e29b-41d4-a716-446655440001", + url: "https://example.com/wanderer-webhook", + events: ["add_system", "map_kill"], + active: true, + last_delivery_at: "2025-06-21T12:34:56Z", + last_error: nil, + 
consecutive_failures: 0, + inserted_at: "2025-06-21T10:00:00Z", + updated_at: "2025-06-21T12:34:56Z" + } + } + + @webhook_create_schema %OpenApiSpex.Schema{ + type: :object, + properties: %{ + url: %OpenApiSpex.Schema{ + type: :string, + description: "HTTPS webhook endpoint URL (max 2000 characters)", + example: "https://example.com/wanderer-webhook" + }, + events: %OpenApiSpex.Schema{ + type: :array, + items: %OpenApiSpex.Schema{type: :string}, + description: "Array of event types to subscribe to, or ['*'] for all events", + example: ["add_system", "map_kill"] + }, + active: %OpenApiSpex.Schema{ + type: :boolean, + description: "Whether webhook should be active (default: true)", + default: true + } + }, + required: [:url, :events], + example: %{ + url: "https://example.com/wanderer-webhook", + events: ["add_system", "signatures_updated", "map_kill"], + active: true + } + } + + @webhook_update_schema %OpenApiSpex.Schema{ + type: :object, + properties: %{ + url: %OpenApiSpex.Schema{ + type: :string, + description: "HTTPS webhook endpoint URL (max 2000 characters)" + }, + events: %OpenApiSpex.Schema{ + type: :array, + items: %OpenApiSpex.Schema{type: :string}, + description: "Array of event types to subscribe to, or ['*'] for all events" + }, + active: %OpenApiSpex.Schema{ + type: :boolean, + description: "Whether webhook should be active" + } + }, + example: %{ + events: ["*"], + active: false + } + } + + @webhook_secret_response_schema %OpenApiSpex.Schema{ + type: :object, + properties: %{ + secret: %OpenApiSpex.Schema{ + type: :string, + description: "New webhook secret for HMAC signature verification" + } + }, + required: [:secret], + example: %{ + secret: "abc123def456ghi789jkl012mno345pqr678stu901vwx234yz=" + } + } + + @webhooks_response_schema ApiSchemas.data_wrapper( + %OpenApiSpex.Schema{ + type: :array, + items: @webhook_subscription_schema + } + ) + + @webhook_response_schema ApiSchemas.data_wrapper(@webhook_subscription_schema) + @secret_response_schema ApiSchemas.data_wrapper(@webhook_secret_response_schema) + + # ----------------------------------------------------------------- + # OpenApiSpex Operations + # ----------------------------------------------------------------- + + operation :index, + summary: "List webhook subscriptions for a map", + description: "Retrieves all webhook subscriptions configured for the specified map.", + tags: ["Webhook Management"], + parameters: [ + map_identifier: [ + in: :path, + description: "Map UUID or slug", + type: :string, + required: true + ] + ], + responses: %{ + 200 => {"Success", "application/json", @webhooks_response_schema}, + 401 => ResponseSchemas.bad_request("Unauthorized"), + 404 => ResponseSchemas.not_found("Map not found"), + 500 => ResponseSchemas.internal_server_error("Internal server error") + } + + operation :show, + summary: "Get a specific webhook subscription", + description: "Retrieves details of a specific webhook subscription.", + tags: ["Webhook Management"], + parameters: [ + map_identifier: [ + in: :path, + description: "Map UUID or slug", + type: :string, + required: true + ], + id: [ + in: :path, + description: "Webhook subscription UUID", + type: :string, + required: true + ] + ], + responses: %{ + 200 => {"Success", "application/json", @webhook_response_schema}, + 401 => ResponseSchemas.bad_request("Unauthorized"), + 404 => ResponseSchemas.not_found("Webhook not found"), + 500 => ResponseSchemas.internal_server_error("Internal server error") + } + + operation :create, + summary: "Create a new webhook 
subscription", + description: """ + Creates a new webhook subscription for the map. The webhook will receive HTTP POST + requests for the specified event types. A secret is automatically generated for + HMAC signature verification. + """, + tags: ["Webhook Management"], + parameters: [ + map_identifier: [ + in: :path, + description: "Map UUID or slug", + type: :string, + required: true + ] + ], + request_body: {"Webhook subscription data", "application/json", @webhook_create_schema}, + responses: %{ + 201 => {"Created", "application/json", @webhook_response_schema}, + 400 => ResponseSchemas.bad_request("Invalid webhook data"), + 401 => ResponseSchemas.bad_request("Unauthorized"), + 409 => ResponseSchemas.bad_request("Webhook URL already exists for this map"), + 500 => ResponseSchemas.internal_server_error("Internal server error") + } + + operation :update, + summary: "Update a webhook subscription", + description: "Updates an existing webhook subscription. Partial updates are supported.", + tags: ["Webhook Management"], + parameters: [ + map_identifier: [ + in: :path, + description: "Map UUID or slug", + type: :string, + required: true + ], + id: [ + in: :path, + description: "Webhook subscription UUID", + type: :string, + required: true + ] + ], + request_body: {"Webhook update data", "application/json", @webhook_update_schema}, + responses: %{ + 200 => {"Updated", "application/json", @webhook_response_schema}, + 400 => ResponseSchemas.bad_request("Invalid webhook data"), + 401 => ResponseSchemas.bad_request("Unauthorized"), + 404 => ResponseSchemas.not_found("Webhook not found"), + 409 => ResponseSchemas.bad_request("Webhook URL already exists for this map"), + 500 => ResponseSchemas.internal_server_error("Internal server error") + } + + operation :delete, + summary: "Delete a webhook subscription", + description: "Permanently deletes a webhook subscription.", + tags: ["Webhook Management"], + parameters: [ + map_identifier: [ + in: :path, + description: "Map UUID or slug", + type: :string, + required: true + ], + id: [ + in: :path, + description: "Webhook subscription UUID", + type: :string, + required: true + ] + ], + responses: %{ + 204 => {"Deleted", "application/json", nil}, + 401 => ResponseSchemas.bad_request("Unauthorized"), + 404 => ResponseSchemas.not_found("Webhook not found"), + 500 => ResponseSchemas.internal_server_error("Internal server error") + } + + operation :rotate_secret, + summary: "Rotate webhook secret", + description: """ + Generates a new secret for the webhook subscription. The old secret will be + invalidated immediately. Update your webhook endpoint to use the new secret + for HMAC signature verification. 
+ """, + tags: ["Webhook Management"], + parameters: [ + map_identifier: [ + in: :path, + description: "Map UUID or slug", + type: :string, + required: true + ], + id: [ + in: :path, + description: "Webhook subscription UUID", + type: :string, + required: true + ] + ], + responses: %{ + 200 => {"Secret rotated", "application/json", @secret_response_schema}, + 401 => ResponseSchemas.bad_request("Unauthorized"), + 404 => ResponseSchemas.not_found("Webhook not found"), + 500 => ResponseSchemas.internal_server_error("Internal server error") + } + + # ----------------------------------------------------------------- + # Controller Actions + # ----------------------------------------------------------------- + + def index(conn, %{"map_identifier" => map_identifier}) do + with {:ok, map} <- get_map(conn, map_identifier) do + webhooks = MapWebhookSubscription.by_map!(map.id) + + json_webhooks = Enum.map(webhooks, &webhook_to_json/1) + json(conn, %{data: json_webhooks}) + else + {:error, :map_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Map not found"}) + + {:error, reason} -> + Logger.error("Failed to list webhooks: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + end + + def show(conn, %{"map_identifier" => map_identifier, "id" => webhook_id}) do + with {:ok, map} <- get_map(conn, map_identifier), + {:ok, webhook} <- get_webhook(webhook_id, map.id) do + + json(conn, %{data: webhook_to_json(webhook)}) + else + {:error, :map_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Map not found"}) + + {:error, :webhook_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Webhook not found"}) + + {:error, reason} -> + Logger.error("Failed to get webhook: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + end + + def create(conn, %{"map_identifier" => map_identifier} = params) do + with {:ok, map} <- get_map(conn, map_identifier), + {:ok, webhook_params} <- validate_create_params(params, map.id) do + + case MapWebhookSubscription.create(webhook_params) do + {:ok, webhook} -> + conn + |> put_status(:created) + |> json(%{data: webhook_to_json(webhook)}) + + {:error, %Ash.Error.Invalid{errors: errors}} -> + error_messages = Enum.map(errors, & &1.message) + conn + |> put_status(:bad_request) + |> json(%{error: "Validation failed", details: error_messages}) + + {:error, reason} -> + Logger.error("Failed to create webhook: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + else + {:error, :map_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Map not found"}) + + {:error, :invalid_params} -> + conn + |> put_status(:bad_request) + |> json(%{error: "Invalid webhook parameters"}) + + {:error, reason} -> + Logger.error("Failed to create webhook: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + end + + def update(conn, %{"map_identifier" => map_identifier, "id" => webhook_id} = params) do + with {:ok, map} <- get_map(conn, map_identifier), + {:ok, webhook} <- get_webhook(webhook_id, map.id), + {:ok, update_params} <- validate_update_params(params) do + + case MapWebhookSubscription.update(webhook, update_params) do + {:ok, updated_webhook} -> + json(conn, %{data: webhook_to_json(updated_webhook)}) + + {:error, %Ash.Error.Invalid{errors: errors}} -> + 
error_messages = Enum.map(errors, & &1.message) + conn + |> put_status(:bad_request) + |> json(%{error: "Validation failed", details: error_messages}) + + {:error, reason} -> + Logger.error("Failed to update webhook: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + else + {:error, :map_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Map not found"}) + + {:error, :webhook_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Webhook not found"}) + + {:error, :invalid_params} -> + conn + |> put_status(:bad_request) + |> json(%{error: "Invalid webhook parameters"}) + + {:error, reason} -> + Logger.error("Failed to update webhook: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + end + + def delete(conn, %{"map_identifier" => map_identifier, "id" => webhook_id}) do + with {:ok, map} <- get_map(conn, map_identifier), + {:ok, webhook} <- get_webhook(webhook_id, map.id) do + + case MapWebhookSubscription.destroy(webhook) do + :ok -> + conn |> put_status(:no_content) + + {:error, reason} -> + Logger.error("Failed to delete webhook: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + else + {:error, :map_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Map not found"}) + + {:error, :webhook_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Webhook not found"}) + + {:error, reason} -> + Logger.error("Failed to delete webhook: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + end + + def rotate_secret(conn, %{"map_identifier" => map_identifier, "id" => webhook_id}) do + with {:ok, map} <- get_map(conn, map_identifier), + {:ok, webhook} <- get_webhook(webhook_id, map.id) do + + case MapWebhookSubscription.rotate_secret(webhook) do + {:ok, updated_webhook} -> + # Return the new secret (this is the only time it's exposed) + json(conn, %{data: %{secret: updated_webhook.secret}}) + + {:error, reason} -> + Logger.error("Failed to rotate webhook secret: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + else + {:error, :map_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Map not found"}) + + {:error, :webhook_not_found} -> + conn + |> put_status(:not_found) + |> json(%{error: "Webhook not found"}) + + {:error, reason} -> + Logger.error("Failed to rotate webhook secret: #{inspect(reason)}") + conn + |> put_status(:internal_server_error) + |> json(%{error: "Internal server error"}) + end + end + + # ----------------------------------------------------------------- + # Private Functions + # ----------------------------------------------------------------- + + defp get_map(conn, map_identifier) do + # The map should already be loaded by the CheckMapApiKey plug + case conn.assigns[:map] do + nil -> {:error, :map_not_found} + map -> {:ok, map} + end + end + + defp get_webhook(webhook_id, map_id) do + try do + case MapWebhookSubscription.by_id(webhook_id) do + nil -> {:error, :webhook_not_found} + webhook -> + if webhook.map_id == map_id do + {:ok, webhook} + else + {:error, :webhook_not_found} + end + end + rescue + # Only catch specific Ash-related exceptions + error in [Ash.Error.Query.NotFound, Ash.Error.Invalid] -> + Logger.debug("Webhook lookup error: 
#{inspect(error)}") + {:error, :webhook_not_found} + end + end + + defp validate_create_params(params, map_id) do + required_fields = ["url", "events"] + + if Enum.all?(required_fields, &Map.has_key?(params, &1)) do + webhook_params = %{ + map_id: map_id, + url: params["url"], + events: params["events"], + active?: Map.get(params, "active", true) + } + {:ok, webhook_params} + else + {:error, :invalid_params} + end + end + + defp validate_update_params(params) do + # Filter out non-updatable fields and map identifier + allowed_fields = ["url", "events", "active"] + + update_params = params + |> Map.take(allowed_fields) + |> Enum.reduce(%{}, fn {k, v}, acc -> + case k do + "active" -> Map.put(acc, :active?, v) + "url" -> Map.put(acc, :url, v) + "events" -> Map.put(acc, :events, v) + _ -> acc + end + end) + + {:ok, update_params} + end + + defp webhook_to_json(webhook) do + %{ + id: webhook.id, + map_id: webhook.map_id, + url: webhook.url, + events: webhook.events, + active: webhook.active?, + last_delivery_at: webhook.last_delivery_at, + last_error: webhook.last_error, + consecutive_failures: webhook.consecutive_failures, + inserted_at: webhook.inserted_at, + updated_at: webhook.updated_at + } + end +end \ No newline at end of file diff --git a/lib/wanderer_app_web/endpoint.ex b/lib/wanderer_app_web/endpoint.ex index 425e6965..f9c6505d 100644 --- a/lib/wanderer_app_web/endpoint.ex +++ b/lib/wanderer_app_web/endpoint.ex @@ -42,6 +42,10 @@ defmodule WandererAppWeb.Endpoint do socket "/live", Phoenix.LiveView.Socket, websocket: [compress: true, connect_info: [session: @session_options]] + socket "/socket", WandererAppWeb.UserSocket, + websocket: true, + longpoll: false + plug PhoenixDDoS # Serve at "/" the static files from "priv/static" directory. diff --git a/lib/wanderer_app_web/router.ex b/lib/wanderer_app_web/router.ex index f31a18da..1c3ca208 100644 --- a/lib/wanderer_app_web/router.ex +++ b/lib/wanderer_app_web/router.ex @@ -240,6 +240,12 @@ defmodule WandererAppWeb.Router do resources "/signatures", MapSystemSignatureAPIController, except: [:new, :edit] get "/user-characters", MapAPIController, :show_user_characters get "/tracked-characters", MapAPIController, :show_tracked_characters + get "/events", MapEventsAPIController, :list_events + + # Webhook management endpoints + resources "/webhooks", MapWebhooksAPIController, except: [:new, :edit] do + post "/rotate-secret", MapWebhooksAPIController, :rotate_secret + end end # diff --git a/mix.exs b/mix.exs index 083d11e7..0dd64df4 100644 --- a/mix.exs +++ b/mix.exs @@ -92,6 +92,7 @@ defmodule WandererApp.MixProject do {:ash_postgres, "~> 2.4"}, {:exsync, "~> 0.4", only: :dev}, {:nimble_csv, "~> 1.2.0"}, + {:ulid, "~> 0.2.0"}, {:cachex, "~> 3.6"}, {:live_select, "~> 1.5"}, {:nebulex, "~> 2.6"}, diff --git a/mix.lock b/mix.lock index aa1001a0..d3cb6333 100644 --- a/mix.lock +++ b/mix.lock @@ -140,6 +140,7 @@ "typable": {:hex, :typable, "0.3.0", "0431e121d124cd26f312123e313d2689b9a5322b15add65d424c07779eaa3ca1", [:mix], [], "hexpm", "880a0797752da1a4c508ac48f94711e04c86156f498065a83d160eef945858f8"}, "tzdata": {:hex, :tzdata, "1.1.3", "b1cef7bb6de1de90d4ddc25d33892b32830f907e7fc2fccd1e7e22778ab7dfbc", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "d4ca85575a064d29d4e94253ee95912edfb165938743dbf002acdf0dcecb0c28"}, "ueberauth": {:hex, :ueberauth, "0.10.8", "ba78fbcbb27d811a6cd06ad851793aaf7d27c3b30c9e95349c2c362b344cd8f0", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], 
"hexpm", "f2d3172e52821375bccb8460e5fa5cb91cfd60b19b636b6e57e9759b6f8c10c1"}, + "ulid": {:hex, :ulid, "0.2.0", "1ef02026b7c8fa78a6ae6cb5e0d8f4ba92ed726b369849da328f93b7c0dab9cd", [:mix], [], "hexpm", "fadcc1d4cfa49028172f54bab9e464a69fb14f48f7652dad706d2bbb1ef76a6c"}, "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"}, "unsafe": {:hex, :unsafe, "1.0.2", "23c6be12f6c1605364801f4b47007c0c159497d0446ad378b5cf05f1855c0581", [:mix], [], "hexpm", "b485231683c3ab01a9cd44cb4a79f152c6f3bb87358439c6f68791b85c2df675"}, "uuid": {:hex, :uuid, "1.1.8", "e22fc04499de0de3ed1116b770c7737779f226ceefa0badb3592e64d5cfb4eb9", [:mix], [], "hexpm", "c790593b4c3b601f5dc2378baae7efaf5b3d73c4c6456ba85759905be792f2ac"}, diff --git a/priv/posts/2025/03-18-bots.md b/priv/posts/2025/03-18-bots.md index f21c98be..1cf71ddd 100644 --- a/priv/posts/2025/03-18-bots.md +++ b/priv/posts/2025/03-18-bots.md @@ -33,7 +33,7 @@ There are two ways to install Wanderer Notifier: a **Quick Install** option usin For a streamlined installation that creates the necessary directory and files automatically, run: ```bash -curl -fsSL https://gist.githubusercontent.com/guarzo/3f05f3c57005c3cf3585869212caecfe/raw/wanderer-notifier-setup.sh | bash +curl -fsSL https://gist.githubusercontent.com/guarzo/3f05f3c57005c3cf3585869212caecfe/raw/33cba423f27c12a09ec3054d4eb76b283da66ab4/wanderer-notifier-setup.sh | bash ``` Once the script finishes, update the `wanderer-notifier/.env` file with your configuration values, then proceed to [Step 4](#4-run-it). @@ -58,18 +58,26 @@ Create a `.env` file in your working directory with the following content. Repla # Required Configuration DISCORD_BOT_TOKEN=your_discord_bot_token DISCORD_CHANNEL_ID=your_discord_channel_id -MAP_URL_WITH_NAME="https://wanderer.ltd/" -MAP_TOKEN=your_map_api_token +MAP_URL="https://wanderer.ltd" +MAP_NAME=your_map_name +MAP_ID=your_map_id # UUID of your map (found in map settings) +MAP_API_KEY=your_map_api_token + +# Discord Application Configuration (optional - enables slash commands) +DISCORD_APPLICATION_ID=your_discord_application_id # Enables /notifier commands # Map Subscription Configuration (for enhanced features) # Note: Premium features are enabled with your map subscription LICENSE_KEY=your_map_license_key # Provided with your map subscription # Notification Control (all enabled by default) -# ENABLE_KILL_NOTIFICATIONS=true -# ENABLE_CHARACTER_NOTIFICATIONS=true -# ENABLE_SYSTEM_NOTIFICATIONS=true -# ENABLE_TRACK_KSPACE_SYSTEMS=false # Set to 'true' to track K-Space systems in addition to wormholes +NOTIFICATIONS_ENABLED=true +KILL_NOTIFICATIONS_ENABLED=true +CHARACTER_NOTIFICATIONS_ENABLED=true +SYSTEM_NOTIFICATIONS_ENABLED=true + +# Advanced Configuration (optional) +WEBSOCKET_MAP_URL=ws://host.docker.internal:4444 # WebSocket URL for real-time events ``` > **Note:** If you don't have a Discord bot yet, follow our [guide on creating a Discord bot](https://gist.github.com/guarzo/a4d238b932b6a168ad1c5f0375c4a561) or search the web for more information. 
@@ -81,14 +89,17 @@ Create a file named `docker-compose.yml` with the following content: ```yaml services: wanderer_notifier: - image: guarzo/wanderer-notifier:v1 + image: guarzo/wanderer-notifier:latest container_name: wanderer_notifier restart: unless-stopped environment: - DISCORD_BOT_TOKEN=${DISCORD_BOT_TOKEN} - DISCORD_CHANNEL_ID=${DISCORD_CHANNEL_ID} - - MAP_URL_WITH_NAME=${MAP_URL_WITH_NAME} - - MAP_TOKEN=${MAP_TOKEN} + - DISCORD_APPLICATION_ID=${DISCORD_APPLICATION_ID} + - MAP_URL=${MAP_URL} + - MAP_NAME=${MAP_NAME} + - MAP_ID=${MAP_ID} + - MAP_API_KEY=${MAP_API_KEY} - LICENSE_KEY=${LICENSE_KEY} ports: - 4000:4000 @@ -265,22 +276,75 @@ Premium map subscribers also gain access to detailed statistics and advanced vis --- +## Discord Slash Commands + +Wanderer Notifier supports Discord slash commands for interactive map control when `DISCORD_APPLICATION_ID` is configured in your environment. + +### Available Commands + +#### `/notifier` Command Group + +The `/notifier` command provides priority system management and status monitoring for your Discord server. + +**Commands:** +- `/notifier system ` - Add a system to priority notifications (receives @here mentions) +- `/notifier system action:add-priority` - Add system to priority list +- `/notifier system action:remove-priority` - Remove system from priority list +- `/notifier status` - View current bot status, priority systems, and configuration + +**Examples:** +``` +/notifier system J104809 +/notifier system Jita action:add-priority +/notifier system Amarr action:remove-priority +/notifier status +``` + +**Priority Systems:** +- Priority systems receive enhanced notifications with @here mentions +- Useful for highlighting activity in high-value hunting grounds or strategic locations +- Priority systems list is maintained across bot restarts + +### Setting Up Slash Commands + +To enable slash commands, you need to configure your Discord application: + +1. **Get your Discord Application ID:** + - Visit the [Discord Developer Portal](https://discord.com/developers/applications) + - Select your bot application + - Copy the "Application ID" from the General Information tab + +2. **Add to Environment Configuration:** + ```dotenv + DISCORD_APPLICATION_ID=your_discord_application_id + ``` + +3. **Bot Permissions:** + Ensure your bot has the following permissions in your Discord server: + - Use Slash Commands + - Send Messages + - Embed Links + +4. **Command Registration:** + Slash commands are automatically registered when the bot starts with a valid `DISCORD_APPLICATION_ID`. + +--- + ## Configuration Options Customize your notification experience with several configuration options available through environment variables. ### Notification Control Variables -- **ENABLE_KILL_NOTIFICATIONS:** Enable/disable kill notifications (default: true). -- **ENABLE_CHARACTER_NOTIFICATIONS:** Enable/disable notifications when new characters are added (default: true). -- **ENABLE_SYSTEM_NOTIFICATIONS:** Enable/disable system notifications (default: true). -- **ENABLE_TRACK_KSPACE_SYSTEMS:** Enable/disable tracking of K-Space (non-wormhole) systems (default: false). +- **KILL_NOTIFICATIONS_ENABLED:** Enable/disable kill notifications (default: true). +- **CHARACTER_NOTIFICATIONS_ENABLED:** Enable/disable notifications when new characters are added (default: true). +- **SYSTEM_NOTIFICATIONS_ENABLED:** Enable/disable system notifications (default: true). 
To disable a notification type, set the corresponding variable to `false` or `0` in your `.env` file: ```dotenv # Example: Disable kill notifications while keeping other notifications enabled -ENABLE_KILL_NOTIFICATIONS=false +KILL_NOTIFICATIONS_ENABLED=false ``` --- diff --git a/priv/posts/2025/06-21-webhooks.md b/priv/posts/2025/06-21-webhooks.md new file mode 100644 index 00000000..9a448205 --- /dev/null +++ b/priv/posts/2025/06-21-webhooks.md @@ -0,0 +1,555 @@ +%{ +title: "Real-Time Events API: WebSockets and Webhooks for Wanderer", +author: "Wanderer Team", +cover_image_uri: "/images/news/06-21-webhooks/webhooks-hero.png", +tags: ~w(api webhooks websocket real-time discord integration developer), +description: "Connect to Wanderer's real-time events using WebSockets or webhooks. Learn how to receive instant notifications for map changes, kills, and more - including a complete Discord integration guide." +} + +--- + +# Real-Time Events API: WebSockets and Webhooks for Wanderer + +We're excited to announce the launch of Wanderer's Real-Time Events API, giving developers and power users instant access to map events as they happen. Whether you're building a Discord bot, creating custom alerts, or integrating with external tools, our new API provides two powerful methods to receive real-time updates: WebSockets for persistent connections and webhooks for HTTP-based integrations. + +In the dynamic world of EVE Online wormhole mapping, every second counts. When a new signature appears, when a hostile kill occurs in your chain, or when a scout reports a new connection - having this information delivered instantly to your tools and teams can make all the difference. Our Real-Time Events API eliminates the need for polling and provides sub-second delivery of critical map events. + +## What's New? + +### WebSocket Connections +- **Persistent real-time streaming** of map events +- **Event filtering** to receive only the events you care about +- **Automatic reconnection** support with event backfill +- **Lightweight protocol** using Phoenix Channels V2 + +### Webhook Delivery +- **HTTP POST notifications** to your endpoints +- **HMAC-SHA256 signatures** for security +- **Automatic retries** with exponential backoff +- **Secret rotation** for enhanced security + +### Event Types Available +- **System Events**: `add_system`, `deleted_system`, `system_metadata_changed` +- **Connection Events**: `connection_added`, `connection_removed`, `connection_updated` +- **Signature Events**: `signature_added`, `signature_removed`, `signatures_updated` +- **Kill Events**: `map_kill` + +## Getting Started + +### Prerequisites +- A Wanderer map with API access enabled +- Your map API token (found in map settings) +- Basic programming knowledge for integration + +### Authentication +Both WebSocket and webhook APIs use your existing map API token for authentication. This token should be kept secure and never exposed in client-side code. 
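+As a rough sketch of how this is typically wired up (assuming a Node.js environment; the `MAP_API_TOKEN` and `WANDERER_MAP_ID` variable names below are placeholders, not part of the official configuration), the same token is read from the environment and then used both as a `Bearer` header for the REST endpoints and as the `token` connection parameter for the WebSocket:
+
+```javascript
+import { Socket } from "phoenix";
+import axios from "axios";
+
+// Keep the token out of source control; read it from the environment instead.
+const token = process.env.MAP_API_TOKEN;   // placeholder variable name
+const mapId = process.env.WANDERER_MAP_ID; // placeholder variable name
+
+// REST: the token travels in the Authorization header.
+const { data } = await axios.get(
+  `https://wanderer.ltd/api/maps/${mapId}/webhooks`,
+  { headers: { Authorization: `Bearer ${token}` } }
+);
+console.log("Configured webhooks:", data);
+
+// WebSocket: the same token is sent as a connection parameter.
+const socket = new Socket("wss://wanderer.ltd/socket/events", {
+  params: { token }
+});
+socket.connect();
+```
+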
+ +## WebSocket Quick Start + +Connect to Wanderer's WebSocket endpoint to receive a real-time stream of events: + +### JavaScript Example +```javascript +import { Socket } from "phoenix"; + +// Initialize connection +const socket = new Socket("wss://wanderer.ltd/socket/events", { + params: { token: "your-map-api-token" } +}); + +socket.connect(); + +// Join your map's event channel +const channel = socket.channel(`events:map:${mapId}`, { + // Optional: Filter specific events + events: ["add_system", "map_kill"] +}); + +// Handle events +channel.on("add_system", (event) => { + console.log("New system added:", event); +}); + +channel.on("map_kill", (event) => { + console.log("Kill detected:", event); +}); + +// Connect to the channel +channel.join() + .receive("ok", () => console.log("Connected to events")) + .receive("error", (err) => console.error("Connection failed:", err)); +``` + +### Event Filtering +You can subscribe to specific events or use `"*"` to receive all events: + +```javascript +// Subscribe to specific events only +const channel = socket.channel(`events:map:${mapId}`, { + events: ["add_system", "connection_added", "map_kill"] +}); + +// Or subscribe to all events +const channel = socket.channel(`events:map:${mapId}`, { + events: "*" +}); +``` + +## Webhook Setup + +Webhooks provide an alternative to WebSockets, delivering events via HTTP POST to your endpoint: + +### 1. Create a Webhook Subscription + +```bash +curl -X POST https://wanderer.ltd/api/maps/${MAP_ID}/webhooks \ + -H "Authorization: Bearer ${API_TOKEN}" \ + -H "Content-Type: application/json" \ + -d '{ + "url": "https://your-server.com/webhook", + "events": ["add_system", "map_kill"], + "active": true + }' +``` + +### 2. Handle Incoming Webhooks + +Your endpoint will receive POST requests with events: + +```javascript +// Express.js webhook handler +app.post('/webhook', (req, res) => { + // Verify signature + const signature = req.headers['x-wanderer-signature']; + const timestamp = req.headers['x-wanderer-timestamp']; + + if (!verifyWebhookSignature(req.body, signature, timestamp, webhookSecret)) { + return res.status(401).send('Invalid signature'); + } + + // Process the event + const event = req.body; + console.log(`Received ${event.type} event for map ${event.map_id}`); + + // Always respond quickly + res.status(200).send('OK'); + + // Process event asynchronously + processEvent(event); +}); +``` + +### 3. Signature Verification + +Verify webhook authenticity using HMAC-SHA256: + +```javascript +function verifyWebhookSignature(payload, signature, timestamp, secret) { + const data = `${timestamp}.${JSON.stringify(payload)}`; + const hmac = crypto.createHmac('sha256', secret); + const expectedSignature = `sha256=${hmac.update(data).digest('hex')}`; + + return crypto.timingSafeEqual( + Buffer.from(signature), + Buffer.from(expectedSignature) + ); +} +``` + +## Discord Integration Guide + +One of the most popular uses for real-time events is sending notifications to Discord. Here's how to integrate Wanderer events with Discord webhooks. 
+ +### Ready-Made Solution: Wanderer Notifier + +If you want a fully-featured Discord integration without writing any code, check out [Wanderer Notifier](https://wanderer.ltd/news/03-18-bots) - our official Discord bot that provides: +- Rich formatted notifications with images and embeds +- Kill tracking with zKillboard integration +- Character and system tracking +- Easy Docker deployment +- Premium features for map subscribers + +The examples below are for developers who want to build custom integrations or understand how the webhook system works. + +### Understanding Discord Webhooks + +Discord webhooks require messages in a specific format - you can't send raw Wanderer events directly. Discord expects either: +- A `content` field with plain text +- An `embeds` array with structured message objects + +Since Wanderer sends events as `{id, type, map_id, ts, payload}`, you'll need a small transformer service to wrap the data in Discord's format. You have two options: +1. **Simple text notifications** (minimal transformation) +2. **Rich embeds** (formatted messages with colors and fields) + +### Step 1: Create a Discord Webhook + +1. In your Discord server, go to Server Settings → Integrations → Webhooks +2. Click "New Webhook" and configure: + - Name: "Wanderer Events" + - Channel: Select your notification channel +3. Copy the webhook URL + +### Option A: Minimal Transformation (Simple Text) + +If you want the simplest possible integration, here's a minimal transformer that sends raw event data as text: + +```javascript +const express = require('express'); +const axios = require('axios'); + +const app = express(); +app.use(express.json()); + +const DISCORD_WEBHOOK_URL = process.env.DISCORD_WEBHOOK_URL; + +app.post('/webhook', async (req, res) => { + // Respond immediately + res.status(200).send('OK'); + + // Send raw event as Discord message + const event = req.body; + try { + await axios.post(DISCORD_WEBHOOK_URL, { + content: `**${event.type}** event in map: \`\`\`json\n${JSON.stringify(event.payload, null, 2)}\n\`\`\`` + }); + } catch (error) { + console.error('Discord error:', error); + } +}); + +app.listen(3000); +``` + +This sends events to Discord as formatted code blocks, preserving all the raw data. 
+ +### Option B: Rich Embed Transformer (Formatted Messages) + +For a better user experience with formatted messages, colors, and clickable links: + +```javascript +const express = require('express'); +const crypto = require('crypto'); +const axios = require('axios'); + +const app = express(); +app.use(express.json()); + +// Configuration +const WEBHOOK_SECRET = process.env.WEBHOOK_SECRET; +const DISCORD_WEBHOOK_URL = process.env.DISCORD_WEBHOOK_URL; + +// Event formatters for Discord +const formatters = { + add_system: (event) => ({ + embeds: [{ + title: "New System Added", + description: `System **${event.payload.name}** has been added to the map`, + color: 0x00ff00, + fields: [ + { name: "System ID", value: event.payload.solar_system_id, inline: true }, + { name: "Type", value: event.payload.type || "Unknown", inline: true } + ], + timestamp: event.ts + }] + }), + + map_kill: (event) => ({ + embeds: [{ + title: "Kill Detected", + description: `${event.payload.victim.ship} destroyed in ${event.payload.system_name}`, + color: 0xff0000, + fields: [ + { name: "Victim", value: event.payload.victim.name, inline: true }, + { name: "Ship", value: event.payload.victim.ship, inline: true }, + { name: "Value", value: `${(event.payload.value / 1000000).toFixed(1)}M ISK`, inline: true } + ], + url: `https://zkillboard.com/kill/${event.payload.killmail_id}`, + timestamp: event.ts + }] + }), + + connection_added: (event) => ({ + embeds: [{ + title: "New Connection", + description: `Connection established: **${event.payload.from_name}** → **${event.payload.to_name}**`, + color: 0x0099ff, + fields: [ + { name: "Type", value: event.payload.type || "Unknown", inline: true }, + { name: "Mass Status", value: event.payload.mass_status || "Fresh", inline: true } + ], + timestamp: event.ts + }] + }) +}; + +// Webhook endpoint +app.post('/webhook', async (req, res) => { + // Verify signature + const signature = req.headers['x-wanderer-signature']; + const timestamp = req.headers['x-wanderer-timestamp']; + + if (!verifySignature(req.body, signature, timestamp)) { + return res.status(401).send('Invalid signature'); + } + + // Respond immediately + res.status(200).send('OK'); + + // Process event + const event = req.body; + const formatter = formatters[event.type]; + + if (formatter) { + try { + const discordPayload = formatter(event); + await axios.post(DISCORD_WEBHOOK_URL, discordPayload); + } catch (error) { + console.error('Failed to send to Discord:', error); + } + } +}); + +function verifySignature(payload, signature, timestamp) { + const data = `${timestamp}.${JSON.stringify(payload)}`; + const hmac = crypto.createHmac('sha256', WEBHOOK_SECRET); + const expected = `sha256=${hmac.update(data).digest('hex')}`; + return crypto.timingSafeEqual(Buffer.from(signature), Buffer.from(expected)); +} + +app.listen(3000, () => { + console.log('Discord webhook transformer running on port 3000'); +}); +``` + +### Step 2: Deploy Your Transformer + +Deploy this service to any platform that can run Node.js applications: + +#### Using Docker: +```dockerfile +FROM node:18-alpine +WORKDIR /app +COPY package*.json ./ +RUN npm ci --only=production +COPY . . +EXPOSE 3000 +CMD ["node", "index.js"] +``` + +#### Using Docker Compose: +```yaml +version: '3' +services: + discord-transformer: + build: . 
+ environment: + - WEBHOOK_SECRET=${WEBHOOK_SECRET} + - DISCORD_WEBHOOK_URL=${DISCORD_WEBHOOK_URL} + ports: + - "3000:3000" + restart: unless-stopped +``` + +### Step 3: Register Your Webhook + +Register your transformer service with Wanderer: + +```bash +curl -X POST https://wanderer.ltd/api/maps/${MAP_ID}/webhooks \ + -H "Authorization: Bearer ${API_TOKEN}" \ + -H "Content-Type: application/json" \ + -d '{ + "url": "https://your-transformer.com/webhook", + "events": ["add_system", "map_kill", "connection_added"], + "active": true + }' +``` + +Your Discord channel will now receive formatted notifications for all map events! + +## Event Payload Examples + +### System Added Event +```json +{ + "id": "01J0XXXXXXXXXXXXXXXXXXX", + "type": "add_system", + "map_id": "550e8400-e29b-41d4-a716-446655440000", + "ts": "2025-06-21T12:34:56.789Z", + "payload": { + "solar_system_id": 31000001, + "name": "J123456", + "type": "wormhole", + "class": "C3", + "statics": ["C3", "HS"] + } +} +``` + +### Kill Event +```json +{ + "id": "01J0YYYYYYYYYYYYYYYYYYY", + "type": "map_kill", + "map_id": "550e8400-e29b-41d4-a716-446655440000", + "ts": "2025-06-21T12:35:00.123Z", + "payload": { + "killmail_id": 12345678, + "system_name": "J123456", + "victim": { + "name": "Pilot Name", + "ship": "Stratios", + "corporation": "Corp Name" + }, + "value": 250000000 + } +} +``` + +## Best Practices + +### For WebSocket Connections +- **Implement reconnection logic** with exponential backoff +- **Handle connection drops** gracefully +- **Use event filtering** to reduce bandwidth +- **Process events asynchronously** to avoid blocking + +### For Webhooks +- **Respond quickly** (within 3 seconds) to webhook deliveries +- **Verify signatures** on every request +- **Handle retries** idempotently +- **Monitor your endpoint** availability +- **Use HTTPS** exclusively + +### Security Considerations +- **Never expose** your API token in client-side code +- **Rotate webhook secrets** regularly +- **Validate all inputs** from events +- **Use environment variables** for sensitive configuration +- **Monitor for unusual activity** + +## API Reference + +### WebSocket Endpoints +- **Connection URL**: `wss://wanderer.ltd/socket/events` +- **Channel Topic**: `events:map:{map_id}` +- **Authentication**: Bearer token in connection params + +### REST API Endpoints +- **List Webhooks**: `GET /api/maps/{map_id}/webhooks` +- **Create Webhook**: `POST /api/maps/{map_id}/webhooks` +- **Update Webhook**: `PUT /api/maps/{map_id}/webhooks/{id}` +- **Delete Webhook**: `DELETE /api/maps/{map_id}/webhooks/{id}` +- **Rotate Secret**: `POST /api/maps/{map_id}/webhooks/{id}/rotate-secret` +- **Get Events** (backfill): `GET /api/maps/{map_id}/events?since={timestamp}` + +### Rate Limits +- **WebSocket Connections**: 10 per map +- **Webhook Subscriptions**: 5 per map +- **Event Delivery**: No limit (all events delivered) +- **API Requests**: 100 per minute + +## Advanced Use Cases + +### Multi-Map Monitoring +Connect to multiple maps simultaneously: + +```javascript +const maps = ['map-id-1', 'map-id-2', 'map-id-3']; +const channels = {}; + +maps.forEach(mapId => { + const channel = socket.channel(`events:map:${mapId}`, { + events: "*" + }); + + channel.on("*", (eventType, event) => { + console.log(`[${mapId}] ${eventType}:`, event); + }); + + channel.join(); + channels[mapId] = channel; +}); +``` + +### Event Aggregation +Build activity summaries: + +```javascript +const activityTracker = { + kills: 0, + systemsAdded: 0, + connectionsAdded: 0, + + 
handleEvent(event) { + switch(event.type) { + case 'map_kill': this.kills++; break; + case 'add_system': this.systemsAdded++; break; + case 'connection_added': this.connectionsAdded++; break; + } + }, + + getHourlyStats() { + return { + kills: this.kills, + systemsAdded: this.systemsAdded, + connectionsAdded: this.connectionsAdded, + timestamp: new Date() + }; + } +}; +``` + +### Custom Alerting +Create sophisticated alert conditions: + +```javascript +// Alert on high-value kills +channel.on("map_kill", (event) => { + if (event.payload.value > 1000000000) { // 1B+ ISK + sendUrgentAlert({ + title: "High Value Kill Detected!", + message: `${event.payload.victim.ship} worth ${event.payload.value / 1e9}B ISK destroyed`, + priority: "high" + }); + } +}); + +// Alert on new connections to specific systems +channel.on("connection_added", (event) => { + const watchedSystems = ["J123456", "J234567"]; + if (watchedSystems.includes(event.payload.to_name)) { + sendAlert({ + title: "Connection to Watched System", + message: `New connection to ${event.payload.to_name} from ${event.payload.from_name}` + }); + } +}); +``` + +## Coming Soon + +We're continuously improving our real-time events API. Upcoming features include: + +- **Batch event delivery** for high-volume maps +- **Historical event replay** for analysis +- **Event transformations** and filtering rules +- **Additional event types** (structure timers, ACL changes) + +## Get Support + +Need help with the Real-Time Events API? + +- **Documentation**: [Full API Reference](https://docs.wanderer.ltd/api/events) +- **Discord Community**: [Join our Discord](https://discord.gg/wanderer) + + +## Conclusion + +The Real-Time Events API opens up endless possibilities for integrating Wanderer with your tools and workflows. Whether you're sending notifications to Discord, building custom dashboards, or creating advanced alerting systems, you now have instant access to everything happening in your maps. + +Start building with real-time events today and take your wormhole operations to the next level! + +--- + +*The Real-Time Events API is available now for all Wanderer maps. No additional subscription required - if you have API access to a map, you can use webhooks and WebSockets.* \ No newline at end of file diff --git a/priv/repo/migrations/20250621183139_add_webhook_subscriptions.exs b/priv/repo/migrations/20250621183139_add_webhook_subscriptions.exs new file mode 100644 index 00000000..8009d3c3 --- /dev/null +++ b/priv/repo/migrations/20250621183139_add_webhook_subscriptions.exs @@ -0,0 +1,56 @@ +defmodule WandererApp.Repo.Migrations.AddWebhookSubscriptions do + @moduledoc """ + Updates resources based on their most recent snapshots. 
+ + This file was autogenerated with `mix ash_postgres.generate_migrations` + """ + + use Ecto.Migration + + def up do + create table(:map_webhook_subscriptions_v1, primary_key: false) do + add :id, :uuid, null: false, default: fragment("gen_random_uuid()"), primary_key: true + + add :map_id, + references(:maps_v1, + column: :id, + name: "map_webhook_subscriptions_v1_map_id_fkey", + type: :uuid, + prefix: "public" + ), + null: false + + add :url, :text, null: false + add :events, {:array, :text}, null: false, default: [] + add :active?, :boolean, null: false, default: true + add :last_delivery_at, :utc_datetime + add :last_error, :text + add :last_error_at, :utc_datetime + add :consecutive_failures, :bigint, null: false, default: 0 + + add :inserted_at, :utc_datetime_usec, + null: false, + default: fragment("(now() AT TIME ZONE 'utc')") + + add :updated_at, :utc_datetime_usec, + null: false, + default: fragment("(now() AT TIME ZONE 'utc')") + + add :encrypted_secret, :binary, null: false + end + + create unique_index(:map_webhook_subscriptions_v1, [:map_id, :url], + name: "map_webhook_subscriptions_v1_unique_url_per_map_index" + ) + end + + def down do + drop_if_exists unique_index(:map_webhook_subscriptions_v1, [:map_id, :url], + name: "map_webhook_subscriptions_v1_unique_url_per_map_index" + ) + + drop constraint(:map_webhook_subscriptions_v1, "map_webhook_subscriptions_v1_map_id_fkey") + + drop table(:map_webhook_subscriptions_v1) + end +end diff --git a/priv/resource_snapshots/repo/map_webhook_subscriptions_v1/20250621183139.json b/priv/resource_snapshots/repo/map_webhook_subscriptions_v1/20250621183139.json new file mode 100644 index 00000000..f4a159cc --- /dev/null +++ b/priv/resource_snapshots/repo/map_webhook_subscriptions_v1/20250621183139.json @@ -0,0 +1,180 @@ +{ + "attributes": [ + { + "allow_nil?": false, + "default": "fragment(\"gen_random_uuid()\")", + "generated?": false, + "primary_key?": true, + "references": null, + "size": null, + "source": "id", + "type": "uuid" + }, + { + "allow_nil?": false, + "default": "nil", + "generated?": false, + "primary_key?": false, + "references": { + "deferrable": false, + "destination_attribute": "id", + "destination_attribute_default": null, + "destination_attribute_generated": null, + "index?": false, + "match_type": null, + "match_with": null, + "multitenancy": { + "attribute": null, + "global": null, + "strategy": null + }, + "name": "map_webhook_subscriptions_v1_map_id_fkey", + "on_delete": null, + "on_update": null, + "primary_key?": true, + "schema": "public", + "table": "maps_v1" + }, + "size": null, + "source": "map_id", + "type": "uuid" + }, + { + "allow_nil?": false, + "default": "nil", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "url", + "type": "text" + }, + { + "allow_nil?": false, + "default": "[]", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "events", + "type": [ + "array", + "text" + ] + }, + { + "allow_nil?": false, + "default": "true", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "active?", + "type": "boolean" + }, + { + "allow_nil?": true, + "default": "nil", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "last_delivery_at", + "type": "utc_datetime" + }, + { + "allow_nil?": true, + "default": "nil", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "last_error", + 
"type": "text" + }, + { + "allow_nil?": true, + "default": "nil", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "last_error_at", + "type": "utc_datetime" + }, + { + "allow_nil?": false, + "default": "0", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "consecutive_failures", + "type": "bigint" + }, + { + "allow_nil?": false, + "default": "fragment(\"(now() AT TIME ZONE 'utc')\")", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "inserted_at", + "type": "utc_datetime_usec" + }, + { + "allow_nil?": false, + "default": "fragment(\"(now() AT TIME ZONE 'utc')\")", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "updated_at", + "type": "utc_datetime_usec" + }, + { + "allow_nil?": false, + "default": "nil", + "generated?": false, + "primary_key?": false, + "references": null, + "size": null, + "source": "encrypted_secret", + "type": "binary" + } + ], + "base_filter": null, + "check_constraints": [], + "custom_indexes": [], + "custom_statements": [], + "has_create_action": true, + "hash": "94ED15D366A0D310B7B8B462CFE1E8C21F78FBB82A28228DE9362F3B1F8BAA5C", + "identities": [ + { + "all_tenants?": false, + "base_filter": null, + "index_name": "map_webhook_subscriptions_v1_unique_url_per_map_index", + "keys": [ + { + "type": "atom", + "value": "map_id" + }, + { + "type": "atom", + "value": "url" + } + ], + "name": "unique_url_per_map", + "nils_distinct?": true, + "where": null + } + ], + "multitenancy": { + "attribute": null, + "global": null, + "strategy": null + }, + "repo": "Elixir.WandererApp.Repo", + "schema": null, + "table": "map_webhook_subscriptions_v1" +} \ No newline at end of file