{# ── Configured connections table ──────────────────── #}
{# Section heading plus column labels for the table of already-configured
   LLM connections. All visible strings go through `_()` (gettext) so they
   are translatable. NOTE(review): the rows/markup for the table itself are
   not visible in this chunk — presumably rendered by surrounding template
   code; confirm against the full file. #}
{{ _('LLM Connections') }}
{# Column headers: default-connection marker, display name, model id,
   API key, and tokens-per-minute rate limit. #}
{{ _('Default') }}
{{ _('Name') }}
{{ _('Model') }}
{{ _('API Key') }}
{{ _('TPM') }}
{# ── Add connection ─────────────────────────────────── #} {% set nf = plugin_form.new_connection.form %}
{# `nf` aliases the nested WTForms sub-form for creating a new connection;
   each `nf.<field>()` call below renders that field's input widget and
   `.label` renders its <label>. #}
{{ _('Add a connection') }}
{# Provider preset selector (e.g. a dropdown of known providers — exact
   widget type not visible here). #}
{{ nf.preset.label }} {{ nf.preset() }}
{# Free-text display name; the placeholder is itself translatable. #}
{{ nf.name.label }} {{ nf.name(placeholder=_('e.g. My OpenAI')) }}
{{ nf.model.label }} {{ nf.model() }}
{# API key input — optional: an empty key means a local model that needs
   no authentication (per the hint text below). #}
{{ _('API Key') }}
({{ _('leave blank for local') }})
{{ nf.api_key() }}
{# "show" is presumably a toggle to reveal the masked key — the JS/markup
   wiring is not visible in this chunk; confirm against the full template. #}
{{ _('show') }}
{# Optional custom API base URL (e.g. for self-hosted or proxy endpoints). #}
{{ _('API Endpoint') }}
({{ _('optional') }})
{{ nf.api_base() }}
{# Rate limit in tokens per minute; 0 disables throttling. #}
{{ _('Tokens/min limit') }}
({{ _('0 = unlimited') }})
{{ nf.tokens_per_minute() }}
{# Submit affordance for the add-connection form. #}
{{ _('+ Add connection') }}
{# ── Prompt configuration ────────────────────────────────── #}
{# Settings controlling what gets sent to the LLM when building a diff
   summary: context size around diff hunks and an optional custom prompt. #}
{{ _('Summary Prompt') }}
{# Number of unchanged context lines included around each diff hunk;
   trade-off between summary quality and token spend (default 2, per the
   help text below). #}
{{ plugin_form.llm_diff_context_lines.label }} {{ plugin_form.llm_diff_context_lines() }}
{{ _('Unchanged lines shown around each change in the diff sent to the LLM. More lines = more context but higher token cost. (default: 2)') }}
{# Custom prompt textarea rendered via the shared `render_field` macro
   (defined elsewhere — not visible in this chunk). Blank falls back to the
   built-in default prompt that requests structured JSON output. #}
{{ render_field(plugin_form.llm_summary_prompt) }}
{{ _('Instruction appended after the diff in every LLM call. Leave blank to use the built-in default (structured JSON output).') }}