{# ── Configured connections table ──────────────────── #}
{# Section heading for the list of already-configured LLM connections. #}
{{ _('LLM Connections') }}
{# Column headers for the connections table. The data rows are rendered
   outside this fragment — presumably by iterating the configured
   connections; confirm against the full template. #}
{{ _('Default') }} {{ _('Name') }} {{ _('Model') }} {{ _('API Key') }} {{ _('TPM') }}
{# ── Add connection ─────────────────────────────────── #} {% set nf = plugin_form.new_connection.form %}
{# Sub-form for registering a new LLM connection. `nf` aliases the nested
   WTForms FormField so each field below renders its label then its input. #}
{{ _('Add a connection') }}
{{ nf.preset.label }} {{ nf.preset() }}
{{ nf.name.label }} {{ nf.name(placeholder=_('e.g. My OpenAI')) }}
{{ nf.model.label }} {{ nf.model() }}
{# Render labels for api_key / tokens_per_minute like the sibling fields
   above — previously the bare inputs were emitted with no label, leaving
   them unidentifiable and without a label/input association. #}
{{ nf.api_key.label }} {{ nf.api_key() }}
{{ nf.tokens_per_minute.label }} {{ nf.tokens_per_minute() }}
{# ── Prompt configuration ────────────────────────────────── #}
{# Section heading for the summary-prompt tuning options. #}
{{ _('Summary Prompt') }}
{# Numeric setting: how many unchanged context lines to include around each
   change in the diff passed to the LLM (default 2, per the help text). #}
{{ plugin_form.llm_diff_context_lines.label }} {{ plugin_form.llm_diff_context_lines() }} {{ _('Unchanged lines shown around each change in the diff sent to the LLM. More lines = more context but higher token cost. (default: 2)') }}
{# Free-text prompt override rendered via the shared render_field macro
   (defined outside this fragment); blank falls back to the built-in
   default prompt, per the help text. #}
{{ render_field(plugin_form.llm_summary_prompt) }} {{ _('Instruction appended after the diff in every LLM call. Leave blank to use the built-in default (structured JSON output).') }}