diff --git a/CHANGELOG.md b/CHANGELOG.md index 72adff8..d0a69c4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,6 +5,10 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## 1.5.2 - 2026-03-09 + +- Added the JiraClient, allowing us to create, search, comment on Jira tickets for support processes. + ## 1.5.1 - 2026-02-05 - Handles cases where `$callers` and `$ancestors` may not be pids to avoid crashing metric handler. diff --git a/lib/zexbox/auto_escalation/adf_builder.ex b/lib/zexbox/auto_escalation/adf_builder.ex new file mode 100644 index 0000000..a05b155 --- /dev/null +++ b/lib/zexbox/auto_escalation/adf_builder.ex @@ -0,0 +1,195 @@ +defmodule Zexbox.AutoEscalation.AdfBuilder do + @moduledoc """ + Builds Atlassian Document Format (ADF) maps for Jira issue descriptions and comments. + + Mirrors `Opsbox::AutoEscalation::AdfBuilder`. Produces the same ADF structure so + headings, links, bullet lists, and the stack-trace expand render correctly in Jira. + + Unlike the Ruby version, `stacktrace` must be passed explicitly (as `__STACKTRACE__` + from a `rescue` block) because Elixir exceptions do not carry their own backtrace. + """ + + alias Zexbox.OpenTelemetry + + @doc """ + Builds an ADF description map for a new Jira issue. + + Structure: + 1. Telemetry links (Tempo | Kibana) + 2. Divider (if `custom_description` present) + 3. Custom description paragraphs (split on `\\n\\n`) + 4. User context bullet list (if non-empty) + 5. Additional context bullet list (if non-empty) + 6. H3 "Error Details" + 7. 
Exception summary + expandable stack trace + """ + @spec build_description( + Exception.t(), + map(), + map(), + Exception.stacktrace() | nil, + String.t() | nil + ) :: map() + def build_description( + exception, + user_context, + additional_context, + stacktrace \\ nil, + custom_description \\ nil + ) do + [] + |> build_body(exception, user_context, additional_context, stacktrace, custom_description) + |> doc() + end + + @doc """ + Builds an ADF comment map for an additional occurrence on an existing Jira issue. + + Structure: + 1. H2 "Additional Occurrence (action)" + 2. Telemetry links + 3. Divider + custom description (if present) + 4. Context bullet lists (if non-empty) + 5. H3 "Error Details" + 6. Exception summary + expandable stack trace + """ + @spec build_comment( + Exception.t(), + String.t(), + map(), + map(), + Exception.stacktrace() | nil, + String.t() | nil + ) :: map() + def build_comment( + exception, + action, + user_context, + additional_context, + stacktrace \\ nil, + custom_description \\ nil + ) do + [heading(2, "Additional Occurrence (#{action})")] + |> build_body(exception, user_context, additional_context, stacktrace, custom_description) + |> doc() + end + + # --- Private --- + + defp build_body( + acc, + exception, + user_context, + additional_context, + stacktrace, + custom_description + ) do + acc + |> append_telemetry() + |> append_description(custom_description) + |> append_context(user_context, additional_context) + |> append_error_details(exception, stacktrace) + end + + defp append_telemetry(acc) do + trace_url = OpenTelemetry.generate_trace_url() + kibana_url = OpenTelemetry.kibana_log_url() + + inline = + link_or_plain("Tempo Trace View", trace_url) ++ + [text(" | ")] ++ + link_or_plain("Kibana Logs", kibana_url) + + acc ++ [%{type: "paragraph", content: inline}] + end + + defp append_description(acc, nil), do: acc + defp append_description(acc, ""), do: acc + + defp append_description(acc, desc) do + case String.trim(desc) do + 
"" -> acc + trimmed -> acc ++ [divider() | custom_description_blocks(trimmed)] + end + end + + defp append_context(acc, user_context, additional_context) do + acc + |> append_single_context("User Context", user_context) + |> append_single_context("Additional Context", additional_context) + end + + defp append_single_context(acc, _label, context) when not is_map(context), do: acc + defp append_single_context(acc, _label, context) when map_size(context) == 0, do: acc + + defp append_single_context(acc, label, context) do + acc ++ [%{type: "paragraph", content: [bold(label)]}, key_value_bullet_list(context)] + end + + defp append_error_details(acc, exception, stacktrace) do + error_class = inspect(exception.__struct__) + message = Exception.message(exception) + summary = "#{error_class}: #{message}" + + backtrace = + case stacktrace do + nil -> "No stack trace available" + [] -> "No stack trace available" + st -> Exception.format_stacktrace(st) + end + + acc ++ + [ + heading(3, "Error Details"), + %{type: "paragraph", content: [text(summary)]}, + expand("Stack trace", [code_block(backtrace)]) + ] + end + + defp custom_description_blocks(desc) do + desc + |> String.split(~r/\n\n+/) + |> Enum.map(fn paragraph -> + %{type: "paragraph", content: [text(String.trim(paragraph))]} + end) + end + + defp key_value_bullet_list(hash) do + items = + Enum.map(hash, fn {key, value} -> + %{ + type: "listItem", + content: [ + %{ + type: "paragraph", + content: [bold(key), text(": "), text(to_string(value))] + } + ] + } + end) + + %{type: "bulletList", content: items} + end + + defp doc(content), do: %{version: 1, type: "doc", content: content} + defp text(str), do: %{type: "text", text: str} + defp bold(str), do: %{type: "text", text: to_string(str), marks: [%{type: "strong"}]} + defp divider, do: %{type: "rule"} + + defp heading(level, text_content), + do: %{type: "heading", attrs: %{level: level}, content: [text(text_content)]} + + defp code_block(content), + do: %{type: 
"codeBlock", content: [%{type: "text", text: to_string(content)}]} + + defp expand(title, content_blocks), + do: %{type: "expand", attrs: %{title: title}, content: content_blocks} + + defp link_or_plain(label, url) when is_binary(url) and url != "" do + [%{type: "text", text: label, marks: [%{type: "link", attrs: %{href: url}}]}] + end + + defp link_or_plain(label, _url) do + [%{type: "text", text: "#{label} (Missing)"}] + end +end diff --git a/lib/zexbox/jira_client.ex b/lib/zexbox/jira_client.ex new file mode 100644 index 0000000..7ad5075 --- /dev/null +++ b/lib/zexbox/jira_client.ex @@ -0,0 +1,193 @@ +defmodule Zexbox.JiraClient do + @moduledoc """ + HTTP client for the Jira Cloud REST API v3. + + Mirrors `Opsbox::JiraClient`. Authenticates with Basic auth using + `JIRA_USER_EMAIL_ADDRESS` and `JIRA_API_TOKEN` environment variables + (or `:jira_email` / `:jira_api_token` application config). + + ## Configuration + + ```elixir + config :zexbox, + jira_base_url: "https://your-org.atlassian.net", + jira_email: System.get_env("JIRA_USER_EMAIL_ADDRESS"), + jira_api_token: System.get_env("JIRA_API_TOKEN") + ``` + + All public functions return `{:ok, result}` or `{:error, reason}`. + """ + + @bug_fingerprint_field %{id: "customfield_13442", name: "Bug Fingerprint[Short text]"} + @zigl_team_field %{id: "customfield_10101", name: "ZIGL Team[Dropdown]"} + + @doc "Returns the bug fingerprint custom field metadata." + @spec bug_fingerprint_field() :: %{id: String.t(), name: String.t()} + def bug_fingerprint_field, do: @bug_fingerprint_field + + @doc "Returns the ZIGL team custom field metadata." + @spec zigl_team_field() :: %{id: String.t(), name: String.t()} + def zigl_team_field, do: @zigl_team_field + + @doc """ + Search for the latest issues matching a JQL query (max 50 results). + + - `jql` – JQL query string. + - `project_key` – optional; prepends `project = KEY AND` to the JQL. 
+ + Returns `{:ok, [issue_map]}` where each map includes a `"url"` browse key, + or `{:error, reason}` on failure. + """ + @spec search_latest_issues(String.t(), String.t() | nil) :: {:ok, [map()]} | {:error, term()} + def search_latest_issues(jql, project_key \\ nil) do + fetch_issues(build_query(jql, project_key)) + end + + @doc """ + Create a new Jira issue. + + - `project_key` – Jira project key (e.g. `"SS"`). + - `summary` – issue summary string. + - `description` – ADF map (already built; not converted). + - `issuetype` – issue type name (e.g. `"Bug"`). + - `priority` – priority name (e.g. `"High"`). + - `custom_fields` – optional map of custom field ID → value (string keys). + + Returns `{:ok, issue_map}` with a `"url"` browse key added, or `{:error, reason}`. + """ + @spec create_issue(String.t(), String.t(), map(), String.t(), String.t(), map()) :: + {:ok, map()} | {:error, term()} + def create_issue(project_key, summary, description, issuetype, priority, custom_fields \\ %{}) do + fields = + Map.merge( + %{ + "project" => %{"key" => project_key}, + "summary" => summary, + "description" => description, + "issuetype" => %{"name" => issuetype}, + "priority" => %{"name" => priority} + }, + custom_fields + ) + + post_issue(fields) + end + + @doc """ + Transition a Jira issue to a new status by name (case-insensitive match). + + - `issue_key` – issue key (e.g. `"SS-42"`). + - `status_name` – target status name (e.g. `"To do"`). + + Returns `{:ok, %{success: true, status: name}}` or `{:error, reason}`. 
+ """ + @spec transition_issue(String.t(), String.t()) :: {:ok, map()} | {:error, term()} + def transition_issue(issue_key, status_name) do + client = build_client() + + with {:ok, data} <- jira_get(client, "/rest/api/3/issue/#{issue_key}/transitions"), + transitions = Map.get(data, "transitions", []), + {:ok, target} <- find_transition(transitions, status_name), + {:ok, _resp} <- + jira_post(client, "/rest/api/3/issue/#{issue_key}/transitions", %{ + "transition" => %{"id" => target["id"]} + }) do + {:ok, %{success: true, status: get_in(target, ["to", "name"])}} + end + end + + @doc """ + Add a comment to an existing Jira issue. + + - `issue_key` – issue key (e.g. `"SS-42"`). + - `comment` – ADF map for the comment body (already built; not converted). + + Returns `{:ok, comment_map}` or `{:error, reason}`. + """ + @spec add_comment(String.t(), map()) :: {:ok, map()} | {:error, term()} + def add_comment(issue_key, comment), do: post_comment(issue_key, comment) + + # --- Private --- + + defp build_query(jql, nil), do: jql + defp build_query(jql, project_key), do: "project = #{project_key} AND #{jql}" + + defp fetch_issues(query) do + build_client() + |> jira_get("/rest/api/3/issue/search", + jql: query, + maxResults: 50, + fields: ["key", "id", "self", "status", "summary"] + ) + |> attach_issue_urls() + end + + defp post_issue(fields) do + build_client() + |> jira_post("/rest/api/3/issue", %{"fields" => fields}) + |> attach_issue_url() + end + + defp post_comment(issue_key, comment) do + build_client() + |> jira_post("/rest/api/3/issue/#{issue_key}/comment", %{"body" => comment}) + end + + defp attach_issue_urls({:ok, body}) do + issues = Map.get(body, "issues", []) + {:ok, Enum.map(issues, &Map.put(&1, "url", browse_url(&1["key"])))} + end + + defp attach_issue_urls({:error, _reason} = err), do: err + + defp attach_issue_url({:ok, result}), + do: {:ok, Map.put(result, "url", browse_url(result["key"]))} + + defp attach_issue_url({:error, _reason} = err), do: err + + 
defp browse_url(key), do: "#{config(:jira_base_url, nil)}/browse/#{key}" + + defp find_transition(transitions, status_name) do + case Enum.find(transitions, &matches_status?(&1, status_name)) do + nil -> {:error, "Cannot transition to '#{status_name}'"} + target -> {:ok, target} + end + end + + defp matches_status?(transition, status_name) do + to_name = get_in(transition, ["to", "name"]) |> to_string() + String.downcase(to_name) == String.downcase(status_name) + end + + defp jira_get(client, path, params \\ []) do + Req.get(client, url: path, params: params) + |> handle_response() + end + + defp jira_post(client, path, body) do + Req.post(client, url: path, json: body) + |> handle_response() + end + + defp handle_response({:ok, %{status: status, body: body}}) when status in 200..299, + do: {:ok, body || %{}} + + defp handle_response({:ok, %{status: status, body: body}}), + do: {:error, "HTTP #{status}: #{inspect(body)}"} + + defp handle_response({:error, reason}), + do: {:error, inspect(reason)} + + defp build_client do + email = config(:jira_email, System.get_env("JIRA_USER_EMAIL_ADDRESS", "")) + token = config(:jira_api_token, System.get_env("JIRA_API_TOKEN", "")) + + Req.new( + base_url: config(:jira_base_url, nil), + auth: {:basic, "#{email}:#{token}"}, + headers: [{"accept", "application/json"}] + ) + end + + defp config(key, default), do: Application.get_env(:zexbox, key, default) +end diff --git a/lib/zexbox/open_telemetry.ex b/lib/zexbox/open_telemetry.ex new file mode 100644 index 0000000..0cc851a --- /dev/null +++ b/lib/zexbox/open_telemetry.ex @@ -0,0 +1,136 @@ +defmodule Zexbox.OpenTelemetry do + @moduledoc """ + OpenTelemetry URL helpers for enriching Jira tickets with observability links. + + Mirrors `Opsbox::OpenTelemetry`. Reads from the current process's OTEL context + via `opentelemetry_api`. + + All functions return `nil` gracefully when OTEL is unconfigured or there is no + active span, so they are safe to call from any rescue block. 
+ + ## Configuration + + ```elixir + config :zexbox, + service_name: "my-app", + app_env: :production, # :production | :sandbox | :dev | :test + tempo_datasource_uid: nil # optional override for the Grafana Tempo datasource UID + ``` + """ + + @production_tempo_uid "een1tos42jnk0d" + @sandbox_tempo_uid "eehwibr22b6kgb" + @grafana_host "zappi.grafana.net" + @kibana_host "zappi.tools" + + @doc """ + Returns a Grafana Tempo trace URL for the current span, or `nil` if no active span + or if the environment is not `:production` or `:sandbox`. + """ + @spec generate_trace_url() :: String.t() | nil + def generate_trace_url do + build_trace_url(get_span_context()) + rescue + _e -> nil + end + + @doc """ + Returns a Kibana Discover URL for `:production` and `:sandbox` environments, `nil` otherwise. + Includes the current trace ID in the query when an active span is present. + """ + @spec kibana_log_url() :: String.t() | nil + def kibana_log_url do + build_kibana_url(get_span_context()) + rescue + _e -> nil + end + + # --- Private --- + + defp build_trace_url(nil), do: nil + + defp build_trace_url(context) do + if app_env() in [:production, :sandbox] && :otel_span.is_valid(context) do + trace_id = hex_trace_id(context) + pane_json = build_pane_json(trace_id) + "https://#{@grafana_host}/explore?schemaVersion=1&panes=#{Jason.encode!(pane_json)}" + end + end + + defp build_kibana_url(context) do + env = app_env() + + if env in [:production, :sandbox] do + service = Application.get_env(:zexbox, :service_name, "app") + app_encoded = URI.encode(service, &URI.char_unreserved?/1) + trace_part = trace_filter(context) + + "https://kibana.#{kibana_subdomain(env)}#{@kibana_host}/app/discover#/?_a=(columns:!(log.message),filters:!()," <> + "query:(language:kuery,query:'zappi.app:%20%22#{app_encoded}%22#{trace_part}')," <> + "sort:!(!('@timestamp',asc)))&_g=(filters:!(),time:(from:now-1d,to:now))" + end + end + + defp kibana_subdomain(:production), do: "" + defp kibana_subdomain(env), do: 
"#{env}." + + defp trace_filter(nil), do: "" + + defp trace_filter(context) do + if :otel_span.is_valid(context), + do: "%20AND%20%22#{hex_trace_id(context)}%22", + else: "" + end + + defp get_span_context do + case :otel_tracer.current_span_ctx() do + :undefined -> nil + context -> context + end + rescue + _e -> nil + catch + _kind, _e -> nil + end + + defp hex_trace_id(context) do + context + |> :otel_span.trace_id() + |> Integer.to_string(16) + |> String.pad_leading(32, "0") + |> String.downcase() + end + + defp build_pane_json(trace_id) do + uid = tempo_datasource_uid() + + %{ + plo: %{ + datasource: uid, + queries: [ + %{ + refId: "A", + datasource: %{type: "tempo", uid: uid}, + queryType: "traceql", + limit: 20, + tableType: "traces", + metricsQueryType: "range", + query: trace_id + } + ], + range: %{from: "now-1h", to: "now"} + } + } + end + + defp tempo_datasource_uid do + Application.get_env(:zexbox, :tempo_datasource_uid) || + case app_env() do + :production -> @production_tempo_uid + :sandbox -> @sandbox_tempo_uid + _env -> nil + end + end + + defp app_env, do: Application.get_env(:zexbox, :app_env, :test) +end diff --git a/mix.exs b/mix.exs index 5e5dcb2..74a2d22 100644 --- a/mix.exs +++ b/mix.exs @@ -4,7 +4,7 @@ defmodule Zexbox.MixProject do def project do [ app: :zexbox, - version: "1.5.1", + version: "1.5.2", elixir: "~> 1.14", start_permanent: Mix.env() == :prod, dialyzer: [plt_add_apps: [:mix, :ex_unit]], @@ -37,9 +37,12 @@ defmodule Zexbox.MixProject do {:doctor, "~> 0.22.0", only: [:dev, :test]}, {:ex_doc, "~> 0.35.1", only: :dev, runtime: false}, {:instream, "~> 2.2"}, + {:jason, "~> 1.4"}, {:ldclient, "~> 3.8.0", hex: :launchdarkly_server_sdk}, {:mix_audit, "~> 2.0", only: [:dev, :test], runtime: false}, {:mock, "~> 0.3.0", only: :test}, + {:opentelemetry_api, "~> 1.4"}, + {:req, "~> 0.5"}, {:sobelow, "~> 0.8", only: [:dev, :test]}, {:telemetry, "~> 1.3"} ] diff --git a/mix.lock b/mix.lock index f3472fc..079c4b4 100644 --- a/mix.lock +++ 
b/mix.lock @@ -11,8 +11,10 @@ "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, "ex_doc": {:hex, :ex_doc, "0.35.1", "de804c590d3df2d9d5b8aec77d758b00c814b356119b3d4455e4b8a8687aecaf", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "2121c6402c8d44b05622677b761371a759143b958c6c19f6558ff64d0aed40df"}, "file_system": {:hex, :file_system, "1.1.0", "08d232062284546c6c34426997dd7ef6ec9f8bbd090eb91780283c9016840e8f", [:mix], [], "hexpm", "bfcf81244f416871f2a2e15c1b515287faa5db9c6bcf290222206d120b3d43f6"}, + "finch": {:hex, :finch, "0.21.0", "b1c3b2d48af02d0c66d2a9ebfb5622be5c5ecd62937cf79a88a7f98d48a8290c", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "87dc6e169794cb2570f75841a19da99cfde834249568f2a5b121b809588a4377"}, "gun": {:hex, :gun, "2.2.0", "b8f6b7d417e277d4c2b0dc3c07dfdf892447b087f1cc1caff9c0f556b884e33d", [:make, :rebar3], [{:cowlib, ">= 2.15.0 and < 3.0.0", [hex: :cowlib, repo: "hexpm", optional: false]}], "hexpm", "76022700c64287feb4df93a1795cff6741b83fb37415c40c34c38d2a4645261a"}, "hackney": {:hex, :hackney, "1.25.0", 
"390e9b83f31e5b325b9f43b76e1a785cbdb69b5b6cd4e079aa67835ded046867", [:rebar3], [{:certifi, "~> 2.15.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.4", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.1", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "7209bfd75fd1f42467211ff8f59ea74d6f2a9e81cbcee95a56711ee79fd6b1d4"}, + "hpax": {:hex, :hpax, "1.0.3", "ed67ef51ad4df91e75cc6a1494f851850c0bd98ebc0be6e81b026e765ee535aa", [:mix], [], "hexpm", "8eab6e1cfa8d5918c2ce4ba43588e894af35dbd8e91e6e55c817bca5847df34a"}, "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"}, "influxql": {:hex, :influxql, "0.2.1", "71bfd5c0d81bf870f239baf3357bf5226b44fce16e1b9399ba1368203ca71245", [:mix], [], "hexpm", "75faf04960d6830ca0827869eaac1ba092655041c5e96deb2a588bafb601205c"}, "instream": {:hex, :instream, "2.2.1", "8f27352b0490f3d43387d9dfb926e6235570ea8a52b3675347c98efd7863a86d", [:mix], [{:hackney, "~> 1.1", [hex: :hackney, repo: "hexpm", optional: false]}, {:influxql, "~> 0.2.0", [hex: :influxql, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: false]}], "hexpm", "e20c7cc24991fdd228fa93dc080ee7b9683f4c1509b3b718fdd385128d018c2a"}, @@ -25,14 +27,20 @@ "makeup_erlang": {:hex, :makeup_erlang, 
"1.0.2", "03e1804074b3aa64d5fad7aa64601ed0fb395337b982d9bcf04029d68d51b6a7", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "af33ff7ef368d5893e4a267933e7744e46ce3cf1f61e2dccf53a111ed3aa3727"}, "meck": {:hex, :meck, "0.9.2", "85ccbab053f1db86c7ca240e9fc718170ee5bda03810a6292b5306bf31bae5f5", [:rebar3], [], "hexpm", "81344f561357dc40a8344afa53767c32669153355b626ea9fcbc8da6b3045826"}, "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"}, + "mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"}, "mimerl": {:hex, :mimerl, "1.4.0", "3882a5ca67fbbe7117ba8947f27643557adec38fa2307490c4c4207624cb213b", [:rebar3], [], "hexpm", "13af15f9f68c65884ecca3a3891d50a7b57d82152792f3e19d88650aa126b144"}, + "mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"}, "mix_audit": {:hex, :mix_audit, "2.1.5", "c0f77cee6b4ef9d97e37772359a187a166c7a1e0e08b50edf5bf6959dfe5a016", [:make, :mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.11", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "87f9298e21da32f697af535475860dc1d3617a010e0b418d2ec6142bc8b42d69"}, "mock": {:hex, :mock, "0.3.9", "10e44ad1f5962480c5c9b9fa779c6c63de9bd31997c8e04a853ec990a9d841af", [:mix], [{:meck, "~> 0.9.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "9e1b244c4ca2551bb17bb8415eed89e40ee1308e0fbaed0a4fdfe3ec8a4adbd3"}, "nimble_csv": {:hex, 
:nimble_csv, "1.3.0", "b7f998dc62b222bce9596e46f028c7a5af04cb5dde6df2ea197c583227c54971", [:mix], [], "hexpm", "41ccdc18f7c8f8bb06e84164fc51635321e80d5a3b450761c4997d620925d619"}, + "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"}, "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"}, + "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"}, + "opentelemetry_api": {:hex, :opentelemetry_api, "1.5.0", "1a676f3e3340cab81c763e939a42e11a70c22863f645aa06aafefc689b5550cf", [:mix, :rebar3], [], "hexpm", "f53ec8a1337ae4a487d43ac89da4bd3a3c99ddf576655d071deed8b56a2d5dda"}, "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"}, "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"}, "quickrand": {:hex, :quickrand, "2.0.7", "d2bd76676a446e6a058d678444b7fda1387b813710d1af6d6e29bb92186c8820", [:rebar3], [], "hexpm", "b8acbf89a224bc217c3070ca8bebc6eb236dbe7f9767993b274084ea044d35f0"}, + "req": {:hex, :req, "0.5.17", "0096ddd5b0ed6f576a03dde4b158a0c727215b15d2795e59e0916c6971066ede", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", 
optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "0b8bc6ffdfebbc07968e59d3ff96d52f2202d0536f10fef4dc11dc02a2a43e39"}, "shotgun": {:hex, :shotgun, "1.2.1", "a720063b49a763a97b245cc1ab6ee34e0e50d1ef61858e080db8e3b0dcd31af2", [:rebar3], [{:gun, "2.2.0", [hex: :gun, repo: "hexpm", optional: false]}], "hexpm", "a5ed7a1ff851419a70e292c4e2649c4d2c633141eb9a3432a4896c72b6d3f212"}, "sobelow": {:hex, :sobelow, "0.14.0", "dd82aae8f72503f924fe9dd97ffe4ca694d2f17ec463dcfd365987c9752af6ee", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "7ecf91e298acfd9b24f5d761f19e8f6e6ac585b9387fb6301023f1f2cd5eed5f"}, "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"}, diff --git a/test/zexbox/auto_escalation/adf_builder_test.exs b/test/zexbox/auto_escalation/adf_builder_test.exs new file mode 100644 index 0000000..7908236 --- /dev/null +++ b/test/zexbox/auto_escalation/adf_builder_test.exs @@ -0,0 +1,219 @@ +defmodule Zexbox.AutoEscalation.AdfBuilderTest do + use ExUnit.Case + import Mock + alias Zexbox.AutoEscalation.AdfBuilder + + defp with_telemetry_urls(trace, kibana, fun) do + with_mocks([ + {Zexbox.OpenTelemetry, [], + [ + generate_trace_url: fn -> trace end, + kibana_log_url: fn -> kibana end + ]} + ]) do + fun.() + end + end + + defp with_all_urls(fun), + do: + with_telemetry_urls( + "https://grafana.example.com/trace", + "https://kibana.example.com/logs", + fun + ) + + defp with_no_urls(fun), do: with_telemetry_urls(nil, nil, fun) + + defp runtime_error(msg \\ "Something broke"), do: %RuntimeError{message: msg} + + describe "build_description/5" do + test "returns an ADF doc map 
with correct version and type" do + with_all_urls(fn -> + result = AdfBuilder.build_description(runtime_error(), %{}, %{}) + assert result.version == 1 + assert result.type == "doc" + assert is_list(result.content) + end) + end + + test "first block is a telemetry paragraph with Tempo and Kibana links" do + with_all_urls(fn -> + result = AdfBuilder.build_description(runtime_error(), %{}, %{}) + [telemetry | _rest] = result.content + assert telemetry.type == "paragraph" + json = Jason.encode!(telemetry) + assert json =~ "Tempo Trace View" + assert json =~ "https://grafana.example.com/trace" + assert json =~ "Kibana Logs" + assert json =~ "https://kibana.example.com/logs" + end) + end + + test "shows '(Missing)' for unavailable telemetry URLs" do + with_no_urls(fn -> + result = AdfBuilder.build_description(runtime_error(), %{}, %{}) + [telemetry | _rest] = result.content + json = Jason.encode!(telemetry) + assert json =~ "Tempo Trace View (Missing)" + assert json =~ "Kibana Logs (Missing)" + end) + end + + test "includes Error Details heading, exception class, message, and stack trace expand" do + with_all_urls(fn -> + result = AdfBuilder.build_description(runtime_error("boom"), %{}, %{}) + json = Jason.encode!(result) + assert json =~ "Error Details" + assert json =~ "RuntimeError" + assert json =~ "boom" + assert json =~ "Stack trace" + assert json =~ "No stack trace available" + end) + end + + test "formats provided stacktrace in the expand block" do + with_all_urls(fn -> + stacktrace = [{MyModule, :my_fn, 2, [file: ~c"lib/my_module.ex", line: 42]}] + result = AdfBuilder.build_description(runtime_error(), %{}, %{}, stacktrace) + json = Jason.encode!(result) + assert json =~ "Stack trace" + assert json =~ "my_module.ex" + end) + end + + test "includes custom_description paragraphs above Error Details" do + with_all_urls(fn -> + result = + AdfBuilder.build_description( + runtime_error(), + %{}, + %{}, + nil, + "This happened during sync." 
+ ) + + json = Jason.encode!(result) + assert json =~ "This happened during sync." + assert json =~ "Error Details" + + {cd_pos, _len} = :binary.match(json, "This happened during sync.") + {ed_pos, _len} = :binary.match(json, "Error Details") + assert cd_pos < ed_pos + end) + end + + test "splits custom_description on double newlines into multiple paragraphs" do + with_all_urls(fn -> + result = + AdfBuilder.build_description(runtime_error(), %{}, %{}, nil, "First.\n\nSecond.") + + json = Jason.encode!(result) + assert json =~ "First." + assert json =~ "Second." + end) + end + + test "adds a divider before custom_description when present" do + with_all_urls(fn -> + result = + AdfBuilder.build_description(runtime_error(), %{}, %{}, nil, "Some context.") + + json = Jason.encode!(result) + assert json =~ ~s("type":"rule") + end) + end + + test "does not add a divider when custom_description is nil" do + with_all_urls(fn -> + result = AdfBuilder.build_description(runtime_error(), %{}, %{}) + json = Jason.encode!(result) + refute json =~ ~s("type":"rule") + end) + end + + test "includes user_context as a bold key-value bullet list" do + with_all_urls(fn -> + result = + AdfBuilder.build_description(runtime_error(), %{email: "u@example.com"}, %{}) + + json = Jason.encode!(result) + assert json =~ "User Context" + assert json =~ "email" + assert json =~ "u@example.com" + end) + end + + test "includes additional_context as a bold key-value bullet list" do + with_all_urls(fn -> + result = AdfBuilder.build_description(runtime_error(), %{}, %{basket_id: 42}) + json = Jason.encode!(result) + assert json =~ "Additional Context" + assert json =~ "basket_id" + assert json =~ "42" + end) + end + + test "omits User Context section when empty" do + with_all_urls(fn -> + result = AdfBuilder.build_description(runtime_error(), %{}, %{}) + json = Jason.encode!(result) + refute json =~ "User Context" + end) + end + + test "omits Additional Context section when empty" do + with_all_urls(fn 
-> + result = AdfBuilder.build_description(runtime_error(), %{}, %{}) + json = Jason.encode!(result) + refute json =~ "Additional Context" + end) + end + end + + describe "build_comment/6" do + test "starts with an Additional Occurrence heading including action name" do + with_all_urls(fn -> + result = AdfBuilder.build_comment(runtime_error(), "checkout", %{}, %{}) + json = Jason.encode!(result) + assert json =~ "Additional Occurrence (checkout)" + end) + end + + test "includes telemetry paragraph" do + with_all_urls(fn -> + result = AdfBuilder.build_comment(runtime_error(), "pay", %{}, %{}) + json = Jason.encode!(result) + assert json =~ "Tempo Trace View" + end) + end + + test "includes exception details and stack trace" do + with_all_urls(fn -> + result = AdfBuilder.build_comment(runtime_error("boom"), "pay", %{}, %{}) + json = Jason.encode!(result) + assert json =~ "RuntimeError" + assert json =~ "boom" + assert json =~ "Stack trace" + end) + end + + test "includes context bullet lists when provided" do + with_all_urls(fn -> + result = + AdfBuilder.build_comment( + runtime_error(), + "checkout", + %{email: "u@example.com"}, + %{basket_id: 123} + ) + + json = Jason.encode!(result) + assert json =~ "User Context" + assert json =~ "u@example.com" + assert json =~ "Additional Context" + assert json =~ "123" + end) + end + end +end diff --git a/test/zexbox/jira_client_test.exs b/test/zexbox/jira_client_test.exs new file mode 100644 index 0000000..4936ab7 --- /dev/null +++ b/test/zexbox/jira_client_test.exs @@ -0,0 +1,186 @@ +defmodule Zexbox.JiraClientTest do + use ExUnit.Case + import Mock + alias Zexbox.JiraClient + + @base_url "https://zigroup.atlassian.net" + + setup do + Application.put_env(:zexbox, :jira_base_url, @base_url) + Application.put_env(:zexbox, :jira_email, "test@example.com") + Application.put_env(:zexbox, :jira_api_token, "test-token") + + on_exit(fn -> + Application.delete_env(:zexbox, :jira_base_url) + Application.delete_env(:zexbox, 
:jira_email)
+      Application.delete_env(:zexbox, :jira_api_token)
+    end)
+
+    :ok
+  end
+
+  describe "bug_fingerprint_field/0" do # static custom-field metadata; test pins it against Jira schema drift
+    test "returns the correct field metadata" do
+      assert %{id: "customfield_13442", name: "Bug Fingerprint[Short text]"} =
+               JiraClient.bug_fingerprint_field()
+    end
+  end
+
+  describe "zigl_team_field/0" do # static custom-field metadata, same pattern as bug_fingerprint_field/0
+    test "returns the correct field metadata" do
+      assert %{id: "customfield_10101", name: "ZIGL Team[Dropdown]"} =
+               JiraClient.zigl_team_field()
+    end
+  end
+
+  describe "search_latest_issues/2" do # Req mocked wholesale: new/1 -> :mock_client sentinel, get/2 -> canned payload; no HTTP traffic
+    test_with_mock "returns issues with url keys added on success", Req,
+      new: fn _opts -> :mock_client end,
+      get: fn :mock_client, _opts ->
+        {:ok,
+         %{
+           status: 200,
+           body: %{
+             "issues" => [
+               %{
+                 "key" => "SS-1",
+                 "id" => "10001",
+                 "self" => "#{@base_url}/rest/api/3/issue/10001"
+               }
+             ]
+           }
+         }}
+      end do
+      assert {:ok, [issue]} = JiraClient.search_latest_issues("status = Open", "SS")
+      assert issue["key"] == "SS-1"
+      assert issue["url"] == "#{@base_url}/browse/SS-1" # client adds a browse URL derived from key + base_url
+    end
+
+    test_with_mock "returns empty list when no issues found", Req,
+      new: fn _opts -> :mock_client end,
+      get: fn :mock_client, _opts ->
+        {:ok, %{status: 200, body: %{"issues" => []}}}
+      end do
+      assert {:ok, []} = JiraClient.search_latest_issues("status = Open") # second arg (project) presumably optional here — defaulting behavior not visible from this file
+    end
+
+    test_with_mock "returns error on non-2xx response", Req,
+      new: fn _opts -> :mock_client end,
+      get: fn :mock_client, _opts ->
+        {:ok, %{status: 401, body: %{"message" => "Unauthorized"}}}
+      end do
+      assert {:error, reason} = JiraClient.search_latest_issues("status = Open")
+      assert reason =~ "HTTP 401" # error reason embeds the status code
+    end
+
+    test_with_mock "returns error on request failure", Req,
+      new: fn _opts -> :mock_client end,
+      get: fn :mock_client, _opts ->
+        {:error, %{reason: :econnrefused}} # transport-level failure, no HTTP status at all
+      end do
+      assert {:error, _reason} = JiraClient.search_latest_issues("status = Open")
+    end
+  end
+
+  describe "create_issue/6" do
+    test_with_mock "creates issue and adds url to result", Req,
+      new: fn _opts -> :mock_client end,
+      post: fn :mock_client, _opts ->
+        {:ok,
+         %{
+           status: 201,
+           body: %{
+             "key" => "SS-99",
+             "id" => "10099",
+             "self" => "#{@base_url}/rest/api/3/issue/10099"
+           }
+         }}
+      end do
+      assert {:ok, result} =
+               JiraClient.create_issue(
+                 "SS",
+                 "checkout: RuntimeError",
+                 %{version: 1, type: "doc", content: []}, # minimal valid ADF document
+                 "Bug",
+                 "High",
+                 %{"customfield_13442" => "checkout::RuntimeError"} # bug-fingerprint custom field, see bug_fingerprint_field/0
+               )
+
+      assert result["key"] == "SS-99"
+      assert result["url"] == "#{@base_url}/browse/SS-99" # browse URL added to the API response body
+    end
+
+    test_with_mock "returns error on non-2xx response", Req,
+      new: fn _opts -> :mock_client end,
+      post: fn :mock_client, _opts ->
+        {:ok, %{status: 400, body: %{"errorMessages" => ["Invalid project"]}}}
+      end do
+      assert {:error, reason} =
+               JiraClient.create_issue("INVALID", "test", %{}, "Bug", "High")
+
+      assert reason =~ "HTTP 400"
+    end
+  end
+
+  describe "transition_issue/2" do # two-step flow: GET available transitions, then POST the matching transition id
+    test_with_mock "transitions issue to target status", Req,
+      new: fn _opts -> :mock_client end,
+      get: fn :mock_client, _opts ->
+        {:ok,
+         %{
+           status: 200,
+           body: %{
+             "transitions" => [
+               %{"id" => "11", "to" => %{"name" => "To do"}},
+               %{"id" => "21", "to" => %{"name" => "In Progress"}}
+             ]
+           }
+         }}
+      end,
+      post: fn :mock_client, opts ->
+        assert opts[:json] == %{"transition" => %{"id" => "11"}} # verifies the id resolved from the GET listing is the one POSTed
+        {:ok, %{status: 204, body: nil}}
+      end do
+      assert {:ok, %{success: true, status: "To do"}} =
+               JiraClient.transition_issue("SS-1", "To do")
+    end
+
+    test_with_mock "returns error when target status not found", Req,
+      new: fn _opts -> :mock_client end,
+      get: fn :mock_client, _opts ->
+        {:ok,
+         %{
+           status: 200,
+           body: %{"transitions" => [%{"id" => "11", "to" => %{"name" => "Done"}}]}
+         }}
+      end do
+      assert {:error, reason} = JiraClient.transition_issue("SS-1", "Nonexistent")
+      assert reason =~ "Cannot transition to" # no POST mock defined: failure must occur before any POST
+    end
+  end
+
+  describe "add_comment/2" do
+    test_with_mock "adds comment and returns the response", Req,
+      new: fn _opts -> :mock_client end,
+      post: fn :mock_client, _opts ->
+        {:ok,
+         %{
+           status: 201,
+           body: %{"id" => "30001", "body" => %{}, "author" => %{"displayName" => "Bot"}}
+         }}
+      end do
+      comment = %{version: 1, type: "doc", content: []} # minimal valid ADF document
+      assert {:ok, result} = JiraClient.add_comment("SS-42", comment)
+      assert result["id"] == "30001"
+    end
+
+    test_with_mock "returns error on non-2xx response", Req,
+      new: fn _opts -> :mock_client end,
+      post: fn :mock_client, _opts ->
+        {:ok, %{status: 404, body: %{"errorMessages" => ["Issue not found"]}}}
+      end do
+      assert {:error, reason} = JiraClient.add_comment("SS-999", %{})
+      assert reason =~ "HTTP 404"
+    end
+  end
+end