APIv2: docs-related changes (#4453)

* Order QueryResult in API response

This improves the experience in the docs when querying interactively

* More utm in seeds

* More improved seeds

* Proper QueryResult.query structure

* Allow docs to query /api/v2/query and sites

The new endpoints use cookie authentication. The docs site uses
these endpoints to provide an interactive docs editor.

* query_result ordering test

* Refresh router

* Test module name
This commit is contained in:
Karl-Aksel Puulmann 2024-08-22 10:44:41 +03:00 committed by GitHub
parent 83b7a7ec53
commit 11acadfde9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 145 additions and 21 deletions

View File

@ -23,7 +23,7 @@ defmodule Plausible.Stats do
optimized_query
|> SQL.QueryBuilder.build(site)
|> ClickhouseRepo.all(query: query)
|> QueryResult.from(optimized_query)
|> QueryResult.from(site, optimized_query)
end
def breakdown(site, query, metrics, pagination) do

View File

@ -34,7 +34,7 @@ defmodule Plausible.Stats.Breakdown do
q
|> apply_pagination(pagination)
|> ClickhouseRepo.all(query: query)
|> QueryResult.from(query_with_metrics)
|> QueryResult.from(site, query_with_metrics)
|> build_breakdown_result(query_with_metrics, metrics)
|> maybe_add_time_on_page(site, query_with_metrics, metrics)
|> update_currency_metrics(site, query_with_metrics)

View File

@ -1,15 +1,20 @@
defmodule Plausible.Stats.QueryResult do
@moduledoc false
@moduledoc """
This struct contains the (JSON-encodable) response for a query and
is responsible for building it from database query results.
For the convenience of API docs and consumers, the JSON result
produced by Jason.encode(query_result) is ordered.
"""
alias Plausible.Stats.Util
alias Plausible.Stats.Filters
@derive Jason.Encoder
defstruct results: [],
query: nil,
meta: %{}
meta: %{},
query: nil
def from(results, query) do
def from(results, site, query) do
results_list =
results
|> Enum.map(fn entry ->
@ -22,14 +27,17 @@ defmodule Plausible.Stats.QueryResult do
struct!(
__MODULE__,
results: results_list,
query: %{
metrics: query.metrics,
date_range: [query.date_range.first, query.date_range.last],
filters: query.filters,
dimensions: query.dimensions,
order_by: query.order_by |> Enum.map(&Tuple.to_list/1)
},
meta: meta(query)
meta: meta(query),
query:
Jason.OrderedObject.new(
site_id: site.domain,
metrics: query.metrics,
date_range: [query.date_range.first, query.date_range.last],
filters: query.filters,
dimensions: query.dimensions,
order_by: query.order_by |> Enum.map(&Tuple.to_list/1),
include: query.include |> Map.filter(fn {_key, val} -> val end)
)
)
end
@ -72,3 +80,10 @@ defmodule Plausible.Stats.QueryResult do
|> Enum.into(%{})
end
end
defimpl Jason.Encoder, for: Plausible.Stats.QueryResult do
  # Encodes a QueryResult with a stable top-level key order
  # (results, meta, query) so API consumers and the interactive docs
  # always see the same JSON shape.
  def encode(%Plausible.Stats.QueryResult{} = query_result, opts) do
    ordered =
      Jason.OrderedObject.new(
        results: query_result.results,
        meta: query_result.meta,
        query: query_result.query
      )

    Jason.Encoder.encode(ordered, opts)
  end
end

View File

@ -40,7 +40,7 @@ defmodule Plausible.Stats.Timeseries do
q
|> ClickhouseRepo.all(query: query)
|> QueryResult.from(query_with_metrics)
|> QueryResult.from(site, query_with_metrics)
|> build_timeseries_result(query_with_metrics, currency)
|> transform_keys(%{group_conversion_rate: :conversion_rate})
end

View File

@ -8,7 +8,8 @@ defmodule PlausibleWeb.AuthorizeSiteAccess do
def call(conn, allowed_roles) do
site =
Repo.get_by(Plausible.Site,
domain: conn.path_params["domain"] || conn.path_params["website"]
domain:
conn.path_params["domain"] || conn.path_params["website"] || conn.params["site_id"]
)
shared_link_auth = conn.params["auth"]

View File

@ -181,6 +181,12 @@ defmodule PlausibleWeb.Router do
post "/query", ExternalQueryApiController, :query
end
scope "/api/docs", PlausibleWeb.Api do
pipe_through :internal_stats_api
post "/query", ExternalQueryApiController, :query
end
on_ee do
scope "/api/v1/sites", PlausibleWeb.Api do
pipe_through :public_api

View File

@ -170,22 +170,34 @@ geolocations = [
[]
]
sources = ["", "Facebook", "Twitter", "DuckDuckGo", "Google"]
utm_medium = %{
"" => ["email", ""],
"Facebook" => ["social"],
"Twitter" => ["social"]
}
native_stats_range
|> Enum.with_index()
|> Enum.flat_map(fn {date, index} ->
Enum.map(0..Enum.random(1..500), fn _ ->
geolocation = Enum.random(geolocations)
referrer_source = Enum.random(sources)
[
site_id: site.id,
hostname: Enum.random(["en.dummy.site", "es.dummy.site", "dummy.site"]),
timestamp: put_random_time.(date, index),
referrer_source: Enum.random(["", "Facebook", "Twitter", "DuckDuckGo", "Google"]),
referrer_source: referrer_source,
browser: Enum.random(["Microsoft Edge", "Chrome", "curl", "Safari", "Firefox", "Vivaldi"]),
browser_version: to_string(Enum.random(0..50)),
screen_size: Enum.random(["Mobile", "Tablet", "Desktop", "Laptop"]),
operating_system: Enum.random(["Windows", "Mac", "GNU/Linux"]),
operating_system_version: to_string(Enum.random(0..15)),
utm_medium: Enum.random(Map.get(utm_medium, referrer_source, [""])),
utm_source: String.downcase(referrer_source),
utm_campaign: Enum.random(["", "Referral", "Advertisement", "Email"]),
pathname:
Enum.random([
@ -197,7 +209,14 @@ native_stats_range
"/docs/1",
"/docs/2" | long_random_paths
]),
user_id: Enum.random(1..1200)
user_id: Enum.random(1..1200),
"meta.key": ["url", "logged_in", "is_customer", "amount"],
"meta.value": [
Enum.random(long_random_urls),
Enum.random(["true", "false"]),
Enum.random(["true", "false"]),
to_string(Enum.random(1..9000))
]
]
|> Keyword.merge(geolocation)
|> then(&Plausible.Factory.build(:pageview, &1))
@ -211,6 +230,8 @@ native_stats_range
Enum.map(0..Enum.random(1..50), fn _ ->
geolocation = Enum.random(geolocations)
referrer_source = Enum.random(sources)
[
name: goal4.event_name,
site_id: site.id,
@ -222,6 +243,8 @@ native_stats_range
screen_size: Enum.random(["Mobile", "Tablet", "Desktop", "Laptop"]),
operating_system: Enum.random(["Windows", "Mac", "GNU/Linux"]),
operating_system_version: to_string(Enum.random(0..15)),
utm_medium: Enum.random(Map.get(utm_medium, referrer_source, [""])),
utm_source: String.downcase(referrer_source),
pathname:
Enum.random([
"/",
@ -234,7 +257,14 @@ native_stats_range
]),
user_id: Enum.random(1..1200),
revenue_reporting_amount: Decimal.new(Enum.random(100..10000)),
revenue_reporting_currency: "USD"
revenue_reporting_currency: "USD",
"meta.key": ["url", "logged_in", "is_customer", "amount"],
"meta.value": [
Enum.random(long_random_urls),
Enum.random(["true", "false"]),
Enum.random(["true", "false"]),
to_string(Enum.random(1..9000))
]
]
|> Keyword.merge(geolocation)
|> then(&Plausible.Factory.build(:event, &1))
@ -248,17 +278,21 @@ native_stats_range
Enum.map(0..Enum.random(1..50), fn _ ->
geolocation = Enum.random(geolocations)
referrer_source = Enum.random(sources)
[
name: outbound.event_name,
site_id: site.id,
hostname: site.domain,
timestamp: put_random_time.(date, index),
referrer_source: Enum.random(["", "Facebook", "Twitter", "DuckDuckGo", "Google"]),
referrer_source: referrer_source,
browser: Enum.random(["Microsoft Edge", "Chrome", "Safari", "Firefox", "Vivaldi"]),
browser_version: to_string(Enum.random(0..50)),
screen_size: Enum.random(["Mobile", "Tablet", "Desktop", "Laptop"]),
operating_system: Enum.random(["Windows", "Mac", "GNU/Linux"]),
operating_system_version: to_string(Enum.random(0..15)),
utm_medium: Enum.random(Map.get(utm_medium, referrer_source, [""])),
utm_source: String.downcase(referrer_source),
user_id: Enum.random(1..1200),
"meta.key": ["url", "logged_in", "is_customer", "amount"],
"meta.value": [

View File

@ -0,0 +1,68 @@
# Verifies that Plausible.Stats.QueryResult serializes to JSON with a
# deterministic key order (results, meta, query), which the interactive
# API docs depend on for a stable display.
defmodule Plausible.Stats.QueryResultTest do
use Plausible.DataCase, async: true
alias Plausible.Stats.{Query, QueryResult, QueryOptimizer}
# NOTE(review): the site inserted here is not used by the test below,
# which inserts its own site — presumably kept for future tests; verify.
setup do
user = insert(:user)
site =
insert(:site,
members: [user],
inserted_at: ~N[2020-01-01T00:00:00],
stats_start_date: ~D[2020-01-01]
)
{:ok, site: site}
end
test "serializing query to JSON keeps keys ordered" do
site = insert(:site)
# Build a minimal query for the site; Query.build returns {:ok, query}
# for valid params.
{:ok, query} =
Query.build(
site,
%{
"site_id" => site.domain,
"metrics" => ["pageviews"],
"date_range" => ["2024-01-01", "2024-02-01"],
"include" => %{"imports" => true}
},
%{}
)
query = QueryOptimizer.optimize(query)
# Encode an empty result set and normalize the random factory domain so
# the heredoc below can assert an exact, stable JSON string.
query_result_json =
QueryResult.from([], site, query)
|> Jason.encode!(pretty: true)
|> String.replace(site.domain, "dummy.site")
# Exact string comparison: key ORDER matters here, not just content.
# The trailing `}\` escapes the heredoc's final newline so the expected
# string ends at the closing brace.
assert query_result_json == """
{
"results": [],
"meta": {},
"query": {
"site_id": "dummy.site",
"metrics": [
"pageviews"
],
"date_range": [
"2024-01-01",
"2024-02-01"
],
"filters": [],
"dimensions": [],
"order_by": [
[
"pageviews",
"desc"
]
],
"include": {
"imports": true
}
}
}\
"""
end
end