Merge branch 'master' into release-3.0.0

Adam Rutkowski 2025-04-10 09:53:09 +02:00
commit c524748002
47 changed files with 1176 additions and 512 deletions

View File

@ -6,8 +6,9 @@ All notable changes to this project will be documented in this file.
## v3.0.0 - 2025-04-08
### Added
- Ability to sort by and compare the `exit_rate` metric in the dashboard Exit Pages > Details report
- Add top 3 pages to the traffic spike email
- Two new shorthand time periods `28d` and `90d`, available both on the dashboard and in the public API
- Two new shorthand time periods `28d` and `91d`, available both on the dashboard and in the public API
- Average scroll depth metric
- Scroll Depth goals
- Dashboard shows comparisons for all reports
@ -34,6 +35,8 @@ All notable changes to this project will be documented in this file.
### Changed
- Default period for brand new sites is now `today` rather than `last 28 days`. On the next day, the default changes to `last 28 days`.
- Increase decimal precision of the "Exit rate" metric from 0 to 1 (e.g. 67 -> 66.7)
- Increase decimal precision of the "Conversion rate" metric from 1 to 2 (e.g. 16.7 -> 16.67)
- The "Last 30 days" period is now "Last 28 days" on the dashboard and also the new default. Keyboard shortcut `T` still works for last 30 days.
- Last `7d` and `30d` periods do not include today anymore
@ -48,6 +51,8 @@ All notable changes to this project will be documented in this file.
- Make Stats and Sites API keys scoped to teams they are created in
- Remove permissions to manage site guests and run destructive actions from the team editor and guest editor roles in favour of the team admin role
- Time-on-page metric has been reworked. It now uses `engagement` events sent by the Plausible tracker script. We still use the old calculation methods for periods before the self-hosted instance was upgraded. Warnings are shown in the dashboard and API when legacy calculation methods are used.
- Always set site and team member limits to unlimited for Community Edition
- Stats API now supports more `date_range` shorthand options like `30d`, `3mo`.
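  For illustration, the query parser tests later in this diff exercise the new shorthands with request parameters shaped like the sketch below (any positive `<n>d` up to 5,000 days or `<n>mo` up to 100 months is accepted by the parser; `site` here is the test site):

      %{
        "site_id" => site.domain,
        "metrics" => ["visitors"],
        "date_range" => "10d"   # likewise "3mo", "28d", "91d", ...
      }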
### Fixed

View File

@ -28,7 +28,7 @@ test('if no period is stored, loads with default value of "Last 28 days", all ex
['Realtime', 'R'],
['Last 7 Days', 'W'],
['Last 28 Days', 'F'],
['Last 90 Days', 'N'],
['Last 91 Days', 'N'],
['Month to Date', 'M'],
['Last Month', 'P'],
['Year to Date', 'Y'],

View File

@ -68,6 +68,7 @@ export default function QueryContextProvider({
const defaultValues = queryDefaultValue
const storedValues = getSavedTimePreferencesFromStorage({ site })
const timeQuery = getDashboardTimeSettings({
site,
searchValues: { period, comparison, match_day_of_week },
storedValues,
defaultValues,

View File

@ -1,3 +1,4 @@
import { DEFAULT_SITE } from '../../test-utils/app-context-providers'
import {
ComparisonMode,
getDashboardTimeSettings,
@ -5,6 +6,7 @@ import {
getStoredPeriod,
QueryPeriod
} from './query-time-periods'
import { formatISO, utcNow } from './util/date'
describe(`${getStoredPeriod.name}`, () => {
const domain = 'any.site'
@ -22,8 +24,10 @@ describe(`${getStoredPeriod.name}`, () => {
})
describe(`${getDashboardTimeSettings.name}`, () => {
const site = DEFAULT_SITE
const defaultValues = {
period: QueryPeriod['7d'],
period: QueryPeriod['28d'],
comparison: null,
match_day_of_week: true
}
@ -41,6 +45,7 @@ describe(`${getDashboardTimeSettings.name}`, () => {
it('returns defaults if nothing stored and no search', () => {
expect(
getDashboardTimeSettings({
site: site,
searchValues: emptySearchValues,
storedValues: emptyStoredValues,
defaultValues,
@ -49,9 +54,37 @@ describe(`${getDashboardTimeSettings.name}`, () => {
).toEqual(defaultValues)
})
it('defaults period to today if the site was created today', () => {
expect(
getDashboardTimeSettings({
site: { ...site, nativeStatsBegin: formatISO(utcNow()) },
searchValues: emptySearchValues,
storedValues: emptyStoredValues,
defaultValues,
segmentIsExpanded: false
})
).toEqual({ ...defaultValues, period: 'day' })
})
it('defaults period to today if the site was created yesterday', () => {
expect(
getDashboardTimeSettings({
site: {
...site,
nativeStatsBegin: formatISO(utcNow().subtract(1, 'day'))
},
searchValues: emptySearchValues,
storedValues: emptyStoredValues,
defaultValues,
segmentIsExpanded: false
})
).toEqual({ ...defaultValues, period: 'day' })
})
it('returns stored values if no search', () => {
expect(
getDashboardTimeSettings({
site: site,
searchValues: emptySearchValues,
storedValues: {
period: QueryPeriod['12mo'],
@ -71,6 +104,7 @@ describe(`${getDashboardTimeSettings.name}`, () => {
it('uses values from search above all else, treats ComparisonMode.off as null', () => {
expect(
getDashboardTimeSettings({
site: site,
searchValues: {
period: QueryPeriod['year'],
comparison: ComparisonMode.off,
@ -94,6 +128,7 @@ describe(`${getDashboardTimeSettings.name}`, () => {
it('respects segmentIsExpanded: true option: comparison and edit segment mode are mutually exclusive', () => {
expect(
getDashboardTimeSettings({
site: site,
searchValues: {
period: QueryPeriod['custom'],
comparison: ComparisonMode.previous_period,

View File

@ -16,6 +16,7 @@ import {
isThisMonth,
isThisYear,
isToday,
isTodayOrYesterday,
lastMonth,
nowForSite,
parseNaiveDate,
@ -31,7 +32,7 @@ export enum QueryPeriod {
'7d' = '7d',
'28d' = '28d',
'30d' = '30d',
'90d' = '90d',
'91d' = '91d',
'6mo' = '6mo',
'12mo' = '12mo',
'year' = 'year',
@ -360,15 +361,15 @@ export const getDatePeriodGroups = ({
}
],
[
['Last 90 Days', 'N'],
['Last 91 Days', 'N'],
{
search: (s) => ({
...s,
...clearedDateSearch,
period: QueryPeriod['90d'],
period: QueryPeriod['91d'],
keybindHint: 'N'
}),
isActive: ({ query }) => query.period === QueryPeriod['90d'],
isActive: ({ query }) => query.period === QueryPeriod['91d'],
onEvent
}
]
@ -533,11 +534,13 @@ export function getSavedTimePreferencesFromStorage({
}
export function getDashboardTimeSettings({
site,
searchValues,
storedValues,
defaultValues,
segmentIsExpanded
}: {
site: PlausibleSite
searchValues: Record<'period' | 'comparison' | 'match_day_of_week', unknown>
storedValues: ReturnType<typeof getSavedTimePreferencesFromStorage>
defaultValues: Pick<
@ -549,10 +552,12 @@ export function getDashboardTimeSettings({
let period: QueryPeriod
if (isValidPeriod(searchValues.period)) {
period = searchValues.period
} else if (isValidPeriod(storedValues.period)) {
period = storedValues.period
} else if (isTodayOrYesterday(site.nativeStatsBegin)) {
period = QueryPeriod.day
} else {
period = isValidPeriod(storedValues.period)
? storedValues.period
: defaultValues.period
period = defaultValues.period
}
let comparison: ComparisonMode | null
@ -606,8 +611,8 @@ export function getCurrentPeriodDisplayName({
if (query.period === '30d') {
return 'Last 30 days'
}
if (query.period === '90d') {
return 'Last 90 days'
if (query.period === '91d') {
return 'Last 91 days'
}
if (query.period === 'month') {
if (isThisMonth(site, query.date)) {

View File

@ -30,7 +30,7 @@ describe('parseSiteFromDataset', () => {
data-current-user-role="owner"
data-current-user-id="1"
data-flags="{}"
data-valid-intervals-by-period='{"12mo":["day","week","month"],"7d":["hour","day"],"28d":["day","week"],"30d":["day","week"],"90d":["day","week","month"],"6mo":["day","week","month"],"all":["week","month"],"custom":["day","week","month"],"day":["minute","hour"],"month":["day","week"],"realtime":["minute"],"year":["day","week","month"]}'
data-valid-intervals-by-period='{"12mo":["day","week","month"],"7d":["hour","day"],"28d":["day","week"],"30d":["day","week"],"91d":["day","week","month"],"6mo":["day","week","month"],"all":["week","month"],"custom":["day","week","month"],"day":["minute","hour"],"month":["day","week"],"realtime":["minute"],"year":["day","week","month"]}'
{...attrs}
/>
)
@ -58,7 +58,7 @@ describe('parseSiteFromDataset', () => {
'7d': ['hour', 'day'],
'28d': ['day', 'week'],
'30d': ['day', 'week'],
'90d': ['day', 'week', 'month'],
'91d': ['day', 'week', 'month'],
'6mo': ['day', 'week', 'month'],
all: ['week', 'month'],
custom: ['day', 'week', 'month'],

View File

@ -207,7 +207,7 @@ export const createExitRate = (props) => {
...props,
key: 'exit_rate',
renderLabel,
sortable: false
sortable: true
})
}

View File

@ -3,6 +3,10 @@ import utc from 'dayjs/plugin/utc'
dayjs.extend(utc)
export function utcNow() {
return dayjs()
}
// https://stackoverflow.com/a/50130338
export function formatISO(date) {
return date.format('YYYY-MM-DD')
@ -95,6 +99,12 @@ export function isToday(site, date) {
return isSameDate(date, nowForSite(site))
}
export function isTodayOrYesterday(isoDate) {
const isoToday = formatISO(dayjs())
const isoYesterday = formatISO(dayjs().subtract(1, 'day'))
return isoDate === isoToday || isoDate === isoYesterday
}
export function isThisMonth(site, date) {
return formatMonthYYYY(date) === formatMonthYYYY(nowForSite(site))
}

View File

@ -12,6 +12,7 @@ export type Metric =
| "conversion_rate"
| "group_conversion_rate"
| "time_on_page"
| "exit_rate"
| "total_revenue"
| "average_revenue"
| "scroll_depth";
@ -23,11 +24,12 @@ export type DateRangeShorthand =
| "7d"
| "28d"
| "30d"
| "90d"
| "91d"
| "month"
| "6mo"
| "12mo"
| "year";
| "year"
| string;
/**
* @minItems 2
* @maxItems 2
@ -71,7 +73,7 @@ export type CustomPropertyFilterDimensions = string;
export type GoalDimension = "event:goal";
export type TimeDimensions = ("time" | "time:month" | "time:week" | "time:day" | "time:hour") | "time:minute";
export type FilterTree = FilterEntry | FilterAndOr | FilterNot | FilterHasDone;
export type FilterEntry = FilterWithoutGoals | FilterWithGoals | FilterWithPattern | FilterForSegment;
export type FilterEntry = FilterWithoutGoals | FilterWithIs | FilterWithContains | FilterWithPattern;
/**
* @minItems 3
* @maxItems 4
@ -95,20 +97,30 @@ export type Clauses = (string | number)[];
* @minItems 3
* @maxItems 4
*/
export type FilterWithGoals =
| [FilterOperationContains, GoalDimension | SimpleFilterDimensions | CustomPropertyFilterDimensions, Clauses]
export type FilterWithIs =
| ["is", GoalDimension | SimpleFilterDimensions | CustomPropertyFilterDimensions | "segment", Clauses]
| [
FilterOperationContains,
GoalDimension | SimpleFilterDimensions | CustomPropertyFilterDimensions,
"is",
GoalDimension | SimpleFilterDimensions | CustomPropertyFilterDimensions | "segment",
Clauses,
{
case_sensitive?: boolean;
}
];
/**
* filter operation
* @minItems 3
* @maxItems 4
*/
export type FilterOperationContains = "is" | "contains";
export type FilterWithContains =
| ["contains", GoalDimension | SimpleFilterDimensions | CustomPropertyFilterDimensions, Clauses]
| [
"contains",
GoalDimension | SimpleFilterDimensions | CustomPropertyFilterDimensions,
Clauses,
{
case_sensitive?: boolean;
}
];
/**
* @minItems 3
* @maxItems 3
@ -122,11 +134,6 @@ export type FilterWithPattern = [
* filter operation
*/
export type FilterOperationRegex = "matches" | "matches_not";
/**
* @minItems 3
* @maxItems 3
*/
export type FilterForSegment = ["is", "segment", number[]];
/**
* @minItems 2
* @maxItems 2

View File

@ -22,6 +22,29 @@ type TestContextProvidersProps = {
preloaded?: { segments?: SavedSegments }
}
export const DEFAULT_SITE: PlausibleSite = {
domain: 'plausible.io/unit',
offset: 0,
hasGoals: false,
hasProps: false,
funnelsAvailable: false,
propsAvailable: false,
siteSegmentsAvailable: false,
conversionsOptedOut: false,
funnelsOptedOut: false,
propsOptedOut: false,
revenueGoals: [],
funnels: [],
statsBegin: '',
nativeStatsBegin: '',
embedded: false,
background: '',
isDbip: false,
flags: {},
validIntervalsByPeriod: {},
shared: false
}
export const TestContextProviders = ({
children,
routerProps,
@ -29,30 +52,7 @@ export const TestContextProviders = ({
preloaded,
user
}: TestContextProvidersProps) => {
const defaultSite: PlausibleSite = {
domain: 'plausible.io/unit',
offset: 0,
hasGoals: false,
hasProps: false,
funnelsAvailable: false,
propsAvailable: false,
siteSegmentsAvailable: false,
conversionsOptedOut: false,
funnelsOptedOut: false,
propsOptedOut: false,
revenueGoals: [],
funnels: [],
statsBegin: '',
nativeStatsBegin: '',
embedded: false,
background: '',
isDbip: false,
flags: {},
validIntervalsByPeriod: {},
shared: false
}
const site = { ...defaultSite, ...siteOptions }
const site = { ...DEFAULT_SITE, ...siteOptions }
const queryClient = new QueryClient({
defaultOptions: {

View File

@ -2,7 +2,7 @@ defmodule Plausible.Stats.Sampling do
@moduledoc """
Sampling related functions
"""
@default_sample_threshold 20_000_000
@default_sample_threshold 10_000_000
# 1 percent
@min_sample_rate 0.01
@ -56,19 +56,9 @@ defmodule Plausible.Stats.Sampling do
end
defp decide_sample_rate(site, query) do
cond do
FunWithFlags.enabled?(:fractional_hardcoded_sample_rate, for: site) ->
# Hard-coded sample rate to temporarily fix an issue for a client.
# To be solved as part of https://3.basecamp.com/5308029/buckets/39750953/messages/7978775089
0.1
FunWithFlags.enabled?(:fractional_sample_rate, for: site) ->
traffic_30_day = SamplingCache.get(site.id)
fractional_sample_rate(traffic_30_day, query)
true ->
@default_sample_threshold
end
site.id
|> SamplingCache.get()
|> fractional_sample_rate(query)
end
def fractional_sample_rate(nil = _traffic_30_day, _query), do: :no_sampling
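# Illustrative only, mirroring the sampling tests later in this diff (where
# query(days) is a test helper building a query of that duration and
# @threshold = default_sample_threshold() = 10_000_000):
#
#   fractional_sample_rate(nil, query(30))              # => :no_sampling
#   fractional_sample_rate(@threshold * 2, query(60))   # => 0.25
#   fractional_sample_rate(@threshold * 5, query(30))   # => 0.20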

View File

@ -79,7 +79,7 @@ defmodule Plausible.Props do
"""
def allow(site, prop_or_props) do
with site <- Plausible.Repo.preload(site, :team),
:ok <- Plausible.Billing.Feature.Props.check_availability(site.team) do
:ok <- ensure_prop_key_accessible(prop_or_props, site.team) do
site
|> allow_changeset(prop_or_props)
|> Plausible.Repo.update()
@ -139,6 +139,15 @@ defmodule Plausible.Props do
allow(site, props_to_allow)
end
def ensure_prop_key_accessible(prop_keys, team) when is_list(prop_keys) do
Enum.reduce_while(prop_keys, :ok, fn prop_key, :ok ->
case ensure_prop_key_accessible(prop_key, team) do
:ok -> {:cont, :ok}
error -> {:halt, error}
end
end)
end
def ensure_prop_key_accessible(prop_key, team) do
if prop_key in @internal_keys do
:ok

View File

@ -3,6 +3,12 @@ defmodule Plausible.Stats.Compare do
Float.round(new_value - old_value, 1)
end
def calculate_change(:exit_rate, old_value, new_value) do
if is_float(old_value) and is_float(new_value) do
Float.round(new_value - old_value, 1)
end
end
def calculate_change(:bounce_rate, old_count, new_count) do
if old_count > 0, do: new_count - old_count
end

View File

@ -225,42 +225,12 @@ defmodule Plausible.Stats.Filters.QueryParser do
{:ok, DateTimeRange.new!(date, date, site.timezone)}
end
defp parse_time_range(site, shorthand, date, _now)
when shorthand in ["7d", "28d", "30d", "90d"] do
{days, "d"} = Integer.parse(shorthand)
last = date |> Date.add(-1)
first = date |> Date.add(-days)
{:ok, DateTimeRange.new!(first, last, site.timezone)}
end
defp parse_time_range(site, "month", date, _now) do
last = date |> Date.end_of_month()
first = last |> Date.beginning_of_month()
{:ok, DateTimeRange.new!(first, last, site.timezone)}
end
defp parse_time_range(site, "6mo", date, _now) do
last = date |> Date.end_of_month()
first =
last
|> Date.shift(month: -5)
|> Date.beginning_of_month()
{:ok, DateTimeRange.new!(first, last, site.timezone)}
end
defp parse_time_range(site, "12mo", date, _now) do
last = date |> Date.end_of_month()
first =
last
|> Date.shift(month: -11)
|> Date.beginning_of_month()
{:ok, DateTimeRange.new!(first, last, site.timezone)}
end
defp parse_time_range(site, "year", date, _now) do
last = date |> Timex.end_of_year()
first = last |> Timex.beginning_of_year()
@ -273,6 +243,23 @@ defmodule Plausible.Stats.Filters.QueryParser do
{:ok, DateTimeRange.new!(start_date, date, site.timezone)}
end
defp parse_time_range(site, shorthand, date, _now) when is_binary(shorthand) do
case Integer.parse(shorthand) do
{n, "d"} when n > 0 and n <= 5_000 ->
last = date |> Date.add(-1)
first = date |> Date.add(-n)
{:ok, DateTimeRange.new!(first, last, site.timezone)}
{n, "mo"} when n > 0 and n <= 100 ->
last = date |> Date.end_of_month()
first = last |> Date.shift(month: -n + 1) |> Date.beginning_of_month()
{:ok, DateTimeRange.new!(first, last, site.timezone)}
_ ->
{:error, "Invalid date_range #{i(shorthand)}"}
end
end
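# Worked example, mirroring the query parser tests later in this diff (which
# use 2021-05-05 as the reference date):
#
#   "10d" -> 2021-04-25 .. 2021-05-04   (the 10 full days before today)
#   "3mo" -> 2021-03-01 .. 2021-05-31   (this month and the 2 preceding months)
#
# The guards above cap shorthands at 5_000 days and 100 months respectively.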
defp parse_time_range(site, [from, to], _date, _now) when is_binary(from) and is_binary(to) do
case date_range_from_date_strings(site, from, to) do
{:ok, date_range} -> {:ok, date_range}
@ -281,7 +268,7 @@ defmodule Plausible.Stats.Filters.QueryParser do
end
defp parse_time_range(_site, unknown, _date, _now),
do: {:error, "Invalid date_range '#{i(unknown)}'."}
do: {:error, "Invalid date_range #{i(unknown)}"}
defp date_range_from_date_strings(site, from, to) do
with {:ok, from_date} <- Date.from_iso8601(from),
@ -629,6 +616,20 @@ defmodule Plausible.Stats.Filters.QueryParser do
end
end
defp validate_metric(:exit_rate = metric, query) do
case {query.dimensions, TableDecider.sessions_join_events?(query)} do
{["visit:exit_page"], false} ->
:ok
{["visit:exit_page"], true} ->
{:error, "Metric `#{metric}` cannot be queried when filtering on event dimensions."}
_ ->
{:error,
"Metric `#{metric}` requires a `\"visit:exit_page\"` dimension. No other dimensions are allowed."}
end
end
defp validate_metric(:views_per_visit = metric, query) do
cond do
Filters.filtering_on_dimension?(query, "event:page", behavioral_filters: :ignore) ->

View File

@ -357,6 +357,7 @@ defmodule Plausible.Stats.Imported do
end
@cannot_optimize_metrics [
:exit_rate,
:scroll_depth,
:percentage,
:conversion_rate,

View File

@ -87,6 +87,10 @@ defmodule Plausible.Stats.Imported.SQL.Expression do
wrap_alias([i], %{bounces: sum(i.bounces), __internal_visits: sum(i.visits)})
end
defp select_metric(:exit_rate, "imported_exit_pages", _query) do
wrap_alias([i], %{__internal_visits: sum(i.exits)})
end
defp select_metric(:visit_duration, "imported_pages", _query) do
wrap_alias([i], %{visit_duration: 0})
end
@ -398,6 +402,12 @@ defmodule Plausible.Stats.Imported.SQL.Expression do
|> Map.merge(time_on_page_metric(query))
end
defp joined_metric(:exit_rate, _query) do
wrap_alias([s, i], %{
__internal_visits: s.__internal_visits + i.__internal_visits
})
end
# Ignored as it's calculated separately
defp joined_metric(metric, _query)
when metric in [:conversion_rate, :group_conversion_rate, :percentage] do

View File

@ -31,7 +31,7 @@ defmodule Plausible.Stats.Interval do
case period do
period when period in ["realtime", "30m"] -> "minute"
"day" -> "hour"
period when period in ["custom", "7d", "28d", "30d", "90d", "month"] -> "day"
period when period in ["custom", "7d", "28d", "30d", "91d", "month"] -> "day"
period when period in ["6mo", "12mo", "year"] -> "month"
end
end
@ -59,7 +59,7 @@ defmodule Plausible.Stats.Interval do
"7d" => ["hour", "day"],
"28d" => ["day", "week"],
"30d" => ["day", "week"],
"90d" => ["day", "week", "month"],
"91d" => ["day", "week", "month"],
"month" => ["day", "week"],
"6mo" => ["day", "week", "month"],
"12mo" => ["day", "week", "month"],

View File

@ -93,7 +93,7 @@ defmodule Plausible.Stats.Legacy.QueryBuilder do
end
defp put_period(query, site, %{"period" => period} = params)
when period in ["7d", "28d", "30d", "90d"] do
when period in ["7d", "28d", "30d", "91d"] do
{days, "d"} = Integer.parse(period)
end_date = parse_single_date(query, params) |> Date.shift(day: -1)

View File

@ -11,6 +11,7 @@ defmodule Plausible.Stats.Metrics do
:visitors,
:visits,
:pageviews,
:exit_rate,
:views_per_visit,
:bounce_rate,
:visit_duration,
@ -33,6 +34,7 @@ defmodule Plausible.Stats.Metrics do
end
def default_value(:visit_duration, _query, _dimensions), do: nil
def default_value(:exit_rate, _query, _dimensions), do: nil
def default_value(:scroll_depth, _query, _dimensions), do: nil
def default_value(:time_on_page, _query, _dimensions), do: nil

View File

@ -332,6 +332,12 @@ defmodule Plausible.Stats.SQL.Expression do
})
end
def session_metric(:exit_rate, _query) do
wrap_alias([s], %{
__internal_visits: fragment("toUInt32(sum(sign))")
})
end
def session_metric(:visits, _query) do
wrap_alias([s], %{
visits: scale_sample(fragment("sum(?)", s.sign))

View File

@ -11,74 +11,76 @@ defmodule Plausible.Stats.SQL.SpecialMetrics do
import Ecto.Query
import Plausible.Stats.Util
@special_metrics [
:percentage,
:conversion_rate,
:group_conversion_rate,
:scroll_depth,
:exit_rate
]
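# Applies each special metric requested in query.metrics in turn; metrics
# that are not requested leave the query unchanged.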
def add(q, site, query) do
q
|> maybe_add_percentage_metric(site, query)
|> maybe_add_global_conversion_rate(site, query)
|> maybe_add_group_conversion_rate(site, query)
|> maybe_add_scroll_depth(query)
Enum.reduce(@special_metrics, q, fn special_metric, q ->
if special_metric in query.metrics do
add_special_metric(q, special_metric, site, query)
else
q
end
end)
end
defp maybe_add_percentage_metric(q, site, query) do
if :percentage in query.metrics do
total_query =
query
|> remove_filters_ignored_in_totals_query()
|> Query.set(
dimensions: [],
include_imported: query.include_imported,
pagination: nil
)
defp add_special_metric(q, :percentage, site, query) do
total_query =
query
|> remove_filters_ignored_in_totals_query()
|> Query.set(
dimensions: [],
include_imported: query.include_imported,
pagination: nil
)
q
|> select_merge_as([], total_visitors_subquery(site, total_query, query.include_imported))
|> select_merge_as([], %{
percentage:
fragment(
"if(? > 0, round(? / ? * 100, 1), null)",
selected_as(:total_visitors),
selected_as(:visitors),
selected_as(:total_visitors)
)
})
else
q
end
q
|> select_merge_as([], total_visitors_subquery(site, total_query, query.include_imported))
|> select_merge_as([], %{
percentage:
fragment(
"if(? > 0, round(? / ? * 100, 1), null)",
selected_as(:total_visitors),
selected_as(:visitors),
selected_as(:total_visitors)
)
})
end
# Adds conversion_rate metric to query, calculated as
# X / Y where Y is the same breakdown value without goal or props
# filters.
def maybe_add_global_conversion_rate(q, site, query) do
if :conversion_rate in query.metrics do
total_query =
query
|> Query.remove_top_level_filters(["event:goal", "event:props"])
|> remove_filters_ignored_in_totals_query()
|> Query.set(
dimensions: [],
include_imported: query.include_imported,
preloaded_goals: Map.put(query.preloaded_goals, :matching_toplevel_filters, []),
pagination: nil
)
q
|> select_merge_as(
[],
total_visitors_subquery(site, total_query, query.include_imported)
defp add_special_metric(q, :conversion_rate, site, query) do
total_query =
query
|> Query.remove_top_level_filters(["event:goal", "event:props"])
|> remove_filters_ignored_in_totals_query()
|> Query.set(
dimensions: [],
include_imported: query.include_imported,
preloaded_goals: Map.put(query.preloaded_goals, :matching_toplevel_filters, []),
pagination: nil
)
|> select_merge_as([e], %{
conversion_rate:
fragment(
"if(? > 0, round(? / ? * 100, 2), 0)",
selected_as(:total_visitors),
selected_as(:visitors),
selected_as(:total_visitors)
)
})
else
q
end
q
|> select_merge_as(
[],
total_visitors_subquery(site, total_query, query.include_imported)
)
|> select_merge_as([e], %{
conversion_rate:
fragment(
"if(? > 0, round(? / ? * 100, 2), 0)",
selected_as(:total_visitors),
selected_as(:visitors),
selected_as(:total_visitors)
)
})
end
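# Worked example (illustrative numbers only): with 25 converting visitors out
# of 150 total visitors for a breakdown value, the fragment above yields
# round(25 / 150 * 100, 2) = 16.67, matching the two-decimal precision noted
# in the changelog at the top of this diff.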
# This function injects a group_conversion_rate metric into
@ -92,118 +94,159 @@ defmodule Plausible.Stats.SQL.SpecialMetrics do
# * Y is the number of all visitors for this set of dimensions
# result without the `event:goal` and `event:props:*`
# filters.
def maybe_add_group_conversion_rate(q, site, query) do
if :group_conversion_rate in query.metrics do
group_totals_query =
query
|> Query.remove_top_level_filters(["event:goal", "event:props"])
|> remove_filters_ignored_in_totals_query()
|> Query.set(
metrics: [:visitors],
order_by: [],
include_imported: query.include_imported,
preloaded_goals: Map.put(query.preloaded_goals, :matching_toplevel_filters, []),
pagination: nil
)
from(e in subquery(q),
left_join: c in subquery(SQL.QueryBuilder.build(group_totals_query, site)),
on: ^SQL.QueryBuilder.build_group_by_join(query)
defp add_special_metric(q, :group_conversion_rate, site, query) do
group_totals_query =
query
|> Query.remove_top_level_filters(["event:goal", "event:props"])
|> remove_filters_ignored_in_totals_query()
|> Query.set(
metrics: [:visitors],
order_by: [],
include_imported: query.include_imported,
preloaded_goals: Map.put(query.preloaded_goals, :matching_toplevel_filters, []),
pagination: nil
)
|> select_merge_as([e, c], %{
total_visitors: c.visitors,
group_conversion_rate:
from(e in subquery(q),
left_join: c in subquery(SQL.QueryBuilder.build(group_totals_query, site)),
on: ^SQL.QueryBuilder.build_group_by_join(query)
)
|> select_merge_as([e, c], %{
total_visitors: c.visitors,
group_conversion_rate:
fragment(
"if(? > 0, round(? / ? * 100, 2), 0)",
c.visitors,
e.visitors,
c.visitors
)
})
|> select_join_fields(query, query.dimensions, e)
|> select_join_fields(query, List.delete(query.metrics, :group_conversion_rate), e)
end
defp add_special_metric(q, :scroll_depth, _site, query) do
max_per_session_q =
Base.base_event_query(query)
|> where([e], e.name == "engagement" and e.scroll_depth <= 100)
|> select([e], %{
session_id: e.session_id,
max_scroll_depth: max(e.scroll_depth)
})
|> SQL.QueryBuilder.build_group_by(:events, query)
|> group_by([e], e.session_id)
dim_shortnames = Enum.map(query.dimensions, fn dim -> shortname(query, dim) end)
dim_select =
dim_shortnames
|> Enum.map(fn dim -> {dim, dynamic([p], field(p, ^dim))} end)
|> Map.new()
dim_group_by =
dim_shortnames
|> Enum.map(fn dim -> dynamic([p], field(p, ^dim)) end)
total_scroll_depth_q =
subquery(max_per_session_q)
|> select([], %{})
|> select_merge_as([p], %{
# Note: No need to upscale sample size here since it would end up cancelling out due to the result being an average
total_scroll_depth: fragment("sum(?)", p.max_scroll_depth),
total_scroll_depth_visits: fragment("uniq(?)", p.session_id)
})
|> select_merge(^dim_select)
|> group_by(^dim_group_by)
join_on_dim_condition =
if dim_shortnames == [] do
true
else
dim_shortnames
|> Enum.map(fn dim -> dynamic([_e, ..., s], selected_as(^dim) == field(s, ^dim)) end)
# credo:disable-for-next-line Credo.Check.Refactor.Nesting
|> Enum.reduce(fn condition, acc -> dynamic([], ^acc and ^condition) end)
end
joined_q =
join(q, :left, [e], s in subquery(total_scroll_depth_q), on: ^join_on_dim_condition)
if query.include_imported do
joined_q
|> select_merge_as([..., s], %{
scroll_depth:
fragment(
"if(? > 0, round(? / ? * 100, 2), 0)",
c.visitors,
e.visitors,
c.visitors
"""
if(? + ? > 0, toInt8(round((? + ?) / (? + ?))), NULL)
""",
s.total_scroll_depth_visits,
selected_as(:__imported_total_scroll_depth_visits),
s.total_scroll_depth,
selected_as(:__imported_total_scroll_depth),
s.total_scroll_depth_visits,
selected_as(:__imported_total_scroll_depth_visits)
)
})
|> select_join_fields(query, query.dimensions, e)
|> select_join_fields(query, List.delete(query.metrics, :group_conversion_rate), e)
else
q
joined_q
|> select_merge_as([..., s], %{
scroll_depth:
fragment(
"if(any(?) > 0, toUInt8(round(any(?) / any(?))), NULL)",
s.total_scroll_depth_visits,
s.total_scroll_depth,
s.total_scroll_depth_visits
)
})
end
end
def maybe_add_scroll_depth(q, query) do
if :scroll_depth in query.metrics do
max_per_session_q =
Base.base_event_query(query)
|> where([e], e.name == "engagement" and e.scroll_depth <= 100)
|> select([e], %{
session_id: e.session_id,
max_scroll_depth: max(e.scroll_depth)
})
|> SQL.QueryBuilder.build_group_by(:events, query)
|> group_by([e], e.session_id)
# Selects exit_rate into the query, calculated as X / Y, where X is the
# total number of exits from a page (i.e. the number of sessions with a
# specific exit page), and Y is the total pageviews on that page.
defp add_special_metric(q, :exit_rate, site, query) do
total_pageviews_query =
query
|> Query.remove_top_level_filters(["visit:exit_page"])
|> remove_filters_ignored_in_totals_query()
|> Query.set(
pagination: nil,
order_by: [],
metrics: [:pageviews],
include_imported: query.include_imported,
dimensions: ["event:page"]
)
dim_shortnames = Enum.map(query.dimensions, fn dim -> shortname(query, dim) end)
dim_select =
dim_shortnames
|> Enum.map(fn dim -> {dim, dynamic([p], field(p, ^dim))} end)
|> Map.new()
dim_group_by =
dim_shortnames
|> Enum.map(fn dim -> dynamic([p], field(p, ^dim)) end)
total_scroll_depth_q =
subquery(max_per_session_q)
|> select([], %{})
|> select_merge_as([p], %{
# Note: No need to upscale sample size here since it would end up cancelling out due to the result being an average
total_scroll_depth: fragment("sum(?)", p.max_scroll_depth),
total_scroll_depth_visits: fragment("uniq(?)", p.session_id)
})
|> select_merge(^dim_select)
|> group_by(^dim_group_by)
join_on_dim_condition =
if dim_shortnames == [] do
true
else
dim_shortnames
|> Enum.map(fn dim -> dynamic([_e, ..., s], selected_as(^dim) == field(s, ^dim)) end)
# credo:disable-for-next-line Credo.Check.Refactor.Nesting
|> Enum.reduce(fn condition, acc -> dynamic([], ^acc and ^condition) end)
end
joined_q =
join(q, :left, [e], s in subquery(total_scroll_depth_q), on: ^join_on_dim_condition)
if query.include_imported do
joined_q
|> select_merge_as([..., s], %{
scroll_depth:
fragment(
"""
if(? + ? > 0, toInt8(round((? + ?) / (? + ?))), NULL)
""",
s.total_scroll_depth_visits,
selected_as(:__imported_total_scroll_depth_visits),
s.total_scroll_depth,
selected_as(:__imported_total_scroll_depth),
s.total_scroll_depth_visits,
selected_as(:__imported_total_scroll_depth_visits)
)
})
else
joined_q
|> select_merge_as([..., s], %{
scroll_depth:
fragment(
"if(any(?) > 0, toUInt8(round(any(?) / any(?))), NULL)",
s.total_scroll_depth_visits,
s.total_scroll_depth,
s.total_scroll_depth_visits
)
})
end
else
joined_q =
q
|> join(:left, [], p in subquery(SQL.QueryBuilder.build(total_pageviews_query, site)),
on:
selected_as(^shortname(query, "visit:exit_page")) ==
field(p, ^shortname(total_pageviews_query, "event:page"))
)
if query.include_imported do
joined_q
|> select_merge_as([..., p], %{
exit_rate:
fragment(
"if(? > 0, round(? / ? * 100, 1), NULL)",
p.pageviews,
selected_as(:__internal_visits),
p.pageviews
)
})
else
joined_q
|> select_merge_as([..., p], %{
exit_rate:
fragment(
"if(? > 0, round(? / ? * 100, 1), NULL)",
fragment("any(?)", p.pageviews),
selected_as(:__internal_visits),
fragment("any(?)", p.pageviews)
)
})
end
end
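# Worked example (illustrative numbers only): if a page was the exit page of
# 2 sessions (__internal_visits = 2) and collected 3 pageviews in total, the
# fragment above yields round(2 / 3 * 100, 1) = 66.7, the same value that
# appears in the updated exit-pages CSV fixture near the end of this diff.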

View File

@ -149,6 +149,7 @@ defmodule Plausible.Stats.TableDecider do
defp metric_partitioner(_, :time_on_page), do: :event
defp metric_partitioner(_, :visit_duration), do: :session
defp metric_partitioner(_, :views_per_visit), do: :session
defp metric_partitioner(_, :exit_rate), do: :session
# Calculated metrics - handled on callsite separately from other metrics.
defp metric_partitioner(_, :total_visitors), do: :other

View File

@ -17,9 +17,7 @@ defmodule Plausible.Teams.Billing do
require Plausible.Billing.Subscription.Status
@team_member_limit_for_trials 3
@limit_sites_since ~D[2021-05-05]
@site_limit_for_trials 10
@type cycles_usage() :: %{cycle() => usage_cycle()}
@ -193,16 +191,33 @@ defmodule Plausible.Teams.Billing do
end
end
def site_limit(nil) do
@site_limit_for_trials
end
on_ee do
@site_limit_for_trials 10
def site_limit(team) do
if grandfathered_team?(team) do
:unlimited
else
get_site_limit_from_plan(team)
def site_limit(nil) do
@site_limit_for_trials
end
def site_limit(team) do
if grandfathered_team?(team) do
:unlimited
else
get_site_limit_from_plan(team)
end
end
defp get_site_limit_from_plan(team) do
team =
Teams.with_subscription(team)
case Plans.get_subscription_plan(team.subscription) do
%{site_limit: site_limit} -> site_limit
:free_10k -> 50
nil -> @site_limit_for_trials
end
end
else
def site_limit(_team), do: :unlimited
end
@doc """
@ -217,29 +232,24 @@ defmodule Plausible.Teams.Billing do
|> length()
end
defp get_site_limit_from_plan(team) do
team =
Teams.with_subscription(team)
on_ee do
@team_member_limit_for_trials 3
case Plans.get_subscription_plan(team.subscription) do
%{site_limit: site_limit} -> site_limit
:free_10k -> 50
nil -> @site_limit_for_trials
def team_member_limit(nil) do
@team_member_limit_for_trials
end
end
def team_member_limit(nil) do
@team_member_limit_for_trials
end
def team_member_limit(team) do
team = Teams.with_subscription(team)
def team_member_limit(team) do
team = Teams.with_subscription(team)
case Plans.get_subscription_plan(team.subscription) do
%{team_member_limit: limit} -> limit
:free_10k -> :unlimited
nil -> @team_member_limit_for_trials
case Plans.get_subscription_plan(team.subscription) do
%{team_member_limit: limit} -> limit
:free_10k -> :unlimited
nil -> @team_member_limit_for_trials
end
end
else
def team_member_limit(_team), do: :unlimited
end
@doc """

View File

@ -933,13 +933,20 @@ defmodule PlausibleWeb.Api.StatsController do
params = Map.put(params, "property", "visit:exit_page")
query = Query.from(site, params, debug_metadata(conn))
{limit, page} = parse_pagination(params)
metrics = breakdown_metrics(query, [:visits])
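# exit_rate can only be computed from the sessions table; when event-level
# filters force a sessions/events join, it is skipped here, mirroring
# validate_metric(:exit_rate, ...) in the query parser.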
extra_metrics =
if TableDecider.sessions_join_events?(query) do
[:visits]
else
[:visits, :exit_rate]
end
metrics = breakdown_metrics(query, extra_metrics)
%{results: results, meta: meta} = Stats.breakdown(site, query, metrics, {limit, page})
exit_pages =
results
|> add_exit_rate(site, query, limit)
|> transform_keys(%{exit_page: :name})
if params["csv"] do
@ -966,37 +973,6 @@ defmodule PlausibleWeb.Api.StatsController do
end
end
defp add_exit_rate(breakdown_results, site, query, limit) do
if TableDecider.sessions_join_events?(query) do
breakdown_results
else
pages = Enum.map(breakdown_results, & &1[:exit_page])
total_pageviews_query =
query
|> struct!(order_by: [])
|> Query.remove_top_level_filters(["visit:exit_page"])
|> Query.add_filter([:is, "event:page", pages])
|> Query.set(dimensions: ["event:page"])
%{results: total_pageviews} =
Stats.breakdown(site, total_pageviews_query, [:pageviews], {limit, 1})
Enum.map(breakdown_results, fn result ->
exit_rate =
case Enum.find(total_pageviews, &(&1[:page] == result[:exit_page])) do
%{pageviews: pageviews} ->
Float.floor(result[:visits] / pageviews * 100)
nil ->
nil
end
Map.put(result, :exit_rate, exit_rate)
end)
end
end
def countries(conn, params) do
site = conn.assigns[:site]
params = Map.put(params, "property", "visit:country")

View File

@ -1,10 +1,10 @@
defmodule Plausible.IngestRepo.Migrations.CleanUpOldTablesAfterV2Migration do
use Ecto.Migration
def change do
selfhost? = Application.fetch_env!(:plausible, :is_selfhost)
import Plausible.MigrationUtils
unless selfhost? do
def change do
unless community_edition?() do
drop_if_exists table(:events)
drop_if_exists table(:sessions)
end

View File

@ -171,7 +171,7 @@
]
},
"date_range_shorthand": {
"oneOf": [
"anyOf": [
{
"const": "30m",
"$comment": "only :internal"
@ -201,8 +201,8 @@
"description": "Last 30 days relative to today"
},
{
"const": "90d",
"description": "Last 90 days relative to today"
"const": "91d",
"description": "Last 91 days relative to today"
},
{
"const": "month",
@ -219,6 +219,16 @@
{
"const": "year",
"description": "Since the start of this year"
},
{
"type": "string",
"pattern": "^\\d+d$",
"description": "Last n days relative to today"
},
{
"type": "string",
"pattern": "^\\d+mo$",
"description": "Last n months relative to this month"
}
]
},
@ -268,6 +278,10 @@
"const": "time_on_page",
"markdownDescription": "Average time spent on a given page in a visit in seconds. Requires: `event:page` filter or dimension."
},
{
"const": "exit_rate",
"$comment": "only :internal"
},
{
"const": "total_revenue",
"markdownDescription": "Total revenue",
@ -367,11 +381,6 @@
"enum": ["is_not", "contains_not"],
"description": "filter operation"
},
"filter_operation_contains": {
"type": "string",
"enum": ["is", "contains"],
"description": "filter operation"
},
"filter_with_pattern": {
"type": "array",
"additionalItems": false,
@ -422,14 +431,48 @@
}
]
},
"filter_with_goals": {
"filter_with_is": {
"type": "array",
"additionalItems": false,
"minItems": 3,
"maxItems": 4,
"items": [
{
"$ref": "#/definitions/filter_operation_contains"
"const": "is",
"description": "filter operation"
},
{
"oneOf": [
{ "$ref": "#/definitions/goal_dimension" },
{ "$ref": "#/definitions/simple_filter_dimensions" },
{ "$ref": "#/definitions/custom_property_filter_dimensions" },
{ "const": "segment" }
]
},
{
"$ref": "#/definitions/clauses"
},
{
"type": "object",
"additionalProperties": false,
"properties": {
"case_sensitive": {
"type": "boolean",
"default": true
}
}
}
]
},
"filter_with_contains": {
"type": "array",
"additionalItems": false,
"minItems": 3,
"maxItems": 4,
"items": [
{
"const": "contains",
"description": "filter operation"
},
{
"oneOf": [
@ -453,32 +496,12 @@
}
]
},
"filter_for_segment": {
"type": "array",
"additionalItems": false,
"minItems": 3,
"maxItems": 3,
"items": [
{
"const": "is"
},
{
"const": "segment"
},
{
"type": "array",
"items": {
"type": ["integer"]
}
}
]
},
"filter_entry": {
"oneOf": [
{ "$ref": "#/definitions/filter_without_goals" },
{ "$ref": "#/definitions/filter_with_goals" },
{ "$ref": "#/definitions/filter_with_pattern" },
{ "$ref": "#/definitions/filter_for_segment" }
{ "$ref": "#/definitions/filter_with_is" },
{ "$ref": "#/definitions/filter_with_contains" },
{ "$ref": "#/definitions/filter_with_pattern" }
]
},
"filter_tree": {

View File

@ -1,8 +1,10 @@
defmodule Plausible.Repo.Migrations.CreatePlansTable do
use Ecto.Migration
import Plausible.MigrationUtils
def change do
if !Application.get_env(:plausible, :is_selfhost) do
if enterprise_edition?() do
create table(:plans) do
add :generation, :integer, null: false
add :kind, :string, null: false

View File

@ -1,8 +1,10 @@
defmodule Plausible.Repo.Migrations.AddDataRetentionInYearsToPlans do
use Ecto.Migration
import Plausible.MigrationUtils
def change do
if !Application.get_env(:plausible, :is_selfhost) do
if enterprise_edition?() do
alter table(:plans) do
add :data_retention_in_years, :integer, null: true
end

View File

@ -2,6 +2,8 @@ defmodule Plausible.Repo.Migrations.SiteLegacyTimeOnPageCutoff do
use Ecto.Migration
use Plausible
import Plausible.MigrationUtils
def change do
alter table(:sites) do
# New sites will have new time-on-page enabled by default.
@ -9,7 +11,7 @@ defmodule Plausible.Repo.Migrations.SiteLegacyTimeOnPageCutoff do
default: fragment("to_date('1970-01-01', 'YYYY-MM-DD')")
end
if Application.get_env(:plausible, :is_selfhost) do
if community_edition?() do
# On self-hosted, new time-on-page will be populated during first deploy.
execute(
fn ->

View File

@ -16,7 +16,6 @@ defmodule Plausible.Billing.QuotaTest do
@v1_plan_id "558018"
@v2_plan_id "654177"
@v3_plan_id "749342"
@v3_business_plan_id "857481"
@v4_1m_plan_id "857101"
@v4_10m_growth_plan_id "857104"
@v4_10m_business_plan_id "857112"
@ -24,68 +23,70 @@ defmodule Plausible.Billing.QuotaTest do
@highest_growth_plan Plausible.Billing.Plans.find(@v4_10m_growth_plan_id)
@highest_business_plan Plausible.Billing.Plans.find(@v4_10m_business_plan_id)
describe "site_limit/1" do
@describetag :ee_only
on_ee do
@v3_business_plan_id "857481"
test "returns 50 when user is on an old plan" do
team_on_v1 = new_user() |> subscribe_to_plan(@v1_plan_id) |> team_of()
team_on_v2 = new_user() |> subscribe_to_plan(@v2_plan_id) |> team_of()
team_on_v3 = new_user() |> subscribe_to_plan(@v3_plan_id) |> team_of()
describe "site_limit/1" do
test "returns 50 when user is on an old plan" do
team_on_v1 = new_user() |> subscribe_to_plan(@v1_plan_id) |> team_of()
team_on_v2 = new_user() |> subscribe_to_plan(@v2_plan_id) |> team_of()
team_on_v3 = new_user() |> subscribe_to_plan(@v3_plan_id) |> team_of()
assert 50 == Plausible.Teams.Billing.site_limit(team_on_v1)
assert 50 == Plausible.Teams.Billing.site_limit(team_on_v2)
assert 50 == Plausible.Teams.Billing.site_limit(team_on_v3)
end
assert 50 == Plausible.Teams.Billing.site_limit(team_on_v1)
assert 50 == Plausible.Teams.Billing.site_limit(team_on_v2)
assert 50 == Plausible.Teams.Billing.site_limit(team_on_v3)
end
test "returns 50 when user is on free_10k plan" do
team = new_user() |> subscribe_to_plan("free_10k") |> team_of()
assert 50 == Plausible.Teams.Billing.site_limit(team)
end
test "returns 50 when user is on free_10k plan" do
team = new_user() |> subscribe_to_plan("free_10k") |> team_of()
assert 50 == Plausible.Teams.Billing.site_limit(team)
end
test "returns the configured site limit for enterprise plan" do
team = new_user() |> subscribe_to_enterprise_plan(site_limit: 500) |> team_of()
assert Plausible.Teams.Billing.site_limit(team) == 500
end
test "returns the configured site limit for enterprise plan" do
team = new_user() |> subscribe_to_enterprise_plan(site_limit: 500) |> team_of()
assert Plausible.Teams.Billing.site_limit(team) == 500
end
test "returns 10 when user in on trial" do
team = new_user(trial_expiry_date: Date.shift(Date.utc_today(), day: 7)) |> team_of()
assert Plausible.Teams.Billing.site_limit(team) == 10
end
test "returns 10 when user in on trial" do
team = new_user(trial_expiry_date: Date.shift(Date.utc_today(), day: 7)) |> team_of()
assert Plausible.Teams.Billing.site_limit(team) == 10
end
test "returns the subscription limit for enterprise users who have not paid yet" do
team =
new_user()
|> subscribe_to_plan(@v1_plan_id)
|> subscribe_to_enterprise_plan(paddle_plan_id: "123321", subscription?: false)
|> team_of()
test "returns the subscription limit for enterprise users who have not paid yet" do
team =
new_user()
|> subscribe_to_plan(@v1_plan_id)
|> subscribe_to_enterprise_plan(paddle_plan_id: "123321", subscription?: false)
|> team_of()
assert Plausible.Teams.Billing.site_limit(team) == 50
end
assert Plausible.Teams.Billing.site_limit(team) == 50
end
test "returns 10 for enterprise users who have not upgraded yet and are on trial" do
team =
new_user()
|> subscribe_to_enterprise_plan(paddle_plan_id: "123321", subscription?: false)
|> team_of()
test "returns 10 for enterprise users who have not upgraded yet and are on trial" do
team =
new_user()
|> subscribe_to_enterprise_plan(paddle_plan_id: "123321", subscription?: false)
|> team_of()
assert Plausible.Teams.Billing.site_limit(team) == 10
end
assert Plausible.Teams.Billing.site_limit(team) == 10
end
test "grandfathered site limit should be unlimited when accepting transfer invitations" do
# must be before ~D[2021-05-05]
owner = new_user(team: [inserted_at: ~N[2021-01-01 00:00:00]])
# plan with site_limit: 10
subscribe_to_plan(owner, "857097")
_site = for _ <- 1..10, do: new_site(owner: owner)
test "grandfathered site limit should be unlimited when accepting transfer invitations" do
# must be before ~D[2021-05-05]
owner = new_user(team: [inserted_at: ~N[2021-01-01 00:00:00]])
# plan with site_limit: 10
subscribe_to_plan(owner, "857097")
_site = for _ <- 1..10, do: new_site(owner: owner)
other_owner = new_user()
other_site = new_site(owner: other_owner)
invite_transfer(other_site, owner, inviter: other_owner)
other_owner = new_user()
other_site = new_site(owner: other_owner)
invite_transfer(other_site, owner, inviter: other_owner)
team = owner |> team_of()
team = owner |> team_of()
assert Plausible.Teams.Billing.site_limit(team) == :unlimited
assert Plausible.Teams.Invitations.ensure_can_take_ownership(other_site, team) == :ok
assert Plausible.Teams.Billing.site_limit(team) == :unlimited
assert Plausible.Teams.Invitations.ensure_can_take_ownership(other_site, team) == :ok
end
end
end
@ -419,51 +420,52 @@ defmodule Plausible.Billing.QuotaTest do
end
end
describe "team_member_limit/1" do
@describetag :ee_only
test "returns unlimited when user is on an old plan" do
team_on_v1 = new_user() |> subscribe_to_plan(@v1_plan_id) |> team_of()
team_on_v2 = new_user() |> subscribe_to_plan(@v2_plan_id) |> team_of()
team_on_v3 = new_user() |> subscribe_to_plan(@v3_plan_id) |> team_of()
on_ee do
describe "team_member_limit/1" do
test "returns unlimited when user is on an old plan" do
team_on_v1 = new_user() |> subscribe_to_plan(@v1_plan_id) |> team_of()
team_on_v2 = new_user() |> subscribe_to_plan(@v2_plan_id) |> team_of()
team_on_v3 = new_user() |> subscribe_to_plan(@v3_plan_id) |> team_of()
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team_on_v1)
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team_on_v2)
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team_on_v3)
end
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team_on_v1)
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team_on_v2)
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team_on_v3)
end
test "returns unlimited when user is on free_10k plan" do
user = new_user()
subscribe_to_plan(user, "free_10k")
team = team_of(user)
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team)
end
test "returns unlimited when user is on free_10k plan" do
user = new_user()
subscribe_to_plan(user, "free_10k")
team = team_of(user)
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team)
end
test "returns 5 when user in on trial" do
team = new_user(trial_expiry_date: Date.shift(Date.utc_today(), day: 7)) |> team_of()
test "returns 5 when user in on trial" do
team = new_user(trial_expiry_date: Date.shift(Date.utc_today(), day: 7)) |> team_of()
assert 3 == Plausible.Teams.Billing.team_member_limit(team)
end
assert 3 == Plausible.Teams.Billing.team_member_limit(team)
end
test "returns the enterprise plan limit" do
user = new_user()
subscribe_to_enterprise_plan(user, team_member_limit: 27)
team = team_of(user)
test "returns the enterprise plan limit" do
user = new_user()
subscribe_to_enterprise_plan(user, team_member_limit: 27)
team = team_of(user)
assert 27 == Plausible.Teams.Billing.team_member_limit(team)
end
assert 27 == Plausible.Teams.Billing.team_member_limit(team)
end
test "reads from json file when the user is on a v4 plan" do
team_on_growth = new_user() |> subscribe_to_growth_plan() |> team_of()
team_on_business = new_user() |> subscribe_to_business_plan() |> team_of()
test "reads from json file when the user is on a v4 plan" do
team_on_growth = new_user() |> subscribe_to_growth_plan() |> team_of()
team_on_business = new_user() |> subscribe_to_business_plan() |> team_of()
assert 3 == Plausible.Teams.Billing.team_member_limit(team_on_growth)
assert 10 == Plausible.Teams.Billing.team_member_limit(team_on_business)
end
assert 3 == Plausible.Teams.Billing.team_member_limit(team_on_growth)
assert 10 == Plausible.Teams.Billing.team_member_limit(team_on_business)
end
test "returns unlimited when user is on a v3 business plan" do
team = new_user() |> subscribe_to_plan(@v3_business_plan_id) |> team_of()
test "returns unlimited when user is on a v3 business plan" do
team = new_user() |> subscribe_to_plan(@v3_business_plan_id) |> team_of()
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team)
assert :unlimited == Plausible.Teams.Billing.team_member_limit(team)
end
end
end

View File

@ -30,6 +30,7 @@ defmodule Plausible.ReleaseTest do
end
end
@tag :ee_only
test "dump_plans/0 inserts plans" do
stdout =
capture_io(fn ->

View File

@ -32,7 +32,7 @@ defmodule Plausible.Stats.IntervalTest do
"7d" => ["hour", "day"],
"28d" => ["day", "week"],
"30d" => ["day", "week"],
"90d" => ["day", "week", "month"],
"91d" => ["day", "week", "month"],
"6mo" => ["day", "week", "month"],
"12mo" => ["day", "week", "month"],
"year" => ["day", "week", "month"],
@ -51,7 +51,7 @@ defmodule Plausible.Stats.IntervalTest do
"7d" => ["hour", "day"],
"28d" => ["day", "week"],
"30d" => ["day", "week"],
"90d" => ["day", "week", "month"],
"91d" => ["day", "week", "month"],
"6mo" => ["day", "week", "month"],
"12mo" => ["day", "week", "month"],
"year" => ["day", "week", "month"],
@ -71,7 +71,7 @@ defmodule Plausible.Stats.IntervalTest do
"7d" => ["hour", "day"],
"28d" => ["day", "week"],
"30d" => ["day", "week"],
"90d" => ["day", "week", "month"],
"91d" => ["day", "week", "month"],
"6mo" => ["day", "week", "month"],
"12mo" => ["day", "week", "month"],
"year" => ["day", "week", "month"],

View File

@ -27,6 +27,10 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
first: DateTime.new!(~D[2021-04-28], ~T[00:00:00], "Etc/UTC"),
last: DateTime.new!(~D[2021-05-04], ~T[23:59:59], "Etc/UTC")
}
@date_range_10d %DateTimeRange{
first: DateTime.new!(~D[2021-04-25], ~T[00:00:00], "Etc/UTC"),
last: DateTime.new!(~D[2021-05-04], ~T[23:59:59], "Etc/UTC")
}
@date_range_30d %DateTimeRange{
first: DateTime.new!(~D[2021-04-05], ~T[00:00:00], "Etc/UTC"),
last: DateTime.new!(~D[2021-05-04], ~T[23:59:59], "Etc/UTC")
@ -35,6 +39,10 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
first: DateTime.new!(~D[2021-05-01], ~T[00:00:00], "Etc/UTC"),
last: DateTime.new!(~D[2021-05-31], ~T[23:59:59], "Etc/UTC")
}
@date_range_3mo %DateTimeRange{
first: DateTime.new!(~D[2021-03-01], ~T[00:00:00], "Etc/UTC"),
last: DateTime.new!(~D[2021-05-31], ~T[23:59:59], "Etc/UTC")
}
@date_range_6mo %DateTimeRange{
first: DateTime.new!(~D[2020-12-01], ~T[00:00:00], "Etc/UTC"),
last: DateTime.new!(~D[2021-05-31], ~T[23:59:59], "Etc/UTC")
@ -1255,8 +1263,10 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
test "parsing shortcut options", %{site: site} do
check_date_range(%{"date_range" => "day"}, site, @date_range_day)
check_date_range(%{"date_range" => "7d"}, site, @date_range_7d)
check_date_range(%{"date_range" => "10d"}, site, @date_range_10d)
check_date_range(%{"date_range" => "30d"}, site, @date_range_30d)
check_date_range(%{"date_range" => "month"}, site, @date_range_month)
check_date_range(%{"date_range" => "3mo"}, site, @date_range_3mo)
check_date_range(%{"date_range" => "6mo"}, site, @date_range_6mo)
check_date_range(%{"date_range" => "12mo"}, site, @date_range_12mo)
check_date_range(%{"date_range" => "year"}, site, @date_range_year)
@ -1323,11 +1333,17 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
end
test "parsing invalid custom date range with invalid dates", %{site: site} do
%{"site_id" => site.domain, "date_range" => "-1d", "metrics" => ["visitors"]}
|> check_error(site, "#/date_range: Invalid date range \"-1d\"")
%{"site_id" => site.domain, "date_range" => "foo", "metrics" => ["visitors"]}
|> check_error(site, "#/date_range: Invalid date range \"foo\"")
%{"site_id" => site.domain, "date_range" => ["21415-00", "eee"], "metrics" => ["visitors"]}
|> check_error(site, "#/date_range: Invalid date range [\"21415-00\", \"eee\"]")
%{"site_id" => site.domain, "date_range" => "999999999mo", "metrics" => ["visitors"]}
|> check_error(site, "Invalid date_range \"999999999mo\"")
end
test "custom date range is invalid when timestamps do not include timezone info", %{
@ -1374,8 +1390,10 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
for {date_range_shortcut, expected_date_range} <- [
{"day", @date_range_day},
{"7d", @date_range_7d},
{"10d", @date_range_10d},
{"30d", @date_range_30d},
{"month", @date_range_month},
{"3mo", @date_range_3mo},
{"6mo", @date_range_6mo},
{"12mo", @date_range_12mo},
{"year", @date_range_year}
@ -1785,6 +1803,73 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
end
end
describe "exit_rate metric" do
test "fails validation without visit:exit_page dimension", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"date_range" => "all"
}
|> check_error(
site,
"Metric `exit_rate` requires a `\"visit:exit_page\"` dimension. No other dimensions are allowed.",
:internal
)
end
test "fails validation with event only filters", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"dimensions" => ["visit:exit_page"],
"filters" => [["is", "event:page", ["/"]]],
"date_range" => "all"
}
|> check_error(
site,
"Metric `exit_rate` cannot be queried when filtering on event dimensions.",
:internal
)
end
test "fails validation with event metrics", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["exit_rate", "pageviews"],
"dimensions" => ["visit:exit_page"],
"date_range" => "all"
}
|> check_error(
site,
"Event metric(s) `pageviews` cannot be queried along with session dimension(s) `visit:exit_page`",
:internal
)
end
test "passes validation", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"dimensions" => ["visit:exit_page"],
"date_range" => "all"
}
|> check_success(
site,
%{
metrics: [:exit_rate],
utc_time_range: @date_range_day,
filters: [],
dimensions: ["visit:exit_page"],
order_by: nil,
timezone: site.timezone,
include: @default_include,
pagination: %{limit: 10_000, offset: 0}
},
:internal
)
end
end
describe "scroll_depth metric" do
test "fails validation on its own", %{site: site} do
%{
@ -2589,5 +2674,18 @@ defmodule Plausible.Stats.Filters.QueryParserTest do
}
)
end
test "validation fails with string segment ids", %{site: site} do
%{
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => "all",
"filters" => [["is", "segment", ["123"]]]
}
|> check_error(
site,
"Invalid filter '[\"is\", \"segment\", [\"123\"]]'."
)
end
end
end

View File

@ -12,6 +12,8 @@ defmodule Plausible.Stats.SamplingCacheTest do
@site_id4 400_000
describe "getter" do
@threshold Plausible.Stats.Sampling.default_sample_threshold()
test "returns cached values for traffic in past 30 days", %{test: test} do
now = DateTime.utc_now()
@ -19,42 +21,42 @@ defmodule Plausible.Stats.SamplingCacheTest do
%{
site_id: @site_id1,
domain: "1.com",
value: 11_000_000,
value: (@threshold * 0.55) |> trunc(),
event_timebucket: add(now, -1, :day),
metric: "buffered"
},
%{
site_id: @site_id2,
domain: "2.com",
value: 11_000_000,
value: (@threshold * 0.55) |> trunc(),
event_timebucket: add(now, -1, :day),
metric: "buffered"
},
%{
site_id: @site_id2,
domain: "2.com",
value: 11_000_000,
value: (@threshold * 0.55) |> trunc(),
event_timebucket: add(now, -5, :day),
metric: "buffered"
},
%{
site_id: @site_id2,
domain: "2.com",
value: 11_000_000,
value: (@threshold * 0.55) |> trunc(),
event_timebucket: add(now, -35, :day),
metric: "buffered"
},
%{
site_id: @site_id3,
domain: "3.com",
value: 44_000_000,
value: (@threshold * 2.05) |> trunc(),
event_timebucket: add(now, -35, :day),
metric: "buffered"
},
%{
site_id: @site_id4,
domain: "4.com",
value: 11_000_000,
value: (@threshold * 0.55) |> trunc(),
event_timebucket: add(now, -35, :day),
metric: "buffered"
}
@ -64,14 +66,14 @@ defmodule Plausible.Stats.SamplingCacheTest do
assert SamplingCache.count_all() == 1
assert SamplingCache.get(@site_id1, force?: true, cache_name: test) == nil
assert SamplingCache.get(@site_id2, force?: true, cache_name: test) == 22_000_000
assert SamplingCache.get(@site_id2, force?: true, cache_name: test) == 1.1 * @threshold
assert SamplingCache.get(@site_id3, force?: true, cache_name: test) == nil
assert SamplingCache.get(@site_id4, force?: true, cache_name: test) == nil
Plausible.IngestRepo.insert_all(Plausible.Ingestion.Counters.Record, [
%{
site_id: @site_id1,
value: 11_000_000,
value: (@threshold * 0.55) |> trunc(),
event_timebucket: add(now, -1, :day),
metric: "buffered"
}
@ -80,8 +82,8 @@ defmodule Plausible.Stats.SamplingCacheTest do
:ok = SamplingCache.refresh_all(cache_name: test)
assert SamplingCache.count_all() == 2
assert SamplingCache.get(@site_id1, force?: true, cache_name: test) == 22_000_000
assert SamplingCache.get(@site_id2, force?: true, cache_name: test) == 22_000_000
assert SamplingCache.get(@site_id1, force?: true, cache_name: test) == 1.1 * @threshold
assert SamplingCache.get(@site_id2, force?: true, cache_name: test) == 1.1 * @threshold
end
end

View File

@ -5,37 +5,39 @@ defmodule Plausible.Stats.SamplingTest do
on_ee do
import Plausible.Stats.Sampling, only: [fractional_sample_rate: 2]
alias Plausible.Stats.{Query, DateTimeRange}
alias Plausible.Stats.{Query, DateTimeRange, Sampling}
describe "&fractional_sample_rate/2" do
@threshold Sampling.default_sample_threshold()
test "no traffic estimate" do
assert fractional_sample_rate(nil, query(30)) == :no_sampling
end
test "scales sampling rate according to query duration" do
assert fractional_sample_rate(40_000_000, query(30)) == :no_sampling
assert fractional_sample_rate(40_000_000, query(60)) == 0.25
assert fractional_sample_rate(40_000_000, query(100)) == 0.15
assert fractional_sample_rate(@threshold * 2, query(30)) == :no_sampling
assert fractional_sample_rate(@threshold * 2, query(60)) == 0.25
assert fractional_sample_rate(@threshold * 2, query(100)) == 0.15
assert fractional_sample_rate(100_000_000, query(1)) == :no_sampling
assert fractional_sample_rate(100_000_000, query(5)) == :no_sampling
assert fractional_sample_rate(100_000_000, query(10)) == :no_sampling
assert fractional_sample_rate(100_000_000, query(15)) == 0.40
assert fractional_sample_rate(100_000_000, query(30)) == 0.20
assert fractional_sample_rate(100_000_000, query(60)) == 0.10
assert fractional_sample_rate(100_000_000, query(100)) == 0.06
assert fractional_sample_rate(@threshold * 5, query(1)) == :no_sampling
assert fractional_sample_rate(@threshold * 5, query(5)) == :no_sampling
assert fractional_sample_rate(@threshold * 5, query(10)) == :no_sampling
assert fractional_sample_rate(@threshold * 5, query(15)) == 0.40
assert fractional_sample_rate(@threshold * 5, query(30)) == 0.20
assert fractional_sample_rate(@threshold * 5, query(60)) == 0.10
assert fractional_sample_rate(@threshold * 5, query(100)) == 0.06
assert fractional_sample_rate(300_000_000, query(2)) == :no_sampling
assert fractional_sample_rate(300_000_000, query(5)) == 0.40
assert fractional_sample_rate(300_000_000, query(10)) == 0.20
assert fractional_sample_rate(@threshold * 15, query(2)) == :no_sampling
assert fractional_sample_rate(@threshold * 15, query(5)) == 0.40
assert fractional_sample_rate(@threshold * 15, query(10)) == 0.20
end
test "short durations" do
assert fractional_sample_rate(300_000_000_000, query(1, :hour)) == :no_sampling
assert fractional_sample_rate(@threshold * 15, query(1, :hour)) == :no_sampling
end
test "very low sampling rate" do
assert fractional_sample_rate(300_000_000_000, query(30)) == 0.01
assert fractional_sample_rate(@threshold * 500, query(30)) == 0.01
end
end
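
The expected rates above are consistent with a sample rate that scales inversely with both the traffic estimate and the query duration, roughly threshold / (estimate * days / 30), with a 0.01 floor and :no_sampling whenever the computed rate would be high. That formula is inferred from the assertions, not taken from the Sampling module, and 20_000_000 below is only a stand-in for the default threshold; a rough sketch under those assumptions:

    # Inferred relationship only -- the real implementation may differ.
    rate = fn threshold, estimate, days -> threshold / (estimate * days / 30) end
    rate.(20_000_000, 20_000_000 * 2, 60)  # => 0.25, matching the @threshold * 2 / query(60) case
    rate.(20_000_000, 20_000_000 * 5, 15)  # => 0.4,  matching the @threshold * 5 / query(15) case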

View File

@ -307,14 +307,16 @@ defmodule Plausible.Teams.Management.LayoutTest do
end
test "limits are checked", %{user: user, team: team} do
assert {:error, {:over_limit, 3}} =
team
|> Layout.init()
|> Layout.schedule_send("test1@example.com", :admin)
|> Layout.schedule_send("test2@example.com", :admin)
|> Layout.schedule_send("test3@example.com", :admin)
|> Layout.schedule_send("test4@example.com", :admin)
|> Layout.persist(%{current_user: user, current_team: team})
on_ee do
assert {:error, {:over_limit, 3}} =
team
|> Layout.init()
|> Layout.schedule_send("test1@example.com", :admin)
|> Layout.schedule_send("test2@example.com", :admin)
|> Layout.schedule_send("test3@example.com", :admin)
|> Layout.schedule_send("test4@example.com", :admin)
|> Layout.persist(%{current_user: user, current_team: team})
end
assert {:error, :only_one_owner} =
team

View File

@ -1,4 +1,4 @@
name,unique_exits,total_exits,exit_rate
/,2,2,66.0
/,2,2,66.7
/signup,1,1,100.0
/some-other-page,1,1,100.0



View File

@ -169,7 +169,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryComparisonsTest do
assert actual_comparison_last_date == expected_comparison_last_date
end
test "timeseries last 90d period in year_over_year comparison", %{
test "timeseries last 91d period in year_over_year comparison", %{
conn: conn,
site: site
} do
@ -178,19 +178,19 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryComparisonsTest do
build(:pageview, timestamp: ~N[2021-04-01 00:00:00]),
build(:pageview, timestamp: ~N[2021-04-05 00:00:00]),
build(:pageview, timestamp: ~N[2021-04-05 00:00:00]),
build(:pageview, timestamp: ~N[2021-06-29 00:00:00]),
build(:pageview, timestamp: ~N[2021-06-30 00:00:00]),
build(:pageview, timestamp: ~N[2022-04-01 00:00:00]),
build(:pageview, timestamp: ~N[2022-04-05 00:00:00]),
build(:pageview, timestamp: ~N[2022-06-29 00:00:00]),
build(:pageview, timestamp: ~N[2022-06-30 00:00:00])
build(:pageview, timestamp: ~N[2022-06-30 00:00:00]),
build(:pageview, timestamp: ~N[2022-07-01 00:00:00])
])
conn =
post(conn, "/api/v2/query-internal-test", %{
"site_id" => site.domain,
"metrics" => ["visitors"],
"date_range" => "90d",
"date" => "2022-06-30",
"date_range" => "91d",
"date" => "2022-07-01",
"dimensions" => ["time:day"],
"include" => %{
"time_labels" => true,
@ -205,7 +205,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryComparisonsTest do
assert "2022-04-01" = List.first(time_labels)
assert "2022-04-05" = Enum.at(time_labels, 4)
assert "2022-06-29" = List.last(time_labels)
assert "2022-06-30" = List.last(time_labels)
assert %{
"dimensions" => ["2022-04-01"],
@ -226,13 +226,13 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QueryComparisonsTest do
} = Enum.find(results, &(&1["dimensions"] == ["2022-04-05"]))
assert %{
"dimensions" => ["2022-06-29"],
"dimensions" => ["2022-06-30"],
"metrics" => [1],
"comparison" => %{
"dimensions" => ["2021-06-29"],
"dimensions" => ["2021-06-30"],
"metrics" => [1]
}
} = Enum.find(results, &(&1["dimensions"] == ["2022-06-29"]))
} = Enum.find(results, &(&1["dimensions"] == ["2022-06-30"]))
end
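
The shorthand period grows from 90 to 91 days, which is exactly 13 weeks; presumably that keeps the window week-aligned in the same way 28d is four weeks. Per the assertions, a query dated 2022-07-01 now covers 2022-04-01 through 2022-06-30, i.e. the reference date itself is excluded. A quick check of the arithmetic using plain Elixir Date functions:

    Date.range(~D[2022-04-01], ~D[2022-06-30]) |> Enum.count()  # => 91 (30 + 31 + 30)
    rem(91, 7)                                                  # => 0, i.e. 13 whole weeks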
test "dimensional comparison with low limit", %{conn: conn, site: site} do

View File

@ -216,4 +216,288 @@ defmodule PlausibleWeb.Api.ExternalStatsController.QuerySpecialMetricsTest do
%{"dimensions" => ["Mobile"], "metrics" => [25.0]}
]
end
describe "exit_rate" do
test "in visit:exit_page breakdown without filters", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, user_id: 1, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 1, pathname: "/two", timestamp: ~N[2021-01-01 00:10:00]),
build(:pageview, user_id: 3, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 3, pathname: "/never-exit", timestamp: ~N[2021-01-01 00:00:00]),
build(:event, user_id: 3, name: "a", pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 3, pathname: "/one", timestamp: ~N[2021-01-01 00:10:00])
])
conn =
post(
conn,
"/api/v2/query-internal-test",
%{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"date_range" => "all",
"dimensions" => ["visit:exit_page"],
"order_by" => [["exit_rate", "desc"]]
}
)
%{"results" => results} = json_response(conn, 200)
assert results == [
%{"dimensions" => ["/two"], "metrics" => [100]},
%{"dimensions" => ["/one"], "metrics" => [33.3]}
]
end
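
The expected values imply exit_rate = exits from the page / pageviews of the page, as a percentage: /one is viewed three times across the two sessions but is the exit page of only one of them, while /two is viewed once and exited once. That reading is inferred from the fixtures rather than from the query implementation; as a quick check:

    Float.round(1 / 3 * 100, 1)  # => 33.3, the reported rate for /one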
test "in visit:exit_page breakdown filtered by visit:exit_page", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 1, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 1, pathname: "/two", timestamp: ~N[2021-01-01 00:10:00])
])
conn =
post(
conn,
"/api/v2/query-internal-test",
%{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"date_range" => "all",
"dimensions" => ["visit:exit_page"],
"filters" => [["is", "visit:exit_page", ["/one"]]]
}
)
%{"results" => results} = json_response(conn, 200)
assert results == [
%{"dimensions" => ["/one"], "metrics" => [66.7]}
]
end
test "in visit:exit_page breakdown filtered by visit:exit_page and visit:entry_page", %{
conn: conn,
site: site
} do
populate_stats(site, [
# Bounced sessions: Match both entry- and exit page filters
build(:pageview, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
# Session 1: Matches both entry- and exit page filters
build(:pageview, user_id: 1, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 1, pathname: "/two", timestamp: ~N[2021-01-01 00:10:00]),
build(:pageview, user_id: 1, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
# Session 2: Does not match exit_page filter, BUT the pageview on /one still
# gets counted towards total pageviews.
build(:pageview, user_id: 2, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 2, pathname: "/two", timestamp: ~N[2021-01-01 00:10:00]),
# Session 3: Does not match entry_page filter, should be ignored
build(:pageview, user_id: 3, pathname: "/two", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 3, pathname: "/one", timestamp: ~N[2021-01-01 00:20:00])
])
conn =
post(
conn,
"/api/v2/query-internal-test",
%{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"date_range" => "all",
"dimensions" => ["visit:exit_page"],
"filters" => [
["is", "visit:exit_page", ["/one"]],
["is", "visit:entry_page", ["/one"]]
]
}
)
%{"results" => results} = json_response(conn, 200)
assert results == [
%{"dimensions" => ["/one"], "metrics" => [60]}
]
end
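
Worked through, the 60% follows directly from the comments above: three sessions exit on /one while matching both filters (the two bounces plus session 1), and the filter-matching sessions record five pageviews of /one in total (one each from the bounces and session 2, two from session 1):

    3 / 5 * 100  # => 60.0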
test "in visit:exit_page breakdown filtered by visit:country", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, pathname: "/one", country_code: "EE", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, pathname: "/one", country_code: "US", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview,
user_id: 1,
pathname: "/one",
country_code: "EE",
timestamp: ~N[2021-01-01 00:00:00]
),
build(:pageview,
user_id: 1,
pathname: "/two",
country_code: "EE",
timestamp: ~N[2021-01-01 00:10:00]
)
])
conn =
post(
conn,
"/api/v2/query-internal-test",
%{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"date_range" => "all",
"filters" => [["is", "visit:country", ["EE"]]],
"dimensions" => ["visit:exit_page"],
"order_by" => [["exit_rate", "asc"]]
}
)
%{"results" => results} = json_response(conn, 200)
assert results == [
%{"dimensions" => ["/one"], "metrics" => [50]},
%{"dimensions" => ["/two"], "metrics" => [100.0]}
]
end
test "sorting and pagination", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 2, pathname: "/two", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 2, pathname: "/two", timestamp: ~N[2021-01-01 00:01:00]),
build(:pageview, user_id: 3, pathname: "/three", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 3, pathname: "/three", timestamp: ~N[2021-01-01 00:01:00]),
build(:pageview, user_id: 3, pathname: "/three", timestamp: ~N[2021-01-01 00:02:00]),
build(:pageview, user_id: 4, pathname: "/four", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 4, pathname: "/four", timestamp: ~N[2021-01-01 00:01:00]),
build(:pageview, user_id: 4, pathname: "/four", timestamp: ~N[2021-01-01 00:02:00]),
build(:pageview, user_id: 4, pathname: "/four", timestamp: ~N[2021-01-01 00:03:00])
])
do_query = fn order_by, pagination ->
conn
|> post("/api/v2/query-internal-test", %{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"date_range" => "all",
"dimensions" => ["visit:exit_page"],
"order_by" => order_by,
"pagination" => pagination
})
|> json_response(200)
|> Map.get("results")
end
all_results_asc = do_query.([["exit_rate", "asc"]], %{"limit" => 4})
all_results_desc = do_query.([["exit_rate", "desc"]], %{"limit" => 4})
assert all_results_asc == Enum.reverse(all_results_desc)
assert do_query.([["exit_rate", "desc"]], %{"limit" => 2, "offset" => 0}) == [
%{"dimensions" => ["/one"], "metrics" => [100]},
%{"dimensions" => ["/two"], "metrics" => [50]}
]
assert do_query.([["exit_rate", "desc"]], %{"limit" => 2, "offset" => 2}) == [
%{"dimensions" => ["/three"], "metrics" => [33.3]},
%{"dimensions" => ["/four"], "metrics" => [25]}
]
assert do_query.([["exit_rate", "asc"]], %{"limit" => 3, "offset" => 1}) == [
%{"dimensions" => ["/three"], "metrics" => [33.3]},
%{"dimensions" => ["/two"], "metrics" => [50]},
%{"dimensions" => ["/one"], "metrics" => [100]}
]
end
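
The fixture gives each page exactly one exit over one, two, three and four pageviews respectively, so the rates are strictly decreasing (100, 50, 33.3, 25) and the ordering and offset assertions have a single valid answer:

    Enum.map(1..4, fn n -> Float.round(1 / n * 100, 1) end)  # => [100.0, 50.0, 33.3, 25.0]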
test "with comparisons", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, user_id: 1, pathname: "/one", timestamp: ~N[2021-01-09 00:00:00]),
build(:pageview, user_id: 1, pathname: "/three", timestamp: ~N[2021-01-09 00:00:00]),
build(:pageview, pathname: "/one", timestamp: ~N[2021-01-09 00:10:00]),
build(:pageview, user_id: 2, pathname: "/one", timestamp: ~N[2021-01-10 00:00:00]),
build(:pageview, user_id: 2, pathname: "/two", timestamp: ~N[2021-01-10 00:10:00]),
build(:pageview, user_id: 3, pathname: "/one", timestamp: ~N[2021-01-10 00:00:00]),
build(:pageview, user_id: 3, pathname: "/one", timestamp: ~N[2021-01-10 00:10:00])
])
conn =
post(
conn,
"/api/v2/query-internal-test",
%{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"date_range" => ["2021-01-10", "2021-01-10"],
"dimensions" => ["visit:exit_page"],
"include" => %{"comparisons" => %{"mode" => "previous_period"}},
"order_by" => [["exit_rate", "desc"]]
}
)
%{"results" => results} = json_response(conn, 200)
assert results == [
%{
"dimensions" => ["/two"],
"metrics" => [100],
"comparison" => %{
"change" => [nil],
"dimensions" => ["/two"],
"metrics" => [nil]
}
},
%{
"dimensions" => ["/one"],
"metrics" => [33.3],
"comparison" => %{
"change" => [-16.7],
"dimensions" => ["/one"],
"metrics" => [50]
}
}
]
end
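
Reading the comparison fixtures: on 2021-01-09 /one is viewed twice and exited once, so its previous-period rate is 50 and the reported change is 33.3 - 50 = -16.7, while /two has no exits on that day, hence the nil comparison metrics and nil change:

    Float.round(33.3 - 50, 1)  # => -16.7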
test "with imported data", %{conn: conn, site: site} do
site_import =
insert(:site_import,
site: site,
start_date: ~D[2020-01-01],
end_date: ~D[2020-12-31]
)
populate_stats(site, site_import.id, [
build(:pageview, user_id: 1, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 1, pathname: "/two", timestamp: ~N[2021-01-01 00:10:00]),
build(:pageview, user_id: 3, pathname: "/one", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 3, pathname: "/three", timestamp: ~N[2021-01-01 00:00:00]),
build(:pageview, user_id: 3, pathname: "/one", timestamp: ~N[2021-01-01 00:10:00]),
build(:imported_pages, page: "/one", visits: 10, pageviews: 20, date: ~D[2020-01-01]),
build(:imported_exit_pages, exit_page: "/one", exits: 2, date: ~D[2020-01-01])
])
conn =
post(
conn,
"/api/v2/query-internal-test",
%{
"site_id" => site.domain,
"metrics" => ["exit_rate"],
"date_range" => "all",
"include" => %{"imports" => true},
"dimensions" => ["visit:exit_page"],
"order_by" => [["exit_rate", "desc"]]
}
)
%{"results" => results} = json_response(conn, 200)
assert results == [
%{"dimensions" => ["/two"], "metrics" => [100]},
%{"dimensions" => ["/one"], "metrics" => [13]}
]
end
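
With imports included, the native and imported counts appear to be merged before the ratio is taken: /one has one native exit plus two imported exits over three native pageviews plus twenty imported pageviews. That is an inference from the fixture values rather than a description of the query internals:

    Float.round(3 / 23 * 100)  # => 13.0, surfaced as 13 in the response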
end
end

View File

@ -167,21 +167,21 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
assert Enum.sum(plot) == 2
end
test "displays visitors for last 90d", %{conn: conn, site: site} do
test "displays visitors for last 91d", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, timestamp: ~N[2021-01-16 00:00:00]),
build(:pageview, timestamp: ~N[2021-04-15 00:00:00])
build(:pageview, timestamp: ~N[2021-04-16 00:00:00])
])
conn =
get(
conn,
"/api/stats/#{site.domain}/main-graph?period=90d&date=2021-04-16&metric=visitors"
"/api/stats/#{site.domain}/main-graph?period=91d&date=2021-04-17&metric=visitors"
)
assert %{"plot" => plot} = json_response(conn, 200)
assert Enum.count(plot) == 90
assert Enum.count(plot) == 91
assert List.first(plot) == 1
assert List.last(plot) == 1
assert Enum.sum(plot) == 2
@ -1276,14 +1276,14 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
}
end
test "shows imperfect month-split for last 90d with full month indicators", %{
test "shows imperfect month-split for last 91d with full month indicators", %{
conn: conn,
site: site
} do
conn =
get(
conn,
"/api/stats/#{site.domain}/main-graph?period=90d&metric=visitors&interval=month&date=2021-12-13"
"/api/stats/#{site.domain}/main-graph?period=91d&metric=visitors&interval=month&date=2021-12-13"
)
assert %{"labels" => labels, "full_intervals" => full_intervals} = json_response(conn, 200)
@ -1298,14 +1298,14 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
}
end
test "shows half-perfect month-split for last 90d with full month indicators", %{
test "shows perfect month-split for last 91d with full month indicators", %{
conn: conn,
site: site
} do
conn =
get(
conn,
"/api/stats/#{site.domain}/main-graph?period=90d&metric=visitors&interval=month&date=2021-12-01"
"/api/stats/#{site.domain}/main-graph?period=91d&metric=visitors&interval=month&date=2021-12-01"
)
assert %{"labels" => labels, "full_intervals" => full_intervals} = json_response(conn, 200)
@ -1313,7 +1313,7 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
assert labels == ["2021-09-01", "2021-10-01", "2021-11-01"]
assert full_intervals == %{
"2021-09-01" => false,
"2021-09-01" => true,
"2021-10-01" => true,
"2021-11-01" => true
}
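
With the 91-day window anchored at 2021-12-01 and the anchor date excluded, the period appears to run from 2021-09-01 through 2021-11-30, so all three plotted months are complete and the September interval flips from false to true:

    Date.diff(~D[2021-11-30], ~D[2021-09-01]) + 1  # => 91 (30 + 31 + 30)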
@ -1863,7 +1863,7 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
refute present_index
end
for period <- ["7d", "28d", "30d", "90d"] do
for period <- ["7d", "28d", "30d", "91d"] do
test "#{period} period does not include today", %{conn: conn, site: site} do
today = "2021-01-01"
yesterday = "2020-12-31"

View File

@ -2580,7 +2580,7 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
conn = get(conn, "/api/stats/#{site.domain}/exit-pages?period=day&date=2021-01-01")
assert json_response(conn, 200)["results"] == [
%{"name" => "/page1", "visitors" => 2, "visits" => 2, "exit_rate" => 66},
%{"name" => "/page1", "visitors" => 2, "visits" => 2, "exit_rate" => 66.7},
%{"name" => "/page2", "visitors" => 1, "visits" => 1, "exit_rate" => 100}
]
end
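
The expected exit rate moves from 66 to 66.7 because the metric now carries one decimal place; with two exits out of what is presumably three total visits to /page1, the exact value is 66.666..., previously shown as 66:

    Float.round(2 / 3 * 100, 1)  # => 66.7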
@ -2615,7 +2615,7 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
assert json_response(conn, 200)["results"] == [
%{"name" => "/page2", "visitors" => 1, "visits" => 1, "exit_rate" => 100},
%{"name" => "/page1", "visitors" => 2, "visits" => 2, "exit_rate" => 66}
%{"name" => "/page1", "visitors" => 2, "visits" => 2, "exit_rate" => 66.7}
]
end
@ -2744,7 +2744,7 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
conn1 = get(conn, "/api/stats/#{site.domain}/exit-pages?period=day&date=2021-01-01")
assert json_response(conn1, 200)["results"] == [
%{"name" => "/page1", "visitors" => 2, "visits" => 2, "exit_rate" => 66},
%{"name" => "/page1", "visitors" => 2, "visits" => 2, "exit_rate" => 66.7},
%{"name" => "/page2", "visitors" => 1, "visits" => 1, "exit_rate" => 100}
]
@ -2761,7 +2761,7 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
"visits" => 4,
"exit_rate" => 80.0
},
%{"name" => "/page1", "visitors" => 2, "visits" => 2, "exit_rate" => 66}
%{"name" => "/page1", "visitors" => 2, "visits" => 2, "exit_rate" => 66.7}
]
end

View File

@ -121,6 +121,7 @@ defmodule PlausibleWeb.Live.TeamMangementTest do
refute element_exists?(html, "#guest-list")
end
@tag :ee_only
test "fails to save layout with limits breached", %{conn: conn, team: team} do
lv = get_liveview(conn)
add_invite(lv, "new1@example.com", "admin")
@ -143,7 +144,8 @@ defmodule PlausibleWeb.Live.TeamMangementTest do
"Error! Make sure the e-mail is valid and is not taken already"
end
test "allows removing any type of entry", %{
@tag :ee_only
test "allows removing any type of entry (EE)", %{
conn: conn,
user: user,
team: team
@ -214,6 +216,72 @@ defmodule PlausibleWeb.Live.TeamMangementTest do
assert_no_emails_delivered()
end
@tag :ce_only
test "allows removing any type of entry (CE)", %{
conn: conn,
user: user,
team: team
} do
member2 = add_member(team, role: :admin)
_invitation = invite_member(team, "sent@example.com", inviter: user, role: :viewer)
site = new_site(team: team)
guest =
add_guest(site,
role: :viewer,
user: new_user(name: "Mr Guest", email: "guest@example.com")
)
lv = get_liveview(conn)
html = render(lv)
assert html |> find(member_el()) |> Enum.count() == 3
assert html |> find(guest_el()) |> Enum.count() == 1
sent = find(html, "#{member_el()}:nth-of-type(1)") |> text()
owner = find(html, "#{member_el()}:nth-of-type(2)") |> text()
admin = find(html, "#{member_el()}:nth-of-type(3)") |> text()
guest_member = find(html, "#{guest_el()}:first-of-type") |> text()
assert sent =~ "Invitation Sent"
assert owner =~ "You"
assert admin =~ "Team Member"
assert guest_member =~ "Guest"
remove_member(lv, 1)
# last becomes second
remove_member(lv, 2)
# remove guest
remove_member(lv, 1, guest_el())
html = render(lv) |> text()
refute html =~ "Invitation Sent"
refute html =~ "Team Member"
refute html =~ "Guest"
html = render(lv)
assert html |> find(member_el()) |> Enum.count() == 1
refute element_exists?(html, "#guest-list")
assert_email_delivered_with(
to: [nil: member2.email],
subject: @subject_prefix <> "Your access to \"#{team.name}\" team has been revoked"
)
assert_email_delivered_with(
to: [nil: guest.email],
subject: @subject_prefix <> "Your access to \"#{team.name}\" team has been revoked"
)
assert_no_emails_delivered()
end
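
Splitting this scenario into EE and CE variants, and tagging the limit checks :ee_only, lines up with member limits no longer being enforced on Community Edition; presumably the suite excludes one tag or the other depending on the build. A minimal sketch of that kind of ExUnit tag filtering, with the configuration itself being an assumption rather than what this repo necessarily does:

    # hypothetical test_helper.exs for a CE build
    ExUnit.start(exclude: [:ee_only])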
test "guest->owner promotion",
%{
conn: conn,

View File

@ -199,6 +199,7 @@ defmodule PlausibleWeb.Live.TeamSetupTest do
)
end
@tag :ee_only
test "fails to save layout with limits breached", %{conn: conn} do
lv = get_child_lv(conn)
html = render(lv)

View File

@ -37,6 +37,60 @@ defmodule PlausibleWeb.Plugins.API.Controllers.CustomPropsTest do
describe "business tier" do
@describetag :ee_only
test "allows prop enable for special key", %{
site: site,
token: token,
conn: conn
} do
[owner | _] = Plausible.Repo.preload(site, :owners).owners
subscribe_to_growth_plan(owner)
url = Routes.plugins_api_custom_props_url(PlausibleWeb.Endpoint, :enable)
payload = %{
custom_prop: %{key: "search_query"}
}
assert_request_schema(payload, "CustomProp.EnableRequest", spec())
conn
|> authenticate(site.domain, token)
|> put_req_header("content-type", "application/json")
|> put(url, payload)
|> json_response(201)
|> assert_schema("CustomProp.ListResponse", spec())
end
test "allows bulk prop enable for special keys", %{
site: site,
token: token,
conn: conn
} do
[owner | _] = Plausible.Repo.preload(site, :owners).owners
subscribe_to_growth_plan(owner)
url = Routes.plugins_api_custom_props_url(PlausibleWeb.Endpoint, :enable)
payload = %{
custom_props: [
%{
custom_prop: %{key: "search_query"}
},
%{
custom_prop: %{key: "url"}
}
]
}
conn
|> authenticate(site.domain, token)
|> put_req_header("content-type", "application/json")
|> put(url, payload)
|> json_response(201)
|> assert_schema("CustomProp.ListResponse", spec())
end
test "fails on custom prop enable attempt with insufficient plan", %{
site: site,
token: token,
@ -76,6 +130,9 @@ defmodule PlausibleWeb.Plugins.API.Controllers.CustomPropsTest do
%{
custom_prop: %{key: "author"}
},
%{
custom_prop: %{key: "search_query"}
},
%{
custom_prop: %{key: "category"}
}

View File

@ -1,5 +1,5 @@
{
"tracker_script_version": 1,
"tracker_script_version": 2,
"scripts": {
"deploy": "node compile.js",
"test": "npm run deploy && npx playwright test",

View File

@ -287,7 +287,7 @@
body: JSON.stringify(payload)
}).then(function(response) {
options && options.callback && options.callback({status: response.status})
})
}).catch(function() {})
}
{{/if}}
}