diff --git a/.formatter.exs b/.formatter.exs
index 27607cac3e..54fbb910ac 100644
--- a/.formatter.exs
+++ b/.formatter.exs
@@ -1,14 +1,6 @@
 [
-  inputs: [
-    "config/*.{ex,exs}",
-    "lib/*.{ex,exs}",
-    "lib/**/*.{ex,exs}",
-    "test/*.{ex,exs}",
-    "test/**/*.{ex,exs}",
-    "priv/**/*.{ex,exs}",
-    "mix.exs",
-    ".formatter.exs"
-  ],
-  # plugins: [Phoenix.LiveView.HTMLFormatter],
-  inputs: ["*.{heex,ex,exs}", "{config,lib,test}/**/*.{heex,ex,exs}", "priv/*/seeds.exs"]
+  import_deps: [:ecto, :ecto_sql, :phoenix],
+  plugins: [Phoenix.LiveView.HTMLFormatter],
+  inputs: ["*.{heex,ex,exs}", "{config,lib,test}/**/*.{heex,ex,exs}", "priv/*/seeds.exs"],
+  subdirectories: ["priv/*/migrations"]
 ]
diff --git a/.iex.exs b/.iex.exs
index 35b63f2b8b..da4189507a 100644
--- a/.iex.exs
+++ b/.iex.exs
@@ -8,7 +8,6 @@ alias Sanbase.Model.{
   MarketSegment,
   LatestCoinmarketcapData,
   ModelUtils,
-  Currency,
   Ico,
   IcoCurrency
diff --git a/assets/tailwind.config.js b/assets/tailwind.config.js
index 11740338e8..a761e5d7e2 100644
--- a/assets/tailwind.config.js
+++ b/assets/tailwind.config.js
@@ -18,6 +18,7 @@ module.exports = {
       }
     },
   },
+  safelist: [ ],
   plugins: [
     require("@tailwindcss/forms"),
     // Allows prefixing tailwind classes with LiveView classes to add rules
diff --git a/lib/sanbase/balances/balance.ex b/lib/sanbase/balances/balance.ex
index 9689882936..2586ec3611 100644
--- a/lib/sanbase/balances/balance.ex
+++ b/lib/sanbase/balances/balance.ex
@@ -307,18 +307,22 @@ defmodule Sanbase.Balance do
   def current_balance_top_addresses(slug, opts) do
     with {:ok, {decimals, infrastructure, blockchain}} <- info_by_slug(slug),
          {:ok, table} <- realtime_balances_table(slug, infrastructure) do
-      query_struct = top_addresses_query(slug, decimals, blockchain, table, opts)
-
-      ClickhouseRepo.query_transform(query_struct, fn [address, balance] ->
-        %{
-          address: address,
-          infrastructure: infrastructure,
-          balance: balance
-        }
-      end)
+      current_balance_top_addresses(slug, decimals, infrastructure, blockchain, table, opts)
     end
   end
 
+  def current_balance_top_addresses(slug, decimals, infrastructure, blockchain, table, opts) do
+    query_struct = top_addresses_query(slug, decimals, blockchain, table, opts)
+
+    ClickhouseRepo.query_transform(query_struct, fn [address, balance] ->
+      %{
+        address: address,
+        infrastructure: infrastructure,
+        balance: balance
+      }
+    end)
+  end
+
   def realtime_balances_table_or_nil(slug, infr) do
     case realtime_balances_table(slug, infr) do
       {:ok, table} -> {:ok, table}
diff --git a/lib/sanbase/balances/balance_sql_query.ex b/lib/sanbase/balances/balance_sql_query.ex
index d5b948ab8d..aa6841c803 100644
--- a/lib/sanbase/balances/balance_sql_query.ex
+++ b/lib/sanbase/balances/balance_sql_query.ex
@@ -359,16 +359,23 @@ defmodule Sanbase.Balance.SqlQuery do
     sql = """
     SELECT address, balance
     FROM (
-      SELECT address, argMax(balance, dt) / pow(10, {{decimals}}) AS balance
-      FROM #{table}
-      PREWHERE
-        addressType = 'normal'
-      GROUP BY address
+      SELECT
+        ebr.address,
+        argMax(ebr.balance, ebr.dt) / pow(10, {{decimals}}) AS balance
+      FROM #{table} AS ebr
+      WHERE (ebr.address GLOBAL IN (
+        SELECT address
+        FROM eth_top_holders_daily
+        WHERE value > 1e10 AND (dt = toStartOfDay(today() - toIntervalDay(1))) AND (rank > 0)
+        ORDER BY value #{direction}
+        LIMIT {{limit}}*2
+      )) AND (ebr.addressType = 'normal')
+      GROUP BY ebr.address
     )
     #{labels_join_str}
-    WHERE balance > 1e-10
     ORDER BY balance #{direction}
-    LIMIT {{limit}} OFFSET {{offset}}
+    LIMIT {{limit}}
+    OFFSET {{offset}}
     """
 
     Sanbase.Clickhouse.Query.new(sql, params)
@@ -397,7 +404,7 @@ defmodule Sanbase.Balance.SqlQuery do
       FROM #{table}
       PREWHERE
         assetRefId = (SELECT asset_ref_id FROM asset_metadata FINAL WHERE name = {{slug}} LIMIT 1) AND
-        addressType = 'normal'
+        addressType = 'normal' AND (dt > (now() - toIntervalDay(1)))
       GROUP BY address
     )
     #{labels_join_str}
@@ -420,8 +427,11 @@ defmodule Sanbase.Balance.SqlQuery do
     GLOBAL ANY INNER JOIN
     (
       SELECT address
-      FROM blockchain_address_labels
-      PREWHERE blockchain = {{#{blockchain_key}}} AND label IN ({{#{labels_key}}})})
+      FROM current_label_addresses
+      WHERE blockchain = {{#{blockchain_key}}} AND
+        label_id IN (
+          SELECT label_id FROM label_metadata WHERE key IN ({{#{labels_key}}})
+        )
     ) USING (address)
diff --git a/lib/sanbase/billing/billing.ex b/lib/sanbase/billing/billing.ex
index c1f4ebc05a..92eae9db60 100644
--- a/lib/sanbase/billing/billing.ex
+++ b/lib/sanbase/billing/billing.ex
@@ -48,6 +48,11 @@ defmodule Sanbase.Billing do
     |> Enum.empty?()
   end
 
+  def eligible_for_api_trial?(user_id) do
+    Subscription.all_user_subscriptions_for_product(user_id, Product.product_api())
+    |> Enum.empty?()
+  end
+
   @doc ~s"""
   Sync the locally defined Products and Plans with stripe.
diff --git a/lib/sanbase/billing/subscription/subscription.ex b/lib/sanbase/billing/subscription/subscription.ex
index 550c8ac093..5b137be22f 100644
--- a/lib/sanbase/billing/subscription/subscription.ex
+++ b/lib/sanbase/billing/subscription/subscription.ex
@@ -25,13 +25,15 @@ defmodule Sanbase.Billing.Subscription do
   Please, contact administrator of the site for more information.
   """
   @product_sanbase Product.product_sanbase()
+  @product_api Product.product_api()
   @sanbase_basic_plan_id 205
   @preload_fields [:user, plan: [:product]]
+  @trial_days 14
 
   # Unused due to disabling of annual discounts
   # @one_month_discount_days 30
   # @during_trial_discount_percent_off 50
-  # @trial_days 14
+  # @one_month_trial_discount_percent_off 35
   # @annual_discount_plan_ids [202]
@@ -139,7 +141,7 @@ defmodule Sanbase.Billing.Subscription do
          {:ok, stripe_subscription} <- create_stripe_subscription(user, plan, coupon),
          {:ok, db_subscription} <- create_subscription_db(stripe_subscription, user, plan) do
       if db_subscription.status == :active do
-        maybe_delete_trialing_subscriptions(user.id)
+        maybe_delete_trialing_subscriptions(user.id, plan)
       end
 
       {:ok, default_preload(db_subscription, force: true)}
@@ -155,16 +157,26 @@ defmodule Sanbase.Billing.Subscription do
          {:ok, stripe_subscription} <- create_stripe_subscription(user, plan, coupon),
          {:ok, db_subscription} <- create_subscription_db(stripe_subscription, user, plan) do
       if db_subscription.status == :active do
-        maybe_delete_trialing_subscriptions(user.id)
+        maybe_delete_trialing_subscriptions(user.id, plan)
       end
 
       {:ok, default_preload(db_subscription, force: true)}
     end
   end
 
-  def maybe_delete_trialing_subscriptions(user_id) do
+  def maybe_delete_trialing_subscriptions(user_id, %Plan{product_id: product_id})
+      when product_id == @product_sanbase do
+    __MODULE__
+    |> __MODULE__.Query.user_has_any_subscriptions_for_product(user_id, @product_sanbase)
+    |> Repo.all()
+    |> Enum.filter(fn subscription -> subscription.status == :trialing end)
+    |> Enum.each(fn subscription -> StripeApi.delete_subscription(subscription.stripe_id) end)
+  end
+
+  def maybe_delete_trialing_subscriptions(user_id, %Plan{product_id: product_id})
+      when product_id == @product_api do
     __MODULE__
-    |> __MODULE__.Query.user_has_any_subscriptions_for_product(user_id, Product.product_sanbase())
+    |> __MODULE__.Query.user_has_any_subscriptions_for_product(user_id, @product_api)
     |> Repo.all()
     |> Enum.filter(fn subscription -> subscription.status == :trialing end)
     |> Enum.each(fn subscription -> StripeApi.delete_subscription(subscription.stripe_id) end)
@@ -499,26 +511,24 @@ defmodule Sanbase.Billing.Subscription do
   end
 
   defp subscription_defaults(user, %Plan{product_id: product_id} = plan)
-       when product_id == @product_sanbase do
+       when product_id in [@product_sanbase, @product_api] do
     defaults = %{
       customer: user.stripe_customer_id,
       items: [%{plan: plan.stripe_id}]
     }
 
-    defaults =
-      case Billing.eligible_for_sanbase_trial?(user.id) do
-        true ->
-          Map.put(
-            defaults,
-            :trial_end,
-            Sanbase.DateTimeUtils.days_after(14) |> DateTime.to_unix()
-          )
-
-        false ->
-          defaults
-      end
+    trial_end_unix = Sanbase.DateTimeUtils.days_after(@trial_days) |> DateTime.to_unix()
+
+    cond do
+      product_id == @product_sanbase and Billing.eligible_for_sanbase_trial?(user.id) ->
+        Map.put(defaults, :trial_end, trial_end_unix)
 
-    defaults
+      product_id == @product_api and Billing.eligible_for_api_trial?(user.id) ->
+        Map.put(defaults, :trial_end, trial_end_unix)
+
+      true ->
+        defaults
+    end
   end
 
   defp subscription_defaults(user, plan) do
diff --git a/lib/sanbase/clickhouse/github/sql_query.ex b/lib/sanbase/clickhouse/github/sql_query.ex
index 2f96743c18..57ec53950a 100644
--- a/lib/sanbase/clickhouse/github/sql_query.ex
+++ b/lib/sanbase/clickhouse/github/sql_query.ex
@@ -1,5 +1,12 @@
 defmodule Sanbase.Clickhouse.Github.SqlQuery do
-  import Sanbase.Metric.SqlQuery.Helper, only: [timerange_parameters: 3]
+  import Sanbase.Metric.SqlQuery.Helper,
+    only: [
+      timerange_parameters: 3,
+      to_unix_timestamp: 3,
+      to_unix_timestamp_from_number: 2
+    ]
+
+  import Sanbase.DateTimeUtils, only: [maybe_str_to_sec: 1]
 
   @non_dev_events [
     "IssueCommentEvent",
@@ -52,12 +59,24 @@ defmodule Sanbase.Clickhouse.Github.SqlQuery do
   end
 
   def dev_activity_contributors_count_query(organizations, from, to, interval) do
+    {from, to, _interval, span} = timerange_parameters(from, to, interval)
+
+    params = %{
+      interval: maybe_str_to_sec(interval),
+      organizations: organizations |> Enum.map(&String.downcase/1),
+      from: from,
+      to: to,
+      span: span,
+      non_dev_events: @non_dev_events
+    }
+
+    # {to_unix_timestamp(interval, "dt", argument_name: "interval")} AS time,
     sql = """
     SELECT time, toUInt32(SUM(uniq_contributors)) AS value
     FROM (
       SELECT
-        toUnixTimestamp(intDiv(toUInt32(dt), {{interval}}) * {{interval}}) AS time,
+        #{to_unix_timestamp(interval, "dt", argument_name: "interval")} AS time,
         uniqExact(actor) AS uniq_contributors
       FROM #{@table}
      PREWHERE
@@ -69,29 +88,29 @@ defmodule Sanbase.Clickhouse.Github.SqlQuery do
     )
     GROUP BY time
     """
-    |> wrap_timeseries_in_gap_filling_query()
+    |> wrap_timeseries_in_gap_filling_query(interval)
+
+    Sanbase.Clickhouse.Query.new(sql, params)
+  end
 
-    {from, to, interval, span} = timerange_parameters(from, to, interval)
+  def github_activity_contributors_count_query(organizations, from, to, interval) do
+    {from, to, _interval, span} = timerange_parameters(from, to, interval)
 
     params = %{
-      interval: interval,
+      interval: maybe_str_to_sec(interval),
       organizations: organizations |> Enum.map(&String.downcase/1),
       from: from,
       to: to,
-      non_dev_events: @non_dev_events,
-      span: span
+      span: span,
+      non_dev_events: @non_dev_events
     }
 
-    Sanbase.Clickhouse.Query.new(sql, params)
-  end
-
-  def github_activity_contributors_count_query(organizations, from, to, interval) do
     sql = """
     SELECT time, toUInt32(SUM(uniq_contributors)) AS value
     FROM (
       SELECT
-        toUnixTimestamp(intDiv(toUInt32(dt), {{interval}}) * {{interval}}) AS time,
+        #{to_unix_timestamp(interval, "dt", argument_name: "interval")} AS time,
         uniqExact(actor) AS uniq_contributors
       FROM #{@table}
       PREWHERE
@@ -102,28 +121,29 @@ defmodule Sanbase.Clickhouse.Github.SqlQuery do
     )
     GROUP BY time
     """
-    |> wrap_timeseries_in_gap_filling_query()
+    |> wrap_timeseries_in_gap_filling_query(interval)
+
+    Sanbase.Clickhouse.Query.new(sql, params)
+  end
 
-    {from, to, interval, span} = timerange_parameters(from, to, interval)
+  def dev_activity_query(organizations, from, to, interval) do
+    {from, to, _interval, span} = timerange_parameters(from, to, interval)
 
     params = %{
-      interval: interval,
+      interval: maybe_str_to_sec(interval),
       organizations: organizations |> Enum.map(&String.downcase/1),
       from: from,
       to: to,
-      span: span
+      span: span,
+      non_dev_events: @non_dev_events
     }
 
-    Sanbase.Clickhouse.Query.new(sql, params)
-  end
-
-  def dev_activity_query(organizations, from, to, interval) do
     sql = """
     SELECT time, SUM(events) AS value
     FROM (
       SELECT
-        toUnixTimestamp(intDiv(toUInt32(dt), {{interval}}) * {{interval}}) AS time,
+        #{to_unix_timestamp(interval, "dt", argument_name: "interval")} AS time,
         count(events) AS events
       FROM (
         SELECT any(event) AS events, dt
@@ -139,29 +159,29 @@ defmodule Sanbase.Clickhouse.Github.SqlQuery do
     )
     GROUP BY time
     """
-    |> wrap_timeseries_in_gap_filling_query()
+    |> wrap_timeseries_in_gap_filling_query(interval)
 
-    {from, to, interval, span} = timerange_parameters(from, to, interval)
+    Sanbase.Clickhouse.Query.new(sql, params)
+  end
+
+  def github_activity_query(organizations, from, to, interval) do
+    {from, to, _interval, span} = timerange_parameters(from, to, interval)
 
     params = %{
-      interval: interval,
+      interval: maybe_str_to_sec(interval),
       organizations: organizations |> Enum.map(&String.downcase/1),
       from: from,
       to: to,
-      non_dev_events: @non_dev_events,
-      span: span
+      span: span,
+      non_dev_events: @non_dev_events
     }
 
-    Sanbase.Clickhouse.Query.new(sql, params)
-  end
-
-  def github_activity_query(organizations, from, to, interval) do
     sql = """
     SELECT time, SUM(events) AS value
     FROM (
       SELECT
-        toUnixTimestamp(intDiv(toUInt32(dt), {{interval}}) * {{interval}}) AS time,
+        #{to_unix_timestamp(interval, "dt", argument_name: "interval")} AS time,
         count(events) AS events
       FROM (
         SELECT any(event) AS events, dt
@@ -176,17 +196,7 @@ defmodule Sanbase.Clickhouse.Github.SqlQuery do
     )
     GROUP BY time
     """
-    |> wrap_timeseries_in_gap_filling_query()
-
-    {from, to, interval, span} = timerange_parameters(from, to, interval)
-
-    params = %{
-      interval: interval,
-      organizations: organizations |> Enum.map(&String.downcase/1),
-      from: from,
-      to: to,
-      span: span
-    }
+    |> wrap_timeseries_in_gap_filling_query(interval)
 
     Sanbase.Clickhouse.Query.new(sql, params)
   end
@@ -307,12 +317,12 @@ defmodule Sanbase.Clickhouse.Github.SqlQuery do
     """
   end
 
-  defp wrap_timeseries_in_gap_filling_query(query) do
+  defp wrap_timeseries_in_gap_filling_query(query, interval) do
     """
     SELECT time, SUM(value)
     FROM (
       SELECT
-        toUnixTimestamp(intDiv(toUInt32({{from}} + number * {{interval}}), {{interval}}) * {{interval}}) AS time,
+        #{to_unix_timestamp_from_number(interval, from_argument_name: "from")} AS time,
         toUInt32(0) AS value
       FROM numbers({{span}})
diff --git a/lib/sanbase/clickhouse/metric/metric_files/available_v2_metrics.json b/lib/sanbase/clickhouse/metric/metric_files/available_v2_metrics.json
index 681254254d..06b9a39c54 100644
--- a/lib/sanbase/clickhouse/metric/metric_files/available_v2_metrics.json
+++ b/lib/sanbase/clickhouse/metric/metric_files/available_v2_metrics.json
@@ -4093,5 +4093,56 @@
     "table": "intraday_metrics",
     "has_incomplete_data": false,
     "data_type": "timeseries"
+  },
+  {
+    "human_readable_name": "Funding Rates Aggregated by Exchange",
+    "name": "funding_rates_aggregated_by_exchange",
+    "metric": "funding_rates_aggregated_per_exchange",
+    "version": "2019-01-01",
+    "access": "restricted",
+    "selectors": ["slug", "owner", "label"],
+    "min_plan": {
+      "SANAPI": "pro",
+      "SANBASE": "free"
+    },
+    "aggregation": "last",
+    "min_interval": "5m",
+    "table": "intraday_label_based_metrics",
+    "has_incomplete_data": false,
+    "data_type": "timeseries"
+  },
+  {
+    "human_readable_name": "Funding Rates Aggregated by Settlement Currency",
+    "name": "funding_rates_aggregated_by_settlement_currency",
+    "metric": "funding_rates_aggregated_per_settlement_currency",
+    "version": "2019-01-01",
+    "access": "restricted",
+    "selectors": ["slug", "owner", "label"],
+    "min_plan": {
+      "SANAPI": "pro",
+      "SANBASE": "free"
+    },
+    "aggregation": "last",
+    "min_interval": "5m",
+    "table": "intraday_label_based_metrics",
+    "has_incomplete_data": false,
+    "data_type": "timeseries"
+  },
+  {
+    "human_readable_name": "Total Funding Rates Aggregated by Asset",
+    "name": "total_funding_rates_aggregated_per_asset",
+    "metric": "total_funding_rates_aggregated_per_asset",
+    "version": "2019-01-01",
+    "access": "restricted",
+    "selectors": ["slug"],
+    "min_plan": {
+      "SANAPI": "pro",
+      "SANBASE": "free"
+    },
+    "aggregation": "last",
+    "min_interval": "5m",
+    "table": "intraday_metrics",
+    "has_incomplete_data": false,
+    "data_type": "timeseries"
   }
 ]
diff --git a/lib/sanbase/clickhouse/metric/sql_query/metric_histogram_sql_query.ex b/lib/sanbase/clickhouse/metric/sql_query/metric_histogram_sql_query.ex
index f44dc41a2a..6989a1f7f9 100644
--- a/lib/sanbase/clickhouse/metric/sql_query/metric_histogram_sql_query.ex
+++ b/lib/sanbase/clickhouse/metric/sql_query/metric_histogram_sql_query.ex
@@ -136,26 +136,63 @@ defmodule Sanbase.Clickhouse.MetricAdapter.HistogramSqlQuery do
   def histogram_data_query("eth2_staked_amount_per_label", "ethereum", from, to, _interval, limit) do
     sql = """
-    SELECT
-      label,
-      sumKahan(locked_sum) AS value
-    FROM (
-      SELECT
-        address,
-        locked_sum,
-        #{label_select(label_as: "label")}
-      FROM (
-        SELECT address, sumKahan(amount) AS locked_sum
-        FROM (
-          SELECT DISTINCT *
-          FROM eth2_staking_transfers_v2 FINAL
-          WHERE
-            dt < toDateTime({{to}})
-            #{if from, do: "AND dt >= toDateTime({{from}})"}
+    WITH addresses_sum AS
+    (
+      SELECT
+        address,
+        locked_sum
+      FROM
+      (
+        SELECT
+          address,
+          sumKahan(amount) AS locked_sum
+        FROM
+        (
+          SELECT DISTINCT *
+          FROM eth2_staking_transfers_v2
+          FINAL
+          WHERE (dt < toDateTime({{to}}))
+          #{if from, do: "AND dt >= toDateTime({{from}})"}
+        )
+        GROUP BY address
       )
-      GROUP BY address
-    )
     )
+
+    SELECT
+      -- Create a proper view of the key
+      arrayStringConcat(
+        arrayMap(x -> concat(upper(substring(x, 1, 1)), substring(x, 2, length(x) - 1)),
+        splitByChar('_',
+          -- If key == 'owner', take the value instead
+          multiIf(
+            m.key = 'owner', m.value,
+            m.key = '', 'Unlabeled',
+            m.key
+          )
+        )), ' '
+      ) as label,
+      sumKahan(addresses_sum.locked_sum) as value
+    FROM addresses_sum
+    LEFT JOIN
+    (
+      SELECT
+        blockchain,
+        label_id,
+        address
+      FROM current_label_addresses
+      WHERE (address IN (
+        SELECT address
+        FROM addresses_sum
+      )) AND (blockchain = 'ethereum')
+    ) AS cla ON addresses_sum.address = cla.address
+    LEFT JOIN
+    (
+      SELECT
+        label_id,
+        key,
+        value
+      FROM label_metadata
+    ) AS m ON cla.label_id = m.label_id
     GROUP BY label
     ORDER BY value DESC
     LIMIT {{limit}}
@@ -179,20 +216,45 @@ defmodule Sanbase.Clickhouse.MetricAdapter.HistogramSqlQuery do
         limit
       ) do
     sql = """
+    WITH staking_address AS (
+      SELECT DISTINCT(address)
+      FROM
+        eth2_staking_transfers_v2 FINAL
+      WHERE
+        dt < toDateTime({{to}})
+        #{if from, do: "AND dt >= toDateTime({{from}})"}
+    )
     SELECT
       label,
-      count(address) AS value
+      count(address) as value
     FROM (
       SELECT
-        address,
-        #{label_select(label_as: "label")}
-      FROM (
-        SELECT DISTINCT(address)
-        FROM eth2_staking_transfers_v2 FINAL
-        WHERE
-          dt < toDateTime({{to}})
-          #{if from, do: "AND dt >= toDateTime({{from}})"}
-      )
+        staking_address.address as address,
+        -- Create a proper view of the key
+        arrayStringConcat(
+          arrayMap(x -> concat(upper(substring(x, 1, 1)), substring(x, 2, length(x) - 1)),
+          splitByChar('_',
+            -- If key == 'owner', take the value instead
+            multiIf(
+              m.key = 'owner', m.value,
+              m.key = '', 'Unlabeled',
+              m.key
+            )
+          )), ' '
+        ) as label
+      FROM staking_address
+      LEFT JOIN (
+        SELECT *
+        FROM
+          current_label_addresses
+        WHERE
+          address IN (SELECT address FROM staking_address) AND
+          blockchain = 'ethereum'
+      ) AS cla
+      ON staking_address.address = cla.address
+      LEFT JOIN (
+        SELECT label_id, key, value FROM label_metadata
+      ) AS m ON cla.label_id = m.label_id
     )
     GROUP BY label
     ORDER BY value DESC
     LIMIT {{limit}}
     """
@@ -208,61 +270,6 @@ defmodule Sanbase.Clickhouse.MetricAdapter.HistogramSqlQuery do
     Sanbase.Clickhouse.Query.new(sql, params)
   end
 
-  def histogram_data_query(
-        "eth2_unlabeled_staker_inflow_sources",
-        "ethereum",
-        from,
-        to,
-        _interval,
-        limit
-      ) do
-    sql = """
-    SELECT
-      label,
-      sumKahan(address_inflow) AS value
-    FROM (
-      SELECT
-        address,
-        address_inflow,
-        #{label_select(label_as: "label", label_str_as: "label_str")}
-      FROM (
-        SELECT
-          from AS address,
-          sumKahan(value / 1e18) AS address_inflow
-        FROM eth_transfers
-        WHERE to GLOBAL IN (
-          SELECT address
-          FROM (
-            SELECT
-              address,
-              dictGet('default.eth_label_dict', 'labels', (cityHash64(address), toUInt64(0))) AS label_str
-            FROM (
-              SELECT DISTINCT(address)
-              FROM eth2_staking_transfers_v2 FINAL
-              WHERE
-                dt < toDateTime({{to}})
-                #{if from, do: "AND dt >= toDateTime({{from}})"}
-            )
-          )
-          WHERE label_str = ''
-        )
-        GROUP BY address
-      )
-    )
-    GROUP BY label
-    ORDER BY value DESC
-    LIMIT {{limit}}
-    """
-
-    params = %{
-      from: from && from |> DateTime.to_unix(),
-      to: to |> DateTime.to_unix(),
-      limit: limit
-    }
-
-    Sanbase.Clickhouse.Query.new(sql, params)
-  end
-
   def histogram_data_query(
         "eth2_top_stakers",
         "ethereum",
@@ -272,29 +279,57 @@ defmodule Sanbase.Clickhouse.MetricAdapter.HistogramSqlQuery do
         from,
         to,
         _interval,
         limit
       ) do
     sql = """
-    SELECT
-      address,
-      label,
-      locked_value AS staked
-    FROM (
+    WITH lock_addresses AS (
       SELECT
-        address,
-        locked_value,
-        #{label_select(label_as: "label")}
-      FROM (
-        SELECT address, SUM(amount) AS locked_value
-        FROM eth2_staking_transfers_v2 FINAL
-        WHERE
+        address,
+        SUM(amount) AS locked_value
+      FROM
+        eth2_staking_transfers_v2 FINAL
+      WHERE
         dt < toDateTime({{to}})
         #{if from, do: "AND dt >= toDateTime({{from}})"}
-        GROUP BY address
-        ORDER BY locked_value DESC
-        LIMIT {{limit}}
-      )
+      GROUP BY address
+      ORDER BY locked_value DESC
+      LIMIT {{limit}}
     )
-    ORDER BY staked DESC
+
+    SELECT
+      lock_addresses.address as address,
+      arrayStringConcat(groupUniqArray(multiIf(
+        m.key = 'owner', m.value,
+        m.key = '', 'Unlabeled',
+        m.key)
+      ), ', ') AS labels,
+      max(lock_addresses.locked_value) AS max_staked
+    FROM lock_addresses
+    LEFT JOIN (
+      SELECT
+        blockchain,
+        label_id,
+        address
+      FROM
+        current_label_addresses
+      WHERE
+        address IN (
+          SELECT
+            address
+          FROM
+            lock_addresses
+        )
+        AND
+        blockchain = 'ethereum'
+    ) AS cla
+    ON lock_addresses.address = cla.address
+    LEFT JOIN (
+      SELECT
+        label_id,
+        key,
+        value
+      FROM label_metadata
+    ) AS m
+    ON cla.label_id = m.label_id
+    GROUP BY lock_addresses.address
+    ORDER BY max_staked DESC
     """
 
     params = %{
@@ -694,29 +729,4 @@ defmodule Sanbase.Clickhouse.MetricAdapter.HistogramSqlQuery do
 
     Sanbase.Clickhouse.Query.new(sql, params)
   end
-
-  defp label_select(opts) do
-    label_as = Keyword.get(opts, :label_as, "label")
-    label_str_as = Keyword.get(opts, :label_str_as, "label_str")
-
-    """
-    dictGet('default.eth_label_dict', 'labels', (cityHash64(address), toUInt64(0))) AS #{label_str_as},
-    splitByChar(',', #{label_str_as}) AS label_arr_internal,
-    multiIf(
-      has(label_arr_internal, 'decentralized_exchange'), 'DEX',
-      hasAny(label_arr_internal, ['centralized_exchange', 'deposit']), 'CEX',
-      has(label_arr_internal, 'defi'), 'DeFi',
-      has(label_arr_internal, 'genesis'), 'Genesis',
-      has(label_arr_internal, 'miner'), 'Miner',
-      has(label_arr_internal, 'makerdao-cdp-owner'), 'CDP Owner',
-      has(label_arr_internal, 'whale'), 'Whale',
-      hasAll(label_arr_internal, ['dex_trader', 'withdrawal']), 'CEX & DEX Trader',
-      has(label_arr_internal, 'withdrawal'), 'CEX Trader',
-      has(label_arr_internal, 'proxy'), 'Proxy',
-      has(label_arr_internal, 'dex_trader'), 'DEX Trader',
-      #{label_str_as} = '', 'Unlabeled',
-      label_arr_internal[1]
-    ) AS #{label_as}
-    """
-  end
 end
diff --git a/lib/sanbase/dashboard/dashboard_query.ex b/lib/sanbase/dashboard/dashboard_query.ex
index c1139c6cf6..643dda88a9 100644
--- a/lib/sanbase/dashboard/dashboard_query.ex
+++ b/lib/sanbase/dashboard/dashboard_query.ex
@@ -65,7 +65,7 @@ defmodule Sanbase.Dashboard.Query do
     compressed_rows
     |> Base.decode64!()
     |> :zlib.gunzip()
-    |> :erlang.binary_to_term()
+    |> Plug.Crypto.non_executable_binary_to_term([:safe])
   end
 
   def valid_sql?(args) do
diff --git a/lib/sanbase/email/template_mailer.ex b/lib/sanbase/email/template_mailer.ex
index 9dc1334cc7..d1c4ee2a26 100644
--- a/lib/sanbase/email/template_mailer.ex
+++ b/lib/sanbase/email/template_mailer.ex
@@ -10,7 +10,7 @@ defmodule Sanbase.TemplateMailer do
   def send(rcpt_email, template_slug, vars) do
     template = Sanbase.Email.Template.templates()[template_slug]
 
-    vars = process_vars(vars)
+    vars = Map.put(vars, :current_year, Date.utc_today().year())
 
     if template do
       subject =
@@ -33,14 +33,4 @@ defmodule Sanbase.TemplateMailer do
       :ok
     end
   end
-
-  # Fixme - it is only used migrate vars of already scheduled emails in the past
-  # After a couple of weeks it can be removed
-  defp process_vars(vars) do
-    if Map.has_key?(vars, :expire_at) do
-      Map.merge(vars, %{date: vars.expire_at, end_subscription_date: vars.expire_at})
-    else
-      vars
-    end
-  end
 end
diff --git a/lib/sanbase/metric/sql_query_helper.ex b/lib/sanbase/metric/sql_query_helper.ex
index 1ff658a7a2..b843f44a54 100644
--- a/lib/sanbase/metric/sql_query_helper.ex
+++ b/lib/sanbase/metric/sql_query_helper.ex
@@ -60,7 +60,9 @@ defmodule Sanbase.Metric.SqlQuery.Helper do
     "if({{#{arg_name}}} = {{#{arg_name}}}, toUnixTimestamp(toDateTime(#{function}(#{dt_column}))), null)"
   end
 
-  def to_unix_timestamp_from_number(<<digit, _::binary>> = _interval, opts \\ [])
+  def to_unix_timestamp_from_number(interval_or_function, opts \\ [])
+
+  def to_unix_timestamp_from_number(<<digit, _::binary>> = _interval, opts)
       when digit in ?0..?9 do
     interval_name = Keyword.get(opts, :interval_argument_name, "interval")
     from_name = Keyword.get(opts, :from_argument_name, "from")
@@ -68,6 +70,37 @@ defmodule Sanbase.Metric.SqlQuery.Helper do
     "toUnixTimestamp(intDiv(toUInt32({{#{from_name}}} + number * {{#{interval_name}}}), {{#{interval_name}}}) * {{#{interval_name}}})"
   end
 
+  def to_unix_timestamp_from_number(function, opts)
+      when function in @supported_interval_functions do
+    from_name = Keyword.get(opts, :from_argument_name, "from")
+
+    expression =
+      case function do
+        "toStartOfMonth" ->
+          "addMonths(toStartOfMonth(toDateTime({{#{from_name}}})), number)"
+
+        "toStartOfWeek" ->
+          "addDays(toStartOfWeek(toDateTime({{#{from_name}}})), number * 7)"
+
+        "toMonday" ->
+          "addDays(toMonday(toDateTime({{#{from_name}}})), number * 7)"
+
+        "toStartOfDay" ->
+          "addDays(toStartOfDay(toDateTime({{#{from_name}}})), number)"
+
+        "toStartOfQuarter" ->
+          "addQuarters(toStartOfQuarter(toDateTime({{#{from_name}}})), number)"
+
+        "toStartOfYear" ->
+          "addYears(toStartOfYear(toDateTime({{#{from_name}}})), number)"
+
+        "toStartOfHour" ->
+          "addHours(toStartOfHour(toDateTime({{#{from_name}}})), number)"
+      end
+
+    "toUnixTimestamp(toDateTime(#{expression}))"
+  end
+
   def aggregation(:ohlc, value_column, dt_column) do
     """
     argMin(#{value_column}, #{dt_column}) AS open,
diff --git a/lib/sanbase/monitored_twitter_handle/monitored_twitter_handle.ex b/lib/sanbase/monitored_twitter_handle/monitored_twitter_handle.ex
index c472b7f00e..ef8b766e29 100644
--- a/lib/sanbase/monitored_twitter_handle/monitored_twitter_handle.ex
+++ b/lib/sanbase/monitored_twitter_handle/monitored_twitter_handle.ex
@@ -35,7 +35,7 @@ defmodule Sanbase.MonitoredTwitterHandle do
   end
 
   def is_handle_monitored(handle) do
-    handle = String.downcase(handle)
+    handle = normalize_handle(handle)
     query = from(m in __MODULE__, where: m.handle == ^handle)
 
     {:ok, Repo.exists?(query)}
@@ -48,7 +48,7 @@ defmodule Sanbase.MonitoredTwitterHandle do
           {:ok, Sanbase.MonitoredTwitterHandle.t()} | {:error, String.t()}
   def add_new(handle, user_id, origin, notes) do
     %__MODULE__{}
-    |> change(%{handle: String.downcase(handle), user_id: user_id, origin: origin, notes: notes})
+    |> change(%{handle: normalize_handle(handle), user_id: user_id, origin: origin, notes: notes})
     |> validate_required([:handle, :user_id, :origin])
     |> unique_constraint(:handle)
     |> Repo.insert()
@@ -85,13 +85,20 @@ defmodule Sanbase.MonitoredTwitterHandle do
   end
 
   def list_all_submissions() do
-    query = from(m in __MODULE__, where: m.origin == "graphql_api")
+    query = from(m in __MODULE__, where: m.origin == "graphql_api", preload: [:user])
 
     Repo.all(query)
   end
 
   # Private functions
 
+  defp normalize_handle(handle) do
+    handle
+    |> String.downcase()
+    |> String.trim()
+    |> String.trim_leading("@")
+  end
+
   defp count_user_approved_submissions(user_id) do
     query = from(m in __MODULE__, where: m.user_id == ^user_id and m.status == "approved")
 
@@ -121,20 +128,21 @@ defmodule Sanbase.MonitoredTwitterHandle do
     # This includes all used and unused promo codes for that campaign.
     codes_count = length(codes)
 
-    cond do
-      records_count >= 7 and codes_count <= 1 ->
-        create_user_promo_code(user_id, 27)
-
-      records_count >= 3 and codes_count == 0 ->
-        create_user_promo_code(user_id, 54)
+    # Run the creation in two separate ifs so that, in case of re-issuing of
+    # promo codes, we create all the necessary promo codes in one run
+    if records_count >= 3 and codes_count == 0 do
+      create_user_promo_code_for_campaign(user_id, 27)
+    end
 
-      true ->
-        :ok
+    if records_count >= 7 and codes_count == 1 do
+      create_user_promo_code_for_campaign(user_id, 54)
     end
+
+    :ok
     end
   end
 
-  defp create_user_promo_code(user_id, percent_off) do
+  defp create_user_promo_code_for_campaign(user_id, percent_off) do
     redeem_by = DateTime.utc_now() |> DateTime.add(30, :day) |> DateTime.truncate(:second)
 
     {:ok, coupon} =
diff --git a/lib/sanbase/queries/dashboard/dashboard_cache.ex b/lib/sanbase/queries/dashboard/dashboard_cache.ex
new file mode 100644
index 0000000000..fcf6cd6059
--- /dev/null
+++ b/lib/sanbase/queries/dashboard/dashboard_cache.ex
@@ -0,0 +1,314 @@
+defmodule Sanbase.Queries.DashboardCache do
+  @moduledoc ~s"""
+  Holds the last computed result of dashboard's queries.
+
+  The cache is the dynamic part of the dashboard, as the result of executing
+  the SQL can change on every run. Dashboards can be slow to compute or
+  can be viewed by many users. Because of this, a cached version is kept in
+  the database and is shown to the users.
+  """
+  use Ecto.Schema
+
+  import Ecto.Query
+  import Ecto.Changeset
+  import Sanbase.Utils.Transform, only: [maybe_apply_function: 2]
+  import Sanbase.Utils.ErrorHandling, only: [changeset_errors_string: 1]
+
+  alias Sanbase.Repo
+  alias Sanbase.Queries.Dashboard
+  alias Sanbase.Queries.Executor.Result
+  alias Sanbase.Queries.QueryCache
+
+  @type user_id :: Sanbase.Accounts.User.user_id()
+  @type dashboard_id :: Dashboard.dashboard_id()
+  @type dashboard_query_mapping_id :: Dashboard.dashboard_query_mapping_id()
+
+  @type query_cache :: %{
+          dashboard_query_mapping_id => map()
+        }
+
+  @type t :: %__MODULE__{
+          id: non_neg_integer(),
+          dashboard_id: dashboard_id(),
+          queries: %{
+            # the key is dashboard_query_mapping_id
+            optional(String.t()) => query_cache()
+          }
+        }
+
+  schema "dashboards_cache" do
+    field(:dashboard_id, :integer)
+    field(:queries, :map, default: %{})
+
+    timestamps()
+  end
+
+  # defp get_for_read(dashboard_id, querying_user_id) do
+  # end
+
+  # def get_for_mutation(dashboard_id, querying_user_id) do
+  #   from(dc in __MODULE__,
+  #     where: dc.dashboard_id == ^dashboard_id,
+  #     join: d in Dashboard,
+  #     on: dc.dashboard_id == d.id
+  #   )
+  # end
+
+  @doc ~s"""
+  Fetch the latest cache values for the given dashboard.
+
+  The `opts` argument can contain the following options:
+  - lock_for_update - Set to true if the record is fetched for an update
+  - transform_loaded_queries - If set to true, the loaded query caches are
+    transformed from having `compressed_rows` to `rows`. Defaults to true.
+  """
+  @spec by_dashboard_id(dashboard_id(), user_id, Keyword.t()) :: {:ok, t()} | {:error, String.t()}
+  # def by_dashboard_id(dashboard_id, querying_user_id, opts) do
+  #   Ecto.Multi.new()
+  #   |> Ecto.Multi.run(:get_dashboard_cache, fn _ ->
+  #     get_for_read(dashboard_id, querying_user_id)
+  #   end)
+  # end
+
+  def by_dashboard_id(dashboard_id, querying_user_id, opts \\ []) do
+    query = from(d in __MODULE__, where: d.dashboard_id == ^dashboard_id)
+
+    query =
+      case Keyword.get(opts, :lock_for_update, false) do
+        false -> query
+        true -> query |> lock("FOR UPDATE")
+      end
+
+    case Repo.one(query) do
+      nil -> new(dashboard_id, querying_user_id)
+      %__MODULE__{} = cache -> {:ok, cache}
+    end
+    |> maybe_apply_function(&transform_loaded_dashboard_cache(&1, opts))
+  end
+
+  @doc ~s"""
+  Get the latest query cache for the given dashboard and dashboard_query_mapping_id.
+
+  The `opts` argument can contain the following options:
+  - transform_loaded_queries - If set to true, the loaded query caches are
+    transformed from having `compressed_rows` to `rows`. Defaults to true.
+  """
+  @spec by_dashboard_and_dashboard_query_mapping_id(
+          dashboard_id(),
+          dashboard_query_mapping_id(),
+          Keyword.t()
+        ) :: {:ok, query_cache()} | {:error, String.t()}
+  def by_dashboard_and_dashboard_query_mapping_id(
+        dashboard_id,
+        dashboard_query_mapping_id,
+        opts \\ []
+      ) do
+    query = """
+    SELECT t.query
+    FROM dashboards_cache cache
+    CROSS JOIN LATERAL (
+      SELECT value AS query
+      FROM jsonb_each(cache.queries) AS x(key, value)
+      WHERE cache.dashboard_id = $1 AND key = $2
+    ) AS t
+    """
+
+    params = [dashboard_id, dashboard_query_mapping_id]
+
+    case Repo.query(query, params) do
+      {:ok, %{rows: [[%{} = query_cache]]}} -> {:ok, query_cache}
+      _ -> {:error, "Cannot load dashboard query cache"}
+    end
+    |> maybe_apply_function(&transform_loaded_dashboard_cache(&1, opts))
+  end
+
+  @doc ~s"""
+  Create a new empty record for the given dashboard_id.
+  """
+  @spec new(non_neg_integer(), user_id) :: {:ok, t()} | {:error, any()}
+  def new(dashboard_id, querying_user_id) do
+    case Sanbase.Dashboards.get_visibility_data(dashboard_id) do
+      {:ok, %{user_id: ^querying_user_id}} ->
+        %__MODULE__{}
+        |> change(%{dashboard_id: dashboard_id})
+        |> Repo.insert()
+        |> maybe_transform_error()
+
+      _ ->
+        {:error,
+         "Dashboard with id #{dashboard_id} does not exist or the user with id #{querying_user_id} is not the owner of it."}
+    end
+  end
+
+  @doc ~s"""
+  Update the dashboard's query cache with the provided result.
+  """
+  @spec update_query_cache(non_neg_integer(), String.t(), Result.t(), user_id(), Keyword.t()) ::
+          {:ok, t()} | {:error, any()}
+  def update_query_cache(
+        dashboard_id,
+        dashboard_query_mapping_id,
+        query_result,
+        querying_user_id,
+        opts \\ []
+      ) do
+    # Do not transform the loaded queries cache. Transforming it would
+    # convert `compressed_rows` to `rows`, which will be written back and break
+    # the stored cache format.
+    with query_cache =
+           QueryCache.from_query_result(query_result, dashboard_query_mapping_id, dashboard_id),
+         true <- query_result_size_allowed?(query_cache),
+         {:ok, cache} <-
+           by_dashboard_id(dashboard_id, querying_user_id,
+             transform_loaded_queries: false,
+             lock_for_update: true
+           ) do
+      query_cache =
+        query_cache
+        |> Map.from_struct()
+        |> Map.delete(:rows)
+
+      queries =
+        Map.update(cache.queries, dashboard_query_mapping_id, query_cache, fn _ -> query_cache end)
+
+      cache
+      |> change(%{queries: queries})
+      |> Repo.update()
+      |> maybe_transform_error()
+      |> maybe_apply_function(&transform_loaded_dashboard_cache(&1, opts))
+    end
+  end
+
+  @doc ~s"""
+  Remove the query result from the cache. This is invoked when the panel is
+  removed from the dashboard.
+  """
+  @spec remove_query_cache(dashboard_id(), dashboard_query_mapping_id(), user_id) ::
+          {:ok, t()} | {:error, any()}
+  def remove_query_cache(dashboard_id, dashboard_query_mapping_id, querying_user_id) do
+    {:ok, cache} = by_dashboard_id(dashboard_id, querying_user_id, lock_for_update: true)
+    queries = Enum.reject(cache.queries, &(&1.id == dashboard_query_mapping_id))
+
+    cache
+    |> change(%{queries: queries})
+    |> Repo.update()
+    |> maybe_transform_error()
+  end
+
+  # Private functions
+
+  defp transform_loaded_dashboard_cache(%__MODULE__{} = cache, opts) do
+    flag = Keyword.get(opts, :transform_loaded_queries, true)
+
+    queries =
+      cache.queries
+      |> Map.new(fn {dashboard_query_mapping_id, query_cache} ->
+        query_cache =
+          case flag do
+            true -> transform_loaded_queries(query_cache)
+            false -> query_cache
+          end
+
+        {dashboard_query_mapping_id, query_cache}
+      end)
+
+    %{cache | queries: queries}
+  end
+
+  defp transform_loaded_queries(query_cache) do
+    query_cache = atomize_keys(query_cache)
+
+    %{compressed_rows: compressed_rows, updated_at: updated_at} = query_cache
+    %{query_start_time: start_dt, query_end_time: query_end_dt} = query_cache
+
+    {:ok, rows} =
+      compressed_rows
+      |> Result.decompress_rows()
+      |> transform_rows()
+
+    updated_at = to_datetime(updated_at)
+
+    query_cache
+    |> Map.drop([:compressed_rows, :updated_at, :query_start_time, :query_end_time])
+    |> Map.merge(%{
+      rows: rows,
+      updated_at: updated_at,
+      id: query_cache["id"],
+      query_start_time: Sanbase.DateTimeUtils.from_iso8601!(start_dt),
+      query_end_time: Sanbase.DateTimeUtils.from_iso8601!(query_end_dt)
+    })
+  end
+
+  defp to_datetime(data) do
+    case data do
+      %DateTime{} ->
+        data
+
+      <<_::binary>> ->
+        {:ok, dt, _} = DateTime.from_iso8601(data)
+        dt
+    end
+  end
+
+  defp atomize_keys(map) do
+    map
+    |> Map.new(fn
+      {k, v} when is_atom(k) ->
+        {k, v}
+
+      {k, v} when is_binary(k) ->
+        # Ignore old, no longer existing keys like san_query_id
+        try do
+          {String.to_existing_atom(k), v}
+        rescue
+          _ -> {nil, nil}
+        end
+    end)
+    |> Map.delete(nil)
+  end
+
+  defp transform_rows(rows) do
+    transformed_rows =
+      rows
+      |> Enum.map(fn row ->
+        row
+        |> Enum.map(fn
+          elem when is_binary(elem) ->
+            case DateTime.from_iso8601(elem) do
+              {:ok, datetime, _} -> datetime
+              _ -> elem
+            end
+
+          elem ->
+            elem
+        end)
+      end)
+
+    {:ok, transformed_rows}
+  end
+
+  # The byte size of the compressed rows should not exceed the allowed limit.
+  # Otherwise simple queries like `select * from intraday_metrics limit 9999999`
+  # can be executed and fill the database with lots of data.
+  @allowed_kb_size 500
+  defp query_result_size_allowed?(query_result) do
+    kb_size = byte_size(query_result.compressed_rows) / 1024
+    kb_size = Float.round(kb_size, 2)
+
+    case kb_size do
+      size when size <= @allowed_kb_size ->
+        true
+
+      size ->
+        {:error,
+         """
+         Cannot cache the panel because its compressed size is #{size}KB \
+         which is over the limit of #{@allowed_kb_size}KB
+         """}
+    end
+  end
+
+  defp maybe_transform_error({:ok, _} = result), do: result
+
+  defp maybe_transform_error({:error, changeset}),
+    do: {:error, changeset_errors_string(changeset)}
+end
diff --git a/lib/sanbase/queries/dashboards.ex b/lib/sanbase/queries/dashboards.ex
index 8f47661738..0b99f232bc 100644
--- a/lib/sanbase/queries/dashboards.ex
+++ b/lib/sanbase/queries/dashboards.ex
@@ -641,7 +641,7 @@ defmodule Sanbase.Dashboards do
   """
   @spec get_visibility_data(dashboard_id()) :: {:ok, visibility_data()} | {:error, String.t()}
   def get_visibility_data(dashboard_id) do
-    query = get_visibility_data(dashboard_id)
+    query = Dashboard.get_visibility_data(dashboard_id)
 
     case Repo.one(query) do
       %{} = data -> {:ok, data}
@@ -717,7 +717,7 @@ defmodule Sanbase.Dashboards do
     |> Ecto.Multi.run(:remove_dashboard_query_mapping, fn _repo, %{get_mapping: struct} ->
       Repo.delete(struct)
     end)
-    |> Ecto.Multi.run(:add_preloads, fn _repo, %{add_query_to_dashboard: struct} ->
+    |> Ecto.Multi.run(:add_preloads, fn _repo, %{remove_dashboard_query_mapping: struct} ->
       # Do not preload the dashboard as it will be added in the next step
       {:ok, Repo.preload(struct, [:query])}
     end)
@@ -772,6 +772,47 @@ defmodule Sanbase.Dashboards do
     |> process_transaction_result(:fetch_dashboard_queries)
   end
 
+  ## Cache-related
+
+  @doc ~s"""
+  Store the result of a dashboard query execution in the dashboard cache.
+  """
+  @spec store_dashboard_query_execution(
+          dashboard_id(),
+          dashboard_query_mapping_id(),
+          map(),
+          user_id()
+        ) ::
+          {:ok, DashboardQueryMappingCache.t()} | {:error, String.t()}
+  def store_dashboard_query_execution(
+        dashboard_id,
+        dashboard_query_mapping_id,
+        query_result,
+        user_id
+      ) do
+    Sanbase.Queries.DashboardCache.update_query_cache(
+      dashboard_id,
+      dashboard_query_mapping_id,
+      query_result,
+      user_id
+    )
+  end
+
+  @doc ~s"""
+  Get the cached executions of the queries on the given dashboard.
+  """
+  @spec get_cached_dashboard_queries_executions(
+          dashboard_id(),
+          user_id()
+        ) ::
+          {:ok, DashboardQueryMappingCache.t()} | {:error, String.t()}
+  def get_cached_dashboard_queries_executions(
+        dashboard_id,
+        user_id
+      ) do
+    Sanbase.Queries.DashboardCache.by_dashboard_id(dashboard_id, user_id)
+  end
+
   # Private functions
 
   defp get_dashboard_by_mapping_id_for_mutation(
diff --git a/lib/sanbase/queries/executor/executor.ex b/lib/sanbase/queries/executor/executor.ex
index c30a980332..e0a30674e9 100644
--- a/lib/sanbase/queries/executor/executor.ex
+++ b/lib/sanbase/queries/executor/executor.ex
@@ -43,7 +43,7 @@ defmodule Sanbase.Queries.Executor do
   @spec run(Query.t(), QueryMetadata.t(), Environment.t()) ::
           {:ok, Result.t()} | {:error, String.t()}
   def run(%Query{} = query, %{} = query_metadata, %{} = environment) do
-    query_start_time = DateTime.utc_now()
+    query_start_time = DateTime.utc_now() |> DateTime.truncate(:millisecond)
 
     _ = put_read_only_repo()
 
@@ -62,11 +62,11 @@ defmodule Sanbase.Queries.Executor do
           clickhouse_query_id: map.query_id,
           summary: make_summary_values_numbers(map.summary),
          rows: map.rows,
-          compressed_rows: Result.compress_rows(map.rows),
+          compressed_rows: nil,
           columns: map.column_names,
           column_types: map.column_types,
           query_start_time: query_start_time,
-          query_end_time: DateTime.utc_now()
+          query_end_time: DateTime.utc_now() |> DateTime.truncate(:millisecond)
         }}
 
       {:error, error} ->
@@ -92,6 +92,7 @@ defmodule Sanbase.Queries.Executor do
   defp create_clickhouse_query(query, query_metadata, environment) do
     query_metadata = QueryMetadata.sanitize(query_metadata)
+
     opts = [settings: "log_comment='#{Jason.encode!(query_metadata)}'", environment: environment]
 
     Sanbase.Clickhouse.Query.new(query.sql_query_text, query.sql_query_parameters, opts)
diff --git a/lib/sanbase/queries/executor/result.ex b/lib/sanbase/queries/executor/result.ex
index 9e2117a499..5217b9420d 100644
--- a/lib/sanbase/queries/executor/result.ex
+++ b/lib/sanbase/queries/executor/result.ex
@@ -8,7 +8,7 @@ defmodule Sanbase.Queries.Executor.Result do
           clickhouse_query_id: String.t(),
           summary: Map.t(),
           rows: list(String.t() | number() | boolean() | DateTime.t()),
-          compressed_rows: String.t(),
+          compressed_rows: String.t() | nil,
           columns: list(String.t()),
           column_types: list(String.t()),
           query_start_time: DateTime.t(),
@@ -25,6 +25,68 @@ defmodule Sanbase.Queries.Executor.Result do
             query_start_time: nil,
             query_end_time: nil
 
+  @doc ~s"""
+  Accept a binary that is a base64-encoded gzip binary, and return
+  the decoded and decompressed value.
+
+  This is used to reduce the size of the data sent from the frontend to the
+  backend when storing cached values.
+  """
+  @spec decode_and_decompress(String.t()) :: {:ok, String.t()} | {:error, String.t()}
+  def decode_and_decompress(base64_gzip) when is_binary(base64_gzip) do
+    with {:ok, gzip} <- Base.decode64(base64_gzip),
+         decompressed when is_binary(decompressed) <- :zlib.gunzip(gzip) do
+      {:ok, decompressed}
+    else
+      :error ->
+        {:error, "The provided value is not a valid base64-encoded binary"}
+    end
+  rescue
+    _e in [ErlangError] ->
+      {:error, "The provided value is not a valid gzip binary"}
+  end
+
+  @doc ~s"""
+  Accept a string that is a stringified JSON object representing the result
+  of executing an SQL query, and return a `Result` struct.
+
+  The GraphQL API uses snake_case internally, but the JS frontend uses camelCase,
+  so the keys might be in either of those two formats. This function handles
+  both cases.
+  """
+  @spec from_json_string(String.t()) :: {:ok, t()} | {:error, String.t()}
+  def from_json_string(json) do
+    # map_from_json/1 will also convert all keys to snake_case
+    case map_from_json(json) do
+      {:ok, map} ->
+        result = %__MODULE__{
+          query_id: map["query_id"],
+          clickhouse_query_id: map["clickhouse_query_id"],
+          summary: map["summary"],
+          rows: map["rows"],
+          compressed_rows: map["compressed_rows"],
+          columns: map["columns"],
+          column_types: map["column_types"],
+          query_start_time: map["query_start_time"],
+          query_end_time: map["query_end_time"]
+        }
+
+        {:ok, result}
+
+      {:error, error} ->
+        {:error, "Provided JSON is malformed: #{inspect(error)}"}
+    end
+  end
+
+  defp map_from_json(json) do
+    with {:ok, map} <- Jason.decode(json) do
+      # The JSON provided by the frontend to the API might include
+      # keys like queryStartTime, queryEndTime, etc.
+      map_with_underscore_keys = Map.new(map, fn {k, v} -> {Inflex.underscore(k), v} end)
+
+      {:ok, map_with_underscore_keys}
+    end
+  end
+
   def compress_rows(rows) do
     rows
     |> :erlang.term_to_binary()
@@ -36,6 +98,6 @@ defmodule Sanbase.Queries.Executor.Result do
     compressed_rows
     |> Base.decode64!()
     |> :zlib.gunzip()
-    |> :erlang.binary_to_term()
+    |> Plug.Crypto.non_executable_binary_to_term([:safe])
   end
 end
diff --git a/lib/sanbase/queries/queries.ex b/lib/sanbase/queries/queries.ex
index 20f7b18733..2980aadfe7 100644
--- a/lib/sanbase/queries/queries.ex
+++ b/lib/sanbase/queries/queries.ex
@@ -300,18 +300,46 @@ defmodule Sanbase.Queries do
 
   # Private functions
 
+  defp get_store_execution_opts(opts) do
+    # In test env allow the configuration to be provided as application env,
+    # so that we can disable the storing of details even when the function is
+    # called through the API, where we cannot provide the opts arg.
+    case @compile_env do
+      :test ->
+        [
+          store_execution_details:
+            Application.get_env(
+              :__sanbase_queires__,
+              :store_execution_details,
+              Keyword.get(opts, :store_execution_details, true)
+            ),
+          wait_fetching_details_ms:
+            Application.get_env(
+              :__sanbase_queires__,
+              :__wait_fetching_details_ms_,
+              Keyword.get(opts, :wait_fetching_details_ms, 7500)
+            )
+        ]
+
+      _ ->
+        [
+          store_execution_details: Keyword.get(opts, :store_execution_details, true),
+          wait_fetching_details_ms: Keyword.get(opts, :wait_fetching_details_ms, 7500)
+        ]
+    end
+  end
+
   defp maybe_store_execution_data_async(result, user_id, opts) do
     # When a Clickhouse query is executed, the query details are buffered in
     # memory for up to 7500ms before they flush to the database table.
     # Because of this, storing the execution data is done in a separate process
     # to avoid blocking the main process and to return the result to the user
     # faster.
+    opts = get_store_execution_opts(opts)
 
-    if Keyword.get(opts, :store_execution_details, true) do
-      wait_fetching_details_ms = Keyword.get(opts, :wait_fetching_details_ms, 7500)
-
+    if opts[:store_execution_details] do
       store = fn ->
-        QueryExecution.store_execution(result, user_id, wait_fetching_details_ms)
+        QueryExecution.store_execution(result, user_id, opts[:wait_fetching_details_ms])
       end
 
       # In test do not do it in an async way as this can lead to mocking issues.
diff --git a/lib/sanbase/queries/query/query_cache.ex b/lib/sanbase/queries/query/query_cache.ex
new file mode 100644
index 0000000000..1954ef9ec2
--- /dev/null
+++ b/lib/sanbase/queries/query/query_cache.ex
@@ -0,0 +1,56 @@
+defmodule Sanbase.Queries.QueryCache do
+  @type t :: %__MODULE__{
+          query_id: String.t(),
+          dashboard_query_mapping_id: non_neg_integer(),
+          clickhouse_query_id: String.t(),
+          dashboard_id: non_neg_integer(),
+          columns: list(String.t()),
+          column_types: list(String.t()),
+          rows: List.t(),
+          compressed_rows: String.t(),
+          updated_at: DateTime.t(),
+          query_start_time: DateTime.t(),
+          query_end_time: DateTime.t(),
+          summary: String.t()
+        }
+
+  defstruct query_id: nil,
+            dashboard_query_mapping_id: nil,
+            clickhouse_query_id: nil,
+            dashboard_id: nil,
+            columns: nil,
+            column_types: nil,
+            rows: nil,
+            compressed_rows: nil,
+            updated_at: nil,
+            query_start_time: nil,
+            query_end_time: nil,
+            summary: nil
+
+  alias Sanbase.Queries.Executor.Result
+
+  @spec from_query_result(Result.t(), String.t(), non_neg_integer()) :: t()
+  def from_query_result(%Result{} = result, dashboard_query_mapping_id, dashboard_id) do
+    compressed_rows =
+      if is_nil(result.compressed_rows) do
+        Result.compress_rows(result.rows)
+      else
+        result.compressed_rows
+      end
+
+    %__MODULE__{
+      query_id: result.query_id,
+      dashboard_query_mapping_id: dashboard_query_mapping_id,
+      dashboard_id: dashboard_id,
+      columns: result.columns,
+      column_types: result.column_types,
+      rows: result.rows,
+      compressed_rows: compressed_rows,
+      updated_at: DateTime.utc_now(),
+      query_start_time: result.query_start_time,
+      query_end_time: result.query_end_time,
+      clickhouse_query_id: result.clickhouse_query_id,
+      summary: result.summary
+    }
+  end
+end
diff --git a/lib/sanbase/queries/query/query_metadata.ex b/lib/sanbase/queries/query/query_metadata.ex
index 212787646d..cfa10c0595 100644
--- a/lib/sanbase/queries/query/query_metadata.ex
+++ b/lib/sanbase/queries/query/query_metadata.ex
@@ -21,7 +21,7 @@ defmodule Sanbase.Queries.QueryMetadata do
   end
 
   @doc false
-  def from_local_dev(user_id) do
+  def from_local_dev(user_id) when is_integer(user_id) do
     # To be used only in test and dev environment
     %{
       sanbase_user_id: user_id,
diff --git a/lib/sanbase/run_examples.ex b/lib/sanbase/run_examples.ex
index 3d955898d9..f7fce5b0d3 100644
--- a/lib/sanbase/run_examples.ex
+++ b/lib/sanbase/run_examples.ex
@@ -7,6 +7,7 @@ defmodule Sanbase.RunExamples do
   Do not run in tests; if mocked, the purpose of this module would be lost.
   """
   @queries [
+    :santiment_queries,
     :basic_metric_queries,
     :available_metrics,
     :trending_words,
@@ -23,7 +24,8 @@ defmodule Sanbase.RunExamples do
     :transfers,
     :san_burn_credit_transactions,
     :signals,
-    :additional_filters
+    :additional_filters,
+    :top_addresses
   ]
 
   @from ~U[2023-01-01 00:00:00Z]
@@ -386,95 +388,99 @@ defmodule Sanbase.RunExamples do
   end
 
   defp do_run(:github) do
-    {:ok, [_ | _]} =
-      Sanbase.Clickhouse.Github.dev_activity(
-        ["santiment"],
-        @from,
-        @to,
-        "1d",
-        "None",
-        nil
-      )
-
-    {:ok, [_ | _]} =
-      Sanbase.Clickhouse.Github.github_activity(
-        ["santiment"],
-        @from,
-        @to,
-        "1d",
-        "None",
-        nil
-      )
+    for interval <- ["1d", "toStartOfHour"] do
+      {:ok, [_ | _]} =
+        Sanbase.Clickhouse.Github.dev_activity(
+          ["santiment"],
+          @from,
+          @to,
+          interval,
+          "None",
+          nil
+        )
 
-    {:ok, %{"santiment" => _}} =
-      Sanbase.Clickhouse.Github.total_dev_activity(
-        ["santiment"],
-        @from,
-        @to
-      )
+      {:ok, [_ | _]} =
+        Sanbase.Clickhouse.Github.github_activity(
+          ["santiment"],
+          @from,
+          @to,
+          interval,
+          "None",
+          nil
+        )
 
-    {:ok, %{"santiment" => _, "bitcoin" => _}} =
-      Sanbase.Clickhouse.Github.total_github_activity(
-        ["santiment", "bitcoin"],
-        @from,
-        @to
-      )
+      {:ok, [_ | _]} =
+        Sanbase.Clickhouse.Github.dev_activity_contributors_count(
+          ["santiment"],
+          @from,
+          @to,
+          interval,
+          "None",
+          nil
+        )
 
-    {:ok, [_ | _]} =
-      Sanbase.Clickhouse.Github.dev_activity_contributors_count(
-        ["santiment"],
-        @from,
-        @to,
-        "1d",
-        "None",
-        nil
-      )
+      {:ok, [_ | _]} =
+        Sanbase.Clickhouse.Github.github_activity_contributors_count(
+          ["santiment"],
+          @from,
+          @to,
+          interval,
+          "None",
+          nil
+        )
 
-    {:ok, %{"santiment" => _}} =
-      Sanbase.Clickhouse.Github.total_dev_activity_contributors_count(
-        ["santiment"],
-        @from,
-        @to
-      )
+      for metric <- ["dev_activity", "dev_activity_contributors_count"] do
+        {:ok, [_ | _]} =
+          Sanbase.Metric.timeseries_data(
+            metric,
+            %{slug: "ethereum"},
+            @from,
+            @to,
+            interval
+          )
+      end
 
-    {:ok, [_ | _]} =
-      Sanbase.Clickhouse.Github.github_activity_contributors_count(
-        ["santiment"],
-        @from,
-        @to,
-        "1d",
-        "None",
-        nil
-      )
+      {:ok, %{"santiment" => _}} =
+        Sanbase.Clickhouse.Github.total_dev_activity_contributors_count(
+          ["santiment"],
+          @from,
+          @to
+        )
 
-    {:ok, %{"santiment" => _}} =
-      Sanbase.Clickhouse.Github.total_github_activity_contributors_count(
-        ["santiment"],
-        @from,
-        @to
-      )
+      {:ok, %{"santiment" => _}} =
+        Sanbase.Clickhouse.Github.total_github_activity_contributors_count(
+          ["santiment"],
+          @from,
+          @to
+        )
 
-    for metric <- ["dev_activity", "dev_activity_contributors_count"] do
-      {:ok, [_ | _]} =
-        Sanbase.Metric.timeseries_data(
-          metric,
-          %{slug: "ethereum"},
+      {:ok, %{"santiment" => _}} =
+        Sanbase.Clickhouse.Github.total_dev_activity(
+          ["santiment"],
           @from,
-          @to,
-          "1d"
+          @to
         )
 
-      {:ok, _} =
-        Sanbase.Metric.aggregated_timeseries_data(
-          metric,
-          %{slug: "ethereum"},
+      {:ok, %{"santiment" => _, "bitcoin" => _}} =
+        Sanbase.Clickhouse.Github.total_github_activity(
+          ["santiment", "bitcoin"],
           @from,
           @to
         )
 
-      {:ok, _} = Sanbase.Metric.first_datetime(metric, %{slug: "ethereum"}, [])
+      for metric <- ["dev_activity", "dev_activity_contributors_count"] do
+        {:ok, _} =
+          Sanbase.Metric.aggregated_timeseries_data(
+            metric,
+            %{slug: "ethereum"},
+            @from,
+            @to
+          )
 
-      {:ok, _} = Sanbase.Metric.last_datetime_computed_at(metric, %{slug: "ethereum"}, [])
+        {:ok, _} = Sanbase.Metric.first_datetime(metric, %{slug: "ethereum"}, [])
+
+        {:ok, _} = Sanbase.Metric.last_datetime_computed_at(metric, %{slug: "ethereum"}, [])
+      end
     end
 
     {:ok, :success}
@@ -522,6 +528,18 @@ defmodule Sanbase.RunExamples do
         infrastructure: "ETH",
         address: @null_address
       })
+  end
+
+  defp do_run(:top_addresses) do
+    {:ok, _} =
+      Sanbase.Balance.current_balance_top_addresses(
+        "ethereum",
+        18,
+        "ETH",
+        "ethereum",
+        "eth_balances_realtime",
+        labels: ["whale_usd_balance"]
+      )
 
     {:ok, :success}
   end
@@ -682,4 +700,49 @@ defmodule Sanbase.RunExamples do
       ]
     )
   end
+
+  defp do_run(:santiment_queries) do
+    user = Sanbase.Factory.insert(:user)
+
+    {:ok, query} =
+      Sanbase.Queries.create_query(
+        %{
+          sql_query_text: "SELECT {{big_num:human_readable}} AS big_num, {{big_num}} AS num",
+          sql_query_parameters: %{slug: "bitcoin", big_num: 2_123_801_239_123}
+        },
+        user.id
+      )
+
+    {:ok, dashboard} = Sanbase.Dashboards.create_dashboard(%{name: "MyName"}, user.id)
+
+    {:ok, mapping} = Sanbase.Dashboards.add_query_to_dashboard(dashboard.id, query.id, user.id)
+
+    # Add and remove the mapping to test the removal
+    {:ok, mapping2} = Sanbase.Dashboards.add_query_to_dashboard(dashboard.id, query.id, user.id)
+
+    {:ok, _} = Sanbase.Dashboards.remove_query_from_dashboard(dashboard.id, mapping2.id, user.id)
+
+    {:ok, q} = Sanbase.Queries.get_dashboard_query(dashboard.id, mapping.id, user.id)
+
+    query_metadata = Sanbase.Queries.QueryMetadata.from_local_dev(user.id)
+
+    {:ok, result} =
+      Sanbase.Queries.run_query(q, user, query_metadata, store_execution_details: false)
+
+    {:ok, stored} =
+      Sanbase.Dashboards.store_dashboard_query_execution(
+        dashboard.id,
+        mapping.id,
+        result,
+        user.id
+      )
+
+    {:ok, dashboard_cache} =
+      Sanbase.Dashboards.get_cached_dashboard_queries_executions(dashboard.id, user.id)
+
+    for r <- [dashboard_cache, mapping, dashboard, query],
+        do: Sanbase.Repo.delete(r)
+
+    {:ok, :success}
+  end
 end
diff --git a/lib/sanbase_web/components/layouts/app.html.heex b/lib/sanbase_web/components/layouts/app.html.heex
index 0afbd61bd6..527c765ca3 100644
--- a/lib/sanbase_web/components/layouts/app.html.heex
+++ b/lib/sanbase_web/components/layouts/app.html.heex
@@ -1,7 +1,6 @@
-
+
-
+
   <.flash_group flash={@flash} />
   <%= @inner_content %>
diff --git a/lib/sanbase_web/controllers/custom_admin_controller.ex b/lib/sanbase_web/controllers/custom_admin_controller.ex index baedd722b0..e815d95919 100644 --- a/lib/sanbase_web/controllers/custom_admin_controller.ex +++ b/lib/sanbase_web/controllers/custom_admin_controller.ex @@ -2,7 +2,7 @@ defmodule SanbaseWeb.CustomAdminController do use SanbaseWeb, :controller def index(conn, _params) do - render(conn, "index.html", + render(conn, :index, search_value: "", routes: [ {"Users", ~p"/admin2/users"}, diff --git a/lib/sanbase_web/graphql/resolvers/queries_resolver.ex b/lib/sanbase_web/graphql/resolvers/queries_resolver.ex index b58a27bb3c..f6d759b122 100644 --- a/lib/sanbase_web/graphql/resolvers/queries_resolver.ex +++ b/lib/sanbase_web/graphql/resolvers/queries_resolver.ex @@ -2,6 +2,7 @@ defmodule SanbaseWeb.Graphql.Resolvers.QueriesResolver do alias Sanbase.Queries alias Sanbase.Dashboards alias Sanbase.Queries.QueryMetadata + alias Sanbase.Queries.Executor.Result require Logger @@ -182,6 +183,40 @@ defmodule SanbaseWeb.Graphql.Resolvers.QueriesResolver do Dashboards.remove_query_from_dashboard(dashboard_id, mapping_id, user.id) end + def store_dashboard_query_execution( + _root, + %{ + dashboard_id: dashboard_id, + dashboard_query_mapping_id: mapping_id, + compressed_query_execution_result: compressed_query_execution_result + }, + %{context: %{auth: %{current_user: user}}} + ) do + with {:ok, result_string} <- Result.decode_and_decompress(compressed_query_execution_result), + {:ok, query_execution_result} <- Result.from_json_string(result_string), + {:ok, dashboard_cache} <- + Dashboards.store_dashboard_query_execution( + dashboard_id, + mapping_id, + query_execution_result, + user.id + ) do + queries = Map.values(dashboard_cache.queries) + + {:ok, %{queries: queries}} + end + end + + def get_cached_dashboard_queries_executions(_root, %{dashboard_id: dashboard_id}, resolution) do + querying_user_id = get_in(resolution.context.auth, [:current_user, Access.key(:id)]) + + with {:ok, dashboard_cache} <- + Dashboards.get_cached_dashboard_queries_executions(dashboard_id, querying_user_id) do + queries = Map.values(dashboard_cache.queries) + {:ok, %{queries: queries}} + end + end + # Dashboard Global Parameters CRUD (without explicit read) def add_dashboard_global_parameter( diff --git a/lib/sanbase_web/graphql/resolvers/user/user_resolver.ex b/lib/sanbase_web/graphql/resolvers/user/user_resolver.ex index 5d874e1a0a..ffa53d9e31 100644 --- a/lib/sanbase_web/graphql/resolvers/user/user_resolver.ex +++ b/lib/sanbase_web/graphql/resolvers/user/user_resolver.ex @@ -255,6 +255,14 @@ defmodule SanbaseWeb.Graphql.Resolvers.UserResolver do {:ok, Sanbase.Billing.UserPromoCode.get_user_promo_codes(user.id)} end + def user_promo_codes(%Sanbase.Accounts.User{} = user, _args, _resolution) do + {:ok, Sanbase.Billing.UserPromoCode.get_user_promo_codes(user.id)} + end + + def user_promo_codes(_, _, _) do + {:ok, []} + end + def user_no_preloads(%{user_id: user_id}, _args, %{context: %{loader: loader}}) do loader |> Dataloader.load(SanbaseDataloader, :users_by_id, user_id) diff --git a/lib/sanbase_web/graphql/schema/queries/queries_queries.ex b/lib/sanbase_web/graphql/schema/queries/queries_queries.ex index 9f9833cb87..7d71692a9a 100644 --- a/lib/sanbase_web/graphql/schema/queries/queries_queries.ex +++ b/lib/sanbase_web/graphql/schema/queries/queries_queries.ex @@ -297,6 +297,65 @@ defmodule SanbaseWeb.Graphql.Schema.QueriesQueries do resolve(&QueriesResolver.delete_query/3) end + + @desc ~s""" + Update 
the dashboard cache with the provided data. + + This mutation, together with runDashboardSqlQuery, provides + the capability to compute and to store dashboard query results + separately. Keeping the two steps separate lets users run many + different configurations of a query and store only the result + that satisfies their requirements. + + All arguments are required. + + The compressedQueryExecutionResult argument carries the JSON result + of the execution, gzipped and Base64-encoded, as described in the + argument documentation below. + + Example: + + mutation { + storeDashboardQueryExecution( + dashboardId: 134 + dashboardQueryMappingId: "c5a3b5dd-0e31-42ae-954a-83b741818a28" + compressedQueryExecutionResult: "H4sIAAAAAAAA..." + ){ + queries { + queryId + dashboardQueryMappingId + clickhouseQueryId + columns + columnTypes + rows + summary + queryStartTime + queryEndTime + } + } + } + """ + field :store_dashboard_query_execution, :dashboard_cached_executions do + arg(:dashboard_id, non_null(:integer)) + arg(:dashboard_query_mapping_id, non_null(:string)) + + @desc ~s""" + This is the result of the query execution. The JSON obtained from + runSqlQuery/runRawSqlQuery/runDashboardSqlQuery is first stringified, + then gzipped and then encoded in Base64. This is done to reduce the + size of the data sent from the frontend to the backend. + """ + arg(:compressed_query_execution_result, non_null(:string)) + + middleware(JWTAuth) + + resolve(&QueriesResolver.store_dashboard_query_execution/3) + end end object :dashboard_queries do @@ -310,6 +369,29 @@ resolve(&QueriesResolver.get_dashboard/3) end + @desc ~s""" + Get the last computed version of the queries on a dashboard. + + The query returns a list with the last execution of every + query on the dashboard. A query execution (cache) is described + by its id and a JSON-formatted string of the result. The result + contains the column names, the column types, the rows and + the time they were computed. The SQL query text and parameters that + were used to compute the result can be found in the dashboard + schema, fetched by the getDashboard query. + + This is called a cache because only the latest result is + stored and all previous states are discarded. Long-term storage + of results that survives later computations and changes will be + provided via snapshots (to be implemented). + """ + field :get_cached_dashboard_queries_executions, :dashboard_cached_executions do + meta(access: :free) + arg(:dashboard_id, non_null(:integer)) + + resolve(&QueriesResolver.get_cached_dashboard_queries_executions/3) + end + @desc ~s""" Fetch a list of the dashboards that belong to a user. 
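A note on the encoding contract for compressedQueryExecutionResult: the API test further down in this diff produces it as Jason.encode!(result) |> :zlib.gzip() |> Base.encode64(). A minimal sketch in plain Elixir follows; the decode/1 half is an assumption about what Result.decode_and_decompress/1 does, inferred from that encoding, and is not necessarily the resolver's actual implementation.

defmodule CompressedResultSketch do
  # JSON-stringify the query execution result, gzip it, then
  # Base64-encode it, as the argument description above specifies.
  def encode(result_map) do
    result_map
    |> Jason.encode!()
    |> :zlib.gzip()
    |> Base.encode64()
  end

  # Reverse of encode/1: Base64-decode, then gunzip back to the JSON
  # string (an assumed equivalent of Result.decode_and_decompress/1).
  def decode(compressed) when is_binary(compressed) do
    with {:ok, gzipped} <- Base.decode64(compressed) do
      {:ok, :zlib.gunzip(gzipped)}
    end
  end
end

# Round trip:
#   {:ok, json} = CompressedResultSketch.decode(CompressedResultSketch.encode(%{"rows" => []}))
#   Jason.decode!(json) #=> %{"rows" => []}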
diff --git a/lib/sanbase_web/graphql/schema/types/queries_types.ex b/lib/sanbase_web/graphql/schema/types/queries_types.ex index 09a4d1f571..1d870802e7 100644 --- a/lib/sanbase_web/graphql/schema/types/queries_types.ex +++ b/lib/sanbase_web/graphql/schema/types/queries_types.ex @@ -17,14 +17,16 @@ defmodule SanbaseWeb.Graphql.QueriesTypes do object :sql_query do # Identification data field(:id, non_null(:integer)) + field(:dashboard_query_mapping_id, :string) + field(:uuid, non_null(:string)) field(:origin_id, :integer) # Basic Info field(:name, non_null(:string)) - field(:description, non_null(:string)) + field(:description, :string) field(:is_public, non_null(:boolean)) - field(:settings, non_null(:json)) + field(:settings, :json) # SQL Query & Params field(:sql_query_text, non_null(:string)) @@ -51,6 +53,9 @@ defmodule SanbaseWeb.Graphql.QueriesTypes do field(:updated_at, non_null(:datetime)) end + @desc ~s""" + A text widget that can be put on a dashboard. Text widgets hold + static text that is displayed alongside the dashboard's queries. + """ object :text_widget do field(:id, non_null(:string)) field(:name, :string) @@ -76,6 +81,7 @@ defmodule SanbaseWeb.Graphql.QueriesTypes do the query's own parameters. The interaction with the global parameter happens through the putDashboardGlobalParameter and putDashboardGlobalParameterOverride mutations. """ + object :dashboard do field(:id, non_null(:integer)) field(:name, non_null(:string)) @@ -214,6 +220,25 @@ defmodule SanbaseWeb.Graphql.QueriesTypes do - queryEndTime: The time when the query finished executing. """ object :sql_query_execution_result do + @desc "Non-null when executing a stored query" + field(:query_id, :integer) + @desc "Non-null when executing a dashboard query" + field(:dashboard_query_mapping_id, :string) + + field(:clickhouse_query_id, non_null(:string)) + field(:summary, non_null(:json)) + field(:rows, non_null(:json)) + field(:columns, non_null(list_of(:string))) + field(:column_types, non_null(list_of(:string))) + field(:query_start_time, non_null(:datetime)) + field(:query_end_time, non_null(:datetime)) + end + + object :dashboard_cached_executions do + field(:queries, list_of(:sql_query_execution_result)) + end + + input_object :sql_query_execution_result_input_object do field(:clickhouse_query_id, non_null(:string)) field(:summary, non_null(:json)) field(:rows, non_null(:json)) diff --git a/lib/sanbase_web/live/monitored_twitter_handle/monitored_twitter_handle_live.ex b/lib/sanbase_web/live/monitored_twitter_handle/monitored_twitter_handle_live.ex index 74d16bddce..dfd99f8e0a 100644 --- a/lib/sanbase_web/live/monitored_twitter_handle/monitored_twitter_handle_live.ex +++ b/lib/sanbase_web/live/monitored_twitter_handle/monitored_twitter_handle_live.ex @@ -7,18 +7,39 @@ defmodule SanbaseWeb.MonitoredTwitterHandleLive do def render(assigns) do ~H"""
-
+
<.table id="monitored_twitter_handles" rows={@handles}> - <:col :let={row} label="Twitter Handle"><%= row.handle %> - <:col :let={row} label="Status"><%= row.status %> + <:col :let={row} label="Status"> +

+ <%= row.status |> String.replace("_", " ") |> String.upcase() %> +

+ + <:col :let={row} label="Twitter Handle (Clickable link)"> + <.link class="underline text-blue-600" href={"https://x.com/#{row.handle}"}> + <%= row.handle %> + + <:col :let={row} label="Notes"><%= row.notes %> + <:col :let={row} label="User ID"><%= row.user_id %> + <:col :let={row} label="Username"><%= row.user_username %> + <:col :let={row} label="Email"><%= row.user_email %> <:col :let={row} label="Moderator comment"><%= row.comment %> <:action :let={row}> - <.form :let={f} for={@form} phx-submit="update_status"> - <.input type="text" field={@form[:comment]} placeholder="Comment..." /> + <.form for={@form} phx-submit="update_status"> + <.input type="text" class="" field={@form[:comment]} placeholder="Comment..." /> - <.button name="status" value="approved">Approve - <.button name="status" value="declined">Decline + + @@ -27,6 +48,21 @@ defmodule SanbaseWeb.MonitoredTwitterHandleLive do """ end + def update_status_button(assigns) do + ~H""" + + """ + end + @impl true def mount(_params, _session, socket) do {:ok, @@ -35,6 +71,7 @@ defmodule SanbaseWeb.MonitoredTwitterHandleLive do |> assign(:form, to_form(%{}))} end + @impl true def handle_event( "update_status", %{"status" => status, "record_id" => record_id} = params, @@ -49,7 +86,7 @@ defmodule SanbaseWeb.MonitoredTwitterHandleLive do {:noreply, assign(socket, :handles, handles)} end - defp update_assigns_handle(handles, record_id, status, comment \\ nil) do + defp update_assigns_handle(handles, record_id, status, comment) do handles |> Enum.map(fn %{id: ^record_id} = record -> @@ -58,6 +95,7 @@ defmodule SanbaseWeb.MonitoredTwitterHandleLive do record |> Map.put(:status, status) |> Map.put(:comment, comment) + |> Map.put(:status_color, status_to_color(status)) record -> record @@ -74,12 +112,20 @@ defmodule SanbaseWeb.MonitoredTwitterHandleLive do handle: struct.handle, notes: struct.notes, comment: struct.comment, - inserted_at: struct.inserted_at + inserted_at: struct.inserted_at, + status_color: status_to_color(struct.status), + user_id: struct.user.id, + user_username: struct.user.username, + user_email: struct.user.email } end) |> order_records() end + defp status_to_color("approved"), do: "text-green-600" + defp status_to_color("declined"), do: "text-red-600" + defp status_to_color("pending_approval"), do: "text-yellow-600" + defp order_records(handles) do handles |> Enum.sort_by( diff --git a/lib/sanbase_web/router.ex b/lib/sanbase_web/router.ex index 1391faf0f2..005584e412 100644 --- a/lib/sanbase_web/router.ex +++ b/lib/sanbase_web/router.ex @@ -54,7 +54,7 @@ defmodule SanbaseWeb.Router do end scope "/admin2", SanbaseWeb do - pipe_through([:browser, :basic_auth]) + pipe_through([:admin_pod_only, :browser, :basic_auth]) import Phoenix.LiveDashboard.Router live_dashboard("/dashboard", metrics: SanbaseWeb.Telemetry, ecto_repos: [Sanbase.Repo]) diff --git a/lib/sanbase_web/sanbase_web.ex b/lib/sanbase_web/sanbase_web.ex index 0068b784a4..1e3da4db80 100644 --- a/lib/sanbase_web/sanbase_web.ex +++ b/lib/sanbase_web/sanbase_web.ex @@ -41,13 +41,12 @@ defmodule SanbaseWeb do def controller do quote do - use Phoenix.Controller, namespace: SanbaseWeb + use Phoenix.Controller, + formats: [:html, :json], + layouts: [html: SanbaseWeb.Layouts] import Plug.Conn import SanbaseWeb.Gettext - import Phoenix.LiveView.Controller - - alias SanbaseWeb.Router.Helpers, as: Routes unquote(verified_routes()) end end diff --git a/lib/sanbase_web/templates/custom_admin/index.html.eex b/lib/sanbase_web/templates/custom_admin_html/index.html.heex 
similarity index 50% rename from lib/sanbase_web/templates/custom_admin/index.html.eex rename to lib/sanbase_web/templates/custom_admin_html/index.html.heex index 671064ba70..93d8d11b5c 100644 --- a/lib/sanbase_web/templates/custom_admin/index.html.eex +++ b/lib/sanbase_web/templates/custom_admin_html/index.html.heex @@ -7,7 +7,13 @@
<%= for {name, path} <- @routes do %> <% end %>
@@ -15,10 +21,25 @@
- + + + + + + <%= form_for @conn, Routes.user_path(@conn, :search), [as: :user_search], fn f -> %> - <%= text_input f, :user_search, value: @search_value, class: "w-32 pl-10 pr-4 text-indigo-600 border-gray-200 rounded-md sm:w-64 focus:border-indigo-600 focus:ring focus:ring-opacity-40 focus:ring-indigo-500"%> - <%= submit "Search Users" %> + <%= text_input(f, :user_search, + value: @search_value, + class: + "w-32 pl-10 pr-4 text-indigo-600 border-gray-200 rounded-md sm:w-64 focus:border-indigo-600 focus:ring focus:ring-opacity-40 focus:ring-indigo-500" + ) %> + <%= submit("Search Users") %> <% end %>
diff --git a/lib/sanbase_web/templates/custom_plan/edit.html.eex b/lib/sanbase_web/templates/custom_plan_html/edit.html.eex similarity index 100% rename from lib/sanbase_web/templates/custom_plan/edit.html.eex rename to lib/sanbase_web/templates/custom_plan_html/edit.html.eex diff --git a/lib/sanbase_web/templates/custom_plan/edit_form.html.eex b/lib/sanbase_web/templates/custom_plan_html/edit_form.html.eex similarity index 100% rename from lib/sanbase_web/templates/custom_plan/edit_form.html.eex rename to lib/sanbase_web/templates/custom_plan_html/edit_form.html.eex diff --git a/lib/sanbase_web/templates/custom_plan/form.html.eex b/lib/sanbase_web/templates/custom_plan_html/form.html.eex similarity index 100% rename from lib/sanbase_web/templates/custom_plan/form.html.eex rename to lib/sanbase_web/templates/custom_plan_html/form.html.eex diff --git a/lib/sanbase_web/templates/custom_plan/index.html.eex b/lib/sanbase_web/templates/custom_plan_html/index.html.eex similarity index 100% rename from lib/sanbase_web/templates/custom_plan/index.html.eex rename to lib/sanbase_web/templates/custom_plan_html/index.html.eex diff --git a/lib/sanbase_web/templates/custom_plan/new.html.eex b/lib/sanbase_web/templates/custom_plan_html/new.html.eex similarity index 100% rename from lib/sanbase_web/templates/custom_plan/new.html.eex rename to lib/sanbase_web/templates/custom_plan_html/new.html.eex diff --git a/lib/sanbase_web/templates/custom_plan/show.html.eex b/lib/sanbase_web/templates/custom_plan_html/show.html.eex similarity index 100% rename from lib/sanbase_web/templates/custom_plan/show.html.eex rename to lib/sanbase_web/templates/custom_plan_html/show.html.eex diff --git a/lib/sanbase_web/templates/report/edit.html.eex b/lib/sanbase_web/templates/report_html/edit.html.eex similarity index 100% rename from lib/sanbase_web/templates/report/edit.html.eex rename to lib/sanbase_web/templates/report_html/edit.html.eex diff --git a/lib/sanbase_web/templates/report/edit_form.html.eex b/lib/sanbase_web/templates/report_html/edit_form.html.eex similarity index 100% rename from lib/sanbase_web/templates/report/edit_form.html.eex rename to lib/sanbase_web/templates/report_html/edit_form.html.eex diff --git a/lib/sanbase_web/templates/report/form.html.eex b/lib/sanbase_web/templates/report_html/form.html.eex similarity index 100% rename from lib/sanbase_web/templates/report/form.html.eex rename to lib/sanbase_web/templates/report_html/form.html.eex diff --git a/lib/sanbase_web/templates/report/index.html.eex b/lib/sanbase_web/templates/report_html/index.html.eex similarity index 100% rename from lib/sanbase_web/templates/report/index.html.eex rename to lib/sanbase_web/templates/report_html/index.html.eex diff --git a/lib/sanbase_web/templates/report/new.html.eex b/lib/sanbase_web/templates/report_html/new.html.eex similarity index 100% rename from lib/sanbase_web/templates/report/new.html.eex rename to lib/sanbase_web/templates/report_html/new.html.eex diff --git a/lib/sanbase_web/templates/report/show.html.eex b/lib/sanbase_web/templates/report_html/show.html.eex similarity index 100% rename from lib/sanbase_web/templates/report/show.html.eex rename to lib/sanbase_web/templates/report_html/show.html.eex diff --git a/lib/sanbase_web/templates/sheets_template/edit.html.eex b/lib/sanbase_web/templates/sheets_template_html/edit.html.eex similarity index 100% rename from lib/sanbase_web/templates/sheets_template/edit.html.eex rename to lib/sanbase_web/templates/sheets_template_html/edit.html.eex diff --git 
a/lib/sanbase_web/templates/sheets_template/edit_form.html.eex b/lib/sanbase_web/templates/sheets_template_html/edit_form.html.eex similarity index 100% rename from lib/sanbase_web/templates/sheets_template/edit_form.html.eex rename to lib/sanbase_web/templates/sheets_template_html/edit_form.html.eex diff --git a/lib/sanbase_web/templates/sheets_template/form.html.eex b/lib/sanbase_web/templates/sheets_template_html/form.html.eex similarity index 100% rename from lib/sanbase_web/templates/sheets_template/form.html.eex rename to lib/sanbase_web/templates/sheets_template_html/form.html.eex diff --git a/lib/sanbase_web/templates/sheets_template/index.html.eex b/lib/sanbase_web/templates/sheets_template_html/index.html.eex similarity index 100% rename from lib/sanbase_web/templates/sheets_template/index.html.eex rename to lib/sanbase_web/templates/sheets_template_html/index.html.eex diff --git a/lib/sanbase_web/templates/sheets_template/new.html.eex b/lib/sanbase_web/templates/sheets_template_html/new.html.eex similarity index 100% rename from lib/sanbase_web/templates/sheets_template/new.html.eex rename to lib/sanbase_web/templates/sheets_template_html/new.html.eex diff --git a/lib/sanbase_web/templates/sheets_template/show.html.eex b/lib/sanbase_web/templates/sheets_template_html/show.html.eex similarity index 100% rename from lib/sanbase_web/templates/sheets_template/show.html.eex rename to lib/sanbase_web/templates/sheets_template_html/show.html.eex diff --git a/lib/sanbase_web/templates/user/show.html.heex b/lib/sanbase_web/templates/user/show.html.heex deleted file mode 100644 index fa8d66e2ce..0000000000 --- a/lib/sanbase_web/templates/user/show.html.heex +++ /dev/null @@ -1,44 +0,0 @@ -
-

Show User

- -
-
- - - <%= for f <- @string_fields do %> - - - - - <% end %> - -
<%= to_string(f) %><%= Map.get(@user, f) |> to_string() %>
-
-
- - <%= link "Edit", to: Routes.user_path(@conn, :edit, @user), class: "flex-shrink-0 border-4 text-teal-500 hover:text-teal-800 py-1 px-2 rounded"%> - <%= link "Back", to: Routes.user_path(@conn, :index), class: "flex-shrink-0 border-4 text-teal-500 hover:text-teal-800 py-1 px-2 rounded" %> - - <%= for bt <- @belongs_to do %> -
-

<%= bt.name %>

- - - <%= for field <- bt.fields do %> - - <% end %> - -
<%= field.field_name %>
<%= field.data %>
- - <%= for action <- bt.actions do %> - <%= link to_string(action), to: Routes.user_path(@conn, action, id: @user.id), class: "flex-shrink-0 border-4 text-teal-500 hover:text-teal-800 py-1 px-2 rounded"%> - <% end %> -
- <% end %> - - - - <%= for table <- @has_many do %> - - <% end %> -
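The user admin templates that follow still build links with Routes.user_path/3 helpers, while the controller earlier in this diff already uses verified routes (~p"/admin2/users"). For comparison, a sketch of the "Back" link written against that verified route, assuming Phoenix.Component's <.link> component with the :navigate attribute:

<.link
  navigate={~p"/admin2/users"}
  class="flex-shrink-0 border-4 text-teal-500 hover:text-teal-800 py-1 px-2 rounded"
>
  Back
</.link>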
diff --git a/lib/sanbase_web/templates/user/index.html.eex b/lib/sanbase_web/templates/user_html/index.html.heex similarity index 74% rename from lib/sanbase_web/templates/user/index.html.eex rename to lib/sanbase_web/templates/user_html/index.html.heex index 724918e337..8b5d1a9369 100644 --- a/lib/sanbase_web/templates/user/index.html.eex +++ b/lib/sanbase_web/templates/user_html/index.html.heex @@ -2,12 +2,25 @@ - - - - - - + + + + + + @@ -18,8 +31,9 @@ <% end %> diff --git a/lib/sanbase_web/templates/user_html/show.html.heex b/lib/sanbase_web/templates/user_html/show.html.heex new file mode 100644 index 0000000000..7416b39a22 --- /dev/null +++ b/lib/sanbase_web/templates/user_html/show.html.heex @@ -0,0 +1,68 @@ +
+

Show User

+ +
+
+
IDNameEmailUsernameActions
+ ID + + Name + + Email + + Username + + Actions +
<%= user.email %> <%= user.username %> - <%= link "Show", to: Routes.user_path(@conn, :show, user) %> | - <%= link "Edit", to: Routes.user_path(@conn, :edit, user) %> | + <%= link("Show", to: Routes.user_path(@conn, :show, user)) %> + | <%= link("Edit", to: Routes.user_path(@conn, :edit, user)) %> + |
+ + <%= for f <- @string_fields do %> + + + + + <% end %> + +
<%= to_string(f) %> + <%= Map.get(@user, f) |> to_string() %> +
+
+
+ + + <%= link("Edit", + to: Routes.user_path(@conn, :edit, @user), + class: "flex-shrink-0 border-4 text-teal-500 hover:text-teal-800 py-1 px-2 rounded" + ) %> + + + <%= link("Back", + to: Routes.user_path(@conn, :index), + class: "flex-shrink-0 border-4 text-teal-500 hover:text-teal-800 py-1 px-2 rounded" + ) %> + + + <%= for bt <- @belongs_to do %> +
+

<%= bt.name %>

+ + + <%= for field <- bt.fields do %> + + + + + + <% end %> + +
<%= field.field_name %>
<%= field.data %>
+ + <%= for action <- bt.actions do %> + + <%= link(to_string(action), + to: Routes.user_path(@conn, action, id: @user.id), + class: "flex-shrink-0 border-4 text-teal-500 hover:text-teal-800 py-1 px-2 rounded" + ) %> + + <% end %> +
+ <% end %> + + <%= for table <- @has_many do %> + + <% end %> +
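The View to HTML module renames that follow (CustomAdminView becomes CustomAdminHTML, ReportView becomes ReportHTML, and so on, matching the templates/*_html directory renames above) track Phoenix 1.7's format-based rendering: with formats: [:html, :json] set in the controller macro (see the sanbase_web.ex change earlier in this diff), render(conn, :index, ...) dispatches to the <ControllerName>HTML module. A hypothetical minimal pairing, assuming the :view macro derives the template directory from the module name, as the directory renames suggest:

defmodule SanbaseWeb.ExampleController do
  use SanbaseWeb, :controller

  def index(conn, _params) do
    # Rendered by SanbaseWeb.ExampleHTML, which picks up
    # templates/example_html/index.html.heex
    render(conn, :index, message: "hello")
  end
end

defmodule SanbaseWeb.ExampleHTML do
  use SanbaseWeb, :view
end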
diff --git a/lib/sanbase_web/templates/webinar/edit.html.eex b/lib/sanbase_web/templates/webinar_html/edit.html.eex similarity index 100% rename from lib/sanbase_web/templates/webinar/edit.html.eex rename to lib/sanbase_web/templates/webinar_html/edit.html.eex diff --git a/lib/sanbase_web/templates/webinar/edit_form.html.eex b/lib/sanbase_web/templates/webinar_html/edit_form.html.eex similarity index 100% rename from lib/sanbase_web/templates/webinar/edit_form.html.eex rename to lib/sanbase_web/templates/webinar_html/edit_form.html.eex diff --git a/lib/sanbase_web/templates/webinar/form.html.eex b/lib/sanbase_web/templates/webinar_html/form.html.eex similarity index 100% rename from lib/sanbase_web/templates/webinar/form.html.eex rename to lib/sanbase_web/templates/webinar_html/form.html.eex diff --git a/lib/sanbase_web/templates/webinar/index.html.eex b/lib/sanbase_web/templates/webinar_html/index.html.eex similarity index 100% rename from lib/sanbase_web/templates/webinar/index.html.eex rename to lib/sanbase_web/templates/webinar_html/index.html.eex diff --git a/lib/sanbase_web/templates/webinar/new.html.eex b/lib/sanbase_web/templates/webinar_html/new.html.eex similarity index 100% rename from lib/sanbase_web/templates/webinar/new.html.eex rename to lib/sanbase_web/templates/webinar_html/new.html.eex diff --git a/lib/sanbase_web/templates/webinar/show.html.eex b/lib/sanbase_web/templates/webinar_html/show.html.eex similarity index 100% rename from lib/sanbase_web/templates/webinar/show.html.eex rename to lib/sanbase_web/templates/webinar_html/show.html.eex diff --git a/lib/sanbase_web/views/custom_admin_html.ex b/lib/sanbase_web/views/custom_admin_html.ex new file mode 100644 index 0000000000..98d80f9692 --- /dev/null +++ b/lib/sanbase_web/views/custom_admin_html.ex @@ -0,0 +1,3 @@ +defmodule SanbaseWeb.CustomAdminHTML do + use SanbaseWeb, :view +end diff --git a/lib/sanbase_web/views/custom_admin_view.ex b/lib/sanbase_web/views/custom_admin_view.ex deleted file mode 100644 index aa655ff566..0000000000 --- a/lib/sanbase_web/views/custom_admin_view.ex +++ /dev/null @@ -1,3 +0,0 @@ -defmodule SanbaseWeb.CustomAdminView do - use SanbaseWeb, :view -end diff --git a/lib/sanbase_web/views/custom_plan.ex b/lib/sanbase_web/views/custom_plan.ex deleted file mode 100644 index 8a7b8213a4..0000000000 --- a/lib/sanbase_web/views/custom_plan.ex +++ /dev/null @@ -1,3 +0,0 @@ -defmodule SanbaseWeb.CustomPlanView do - use SanbaseWeb, :view -end diff --git a/lib/sanbase_web/views/custom_plan_html.ex b/lib/sanbase_web/views/custom_plan_html.ex new file mode 100644 index 0000000000..c9b341be6a --- /dev/null +++ b/lib/sanbase_web/views/custom_plan_html.ex @@ -0,0 +1,3 @@ +defmodule SanbaseWeb.CustomPlanHTML do + use SanbaseWeb, :view +end diff --git a/lib/sanbase_web/views/report_html.ex b/lib/sanbase_web/views/report_html.ex new file mode 100644 index 0000000000..c66d8e4c36 --- /dev/null +++ b/lib/sanbase_web/views/report_html.ex @@ -0,0 +1,3 @@ +defmodule SanbaseWeb.ReportHTML do + use SanbaseWeb, :view +end diff --git a/lib/sanbase_web/views/report_view.ex b/lib/sanbase_web/views/report_view.ex deleted file mode 100644 index 6e39a8bd38..0000000000 --- a/lib/sanbase_web/views/report_view.ex +++ /dev/null @@ -1,3 +0,0 @@ -defmodule SanbaseWeb.ReportView do - use SanbaseWeb, :view -end diff --git a/lib/sanbase_web/views/sheets_template_html.ex b/lib/sanbase_web/views/sheets_template_html.ex new file mode 100644 index 0000000000..4ed7bb2204 --- /dev/null +++ b/lib/sanbase_web/views/sheets_template_html.ex @@ 
-0,0 +1,3 @@ +defmodule SanbaseWeb.SheetsTemplateHTML do + use SanbaseWeb, :view +end diff --git a/lib/sanbase_web/views/sheets_template_view.ex b/lib/sanbase_web/views/sheets_template_view.ex deleted file mode 100644 index 08c4b6a186..0000000000 --- a/lib/sanbase_web/views/sheets_template_view.ex +++ /dev/null @@ -1,3 +0,0 @@ -defmodule SanbaseWeb.SheetsTemplateView do - use SanbaseWeb, :view -end diff --git a/lib/sanbase_web/views/user_view.ex b/lib/sanbase_web/views/user_view.ex deleted file mode 100644 index 26b4d27091..0000000000 --- a/lib/sanbase_web/views/user_view.ex +++ /dev/null @@ -1,3 +0,0 @@ -defmodule SanbaseWeb.UserView do - use SanbaseWeb, :view -end diff --git a/lib/sanbase_web/views/user_view_html.ex b/lib/sanbase_web/views/user_view_html.ex new file mode 100644 index 0000000000..2b489a92e4 --- /dev/null +++ b/lib/sanbase_web/views/user_view_html.ex @@ -0,0 +1,3 @@ +defmodule SanbaseWeb.UserHTML do + use SanbaseWeb, :view +end diff --git a/lib/sanbase_web/views/webinar.ex b/lib/sanbase_web/views/webinar.ex deleted file mode 100644 index 874bfd9977..0000000000 --- a/lib/sanbase_web/views/webinar.ex +++ /dev/null @@ -1,3 +0,0 @@ -defmodule SanbaseWeb.WebinarView do - use SanbaseWeb, :view -end diff --git a/lib/sanbase_web/views/webinar_html.ex b/lib/sanbase_web/views/webinar_html.ex new file mode 100644 index 0000000000..677736a4dd --- /dev/null +++ b/lib/sanbase_web/views/webinar_html.ex @@ -0,0 +1,3 @@ +defmodule SanbaseWeb.WebinarHTML do + use SanbaseWeb, :view +end diff --git a/mix.exs b/mix.exs index 18e890256e..1aa572ff50 100644 --- a/mix.exs +++ b/mix.exs @@ -84,8 +84,9 @@ defmodule Sanbase.Mixfile do {:earmark, "~> 1.2"}, {:ecto_enum, "~> 1.4"}, {:ecto_psql_extras, "~> 0.3"}, - {:ecto_sql, "~> 3.6"}, - {:ecto, "~> 3.6"}, + {:ecto_sql, "== 3.9.0"}, + # Our config breaks with the newest ecto version + {:ecto, "== 3.9.1"}, {:envy, "~> 1.1.1", only: [:dev, :test]}, {:erlex, "~> 0.2.6", override: true}, {:ethereumex, "~> 0.9"}, @@ -142,7 +143,7 @@ defmodule Sanbase.Mixfile do {:phoenix_html, "~> 3.0", override: true}, {:phoenix_live_dashboard, "~> 0.3"}, {:phoenix_live_reload, "~> 1.1", only: :dev}, - {:phoenix_live_view, "~> 0.14"}, + {:phoenix_live_view, "~> 0.20"}, {:phoenix_pubsub, "~> 2.0"}, {:phoenix_view, "~> 2.0"}, {:phoenix, "~> 1.7.0"}, diff --git a/mix.lock b/mix.lock index 6ec028a9d0..0db2f22232 100644 --- a/mix.lock +++ b/mix.lock @@ -28,7 +28,7 @@ "csvlixir": {:hex, :csvlixir, "2.0.4", "e80d6a4f98e3f374151307c6cfda2fea22c33737092bc23d6d085d5ed19396fe", [:mix], [], "hexpm", "2a890aab96f45de234a50ae18d2a9fcb1ee36804d808591b459f8128f2098e42"}, "curve25519": {:hex, :curve25519, "1.0.4", "e570561b832c29b3ce4fd8b9fcd9c9546916188860568f1c1fc9428d7cb00894", [:mix], [], "hexpm", "1a068bf9646e7067bf6aa5bf802b404002734e09bb5300f098109df03e31f9f5"}, "dataloader": {:hex, :dataloader, "1.0.10", "a42f07641b1a0572e0b21a2a5ae1be11da486a6790f3d0d14512d96ff3e3bbe9", [:mix], [{:ecto, ">= 3.4.3 and < 4.0.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:telemetry, "~> 1.0 or ~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "54cd70cec09addf4b2ace14cc186a283a149fd4d3ec5475b155951bf33cd963f"}, - "db_connection": {:hex, :db_connection, "2.5.0", "bb6d4f30d35ded97b29fe80d8bd6f928a1912ca1ff110831edcd238a1973652c", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c92d5ba26cd69ead1ff7582dbb860adeedfff39774105a4f1c92cbb654b55aa2"}, + "db_connection": {:hex, :db_connection, "2.6.0", 
"77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"}, "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"}, "dialyxir": {:hex, :dialyxir, "1.3.0", "fd1672f0922b7648ff9ce7b1b26fcf0ef56dda964a459892ad15f6b4410b5284", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "00b2a4bcd6aa8db9dcb0b38c1225b7277dca9bc370b6438715667071a304696f"}, "distillery": {:hex, :distillery, "2.1.1", "f9332afc2eec8a1a2b86f22429e068ef35f84a93ea1718265e740d90dd367814", [:mix], [{:artificery, "~> 0.2", [hex: :artificery, repo: "hexpm", optional: false]}], "hexpm", "bbc7008b0161a6f130d8d903b5b3232351fccc9c31a991f8fcbf2a12ace22995"}, @@ -36,7 +36,7 @@ "earmark_parser": {:hex, :earmark_parser, "1.4.31", "a93921cdc6b9b869f519213d5bc79d9e218ba768d7270d46fdcf1c01bacff9e2", [:mix], [], "hexpm", "317d367ee0335ef037a87e46c91a2269fef6306413f731e8ec11fc45a7efd059"}, "ecto": {:hex, :ecto, "3.9.1", "67173b1687afeb68ce805ee7420b4261649d5e2deed8fe5550df23bab0bc4396", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c80bb3d736648df790f7f92f81b36c922d9dd3203ca65be4ff01d067f54eb304"}, "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"}, - "ecto_psql_extras": {:hex, :ecto_psql_extras, "0.7.11", "6e20144c1446dcccfcdb4c142c9d8b7992a90a569b1d5958cbea5458550b25f0", [:mix], [{:ecto_sql, "~> 3.4", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.15.7 or ~> 0.16.0 or ~> 0.17.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "def61f1f92d4f40d51c80bbae2157212d6c0a459eb604be446e47369cbd40b23"}, + "ecto_psql_extras": {:hex, :ecto_psql_extras, "0.7.14", "7a20cfe913b0476542b43870e67386461258734896035e3f284039fd18bd4c4c", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "22f5f98592dd597db9416fcef00effae0787669fdcb6faf447e982b553798e98"}, "ecto_sql": {:hex, :ecto_sql, "3.9.0", "2bb21210a2a13317e098a420a8c1cc58b0c3421ab8e3acfa96417dab7817918c", [:mix], [{:db_connection, "~> 2.5 or ~> 2.4.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.9.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 
or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a8f3f720073b8b1ac4c978be25fa7960ed7fd44997420c304a4a2e200b596453"}, "ed25519": {:hex, :ed25519, "1.4.1", "479fb83c3e31987c9cad780e6aeb8f2015fb5a482618cdf2a825c9aff809afc4", [:mix], [], "hexpm", "0dacb84f3faa3d8148e81019ca35f9d8dcee13232c32c9db5c2fb8ff48c80ec7"}, "envy": {:hex, :envy, "1.1.1", "0bc9bd654dec24fcdf203f7c5aa1b8f30620f12cfb28c589d5e9c38fe1b07475", [:mix], [], "hexpm", "7061eb1a47415fd757145d8dec10dc0b1e48344960265cb108f194c4252c3a89"}, @@ -119,19 +119,19 @@ "phoenix": {:hex, :phoenix, "1.7.9", "9a2b873e2cb3955efdd18ad050f1818af097fa3f5fc3a6aaba666da36bdd3f02", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.6", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "83e32da028272b4bfd076c61a964e6d2b9d988378df2f1276a0ed21b13b5e997"}, "phoenix_ecto": {:hex, :phoenix_ecto, "4.4.0", "0672ed4e4808b3fbed494dded89958e22fb882de47a97634c0b13e7b0b5f7720", [:mix], [{:ecto, "~> 3.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "09864e558ed31ee00bd48fcc1d4fc58ae9678c9e81649075431e69dbabb43cc1"}, "phoenix_html": {:hex, :phoenix_html, "3.3.3", "380b8fb45912b5638d2f1d925a3771b4516b9a78587249cabe394e0a5d579dc9", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "923ebe6fec6e2e3b3e569dfbdc6560de932cd54b000ada0208b5f45024bdd76c"}, - "phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.7.0", "9b5ab242e52c33596b132beaf97dccb9e59f7af941f41a22d0fa2465d0b63ab1", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.18.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "374d65e87e1e83528ea30852e34d4ad3022ddb92d642d43ec0b4e3c112046036"}, + "phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.8.3", "7ff51c9b6609470f681fbea20578dede0e548302b0c8bdf338b5a753a4f045bf", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:ecto_sqlite3_extras, "~> 1.1.7 or ~> 1.2.0", [hex: :ecto_sqlite3_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: 
false]}, {:phoenix_live_view, "~> 0.19 or ~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "f9470a0a8bae4f56430a23d42f977b5a6205fdba6559d76f932b876bfaec652d"}, "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.3.3", "3a53772a6118d5679bf50fc1670505a290e32a1d195df9e069d8c53ab040c054", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "766796676e5f558dbae5d1bdb066849673e956005e3730dfd5affd7a6da4abac"}, - "phoenix_live_view": {:hex, :phoenix_live_view, "0.18.18", "1f38fbd7c363723f19aad1a04b5490ff3a178e37daaf6999594d5f34796c47fc", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a5810d0472f3189ede6d2a95bda7f31c6113156b91784a3426cb0ab6a6d85214"}, + "phoenix_live_view": {:hex, :phoenix_live_view, "0.20.1", "92a37acf07afca67ac98bd326532ba8f44ad7d4bdf3e4361b03f7f02594e5ae9", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.15", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "be494fd1215052729298b0e97d5c2ce8e719c00854b82cd8cf15c1cd7fcf6294"}, "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"}, "phoenix_template": {:hex, :phoenix_template, "1.0.3", "32de561eefcefa951aead30a1f94f1b5f0379bc9e340bb5c667f65f1edfa4326", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "16f4b6588a4152f3cc057b9d0c0ba7e82ee23afa65543da535313ad8d25d8e2c"}, - "phoenix_view": {:hex, :phoenix_view, "2.0.2", "6bd4d2fd595ef80d33b439ede6a19326b78f0f1d8d62b9a318e3d9c1af351098", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "a929e7230ea5c7ee0e149ffcf44ce7cf7f4b6d2bfe1752dd7c084cdff152d36f"}, + "phoenix_view": {:hex, :phoenix_view, "2.0.3", "4d32c4817fce933693741deeb99ef1392619f942633dde834a5163124813aad3", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "cd34049af41be2c627df99cd4eaa71fc52a328c0c3d8e7d4aa28f880c30e7f64"}, "plug": {:hex, :plug, "1.15.1", "b7efd81c1a1286f13efb3f769de343236bd8b7d23b4a9f40d3002fc39ad8f74c", [:mix], 
[{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "459497bd94d041d98d948054ec6c0b76feacd28eec38b219ca04c0de13c79d30"}, "plug_cowboy": {:hex, :plug_cowboy, "2.6.1", "9a3bbfceeb65eff5f39dab529e5cd79137ac36e913c02067dba3963a26efe9b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "de36e1a21f451a18b790f37765db198075c25875c64834bcc82d90b309eb6613"}, "plug_crypto": {:hex, :plug_crypto, "2.0.0", "77515cc10af06645abbfb5e6ad7a3e9714f805ae118fa1a70205f80d2d70fe73", [:mix], [], "hexpm", "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"}, "poison": {:hex, :poison, "5.0.0", "d2b54589ab4157bbb82ec2050757779bfed724463a544b6e20d79855a9e43b24", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "11dc6117c501b80c62a7594f941d043982a1bd05a1184280c0d9166eb4d8d3fc"}, "poly1305": {:hex, :poly1305, "1.0.4", "7cdc8961a0a6e00a764835918cdb8ade868044026df8ef5d718708ea6cc06611", [:mix], [{:chacha20, "~> 1.0", [hex: :chacha20, repo: "hexpm", optional: false]}, {:equivalex, "~> 1.0", [hex: :equivalex, repo: "hexpm", optional: false]}], "hexpm", "e14e684661a5195e149b3139db4a1693579d4659d65bba115a307529c47dbc3b"}, "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"}, - "postgrex": {:hex, :postgrex, "0.17.1", "01c29fd1205940ee55f7addb8f1dc25618ca63a8817e56fac4f6846fc2cddcbe", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "14b057b488e73be2beee508fb1955d8db90d6485c6466428fe9ccf1d6692a555"}, + "postgrex": {:hex, :postgrex, "0.17.3", "c92cda8de2033a7585dae8c61b1d420a1a1322421df84da9a82a6764580c503d", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "946cf46935a4fdca7a81448be76ba3503cff082df42c6ec1ff16a4bdfbfb098d"}, "prom_ex": {:hex, :prom_ex, "1.8.0", "662615e1d2f2ab3e0dc13a51c92ad0ccfcab24336a90cb9b114ee1bce9ef88aa", [:mix], [{:absinthe, ">= 1.6.0", [hex: :absinthe, repo: "hexpm", optional: true]}, {:broadway, ">= 1.0.2", [hex: :broadway, repo: "hexpm", optional: true]}, {:ecto, ">= 3.5.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:finch, "~> 0.15", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: false]}, {:oban, ">= 2.4.0", [hex: :oban, repo: "hexpm", optional: true]}, {:octo_fetch, "~> 0.3", [hex: :octo_fetch, repo: "hexpm", optional: false]}, {:phoenix, ">= 1.5.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_live_view, ">= 0.14.0", [hex: :phoenix_live_view, repo: "hexpm", optional: true]}, {:plug, ">= 
1.12.1", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, "~> 2.5", [hex: :plug_cowboy, repo: "hexpm", optional: false]}, {:telemetry, ">= 1.0.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}, {:telemetry_metrics_prometheus_core, "~> 1.0", [hex: :telemetry_metrics_prometheus_core, repo: "hexpm", optional: false]}, {:telemetry_poller, "~> 1.0", [hex: :telemetry_poller, repo: "hexpm", optional: false]}], "hexpm", "3eea763dfa941e25de50decbf17a6a94dbd2270e7b32f88279aa6e9bbb8e23e7"}, "quantum": {:hex, :quantum, "3.5.0", "8d2c5ba68c55991e8975aca368e3ab844ba01f4b87c4185a7403280e2c99cf34", [:mix], [{:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.14 or ~> 1.0", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_registry, "~> 0.2", [hex: :telemetry_registry, repo: "hexpm", optional: false]}], "hexpm", "cab737d1d9779f43cb1d701f46dd05ea58146fd96238d91c9e0da662c1982bb6"}, "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"}, diff --git a/priv/repo/migrations/20231012122039_add-queries-to-cache.exs b/priv/repo/migrations/20231012122039_add-queries-to-cache.exs new file mode 100644 index 0000000000..49905d90b0 --- /dev/null +++ b/priv/repo/migrations/20231012122039_add-queries-to-cache.exs @@ -0,0 +1,12 @@ +defmodule :"Elixir.Sanbase.Repo.Migrations.Add-queries-to-cache" do + use Ecto.Migration + + def change do + alter table(:dashboards_cache) do + # Add capabilities to store queries + add(:queries, :map, default: %{}, null: true) + # Allow the panels to be null now that we'll store queries + modify(:panels, :map, default: %{}, null: true) + end + end +end diff --git a/test/sanbase/billing/metric_access_level_test.exs b/test/sanbase/billing/metric_access_level_test.exs index 47fff037c4..385d5402b0 100644 --- a/test/sanbase/billing/metric_access_level_test.exs +++ b/test/sanbase/billing/metric_access_level_test.exs @@ -728,6 +728,9 @@ defmodule Sanbase.Billing.MetricAccessLevelTest do "exchange_open_interest", "open_interest_per_settlement_currency", "total_open_interest", + "funding_rates_aggregated_by_exchange", + "funding_rates_aggregated_by_settlement_currency", + "total_funding_rates_aggregated_per_asset", # social metrics "community_messages_count_telegram", "community_messages_count_total", diff --git a/test/sanbase/billing/query_access_level_test.exs b/test/sanbase/billing/query_access_level_test.exs index 9ea175c2ed..0eadb316ef 100644 --- a/test/sanbase/billing/query_access_level_test.exs +++ b/test/sanbase/billing/query_access_level_test.exs @@ -77,6 +77,7 @@ defmodule Sanbase.Billing.QueryAccessLevelTest do :get_available_metrics, :get_available_signals, :get_blockchain_address_labels, + :get_cached_dashboard_queries_executions, :get_chart_configuration_shared_access_token, :get_clickhouse_database_metadata, :get_clickhouse_query_execution_stats, diff --git a/test/sanbase/queries/queries_test.exs b/test/sanbase/queries/queries_test.exs index 5a6827fbdd..96fed4e5f1 100644 --- a/test/sanbase/queries/queries_test.exs +++ b/test/sanbase/queries/queries_test.exs @@ -590,6 +590,85 @@ defmodule Sanbase.QueriesTest do end end + describe "Caching" do + test "cache dashboard queries", context do + %{ + query: 
%{id: query_id} = query, + dashboard_query_mapping: %{id: dashboard_query_mapping_id} = dashboard_query_mapping, + dashboard: %{id: dashboard_id} = dashboard, + user: user, + query_metadata: query_metadata + } = context + + Sanbase.Mock.prepare_mock2(&Sanbase.ClickhouseRepo.query/2, {:ok, result_mock()}) + |> Sanbase.Mock.run_with_mocks(fn -> + {:ok, result} = + Sanbase.Queries.run_query(query, user, query_metadata, store_execution_details: false) + + {:ok, dashboard_cache} = + Sanbase.Dashboards.store_dashboard_query_execution( + dashboard.id, + dashboard_query_mapping.id, + result, + user.id + ) + + assert %Sanbase.Queries.DashboardCache{ + queries: %{}, + inserted_at: _, + updated_at: _ + } = dashboard_cache + end) + + # Test outside of the mock to make sure no database queries are made + {:ok, dashboard_cache} = + Sanbase.Dashboards.get_cached_dashboard_queries_executions(dashboard.id, user.id) + + assert %Sanbase.Queries.DashboardCache{ + queries: %{}, + inserted_at: _, + updated_at: _ + } = dashboard_cache + + assert %{ + query_id: ^query_id, + dashboard_query_mapping_id: ^dashboard_query_mapping_id, + clickhouse_query_id: "1774C4BC91E05698", + column_types: ["UInt64", "UInt64", "DateTime", "Float64", "DateTime"], + columns: ["asset_id", "metric_id", "dt", "value", "computed_at"], + dashboard_id: ^dashboard_id, + query_end_time: _, + query_start_time: _, + rows: [ + [ + 1482, + 1645, + ~U[1970-01-01 00:00:00Z], + 0.045183932486757644, + ~U[2023-07-26 13:10:51Z] + ], + [ + 1482, + 1647, + ~U[1970-01-01 00:00:00Z], + -0.13018891098082416, + ~U[2023-07-25 20:27:06Z] + ] + ], + summary: %{ + "read_bytes" => 408_534.0, + "read_rows" => 12667.0, + "result_bytes" => 0.0, + "result_rows" => 0.0, + "total_rows_to_read" => 4475.0, + "written_bytes" => 0.0, + "written_rows" => 0.0 + }, + updated_at: _ + } = dashboard_cache.queries[dashboard_query_mapping.id] + end + end + # MOCKS defp result_mock() do diff --git a/test/sanbase_web/channels/metric_channel_test.exs b/test/sanbase_web/channels/metric_channel_test.exs index 0bd5ce96dc..70f5c76b05 100644 --- a/test/sanbase_web/channels/metric_channel_test.exs +++ b/test/sanbase_web/channels/metric_channel_test.exs @@ -16,7 +16,7 @@ defmodule SanbaseWeb.MetricChannelTest do describe "anonymous user socket" do test "receive broadcast metric data" do # `jti` and `access_token` are not provided - {:ok, socket} = connect(SanbaseWeb.UserSocket, %{}, %{}) + {:ok, socket} = connect(SanbaseWeb.UserSocket, %{}) assert {:ok, %{}, _socket} = subscribe_and_join(socket, SanbaseWeb.MetricChannel, "metrics:price", %{}) @@ -39,7 +39,7 @@ defmodule SanbaseWeb.MetricChannelTest do test "broadcasting is fast" do # This is a very basic test that does not test represent # the real world scenario. 
- {:ok, socket} = connect(SanbaseWeb.UserSocket, %{}, %{}) + {:ok, socket} = connect(SanbaseWeb.UserSocket, %{}) assert {:ok, %{}, _socket} = subscribe_and_join(socket, SanbaseWeb.MetricChannel, "metrics:price", %{}) @@ -218,8 +218,7 @@ defmodule SanbaseWeb.MetricChannelTest do {:ok, socket} = connect( SanbaseWeb.UserSocket, - %{"access_token" => conn.private.plug_session["access_token"]}, - %{} + %{"access_token" => conn.private.plug_session["access_token"]} ) socket diff --git a/test/sanbase_web/channels/open_restricted_tab_channel_test.exs b/test/sanbase_web/channels/open_restricted_tab_channel_test.exs index cf7a3ea0a6..d7afcf8158 100644 --- a/test/sanbase_web/channels/open_restricted_tab_channel_test.exs +++ b/test/sanbase_web/channels/open_restricted_tab_channel_test.exs @@ -17,8 +17,7 @@ defmodule SanbaseWeb.OpenTabChannelTest do SanbaseWeb.UserSocket, %{ "access_token" => context.conn.private.plug_session["access_token"] - }, - %{} + } ) assert {:ok, %{}, %Phoenix.Socket{}} = @@ -45,8 +44,7 @@ defmodule SanbaseWeb.OpenTabChannelTest do SanbaseWeb.UserSocket, %{ "access_token" => context.conn.private.plug_session["access_token"] - }, - %{} + } ) {:ok, _, socket} = diff --git a/test/sanbase_web/channels/user_channel_test.exs b/test/sanbase_web/channels/user_channel_test.exs index 4ef3287a10..06f677f7e5 100644 --- a/test/sanbase_web/channels/user_channel_test.exs +++ b/test/sanbase_web/channels/user_channel_test.exs @@ -13,7 +13,7 @@ defmodule SanbaseWeb.UserChannelTest do describe "join common channel" do test "test join channel" do - assert {:ok, socket} = connect(SanbaseWeb.UserSocket, %{}, %{}) + assert {:ok, socket} = connect(SanbaseWeb.UserSocket, %{}) assert {:ok, %{}, %Phoenix.Socket{}} = subscribe_and_join( @@ -71,7 +71,7 @@ defmodule SanbaseWeb.UserChannelTest do end defp get_socket() do - {:ok, socket} = connect(SanbaseWeb.UserSocket, %{}, %{}) + {:ok, socket} = connect(SanbaseWeb.UserSocket, %{}) {:ok, _, socket} = subscribe_and_join(socket, SanbaseWeb.UserChannel, "users:common", %{}) @@ -86,8 +86,7 @@ defmodule SanbaseWeb.UserChannelTest do SanbaseWeb.UserSocket, %{ "access_token" => context.conn.private.plug_session["access_token"] - }, - %{} + } ) assert {:ok, %{}, %Phoenix.Socket{}} = @@ -108,8 +107,7 @@ defmodule SanbaseWeb.UserChannelTest do SanbaseWeb.UserSocket, %{ "access_token" => context.conn.private.plug_session["access_token"] - }, - %{} + } ) {:ok, _, socket} = diff --git a/test/sanbase_web/graphql/queries/queries_api_test.exs b/test/sanbase_web/graphql/queries/queries_api_test.exs index 6e9d2dd582..57a25bede5 100644 --- a/test/sanbase_web/graphql/queries/queries_api_test.exs +++ b/test/sanbase_web/graphql/queries/queries_api_test.exs @@ -328,7 +328,7 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do end end - describe "dashboard text widget" do + describe "Dashboards Text Widget" do test "create", context do {:ok, %{id: dashboard_id}} = Sanbase.Dashboards.create_dashboard(%{name: "My Dashboard"}, context.user.id) @@ -455,8 +455,12 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do end end - describe "run queries" do + describe "Run Queries" do test "run raw sql query", context do + # In test env the storing runs not async and there's a 7500ms sleep + Application.put_env(:__sanbase_queires__, :store_execution_details, false) + on_exit(fn -> Application.delete_env(:__sanbase_queires__, :store_execution_details) end) + mock_fun = Sanbase.Mock.wrap_consecutives( [ @@ -478,7 +482,7 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do run_sql_query(context.conn, 
:run_raw_sql_query, args) |> get_in(["data", "runRawSqlQuery"]) - assert result == %{ + assert %{ "clickhouseQueryId" => "177a5a3d-072b-48ac-8cf5-d8375c8314ef", "columns" => ["asset_id", "metric_id", "dt", "value", "computed_at"], "columnTypes" => ["UInt64", "UInt64", "DateTime", "Float64", "DateTime"], @@ -493,11 +497,15 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do "written_bytes" => 0.0, "written_rows" => 0.0 } - } + } = result end) end test "run sql query by id", context do + # In test env the storing runs not async and there's a 7500ms sleep + Application.put_env(:__sanbase_queires__, :store_execution_details, false) + on_exit(fn -> Application.delete_env(:__sanbase_queires__, :store_execution_details) end) + {:ok, query} = create_query(context.user.id) mock_fun = @@ -515,7 +523,8 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do run_sql_query(context.conn, :run_sql_query, %{id: query.id}) |> get_in(["data", "runSqlQuery"]) - assert result == %{ + # Use match `=` operator to avoid checking the queryStartTime and queryEndTime + assert %{ "clickhouseQueryId" => "177a5a3d-072b-48ac-8cf5-d8375c8314ef", "columns" => ["asset_id", "metric_id", "dt", "value", "computed_at"], "columnTypes" => ["UInt64", "UInt64", "DateTime", "Float64", "DateTime"], @@ -530,11 +539,64 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do "written_bytes" => 0.0, "written_rows" => 0.0 } - } + } = result end) end + test "delete dashboard query", context do + {:ok, query} = create_query(context.user.id) + + {:ok, dashboard} = + Sanbase.Dashboards.create_dashboard(%{name: "My Dashboard"}, context.user.id) + + # Add a query to a dashboard + mapping = + execute_dashboard_query_mutation(context.conn, :create_dashboard_query, %{ + dashboard_id: dashboard.id, + query_id: query.id, + settings: %{layout: [0, 1, 2, 3, 4]} + }) + |> get_in(["data", "createDashboardQuery"]) + + # Assert that the dashboard has exactly 1 query added + assert {:ok, %{queries: [_]}} = + Sanbase.Dashboards.get_dashboard(dashboard.id, context.user.id) + + result = + execute_dashboard_query_mutation(context.conn, :delete_dashboard_query, %{ + dashboard_id: dashboard.id, + dashboard_query_mapping_id: mapping["id"] + }) + |> get_in(["data", "deleteDashboardQuery"]) + + dashboard_query_mapping_id = mapping["id"] + + query_id = query.id + dashboard_id = dashboard.id + + assert %{ + "dashboard" => %{"id" => ^dashboard_id, "parameters" => %{}}, + "id" => ^dashboard_query_mapping_id, + "query" => %{ + "id" => ^query_id, + "sqlQueryParameters" => %{"limit" => 10, "slug" => "bitcoin"}, + "sqlQueryText" => + "SELECT * FROM intraday_metrics WHERE asset_id = get_asset_id({{slug}}) LIMIT {{limit}}" + }, + "settings" => %{"layout" => [0, 1, 2, 3, 4]} + } = result + + # Assert that the dashboard has no queries + + assert {:ok, %{queries: []}} = + Sanbase.Dashboards.get_dashboard(dashboard_id, context.user.id) + end + test "run dashboard query (resolve global params)", context do + # In test env the storing runs not async and there's a 7500ms sleep + Application.put_env(:__sanbase_queires__, :store_execution_details, false) + on_exit(fn -> Application.delete_env(:__sanbase_queires__, :store_execution_details) end) + {:ok, query} = create_query(context.user.id) {:ok, dashboard} = @@ -621,7 +683,7 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do }) |> get_in(["data", "runDashboardSqlQuery"]) - assert result == %{ + assert %{ "clickhouseQueryId" => "177a5a3d-072b-48ac-8cf5-d8375c8314ef", "columns" => ["asset_id", "metric_id", "dt", "value", "computed_at"], 
"columnTypes" => ["UInt64", "UInt64", "DateTime", "Float64", "DateTime"], @@ -636,9 +698,118 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do "written_bytes" => 0.0, "written_rows" => 0.0 } - } + } = result + end) + end + end + + describe "Caching" do + test "cache queries on a dashboard", context do + # In test env the storing runs not async and there's a 7500ms sleep + Application.put_env(:__sanbase_queires__, :store_execution_details, false) + on_exit(fn -> Application.delete_env(:__sanbase_queires__, :store_execution_details) end) + + {:ok, query} = Sanbase.Queries.create_query(%{name: "Query"}, context.user.id) + + {:ok, dashboard} = + Sanbase.Dashboards.create_dashboard(%{name: "Dashboard"}, context.user.id) + + {:ok, dashboard_query_mapping} = + Sanbase.Dashboards.add_query_to_dashboard( + dashboard.id, + query.id, + context.user.id + ) + + mock_fun = + Sanbase.Mock.wrap_consecutives( + [ + fn -> {:ok, mocked_clickhouse_result()} end, + fn -> {:ok, mocked_execution_details_result()} end + ], + arity: 2 + ) + + # Run a dashboard query. Expect the dashboard parameter to override + # the query local parameter + Sanbase.Mock.prepare_mock(Sanbase.ClickhouseRepo, :query, mock_fun) + |> Sanbase.Mock.run_with_mocks(fn -> + dashboard_query_mapping_id = dashboard_query_mapping.id + query_id = query.id + + result = + run_sql_query(context.conn, :run_dashboard_sql_query, %{ + dashboard_id: dashboard.id, + dashboard_query_mapping_id: dashboard_query_mapping.id + }) + |> get_in(["data", "runDashboardSqlQuery"]) + + compressed_result = Jason.encode!(result) |> :zlib.gzip() |> Base.encode64() + + stored = + store_dashboard_query_execution(context.conn, %{ + dashboard_id: dashboard.id, + dashboard_query_mapping_id: dashboard_query_mapping.id, + compressed_query_execution_result: compressed_result + }) + |> get_in(["data", "storeDashboardQueryExecution"]) + + assert %{ + "queries" => [ + %{ + "clickhouseQueryId" => "177a5a3d-072b-48ac-8cf5-d8375c8314ef", + "columnTypes" => ["UInt64", "UInt64", "DateTime", "Float64", "DateTime"], + "columns" => ["asset_id", "metric_id", "dt", "value", "computed_at"], + "dashboardQueryMappingId" => ^dashboard_query_mapping_id, + "queryStartTime" => query_start_time, + "queryEndTime" => query_end_time, + "queryId" => ^query_id, + "rows" => [ + [2503, 250, "2008-12-10T00:00:00Z", 0.0, "2020-02-28T15:18:42Z"], + [2503, 250, "2008-12-10T00:05:00Z", 0.0, "2020-02-28T15:18:42Z"] + ] + } + ] + } = stored + + assert datetime_close_to_now?(Sanbase.DateTimeUtils.from_iso8601!(query_start_time)) + assert datetime_close_to_now?(Sanbase.DateTimeUtils.from_iso8601!(query_end_time)) + + cache = + get_cached_dashboard_queries_executions(context.conn, %{dashboard_id: dashboard.id}) + |> get_in(["data", "getCachedDashboardQueriesExecutions"]) + + assert %{ + "queries" => [ + %{ + "queryId" => ^query_id, + "dashboardQueryMappingId" => ^dashboard_query_mapping_id, + "clickhouseQueryId" => "177a5a3d-072b-48ac-8cf5-d8375c8314ef", + "columnTypes" => ["UInt64", "UInt64", "DateTime", "Float64", "DateTime"], + "columns" => ["asset_id", "metric_id", "dt", "value", "computed_at"], + "queryStartTime" => query_start_time, + "queryEndTime" => query_end_time, + "rows" => [ + [2503, 250, "2008-12-10T00:00:00Z", 0.0, "2020-02-28T15:18:42Z"], + [2503, 250, "2008-12-10T00:05:00Z", 0.0, "2020-02-28T15:18:42Z"] + ] + } + ] + } = cache + + assert datetime_close_to_now?(Sanbase.DateTimeUtils.from_iso8601!(query_start_time)) + assert datetime_close_to_now?(Sanbase.DateTimeUtils.from_iso8601!(query_end_time)) 
end) end + + defp datetime_close_to_now?(dt) do + Sanbase.TestUtils.datetime_close_to( + Timex.now(), + dt, + 2, + :seconds + ) + end end describe "get clickhouse database information" do @@ -930,11 +1101,38 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do mutation = """ { #{query_name}(#{map_to_args(args)}){ + queryId + dashboardQueryMappingId + clickhouseQueryId + columnTypes + columns + rows + summary + queryStartTime + queryEndTime + } + } + """ + + conn + |> post("/graphql", mutation_skeleton(mutation)) + |> json_response(200) + end + + defp store_dashboard_query_execution(conn, args) do + mutation = """ + mutation{ + storeDashboardQueryExecution(#{map_to_args(args)}){ + queries{ + queryId + dashboardQueryMappingId + clickhouseQueryId columns - columnTypes rows - clickhouseQueryId - summary + columnTypes + queryStartTime + queryEndTime + } } } """ @@ -944,6 +1142,29 @@ defmodule SanbaseWeb.Graphql.QueriesApiTest do |> json_response(200) end + defp get_cached_dashboard_queries_executions(conn, args) do + query = """ + { + getCachedDashboardQueriesExecutions(#{map_to_args(args)}){ + queries{ + queryId + dashboardQueryMappingId + clickhouseQueryId + columnTypes + columns + rows + queryStartTime + queryEndTime + } + } + } + """ + + conn + |> post("/graphql", query_skeleton(query)) + |> json_response(200) + end + defp get_dashboard(conn, dashboard_id) do query = """ {