From e6853d90fe2d4855ebec84d797b9c894975b26f3 Mon Sep 17 00:00:00 2001
From: Rabbit
Date: Mon, 27 Jan 2025 17:47:54 +0800
Subject: [PATCH 1/3] feat: add fiber statistic (#2410)

---
 .../api/v2/fiber/graph_nodes_controller.rb    |  5 ++
 .../api/v2/fiber/statistics_controller.rb     | 25 ++++++++
 app/models/fiber_graph_channel.rb             |  4 +-
 app/models/fiber_statistic.rb                 | 30 +++++++++
 app/workers/fiber_graph_detect_worker.rb      | 64 ++++++++++++++-----
 app/workers/update_fiber_channel_worker.rb    | 14 ++++
 config/routes/v2.rb                           |  5 +-
 .../20250126022459_create_fiber_statistics.rb | 18 ++++++
 db/structure.sql                              | 63 +++++++++++++++++-
 lib/scheduler.rb                              |  3 +-
 10 files changed, 210 insertions(+), 21 deletions(-)
 create mode 100644 app/controllers/api/v2/fiber/statistics_controller.rb
 create mode 100644 app/models/fiber_statistic.rb
 create mode 100644 app/workers/update_fiber_channel_worker.rb
 create mode 100644 db/migrate/20250126022459_create_fiber_statistics.rb

diff --git a/app/controllers/api/v2/fiber/graph_nodes_controller.rb b/app/controllers/api/v2/fiber/graph_nodes_controller.rb
index 482500488..13e72fd0e 100644
--- a/app/controllers/api/v2/fiber/graph_nodes_controller.rb
+++ b/app/controllers/api/v2/fiber/graph_nodes_controller.rb
@@ -25,6 +25,11 @@ def show
             @graph_channels = @graph_channels.where.not(closed_transaction_id: nil)
           end
         end
+
+        def addresses
+          nodes = FiberGraphNode.all.select(:node_id, :addresses)
+          render json: { data: nodes.map { _1.attributes.except("id") } }
+        end
       end
     end
   end
diff --git a/app/controllers/api/v2/fiber/statistics_controller.rb b/app/controllers/api/v2/fiber/statistics_controller.rb
new file mode 100644
index 000000000..2a745cdf8
--- /dev/null
+++ b/app/controllers/api/v2/fiber/statistics_controller.rb
@@ -0,0 +1,25 @@
+module Api
+  module V2
+    module Fiber
+      class StatisticsController < BaseController
+        def index
+          expires_in 15.minutes, public: true, stale_while_revalidate: 5.minutes, stale_if_error: 5.minutes
+          statistics = FiberStatistic.order(created_at_unixtimestamp: :desc).limit(7)
+
+          render json: {
+            data: statistics.map do |statistic|
+              statistic.attributes.except("id", "created_at", "updated_at").transform_values(&:to_s)
+            end,
+          }
+        end
+
+        def show
+          expires_in 15.minutes, public: true, stale_while_revalidate: 5.minutes, stale_if_error: 5.minutes
+          statistics = FiberStatistic.filter_by_indicator(params[:id]).order(created_at_unixtimestamp: :desc).limit(14)
+
+          render json: { data: statistics.map { _1.attributes.except("id").transform_values(&:to_s) } }
+        end
+      end
+    end
+  end
+end
diff --git a/app/models/fiber_graph_channel.rb b/app/models/fiber_graph_channel.rb
index ca2ac583e..5d5e511ef 100644
--- a/app/models/fiber_graph_channel.rb
+++ b/app/models/fiber_graph_channel.rb
@@ -16,7 +16,7 @@ def open_transaction_info
     open_transaction.as_json(only: %i[tx_hash block_number block_timestamp]).merge(
       {
         capacity: funding_cell.capacity,
-        udt_amount: funding_cell.udt_amount,
+        udt_info: funding_cell.udt_info,
         address: funding_cell.address_hash,
       },
     )
@@ -29,7 +29,7 @@ def closed_transaction_info
       close_accounts: closed_transaction.outputs.map do |cell|
         {
           capacity: cell.capacity,
-          udt_amount: cell.udt_amount,
+          udt_info: cell.udt_info,
           address: cell.address_hash,
         }
       end,
diff --git a/app/models/fiber_statistic.rb b/app/models/fiber_statistic.rb
new file mode 100644
index 000000000..0796fcc9d
--- /dev/null
+++ b/app/models/fiber_statistic.rb
@@ -0,0 +1,30 @@
+class FiberStatistic < ApplicationRecord
+  VALID_INDICATORS = %w(total_nodes total_channels total_liquidity created_at_unixtimestamp).freeze
+
+  scope :filter_by_indicator, ->(indicator) {
+    raise ArgumentError, "Invalid indicator" unless VALID_INDICATORS.include?(indicator.to_s)
+
+    select(indicator, :created_at_unixtimestamp)
+  }
+end
+
+# == Schema Information
+#
+# Table name: fiber_statistics
+#
+#  id                        :bigint           not null, primary key
+#  total_nodes               :integer
+#  total_channels            :integer
+#  total_liquidity           :bigint
+#  mean_value_locked         :bigint
+#  mean_fee_rate             :integer
+#  medium_value_locked       :bigint
+#  medium_fee_rate           :integer
+#  created_at_unixtimestamp  :integer
+#  created_at                :datetime         not null
+#  updated_at                :datetime         not null
+#
+# Indexes
+#
+#  index_fiber_statistics_on_created_at_unixtimestamp  (created_at_unixtimestamp) UNIQUE
+#
diff --git a/app/workers/fiber_graph_detect_worker.rb b/app/workers/fiber_graph_detect_worker.rb
index bdb3c8991..528723b1c 100644
--- a/app/workers/fiber_graph_detect_worker.rb
+++ b/app/workers/fiber_graph_detect_worker.rb
@@ -8,18 +8,15 @@ def perform
     @graph_node_ids = []
     @graph_channel_outpoints = []
 
-    # sync graph nodes and channels
-    ["nodes", "channels"].each { fetch_graph_infos(_1) }
-    # purge outdated graph nodes
-    FiberGraphNode.where.not(node_id: @graph_node_ids).destroy_all
-    # purge outdated graph channels
-    FiberGraphChannel.where.not(channel_outpoint: @graph_channel_outpoints).destroy_all
-    # check channel is closed
-    FiberGraphChannel.open_channels.each do |channel|
-      funding_cell = channel.funding_cell
-      if funding_cell.consumed_by
-        channel.update(closed_transaction_id: funding_cell.consumed_by_id)
-      end
+    ApplicationRecord.transaction do
+      # sync graph nodes and channels
+      ["nodes", "channels"].each { fetch_graph_infos(_1) }
+      # purge outdated graph nodes
+      FiberGraphNode.where.not(node_id: @graph_node_ids).destroy_all
+      # purge outdated graph channels
+      FiberGraphChannel.where.not(channel_outpoint: @graph_channel_outpoints).destroy_all
+      # generate statistic
+      compute_statistic
     end
   end
 
@@ -42,7 +39,7 @@ def fetch_graph_infos(data_type)
 
   def fetch_nodes(last_cursor)
     data = rpc.graph_nodes(ENV["FIBER_NODE_URL"], { limit: "0x64", after: last_cursor })
-    ApplicationRecord.transaction { data.dig("result", "nodes").each { upsert_node_with_cfg_info(_1) } }
+    data.dig("result", "nodes").each { upsert_node_with_cfg_info(_1) }
     data.dig("result", "last_cursor")
   rescue StandardError => e
     Rails.logger.error("Error fetching nodes: #{e.message}")
@@ -54,9 +51,6 @@ def fetch_channels(last_cursor)
     channel_attributes = data.dig("result", "channels").map { build_channel_attributes(_1) }.compact
     FiberGraphChannel.upsert_all(channel_attributes, unique_by: %i[channel_outpoint]) if channel_attributes.any?
     data.dig("result", "last_cursor")
-  rescue StandardError => e
-    Rails.logger.error("Error fetching channels: #{e.message}")
-    nil
   end
 
   def upsert_node_with_cfg_info(node)
@@ -124,6 +118,44 @@ def extract_peer_id(addresses)
     end
   end
 
+  def compute_statistic
+    total_nodes = FiberGraphNode.count
+    total_channels = FiberGraphChannel.count
+    # total funds locked in channels
+    total_liquidity = FiberGraphChannel.sum(:capacity)
+    # mean funds locked per channel
+    mean_value_locked = total_channels.zero? ? 0.0 : total_liquidity.to_f / total_channels
+    # mean fee rate
+    mean_fee_rate = FiberGraphChannel.average("fee_rate_of_node1 + fee_rate_of_node2") || 0.0
+    # fetch capacity data
+    capacities = FiberGraphChannel.pluck(:capacity).compact
+    # fetch and combine the fee_rate_of_node1 and fee_rate_of_node2 data
+    fee_rate_of_node1 = FiberGraphChannel.pluck(:fee_rate_of_node1).compact
+    fee_rate_of_node2 = FiberGraphChannel.pluck(:fee_rate_of_node2).compact
+    combined_fee_rates = fee_rate_of_node1 + fee_rate_of_node2
+    # calculate the medians
+    medium_value_locked = calculate_median(capacities)
+    medium_fee_rate = calculate_median(combined_fee_rates)
+    created_at_unixtimestamp = Time.now.beginning_of_day.to_i
+    FiberStatistic.upsert(
+      { total_nodes:, total_channels:, total_liquidity:,
+        mean_value_locked:, mean_fee_rate:, medium_value_locked:,
+        medium_fee_rate:, created_at_unixtimestamp: }, unique_by: %i[created_at_unixtimestamp]
+    )
+  end
+
+  def calculate_median(array)
+    sorted = array.sort
+    count = sorted.size
+    return nil if count.zero?
+
+    if count.odd?
+      sorted[count / 2] # odd count: take the middle value
+    else
+      (sorted[(count / 2) - 1] + sorted[count / 2]).to_f / 2 # even count: average the two middle values
+    end
+  end
+
   def rpc
     @rpc ||= FiberCoordinator.instance
   end
diff --git a/app/workers/update_fiber_channel_worker.rb b/app/workers/update_fiber_channel_worker.rb
new file mode 100644
index 000000000..e6012d1eb
--- /dev/null
+++ b/app/workers/update_fiber_channel_worker.rb
@@ -0,0 +1,14 @@
+class UpdateFiberChannelWorker
+  include Sidekiq::Worker
+  sidekiq_options queue: "fiber"
+
+  def perform
+    # check channel is closed
+    FiberGraphChannel.open_channels.each do |channel|
+      funding_cell = channel.funding_cell
+      if funding_cell.consumed_by
+        channel.update(closed_transaction_id: funding_cell.consumed_by_id)
+      end
+    end
+  end
+end
diff --git a/config/routes/v2.rb b/config/routes/v2.rb
index 250a23fd7..8c15cad3d 100644
--- a/config/routes/v2.rb
+++ b/config/routes/v2.rb
@@ -103,8 +103,11 @@
     namespace :fiber do
       resources :peers, param: :peer_id, only: %i[index show create]
       resources :channels, param: :channel_id, only: :show
-      resources :graph_nodes, param: :node_id, only: %i[index show]
+      resources :graph_nodes, param: :node_id, only: %i[index show] do
+        get :addresses, on: :collection
+      end
       resources :graph_channels, only: :index
+      resources :statistics, only: %i[index show]
     end
     resources :udt_hourly_statistics, only: :show
     resources :rgb_assets_statistics, only: :index
diff --git a/db/migrate/20250126022459_create_fiber_statistics.rb b/db/migrate/20250126022459_create_fiber_statistics.rb
new file mode 100644
index 000000000..6ae66af5f
--- /dev/null
+++ b/db/migrate/20250126022459_create_fiber_statistics.rb
@@ -0,0 +1,18 @@
+class CreateFiberStatistics < ActiveRecord::Migration[7.0]
+  def change
+    create_table :fiber_statistics do |t|
+      t.integer :total_nodes
+      t.integer :total_channels
+      t.bigint :total_liquidity
+      t.bigint :mean_value_locked
+      t.integer :mean_fee_rate
+      t.bigint :medium_value_locked
+      t.integer :medium_fee_rate
+      t.integer :created_at_unixtimestamp
+
+      t.timestamps
+    end
+
+    add_index :fiber_statistics, :created_at_unixtimestamp, unique: true
+  end
+end
diff --git a/db/structure.sql b/db/structure.sql
index 368eb8864..3fa3f6445 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -1865,6 +1865,44 @@ CREATE SEQUENCE public.fiber_peers_id_seq
 ALTER SEQUENCE public.fiber_peers_id_seq OWNED BY public.fiber_peers.id;
 
 
+--
+-- Name: fiber_statistics; Type: TABLE; Schema: public; Owner: -
+--
+
+CREATE TABLE public.fiber_statistics (
+    id bigint NOT NULL,
+    total_nodes integer,
+    total_channels integer,
+    total_liquidity bigint,
+    mean_value_locked bigint,
+    mean_fee_rate integer,
+    medium_value_locked bigint,
+    medium_fee_rate integer,
+    created_at_unixtimestamp integer,
+    created_at timestamp(6) without time zone NOT NULL,
+    updated_at timestamp(6) without time zone NOT NULL
+);
+
+
+--
+-- Name: fiber_statistics_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE public.fiber_statistics_id_seq
+    START WITH 1
+    INCREMENT BY 1
+    NO MINVALUE
+    NO MAXVALUE
+    CACHE 1;
+
+
+--
+-- Name: fiber_statistics_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE public.fiber_statistics_id_seq OWNED BY public.fiber_statistics.id;
+
+
 --
 -- Name: fiber_transactions; Type: TABLE; Schema: public; Owner: -
 --
@@ -3311,6 +3349,13 @@ ALTER TABLE ONLY public.fiber_graph_nodes ALTER COLUMN id SET DEFAULT nextval('p
 ALTER TABLE ONLY public.fiber_peers ALTER COLUMN id SET DEFAULT nextval('public.fiber_peers_id_seq'::regclass);
 
 
+--
+-- Name: fiber_statistics id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.fiber_statistics ALTER COLUMN id SET DEFAULT nextval('public.fiber_statistics_id_seq'::regclass);
+
+
 --
 -- Name: fiber_transactions id; Type: DEFAULT; Schema: public; Owner: -
 --
@@ -3889,6 +3934,14 @@ ALTER TABLE ONLY public.fiber_peers
     ADD CONSTRAINT fiber_peers_pkey PRIMARY KEY (id);
 
 
+--
+-- Name: fiber_statistics fiber_statistics_pkey; Type: CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.fiber_statistics
+    ADD CONSTRAINT fiber_statistics_pkey PRIMARY KEY (id);
+
+
 --
 -- Name: fiber_transactions fiber_transactions_pkey; Type: CONSTRAINT; Schema: public; Owner: -
 --
@@ -5085,6 +5138,13 @@ CREATE UNIQUE INDEX index_fiber_graph_nodes_on_node_id ON public.fiber_graph_nod
 CREATE UNIQUE INDEX index_fiber_peers_on_peer_id ON public.fiber_peers USING btree (peer_id);
 
 
+--
+-- Name: index_fiber_statistics_on_created_at_unixtimestamp; Type: INDEX; Schema: public; Owner: -
+--
+
+CREATE UNIQUE INDEX index_fiber_statistics_on_created_at_unixtimestamp ON public.fiber_statistics USING btree (created_at_unixtimestamp);
+
+
 --
 -- Name: index_fiber_udt_cfg_infos_on_fiber_graph_node_id_and_udt_id; Type: INDEX; Schema: public; Owner: -
 --
@@ -6318,6 +6378,7 @@ INSERT INTO "schema_migrations" (version) VALUES
 ('20241225045757'),
 ('20241231022644'),
 ('20250103072945'),
-('20250108053433');
+('20250108053433'),
+('20250126022459');
 
 
diff --git a/lib/scheduler.rb b/lib/scheduler.rb
index b9006f311..06a84ed31 100644
--- a/lib/scheduler.rb
+++ b/lib/scheduler.rb
@@ -124,6 +124,7 @@ def call_worker(clz)
 
 s.every "5m", overlap: false do
   call_worker TokenCollectionTagWorker
+  call_worker UpdateFiberChannelWorker
 end
 
 s.every "10m", overlap: false do
@@ -134,7 +135,7 @@ def call_worker(clz)
   call_worker AnalyzeContractFromCellDependencyWorker
 end
 
-s.every "5m", overlap: false do
+s.every "0 */4 * * *", overlap: false do
   call_worker FiberGraphDetectWorker
 end
 

From 9ef25e828fda17f1171405420064416a3a13b564 Mon Sep 17 00:00:00 2001
From: Miles Zhang
Date: Tue, 28 Jan 2025 10:08:28 +0800
Subject: [PATCH 2/3] fix: scheduler cron expression (#2413)

Signed-off-by: Miles Zhang
---
 lib/scheduler.rb | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/scheduler.rb b/lib/scheduler.rb
index 06a84ed31..e22eb954b 100644
--- a/lib/scheduler.rb
+++ b/lib/scheduler.rb
@@ -135,7 +135,7 @@ def call_worker(clz)
   call_worker AnalyzeContractFromCellDependencyWorker
 end
 
-s.every "0 */4 * * *", overlap: false do
+s.cron "0 */4 * * *", overlap: false do
   call_worker FiberGraphDetectWorker
 end
 

From ea631d5846d79676cc655fc95ad8bcf0b4196477 Mon Sep 17 00:00:00 2001
From: Miles Zhang
Date: Sat, 1 Feb 2025 17:00:34 +0800
Subject: [PATCH 3/3] fix: set token collection h24_ckb_transactions_count 0 before update (#2415)

Signed-off-by: Miles Zhang
---
 .../update_h24_ckb_transactions_count_on_collections_worker.rb | 1 +
 1 file changed, 1 insertion(+)

diff --git a/app/workers/update_h24_ckb_transactions_count_on_collections_worker.rb b/app/workers/update_h24_ckb_transactions_count_on_collections_worker.rb
index 2fea4447f..18f2c2c48 100644
--- a/app/workers/update_h24_ckb_transactions_count_on_collections_worker.rb
+++ b/app/workers/update_h24_ckb_transactions_count_on_collections_worker.rb
@@ -3,6 +3,7 @@ class UpdateH24CkbTransactionsCountOnCollectionsWorker
   sidekiq_options queue: "low"
 
   def perform
+    TokenCollection.update_all(h24_ckb_transactions_count: 0)
     TokenItem.joins(:collection).includes(:collection).where("token_items.updated_at > ?", 1.hour.ago).each do |item|
       item.collection.update_h24_ckb_transactions_count
     end