Deploy to mainnet #1640

Merged: 10 commits, Feb 26, 2024
50 changes: 24 additions & 26 deletions app/models/ckb_sync/new_node_data_processor.rb
@@ -10,7 +10,7 @@ class NewNodeDataProcessor
value :reorg_started_at, global: true
attr_accessor :local_tip_block, :pending_raw_block, :ckb_txs, :target_block, :addrs_changes,
:outputs, :inputs, :outputs_data, :udt_address_ids, :contained_address_ids,
:dao_address_ids, :contained_udt_ids, :cell_datas, :enable_cota
:dao_address_ids, :contained_udt_ids, :cell_datas, :enable_cota, :token_transfer_ckb_tx_ids

def initialize(enable_cota = ENV["COTA_AGGREGATOR_URL"].present?)
@enable_cota = enable_cota
@@ -68,13 +68,14 @@ def process_block(node_block, refresh_balance: true)
@dao_address_ids = dao_address_ids = []
@contained_udt_ids = contained_udt_ids = []
@contained_address_ids = contained_address_ids = []
@token_transfer_ckb_tx_ids = token_transfer_ckb_tx_ids = Set.new

benchmark :process_ckb_txs, node_block, ckb_txs, contained_address_ids,
contained_udt_ids, dao_address_ids, tags, udt_address_ids
addrs_changes = Hash.new { |hash, key| hash[key] = {} }

input_capacities, output_capacities = benchmark :build_cells_and_locks!, local_block, node_block, ckb_txs, inputs, outputs,
tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_address_ids, addrs_changes
tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_address_ids, addrs_changes, token_transfer_ckb_tx_ids

# update explorer data
benchmark :update_ckb_txs_rel_and_fee, ckb_txs, tags, input_capacities, output_capacities, udt_address_ids,
@@ -94,6 +95,7 @@ def process_block(node_block, refresh_balance: true)
generate_statistics_data(local_block)
generate_deployed_cells_and_referring_cells(local_block)
detect_cota_infos(local_block)
invoke_token_transfer_detect_worker(token_transfer_ckb_tx_ids)

local_block.update_counter_for_ckb_node_version
local_block
@@ -131,6 +133,12 @@ def detect_cota_infos(local_block)
FetchCotaWorker.perform_async(local_block.number) if enable_cota
end

def invoke_token_transfer_detect_worker(token_transfer_ckb_tx_ids)
token_transfer_ckb_tx_ids.each do |tx_id|
TokenTransferDetectWorker.perform_async(tx_id)
end
end

def process_ckb_txs(
node_block, ckb_txs, contained_address_ids, contained_udt_ids, dao_address_ids, tags,
udt_address_ids
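
Note: below is a minimal, stand-alone sketch of the batching pattern this change introduces, assuming the standard Sidekiq worker interface. `TokenTransferDetectWorkerStub` and `enqueue_token_transfer_detection` are illustrative names, not code from this repository.

```ruby
# Stand-alone sketch, not the production class: ids collected into a Set are
# deduplicated and only enqueued after block processing has finished.
require "set"
require "sidekiq"

class TokenTransferDetectWorkerStub
  include Sidekiq::Worker

  def perform(tx_id)
    # Token-transfer detection for the given ckb_transaction id would run here.
  end
end

def enqueue_token_transfer_detection(ckb_tx_ids)
  token_transfer_ckb_tx_ids = Set.new

  # The same transaction id can appear for several token cells in one block;
  # the Set guarantees each id is enqueued at most once.
  ckb_tx_ids.each { |id| token_transfer_ckb_tx_ids << id }

  # Jobs are pushed once, after processing, instead of from inside the
  # per-cell loops.
  token_transfer_ckb_tx_ids.each do |tx_id|
    TokenTransferDetectWorkerStub.perform_async(tx_id)
  end
end
```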
@@ -674,23 +682,13 @@ def build_udts!(local_block, outputs, outputs_data)
parsed_spore_cell = CkbUtils.parse_spore_cell_data(outputs_data[tx_index][index])
if parsed_spore_cell[:cluster_id].present?
binary_hashes = CkbUtils.hexes_to_bins_sql(CkbSync::Api.instance.spore_cluster_code_hashes)
spore_cluster_type = TypeScript.where("code_hash IN (#{binary_hashes})").where(
args: parsed_spore_cell[:cluster_id],
).first
if spore_cluster_type.present?
spore_cluster_cell = spore_cluster_type.cell_outputs.last
parsed_cluster_data = CkbUtils.parse_spore_cluster_data(spore_cluster_cell.data)
coll = TokenCollection.find_or_create_by(
standard: "spore",
name: parsed_cluster_data[:name],
description: parsed_cluster_data[:description],
cell_id: spore_cluster_cell.id,
creator_id: spore_cluster_cell.address_id,
)

nft_token_attr[:full_name] = parsed_cluster_data[:name]
nft_token_attr[:published] = true
end
spore_cluster_type_ids = TypeScript.where("code_hash IN (#{binary_hashes})").where(hash_type: "data1",
args: parsed_spore_cell[:cluster_id]).pluck(:id)

spore_cluster_cell = CellOutput.live.where(type_script_id: spore_cluster_type_ids).last
parsed_cluster_data = CkbUtils.parse_spore_cluster_data(spore_cluster_cell.data)
nft_token_attr[:full_name] = parsed_cluster_data[:name]
nft_token_attr[:published] = true
end
end
if cell_type == "nrc_721_token"
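
Note: a hedged restatement of the lookup the added lines perform, assuming `CkbUtils.hexes_to_bins_sql` renders the cluster code hashes for a SQL `IN` clause and `CellOutput.live` scopes to unspent cells. It reuses `parsed_spore_cell` and `nft_token_attr` from the surrounding method; the nil guard is added for illustration and is not part of the diff.

```ruby
# Illustrative: resolve a spore cluster's current metadata from its cluster id.
binary_hashes = CkbUtils.hexes_to_bins_sql(CkbSync::Api.instance.spore_cluster_code_hashes)

cluster_type_ids =
  TypeScript.where("code_hash IN (#{binary_hashes})")
            .where(hash_type: "data1", args: parsed_spore_cell[:cluster_id])
            .pluck(:id)

# The newest live (unspent) cluster cell is treated as the source of truth.
cluster_cell = CellOutput.live.where(type_script_id: cluster_type_ids).last
if cluster_cell
  cluster_data = CkbUtils.parse_spore_cluster_data(cluster_cell.data)
  nft_token_attr[:full_name] = cluster_data[:name]
  nft_token_attr[:published] = true
end
```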
@@ -819,7 +817,7 @@ def update_ckb_txs_rel_and_fee(

def build_cells_and_locks!(
local_block, node_block, ckb_txs, inputs, outputs, tags, udt_address_ids,
dao_address_ids, contained_udt_ids, contained_addr_ids, addrs_changes
dao_address_ids, contained_udt_ids, contained_addr_ids, addrs_changes, token_transfer_ckb_tx_ids
)
cell_outputs_attributes = []
cell_inputs_attributes = []
@@ -879,7 +877,7 @@ def build_cells_and_locks!(
# prepare script ids for insert cell_outputs
prepare_script_ids(outputs)
build_cell_outputs!(node_block, outputs, ckb_txs, local_block, cell_outputs_attributes, output_capacities, tags,
udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, addrs_changes)
udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, addrs_changes, token_transfer_ckb_tx_ids)
if cell_outputs_attributes.present?
id_hashes = CellOutput.upsert_all(cell_outputs_attributes, unique_by: %i[tx_hash cell_index],
returning: %i[id data_hash])
@@ -903,7 +901,7 @@ def build_cells_and_locks!(
prev_outputs = nil
build_cell_inputs(inputs, ckb_txs, local_block.id, cell_inputs_attributes, prev_cell_outputs_attributes,
input_capacities, tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids,
prev_outputs, addrs_changes)
prev_outputs, addrs_changes, token_transfer_ckb_tx_ids)

CellInput.upsert_all(cell_inputs_attributes,
unique_by: %i[ckb_transaction_id index])
@@ -996,7 +994,7 @@ def script_attributes(script, script_hash)

def build_cell_inputs(
inputs, ckb_txs, local_block_id, cell_inputs_attributes, prev_cell_outputs_attributes,
input_capacities, tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, prev_outputs, addrs_changes
input_capacities, tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, prev_outputs, addrs_changes, token_transfer_ckb_tx_ids
)
tx_index = 0

@@ -1041,7 +1039,7 @@ def build_cell_inputs(
change_rec[:dao_txs] ||= Set.new
change_rec[:dao_txs] << ckb_txs[tx_index]["tx_hash"]
elsif cell_type.in?(%w(m_nft_token nrc_721_token spore_cell))
TokenTransferDetectWorker.perform_async(ckb_txs[tx_index]["id"])
token_transfer_ckb_tx_ids << ckb_txs[tx_index]["id"]
end

case previous_output[:cell_type]
@@ -1069,7 +1067,7 @@ def build_cell_outputs!(

def build_cell_outputs!(
node_block, outputs, ckb_txs, local_block, cell_outputs_attributes, output_capacities,
tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, addrs_changes
tags, udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, addrs_changes, token_transfer_ckb_tx_ids
)
outputs.each do |tx_index, items|
cell_index = 0
@@ -1122,7 +1120,7 @@ def build_cell_outputs!(
type_hash: item.type.compute_hash, udt_type: "omiga_inscription",
).pick(:id)
elsif attr[:cell_type].in?(%w(m_nft_token nrc_721_token spore_cell))
TokenTransferDetectWorker.perform_async(ckb_txs[tx_index]["id"])
token_transfer_ckb_tx_ids << ckb_txs[tx_index]["id"]
end

output_capacities[tx_index] += item.capacity if tx_index != 0
1 change: 0 additions & 1 deletion app/workers/clean_up_worker.rb
@@ -2,7 +2,6 @@ class CleanUpWorker
include Sidekiq::Worker

def perform
TokenCollection.remove_corrupted
CkbTransaction.tx_pending.where("created_at < ?", 1.day.ago).destroy_all
CkbTransaction.tx_rejected.where("created_at < ?", 3.months.ago).destroy_all
end
45 changes: 27 additions & 18 deletions app/workers/token_transfer_detect_worker.rb
@@ -144,24 +144,33 @@ def find_or_create_m_nft_collection(_cell, type_script)
def find_or_create_spore_collection(_cell, type_script)
spore_cell = type_script.cell_outputs.order("id desc").first
parsed_spore_cell = CkbUtils.parse_spore_cell_data(spore_cell.data)
binary_hashes = CkbUtils.hexes_to_bins_sql(CkbSync::Api.instance.spore_cluster_code_hashes)
spore_cluster_type = TypeScript.where("code_hash IN (#{binary_hashes})").where(
args: parsed_spore_cell[:cluster_id],
).first
coll = TokenCollection.find_or_create_by(
standard: "spore",
type_script_id: spore_cluster_type.id,
sn: spore_cluster_type.script_hash,
)
spore_cluster_cell = spore_cluster_type.cell_outputs.order("id desc").first
if spore_cluster_cell.present? && coll.creator_id != spore_cluster_cell.address_id
parsed_cluster_data = CkbUtils.parse_spore_cluster_data(spore_cluster_cell.data)
coll.creator_id = spore_cluster_cell.address_id
coll.cell_id = spore_cluster_cell.id
coll.name = parsed_cluster_data[:name]
coll.description = parsed_cluster_data[:description]
coll.save
if parsed_spore_cell[:cluster_id].nil?
spore_cluster_type = TypeScript.find_or_create_by(code_hash: CkbSync::Api.instance.spore_cluster_code_hashes.first, hash_type: "data1",
args: parsed_spore_cell[:cluster_id])
TokenCollection.find_or_create_by(
standard: "spore",
sn: spore_cluster_type.script_hash,
description: "Only for no cluster spore cell",
)
else
binary_hashes = CkbUtils.hexes_to_bins_sql(CkbSync::Api.instance.spore_cluster_code_hashes)
spore_cluster_type_ids = TypeScript.where("code_hash IN (#{binary_hashes})").where(hash_type: "data1",
args: parsed_spore_cell[:cluster_id]).pluck(:id)
spore_cluster_cell = CellOutput.live.where(type_script_id: spore_cluster_type_ids).last
coll = TokenCollection.find_or_create_by(
standard: "spore",
sn: spore_cluster_cell.type_hash,
)
if spore_cluster_cell.present? && coll.creator_id != spore_cluster_cell.address_id
parsed_cluster_data = CkbUtils.parse_spore_cluster_data(spore_cluster_cell.data)
coll.type_script_id = spore_cluster_cell.type_script_id
coll.creator_id = spore_cluster_cell.address_id
coll.cell_id = spore_cluster_cell.id
coll.name = parsed_cluster_data[:name]
coll.description = parsed_cluster_data[:description]
coll.save
end
coll
end
coll
end
end
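
Note: a condensed sketch of the two paths the rewritten `find_or_create_spore_collection` now takes, assuming the same models and helpers. `spore_collection_for` is an illustrative name, and the sketch omits the creator/name/description backfill shown above.

```ruby
# Illustrative outline only.
def spore_collection_for(parsed_spore_cell)
  if parsed_spore_cell[:cluster_id].nil?
    # Cluster-less spores share one placeholder collection keyed by a synthetic
    # cluster type script.
    cluster_type = TypeScript.find_or_create_by(
      code_hash: CkbSync::Api.instance.spore_cluster_code_hashes.first,
      hash_type: "data1",
      args: parsed_spore_cell[:cluster_id], # nil in this branch
    )
    TokenCollection.find_or_create_by(
      standard: "spore",
      sn: cluster_type.script_hash,
      description: "Only for no cluster spore cell",
    )
  else
    # Clustered spores resolve their collection from the newest live cluster cell.
    binary_hashes = CkbUtils.hexes_to_bins_sql(CkbSync::Api.instance.spore_cluster_code_hashes)
    type_ids = TypeScript.where("code_hash IN (#{binary_hashes})")
                         .where(hash_type: "data1", args: parsed_spore_cell[:cluster_id])
                         .pluck(:id)
    cluster_cell = CellOutput.live.where(type_script_id: type_ids).last
    TokenCollection.find_or_create_by(standard: "spore", sn: cluster_cell.type_hash)
  end
end
```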
10 changes: 1 addition & 9 deletions config/initializers/sentry.rb
@@ -1,14 +1,6 @@
Sentry.init do |config|
config.dsn = ENV["SENTRY_DSN"]
config.breadcrumbs_logger = [:active_support_logger, :http_logger]

# Set tracesSampleRate to 1.0 to capture 100%
# of transactions for performance monitoring.
# We recommend adjusting this value in production
config.traces_sample_rate = ENV.fetch("SENTRY_SAMPLE_RATE", 1.0).to_f
config.traces_sampler = lambda do |context|
true
end
config.breadcrumbs_logger = %i[active_support_logger http_logger]
config.logger = Logger.new(STDERR)
config.enabled_environments = %w[production staging]
end
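
Note: the resulting initializer with comments on what each remaining setting does; the comments describe standard sentry-ruby behavior and are explanatory additions, not part of the diff.

```ruby
Sentry.init do |config|
  # The DSN comes from the environment so each deployment reports to its own project.
  config.dsn = ENV["SENTRY_DSN"]

  # Record ActiveSupport notifications and outgoing HTTP calls as breadcrumbs.
  config.breadcrumbs_logger = %i[active_support_logger http_logger]

  # Send SDK diagnostics to STDERR instead of the Rails log.
  config.logger = Logger.new(STDERR)

  # Only report events from production and staging; other environments stay silent.
  config.enabled_environments = %w[production staging]
end
```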