-- Sandesh's copy of Sam's tutorial
--     Updated 2024-09-24
    -- forked from Sam's tutorial @ https://flipsidecrypto.xyz/studio/queries/f4d4ef0e-e5a8-477b-bc1a-f964d3edb9ea

    -- Decode the ABI-encoded calldata of one specific trace to
    -- 0x00000000000cC7ba78E64E86B2Bd59B1ae7F569E into 32-byte words.
    -- Step 1 (seg): fetch the trace and split its input into 64-hex-char chunks,
    -- one array element per 32-byte ABI word.
    with seg as
    (
    select
    -- *
    -- ,regexp_substr_all(SUBSTR(input, 10), '.{64}') AS segmented
    input,
    -- Drop the first 10 chars ('0x' + 8-hex-char function selector); each ABI word
    -- is 32 bytes = 64 hex chars, so split the remainder into 64-char pieces.
    regexp_substr_all(SUBSTR(input,11),'.{64}') as segmented
    from base.core.fact_traces
    where 1=1
    -- Narrow the scan before matching the exact transaction hash below.
    and block_number>17734723
    and tx_hash=lower('0x8b4d920c04d580edbefacd476e549716692c5d2a2733db6ce4311fb9ea206151')
    and identifier='CALL_ORIGIN'
    and to_address=lower('0x00000000000cC7ba78E64E86B2Bd59B1ae7F569E')
    -- NOTE(review): substr(input,0,11) takes 11 chars ('0x' + 9 hex digits) — one nibble
    -- past the 8-hex-digit selector. This only matches when the first data word's leading
    -- nibble is 0; the conventional form would be substr(input,1,10)='0x346758b9'. Confirm
    -- the intended selector before generalizing this filter beyond the pinned tx_hash.
    and substr(input,0,11)='0x346758b90'
    -- group by 1
    -- order by 2 desc
    -- limit 100
    )
    -- Step 2: interpret each 32-byte word. Words 0-3 are head offsets (in bytes, hence /32
    -- to convert to word indexes) into the dynamic data area; later words are decoded per
    -- the nesting documented on each line (offsets are relative to their parent struct).
    select
    -- segmented,
    utils.udf_hex_to_int(segmented[0])/32 as data_0, -- where data_0 starts
    utils.udf_hex_to_int(segmented[1])/32 as data_1, -- where data_1 starts
    utils.udf_hex_to_int(segmented[2])/32 as data_2, -- where data_2 starts
    utils.udf_hex_to_int(segmented[3])/32 as data_3, -- where data_3 starts
    utils.udf_hex_to_int(segmented[4])/32 as additionalTransfers, -- data_0
    utils.udf_hex_to_int(segmented[5]) as data_1_0, -- data_1_0
    -- Word 6 holds a Unix epoch seconds value; presumably a deadline/expiry — TODO confirm.
    to_timestamp(utils.udf_hex_to_int(segmented[6])) as data_1_1, --data_1_1
    utils.udf_hex_to_int(segmented[7]) as data_1_2, --data_1_2
    utils.udf_hex_to_int(segmented[8])/32 as data_1_3_start, -- where data 1_3 starts relative to data_1 start
    utils.udf_hex_to_int(segmented[9])/32 as data_1_4_start, -- where data 1_4 starts relative to data_1 start
    utils.udf_hex_to_int(segmented[10]) as data_1_3_0_start, -- where does 1_3_0 start relative to data_1_3
    utils.udf_hex_to_int(segmented[11])/32 as data_1_3_0_0_start, -- where does 1_3_0_0 start relative to data_1_3_0 start
    -- An address is the low 20 bytes of a word: keep the last 40 hex chars and re-prefix '0x'.
    concat('0x'||substr((segmented[12]),-40)) as enforcer_address, --data_1_3_0_0
    utils.udf_hex_to_int(segmented[13])/32 as data_1_3_0_1_start, -- (offset from current) where does 1_3_0_1 start relative to data_1_3_0 start
    -- NOTE(review): the statement is truncated in this export — the select list ends with a
    -- trailing comma and the closing 'from seg' is missing. Restore it before running.
-- QueryRunArchived: QueryRun has been archived (Flipside Studio export artifact, not SQL)