Take block hash, block index dups processing script out of the migration into a separate script #2958

Merged · 1 commit · Jan 17, 2020
CHANGELOG.md (2 changes: 1 addition & 1 deletion)
@@ -1,7 +1,7 @@
## Current

### Features
- [#2835](https://github.com/poanetwork/blockscout/pull/2835), [#2871](https://github.com/poanetwork/blockscout/pull/2871), [#2872](https://github.com/poanetwork/blockscout/pull/2872), [#2886](https://github.com/poanetwork/blockscout/pull/2886), [#2925](https://github.com/poanetwork/blockscout/pull/2925), [#2936](https://github.com/poanetwork/blockscout/pull/2936), [#2949](https://github.com/poanetwork/blockscout/pull/2949), [#2940](https://github.com/poanetwork/blockscout/pull/2940) - Add "block_hash" to logs, token_transfers and internal transactions and "pending blocks operations" approach
- [#2835](https://github.com/poanetwork/blockscout/pull/2835), [#2871](https://github.com/poanetwork/blockscout/pull/2871), [#2872](https://github.com/poanetwork/blockscout/pull/2872), [#2886](https://github.com/poanetwork/blockscout/pull/2886), [#2925](https://github.com/poanetwork/blockscout/pull/2925), [#2936](https://github.com/poanetwork/blockscout/pull/2936), [#2949](https://github.com/poanetwork/blockscout/pull/2949), [#2940](https://github.com/poanetwork/blockscout/pull/2940), [#2958](https://github.com/poanetwork/blockscout/pull/2958) - Add "block_hash" to logs, token_transfers and internal transactions and "pending blocks operations" approach
- [#2926](https://github.com/poanetwork/blockscout/pull/2926) - API endpoint: sum balances except burnt address
- [#2918](https://github.com/poanetwork/blockscout/pull/2918) - Add tokenID for tokentx API action explicitly

20191018140054_add_pending_internal_txs_operation.exs
@@ -55,76 +55,6 @@ defmodule Explorer.Repo.Migrations.AddPendingInternalTxsOperation do
DELETE FROM internal_transactions WHERE block_hash IS NULL;
""")

execute("""
DO $$
DECLARE
duplicates_count INTEGER := 0;
blocks_scanned INTEGER := 0;
int_txs_count INTEGER := 0;
temprow RECORD;
BEGIN
SELECT COUNT(*) INTO int_txs_count FROM internal_transactions;
IF int_txs_count < 10000000 THEN

FOR temprow IN
SELECT block_hash FROM internal_transactions
GROUP BY block_hash, block_index HAVING COUNT(*) > 1
LOOP
duplicates_count := duplicates_count + 1;
RAISE NOTICE '% duplicates, blocks scanned %, block #%, block hash is %', duplicates_count, blocks_scanned, temprow.number , temprow.hash;

IF NOT EXISTS (
SELECT 1 FROM pending_block_operations
WHERE block_hash = temprow.block_hash
) THEN
INSERT INTO pending_block_operations
(block_hash, inserted_at, updated_at, fetch_internal_transactions)
SELECT b.hash, now(), now(), TRUE FROM blocks b
WHERE b.hash = temprow.block_hash;
END IF;

DELETE FROM internal_transactions
WHERE block_hash = temprow.block_hash;

RAISE NOTICE 'DELETED';
END LOOP;

ELSE
FOR temprow IN SELECT number, hash FROM blocks LOOP
blocks_scanned := blocks_scanned + 1;
IF EXISTS (
SELECT 1 FROM transactions WHERE block_hash = temprow.hash
) THEN
IF EXISTS (
SELECT block_hash, block_index FROM internal_transactions
WHERE block_hash = temprow.hash
GROUP BY block_hash, block_index HAVING COUNT(*) > 1
) THEN
duplicates_count := duplicates_count + 1;
RAISE NOTICE '% duplicates, blocks scanned %, block #%, block hash is %', duplicates_count, blocks_scanned, temprow.number , temprow.hash;

IF NOT EXISTS (
SELECT 1 FROM pending_block_operations
WHERE block_hash = temprow.hash
) THEN
INSERT INTO pending_block_operations
(block_hash, inserted_at, updated_at, fetch_internal_transactions)
SELECT b.hash, now(), now(), TRUE FROM blocks b
WHERE b.hash = temprow.hash;
END IF;

DELETE FROM internal_transactions
WHERE block_hash = temprow.hash;

RAISE NOTICE 'DELETED';
END IF;
END IF;
END LOOP;
END IF;
RAISE NOTICE 'SCRIPT FINISHED';
END $$;
""")

execute("""
ALTER table internal_transactions
DROP CONSTRAINT internal_transactions_pkey,
New file: SQL script for processing {block_hash, block_index} duplicates
@@ -0,0 +1,73 @@
-- This script should be made part of the migration to the "pending_block_operations" internal transactions
-- indexing approach if the 20191018140054_add_pending_internal_txs_operation.exs migration failed while
-- setting a primary key on {block_hash, block_index}, because of occasional duplicates of that pair in the DB
-- that could exist due to bugs in previous versions of the application. If so, this script should be inserted
-- at line 57 of that migration, just before the primary key change.

DO $$
DECLARE
    duplicates_count INTEGER := 0;
    blocks_scanned INTEGER := 0;
    int_txs_count INTEGER := 0;
    temprow RECORD;
BEGIN
    SELECT COUNT(*) INTO int_txs_count FROM internal_transactions;
    IF int_txs_count < 10000000 THEN

        FOR temprow IN
            SELECT block_hash FROM internal_transactions
            GROUP BY block_hash, block_index HAVING COUNT(*) > 1
        LOOP
            duplicates_count := duplicates_count + 1;
            -- temprow carries only block_hash in this branch, so only that field is logged
            RAISE NOTICE '% duplicates, block hash is %', duplicates_count, temprow.block_hash;

            IF NOT EXISTS (
                SELECT 1 FROM pending_block_operations
                WHERE block_hash = temprow.block_hash
            ) THEN
                INSERT INTO pending_block_operations
                (block_hash, inserted_at, updated_at, fetch_internal_transactions)
                SELECT b.hash, now(), now(), TRUE FROM blocks b
                WHERE b.hash = temprow.block_hash;
            END IF;

            DELETE FROM internal_transactions
            WHERE block_hash = temprow.block_hash;

            RAISE NOTICE 'DELETED';
        END LOOP;

    ELSE
        FOR temprow IN SELECT number, hash FROM blocks LOOP
            blocks_scanned := blocks_scanned + 1;
            IF EXISTS (
                SELECT 1 FROM transactions WHERE block_hash = temprow.hash
            ) THEN
                IF EXISTS (
                    SELECT block_hash, block_index FROM internal_transactions
                    WHERE block_hash = temprow.hash
                    GROUP BY block_hash, block_index HAVING COUNT(*) > 1
                ) THEN
                    duplicates_count := duplicates_count + 1;
                    RAISE NOTICE '% duplicates, blocks scanned %, block #%, block hash is %', duplicates_count, blocks_scanned, temprow.number, temprow.hash;

                    IF NOT EXISTS (
                        SELECT 1 FROM pending_block_operations
                        WHERE block_hash = temprow.hash
                    ) THEN
                        INSERT INTO pending_block_operations
                        (block_hash, inserted_at, updated_at, fetch_internal_transactions)
                        SELECT b.hash, now(), now(), TRUE FROM blocks b
                        WHERE b.hash = temprow.hash;
                    END IF;

                    DELETE FROM internal_transactions
                    WHERE block_hash = temprow.hash;

                    RAISE NOTICE 'DELETED';
                END IF;
            END IF;
        END LOOP;
    END IF;
    RAISE NOTICE 'SCRIPT FINISHED';
END $$;
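
For reference, a quick way to check whether such {block_hash, block_index} duplicates exist before running the script is to query for repeated pairs directly. This check is not part of the PR; it is a minimal sketch built from the same GROUP BY / HAVING condition the script itself uses:

-- Not part of this PR: list up to 10 duplicated {block_hash, block_index} pairs, if any
SELECT block_hash, block_index, COUNT(*) AS occurrences
FROM internal_transactions
GROUP BY block_hash, block_index
HAVING COUNT(*) > 1
LIMIT 10;

If the query returns no rows, the migration's primary key change should succeed without this script; otherwise the script above can be run against the BlockScout database with any PostgreSQL client (for example, pasted into a psql session), since its whole body is a single DO $$ ... $$ block.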