
Commit d405933

monitor: Added token_bridge Postgres DB support
Monitor and fly publish signed VAA row keys to pub/sub topic. processVAA cloud function subscribes to topic and writes token bridge VAA data to Postgres tables.
1 parent 6bb6186 · commit d405933
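
The publishing side (the monitor and fly) is not part of this excerpt. As a rough sketch of what publishing a signed-VAA row key with @google-cloud/pubsub might look like (the helper name and the assumption that the payload is the raw row key are mine, not taken from the commit):

import { PubSub } from '@google-cloud/pubsub';

const pubsub = new PubSub();

// Hypothetical publisher sketch; the real monitor/fly code may differ.
// PUBSUB_SIGNED_VAA_TOPIC matches the env var introduced by this commit.
export async function publishSignedVaaRowKey(rowKey: string): Promise<string> {
  const topic = pubsub.topic(process.env.PUBSUB_SIGNED_VAA_TOPIC!);
  // publishMessage resolves to the server-assigned message ID.
  return topic.publishMessage({ data: Buffer.from(rowKey) });
}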

41 files changed: +2013 −675 lines

cloud-functions/deploy.sh

+3 −8

@@ -6,11 +6,6 @@ if [ -z "$GCP_PROJECT" ]; then
 exit 1
 fi
 
-if [ -z "$SERVICE_ACCOUNT" ]; then
-echo "SERVICE_ACCOUNT must be specified"
-exit 1
-fi
-
-gcloud functions --project "$GCP_PROJECT" deploy guardian-heartbeats --region=europe-west3 --entry-point Heartbeats --memory=256MB --runtime go116 --trigger-http --allow-unauthenticated --service-account="$SERVICE_ACCOUNT" --update-env-vars GCP_PROJECT="$GCP_PROJECT"
-gcloud functions --project "$GCP_PROJECT" deploy governor-status --region=europe-west3 --entry-point GovernorStatus --memory=256MB --runtime go116 --trigger-http --allow-unauthenticated --service-account="$SERVICE_ACCOUNT" --update-env-vars GCP_PROJECT="$GCP_PROJECT"
-gcloud functions --project "$GCP_PROJECT" deploy governor-configs --region=europe-west3 --entry-point GovernorConfigs --memory=256MB --runtime go116 --trigger-http --allow-unauthenticated --service-account="$SERVICE_ACCOUNT" --update-env-vars GCP_PROJECT="$GCP_PROJECT"
+gcloud functions --project "$GCP_PROJECT" deploy guardian-heartbeats --region=europe-west3 --entry-point Heartbeats --memory=256MB --runtime go116 --trigger-http --allow-unauthenticated --update-env-vars GCP_PROJECT="$GCP_PROJECT"
+gcloud functions --project "$GCP_PROJECT" deploy governor-status --region=europe-west3 --entry-point GovernorStatus --memory=256MB --runtime go116 --trigger-http --allow-unauthenticated --update-env-vars GCP_PROJECT="$GCP_PROJECT"
+gcloud functions --project "$GCP_PROJECT" deploy governor-configs --region=europe-west3 --entry-point GovernorConfigs --memory=256MB --runtime go116 --trigger-http --allow-unauthenticated --update-env-vars GCP_PROJECT="$GCP_PROJECT"

cloud_functions/.env.sample

+9

@@ -5,3 +5,12 @@ BIGTABLE_INSTANCE_ID=
 CLOUD_FUNCTIONS_NUM_ROWS=
 CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=
 CLOUD_FUNCTIONS_BLOCK_INCREMENT=
+PG_USER=
+PG_PASSWORD=
+PG_DATABASE=
+PG_HOST=
+PG_TOKEN_TRANSFER_TABLE=
+PG_ATTEST_MESSAGE_TABLE=
+PG_TOKEN_METADATA_TABLE=
+PUBSUB_SIGNED_VAA_TOPIC=
+FIRESTORE_LATEST_COLLECTION=
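
For context, the new PG_* variables describe a Postgres connection plus the three token bridge tables. A minimal sketch of how a knex client might be built from them (the actual wiring lives in the wormhole-monitor-database package and may differ; makePgClient is a hypothetical helper name):

import { knex, Knex } from 'knex';

// Hypothetical helper: build a Postgres client from the PG_* variables above.
export function makePgClient(): Knex {
  const { PG_HOST, PG_USER, PG_PASSWORD, PG_DATABASE } = process.env;
  if (!PG_HOST || !PG_USER || !PG_PASSWORD || !PG_DATABASE) {
    throw new Error('PG_HOST, PG_USER, PG_PASSWORD and PG_DATABASE must be set');
  }
  return knex({
    client: 'pg',
    connection: { host: PG_HOST, user: PG_USER, password: PG_PASSWORD, database: PG_DATABASE },
  });
}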

cloud_functions/README.md

+2 −2

@@ -1,7 +1,7 @@
 to run cloud functions locally: https://cloud.google.com/functions/docs/running/function-frameworks
 
 To deploy: see scripts/deploy.sh
-Be sure to first export env variables: BIGTABLE_INSTANCE_ID, BIGTABLE_TABLE_ID, CLOUD_FUNCTIONS_NUM_ROWS, CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL, CLOUD_FUNCTIONS_BLOCK_INCREMENT (see .env.sample)
+Be sure to first export env variables (see .env.sample)
 
 > format of a deploy command:
@@ -13,6 +13,6 @@ Be sure to first export env variables: BIGTABLE_INSTANCE_ID, BIGTABLE_TABLE_ID,
 
 gcloud functions deploy <name of cloud function> --entry-point <name of function> --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 1GB --region <location> --set-env-vars LIST_OF_ENV_VARS
 
-Note: these cloud functions are managed in conjunction with cloud storage (caches) and cloud scheduler (cron job to perdiodically compute cloud functions)
+Note: these cloud functions are managed in conjunction with cloud storage (caches) and cloud scheduler (cron job to periodically compute cloud functions)
 The compute CFs are currently deployed with --allow-unauthenticated flag so the cache can be reloaded manually, but then anyone can refresh the cache.
 TODO: figure out how to allow the cloud schedular to run the cache reload and remove the --allow-unauthenticated flag

cloud_functions/package.json

+7 −2

@@ -7,15 +7,20 @@
 "scripts": {
 "build": "tsc",
 "dev": "ts-node src/index.ts",
-"start": "npx functions-framework --target=latestBlocks [--signature-type=http]"
+"start": "npx functions-framework --target=latestBlocks [--signature-type=http]",
+"deploy": "bash scripts/deploy.sh",
+"gcp-build": "npm i ./dist/src/wormhole-foundation-wormhole-monitor-common-0.0.1.tgz ./dist/src/wormhole-foundation-wormhole-monitor-database-0.0.1.tgz"
 },
 "dependencies": {
 "@certusone/wormhole-sdk": "^0.9.8",
 "@google-cloud/bigtable": "^4.1.0",
 "@google-cloud/functions-framework": "^3.1.3",
+"@google-cloud/pubsub": "^3.4.1",
 "@google-cloud/storage": "^6.8.0",
 "dotenv": "^16.0.3",
 "firebase-admin": "^11.4.1",
-"path-to-regexp": "^6.2.1"
+"knex": "^2.4.2",
+"path-to-regexp": "^6.2.1",
+"pg": "^8.10.0"
 }
 }

cloud_functions/scripts/deploy.sh

+72 −13

@@ -2,10 +2,10 @@
 source .env
 set -e
 
+# Install and authorize the gcloud CLI: https://cloud.google.com/sdk/docs/install
+
 # SET ENV VARIABLES
-# note: load the service account key for either bigtable or firestore before running their respective CFs
-# e.g., EXPORT GOOGLE_APPLICATION_CREDENTIALS=<path-to-credentials>
-# export env variables: BIGTABLE_INSTANCE_ID, BIGTABLE_TABLE_ID, CLOUD_FUNCTIONS_NUM_ROWS, BIGTABLE_SIGNED_VAAS_TABLE_ID, BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID
+# export env variables required below
 # or source .env
 # make sure you npm run build in the root folder before trying to deploy :D
 
@@ -29,16 +29,75 @@ if [ -z "$BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID" ]; then
 exit 1
 fi
 
-# note CLOUD_FUNCTIONS_NUM_ROWS isn't required and defaults to 100 if not provided
+if [ -z "$CLOUD_FUNCTIONS_NUM_ROWS" ]; then
+echo "CLOUD_FUNCTIONS_NUM_ROWS must be specified"
+exit 1
+fi
+
+if [ -z "$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL" ]; then
+echo "CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL must be specified"
+exit 1
+fi
+
+if [ -z "$CLOUD_FUNCTIONS_BLOCK_INCREMENT" ]; then
+echo "CLOUD_FUNCTIONS_BLOCK_INCREMENT must be specified"
+exit 1
+fi
+
+if [ -z "$PG_USER" ]; then
+echo "PG_USER must be specified"
+exit 1
+fi
+
+if [ -z "$PG_PASSWORD" ]; then
+echo "PG_PASSWORD must be specified"
+exit 1
+fi
+
+if [ -z "$PG_DATABASE" ]; then
+echo "PG_DATABASE must be specified"
+exit 1
+fi
 
-# for initial deployment
-# echo "gcloud functions deploy messages --entry-point getMessages --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 1GB --region europe-west3 --set-env-vars BIGTABLE_TABLE_ID=$BIGTABLE_TABLE_ID,BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,CLOUD_FUNCTIONS_NUM_ROWS=$CLOUD_FUNCTIONS_NUM_ROWS,CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL,CLOUD_FUNCTIONS_BLOCK_INCREMENT=$CLOUD_FUNCTIONS_BLOCK_INCREMENT"
+if [ -z "$PG_HOST" ]; then
+echo "PG_HOST must be specified"
+exit 1
+fi
+
+if [ -z "$PG_TOKEN_TRANSFER_TABLE" ]; then
+echo "PG_TOKEN_TRANSFER_TABLE must be specified"
+exit 1
+fi
+
+if [ -z "$PG_ATTEST_MESSAGE_TABLE" ]; then
+echo "PG_ATTEST_MESSAGE_TABLE must be specified"
+exit 1
+fi
+
+if [ -z "$PG_TOKEN_METADATA_TABLE" ]; then
+echo "PG_TOKEN_METADATA_TABLE must be specified"
+exit 1
+fi
+
+if [ -z "$PUBSUB_SIGNED_VAA_TOPIC" ]; then
+echo "PUBSUB_SIGNED_VAA_TOPIC must be specified"
+exit 1
+fi
+
+if [ -z "$FIRESTORE_LATEST_COLLECTION" ]; then
+echo "FIRESTORE_LATEST_COLLECTION must be specified"
+exit 1
+fi
 
-gcloud functions deploy messages --entry-point getMessages --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 1GB --region europe-west3 --set-env-vars BIGTABLE_TABLE_ID=$BIGTABLE_TABLE_ID,BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,CLOUD_FUNCTIONS_NUM_ROWS=$CLOUD_FUNCTIONS_NUM_ROWS,CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL,CLOUD_FUNCTIONS_BLOCK_INCREMENT=$CLOUD_FUNCTIONS_BLOCK_INCREMENT
-gcloud functions deploy message-counts --entry-point getMessageCounts --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 256MB --region europe-west3
-gcloud functions deploy compute-message-counts --entry-point computeMessageCounts --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 4GB --region europe-west3 --set-env-vars BIGTABLE_TABLE_ID=$BIGTABLE_TABLE_ID,BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL
-gcloud functions deploy latest-blocks --entry-point getLatestBlocks --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 256MB --region europe-west3 --set-env-vars CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL,FIRESTORE_LATEST_COLLECTION=$FIRESTORE_LATEST_COLLECTION
-gcloud functions deploy compute-missing-vaas --entry-point computeMissingVaas --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 2GB --region europe-west3 --set-env-vars BIGTABLE_TABLE_ID=$BIGTABLE_TABLE_ID,BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL
-gcloud functions deploy missing-vaas --entry-point getMissingVaas --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 256MB --region europe-west3
-gcloud functions deploy vaas-by-tx-hash --entry-point getVaasByTxHash --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 256MB --region europe-west3 --set-env-vars BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,BIGTABLE_SIGNED_VAAS_TABLE_ID=$BIGTABLE_SIGNED_VAAS_TABLE_ID,BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID=$BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID
+# Hack to make these packages available in the GCP build until they're published
+npm pack --silent --workspace @wormhole-foundation/wormhole-monitor-common --pack-destination ./dist/src
+npm pack --silent --workspace @wormhole-foundation/wormhole-monitor-database --pack-destination ./dist/src
 
+gcloud functions deploy messages --entry-point getMessages --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 1GB --region europe-west3 --set-env-vars BIGTABLE_TABLE_ID=$BIGTABLE_TABLE_ID,BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,CLOUD_FUNCTIONS_NUM_ROWS=$CLOUD_FUNCTIONS_NUM_ROWS,CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL,CLOUD_FUNCTIONS_BLOCK_INCREMENT=$CLOUD_FUNCTIONS_BLOCK_INCREMENT
+gcloud functions deploy message-counts --entry-point getMessageCounts --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 256MB --region europe-west3
+gcloud functions deploy compute-message-counts --entry-point computeMessageCounts --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 4GB --region europe-west3 --set-env-vars BIGTABLE_TABLE_ID=$BIGTABLE_TABLE_ID,BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL
+gcloud functions deploy latest-blocks --entry-point getLatestBlocks --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 256MB --region europe-west3 --set-env-vars CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL,FIRESTORE_LATEST_COLLECTION=$FIRESTORE_LATEST_COLLECTION
+gcloud functions deploy compute-missing-vaas --entry-point computeMissingVaas --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 2GB --region europe-west3 --set-env-vars BIGTABLE_TABLE_ID=$BIGTABLE_TABLE_ID,BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL=$CLOUD_FUNCTIONS_REFRESH_TIME_INTERVAL
+gcloud functions deploy missing-vaas --entry-point getMissingVaas --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 256MB --region europe-west3
+gcloud functions deploy vaas-by-tx-hash --entry-point getVaasByTxHash --runtime nodejs16 --trigger-http --allow-unauthenticated --timeout 300 --memory 256MB --region europe-west3 --set-env-vars BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,BIGTABLE_SIGNED_VAAS_TABLE_ID=$BIGTABLE_SIGNED_VAAS_TABLE_ID,BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID=$BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID
+gcloud functions deploy process-vaa --entry-point processVaa --runtime nodejs16 --timeout 300 --memory 256MB --region europe-west3 --trigger-topic $PUBSUB_SIGNED_VAA_TOPIC --set-env-vars BIGTABLE_INSTANCE_ID=$BIGTABLE_INSTANCE_ID,BIGTABLE_SIGNED_VAAS_TABLE_ID=$BIGTABLE_SIGNED_VAAS_TABLE_ID,BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID=$BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID,PG_USER=$PG_USER,PG_PASSWORD=$PG_PASSWORD,PG_DATABASE=$PG_DATABASE,PG_HOST=$PG_HOST,PG_TOKEN_TRANSFER_TABLE=$PG_TOKEN_TRANSFER_TABLE,PG_ATTEST_MESSAGE_TABLE=$PG_ATTEST_MESSAGE_TABLE,PG_TOKEN_METADATA_TABLE=$PG_TOKEN_METADATA_TABLE
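
The process-vaa function is deployed with --trigger-topic rather than --trigger-http, so its entry point receives Pub/Sub messages instead of HTTP requests. The processVaa implementation itself is not shown in this excerpt; a minimal sketch of such a handler, assuming the classic Node.js background-function signature and that the payload is a signed-VAA row key:

// Hypothetical sketch; the real processVaa in this commit may differ.
export async function processVaa(message: { data: string }, context: unknown) {
  // Pub/Sub delivers the payload base64-encoded; per the commit message it is
  // the row key of a signed VAA published by the monitor/fly.
  const rowKey = Buffer.from(message.data, 'base64').toString();

  // From here the handler would fetch the signed VAA bytes from the
  // BIGTABLE_SIGNED_VAAS_TABLE_ID table, parse the payload, and write token
  // transfers / attestations / token metadata to the matching PG_* tables.
  console.log(`received signed VAA row key ${rowKey}`);
}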

cloud_functions/src/computeMessageCounts.ts

+1 −1

@@ -6,7 +6,7 @@ import { Storage } from '@google-cloud/storage';
 // Read/write with cloud storage
 const storage = new Storage();
 // The ID of your GCS bucket
-const bucketName = 'observed-blocks-cache';
+const bucketName = 'wormhole-observed-blocks-cache';
 const cacheBucket = storage.bucket(bucketName);
 const cacheFileName = 'message-counts-cache.json';
 const cloudStorageCache = cacheBucket.file(cacheFileName);

cloud_functions/src/computeMissingVaas.ts

+1 −10

@@ -6,7 +6,7 @@ import { ObservedMessage } from './types';
 
 // Read/write to cloud storage
 const storage = new Storage();
-const bucketName = 'observed-blocks-cache';
+const bucketName = 'wormhole-observed-blocks-cache';
 const cacheBucket = storage.bucket(bucketName);
 const cacheFileName = 'missing-vaas-cache.json';
 // The ID of your GCS bucket
@@ -24,10 +24,6 @@ async function getMissingVaas_(prevMissingVaas: MissingVaasByChain): Promise<Mis
 const bigtable = new Bigtable();
 const instance = bigtable.instance(assertEnvironmentVariable('BIGTABLE_INSTANCE_ID'));
 const table = instance.table(assertEnvironmentVariable('BIGTABLE_TABLE_ID'));
-console.log(
-assertEnvironmentVariable('BIGTABLE_INSTANCE_ID'),
-assertEnvironmentVariable('BIGTABLE_TABLE_ID')
-);
 // build range values for each chain based on first missing vaa row key
 let missingVaaRanges: { ranges: { start: string; end: string }[] } = {
 ranges: [],
@@ -45,7 +41,6 @@ async function getMissingVaas_(prevMissingVaas: MissingVaasByChain): Promise<Mis
 let missingMessages: MissingVaasByChain = {};
 const [missingVaaObservedMessages] = await table.getRows(missingVaaRanges);
 for (const [chainName, chainId] of Object.entries(CHAINS)) {
-console.log(chainName, chainId);
 let lastRowKey = '';
 let missingMessagesByChain: ObservedMessage[] = [];
 const messagesByChain = missingVaaObservedMessages.filter(
@@ -75,8 +70,6 @@ async function getMissingVaas_(prevMissingVaas: MissingVaasByChain): Promise<Mis
 }
 
 lastRowKey = missingVaaMessagesByChain[missingVaaMessagesByChain.length - 1]?.id;
-} else {
-console.log('no missing vaas');
 }
 // update counts
 if (lastRowKey === '') {
@@ -119,10 +112,8 @@ export async function computeMissingVaas(req: any, res: any) {
 let cache = { messages: {} as MissingVaasByChain };
 
 if (reloadCache === 'true' || reloadCache === '1') {
-console.log('emptying the caches');
 cache = { messages: {} as MissingVaasByChain };
 } else {
-console.log('loading from cache bucket');
 const [csCache] = await cloudStorageCache.download();
 cache = { messages: JSON.parse(csCache.toString()) };
 }

cloud_functions/src/getMessageCounts.ts

+1 −1

@@ -27,7 +27,7 @@ export async function getMessageCounts(req: any, res: any) {
 let messages: CountsByChain = {};
 try {
 // The ID of your GCS bucket
-const bucketName = 'observed-blocks-cache';
+const bucketName = 'wormhole-observed-blocks-cache';
 const cacheBucket = storage.bucket(bucketName);
 const cacheFileName = 'message-counts-cache.json';
 const cloudStorageCache = cacheBucket.file(cacheFileName);

cloud_functions/src/getMissingVaas.ts

+1 −1

@@ -27,7 +27,7 @@ export async function getMissingVaas(req: any, res: any) {
 let messages: MissingVaasByChain = {};
 try {
 // The ID of your GCS bucket
-const bucketName = 'observed-blocks-cache';
+const bucketName = 'wormhole-observed-blocks-cache';
 const cacheBucket = storage.bucket(bucketName);
 const cacheFileName = 'missing-vaas-cache.json';
cloud_functions/src/getVaasByTxHash.ts

+20 −7

@@ -1,12 +1,22 @@
-import { Bigtable } from '@google-cloud/bigtable';
+import { Bigtable, Instance, Table } from '@google-cloud/bigtable';
 import { assertEnvironmentVariable } from './utils';
 
-const bigtable = new Bigtable();
-const instance = bigtable.instance(assertEnvironmentVariable('BIGTABLE_INSTANCE_ID'));
-const vaasByTxHashTable = instance.table(
-assertEnvironmentVariable('BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID')
-);
-const signedVAAsTable = instance.table(assertEnvironmentVariable('BIGTABLE_SIGNED_VAAS_TABLE_ID'));
+let initialized = false;
+let bigtable: Bigtable;
+let instance: Instance;
+let vaasByTxHashTable: Table;
+let signedVAAsTable: Table;
+
+function initialize() {
+bigtable = new Bigtable();
+instance = bigtable.instance(assertEnvironmentVariable('BIGTABLE_INSTANCE_ID'));
+vaasByTxHashTable = instance.table(
+assertEnvironmentVariable('BIGTABLE_VAAS_BY_TX_HASH_TABLE_ID')
+);
+signedVAAsTable = instance.table(assertEnvironmentVariable('BIGTABLE_SIGNED_VAAS_TABLE_ID'));
+console.log('initialized global variables');
+initialized = true;
+}
 
 export async function getVaasByTxHash(req: any, res: any) {
 res.set('Access-Control-Allow-Origin', '*');
@@ -19,6 +29,9 @@ export async function getVaasByTxHash(req: any, res: any) {
 return;
 }
 try {
+if (!initialized) {
+initialize();
+}
 const txHash = req.query.tx;
 if (!txHash) {
 res.status(400);

cloud_functions/src/index.ts

+1

@@ -9,6 +9,7 @@ export const { getMissingVaas } = require('./getMissingVaas');
 export const { computeMissingVaas } = require('./computeMissingVaas');
 export const { computeMessageCounts } = require('./computeMessageCounts');
 export const { getVaasByTxHash } = require('./getVaasByTxHash');
+export const { processVaa } = require('./processVaa');
 
 // Register an HTTP function with the Functions Framework that will be executed
 // when you make an HTTP request to the deployed function's endpoint.
