diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index 2e64c2e9a8..d75d0a4e54 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -22,6 +22,7 @@ jobs: env: AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + DATABASE_URL: ${{ secrets.DATABASE_URL }} INFURA_CONNECTION: ${{ secrets.INFURA_CONNECTION }} ALCHEMY_CONNECTION_POLYGON: ${{ secrets.ALCHEMY_CONNECTION_POLYGON }} ALCHEMY_CONNECTION_ARBITRUM: ${{ secrets.ALCHEMY_CONNECTION_ARBITRUM }} diff --git a/.gitignore b/.gitignore index 631bfc8d9f..dda0acaa18 100755 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,5 @@ ccImages .idea/ *output.json *.csv +*.sql +scripts/*.json diff --git a/beforeTests.js b/beforeTests.js index 28a400004d..420f902aad 100644 --- a/beforeTests.js +++ b/beforeTests.js @@ -35,9 +35,9 @@ module.exports = async function () { global.uniquePoolIdentifiersDB = new Set( ( await axios.get( - 'https://1rwmj4tky9.execute-api.eu-central-1.amazonaws.com/simplePools' + 'https://1rwmj4tky9.execute-api.eu-central-1.amazonaws.com/distinctID' ) - ).data.data + ).data .filter((p) => p.project !== global.apy[0].project) .map((p) => p.pool) ); diff --git a/env.js b/env.js index dc27da7fae..5a7ca06ce8 100644 --- a/env.js +++ b/env.js @@ -19,4 +19,6 @@ module.exports = { .readdirSync('./src/adaptors') .filter((el) => !el.includes('js') && el !== '.DS_Store') ), + // DB + DATABASE_URL: process.env.DATABASE_URL, }; diff --git a/migrations/1661488110733_init.js b/migrations/1661488110733_init.js new file mode 100644 index 0000000000..00f1473595 --- /dev/null +++ b/migrations/1661488110733_init.js @@ -0,0 +1,138 @@ +const { PgLiteral } = require('node-pg-migrate'); + +exports.up = (pgm) => { + // ----- ADD UUID EXTENSION + pgm.createExtension('uuid-ossp', { + ifNotExists: true, + }); + // ----- CREATE TABLES + // --- config + // table with static/semi-static information and consists of 1 row per unique pool. + // operations on this table: insert for new pools, update for existing pools + pgm.createTable('config', { + config_id: { + type: 'uuid', // uuid is created in the application + primaryKey: true, + }, + updated_at: { + type: 'timestamptz', + notNull: true, + default: pgm.func('current_timestamp'), + }, + pool: { type: 'text', notNull: true, unique: true }, + project: { type: 'text', notNull: true }, + chain: { type: 'text', notNull: true }, + symbol: { type: 'text', notNull: true }, + poolMeta: 'text', + underlyingTokens: { type: 'text[]' }, + rewardTokens: { type: 'text[]' }, + url: { type: 'text', notNull: true }, + }); + + // --- yield + // our timeseries table. 
insert only on hourly granularity + pgm.createTable('yield', { + yield_id: { + type: 'uuid', + default: new PgLiteral('uuid_generate_v4()'), + primaryKey: true, + }, + // configID is a FK in this table and references the PK (config_id) in config + configID: { + type: 'uuid', + notNull: true, + references: '"config"', + onDelete: 'cascade', + }, + timestamp: { + type: 'timestamptz', + notNull: true, + }, + tvlUsd: { type: 'bigint', notNull: true }, + apy: { type: 'numeric', notNull: true }, + apyBase: 'numeric', + apyReward: 'numeric', + }); + + // --- stat + // table which contains rolling statistics required to calculate ML features values + // and other things we use for plotting on the /overview page + pgm.createTable('stat', { + stat_id: { + type: 'uuid', + default: new PgLiteral('uuid_generate_v4()'), + primaryKey: true, + }, + // configID is a FK in this table and references the PK (config_id) in config + configID: { + type: 'uuid', + notNull: true, + references: '"config"', + unique: true, + onDelete: 'cascade', + }, + updated_at: { + type: 'timestamptz', + notNull: true, + default: pgm.func('current_timestamp'), + }, + count: { type: 'smallint', notNull: true }, + meanAPY: { type: 'numeric', notNull: true }, + mean2APY: { type: 'numeric', notNull: true }, + meanDR: { type: 'numeric', notNull: true }, + mean2DR: { type: 'numeric', notNull: true }, + productDR: { type: 'numeric', notNull: true }, + }); + + // --- median + // median table content is used for the median chart on /overview (append only) + pgm.createTable('median', { + median_id: { + type: 'uuid', + default: new PgLiteral('uuid_generate_v4()'), + primaryKey: true, + }, + timestamp: { + type: 'timestamptz', + notNull: true, + unique: true, + }, + uniquePools: { type: 'integer', notNull: true }, + medianAPY: { type: 'numeric', notNull: true }, + }); + + // ----- FUNCTION + // for creating the updated_at timestamp field + pgm.createFunction( + 'update_updated_at', + [], // no params + // options + { + language: 'plpgsql', + returns: 'TRIGGER', + replace: true, + }, + // function body + ` + BEGIN + NEW.updated_at = now(); + RETURN NEW; + END + ` + ); + + // ----- TRIGGERS; + // to trigger the defined function + pgm.createTrigger('config', 'update_updated_at', { + when: 'BEFORE', + operation: 'UPDATE', + function: 'update_updated_at', + level: 'ROW', + }); + pgm.createTrigger('stat', 'update_updated_at', { + when: 'BEFORE', + operation: 'UPDATE', + function: 'update_updated_at', + level: 'ROW', + }); +}; diff --git a/migrations/1662350452113_add-yield-index.js b/migrations/1662350452113_add-yield-index.js new file mode 100644 index 0000000000..2c58a0c145 --- /dev/null +++ b/migrations/1662350452113_add-yield-index.js @@ -0,0 +1,8 @@ +exports.up = (pgm) => { + // composite index for yield; + // added after ingestion of historical data + pgm.createIndex('yield', [ + { name: 'configID', sort: 'ASC' }, + { name: 'timestamp', sort: 'DESC' }, + ]); +}; diff --git a/package-lock.json b/package-lock.json index 21c2303648..17cced622e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -22,9 +22,8 @@ "graphql": "^15.5.1", "graphql-request": "^3.5.0", "lodash": "^4.17.21", - "mongoose": "^5.11.13", "node-fetch": "^2.6.1", - "saslprep": "^1.0.3", + "pg-promise": "^10.11.1", "simple-statistics": "^7.7.5", "superagent": "^6.1.0", "web3": "^1.4.0" @@ -47,6 +46,9 @@ "eslint-plugin-prettier": "^3.3.1", "eslint-plugin-react": "^7.22.0", "jest": "^28.1.3", + "node-pg-migrate": "^6.2.2", + "pg": "^8.8.0", + "pg-native": "^3.0.1", 
"prettier": "^2.2.1", "serverless": "^3.8.0", "serverless-prune-plugin": "^2.0.1", @@ -3798,9 +3800,9 @@ } }, "node_modules/@sinclair/typebox": { - "version": "0.24.34", - "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.24.34.tgz", - "integrity": "sha512-x3ejWKw7rpy30Bvm6U0AQMOHdjqe2E3YJrBHlTxH0KFsp77bBa+MH324nJxtXZFpnTy/JW2h5HPYVm0vG2WPnw==" + "version": "0.24.35", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.24.35.tgz", + "integrity": "sha512-iN6ehuDndiTiDz2F+Orv/+oHJR+PrGv+38oghCddpsW4YEZl5qyLsWxSwYUWrKEOfjpGtXDFW6scJtjpzSLeSw==" }, "node_modules/@sindresorhus/is": { "version": "0.14.0", @@ -3934,14 +3936,6 @@ "@types/node": "*" } }, - "node_modules/@types/bson": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/@types/bson/-/bson-4.0.5.tgz", - "integrity": "sha512-vVLwMUqhYJSQ/WKcE60eFqcyuWse5fGH+NMAXHuKrUAPoryq3ATxk5o4bgYNtg5aOM4APVg7Hnb3ASqUYG0PKg==", - "dependencies": { - "@types/node": "*" - } - }, "node_modules/@types/cacheable-request": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz", @@ -4055,19 +4049,10 @@ "integrity": "sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw==", "peer": true }, - "node_modules/@types/mongodb": { - "version": "3.6.20", - "resolved": "https://registry.npmjs.org/@types/mongodb/-/mongodb-3.6.20.tgz", - "integrity": "sha512-WcdpPJCakFzcWWD9juKoZbRtQxKIMYF/JIAM4JrNHrMcnJL6/a2NWjXxW7fo9hxboxxkg+icff8d7+WIEvKgYQ==", - "dependencies": { - "@types/bson": "*", - "@types/node": "*" - } - }, "node_modules/@types/node": { - "version": "18.7.14", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.14.tgz", - "integrity": "sha512-6bbDaETVi8oyIARulOE9qF1/Qdi/23z6emrUh0fNJRUmjznqrixD4MpGDdgOFk5Xb0m2H6Xu42JGdvAxaJR/wA==" + "version": "18.7.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.15.tgz", + "integrity": "sha512-XnjpaI8Bgc3eBag2Aw4t2Uj/49lLBSStHWfqKvIuXD7FIrZyMLWp8KuAFHAqxMZYTF9l08N1ctUn9YNybZJVmQ==" }, "node_modules/@types/pbkdf2": { "version": "3.1.0", @@ -4077,6 +4062,17 @@ "@types/node": "*" } }, + "node_modules/@types/pg": { + "version": "8.6.5", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.5.tgz", + "integrity": "sha512-tOkGtAqRVkHa/PVZicq67zuujI4Oorfglsr2IbKofDwBSysnaqSx7W1mDqFqdkGE6Fbgh+PZAl0r/BWON/mozw==", + "dev": true, + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, "node_modules/@types/prettier": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.0.tgz", @@ -5002,6 +4998,14 @@ "resolved": "https://registry.npmjs.org/bn.js/-/bn.js-4.12.0.tgz", "integrity": "sha512-c98Bf3tPniI+scsdk237ku1Dc3ujXQTSgyiPUDEOe7tRkhrqridvh8klBv0HCEso1OLOYcHuCv/cS6DNxKH+ZA==" }, + "node_modules/assert-options": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/assert-options/-/assert-options-0.7.0.tgz", + "integrity": "sha512-7q9uNH/Dh8gFgpIIb9ja8PJEWA5AQy3xnBC8jtKs8K/gNVCr1K6kIvlm59HUyYgvM7oEDoLzGgPcGd9FqhtXEQ==", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", @@ -5466,13 +5470,62 @@ "node": ">=8" } }, + "node_modules/bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": 
"sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "devOptional": true, + "dependencies": { + "file-uri-to-path": "1.0.0" + } + }, "node_modules/bl": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz", - "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, "dependencies": { - "readable-stream": "^2.3.5", - "safe-buffer": "^5.1.1" + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bl/node_modules/buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/bl/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" } }, "node_modules/blakejs": { @@ -5481,9 +5534,9 @@ "integrity": "sha512-QXUSXI3QVc/gJME0dBpXrag1kbzOqCjCX8/b54ntNyW6sjtoqxqRk3LTmXzaJoh71zMsDCjM+47jS7XiwN/+fQ==" }, "node_modules/bluebird": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.1.tgz", - "integrity": "sha512-MKiLiV+I1AA596t9w1sQJ8jkiSr5+ZKi0WKrYGUn6d1Fx+Ij4tIj+m2WMQSGczs5jZVxV339chE8iwk6F64wjA==" + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" }, "node_modules/bn.js": { "version": "5.2.1", @@ -5747,14 +5800,6 @@ "node-int64": "^0.4.0" } }, - "node_modules/bson": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.6.tgz", - "integrity": "sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg==", - "engines": { - "node": ">=0.6.19" - } - }, "node_modules/buffer": { "version": "4.9.2", "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", @@ -5807,6 +5852,14 @@ "resolved": "https://registry.npmjs.org/buffer-to-arraybuffer/-/buffer-to-arraybuffer-0.0.5.tgz", "integrity": "sha512-3dthu5CYiVB1DEJp61FtApNnNndTckcqe4pFcLdvHtrpG+kcyekCJKg4MRiDcFW7A6AODnXB9U4dwQiCW5kzJQ==" }, + "node_modules/buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "engines": { + "node": ">=4" + } + }, "node_modules/buffer-xor": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", @@ -5969,9 +6022,9 @@ } 
}, "node_modules/caniuse-lite": { - "version": "1.0.30001388", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001388.tgz", - "integrity": "sha512-znVbq4OUjqgLxMxoNX2ZeeLR0d7lcDiE5uJ4eUiWdml1J1EkxbnQq6opT9jb9SMfJxB0XA16/ziHwni4u1I3GQ==", + "version": "1.0.30001390", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001390.tgz", + "integrity": "sha512-sS4CaUM+/+vqQUlCvCJ2WtDlV81aWtHhqeEVkLokVJJa3ViN4zDxAGfq9R8i1m90uGHxo99cy10Od+lvn3hf0g==", "dev": true, "funding": [ { @@ -6064,6 +6117,20 @@ "node": ">=4" } }, + "node_modules/child-process-ext/node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/child-process-ext/node_modules/semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", @@ -6094,6 +6161,15 @@ "node": ">=0.10.0" } }, + "node_modules/child-process-ext/node_modules/split2": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", + "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", + "dev": true, + "dependencies": { + "readable-stream": "^3.0.0" + } + }, "node_modules/child-process-ext/node_modules/which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", @@ -6620,7 +6696,8 @@ "node_modules/core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "devOptional": true }, "node_modules/cors": { "version": "2.8.5", @@ -6831,10 +6908,10 @@ } }, "node_modules/decamelize": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", - "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", - "peer": true, + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-5.0.1.tgz", + "integrity": "sha512-VfxadyCECXgQlkoEAjeghAr5gY3Hf+IKjKb+X8tGVDtveCjN+USwprd2q3QXBR9T1+x2DG0XZF5/w+7HAtSaXA==", + "dev": true, "engines": { "node": ">=10" }, @@ -7168,14 +7245,6 @@ "node": ">=0.4.0" } }, - "node_modules/denque": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", - "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==", - "engines": { - "node": ">=0.10" - } - }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -7370,9 +7439,9 @@ "dev": true }, "node_modules/electron/node_modules/@types/node": { - "version": "14.18.26", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.26.tgz", - "integrity": "sha512-0b+utRBSYj8L7XAp0d+DX7lI4cSmowNaaTkk6/1SKzbKkG+doLuPusB9EOvzLJ8ahJSk03bTLIL6cWaEd4dBKA==", + "version": "14.18.27", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.27.tgz", + "integrity": 
"sha512-DcTUcwT9xEcf4rp2UHyGAcmlqG4Mhe7acozl5vY2xzSrwP1z19ZVyjzQ6DsNUrvIadpiyZoQCTHFt4t2omYIZQ==", "dev": true }, "node_modules/elliptic": { @@ -7953,9 +8022,9 @@ } }, "node_modules/eslint-plugin-react": { - "version": "7.31.4", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.4.tgz", - "integrity": "sha512-2ry4HTT+c+hSgpnV2DXj3d5oAmH11KH8HHQwtcfRdq6/+R3nEimvMbwAqK79eb4ZW1/hp8yC5elBusZM6li/Gg==", + "version": "7.31.6", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.6.tgz", + "integrity": "sha512-CXu4eu28sb8Sd2+cyUYsJVyDvpTlaXPG+bOzzpS9IzZKtye96AYX3ZmHQ6ayn/OAIQ/ufDJP8ElPWd63Pepn9w==", "dev": true, "dependencies": { "array-includes": "^3.1.5", @@ -8975,6 +9044,12 @@ "url": "https://github.com/sindresorhus/file-type?sponsor=1" } }, + "node_modules/file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "devOptional": true + }, "node_modules/filename-reserved-regex": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz", @@ -10700,7 +10775,8 @@ "node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true }, "node_modules/isexe": { "version": "2.0.0", @@ -12578,11 +12654,6 @@ "integrity": "sha512-86GgN2vzfUu7m9Wcj63iUkuDzFNYFVmjeDm2GzWpUk+opB0pEpMsw6ePCMrhYkumz2C1ihqtZzOMAg7FiXcNoQ==", "dev": true }, - "node_modules/kareem": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.3.2.tgz", - "integrity": "sha512-STHz9P7X2L4Kwn72fA4rGyqyXdmrMSdxqHx9IXon/FXluXieaFA6KJ2upcHAHxQPQ0LeM/OjLrhFxifHewOALQ==" - }, "node_modules/keccak": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/keccak/-/keccak-3.0.2.tgz", @@ -12769,6 +12840,17 @@ "node": ">= 0.8.0" } }, + "node_modules/libpq": { + "version": "1.8.12", + "resolved": "https://registry.npmjs.org/libpq/-/libpq-1.8.12.tgz", + "integrity": "sha512-4lUY9BD9suz76mVS0kH4rRgRy620g/c9YZH5GYC3smfIpjtj6KiPuQ4IwQSHSZMMMhMM3tBFrYUrw8mHOOZVeg==", + "devOptional": true, + "hasInstallScript": true, + "dependencies": { + "bindings": "1.5.0", + "nan": "^2.14.0" + } + }, "node_modules/lie": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", @@ -13170,11 +13252,6 @@ "node": ">=12" } }, - "node_modules/memory-pager": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", - "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" - }, "node_modules/memorystream": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", @@ -13617,146 +13694,6 @@ "node": "*" } }, - "node_modules/mongodb": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.7.3.tgz", - "integrity": "sha512-Psm+g3/wHXhjBEktkxXsFMZvd3nemI0r3IPsE0bU+4//PnvNWKkzhZcEsbPcYiWqe8XqXJJEg4Tgtr7Raw67Yw==", - "dependencies": { - "bl": "^2.2.1", - "bson": "^1.1.4", - "denque": "^1.4.1", - "optional-require": "^1.1.8", - "safe-buffer": 
"^5.1.2" - }, - "engines": { - "node": ">=4" - }, - "optionalDependencies": { - "saslprep": "^1.0.0" - }, - "peerDependenciesMeta": { - "aws4": { - "optional": true - }, - "bson-ext": { - "optional": true - }, - "kerberos": { - "optional": true - }, - "mongodb-client-encryption": { - "optional": true - }, - "mongodb-extjson": { - "optional": true - }, - "snappy": { - "optional": true - } - } - }, - "node_modules/mongodb/node_modules/optional-require": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.1.8.tgz", - "integrity": "sha512-jq83qaUb0wNg9Krv1c5OQ+58EK+vHde6aBPzLvPPqJm89UQWsvSuFy9X/OSNJnFeSOKo7btE0n8Nl2+nE+z5nA==", - "dependencies": { - "require-at": "^1.0.6" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/mongoose": { - "version": "5.13.15", - "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-5.13.15.tgz", - "integrity": "sha512-cxp1Gbb8yUWkaEbajdhspSaKzAvsIvOtRlYD87GN/P2QEUhpd6bIvebi36T6M0tIVAMauNaK9SPA055N3PwF8Q==", - "dependencies": { - "@types/bson": "1.x || 4.0.x", - "@types/mongodb": "^3.5.27", - "bson": "^1.1.4", - "kareem": "2.3.2", - "mongodb": "3.7.3", - "mongoose-legacy-pluralize": "1.0.2", - "mpath": "0.8.4", - "mquery": "3.2.5", - "ms": "2.1.2", - "optional-require": "1.0.x", - "regexp-clone": "1.0.0", - "safe-buffer": "5.2.1", - "sift": "13.5.2", - "sliced": "1.0.1" - }, - "engines": { - "node": ">=4.0.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/mongoose" - } - }, - "node_modules/mongoose-legacy-pluralize": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/mongoose-legacy-pluralize/-/mongoose-legacy-pluralize-1.0.2.tgz", - "integrity": "sha512-Yo/7qQU4/EyIS8YDFSeenIvXxZN+ld7YdV9LqFVQJzTLye8unujAWPZ4NWKfFA+RNjh+wvTWKY9Z3E5XM6ZZiQ==", - "peerDependencies": { - "mongoose": "*" - } - }, - "node_modules/mongoose/node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/mpath": { - "version": "0.8.4", - "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.8.4.tgz", - "integrity": "sha512-DTxNZomBcTWlrMW76jy1wvV37X/cNNxPW1y2Jzd4DZkAaC5ZGsm8bfGfNOthcDuRJujXLqiuS6o3Tpy0JEoh7g==", - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/mquery": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/mquery/-/mquery-3.2.5.tgz", - "integrity": "sha512-VjOKHHgU84wij7IUoZzFRU07IAxd5kWJaDmyUzQlbjHjyoeK5TNeeo8ZsFDtTYnSgpW6n/nMNIHvE3u8Lbrf4A==", - "dependencies": { - "bluebird": "3.5.1", - "debug": "3.1.0", - "regexp-clone": "^1.0.0", - "safe-buffer": "5.1.2", - "sliced": "1.0.1" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/mquery/node_modules/debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/mquery/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": 
"sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -13899,7 +13836,7 @@ "version": "2.16.0", "resolved": "https://registry.npmjs.org/nan/-/nan-2.16.0.tgz", "integrity": "sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA==", - "optional": true + "devOptional": true }, "node_modules/nano-json-stream-parser": { "version": "0.1.2", @@ -14049,6 +13986,66 @@ "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", "dev": true }, + "node_modules/node-pg-migrate": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/node-pg-migrate/-/node-pg-migrate-6.2.2.tgz", + "integrity": "sha512-0WYLTXpWu2doeZhiwJUW/1u21OqAFU2CMQ8YZ8VBcJ0xrdqYAjtd8GGFe5A5DM4NJdIZsqJcLPDFqY0FQsmivw==", + "dev": true, + "dependencies": { + "@types/pg": "^8.0.0", + "decamelize": "^5.0.0", + "mkdirp": "~1.0.0", + "yargs": "~17.3.0" + }, + "bin": { + "node-pg-migrate": "bin/node-pg-migrate" + }, + "engines": { + "node": ">=12.20.0" + }, + "peerDependencies": { + "pg": ">=4.3.0 <9.0.0" + } + }, + "node_modules/node-pg-migrate/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/node-pg-migrate/node_modules/yargs": { + "version": "17.3.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.3.1.tgz", + "integrity": "sha512-WUANQeVgjLbNsEmGk20f+nlHgOqzRFpiGWVaBrYGYIGANIIu3lWjoyi0fNlFmJkvfhCZ6BXINe7/W2O2bV4iaA==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/node-pg-migrate/node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, "node_modules/node-releases": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", @@ -14349,14 +14346,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/optional-require": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.0.3.tgz", - "integrity": "sha512-RV2Zp2MY2aeYK5G+B/Sps8lW5NHAzE5QClbFP15j+PWmP+T9PxlJXBOOLoSAdgwFvS4t0aMR4vpedMkbHfh0nA==", - "engines": { - "node": ">=4" - } - }, "node_modules/optionator": { "version": "0.9.1", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", @@ -14412,41 +14401,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/ora/node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - 
"node_modules/ora/node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, "node_modules/ora/node_modules/chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -14490,20 +14444,6 @@ "node": ">=8" } }, - "node_modules/ora/node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/ora/node_modules/supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -14639,6 +14579,11 @@ "node": ">=6" } }, + "node_modules/packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, "node_modules/pako": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", @@ -14897,6 +14842,159 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, + "node_modules/pg": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.8.0.tgz", + "integrity": "sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==", + "dependencies": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.5.2", + "pg-protocol": "^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "engines": { + "node": ">= 8.0.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-connection-string": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-minify": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/pg-minify/-/pg-minify-1.6.2.tgz", + "integrity": "sha512-1KdmFGGTP6jplJoI8MfvRlfvMiyBivMRP7/ffh4a11RUFJ7kC2J0ZHlipoKiH/1hz+DVgceon9U2qbaHpPeyPg==", + "engines": { + "node": ">=8.0" + } + }, + "node_modules/pg-native": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/pg-native/-/pg-native-3.0.1.tgz", + 
"integrity": "sha512-LBVNWkNh0fVx/cienARRP2y22J5OpUsKBe0TpxzAx3arEUUdIs77aLSAHS3scS7SMaqc+OkG40CEu5fN0/cjIw==", + "devOptional": true, + "dependencies": { + "libpq": "^1.8.10", + "pg-types": "^1.12.1", + "readable-stream": "1.0.31" + } + }, + "node_modules/pg-native/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", + "devOptional": true + }, + "node_modules/pg-native/node_modules/pg-types": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-1.13.0.tgz", + "integrity": "sha512-lfKli0Gkl/+za/+b6lzENajczwZHc7D5kiUCZfgm914jipD2kIOIvEkAhZ8GrW3/TUoP9w8FHjwpPObBye5KQQ==", + "devOptional": true, + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~1.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.0", + "postgres-interval": "^1.1.0" + } + }, + "node_modules/pg-native/node_modules/postgres-array": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-1.0.3.tgz", + "integrity": "sha512-5wClXrAP0+78mcsNX3/ithQ5exKvCyK5lr5NEEEeGwwM6NJdQgzIJBVxLvRW+huFpX92F2QnZ5CcokH0VhK2qQ==", + "devOptional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pg-native/node_modules/readable-stream": { + "version": "1.0.31", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "integrity": "sha512-tco/Dwv1f/sgIgN6CWdj/restacPKNskK6yps1981ivH2ZmLYcs5o5rVzL3qaO/cSkhN8hYOMWs7+glzOLSgRg==", + "devOptional": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/pg-native/node_modules/string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==", + "devOptional": true + }, + "node_modules/pg-pool": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.2.tgz", + "integrity": "sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-promise": { + "version": "10.12.0", + "resolved": "https://registry.npmjs.org/pg-promise/-/pg-promise-10.12.0.tgz", + "integrity": "sha512-7uN64iEHrhtRcOaU/AT3925S20JzQJG2nWVK2IUz5SlhB1eNdkXjAYoQtei+5kLJo81mOWcFq7x9J9VRldp0ig==", + "dependencies": { + "assert-options": "0.7.0", + "pg": "8.8.0", + "pg-minify": "1.6.2", + "spex": "3.2.0" + }, + "engines": { + "node": ">=12.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": 
"https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "dependencies": { + "split2": "^4.1.0" + } + }, "node_modules/picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", @@ -15027,6 +15125,41 @@ "node": ">=8" } }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -15108,7 +15241,8 @@ "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true }, "node_modules/process-utils": { "version": "4.0.0", @@ -15358,6 +15492,7 @@ "version": "2.3.7", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -15445,11 +15580,6 @@ "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", "dev": true }, - "node_modules/regexp-clone": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-1.0.0.tgz", - "integrity": "sha512-TuAasHQNamyyJ2hb97IuBEif4qBHGjPHBS64sZwytpLEqtBQ1gPJTnOaQ6qmpET16cK14kkjbazl6+p0RRv0yw==" - }, "node_modules/regexp.prototype.flags": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", @@ -15539,14 +15669,6 @@ "uuid": "bin/uuid" } }, - "node_modules/require-at": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/require-at/-/require-at-1.0.6.tgz", - "integrity": "sha512-7i1auJbMUrXEAZCOQ0VNJgmcT2VOKPRl2YGJwgpHpC9CE91Mv4/4UYIUm4chGJaI381ZDq1JUicFii64Hapd8g==", - "engines": { - "node": ">=4" - } - }, "node_modules/require-directory": { "version": "2.1.1", "resolved": 
"https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -15815,17 +15937,6 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, - "node_modules/saslprep": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz", - "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==", - "dependencies": { - "sparse-bitfield": "^3.0.3" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/sax": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", @@ -16087,12 +16198,6 @@ "serverless": "1 || 2 || 3" } }, - "node_modules/serverless-prune-plugin/node_modules/bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, "node_modules/serverless-webpack": { "version": "5.9.1", "resolved": "https://registry.npmjs.org/serverless-webpack/-/serverless-webpack-5.9.1.tgz", @@ -16118,12 +16223,6 @@ "webpack": ">= 3.0.0 < 6" } }, - "node_modules/serverless-webpack/node_modules/bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, "node_modules/serverless-webpack/node_modules/fs-extra": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", @@ -16274,12 +16373,6 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, - "node_modules/serverless/node_modules/bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, "node_modules/serverless/node_modules/cacheable-request": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.2.tgz", @@ -16674,11 +16767,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/sift": { - "version": "13.5.2", - "resolved": "https://registry.npmjs.org/sift/-/sift-13.5.2.tgz", - "integrity": "sha512-+gxdEOMA2J+AI+fVsCqeNn7Tgx3M9ZN9jdi95939l1IJ8cZsqS8sqpJyOkic2SJk+1+98Uwryt/gL6XDaV+UZA==" - }, "node_modules/signal-exit": { "version": "3.0.7", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", @@ -16715,9 +16803,9 @@ } }, "node_modules/simple-git": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.13.0.tgz", - "integrity": "sha512-VYrs3joeHvWGcN3K135RpGpPjm4AHYeOrclwew6LlfHgq6ozQYIW2yMnmjf4PCgVOuSYCbXkdUjyiFawuJz8MA==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.14.0.tgz", + "integrity": "sha512-Paad1BkrI7vWhImLybDRYOHnh3WPsHSKXJpmKM+iGjjKNV91XaOdd+yIdZ/gqdzncHDEKYff4k+74oNo1R+U8Q==", "dev": true, "dependencies": { "@kwsites/file-exists": "^1.1.1", @@ -16801,11 +16889,6 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, - "node_modules/sliced": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz", - "integrity": "sha512-VZBmZP8WU3sMOZm1bdgTadsQbcscK0UM8oKxKVBs4XAhUo2Xxzm/OFMGBkPusxw9xL3Uy8LrzEqGqJhclsr0yA==" - }, "node_modules/solc": { "version": "0.7.3", "resolved": "https://registry.npmjs.org/solc/-/solc-0.7.3.tgz", @@ -16919,35 +17002,20 @@ "source-map": "^0.6.0" } }, - "node_modules/sparse-bitfield": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", - "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", - "dependencies": { - "memory-pager": "^1.0.2" + "node_modules/spex": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/spex/-/spex-3.2.0.tgz", + "integrity": "sha512-9srjJM7NaymrpwMHvSmpDeIK5GoRMX/Tq0E8aOlDPS54dDnDUIp30DrP9SphMPEETDLzEM9+4qo+KipmbtPecg==", + "engines": { + "node": ">=4.5" } }, "node_modules/split2": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", - "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", - "dev": true, - "dependencies": { - "readable-stream": "^3.0.0" - } - }, - "node_modules/split2/node_modules/readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", + "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==", "engines": { - "node": ">= 6" + "node": ">= 10.x" } }, "node_modules/sprintf-js": { @@ -17682,41 +17750,6 @@ "node": ">=6" } }, - "node_modules/tar-stream/node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/tar-stream/node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, "node_modules/tar-stream/node_modules/readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -19720,6 +19753,18 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/yargs-unparser/node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "peer": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, 
"node_modules/yargs-unparser/node_modules/is-plain-obj": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", @@ -22546,9 +22591,9 @@ } }, "@sinclair/typebox": { - "version": "0.24.34", - "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.24.34.tgz", - "integrity": "sha512-x3ejWKw7rpy30Bvm6U0AQMOHdjqe2E3YJrBHlTxH0KFsp77bBa+MH324nJxtXZFpnTy/JW2h5HPYVm0vG2WPnw==" + "version": "0.24.35", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.24.35.tgz", + "integrity": "sha512-iN6ehuDndiTiDz2F+Orv/+oHJR+PrGv+38oghCddpsW4YEZl5qyLsWxSwYUWrKEOfjpGtXDFW6scJtjpzSLeSw==" }, "@sindresorhus/is": { "version": "0.14.0", @@ -22673,14 +22718,6 @@ "@types/node": "*" } }, - "@types/bson": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/@types/bson/-/bson-4.0.5.tgz", - "integrity": "sha512-vVLwMUqhYJSQ/WKcE60eFqcyuWse5fGH+NMAXHuKrUAPoryq3ATxk5o4bgYNtg5aOM4APVg7Hnb3ASqUYG0PKg==", - "requires": { - "@types/node": "*" - } - }, "@types/cacheable-request": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz", @@ -22794,19 +22831,10 @@ "integrity": "sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw==", "peer": true }, - "@types/mongodb": { - "version": "3.6.20", - "resolved": "https://registry.npmjs.org/@types/mongodb/-/mongodb-3.6.20.tgz", - "integrity": "sha512-WcdpPJCakFzcWWD9juKoZbRtQxKIMYF/JIAM4JrNHrMcnJL6/a2NWjXxW7fo9hxboxxkg+icff8d7+WIEvKgYQ==", - "requires": { - "@types/bson": "*", - "@types/node": "*" - } - }, "@types/node": { - "version": "18.7.14", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.14.tgz", - "integrity": "sha512-6bbDaETVi8oyIARulOE9qF1/Qdi/23z6emrUh0fNJRUmjznqrixD4MpGDdgOFk5Xb0m2H6Xu42JGdvAxaJR/wA==" + "version": "18.7.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.7.15.tgz", + "integrity": "sha512-XnjpaI8Bgc3eBag2Aw4t2Uj/49lLBSStHWfqKvIuXD7FIrZyMLWp8KuAFHAqxMZYTF9l08N1ctUn9YNybZJVmQ==" }, "@types/pbkdf2": { "version": "3.1.0", @@ -22816,6 +22844,17 @@ "@types/node": "*" } }, + "@types/pg": { + "version": "8.6.5", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.5.tgz", + "integrity": "sha512-tOkGtAqRVkHa/PVZicq67zuujI4Oorfglsr2IbKofDwBSysnaqSx7W1mDqFqdkGE6Fbgh+PZAl0r/BWON/mozw==", + "dev": true, + "requires": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, "@types/prettier": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/@types/prettier/-/prettier-2.7.0.tgz", @@ -23584,6 +23623,11 @@ } } }, + "assert-options": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/assert-options/-/assert-options-0.7.0.tgz", + "integrity": "sha512-7q9uNH/Dh8gFgpIIb9ja8PJEWA5AQy3xnBC8jtKs8K/gNVCr1K6kIvlm59HUyYgvM7oEDoLzGgPcGd9FqhtXEQ==" + }, "assert-plus": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", @@ -23938,13 +23982,47 @@ "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==" }, + "bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "devOptional": true, + "requires": { + "file-uri-to-path": "1.0.0" + } + }, "bl": { - 
"version": "2.2.1", - "resolved": "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz", - "integrity": "sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", + "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", + "dev": true, "requires": { - "readable-stream": "^2.3.5", - "safe-buffer": "^5.1.1" + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + }, + "dependencies": { + "buffer": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", + "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "dev": true, + "requires": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + } } }, "blakejs": { @@ -23953,9 +24031,9 @@ "integrity": "sha512-QXUSXI3QVc/gJME0dBpXrag1kbzOqCjCX8/b54ntNyW6sjtoqxqRk3LTmXzaJoh71zMsDCjM+47jS7XiwN/+fQ==" }, "bluebird": { - "version": "3.5.1", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.1.tgz", - "integrity": "sha512-MKiLiV+I1AA596t9w1sQJ8jkiSr5+ZKi0WKrYGUn6d1Fx+Ij4tIj+m2WMQSGczs5jZVxV339chE8iwk6F64wjA==" + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" }, "bn.js": { "version": "5.2.1", @@ -24174,11 +24252,6 @@ "node-int64": "^0.4.0" } }, - "bson": { - "version": "1.1.6", - "resolved": "https://registry.npmjs.org/bson/-/bson-1.1.6.tgz", - "integrity": "sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg==" - }, "buffer": { "version": "4.9.2", "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", @@ -24228,6 +24301,11 @@ "resolved": "https://registry.npmjs.org/buffer-to-arraybuffer/-/buffer-to-arraybuffer-0.0.5.tgz", "integrity": "sha512-3dthu5CYiVB1DEJp61FtApNnNndTckcqe4pFcLdvHtrpG+kcyekCJKg4MRiDcFW7A6AODnXB9U4dwQiCW5kzJQ==" }, + "buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==" + }, "buffer-xor": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/buffer-xor/-/buffer-xor-1.0.3.tgz", @@ -24343,9 +24421,9 @@ "dev": true }, "caniuse-lite": { - "version": "1.0.30001388", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001388.tgz", - "integrity": "sha512-znVbq4OUjqgLxMxoNX2ZeeLR0d7lcDiE5uJ4eUiWdml1J1EkxbnQq6opT9jb9SMfJxB0XA16/ziHwni4u1I3GQ==", + "version": "1.0.30001390", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001390.tgz", + "integrity": "sha512-sS4CaUM+/+vqQUlCvCJ2WtDlV81aWtHhqeEVkLokVJJa3ViN4zDxAGfq9R8i1m90uGHxo99cy10Od+lvn3hf0g==", "dev": true }, "caseless": { @@ -24413,6 +24491,17 @@ "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", "dev": true }, + 
"readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dev": true, + "requires": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + } + }, "semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", @@ -24434,6 +24523,15 @@ "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", "dev": true }, + "split2": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", + "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", + "dev": true, + "requires": { + "readable-stream": "^3.0.0" + } + }, "which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", @@ -24841,7 +24939,8 @@ "core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", - "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" + "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==", + "devOptional": true }, "cors": { "version": "2.8.5", @@ -25019,10 +25118,10 @@ } }, "decamelize": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", - "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", - "peer": true + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-5.0.1.tgz", + "integrity": "sha512-VfxadyCECXgQlkoEAjeghAr5gY3Hf+IKjKb+X8tGVDtveCjN+USwprd2q3QXBR9T1+x2DG0XZF5/w+7HAtSaXA==", + "dev": true }, "decimal.js-light": { "version": "2.5.1", @@ -25289,11 +25388,6 @@ "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==" }, - "denque": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz", - "integrity": "sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw==" - }, "depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -25448,9 +25542,9 @@ }, "dependencies": { "@types/node": { - "version": "14.18.26", - "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.26.tgz", - "integrity": "sha512-0b+utRBSYj8L7XAp0d+DX7lI4cSmowNaaTkk6/1SKzbKkG+doLuPusB9EOvzLJ8ahJSk03bTLIL6cWaEd4dBKA==", + "version": "14.18.27", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.18.27.tgz", + "integrity": "sha512-DcTUcwT9xEcf4rp2UHyGAcmlqG4Mhe7acozl5vY2xzSrwP1z19ZVyjzQ6DsNUrvIadpiyZoQCTHFt4t2omYIZQ==", "dev": true } } @@ -26024,9 +26118,9 @@ } }, "eslint-plugin-react": { - "version": "7.31.4", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.4.tgz", - "integrity": "sha512-2ry4HTT+c+hSgpnV2DXj3d5oAmH11KH8HHQwtcfRdq6/+R3nEimvMbwAqK79eb4ZW1/hp8yC5elBusZM6li/Gg==", + "version": "7.31.6", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.31.6.tgz", + "integrity": "sha512-CXu4eu28sb8Sd2+cyUYsJVyDvpTlaXPG+bOzzpS9IzZKtye96AYX3ZmHQ6ayn/OAIQ/ufDJP8ElPWd63Pepn9w==", 
"dev": true, "requires": { "array-includes": "^3.1.5", @@ -26742,6 +26836,12 @@ "token-types": "^4.1.1" } }, + "file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "devOptional": true + }, "filename-reserved-regex": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz", @@ -27979,7 +28079,8 @@ "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", - "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" + "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "dev": true }, "isexe": { "version": "2.0.0", @@ -29391,11 +29492,6 @@ "integrity": "sha512-86GgN2vzfUu7m9Wcj63iUkuDzFNYFVmjeDm2GzWpUk+opB0pEpMsw6ePCMrhYkumz2C1ihqtZzOMAg7FiXcNoQ==", "dev": true }, - "kareem": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/kareem/-/kareem-2.3.2.tgz", - "integrity": "sha512-STHz9P7X2L4Kwn72fA4rGyqyXdmrMSdxqHx9IXon/FXluXieaFA6KJ2upcHAHxQPQ0LeM/OjLrhFxifHewOALQ==" - }, "keccak": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/keccak/-/keccak-3.0.2.tgz", @@ -29526,6 +29622,16 @@ "type-check": "~0.4.0" } }, + "libpq": { + "version": "1.8.12", + "resolved": "https://registry.npmjs.org/libpq/-/libpq-1.8.12.tgz", + "integrity": "sha512-4lUY9BD9suz76mVS0kH4rRgRy620g/c9YZH5GYC3smfIpjtj6KiPuQ4IwQSHSZMMMhMM3tBFrYUrw8mHOOZVeg==", + "devOptional": true, + "requires": { + "bindings": "1.5.0", + "nan": "^2.14.0" + } + }, "lie": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/lie/-/lie-3.3.0.tgz", @@ -29852,11 +29958,6 @@ "module-error": "^1.0.1" } }, - "memory-pager": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz", - "integrity": "sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg==" - }, "memorystream": { "version": "0.3.1", "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", @@ -30193,95 +30294,6 @@ "integrity": "sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==", "optional": true }, - "mongodb": { - "version": "3.7.3", - "resolved": "https://registry.npmjs.org/mongodb/-/mongodb-3.7.3.tgz", - "integrity": "sha512-Psm+g3/wHXhjBEktkxXsFMZvd3nemI0r3IPsE0bU+4//PnvNWKkzhZcEsbPcYiWqe8XqXJJEg4Tgtr7Raw67Yw==", - "requires": { - "bl": "^2.2.1", - "bson": "^1.1.4", - "denque": "^1.4.1", - "optional-require": "^1.1.8", - "safe-buffer": "^5.1.2", - "saslprep": "^1.0.0" - }, - "dependencies": { - "optional-require": { - "version": "1.1.8", - "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.1.8.tgz", - "integrity": "sha512-jq83qaUb0wNg9Krv1c5OQ+58EK+vHde6aBPzLvPPqJm89UQWsvSuFy9X/OSNJnFeSOKo7btE0n8Nl2+nE+z5nA==", - "requires": { - "require-at": "^1.0.6" - } - } - } - }, - "mongoose": { - "version": "5.13.15", - "resolved": "https://registry.npmjs.org/mongoose/-/mongoose-5.13.15.tgz", - "integrity": "sha512-cxp1Gbb8yUWkaEbajdhspSaKzAvsIvOtRlYD87GN/P2QEUhpd6bIvebi36T6M0tIVAMauNaK9SPA055N3PwF8Q==", - "requires": { - "@types/bson": "1.x || 4.0.x", - "@types/mongodb": "^3.5.27", - "bson": "^1.1.4", - "kareem": "2.3.2", - "mongodb": "3.7.3", - 
"mongoose-legacy-pluralize": "1.0.2", - "mpath": "0.8.4", - "mquery": "3.2.5", - "ms": "2.1.2", - "optional-require": "1.0.x", - "regexp-clone": "1.0.0", - "safe-buffer": "5.2.1", - "sift": "13.5.2", - "sliced": "1.0.1" - }, - "dependencies": { - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - } - } - }, - "mongoose-legacy-pluralize": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/mongoose-legacy-pluralize/-/mongoose-legacy-pluralize-1.0.2.tgz", - "integrity": "sha512-Yo/7qQU4/EyIS8YDFSeenIvXxZN+ld7YdV9LqFVQJzTLye8unujAWPZ4NWKfFA+RNjh+wvTWKY9Z3E5XM6ZZiQ==", - "requires": {} - }, - "mpath": { - "version": "0.8.4", - "resolved": "https://registry.npmjs.org/mpath/-/mpath-0.8.4.tgz", - "integrity": "sha512-DTxNZomBcTWlrMW76jy1wvV37X/cNNxPW1y2Jzd4DZkAaC5ZGsm8bfGfNOthcDuRJujXLqiuS6o3Tpy0JEoh7g==" - }, - "mquery": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/mquery/-/mquery-3.2.5.tgz", - "integrity": "sha512-VjOKHHgU84wij7IUoZzFRU07IAxd5kWJaDmyUzQlbjHjyoeK5TNeeo8ZsFDtTYnSgpW6n/nMNIHvE3u8Lbrf4A==", - "requires": { - "bluebird": "3.5.1", - "debug": "3.1.0", - "regexp-clone": "^1.0.0", - "safe-buffer": "5.1.2", - "sliced": "1.0.1" - }, - "dependencies": { - "debug": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/debug/-/debug-3.1.0.tgz", - "integrity": "sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g==", - "requires": { - "ms": "2.0.0" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - } - } - }, "ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", @@ -30390,7 +30402,7 @@ "version": "2.16.0", "resolved": "https://registry.npmjs.org/nan/-/nan-2.16.0.tgz", "integrity": "sha512-UdAqHyFngu7TfQKsCBgAA6pWDkT8MAO7d0jyOecVhN5354xbLqdn8mV9Tat9gepAupm0bt2DbeaSC8vS52MuFA==", - "optional": true + "devOptional": true }, "nano-json-stream-parser": { "version": "0.1.2", @@ -30506,6 +30518,47 @@ "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", "dev": true }, + "node-pg-migrate": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/node-pg-migrate/-/node-pg-migrate-6.2.2.tgz", + "integrity": "sha512-0WYLTXpWu2doeZhiwJUW/1u21OqAFU2CMQ8YZ8VBcJ0xrdqYAjtd8GGFe5A5DM4NJdIZsqJcLPDFqY0FQsmivw==", + "dev": true, + "requires": { + "@types/pg": "^8.0.0", + "decamelize": "^5.0.0", + "mkdirp": "~1.0.0", + "yargs": "~17.3.0" + }, + "dependencies": { + "mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true + }, + "yargs": { + "version": "17.3.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.3.1.tgz", + "integrity": "sha512-WUANQeVgjLbNsEmGk20f+nlHgOqzRFpiGWVaBrYGYIGANIIu3lWjoyi0fNlFmJkvfhCZ6BXINe7/W2O2bV4iaA==", + "dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.0.0" + } + }, + "yargs-parser": { + "version": "21.1.1", + 
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", + "dev": true + } + } + }, "node-releases": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.6.tgz", @@ -30728,11 +30781,6 @@ "is-wsl": "^2.1.1" } }, - "optional-require": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/optional-require/-/optional-require-1.0.3.tgz", - "integrity": "sha512-RV2Zp2MY2aeYK5G+B/Sps8lW5NHAzE5QClbFP15j+PWmP+T9PxlJXBOOLoSAdgwFvS4t0aMR4vpedMkbHfh0nA==" - }, "optionator": { "version": "0.9.1", "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", @@ -30773,27 +30821,6 @@ "color-convert": "^2.0.1" } }, - "bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "requires": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, "chalk": { "version": "4.1.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", @@ -30825,17 +30852,6 @@ "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", "dev": true }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, "supports-color": { "version": "7.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", @@ -30930,6 +30946,11 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, + "packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, "pako": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz", @@ -31133,6 +31154,133 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow==" }, + "pg": { + "version": "8.8.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.8.0.tgz", + "integrity": "sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==", + "requires": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.5.2", + "pg-protocol": "^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + } + }, + "pg-connection-string": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": 
"sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==" + }, + "pg-minify": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/pg-minify/-/pg-minify-1.6.2.tgz", + "integrity": "sha512-1KdmFGGTP6jplJoI8MfvRlfvMiyBivMRP7/ffh4a11RUFJ7kC2J0ZHlipoKiH/1hz+DVgceon9U2qbaHpPeyPg==" + }, + "pg-native": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/pg-native/-/pg-native-3.0.1.tgz", + "integrity": "sha512-LBVNWkNh0fVx/cienARRP2y22J5OpUsKBe0TpxzAx3arEUUdIs77aLSAHS3scS7SMaqc+OkG40CEu5fN0/cjIw==", + "devOptional": true, + "requires": { + "libpq": "^1.8.10", + "pg-types": "^1.12.1", + "readable-stream": "1.0.31" + }, + "dependencies": { + "isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", + "devOptional": true + }, + "pg-types": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-1.13.0.tgz", + "integrity": "sha512-lfKli0Gkl/+za/+b6lzENajczwZHc7D5kiUCZfgm914jipD2kIOIvEkAhZ8GrW3/TUoP9w8FHjwpPObBye5KQQ==", + "devOptional": true, + "requires": { + "pg-int8": "1.0.1", + "postgres-array": "~1.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.0", + "postgres-interval": "^1.1.0" + } + }, + "postgres-array": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-1.0.3.tgz", + "integrity": "sha512-5wClXrAP0+78mcsNX3/ithQ5exKvCyK5lr5NEEEeGwwM6NJdQgzIJBVxLvRW+huFpX92F2QnZ5CcokH0VhK2qQ==", + "devOptional": true + }, + "readable-stream": { + "version": "1.0.31", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.31.tgz", + "integrity": "sha512-tco/Dwv1f/sgIgN6CWdj/restacPKNskK6yps1981ivH2ZmLYcs5o5rVzL3qaO/cSkhN8hYOMWs7+glzOLSgRg==", + "devOptional": true, + "requires": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==", + "devOptional": true + } + } + }, + "pg-pool": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.5.2.tgz", + "integrity": "sha512-His3Fh17Z4eg7oANLob6ZvH8xIVen3phEZh2QuyrIl4dQSDVEabNducv6ysROKpDNPSD+12tONZVWfSgMvDD9w==", + "requires": {} + }, + "pg-promise": { + "version": "10.12.0", + "resolved": "https://registry.npmjs.org/pg-promise/-/pg-promise-10.12.0.tgz", + "integrity": "sha512-7uN64iEHrhtRcOaU/AT3925S20JzQJG2nWVK2IUz5SlhB1eNdkXjAYoQtei+5kLJo81mOWcFq7x9J9VRldp0ig==", + "requires": { + "assert-options": "0.7.0", + "pg": "8.8.0", + "pg-minify": "1.6.2", + "spex": "3.2.0" + } + }, + "pg-protocol": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" + }, + "pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": 
"sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "requires": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + } + }, + "pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "requires": { + "split2": "^4.1.0" + } + }, "picocolors": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", @@ -31226,6 +31374,29 @@ } } }, + "postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==" + }, + "postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha512-xy3pmLuQqRBZBXDULy7KbaitYqLcmxigw14Q5sj8QBVLqEwXfeybIKVWiqAXTlcvdvb0+xkOtDbfQMOf4lST1w==" + }, + "postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==" + }, + "postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "requires": { + "xtend": "^4.0.0" + } + }, "prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -31279,7 +31450,8 @@ "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", - "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", + "dev": true }, "process-utils": { "version": "4.0.0", @@ -31475,6 +31647,7 @@ "version": "2.3.7", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", @@ -31550,11 +31723,6 @@ "integrity": "sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA==", "dev": true }, - "regexp-clone": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/regexp-clone/-/regexp-clone-1.0.0.tgz", - "integrity": "sha512-TuAasHQNamyyJ2hb97IuBEif4qBHGjPHBS64sZwytpLEqtBQ1gPJTnOaQ6qmpET16cK14kkjbazl6+p0RRv0yw==" - }, "regexp.prototype.flags": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz", @@ -31620,11 +31788,6 @@ } } }, - "require-at": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/require-at/-/require-at-1.0.6.tgz", - "integrity": "sha512-7i1auJbMUrXEAZCOQ0VNJgmcT2VOKPRl2YGJwgpHpC9CE91Mv4/4UYIUm4chGJaI381ZDq1JUicFii64Hapd8g==" - }, "require-directory": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", @@ -31822,14 
+31985,6 @@ "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" }, - "saslprep": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz", - "integrity": "sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag==", - "requires": { - "sparse-bitfield": "^3.0.3" - } - }, "sax": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", @@ -32079,12 +32234,6 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", "dev": true }, - "bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, "cacheable-request": { "version": "7.0.2", "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.2.tgz", @@ -32330,14 +32479,6 @@ "dev": true, "requires": { "bluebird": "^3.7.2" - }, - "dependencies": { - "bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - } } }, "serverless-webpack": { @@ -32356,12 +32497,6 @@ "ts-node": ">= 8.3.0" }, "dependencies": { - "bluebird": { - "version": "3.7.2", - "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", - "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==", - "dev": true - }, "fs-extra": { "version": "9.1.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz", @@ -32486,11 +32621,6 @@ "object-inspect": "^1.9.0" } }, - "sift": { - "version": "13.5.2", - "resolved": "https://registry.npmjs.org/sift/-/sift-13.5.2.tgz", - "integrity": "sha512-+gxdEOMA2J+AI+fVsCqeNn7Tgx3M9ZN9jdi95939l1IJ8cZsqS8sqpJyOkic2SJk+1+98Uwryt/gL6XDaV+UZA==" - }, "signal-exit": { "version": "3.0.7", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", @@ -32513,9 +32643,9 @@ } }, "simple-git": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.13.0.tgz", - "integrity": "sha512-VYrs3joeHvWGcN3K135RpGpPjm4AHYeOrclwew6LlfHgq6ozQYIW2yMnmjf4PCgVOuSYCbXkdUjyiFawuJz8MA==", + "version": "3.14.0", + "resolved": "https://registry.npmjs.org/simple-git/-/simple-git-3.14.0.tgz", + "integrity": "sha512-Paad1BkrI7vWhImLybDRYOHnh3WPsHSKXJpmKM+iGjjKNV91XaOdd+yIdZ/gqdzncHDEKYff4k+74oNo1R+U8Q==", "dev": true, "requires": { "@kwsites/file-exists": "^1.1.1", @@ -32576,11 +32706,6 @@ } } }, - "sliced": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz", - "integrity": "sha512-VZBmZP8WU3sMOZm1bdgTadsQbcscK0UM8oKxKVBs4XAhUo2Xxzm/OFMGBkPusxw9xL3Uy8LrzEqGqJhclsr0yA==" - }, "solc": { "version": "0.7.3", "resolved": "https://registry.npmjs.org/solc/-/solc-0.7.3.tgz", @@ -32675,35 +32800,15 @@ "source-map": "^0.6.0" } }, - "sparse-bitfield": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz", - "integrity": "sha512-kvzhi7vqKTfkh0PZU+2D2PIllw2ymqJKujUcyPMd9Y75Nv4nPbGJZXNhxsgdQab2BmlDct1YnfQCguEvHr7VsQ==", - "requires": { - "memory-pager": "^1.0.2" - } + "spex": { + "version": 
"3.2.0", + "resolved": "https://registry.npmjs.org/spex/-/spex-3.2.0.tgz", + "integrity": "sha512-9srjJM7NaymrpwMHvSmpDeIK5GoRMX/Tq0E8aOlDPS54dDnDUIp30DrP9SphMPEETDLzEM9+4qo+KipmbtPecg==" }, "split2": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", - "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", - "dev": true, - "requires": { - "readable-stream": "^3.0.0" - }, - "dependencies": { - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", - "dev": true, - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - } - } + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.1.0.tgz", + "integrity": "sha512-VBiJxFkxiXRlUIeyMQi8s4hgvKCSjtknJv/LVYbrgALPwf5zSKmEwV9Lst25AkvMDnvxODugjdl6KZgwKM1WYQ==" }, "sprintf-js": { "version": "1.0.3", @@ -33285,27 +33390,6 @@ "readable-stream": "^3.1.1" }, "dependencies": { - "bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dev": true, - "requires": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", - "dev": true, - "requires": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -34821,6 +34905,12 @@ "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "peer": true }, + "decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "peer": true + }, "is-plain-obj": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", diff --git a/package.json b/package.json index 6eae3a20da..c063d8e28c 100644 --- a/package.json +++ b/package.json @@ -4,9 +4,10 @@ "scripts": { "deploy:env": "sls deploy --stage $NODE_ENV", "deploy:prod": "export AWS_PROFILE='defillama' && export NODE_ENV=prod && npm run deploy:env", - "deploy:dev": "export AWS_PROFILE='default' && export NODE_ENV=dev && npm run deploy:env", + "deploy:dev": "export AWS_PROFILE='defillama' && export NODE_ENV=dev && npm run deploy:env", "deploy": "export NODE_ENV=prod && npm run deploy:env", - "test": "jest" + "test": "jest", + "migrate": "node-pg-migrate" }, "author": "", "license": "ISC", @@ -24,9 +25,8 @@ "graphql": "^15.5.1", "graphql-request": "^3.5.0", "lodash": "^4.17.21", - "mongoose": "^5.11.13", "node-fetch": "^2.6.1", - "saslprep": "^1.0.3", + "pg-promise": "^10.11.1", "simple-statistics": "^7.7.5", "superagent": "^6.1.0", "web3": "^1.4.0" @@ -49,6 +49,9 @@ "eslint-plugin-prettier": "^3.3.1", "eslint-plugin-react": "^7.22.0", "jest": "^28.1.3", + "node-pg-migrate": "^6.2.2", + "pg": "^8.8.0", + "pg-native": "^3.0.1", 
"prettier": "^2.2.1", "serverless": "^3.8.0", "serverless-prune-plugin": "^2.0.1", diff --git a/scripts/boostrapMedianTable.js b/scripts/boostrapMedianTable.js deleted file mode 100644 index 17a7c237b1..0000000000 --- a/scripts/boostrapMedianTable.js +++ /dev/null @@ -1,49 +0,0 @@ -const fs = require('fs'); - -const AWS = require('aws-sdk'); -const ss = require('simple-statistics'); - -const { confirm } = require('../src/utils/confirm'); -const { boundaries } = require('../src/utils/exclude'); -const { insertMedian } = require('../src/handlers/triggerMedian'); - -// set config (we run this script locally) -const credentials = new AWS.SharedIniFileCredentials({ profile: 'defillama' }); -AWS.config.credentials = credentials; -AWS.config.update({ region: 'eu-central-1' }); -process.env['SSM_PATH'] = '/llama-apy/serverless/sls-authenticate'; - -(async () => { - await confirm( - `Confirm with 'yes' if you want to start the ${process.argv[1] - .split('/') - .slice(-1)} script: ` - ); - // pools.json is a full database snapshot of daily values only (the last value per pool per day) - // containing pool and the total apy fields - let data = JSON.parse(fs.readFileSync(process.argv[2])); - // keeping positive values only - data = data.filter( - (p) => - p.apy !== null && p.apy >= boundaries.apy.lb && p.apy <= boundaries.apy.ub - ); - const payload = []; - for (const [i, timestamp] of [ - ...new Set(data.map((el) => el.timestamp)), - ].entries()) { - console.log(i, timestamp); - - // filter to day - let X = data.filter((el) => el.timestamp === timestamp); - - payload.push({ - timestamp: new Date(timestamp), - medianAPY: ss.median(X.map((p) => p.apy)), - uniquePools: new Set(X.map((p) => p.pool)).size, - }); - } - - const response = await insertMedian(payload); - console.log(response); - process.exit(0); -})(); diff --git a/scripts/boostrapStatsTable.js b/scripts/boostrapStatsTable.js deleted file mode 100644 index 1a67f4125a..0000000000 --- a/scripts/boostrapStatsTable.js +++ /dev/null @@ -1,65 +0,0 @@ -const fs = require('fs'); - -const AWS = require('aws-sdk'); -const ss = require('simple-statistics'); - -const { confirm } = require('../src/utils/confirm'); -const { boundaries } = require('../src/utils/exclude'); -const { insertStats } = require('../src/handlers/triggerStats'); - -// set config (we run this script locally) -const credentials = new AWS.SharedIniFileCredentials({ profile: 'defillama' }); -AWS.config.credentials = credentials; -AWS.config.update({ region: 'eu-central-1' }); -process.env['SSM_PATH'] = '/llama-apy/serverless/sls-authenticate'; - -(async () => { - await confirm( - `Confirm with 'yes' if you want to start the ${process.argv[1] - .split('/') - .slice(-1)} script: ` - ); - // pools.json is a full database snapshot of daily values only (the last value per pool per day) - // containing pool and the total apy fields - let data = JSON.parse(fs.readFileSync(process.argv[2])); - // keeping positive values only - data = data.filter( - (p) => - p.apy !== null && p.apy >= boundaries.apy.lb && p.apy <= boundaries.apy.ub - ); - - // create return field - const T = 365; - // transform raw apy to return field (required for geometric mean below) - data = data.map((p) => ({ - ...p, - return: (1 + p.apy / 100) ** (1 / T) - 1, - })); - - const payload = []; - for (const [i, pool] of [...new Set(data.map((el) => el.pool))].entries()) { - console.log(i); - - // filter to pool - let X = data.filter((el) => el.pool === pool); - if (X.length === 0) continue; - - const count = X.length; - const 
seriesAPY = X.map((p) => p.apy); - const seriesReturn = X.map((p) => p.return); - - payload.push({ - pool, - count, - meanAPY: seriesAPY.reduce((a, b) => a + b, 0) / count, - mean2APY: count < 2 ? null : ss.variance(seriesAPY) * (count - 1), - meanDR: seriesReturn.reduce((a, b) => a + b, 0) / count, - mean2DR: count < 2 ? null : ss.variance(seriesReturn) * (count - 1), - productDR: seriesReturn.map((a) => 1 + a).reduce((a, b) => a * b), - }); - } - - const response = await insertStats(payload); - console.log(response); - process.exit(0); -})(); diff --git a/src/utils/confirm.js b/scripts/confirm.js similarity index 100% rename from src/utils/confirm.js rename to scripts/confirm.js diff --git a/scripts/createConfig.js b/scripts/createConfig.js new file mode 100644 index 0000000000..2711f7779c --- /dev/null +++ b/scripts/createConfig.js @@ -0,0 +1,46 @@ +const fs = require('fs'); + +const superagent = require('superagent'); + +const { confirm } = require('./confirm'); +const { connect } = require('../src/utils/dbConnection'); +const { + buildInsertConfigQuery, +} = require('../src/controllers/configController'); + +(async () => { + await confirm( + `Confirm with 'yes' if you want to start the ${process.argv[1] + .split('/') + .slice(-1)} script: ` + ); + + const uuids = JSON.parse(fs.readFileSync('./created_uuids.json')); + const urls = ( + await superagent.get( + 'https://1rwmj4tky9.execute-api.eu-central-1.amazonaws.com/urls' + ) + ).body; + let data = JSON.parse(fs.readFileSync('./yield_snapshot_last.json')); + + data = data.map((p) => ({ + config_id: uuids[p.pool], + pool: p.pool, + project: p.project, + chain: p.chain, + symbol: p.symbol, + poolMeta: p.poolMeta, + underlyingTokens: + p?.underlyingTokens?.length > 0 ? p?.underlyingTokens : null, + rewardTokens: p?.rewardTokens?.length > 0 ? 
p?.rewardTokens : null, + url: urls[p.project], + })); + + // build multi row insert query + const insertConfigQ = buildInsertConfigQuery(data); + + const conn = await connect(); + const response = await conn.result(insertConfigQ); + console.log(response); + process.exit(0); +})(); diff --git a/scripts/createMedian.js b/scripts/createMedian.js new file mode 100644 index 0000000000..7db05d7f6e --- /dev/null +++ b/scripts/createMedian.js @@ -0,0 +1,45 @@ +const fs = require('fs'); + +const ss = require('simple-statistics'); + +const { confirm } = require('./confirm'); +const exclude = require('../src/utils/exclude'); +const { insertMedian } = require('../src/controllers/medianController'); + +(async () => { + await confirm( + `Confirm with 'yes' if you want to start the ${process.argv[1] + .split('/') + .slice(-1)} script: ` + ); + // load yield table snapshot of daily values only + let data = JSON.parse(fs.readFileSync('./yield_snapshot_daily.json')); + // we filter further on tvl (10k) cause this is what we do on retrieval from db for frontend + data = data.filter( + (p) => + p.tvlUsd >= 1e4 && + !exclude.excludePools.includes(p.pool) && + !exclude.excludeAdaptors.includes(p.project) + ); + + let payload = []; + for (const [i, timestamp] of [ + ...new Set(data.map((el) => el.timestamp)), + ].entries()) { + console.log(i, timestamp); + + // filter to day + let X = data.filter((el) => el.timestamp === timestamp); + + payload.push({ + timestamp: new Date(timestamp), + medianAPY: parseFloat(ss.median(X.map((p) => p.apy)).toFixed(5)), + uniquePools: new Set(X.map((p) => p.pool)).size, + }); + } + payload = payload.sort((a, b) => b.timestamp - a.timestamp); + + const response = await insertMedian(payload); + console.log(response); + process.exit(0); +})(); diff --git a/scripts/createStat.js b/scripts/createStat.js new file mode 100644 index 0000000000..be12852d7d --- /dev/null +++ b/scripts/createStat.js @@ -0,0 +1,54 @@ +const fs = require('fs'); + +const ss = require('simple-statistics'); + +const { confirm } = require('./confirm'); +const { insertStat } = require('../src/controllers/statController'); + +(async () => { + await confirm( + `Confirm with 'yes' if you want to start the ${process.argv[1] + .split('/') + .slice(-1)} script: ` + ); + // load yield table snapshot of daily values only + let data = JSON.parse(fs.readFileSync('./yield_snapshot_daily.json')); + + // load the uuids + const uuids = JSON.parse(fs.readFileSync('./created_uuids.json')); + + // create return field + const T = 365; + // transform raw apy to return field (required for geometric mean below) + data = data.map((p) => ({ + pool: p.pool, + apy: p.apy, + return: (1 + p.apy / 100) ** (1 / T) - 1, + })); + + const payload = []; + for (const [i, pool] of [...new Set(data.map((p) => p.pool))].entries()) { + console.log(i); + + // filter to config id + let X = data.filter((p) => p.pool === pool); + if (X.length === 0) continue; + + const count = X.length; + const seriesAPY = X.map((p) => p.apy); + const seriesReturn = X.map((p) => p.return); + + payload.push({ + configID: uuids[pool], + count, + meanAPY: seriesAPY.reduce((a, b) => a + b, 0) / count, + mean2APY: count < 2 ? 0 : ss.variance(seriesAPY) * (count - 1), + meanDR: seriesReturn.reduce((a, b) => a + b, 0) / count, + mean2DR: count < 2 ? 
0 : ss.variance(seriesReturn) * (count - 1), + productDR: seriesReturn.map((a) => 1 + a).reduce((a, b) => a * b), + }); + } + const response = await insertStat(payload); + console.log(response); + process.exit(0); +})(); diff --git a/scripts/createUUID.js b/scripts/createUUID.js new file mode 100644 index 0000000000..48ef4d2be9 --- /dev/null +++ b/scripts/createUUID.js @@ -0,0 +1,21 @@ +const fs = require('fs'); +const crypto = require('crypto'); + +const data = JSON.parse(fs.readFileSync('./yield_snapshot_last.json')); +const uniquePools = new Set(data.map((p) => p.pool)); +console.log('nb of unique pools: ', uniquePools.size); + +const uuidMapping = {}; +for (const pool of uniquePools) { + uuidMapping[pool] = crypto.randomUUID(); +} +console.log( + 'nb of unique pools in mapping: ', + new Set(Object.keys(uuidMapping)).size +); +console.log( + 'nb of unique uuids in mapping: ', + new Set(Object.values(uuidMapping)).size +); + +fs.writeFileSync('./created_uuids.json', JSON.stringify(uuidMapping)); diff --git a/scripts/createYield.js b/scripts/createYield.js new file mode 100644 index 0000000000..9d094c597d --- /dev/null +++ b/scripts/createYield.js @@ -0,0 +1,34 @@ +const fs = require('fs'); + +const { confirm } = require('./confirm'); +const { connect } = require('../src/utils/dbConnection'); +const { buildInsertYieldQuery } = require('../src/controllers/yieldController'); + +(async () => { + await confirm( + `Confirm with 'yes' if you want to start the ${process.argv[1] + .split('/') + .slice(-1)} script: ` + ); + + const uuids = JSON.parse(fs.readFileSync('./created_uuids.json')); + + let data = JSON.parse(fs.readFileSync('./yield_snapshot_daily.json')); + + data = data.map((p) => ({ + configID: uuids[p.pool], + timestamp: new Date(p.timestamp), + tvlUsd: p.tvlUsd, + apy: p.apy, + apyBase: p.apyBase, + apyReward: p.apyReward, + })); + + // build multi row insert query + const insertYieldQ = buildInsertYieldQuery(data); + + const conn = await connect(); + const response = await conn.result(insertYieldQ); + console.log(response); + process.exit(0); +})(); diff --git a/scripts/fillOld.js b/scripts/fillOld.js deleted file mode 100644 index 4bde0bad8c..0000000000 --- a/scripts/fillOld.js +++ /dev/null @@ -1,118 +0,0 @@ -const path = require('path'); -const dotenv = require('dotenv'); - -const AWS = require('aws-sdk'); - -const poolModel = require('../models/pool'); -const AppError = require('../utils/appError'); -const { confirm } = require('../src/utils/confirm'); -const dbConnection = require('../src/utils/dbConnection.js'); -const { insertPools } = require('../src/handlers/triggerAdaptor'); - -const credentials = new AWS.SharedIniFileCredentials({ profile: 'defillama' }); -AWS.config.credentials = credentials; - -dotenv.config({ path: './config.env' }); - -(async () => { - if (process.argv.length < 4) { - console.error(`Missing argument, you need to provide the adaptor name, a - unix timestamp in seconds and optionally the number of days you want to backfill the data - Eg: node scripts/fillOld.js pangolin 1648098107 10`); - process.exit(1); - } - - await confirm('Confirm with `yes` if you want to start the fillOld script: '); - - const project = process.argv[2]; - let timestamp = process.argv[3]; - const maxDays = process.argv[4] === undefined ? 
1 : process.argv[4]; - // round timestamp to midnight - // eg 2022-04-06T00:00:00.000Z - timestamp = Math.floor(timestamp / 60 / 60 / 24) * 24 * 60 * 60; - const offset = 86400; - const passedFile = path.resolve( - process.cwd(), - `src/adaptors/${project}/index.js` - ); - - // 1. load module - const module = require(passedFile); - if (!module.timetravel) - return console.log(`${project} can't timetravel, exiting!`); - - // 2. run adaptor - console.log(`Starting timetravel for ${project}...\n`); - for (let i = 0; i < maxDays; i++) { - console.log( - `Unix: ${timestamp}, ISO: ${new Date( - timestamp * 1000 - ).toISOString()}, Nb: ${i + 1}` - ); - - console.log('\trunning adaptor'); - const data = await module.apy(timestamp); - - // filter to $1k usd tvl - const tvlMinThr = 1e3; - let dataDB = data.filter((el) => el.tvlUsd >= tvlMinThr); - - // add timestamp - dataDB = dataDB.map((p) => ({ - ...p, - timestamp: new Date(timestamp * 1000), - })); - - // DB update - // step1: we delete all hourly samples on that particular day for that project - // step2: we insert the new ones - // reason instead of updateMany: if we'd just use an update operation without deleting anything, - // we'd have only outdated objects for that day with the exception of the updated one. - // -> confusing when looking at the historcal data and especially bad when we want to use the old data - // for some analysis work as nothing would make sense - try { - // delete - const responseDelete = (await deletePools(timestamp, project)).response; - console.log(`\tDeleted ${responseDelete.n} samples`); - - // insert - const responseInsert = (await insertPools(dataDB)).response; - console.log(`\t${responseInsert} samples\n`); - } catch (err) { - throw new Error(err); - } - // update timestamp - timestamp -= offset; - } - console.log(`\njob finished, backfilled ${maxDays} day(s)`); - - process.exit(0); -})(); - -const deletePools = async (timestamp, project) => { - const conn = await dbConnection.connect(); - const M = conn.model(poolModel.modelName); - - const lb = new Date(timestamp * 1000); - const ub = new Date((timestamp + 86400) * 1000); - - // we filter to a project and the current timestamp from midnight up to the next day midnight - // eg timestamp 1649116800 == 2022-04-05T00:00:00.000Z - // lb == 2022-04-05T00:00:00.000Z; ub == 2022-04-06T00:00:00.000Z - // we remove everything >= lb up to < ub - const filter = { - project, - timestamp: { $gte: lb, $lt: ub }, - }; - - const response = await M.deleteMany(filter); - - if (!response) { - return new AppError("Couldn't delete data", 404); - } - - return { - status: 'success', - response, - }; -}; diff --git a/scripts/prepareSnapshot.py b/scripts/prepareSnapshot.py index 1f3b01c0b2..eac7cacd22 100644 --- a/scripts/prepareSnapshot.py +++ b/scripts/prepareSnapshot.py @@ -1,21 +1,101 @@ import sys +import ast import pandas as pd +# the way i ran this: +# Step 1) DOWNLOAD FULL HISTORY from mongodb -def trim(filename: str) -> None: - # this script reduces hourly data to daily data (latest value per pool based on timestamp) - # note: i didn't manage to run this directly on db server, hence the script +# Step 2) PREPARE DATA +# run this python script, which does: + # filter apy >= 0, tvlUsd >= 1000 + # cast dtype: tvlUsd to integer + # round apy columns to 5 decimals + # ... 
+ # stores 3 outputs: the full hourly history (for yield), last value for each day (for stat), last value (for config) + +# Step 3) CREATE UUIDS +# based on output from Step 2) run the bootstrapUUID.js file which creates a unique uuid for each unique pool and stores that locally + +# Step 4) CREATE THE POSTGRES TABLES +# run the create scripts starting with config, then the others (order doesn't matter for the rest) + +def replaceFunc(x: str) -> str: + if x == "[null]": + return "[]" + elif x == "[null,null]": + return "[]" + elif "null," in x: + return x.replace("null,", "") + elif ",null" in x: + return x.replace(",null", "") + else: + return x + + +def prepare_snapshot(filename: str) -> None: df = pd.read_csv(f"{filename}") + + # correct none, null values in array + df.loc[df["underlyingTokens"].notnull(), "underlyingTokens"] = df.loc[ + df["underlyingTokens"].notnull(), "underlyingTokens" + ].apply(lambda x: replaceFunc(x) if "null" in x else x) + + # remove rows where all 3 apy fields are null + df = df[ + ~((df["apy"].isnull()) & (df["apyReward"].isnull()) & (df["apyBase"].isnull())) + ] + + # keep positive apy sum values only + df = df[(df["apy"] >= 0) & (df["apy"] <= 1e6)] + # tvl btw boundary values + df = df[(df["tvlUsd"] >= 1000) & (df["tvlUsd"] <= 2e10)] + + # remove pools and project from exclusion list + exclude_pools = [ + "0xf4bfe9b4ef01f27920e490cea87fe2642a8da18d", + "DWmAv5wMun4AHxigbwuJygfmXBBe9WofXAtrMCRJExfb", + "ripae-seth-weth-42161", + "ripae-peth-weth-42161", + "0x3eed430cd45c5e2b45aa1adc609cc77c6728d45b", + "0x3c42B0f384D2912661C940d46cfFE1CD10F1c66F-ethereum", + "0x165ab553871b1a6b3c706e15b6a7bb29a244b2f3", + ] + df = df[~df["pool"].isin(exclude_pools)] + df = df[df["project"] != "koyo-finance"] + + # cast dtypes and round + df["tvlUsd"] = df["tvlUsd"].astype(int) + apy_columns = ["apy", "apyBase", "apyReward"] + df[apy_columns] = df[apy_columns].round(5) + + # 1. hourly (for yield table) df["timestamp"] = pd.to_datetime(df["timestamp"]) - df = df.sort_values(["pool", "timestamp"]).reset_index(drop=True) - ( + df = df.sort_values(["pool", "timestamp"], ascending=True).reset_index(drop=True) + f = "yield_snapshot" + df.to_csv(f"{f}_hourly.csv", index=False) + + # 2. prepare daily (for stat) + df_daily = ( df.groupby(["pool", pd.Grouper(key="timestamp", freq="1D")]) .last() .reset_index() - .to_json(f"{filename.split('.')[0]}_daily.json", orient="records") ) + df_daily.to_json(f"{f}_daily.json", orient="records") + + # 3. 
prepare last (for config) + df_last = ( + df_daily.sort_values(["pool", "timestamp"], ascending=True) + .groupby("pool") + .last() + .reset_index() + ) + # cast string to arrays + func = lambda x: ast.literal_eval(x) if type(x) == str else x + df_last["underlyingTokens"] = df_last["underlyingTokens"].apply(func) + df_last["rewardTokens"] = df_last["rewardTokens"].apply(func) + df_last.to_json(f"{f}_last.json", orient="records") if __name__ == "__main__": - trim(sys.argv[1]) + prepare_snapshot(sys.argv[1]) diff --git a/serverless.yml b/serverless.yml index d77d953f0d..0976a72eff 100644 --- a/serverless.yml +++ b/serverless.yml @@ -7,7 +7,7 @@ frameworkVersion: '3' provider: name: aws - runtime: nodejs14.x + runtime: nodejs16.x stage: dev region: eu-central-1 tracing: @@ -28,13 +28,6 @@ provider: - s3:*Object* - sqs:SendMessage Resource: '*' - - Effect: Allow - Action: - - ssm:PutParameter - - ssm:GetParameter - - ssm:DescribeParameters - - kms:Decrypt - Resource: '*' environment: # for entrypoint and enrichment @@ -55,13 +48,14 @@ provider: ], ], } - SSM_PATH: ${self:custom.ssmPath} BUCKET_DATA: { Ref: BucketData } + DATABASE_URL: ${file(./env.js):DATABASE_URL} httpApi: metrics: true functions: + # ---------- TRIGGER HANDLERS # --- top-lvl-entrypoint triggerEntrypoint: handler: src/handlers/triggerEntrypoint.handler @@ -106,13 +100,13 @@ functions: - schedule: cron(20 * * * ? *) # --- stats update - triggerStats: - handler: src/handlers/triggerStats.handler - description: Lambda which updates the stats table + triggerStat: + handler: src/handlers/triggerStat.handler + description: Lambda which updates the stat table timeout: 300 events: - # at midnight - - schedule: cron(0 0 * * ? *) + # 30min prior midnight + - schedule: cron(30 23 * * ? *) # --- median insert triggerMedian: @@ -120,28 +114,19 @@ functions: description: Lambda which inserts latest value into the median table timeout: 300 events: - # at midnight - - schedule: cron(0 0 * * ? *) + # 30min prior midnight + - schedule: cron(30 23 * * ? *) # --- save poolsEnriched as CSV triggerCsv: handler: src/handlers/triggerCsv.handler - description: Lambda which dumps poolsEnriched as csv to s3 bucket + description: Lambda which saves poolsEnriched as csv to s3 bucket timeout: 300 events: # every hour at 25 past - schedule: cron(25 * * * ? 
*) - # --- DB Crud operations - getPools: - handler: src/handlers/getPools.handler - description: Lambda for retrieving the latest data for each unique pool - timeout: 30 - events: - - httpApi: - method: get - path: /simplePools - + # ---------- GET HANDLERS getPoolsEnriched: handler: src/handlers/getPoolsEnriched.handler description: Lambda for retrieving the latest enriched data for each unique pool @@ -161,25 +146,34 @@ functions: events: - httpApi: method: get - path: /chart/{pool} + path: /chart/{configID} getMedian: handler: src/handlers/getMedian.handler - description: Lambda for retrieving median data + description: Lambda for retrieving daily median apy's timeout: 20 events: - httpApi: method: get path: /median - getUrls: - handler: src/handlers/getUrls.handler - description: Lambda for retrieving url data + getUrl: + handler: src/handlers/getUrl.handler + description: Lambda for retrieving project url's + timeout: 20 + events: + - httpApi: + method: get + path: /url + + getDistinctID: + handler: src/handlers/getDistinctID.handler + description: Lambda for retrieving unique pool id's timeout: 20 events: - httpApi: method: get - path: /urls + path: /distinctID resources: Resources: @@ -244,7 +238,6 @@ resources: custom: stage: ${opt:stage, self:provider.stage} - ssmPath: /${self:service}/serverless/sls-authenticate webpack: webpackConfig: 'webpack.config.js' includeModules: true diff --git a/src/controllers/configController.js b/src/controllers/configController.js new file mode 100644 index 0000000000..9dc4df96d2 --- /dev/null +++ b/src/controllers/configController.js @@ -0,0 +1,118 @@ +const minify = require('pg-minify'); + +const { pgp, connect } = require('../utils/dbConnection'); + +const tableName = 'config'; + +// get config data per project +const getConfigProject = async (project) => { + const conn = await connect(); + + const query = minify( + ` + SELECT + config_id, + pool + FROM + $ + WHERE + project = $ + `, + { compress: true } + ); + + const response = await conn.query(query, { table: tableName, project }); + + if (!response) { + return new AppError(`Couldn't get ${tableName} data`, 404); + } + + return response; +}; + +// get distinct urls per project +const getUrl = async () => { + const conn = await connect(); + + const query = minify( + ` + SELECT + DISTINCT(project), + url + FROM + $ + `, + { compress: true } + ); + + const response = await conn.query(query, { table: tableName }); + + if (!response) { + return new AppError(`Couldn't get ${tableName} data`, 404); + } + + const out = {}; + for (const e of response) { + out[e.project] = e.url; + } + + return out; +}; + +// get unique pool values +// (used during adapter testing to check if a pool field is already in the DB) +const getDistinctID = async () => { + const conn = await connect(); + + const query = minify( + ` + SELECT + DISTINCT(pool), + project + FROM + $ + `, + { compress: true } + ); + + const response = await conn.query(query, { table: tableName }); + + if (!response) { + return new AppError(`Couldn't get ${tableName} data`, 404); + } + + return response; +}; + +// multi row insert (update on conflict) query generator +const buildInsertConfigQuery = (payload) => { + const columns = [ + 'config_id', + 'pool', + 'project', + 'chain', + 'symbol', + // pg-promise is not aware of the db-schema -> we need to make sure that + // optional fields are marked and provided with a default value + // otherwise the `result` method will fail + { name: 'poolMeta', def: null }, + { name: 'underlyingTokens', def: null 
}, + { name: 'rewardTokens', def: null }, + 'url', + ]; + const cs = new pgp.helpers.ColumnSet(columns, { table: tableName }); + const query = + pgp.helpers.insert(payload, cs) + + ' ON CONFLICT(config_id) DO UPDATE SET ' + + cs.assignColumns({ from: 'EXCLUDED', skip: 'config_id' }); + + return query; +}; + +module.exports = { + getConfigProject, + buildInsertConfigQuery, + getUrl, + getDistinctID, + tableName, +}; diff --git a/src/controllers/medianController.js b/src/controllers/medianController.js new file mode 100644 index 0000000000..d59bc129f0 --- /dev/null +++ b/src/controllers/medianController.js @@ -0,0 +1,51 @@ +const minify = require('pg-minify'); + +const { pgp, connect } = require('../utils/dbConnection'); + +const tableName = 'median'; + +// get full content from median table +const getMedian = async () => { + const conn = await connect(); + + const query = minify( + ` + SELECT + timestamp, + "uniquePools", + "medianAPY" + FROM + $ + `, + { compress: true } + ); + + const response = await conn.query(query, { table: tableName }); + + if (!response) { + return new AppError(`Couldn't get ${tableName} data`, 404); + } + + return response; +}; + +// insert +const insertMedian = async (payload) => { + const conn = await connect(); + + const columns = ['timestamp', 'uniquePools', 'medianAPY']; + const cs = new pgp.helpers.ColumnSet(columns, { table: tableName }); + const query = pgp.helpers.insert(payload, cs); + const response = await conn.result(query); + + if (!response) { + return new AppError(`Couldn't insert ${tableName} data`, 404); + } + + return response; +}; + +module.exports = { + getMedian, + insertMedian, +}; diff --git a/src/controllers/statController.js b/src/controllers/statController.js new file mode 100644 index 0000000000..1852d83b7f --- /dev/null +++ b/src/controllers/statController.js @@ -0,0 +1,75 @@ +const minify = require('pg-minify'); + +const { pgp, connect } = require('../utils/dbConnection'); + +const tableName = 'stat'; + +// get full content from stat table +const getStat = async () => { + const conn = await connect(); + + const query = minify( + ` + SELECT + "configID", + count, + "meanAPY", + "mean2APY", + "meanDR", + "mean2DR", + "productDR" + FROM + $ + `, + { compress: true } + ); + + const response = await conn.query(query, { table: tableName }); + + if (!response) { + return new AppError(`Couldn't get ${tableName} data`, 404); + } + + // reformat + const responseObject = {}; + for (const p of response) { + const configID = p.configID; + responseObject[configID] = { configID, ...p }; + } + + return responseObject; +}; + +// multi row insert (update on conflict) +const insertStat = async (payload) => { + const conn = await connect(); + + const columns = [ + 'configID', + 'count', + 'meanAPY', + 'mean2APY', + 'meanDR', + 'mean2DR', + 'productDR', + ]; + const cs = new pgp.helpers.ColumnSet(columns, { table: tableName }); + + const query = + pgp.helpers.insert(payload, cs) + + ' ON CONFLICT("configID") DO UPDATE SET ' + + cs.assignColumns({ from: 'EXCLUDED', skip: 'configID' }); + + const response = await conn.result(query); + + if (!response) { + return new AppError(`Couldn't insert/update ${tableName} data`, 404); + } + + return response; +}; + +module.exports = { + getStat, + insertStat, +}; diff --git a/src/controllers/yieldController.js b/src/controllers/yieldController.js new file mode 100644 index 0000000000..ae04d02522 --- /dev/null +++ b/src/controllers/yieldController.js @@ -0,0 +1,258 @@ +const minify = require('pg-minify'); + +const 
AppError = require('../utils/appError'); +const exclude = require('../utils/exclude'); +const { pgp, connect } = require('../utils/dbConnection'); +const { + tableName: configTableName, +} = require('../controllers/configController'); + +const tableName = 'yield'; + +// get last DB entry per unique pool (with exclusion; this is what we use in enrichment handler) +const getYieldFiltered = async () => { + const conn = await connect(); + + // -- get latest yield row per unique configID (a pool) + // -- exclude if tvlUsd is < LB + // -- exclude if pool age > 7days + // -- join config data + const query = minify( + ` + SELECT + "configID", + timestamp, + pool, + project, + chain, + symbol, + "poolMeta", + "underlyingTokens", + "rewardTokens", + "tvlUsd", + apy, + "apyBase", + "apyReward" + FROM + ( + SELECT + DISTINCT ON ("configID") * + FROM + $ + WHERE + "tvlUsd" >= $ + AND timestamp >= NOW() - INTERVAL '$ DAY' + ORDER BY + "configID", + timestamp DESC + ) AS y + INNER JOIN $ AS c ON c.config_id = y."configID" + WHERE + pool NOT IN ($) + AND project NOT IN ($) + `, + { compress: true } + ); + + const response = await conn.query(query, { + tvlLB: exclude.boundaries.tvlUsdUI.lb, + age: exclude.boundaries.age, + yieldTable: tableName, + configTable: configTableName, + excludePools: exclude.excludePools, + excludeProjects: exclude.excludeAdaptors, + }); + + if (!response) { + return new AppError(`Couldn't get ${tableName} data`, 404); + } + + return response; +}; + +// get full history of given configID +const getYieldHistory = async (configID) => { + const conn = await connect(); + + const query = minify( + ` + SELECT + timestamp, + "tvlUsd", + "apy" + FROM + $ + WHERE + timestamp IN ( + SELECT + max(timestamp) + FROM + $ + WHERE + "configID" = $ + GROUP BY + (timestamp :: date) + ) + AND "configID" = $ + ORDER BY + timestamp ASC + `, + { compress: true } + ); + + const response = await conn.query(query, { + configIDValue: configID, + table: tableName, + }); + + if (!response) { + return new AppError(`Couldn't get ${tableName} history data`, 404); + } + + return { + status: 'success', + data: response, + }; +}; + +// get last DB entry per unique pool for a given project (used by adapter handler to check for TVL spikes) +const getYieldProject = async (project) => { + const conn = await connect(); + + // -- get latest yield row per unique configID (a pool) for a specific project + // -- exclude if tvlUsd is < LB + // -- exclude if pool age > 7days + // -- join config data + const query = minify( + ` + SELECT + DISTINCT ON ("configID") "configID", + "tvlUsd", + timestamp + FROM + $ + WHERE + "configID" IN ( + SELECT + DISTINCT (config_id) + FROM + $ + WHERE + "project" = $ + ) + AND "tvlUsd" >= $ + AND timestamp >= NOW() - INTERVAL '$ DAY' + ORDER BY + "configID", + timestamp DESC + `, + { compress: true } + ); + + const response = await conn.query(query, { + tvlLB: exclude.boundaries.tvlUsdUI.lb, + age: exclude.boundaries.age, + project, + yieldTable: tableName, + configTable: configTableName, + }); + + if (!response) { + return new AppError(`Couldn't get ${tableName} project data`, 404); + } + + return response; +}; + +// get apy offset value for project/day combo +const getYieldOffset = async (project, offset) => { + const conn = await connect(); + + const age = Number(offset); + const daysMilliSeconds = age * 60 * 60 * 24 * 1000; + const tOffset = Date.now() - daysMilliSeconds; + + // 3 hour window + const h = 3; + const tWindow = 60 * 60 * h * 1000; + const tsLB = new Date(tOffset - tWindow); 
+ const tsUB = new Date(tOffset + tWindow); + + const tvlLB = exclude.boundaries.tvlUsdUI.lb; + + // -- retrieve the historical offset data for a every unique pool given an offset day (1d/7d/30d) + // -- to calculate pct changes. allow some buffer (+/- 3hs) in case of missing data (via tsLB and tsUB) + const query = minify( + ` + SELECT + DISTINCT ON ("configID") "configID", + apy + FROM + ( + SELECT + "configID", + apy, + abs( + extract ( + epoch + FROM + timestamp - (NOW() - INTERVAL '$ DAY') + ) + ) AS abs_delta + FROM + $ AS y + INNER JOIN config AS c ON c.config_id = y."configID" + WHERE + "tvlUsd" >= $ + AND project = $ + AND timestamp >= $ + AND timestamp <= $ + ) AS y + ORDER BY + "configID", + abs_delta DESC + `, + { compress: true } + ); + + const response = await conn.query(query, { + project, + age, + tsLB, + tsUB, + tvlLB, + table: tableName, + }); + + if (!response) { + return new AppError(`Couldn't get ${tableName} offset data`, 404); + } + + return response; +}; + +// multi row insert query generator +const buildInsertYieldQuery = (payload) => { + // note: even though apyBase and apyReward are optional fields + // they are both added in the adapter handler to derive final apy. + // hence, there is no need to specify optional fields defaults for pg-promise + // (in contrast to some fields in `insertConfig`) + const columns = [ + 'configID', + 'timestamp', + 'tvlUsd', + 'apy', + 'apyBase', + 'apyReward', + ]; + const cs = new pgp.helpers.ColumnSet(columns, { table: tableName }); + return pgp.helpers.insert(payload, cs); +}; + +module.exports = { + getYieldFiltered, + getYieldHistory, + getYieldOffset, + getYieldProject, + buildInsertYieldQuery, +}; diff --git a/src/handlers/getChart.js b/src/handlers/getChart.js index 82c38b29b9..8dc068a78a 100644 --- a/src/handlers/getChart.js +++ b/src/handlers/getChart.js @@ -1,85 +1,6 @@ -const dbConnection = require('../utils/dbConnection.js'); -const poolModel = require('../models/pool'); -const AppError = require('../utils/appError'); -const { lambdaResponse } = require('../utils/lambda'); +const { getYieldHistory } = require('../controllers/yieldController'); -// retrieve chart data of latest daily tvl and apy values of requested pool module.exports.handler = async (event, context) => { context.callbackWaitsForEmptyEventLoop = false; - const conn = await dbConnection.connect(); - const M = conn.model(poolModel.modelName); - - const pool = event.pathParameters.pool; - - const aggQuery = [ - { - $match: { - pool: pool, - }, - }, - { - $sort: { - timestamp: -1, - }, - }, - { - $group: { - _id: { - $toDate: { - $subtract: [ - { $toLong: '$timestamp' }, - { $mod: [{ $toLong: '$timestamp' }, 86400000] }, - ], - }, - }, - apyBase: { - $first: '$apyBase', - }, - apyReward: { - $first: '$apyReward', - }, - apy: { - $first: '$apy', - }, - tvlUsd: { - $first: '$tvlUsd', - }, - timestamp: { - $first: '$timestamp', - }, - }, - }, - // remove the grouping key - { - $project: { - _id: 0, - }, - }, - { - $sort: { - timestamp: 1, - }, - }, - ]; - - const query = M.aggregate(aggQuery); - let response = await query; - - response = response.filter( - (p) => !(p.apy === null && p.apyBase === null && p.apyReward === null) - ); - response = response.map((p) => ({ - apy: p.apy ?? 
p.apyBase + p.apyReward, - tvlUsd: p.tvlUsd, - timestamp: p.timestamp, - })); - - if (!response) { - return new AppError("Couldn't retrieve data", 404); - } - - return lambdaResponse({ - status: 'success', - data: response, - }); + return await getYieldHistory(event.pathParameters.configID); }; diff --git a/src/handlers/getDistinctID.js b/src/handlers/getDistinctID.js new file mode 100644 index 0000000000..2cd6e38293 --- /dev/null +++ b/src/handlers/getDistinctID.js @@ -0,0 +1,6 @@ +const { getDistinctID } = require('../controllers/configController'); + +module.exports.handler = async (event, context) => { + context.callbackWaitsForEmptyEventLoop = false; + return await getDistinctID(); +}; diff --git a/src/handlers/getMedian.js b/src/handlers/getMedian.js index 110a13bea4..7b8fc790c0 100644 --- a/src/handlers/getMedian.js +++ b/src/handlers/getMedian.js @@ -1,22 +1,6 @@ -const dbConnection = require('../utils/dbConnection.js'); -const medianModel = require('../models/median'); -const AppError = require('../utils/appError'); +const { getMedian } = require('../controllers/medianController'); -// get expanding standard deviation data module.exports.handler = async (event, context) => { context.callbackWaitsForEmptyEventLoop = false; - const conn = await dbConnection.connect(); - const M = conn.model(medianModel.modelName); - - // return all documents - const response = await M.find({}, { _id: 0 }).sort({ timestamp: 1 }); - - if (!response) { - return new AppError("Couldn't get median data", 404); - } - - return { - status: 'success', - data: response, - }; + return await getMedian(); }; diff --git a/src/handlers/getPools.js b/src/handlers/getPools.js deleted file mode 100644 index ad287d7e6e..0000000000 --- a/src/handlers/getPools.js +++ /dev/null @@ -1,164 +0,0 @@ -const dbConnection = require('../utils/dbConnection.js'); -const poolModel = require('../models/pool'); -const AppError = require('../utils/appError'); -const exclude = require('../utils/exclude'); -const { lambdaResponse } = require('../utils/lambda.js'); - -// get latest object of each unique pool -module.exports.handler = async (event, context) => { - context.callbackWaitsForEmptyEventLoop = false; - const response = await getLatestPools(); - - if (!response) { - return new AppError("Couldn't retrieve data", 404); - } - - return lambdaResponse({ - status: 'success', - data: response, - }); -}; - -const getLatestPools = async (aggregationQuery = aggQuery) => { - const conn = await dbConnection.connect(); - const M = conn.model(poolModel.modelName); - - const query = M.aggregate(aggregationQuery); - let response = await query; - - // remove pools where all 3 fields are null (this and the below project/pool exclusion - // could certainly be implemented in the aggregation pipeline but i'm to stupid for mongodb pipelines) - response = response.filter( - (p) => - !(p.apy === null && p.apyBase === null && p.apyReward === null) && - !exclude.excludeAdaptors.includes(p.project) && - !exclude.excludePools.includes(p.pool) - ); - - return response; -}; - -const baseQuery = [ - { - $sort: { - pool: 1, - timestamp: -1, - }, - }, - { - $group: { - _id: '$pool', - chain: { - $first: '$chain', - }, - project: { - $first: '$project', - }, - symbol: { - $first: '$symbol', - }, - poolMeta: { - $first: '$poolMeta', - }, - tvlUsd: { - $first: '$tvlUsd', - }, - apyBase: { - $first: '$apyBase', - }, - apyReward: { - $first: '$apyReward', - }, - apy: { - $first: '$apy', - }, - timestamp: { - $first: '$timestamp', - }, - rewardTokens: { - $first: 
'$rewardTokens', - }, - underlyingTokens: { - $first: '$underlyingTokens', - }, - }, - }, - // sort on tvl desc - { - $sort: { - tvlUsd: -1, - }, - }, - // adding "back" the pool field, the grouping key is only available as _id - { - $addFields: { - pool: '$_id', - }, - }, - // remove the _id field - { - $project: { - _id: 0, - }, - }, -]; - -// remove pools based on exclusion values -const aggQuery = [ - ...baseQuery, - { - $match: { - tvlUsd: { - $gte: exclude.boundaries.tvlUsdUI.lb, - $lte: exclude.boundaries.tvlUsdUI.ub, - }, - // lte not enough, would remove null values, - // hence why the or statement to keep pools with apy === null - $or: [ - { - apy: { - $gte: exclude.boundaries.apy.lb, - $lte: exclude.boundaries.apy.ub, - }, - }, - { apy: null }, - ], - // remove pools which haven't been updated for >7days; - // some pools might just not be included anymore in the adaptor output, - // so instead of showing the latest object of that pool on the frontend - timestamp: { - $gte: new Date(new Date() - 60 * 60 * 24 * 7 * 1000), - }, - }, - }, -]; - -// remove pools based on exclusion values -const aggQueryMedian = [ - ...baseQuery, - { - $match: { - // lte not enough, would remove null values, - // hence why the or statement to keep pools with apy === null - $or: [ - { - apy: { - $gte: 0, - $lte: 1e6, - }, - }, - { apy: null }, - ], - // remove pools which haven't been updated for >7days; - // some pools might just not be included anymore in the adaptor output, - // so instead of showing the latest object of that pool on the frontend - timestamp: { - $gte: new Date(new Date() - 60 * 60 * 24 * 7 * 1000), - }, - }, - }, -]; - -module.exports.aggQuery = aggQuery; -module.exports.aggQueryMedian = aggQueryMedian; -module.exports.getLatestPools = getLatestPools; diff --git a/src/handlers/getPoolsEnriched.js b/src/handlers/getPoolsEnriched.js index 38af4a7daa..aa03713375 100644 --- a/src/handlers/getPoolsEnriched.js +++ b/src/handlers/getPoolsEnriched.js @@ -28,6 +28,7 @@ const buildPoolsEnriched = async (queryString) => { 'apy', 'rewardTokens', 'pool', + 'configID', 'apyPct1D', 'apyPct7D', 'apyPct30D', diff --git a/src/handlers/getUrl.js b/src/handlers/getUrl.js new file mode 100644 index 0000000000..7236a56222 --- /dev/null +++ b/src/handlers/getUrl.js @@ -0,0 +1,6 @@ +const { getUrl } = require('../controllers/configController'); + +module.exports.handler = async (event, context) => { + context.callbackWaitsForEmptyEventLoop = false; + return await getUrl(); +}; diff --git a/src/handlers/getUrls.js b/src/handlers/getUrls.js deleted file mode 100644 index dc837dea16..0000000000 --- a/src/handlers/getUrls.js +++ /dev/null @@ -1,23 +0,0 @@ -const dbConnection = require('../utils/dbConnection.js'); -const urlModel = require('../models/url'); -const AppError = require('../utils/appError'); - -// get expanding standard deviation data -module.exports.handler = async (event, context) => { - context.callbackWaitsForEmptyEventLoop = false; - const conn = await dbConnection.connect(); - const M = conn.model(urlModel.modelName); - - // return all documents - const response = await M.find({}, { _id: 0 }); - - if (!response) { - return new AppError("Couldn't get url data", 404); - } - - const out = {}; - for (const e of response) { - out[e.project] = e.url; - } - return out; -}; diff --git a/src/handlers/triggerAdaptor.js b/src/handlers/triggerAdaptor.js index 08a8e7756f..f7a79b395b 100644 --- a/src/handlers/triggerAdaptor.js +++ b/src/handlers/triggerAdaptor.js @@ -1,13 +1,20 @@ +const crypto = 
require('crypto'); + const superagent = require('superagent'); const utils = require('../adaptors/utils'); -const poolModel = require('../models/pool'); -const urlModel = require('../models/url'); -const { aggQuery } = require('./getPools'); const AppError = require('../utils/appError'); const exclude = require('../utils/exclude'); -const dbConnection = require('../utils/dbConnection.js'); const { sendMessage } = require('../utils/discordWebhook'); +const { connect } = require('../utils/dbConnection'); +const { + getYieldProject, + buildInsertYieldQuery, +} = require('../controllers/yieldController'); +const { + getConfigProject, + buildInsertConfigQuery, +} = require('../controllers/configController'); module.exports.handler = async (event, context) => { context.callbackWaitsForEmptyEventLoop = false; @@ -38,14 +45,16 @@ module.exports.handler = async (event, context) => { // func for running adaptor, storing result to db const main = async (body) => { - // run adaptor + // ---------- run adaptor console.log(body.adaptor); const project = require(`../adaptors/${body.adaptor}`); let data = await project.apy(); + // ---------- prepare prior insert // remove potential null/undefined objects in array data = data.filter((p) => p); + // cast dtypes // even though we have tests for datatypes, will need to guard against sudden changes // from api responses in terms of data types (eg have seen this on lido stETH) which went from // number to string. so in order for the below filters to work proplerly we need to guarantee that the @@ -85,7 +94,7 @@ const main = async (body) => { apy: p.apy ?? p.apyBase + p.apyReward, })); - // remove pools pools based on apy boundaries + // remove pools based on apy boundaries data = data.filter( (p) => p.apy !== null && @@ -96,35 +105,47 @@ const main = async (body) => { // remove exclusion pools data = data.filter((p) => !exclude.excludePools.includes(p.pool)); - // add the timestamp field - // will be rounded to the nearest hour - // eg 2022-04-06T10:00:00.000Z - const timestamp = new Date( - Math.floor(Date.now() / 1000 / 60 / 60) * 60 * 60 * 1000 - ); - data = data.map((p) => ({ ...p, timestamp: timestamp })); - - // format chain in case it was skipped in adapter. - // round tvlUsd to integer and apy fields to n-dec - const dec = 5; - data = data.map((p) => ({ - ...p, - chain: utils.formatChain(p.chain), - symbol: utils.formatSymbol(p.symbol), - tvlUsd: Math.round(p.tvlUsd), - apy: +p.apy.toFixed(dec), - apyBase: p.apyBase !== null ? +p.apyBase.toFixed(dec) : p.apyBase, - apyReward: p.apyReward !== null ? +p.apyReward.toFixed(dec) : p.apyReward, - })); + // for PK, FK, read data from config table + const config = await getConfigProject(body.adaptor); + const mapping = {}; + for (const c of config) { + // the pool fields are used to map to the config_id values from the config table + mapping[c.pool] = c.config_id; + } + // we round numerical fields to 5 decimals after the comma + const precision = 5; + const timestamp = new Date(Date.now()); + data = data.map((p) => { + // if pool not in mapping -> its a new pool -> create a new uuid, else keep existing one + const id = mapping[p.pool] ?? 
crypto.randomUUID(); + return { + ...p, + config_id: id, // config PK field + configID: id, // yield FK field referencing config_id in config + chain: utils.formatChain(p.chain), // format chain and symbol in case it was skipped in adapter + symbol: utils.formatSymbol(p.symbol), + tvlUsd: Math.round(p.tvlUsd), // round tvlUsd to integer and apy fields to n-dec + apy: +p.apy.toFixed(precision), // round apy fields + apyBase: p.apyBase !== null ? +p.apyBase.toFixed(precision) : p.apyBase, + apyReward: + p.apyReward !== null ? +p.apyReward.toFixed(precision) : p.apyReward, + url: project.url, + timestamp, + }; + }); + + // ---------- tvl spike check + // prior insert, we run a tvl check to make sure + // that there haven't been any sudden spikes in tvl compared to the previous insert; // insert only if tvl conditions are ok: // if tvl // - has increased >5x since the last hourly update // - and has been updated in the last 5 hours // -> block update - // load current project array - const dataInitial = await getProject(body.adaptor); + // load last entries for each pool for this sepcific adapter + const dataInitial = await getYieldProject(body.adaptor); const dataDB = []; const nHours = 5; @@ -132,7 +153,7 @@ const main = async (body) => { const timedeltaLimit = 60 * 60 * nHours * 1000; const droppedPools = []; for (const p of data) { - const x = dataInitial.find((e) => e.pool === p.pool); + const x = dataInitial.find((e) => e.configID === p.configID); if (x === undefined) { dataDB.push(p); continue; @@ -146,38 +167,32 @@ const main = async (body) => { ) { console.log(`removing pool ${p.pool}`); droppedPools.push({ - pool: p.pool, + configID: p.configID, symbol: p.symbol, project: p.project, tvlUsd: p.tvlUsd, tvlUsdDB: x.tvlUsd, tvlMultiplier: p.tvlUsd / x.tvlUsd, - lastUpdate: x.timestamp, - hoursUntilNextUpdate: 6 - timedelta / 1000 / 60 / 60, }); continue; } dataDB.push(p); } + // return if dataDB is empty; + if (!dataDB.length) return; - if ( - !dataInitial.length && - dataDB.filter(({ tvlUsd }) => tvlUsd > 10000).length - ) { - const message = `Project ${body.adaptor} yields have been added`; - await sendMessage(message, process.env.NEW_YIELDS_WEBHOOK); - } - + // send msg to discord if tvl spikes const delta = data.length - dataDB.length; if (delta > 0) { console.log(`removed ${delta} sample(s) prior to insert`); // send discord message + // we limit sending msg only if the pool's last tvlUsd value is >= $500k const filteredPools = droppedPools.filter((p) => p.tvlUsdDB >= 5e5); if (filteredPools.length) { const message = filteredPools .map( (p) => - `Project: ${p.project} Pool: ${p.pool} Symbol: ${ + `configID: ${p.configID} Project: ${p.project} Symbol: ${ p.symbol } TVL: from ${p.tvlUsdDB.toFixed()} to ${p.tvlUsd.toFixed()} (${p.tvlMultiplier.toFixed( 2 @@ -188,76 +203,49 @@ const main = async (body) => { } } - const response = await insertPools(dataDB); - console.log(response); - - // update url - if (project.url) { - console.log('insert/update url'); - await updateUrl(body.adaptor, project.url); - } -}; - -const insertPools = async (payload) => { - const conn = await dbConnection.connect(); - const M = conn.model(poolModel.modelName); - - const response = await M.insertMany(payload); - - if (!response) { - return new AppError("Couldn't insert data", 404); + // ---------- discord bot for newly added projects + if ( + !dataInitial.length && + dataDB.filter(({ tvlUsd }) => tvlUsd > exclude.boundaries.tvlUsdUI.lb) + .length + ) { + const message = `Project ${body.adaptor} yields have 
been added`; + await sendMessage(message, process.env.NEW_YIELDS_WEBHOOK); } - return { - status: 'success', - response: `Inserted ${payload.length} samples`, - }; -}; - -module.exports.insertPools = insertPools; - -// get latest object of each unique pool -const getProject = async (project) => { - const conn = await dbConnection.connect(); - const M = conn.model(poolModel.modelName); - - // add project field to match obj - aggQuery.slice(-1)[0]['$match']['project'] = project; - - const query = M.aggregate(aggQuery); - let response = await query; - - // remove pools where all 3 fields are null (this and the below project/pool exclusion - // could certainly be implemented in the aggregation pipeline but i'm to stupid for mongodb pipelines) - response = response.filter( - (p) => - !(p.apy === null && p.apyBase === null && p.apyReward === null) && - !exclude.excludePools.includes(p.pool) - ); - - return response; + // ---------- DB INSERT + const response = await insertConfigYieldTransaction(dataDB); + console.log(response); }; -const updateUrl = async (adapter, url) => { - const conn = await dbConnection.connect(); - const M = conn.model(urlModel.modelName); - - const response = await M.bulkWrite([ - { - updateOne: { - filter: { project: adapter }, - update: { - $set: { - url: url, - }, - }, - upsert: true, - }, - }, - ]); - - if (!response) { - return new AppError("Couldn't update data", 404); - } - console.log(response); +// --------- transaction query +const insertConfigYieldTransaction = async (payload) => { + const conn = await connect(); + + // build queries + const configQ = buildInsertConfigQuery(payload); + const yieldQ = buildInsertYieldQuery(payload); + + return conn + .tx(async (t) => { + // sequence of queries: + // 1. config: insert/update + const q1 = await t.result(configQ); + // 2. yield: insert + const q2 = await t.result(yieldQ); + + return [q1, q2]; + }) + .then((response) => { + // success, COMMIT was executed + return { + status: 'success', + data: response, + }; + }) + .catch((err) => { + // failure, ROLLBACK was executed + console.log(err); + return new AppError('ConfigYield Transaction failed, rolling back', 404); + }); }; diff --git a/src/handlers/triggerEnrichment.js b/src/handlers/triggerEnrichment.js index bf1bc59ec5..8c170f0be8 100644 --- a/src/handlers/triggerEnrichment.js +++ b/src/handlers/triggerEnrichment.js @@ -2,12 +2,13 @@ const superagent = require('superagent'); const ss = require('simple-statistics'); const utils = require('../utils/s3'); -const { getLatestPools } = require('./getPools'); -const { getStats } = require('./triggerStats'); +const { + getYieldFiltered, + getYieldOffset, +} = require('../controllers/yieldController'); +const { getStat } = require('../controllers/statController'); const { buildPoolsEnriched } = require('./getPoolsEnriched'); const { welfordUpdate } = require('../utils/welford'); -const dbConnection = require('../utils/dbConnection.js'); -const poolModel = require('../models/pool'); module.exports.handler = async (event, context) => { await main(); @@ -16,21 +17,13 @@ module.exports.handler = async (event, context) => { const main = async () => { console.log('START DATA ENRICHMENT'); - const urlBase = process.env.APIG_URL; - console.log('\n1. getting pools...'); - let data = await getLatestPools(); + // ---------- get lastet unique pool + console.log('\ngetting pools'); + const data = await getYieldFiltered(); - // derive final apy field via: - data = data.map((p) => ({ - ...p, - apy: p.apy ?? 
p.apyBase + p.apyReward, - })); - // remove any potential null values - data = data.filter((p) => p.apy !== null); - - ////// 2 add pct-change columns + // ---------- add additional fields // for each project we get 3 offsets (1D, 7D, 30D) and calculate absolute apy pct-change - console.log('\n2. adding pct-change fields...'); + console.log('\nadding pct-change fields'); const days = ['1', '7', '30']; let dataEnriched = []; const failed = []; @@ -42,14 +35,14 @@ const main = async () => { // api calls const promises = []; for (let i = 0; i < days.length; i++) { - promises.push(getOffsets(adaptor, days[i])); + promises.push(getYieldOffset(adaptor, days[i])); } try { const offsets = await Promise.all(promises); // calculate pct change for each pool dataEnriched = [ ...dataEnriched, - ...dataProject.map((pool) => enrich(pool, days, offsets)), + ...dataProject.map((p) => enrich(p, days, offsets)), ]; } catch (err) { console.log(err); @@ -63,7 +56,8 @@ const main = async () => { } } - console.log('\n3. adding additional pool info fields'); + // add info about stablecoin, exposure etc. + console.log('\nadding additional pool info fields'); const stablecoins = ( await superagent.get( 'https://stablecoins.llama.fi/stablecoins?includePrices=true' @@ -77,20 +71,21 @@ const main = async () => { ).body.protocols; dataEnriched = dataEnriched.map((el) => addPoolInfo(el, stablecoins, config)); + // add ML and overview plot fields // expanding mean, expanding standard deviation, // geometric mean and standard deviation (of daily returns) - console.log('\n4. adding stats columns'); + console.log('\nadding stats columns'); const T = 365; dataEnriched = dataEnriched.map((p) => ({ ...p, return: (1 + p.apy / 100) ** (1 / T) - 1, })); - const dataStats = await getStats(); - const statsColumns = welfordUpdate(dataEnriched, dataStats); + const dataStat = await getStat(); + const statColumns = welfordUpdate(dataEnriched, dataStat); // add columns to dataEnriched for (const p of dataEnriched) { - const x = statsColumns.find((i) => i.pool === p.pool); + const x = statColumns.find((i) => i.configID === p.configID); // create columns // a) ML section p['count'] = x.count; @@ -100,7 +95,7 @@ const main = async () => { // b) scatterchart section p['mu'] = (x.productDR ** (T / x.count) - 1) * 100; p['sigma'] = - x.count < 2 ? null : Math.sqrt((x.mean2DR / (x.count - 1)) * T) * 100; + x.count < 2 ? 0 : Math.sqrt((x.mean2DR / (x.count - 1)) * T) * 100; } // mark pools as outliers if outside boundary (let user filter via toggle on frontend) const columns = ['mu', 'sigma']; @@ -113,7 +108,7 @@ const main = async () => { const distance = 1.5; const x_lb = x_median - distance * x_iqr; const x_ub = x_median + distance * x_iqr; - outlierBoundaries[col] = { lb: x_lb, ub: x_ub }; + outlierBoundaries[col] = { lb: Math.max(0, x_lb), ub: x_ub }; } // before adding the new outlier field, // i'm setting sigma to 0 instead of keeping it to null @@ -133,13 +128,13 @@ const main = async () => { p['sigma'] > outlierBoundaries['sigma']['ub'], })); - console.log('\n5. 
adding apy runway prediction'); + // add ML predictions + console.log('\nadding apy runway prediction'); // load categorical feature mappings const modelMappings = await utils.readFromS3( 'llama-apy-prediction-prod', 'mlmodelartefacts/categorical_feature_mapping_2022_05_20.json' ); - console.log(modelMappings); for (const el of dataEnriched) { project_fact = modelMappings.project_factorized[el.project]; chain_fact = modelMappings.chain_factorized[el.chain]; @@ -150,9 +145,6 @@ const main = async () => { el.chain_factorized = chain_fact === undefined ? -1 : chain_fact; } - // remove any potential objects which have null value on mean - dataEnriched = dataEnriched.filter((el) => el.apyMeanExpanding !== null); - // impute null values on apyStdExpanding (this will be null whenever we have pools with less than 2 // samples, eg. whenever a new pool project is listed or an existing project adds new pools dataEnriched = dataEnriched.map((p) => ({ @@ -187,8 +179,7 @@ const main = async () => { for (const [i, el] of dataEnriched.entries()) { // for certain conditions we don't want to show predictions on the frontend // 1. apy === 0 - // 2. project === 'anchor' ("stable" apy, prediction would be confusing) - // 3. less than 7 datapoints per pool + // 2. less than 7 datapoints per pool // (low confidence in the model predictions backward looking features (mean and std) // are undeveloped and might skew prediction results) @@ -198,8 +189,7 @@ const main = async () => { 1: 'Stable/Up', }; - const nullifyPredictionsCond = - el.apy <= 0 || el.count < 7 || el.project === 'anchor'; + const nullifyPredictionsCond = el.apy <= 0 || el.count < 7; const cond = y_pred[i][0] >= y_pred[i][1]; // (we add label + probabalilty of the class with the larger probability) const predictedClass = nullifyPredictionsCond @@ -246,28 +236,27 @@ const main = async () => { } // round numbers - const precision = 3; + const precision = 5; dataEnriched = dataEnriched.map((p) => Object.fromEntries( Object.entries(p).map(([k, v]) => [ k, - typeof v === 'number' ? parseFloat(v.toFixed(precision)) : v, + typeof v === 'number' ? +v.toFixed(precision) : v, ]) ) ); - console.log('\n6. 
saving data to S3'); + // ---------- save output to S3 + console.log('\nsaving data to S3'); console.log('nb of pools', dataEnriched.length); const bucket = process.env.BUCKET_DATA; const key = 'enriched/dataEnriched.json'; dataEnriched = dataEnriched.sort((a, b) => b.tvlUsd - a.tvlUsd); await utils.writeToS3(bucket, key, dataEnriched); - // also save to other "folder" where we keep track of daily predictions (this will be used - // for ML dashboard performance monitoring) - const timestamp = new Date( - Math.floor(Date.now() / 1000 / 60 / 60) * 60 * 60 * 1000 - ).toISOString(); + // store ML predictions so we can keep track of model performance + const f = 1000 * 60 * 60; + const timestamp = new Date(Math.floor(Date.now() / f) * f).toISOString(); if (timestamp.split('T')[1] === '23:00:00.000Z') { const keyPredictions = `predictions-hourly/dataEnriched_${timestamp}.json`; @@ -287,7 +276,7 @@ const enrich = (pool, days, offsets) => { const poolC = { ...pool }; for (let d = 0; d < days.length; d++) { let X = offsets[d]; - const apyOffset = X.find((x) => x.pool === poolC.pool)?.apy; + const apyOffset = X.find((x) => x.configID === poolC.configID)?.apy; poolC[`apyPct${days[d]}D`] = poolC['apy'] - apyOffset; } return poolC; @@ -410,76 +399,3 @@ const addPoolInfo = (el, stablecoins, config) => { return el; }; - -// retrieve the historical offset data for a project and a given offset day (1d/7d/30d) -// to calculate pct changes. allow some buffer (+/- 3hs) in case of missing data -const getOffsets = async (project, days) => { - const conn = await dbConnection.connect(); - const M = conn.model(poolModel.modelName); - - const daysMilliSeconds = Number(days) * 60 * 60 * 24 * 1000; - const tOffset = Date.now() - daysMilliSeconds; - - // 3 hour window - const h = 3; - const tWindow = 60 * 60 * h * 1000; - // mongoose query requires Date - const recent = new Date(tOffset + tWindow); - const oldest = new Date(tOffset - tWindow); - - // pull only data >= 10k usd in tvl (we won't show smaller pools on the frontend) - const tvlUsdLB = 1e4; - - const aggQuery = [ - // filter - { - $match: { - project: project, - timestamp: { - $gte: oldest, - $lte: recent, - }, - tvlUsd: { $gte: tvlUsdLB }, - }, - }, - // calc time distances from exact offset - { - $addFields: { - time_dist: { - $abs: [{ $subtract: ['$timestamp', new Date(tOffset)] }], - }, - }, - }, - // sort ascending (the smallest distance are the closest data points to the exact offset) - { - $sort: { time_dist: 1 }, - }, - // group by id, and return the first sample of apy - { - $group: { - _id: '$pool', - apy: { - $first: '$apy', - }, - }, - }, - // adding "back" the pool field, the grouping key is only available as _id - { - $addFields: { - pool: '$_id', - }, - }, - // remove the grouping key - { - $project: { - _id: 0, - }, - }, - ]; - const query = M.aggregate(aggQuery); - - // run query on db server - const response = await query; - - return response; -}; diff --git a/src/handlers/triggerEntrypoint.js b/src/handlers/triggerEntrypoint.js index 5eaea69f04..d8b5dcf697 100644 --- a/src/handlers/triggerEntrypoint.js +++ b/src/handlers/triggerEntrypoint.js @@ -1,4 +1,5 @@ const SQS = require('aws-sdk/clients/sqs'); + const { excludeAdaptors } = require('../utils/exclude'); module.exports.handler = async () => { diff --git a/src/handlers/triggerMedian.js b/src/handlers/triggerMedian.js index 80d07bb322..956b56cf0c 100644 --- a/src/handlers/triggerMedian.js +++ b/src/handlers/triggerMedian.js @@ -1,30 +1,17 @@ const ss = require('simple-statistics'); 
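For reference, the mu/sigma outlier flags added in triggerEnrichment above come from median/IQR boundaries per column. A minimal sketch of that boundary calculation, assuming simple-statistics' median and interquartileRange helpers; the helper name outlierBoundary below is illustrative and not part of the diff:

const ss = require('simple-statistics');

// illustrative: lower/upper bounds as median +/- distance * IQR,
// with the lower bound clamped at 0 as in the enrichment code above
const outlierBoundary = (values, distance = 1.5) => {
  const med = ss.median(values);
  const iqr = ss.interquartileRange(values);
  return { lb: Math.max(0, med - distance * iqr), ub: med + distance * iqr };
};

// e.g. a pool would be flagged as outlier when its sigma falls outside these bounds
// const { lb, ub } = outlierBoundary(dataEnriched.map((p) => p.sigma));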
-const { readFromS3 } = require('../utils/s3'); -const { getLatestPools, aggQueryMedian } = require('./getPools'); -const medianModel = require('../models/median'); -const AppError = require('../utils/appError'); -const dbConnection = require('../utils/dbConnection.js'); +const { getYieldFiltered } = require('../controllers/yieldController'); +const { insertMedian } = require('../controllers/medianController'); module.exports.handler = async () => { await main(); }; const main = async () => { - // read from internal s3 bucket which also includes the pools timestamp which we use to exclude - // any pool which has not been updated on that particular day (eg adapter failed during the whole day, or pool might be stale etc.) - // [checked, and only affect a small nb of pools (< 3%)] - // removing these pools gives an unbiased median calculatiion for that particular day, otherwise - let pools = await getLatestPools(aggQueryMedian); - - // derive final apy field via: - pools = pools.map((p) => ({ - ...p, - apy: p.apy ?? p.apyBase + p.apyReward, - })); - // remove any potential null values - pools = pools.filter((p) => p.apy !== null); + let pools = await getYieldFiltered(); + // include only pools which we have updated on that day, + // otherwise median calc for that day would include values from yst up to 7days ago console.log('removing stale pools...'); console.log('prior filter', pools.length); const maxTimestamp = Math.max(...pools.map((p) => p.timestamp)); @@ -35,45 +22,11 @@ const main = async () => { const payload = [ { - timestamp: new Date( - Math.floor(Date.now() / 1000 / 60 / 60) * 60 * 60 * 1000 - ), - medianAPY: ss.median(pools.map((p) => p.apy)), + timestamp: new Date(), + medianAPY: +ss.median(pools.map((p) => p.apy)).toFixed(5), uniquePools: new Set(pools.map((p) => p.pool)).size, }, ]; const response = await insertMedian(payload); console.log(response); }; - -const insertMedian = async (payload) => { - const conn = await dbConnection.connect(); - const M = conn.model(medianModel.modelName); - - const bulkOperations = []; - for (const el of payload) { - bulkOperations.push({ - updateOne: { - // need to provide a filter value, otherwise this won't work - filter: { timestamp: 0 }, - update: { - $set: el, - }, - upsert: true, - }, - }); - } - - const response = await M.bulkWrite(bulkOperations); - - if (!response) { - return new AppError("Couldn't update data", 404); - } - - return { - status: 'success', - response, - }; -}; - -module.exports.insertMedian = insertMedian; diff --git a/src/handlers/triggerStat.js b/src/handlers/triggerStat.js new file mode 100644 index 0000000000..62bdcadfa9 --- /dev/null +++ b/src/handlers/triggerStat.js @@ -0,0 +1,26 @@ +const { welfordUpdate } = require('../utils/welford'); +const { getYieldFiltered } = require('../controllers/yieldController'); +const { getStat, insertStat } = require('../controllers/statController'); + +module.exports.handler = async (event, context) => { + await main(); +}; + +// we trigger this once per day at midnight, reason: the stat table was boostrapped on +// daily values, and the ML relying on those features was trained on daily values too. 
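The welfordUpdate call imported here (and defined in welford.js further below) maintains running statistics per configID. A minimal sketch of the underlying Welford update for a single series, with illustrative names; the real implementation also tracks a running product of daily returns:

// Welford's online algorithm: update count, mean and sum of squared
// deviations without storing the full series
const welfordStep = (state, x) => {
  const count = state.count + 1;
  const delta = x - state.mean;
  const mean = state.mean + delta / count;
  const delta2 = x - mean;
  const mean2 = state.mean2 + delta * delta2; // running sum of squared deviations
  return { count, mean, mean2 };
};

// sample variance after n >= 2 samples: mean2 / (count - 1)
let state = { count: 0, mean: 0, mean2: 0 };
for (const apy of [1.2, 1.5, 0.9]) state = welfordStep(state, apy);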
+// so i want to keep things consistent (even though it shouldnt be a big difference, at least +// for the majority of pools) +const main = async () => { + let data = await getYieldFiltered(); + const T = 365; + // transform raw apy to return field (required for geometric mean) + data = data.map((p) => ({ + ...p, + return: (1 + p.apy / 100) ** (1 / T) - 1, + })); + + const dataStat = await getStat(); + const payload = welfordUpdate(data, dataStat); + const response = await insertStat(payload); + console.log(response); +}; diff --git a/src/handlers/triggerStats.js b/src/handlers/triggerStats.js deleted file mode 100644 index e863d24676..0000000000 --- a/src/handlers/triggerStats.js +++ /dev/null @@ -1,79 +0,0 @@ -const statModel = require('../models/stat'); -const AppError = require('../utils/appError'); -const { welfordUpdate } = require('../utils/welford'); -const dbConnection = require('../utils/dbConnection.js'); -const { buildPoolsEnriched } = require('./getPoolsEnriched'); - -module.exports.handler = async (event, context) => { - await main(); -}; - -const main = async () => { - const urlBase = process.env.APIG_URL; - let dataEnriched = await buildPoolsEnriched(undefined); - const T = 365; - // transform raw apy to return field (required for geometric mean) - dataEnriched = dataEnriched.map((p) => ({ - ...p, - return: (1 + p.apy / 100) ** (1 / T) - 1, - })); - - const dataStats = await getStats(); - const payload = welfordUpdate(dataEnriched, dataStats); - const response = await insertStats(payload); - console.log(response); -}; - -const insertStats = async (payload) => { - const conn = await dbConnection.connect(); - const M = conn.model(statModel.modelName); - - const bulkOperations = []; - for (const el of payload) { - bulkOperations.push({ - updateOne: { - filter: { pool: el.pool }, - update: { - $set: { - count: el.count, - meanAPY: el.meanAPY, - mean2APY: el.mean2APY, - meanDR: el.meanDR, - mean2DR: el.mean2DR, - productDR: el.productDR, - }, - }, - upsert: true, - }, - }); - } - const response = await M.bulkWrite(bulkOperations); - - if (!response) { - return new AppError("Couldn't update data", 404); - } - - return { - status: 'success', - response, - }; -}; - -// get expanding standard deviation data -const getStats = async () => { - const conn = await dbConnection.connect(); - const M = conn.model(statModel.modelName); - - // return all documents - const x = await M.find({}, { _id: 0, createdAt: 0, updatedAt: 0 }); - const response = {}; - for (let d of x) { - response[d.pool] = d; - } - - return response; -}; - -// for boostrapStatsTable.js -module.exports.insertStats = insertStats; -module.exports.getStats = getStats; diff --git a/src/models/median.js b/src/models/median.js deleted file mode 100644 index e1a6766e50..0000000000 --- a/src/models/median.js +++ /dev/null @@ -1,27 +0,0 @@ -const mongoose = require('mongoose'); - -const medianSchema = new mongoose.Schema( - { - timestamp: { - type: Date, - required: [true, 'A pool must have a timestamp field'], - }, - medianAPY: { - type: Number, - required: [true, 'A pool must have a medianAPY field'], - }, - uniquePools: { - type: Number, - default: [true, 'A pool must have a uniquePools field'], - }, - }, - { versionKey: false } -); - -medianSchema.index({ timestamp: 1 }); - -const nameModel = 'Median'; -const nameCollection = nameModel.toLowerCase(); -const medianModel = mongoose.model(nameModel, medianSchema, nameCollection); - -module.exports = medianModel; diff --git a/src/models/pool.js b/src/models/pool.js deleted file 
mode 100644 index 18e264f14d..0000000000 --- a/src/models/pool.js +++ /dev/null @@ -1,73 +0,0 @@ -const mongoose = require('mongoose'); - -const poolSchema = new mongoose.Schema( - { - pool: { - type: String, - required: [true, 'A pool must have an pool field'], - trim: true, - }, - underlyingTokens: { - type: [String], - }, - chain: { - type: String, - required: [true, 'A pool must have a chain field'], - trim: true, - }, - project: { - type: String, - required: [true, 'A pool must have a project field'], - trim: true, - }, - poolMeta: { - type: String, - trim: true, - }, - symbol: { - type: String, - required: [true, 'A pool must have a symbol field'], - trim: true, - }, - tvlUsd: { - type: Number, - required: [true, 'A pool must have a tvlUsd field'], - }, - apyBase: { - type: Number, - default: null, - }, - apyReward: { - type: Number, - default: null, - }, - apy: { - type: Number, - required: [true, 'A pool must have an apy field'], - }, - rewardTokens: { - type: [String], - }, - // for historical stuff in db, inserted that field via: - // db.pools.updateMany({}, [{$set: {"timestamp": {$dateTrunc: {date: "$createdAt", unit: "hour"}}}}]) - timestamp: { - type: Date, - required: [true, 'A pool must have a timestamp field'], - }, - }, - // i remove __v versionkey created by mongoose - { versionKey: false } -); - -// compound indices speed up queries significantly -// /latest -> ~50ms -// /chart/pool -> ~1ms -// /offsets/project/day -> ~1ms -poolSchema.index({ pool: 1, timestamp: -1 }); -poolSchema.index({ project: 1, timestamp: -1 }); - -const nameModel = 'Pools'; -const nameCollection = nameModel.toLowerCase(); -const poolModel = mongoose.model(nameModel, poolSchema, nameCollection); - -module.exports = poolModel; diff --git a/src/models/stat.js b/src/models/stat.js deleted file mode 100644 index 68416f791f..0000000000 --- a/src/models/stat.js +++ /dev/null @@ -1,46 +0,0 @@ -const mongoose = require('mongoose'); - -const statSchema = new mongoose.Schema( - { - pool: { - type: String, - required: [true, 'A pool must have an pool field'], - trim: true, - }, - count: { - type: Number, - default: null, - }, - // meanAPY and mean2APY are used for calculating mu and sigma of apy series for the ML algorithm - meanAPY: { - type: Number, - default: null, - }, - mean2APY: { - type: Number, - default: null, - }, - // meanDR, mean2DR and productDR are used for calculating mu and sigma of daily return (DR) series for the scatterchart - meanDR: { - type: Number, - default: null, - }, - mean2DR: { - type: Number, - default: null, - }, - productDR: { - type: Number, - default: null, - }, - }, - { timestamps: true } -); - -statSchema.index({ pool: 1 }); - -const nameModel = 'Stats'; -const nameCollection = nameModel.toLowerCase(); -const statModel = mongoose.model(nameModel, statSchema, nameCollection); - -module.exports = statModel; diff --git a/src/models/url.js b/src/models/url.js deleted file mode 100644 index 536c60b33f..0000000000 --- a/src/models/url.js +++ /dev/null @@ -1,26 +0,0 @@ -const mongoose = require('mongoose'); - -const urlSchema = new mongoose.Schema( - { - project: { - type: String, - required: [true, 'A url must have a project field'], - unique: true, - trim: true, - }, - url: { - type: String, - default: [true, 'A url must have a url field'], - trim: true, - }, - }, - { versionKey: false } -); - -urlSchema.index({ project: 1 }); - -const nameModel = 'Url'; -const nameCollection = nameModel.toLowerCase(); -const urlModel = mongoose.model(nameModel, urlSchema, nameCollection); - 
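With the mongoose models removed, reads and writes go through pg-promise instead (see buildInsertYieldQuery above and the dbConnection rewrite below). A minimal sketch of the two query patterns the new controllers rely on, multi-row inserts via ColumnSet and named parameters in raw SQL; the table and sample values here are illustrative:

const pgp = require('pg-promise')({ capSQL: true });

// multi-row insert: one generated statement covering the whole payload array
const cs = new pgp.helpers.ColumnSet(['configID', 'timestamp', 'tvlUsd', 'apy'], {
  table: 'yield',
});
const insertQuery = pgp.helpers.insert(
  [{ configID: '123e4567-e89b-12d3-a456-426614174000', timestamp: new Date(), tvlUsd: 1000000, apy: 4.2 }],
  cs
);

// named parameters: $<name> is substituted from the params object,
// and the :name filter escapes the value as an SQL identifier (used for table names)
// const rows = await conn.query(
//   'SELECT * FROM $<table:name> WHERE project = $<project>',
//   { table: 'yield', project: 'some-project' }
// );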
-module.exports = urlModel; diff --git a/src/utils/dbConnection.js b/src/utils/dbConnection.js index 46000635a6..2a317d1dc1 100755 --- a/src/utils/dbConnection.js +++ b/src/utils/dbConnection.js @@ -1,51 +1,38 @@ -const mongoose = require('mongoose'); -const SSM = require('aws-sdk/clients/ssm'); +const path = require('path'); -// https://mongoosejs.com/docs/lambda.html -// Because conn is in the global scope, Lambda may retain it between -// function calls thanks to `callbackWaitsForEmptyEventLoop`. -// This means your Lambda function doesn't have to go through the -// potentially expensive process of connecting to MongoDB every time. +require('dotenv').config({ path: path.resolve(__dirname, '../../config.env') }); -// more about callbackWaitsForEmptyEventLoop (which we add to every lambda which makes -// a connection to the db) -// See https://www.mongodb.com/blog/post/serverless-development-with-nodejs-aws-lambda-mongodb-atlas +const pgp = require('pg-promise')({ + /* initialization options */ + capSQL: true, // capitalize all generated SQL +}); +// set type options (pg-promise returns integers and numeric types as strings) +// id 20 = INTEGER +// id 1700 = NUMERIC +pgp.pg.types.setTypeParser(20, parseInt); +pgp.pg.types.setTypeParser(1700, parseFloat); // on first connect, cache db connection for reuse so we don't // need to connect on new requests let conn = null; -exports.connect = async () => { +const connect = async () => { if (conn === null) { console.log('using new db connection'); - - // 1) retrieve db connection secrets from SSM - const ssm = new SSM(); - const options = { - Name: `${process.env.SSM_PATH}/dbconnection`, - WithDecryption: true, - }; - let params = await ssm.getParameter(options).promise(); - params = JSON.parse(params.Parameter.Value); - - const DB = params.database.replace('', params.database_password); - // set conection - conn = mongoose - .connect(DB, { - useNewUrlParser: true, - useCreateIndex: true, - useFindAndModify: false, - useUnifiedTopology: true, - // and tell the MongoDB driver to not wait more than 5 seconds - // before erroring out if it isn't connected - serverSelectionTimeoutMS: 5000, - }) - .then(() => mongoose); - - // awaiting connection after assigning to the `conn` variable - // to avoid multiple function calls creating new connections - await conn; + // set connection + conn = pgp({ + connectionString: process.env.DATABASE_URL, + // max milliseconds a client can go unused before it is removed + // from the connection pool and destroyed. 
+ // overriding default of 30sec to 60sec to decrease nb of potential reconnects of 1 lambda + // running multiple adapters + idleTimeoutMillis: 60000, + }); } - return conn; }; + +module.exports = { + pgp, + connect, +}; diff --git a/src/utils/welford.js b/src/utils/welford.js index 8dba12363f..041c6f1c35 100644 --- a/src/utils/welford.js +++ b/src/utils/welford.js @@ -7,8 +7,10 @@ module.exports.welfordUpdate = (pools, stats) => { // count aggregates the number of samples seen so far const payload = []; + const n = 1000 * 60 * 60 * 24; + const currentDay = new Date(Math.floor(new Date() / n) * n); for (const p of pools) { - d = stats[p.pool]; + d = stats[p.configID]; if (d !== undefined) { // extract @@ -19,19 +21,23 @@ module.exports.welfordUpdate = (pools, stats) => { mean2DR = d.mean2DR; productDR = d.productDR; - // update using welford algo - count += 1; - // a) ML section - deltaAPY = p.apy - meanAPY; - meanAPY += deltaAPY / count; - delta2APY = p.apy - meanAPY; - mean2APY += deltaAPY * delta2APY; - // b) scatterchart section - deltaDR = p.return - meanDR; - meanDR += deltaDR / count; - delta2DR = p.return - meanDR; - mean2DR += deltaDR * delta2DR; - productDR = (1 + p.return) * productDR; + // we only update if the last pool value is from that day (otherwise its stale and we don't + // want to increment/update but instead are just going to keep the existing values) + if (p.timestamp >= currentDay) { + // update using welford algo + count += 1; + // a) ML section + deltaAPY = p.apy - meanAPY; + meanAPY += deltaAPY / count; + delta2APY = p.apy - meanAPY; + mean2APY += deltaAPY * delta2APY; + // b) scatterchart section + deltaDR = p.return - meanDR; + meanDR += deltaDR / count; + delta2DR = p.return - meanDR; + mean2DR += deltaDR * delta2DR; + productDR = (1 + p.return) * productDR; + } } else { // in case of a new pool -> boostrap db values count = 1; @@ -45,7 +51,7 @@ module.exports.welfordUpdate = (pools, stats) => { } payload.push({ - pool: p.pool, + configID: p.configID, count, meanAPY, mean2APY,