diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index c728d134..00de6ba6 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -18,6 +18,7 @@ jobs: - apps/twap - apps/notification-producer - apps/telegram + - apps/token-list-updater steps: - name: Checkout uses: actions/checkout@v3 diff --git a/apps/api/src/app/inversify.config.ts b/apps/api/src/app/inversify.config.ts index de6a4cce..bce08043 100644 --- a/apps/api/src/app/inversify.config.ts +++ b/apps/api/src/app/inversify.config.ts @@ -4,6 +4,7 @@ import { getPushNotificationsRepository, getPushSubscriptionsRepository, getSimulationRepository, + getTokenCacheRepository, getTokenBalancesRepository, getTokenHolderRepository, getUsdRepository, @@ -36,6 +37,8 @@ import { tenderlyRepositorySymbol, TokenBalancesRepository, tokenBalancesRepositorySymbol, + TokenCacheRepository, + tokenCacheRepositorySymbol, TokenHolderRepository, tokenHolderRepositorySymbol, UsdRepository, @@ -55,6 +58,7 @@ function getApiContainer(): Container { const cacheRepository = getCacheRepository(); const erc20Repository = getErc20Repository(cacheRepository); const simulationRepository = getSimulationRepository(); + const tokenCacheRepository = getTokenCacheRepository(); const tokenHolderRepository = getTokenHolderRepository(cacheRepository); const tokenBalancesRepository = getTokenBalancesRepository(); const usdRepository = getUsdRepository(cacheRepository, erc20Repository); @@ -89,6 +93,10 @@ function getApiContainer(): Container { .bind(tokenHolderRepositorySymbol) .toConstantValue(tokenHolderRepository); + apiContainer + .bind(tokenCacheRepositorySymbol) + .toConstantValue(tokenCacheRepository); + apiContainer .bind(tokenBalancesRepositorySymbol) .toConstantValue(tokenBalancesRepository); diff --git a/apps/api/src/app/routes/__chainId/tokens/search/__searchParam/index.ts b/apps/api/src/app/routes/__chainId/tokens/search/__searchParam/index.ts new file mode 100644 index 00000000..5fd02d48 
--- /dev/null +++ b/apps/api/src/app/routes/__chainId/tokens/search/__searchParam/index.ts @@ -0,0 +1,49 @@ +import { getTokenListBySearchParam } from '@cowprotocol/repositories'; +import { FastifyPluginAsync } from 'fastify'; +import { + errorSchema, + ErrorSchema, + paramsSchema, + RouteSchema, + successSchema, + SuccessSchema, +} from './schemas'; + +const root: FastifyPluginAsync = async (fastify): Promise => { + // example: http://localhost:3010/1/tokens/search/USDC + fastify.get<{ + Params: RouteSchema; + Reply: SuccessSchema | ErrorSchema; + }>( + '/', + { + schema: { + params: paramsSchema, + response: { + '2XX': successSchema, + '500': errorSchema, + }, + }, + }, + async function (request, reply) { + const { chainId, searchParam } = request.params; + + try { + const tokens = await getTokenListBySearchParam(chainId, searchParam); + + fastify.log.info( + `Token search for "${searchParam}" on chain ${chainId}: ${tokens.length} tokens found` + ); + + reply.send(tokens); + } catch (error) { + fastify.log.error('Error searching tokens:', error); + reply.code(500).send({ + message: 'Internal server error while searching tokens', + }); + } + } + ); +}; + +export default root; diff --git a/apps/api/src/app/routes/__chainId/tokens/search/__searchParam/schemas.ts b/apps/api/src/app/routes/__chainId/tokens/search/__searchParam/schemas.ts new file mode 100644 index 00000000..0c66bb94 --- /dev/null +++ b/apps/api/src/app/routes/__chainId/tokens/search/__searchParam/schemas.ts @@ -0,0 +1,81 @@ +import { FromSchema, JSONSchema } from 'json-schema-to-ts'; +import { SupportedChainIdSchema } from '../../../../../schemas'; +import { AllChainIds } from '@cowprotocol/shared'; + +export const paramsSchema = { + type: 'object', + required: ['chainId', 'searchParam'], + additionalProperties: false, + properties: { + chainId: SupportedChainIdSchema, + searchParam: { + title: 'Search Parameter', + description: 'Token search parameter (name, symbol, or address)', + type: 'string', + 
minLength: 3, + maxLength: 100, + }, + }, +} as const satisfies JSONSchema; + +export const successSchema = { + type: 'array', + items: { + type: 'object', + required: ['chainId', 'address', 'name', 'symbol', 'decimals', 'logoURI'], + additionalProperties: false, + properties: { + chainId: { + title: 'Chain ID', + description: 'Blockchain network identifier.', + type: 'integer', + enum: AllChainIds, + }, + address: { + title: 'Token Address', + description: 'Contract address of the token.', + type: 'string', + pattern: '^0x[a-fA-F0-9]{40}$', + }, + name: { + title: 'Name', + description: 'Full name of the token.', + type: 'string', + }, + symbol: { + title: 'Symbol', + description: 'Token symbol/ticker.', + type: 'string', + }, + decimals: { + title: 'Decimals', + description: 'Number of decimal places for the token.', + type: 'integer', + minimum: 0, + maximum: 18, + }, + logoURI: { + title: 'Logo URI', + description: 'URI to the token logo.', + type: 'string', + }, + }, + }, +} as const satisfies JSONSchema; + +export const errorSchema = { + type: 'object', + required: ['message'], + additionalProperties: false, + properties: { + message: { + title: 'Message', + description: 'Message describing the error.', + type: 'string', + }, + }, +} as const satisfies JSONSchema; + +export type RouteSchema = FromSchema<typeof paramsSchema>; +export type SuccessSchema = FromSchema<typeof successSchema>; +export type ErrorSchema = FromSchema<typeof errorSchema>; diff --git a/apps/api/src/main.ts b/apps/api/src/main.ts index 797046e2..8b22a6b5 100644 --- a/apps/api/src/main.ts +++ b/apps/api/src/main.ts @@ -1,6 +1,8 @@ import Fastify from 'fastify'; import { app } from './app/app'; import { logger } from '@cowprotocol/shared'; +import { getTokenCacheRepository } from '@cowprotocol/services'; +import { setTokenCacheRepository } from '@cowprotocol/repositories'; const host = process.env.HOST ?? 'localhost'; const port = process.env.PORT ?
Number(process.env.PORT) : 3001; @@ -10,6 +12,9 @@ export const server = Fastify({ logger, }); +const tokenCacheRepository = getTokenCacheRepository(); +setTokenCacheRepository(tokenCacheRepository); + // Register your application as a normal plugin. server.register(app); diff --git a/apps/notification-producer/jest.config.ts b/apps/notification-producer/jest.config.ts index 4b6ecb81..940f8299 100644 --- a/apps/notification-producer/jest.config.ts +++ b/apps/notification-producer/jest.config.ts @@ -6,6 +6,9 @@ export default { transform: { '^.+\\.[tj]s$': ['ts-jest', { tsconfig: '/tsconfig.spec.json' }], }, + transformIgnorePatterns: [ + 'node_modules/(?!(node-fetch|data-uri-to-buffer|fetch-blob|formdata-polyfill|@cowprotocol|@uniswap)/)', + ], moduleFileExtensions: ['ts', 'js', 'html'], coverageDirectory: '../../coverage/apps/notification-producer', setupFilesAfterEnv: ['../../jest.setup.ts'], diff --git a/apps/token-list-updater-e2e/.eslintrc.json b/apps/token-list-updater-e2e/.eslintrc.json new file mode 100644 index 00000000..8852e20b --- /dev/null +++ b/apps/token-list-updater-e2e/.eslintrc.json @@ -0,0 +1,10 @@ +{ + "extends": ["../../.eslintrc.json"], + "ignorePatterns": ["!**/*"], + "overrides": [ + { + "files": ["*.ts", "*.tsx", "*.js", "*.jsx"], + "rules": {} + } + ] +} diff --git a/apps/token-list-updater-e2e/jest.config.ts b/apps/token-list-updater-e2e/jest.config.ts new file mode 100644 index 00000000..d252edfd --- /dev/null +++ b/apps/token-list-updater-e2e/jest.config.ts @@ -0,0 +1,17 @@ +/* eslint-disable */ +export default { + displayName: 'token-list-updater-e2e', + preset: '../..//jest.preset.js', + setupFiles: ['/src/test-setup.ts'], + testEnvironment: 'node', + transform: { + '^.+\\.[tj]s$': [ + 'ts-jest', + { + tsconfig: '/tsconfig.spec.json', + }, + ], + }, + moduleFileExtensions: ['ts', 'js', 'html'], + coverageDirectory: '../..//coverage/token-list-updater-e2e', +}; diff --git a/apps/token-list-updater-e2e/project.json 
b/apps/token-list-updater-e2e/project.json new file mode 100644 index 00000000..a87e7031 --- /dev/null +++ b/apps/token-list-updater-e2e/project.json @@ -0,0 +1,22 @@ +{ + "name": "token-list-updater-e2e", + "$schema": "../../node_modules/nx/schemas/project-schema.json", + "implicitDependencies": ["token-list-updater"], + "targets": { + "e2e": { + "executor": "@nx/jest:jest", + "outputs": ["{workspaceRoot}/coverage/{e2eProjectRoot}"], + "options": { + "jestConfig": "apps/token-list-updater-e2e/jest.config.ts", + "passWithNoTests": true + } + }, + "lint": { + "executor": "@nx/linter:eslint", + "outputs": ["{options.outputFile}"], + "options": { + "lintFilePatterns": ["apps/token-list-updater-e2e/**/*.{js,ts}"] + } + } + } +} diff --git a/apps/token-list-updater-e2e/src/test-setup.ts b/apps/token-list-updater-e2e/src/test-setup.ts new file mode 100644 index 00000000..8337712e --- /dev/null +++ b/apps/token-list-updater-e2e/src/test-setup.ts @@ -0,0 +1 @@ +// diff --git a/apps/token-list-updater-e2e/src/token-list-updater/token-list-updater.spec.ts b/apps/token-list-updater-e2e/src/token-list-updater/token-list-updater.spec.ts new file mode 100644 index 00000000..500b4272 --- /dev/null +++ b/apps/token-list-updater-e2e/src/token-list-updater/token-list-updater.spec.ts @@ -0,0 +1,13 @@ +import { execSync } from 'child_process'; +import { join } from 'path'; + +describe('CLI tests', () => { + // TODO: implement properly this test + it.skip('should print a message', () => { + const cliPath = join(process.cwd(), 'dist/apps/token-list-updater'); + + const output = execSync(`node ${cliPath}`).toString(); + + expect(output).toMatch(/Hello World/); + }); +}); diff --git a/apps/token-list-updater-e2e/tsconfig.json b/apps/token-list-updater-e2e/tsconfig.json new file mode 100644 index 00000000..ed633e1d --- /dev/null +++ b/apps/token-list-updater-e2e/tsconfig.json @@ -0,0 +1,13 @@ +{ + "extends": "../../tsconfig.base.json", + "files": [], + "include": [], + "references": [ + { 
+ "path": "./tsconfig.spec.json" + } + ], + "compilerOptions": { + "esModuleInterop": true + } +} diff --git a/apps/token-list-updater-e2e/tsconfig.spec.json b/apps/token-list-updater-e2e/tsconfig.spec.json new file mode 100644 index 00000000..2a5e4dff --- /dev/null +++ b/apps/token-list-updater-e2e/tsconfig.spec.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "../..//dist/out-tsc", + "module": "commonjs", + "types": ["jest", "node"] + }, + "include": ["jest.config.ts", "src/**/*.ts"] +} diff --git a/apps/token-list-updater/.eslintrc.json b/apps/token-list-updater/.eslintrc.json new file mode 100644 index 00000000..9d9c0db5 --- /dev/null +++ b/apps/token-list-updater/.eslintrc.json @@ -0,0 +1,18 @@ +{ + "extends": ["../../.eslintrc.json"], + "ignorePatterns": ["!**/*"], + "overrides": [ + { + "files": ["*.ts", "*.tsx", "*.js", "*.jsx"], + "rules": {} + }, + { + "files": ["*.ts", "*.tsx"], + "rules": {} + }, + { + "files": ["*.js", "*.jsx"], + "rules": {} + } + ] +} diff --git a/apps/token-list-updater/Dockerfile b/apps/token-list-updater/Dockerfile new file mode 100644 index 00000000..ce76bae0 --- /dev/null +++ b/apps/token-list-updater/Dockerfile @@ -0,0 +1,24 @@ +# This file is generated by Nx. +# +# Build the docker image with `npx nx docker-build token-list-updater`. +# Tip: Modify "docker-build" options in project.json to change docker build args. +# +# Run the container with `docker run -p 3000:3000 -t token-list-updater`. +FROM docker.io/node:lts-alpine + +ENV HOST=0.0.0.0 +ENV PORT=3000 + +WORKDIR /app + +RUN addgroup --system token-list-updater && \ + adduser --system -G token-list-updater token-list-updater + +COPY dist/apps/token-list-updater token-list-updater +RUN chown -R token-list-updater:token-list-updater . + +# You can remove this install step if you build with `--bundle` option. +# The bundled output will include external dependencies. 
+RUN npm --prefix token-list-updater --omit=dev -f install + +CMD [ "node", "token-list-updater/main.js" ] diff --git a/apps/token-list-updater/jest.config.ts b/apps/token-list-updater/jest.config.ts new file mode 100644 index 00000000..eca82782 --- /dev/null +++ b/apps/token-list-updater/jest.config.ts @@ -0,0 +1,11 @@ +/* eslint-disable */ +export default { + displayName: 'token-list-updater', + preset: '../../jest.preset.js', + testEnvironment: 'node', + transform: { + '^.+\\.[tj]s$': ['ts-jest', { tsconfig: '/tsconfig.spec.json' }], + }, + moduleFileExtensions: ['ts', 'js', 'html'], + coverageDirectory: '../../coverage/apps/token-list-updater', +}; diff --git a/apps/token-list-updater/project.json b/apps/token-list-updater/project.json new file mode 100644 index 00000000..76110802 --- /dev/null +++ b/apps/token-list-updater/project.json @@ -0,0 +1,82 @@ +{ + "name": "token-list-updater", + "$schema": "../../node_modules/nx/schemas/project-schema.json", + "sourceRoot": "apps/token-list-updater/src", + "projectType": "application", + "targets": { + "build": { + "executor": "@nx/esbuild:esbuild", + "outputs": ["{options.outputPath}"], + "defaultConfiguration": "production", + "options": { + "platform": "node", + "outputPath": "dist/apps/token-list-updater", + "format": ["cjs"], + "bundle": false, + "main": "apps/token-list-updater/src/main.ts", + "tsConfig": "apps/token-list-updater/tsconfig.app.json", + "assets": ["apps/token-list-updater/src/assets"], + "generatePackageJson": true, + "esbuildOptions": { + "sourcemap": true, + "outExtension": { + ".js": ".js" + } + } + }, + "configurations": { + "development": {}, + "production": { + "generateLockfile": true, + "esbuildOptions": { + "sourcemap": false, + "outExtension": { + ".js": ".js" + } + } + } + } + }, + "start": { + "executor": "@nx/js:node", + "defaultConfiguration": "development", + "options": { + "buildTarget": "token-list-updater:build" + }, + "configurations": { + "development": { + "buildTarget": 
"token-list-updater:build:development" + }, + "production": { + "buildTarget": "token-list-updater:build:production" + } + } + }, + "lint": { + "executor": "@nx/linter:eslint", + "outputs": ["{options.outputFile}"], + "options": { + "lintFilePatterns": ["apps/token-list-updater/**/*.ts"] + } + }, + "test": { + "executor": "@nx/jest:jest", + "outputs": ["{workspaceRoot}/coverage/{projectRoot}"], + "options": { + "jestConfig": "apps/token-list-updater/jest.config.ts", + "passWithNoTests": true + }, + "configurations": { + "ci": { + "ci": true, + "codeCoverage": true + } + } + }, + "docker-build": { + "dependsOn": ["build"], + "command": "docker build -f apps/token-list-updater/Dockerfile . -t token-list-updater" + } + }, + "tags": [] +} diff --git a/apps/token-list-updater/src/assets/.gitkeep b/apps/token-list-updater/src/assets/.gitkeep new file mode 100644 index 00000000..e69de29b diff --git a/apps/token-list-updater/src/main.ts b/apps/token-list-updater/src/main.ts new file mode 100644 index 00000000..50986f3d --- /dev/null +++ b/apps/token-list-updater/src/main.ts @@ -0,0 +1,103 @@ +import 'reflect-metadata'; +import ms from 'ms'; + +import { + ALL_SUPPORTED_CHAIN_IDS, + SupportedChainId, +} from '@cowprotocol/cow-sdk'; +import { logger } from '@cowprotocol/shared'; + +import { getTokenCacheRepository } from '@cowprotocol/services'; + +import { Runnable } from '../types'; +import { TokenListUpdater } from './updater/updater'; + +const TIMEOUT_STOP_UPDATERS = ms(`30s`); + +let shuttingDown = false; +/** + * Main loop: Run and re-attempt on error + */ +async function mainLoop() { + const chainIds = ALL_SUPPORTED_CHAIN_IDS; + logger.info( + `[tokenlist-updater:main] Start tokenlist updater for networks: ${chainIds.join( + ', ' + )}` + ); + + const tokenCacheRepository = getTokenCacheRepository(); + + const updaters: Runnable[] = [ + // TokenListUpdater: update token list for each chain + ...chainIds + .filter((chainId) => chainId !== SupportedChainId.SEPOLIA) + 
.map((chainId, index) => { + return new TokenListUpdater({ + tokenCacheRepository, + chainId, + delayInMilliseconds: index * 20 * 1000, // every 20 seconds + }); + }), + ]; + + // Run all updaters in the background + const promises = updaters.map((updater) => updater.start()); + + // Wrap all updaters in a promise + const updatersPromise = Promise.all(promises); + + // Cleanup resources on application termination + const shutdown = () => { + gracefulShutdown(updaters, updatersPromise).catch((error) => { + logger.error(error, 'Error during shutdown'); + process.exit(1); + }); + }; + + process.on('SIGTERM', shutdown); + process.on('SIGINT', shutdown); + + await updatersPromise; +} + +async function gracefulShutdown( + updaters: Runnable[], + updatersPromise: Promise +) { + if (shuttingDown) return; + shuttingDown = true; + + // Command all producers to stop + logger.info(`Stopping ${updaters.length} updaters...`); + + const stopUpdatersPromise = Promise.all( + updaters.map((updater) => updater.stop()) + ); + + const timeoutInGracePeriod = new Promise((resolve) => + setTimeout(() => { + logger.info( + `Some of the updaters did not stop in time (${ + TIMEOUT_STOP_UPDATERS / 1000 + }s), forcing exit` + ); + resolve(true); + }, TIMEOUT_STOP_UPDATERS) + ); + + await Promise.race([ + // Wait for all producers to actually stop + stopUpdatersPromise + .then(() => updatersPromise) + .then(() => logger.info('All updaters have been stopped')), + // Give some grace period (otherwise timeout) + timeoutInGracePeriod, + ]); + + logger.info('Bye!'); + process.exit(0); +} + +// Start the main loop +mainLoop().catch((error) => logger.error(error, 'Unhandled error in updater')); diff --git a/apps/token-list-updater/src/updater/updater.ts b/apps/token-list-updater/src/updater/updater.ts new file mode 100644 index 00000000..d153c371 --- /dev/null +++ b/apps/token-list-updater/src/updater/updater.ts @@ -0,0 +1,59 @@ +import { initTokenList, TokenCacheRepository } from 
'@cowprotocol/repositories'; +import { logger, doForever, sleep } from '@cowprotocol/shared'; +import { Runnable } from '../../types'; + +const WAIT_TIME = 1000 * 60 * 60 * 6; // 6 hours + +export type TokenListUpdaterProps = { + chainId: number; + tokenCacheRepository: TokenCacheRepository; + delayInMilliseconds?: number; +}; + +export class TokenListUpdater implements Runnable { + isStopping = false; + + constructor(private props: TokenListUpdaterProps) {} + + /** + * Main loop: Run the token list updater. This method runs indefinitely, + * updating the token list for a specific chain. + * + * The method should not throw or finish. + */ + async start(): Promise<void> { + await doForever({ + name: `TokenListUpdater for chain id: ${this.props.chainId}`, + callback: async (stop) => { + if (this.isStopping) { + stop(); + return; + } + logger.info(`Updating token list for chain id: ${this.props.chainId}`); + await (async () => { + await sleep(this.props.delayInMilliseconds || 0); + await this.updateTokenList(); + })(); + }, + waitTimeMilliseconds: WAIT_TIME, + logger, + }); + + logger.info( + `TokenListUpdater for chain id: ${this.props.chainId}`, + 'stopped' + ); + } + + async stop(): Promise<void> { + this.isStopping = true; + } + + async updateTokenList(): Promise<void> { + await initTokenList(this.props.chainId, this.props.tokenCacheRepository); + + logger.debug( + `[tokenlist-updater:main] Token list updated for chain id: ${this.props.chainId}` + ); + } +} diff --git a/apps/token-list-updater/tsconfig.app.json b/apps/token-list-updater/tsconfig.app.json new file mode 100644 index 00000000..f5e2e085 --- /dev/null +++ b/apps/token-list-updater/tsconfig.app.json @@ -0,0 +1,10 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "../../dist/out-tsc", + "module": "commonjs", + "types": ["node"] + }, + "exclude": ["jest.config.ts", "src/**/*.spec.ts", "src/**/*.test.ts"], + "include": ["src/**/*.ts"] +} diff --git a/apps/token-list-updater/tsconfig.json
b/apps/token-list-updater/tsconfig.json new file mode 100644 index 00000000..c1e2dd4e --- /dev/null +++ b/apps/token-list-updater/tsconfig.json @@ -0,0 +1,16 @@ +{ + "extends": "../../tsconfig.base.json", + "files": [], + "include": [], + "references": [ + { + "path": "./tsconfig.app.json" + }, + { + "path": "./tsconfig.spec.json" + } + ], + "compilerOptions": { + "esModuleInterop": true + } +} diff --git a/apps/token-list-updater/tsconfig.spec.json b/apps/token-list-updater/tsconfig.spec.json new file mode 100644 index 00000000..9b2a121d --- /dev/null +++ b/apps/token-list-updater/tsconfig.spec.json @@ -0,0 +1,14 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "outDir": "../../dist/out-tsc", + "module": "commonjs", + "types": ["jest", "node"] + }, + "include": [ + "jest.config.ts", + "src/**/*.test.ts", + "src/**/*.spec.ts", + "src/**/*.d.ts" + ] +} diff --git a/apps/token-list-updater/types.ts b/apps/token-list-updater/types.ts new file mode 100644 index 00000000..c4fd0b7b --- /dev/null +++ b/apps/token-list-updater/types.ts @@ -0,0 +1,14 @@ +/** + * A Runnable program + */ +export interface Runnable { + /** + * Start the program, this method should not throw or finish. + */ + start(): Promise<void>; + + /** + * Stop the program, this method should not throw or finish. + */ + stop(): Promise<void>; +} diff --git a/docker-compose.yml b/docker-compose.yml index 8b372b18..2dc02500 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -157,6 +157,19 @@ services: - queue # condition: service_healthy + token-list-updater: + container_name: token-list-updater + build: + context: .
+ dockerfile: ./apps/token-list-updater/Dockerfile + restart: unless-stopped + env_file: .env.docker + networks: + - bff-network + depends_on: + - api + - redis + volumes: data-postgres: data-rabbitmq: diff --git a/libs/repositories/jest.config.ts b/libs/repositories/jest.config.ts index 8ef20533..24375089 100644 --- a/libs/repositories/jest.config.ts +++ b/libs/repositories/jest.config.ts @@ -6,6 +6,9 @@ export default { transform: { '^.+\\.[tj]s$': ['ts-jest', { tsconfig: '/tsconfig.spec.json' }], }, + transformIgnorePatterns: [ + 'node_modules/(?!(node-fetch|data-uri-to-buffer|fetch-blob|formdata-polyfill|@cowprotocol|@uniswap)/)', + ], moduleFileExtensions: ['ts', 'js', 'html'], coverageDirectory: '../../coverage/libs/repositories', setupFilesAfterEnv: ['../../jest.setup.ts'], diff --git a/libs/repositories/src/datasources/tokenSearch/getTokensByChain.ts b/libs/repositories/src/datasources/tokenSearch/getTokensByChain.ts new file mode 100644 index 00000000..6f048bda --- /dev/null +++ b/libs/repositories/src/datasources/tokenSearch/getTokensByChain.ts @@ -0,0 +1,42 @@ +import { SupportedChainId } from '@cowprotocol/cow-sdk'; +import { logger } from '@cowprotocol/shared'; +import { TokenFromAPI } from './types'; + +async function fetchTokensFromCoinGecko( + chainName: string, + chainId: SupportedChainId +): Promise { + const tokenSource = `https://tokens.coingecko.com/${chainName}/all.json`; + + logger.info(`Fetching tokens for ${chainName}`); + + const response = await fetch(tokenSource); + + if (!response.ok) { + throw new Error( + `Failed to fetch tokens from ${tokenSource}: ${response.status} ${response.statusText}` + ); + } + + const data = await response.json(); + + if (!data.tokens || !Array.isArray(data.tokens)) { + throw new Error( + `Invalid token list format from ${tokenSource}: missing or invalid tokens array` + ); + } + + logger.info(`Fetched ${data.tokens.length} tokens for ${chainName}`); + + return data.tokens.map((token: TokenFromAPI) => ({ + 
...token, + chainId, + })); +} + +export async function getTokensByChainName( + chainName: string, + chainId: SupportedChainId +): Promise { + return fetchTokensFromCoinGecko(chainName, chainId); +} diff --git a/libs/repositories/src/datasources/tokenSearch/index.ts b/libs/repositories/src/datasources/tokenSearch/index.ts new file mode 100644 index 00000000..559e7899 --- /dev/null +++ b/libs/repositories/src/datasources/tokenSearch/index.ts @@ -0,0 +1 @@ +export * from './tokenList'; diff --git a/libs/repositories/src/datasources/tokenSearch/tokenList.test.ts b/libs/repositories/src/datasources/tokenSearch/tokenList.test.ts new file mode 100644 index 00000000..8ab5b02c --- /dev/null +++ b/libs/repositories/src/datasources/tokenSearch/tokenList.test.ts @@ -0,0 +1,331 @@ +import { SupportedChainId } from '@cowprotocol/cow-sdk'; +import { TokenCacheRepository } from '../../repos/TokenCacheRepository'; +import { + getTokenListBySearchParam, + initTokenList, + setTokenCacheRepository, +} from './tokenList'; +import { TokenFromAPI } from './types'; + +const mockTokenCacheRepository: jest.Mocked = { + initTokenList: jest.fn(), + getTokenList: jest.fn(), + searchTokens: jest.fn(), + clearTokenList: jest.fn(), +}; + +const mockFetch = jest.fn(); +global.fetch = mockFetch; + +describe('tokenList', () => { + const mockTokensResponse = { + tokens: [ + { + chainId: SupportedChainId.MAINNET, + address: '0xa0b86a33e6441d08b8b3f4b89b0e1e9b1b1c1d1e', + name: 'Uniswap', + symbol: 'UNI', + decimals: 18, + logoURI: 'https://example.com/uni.png', + }, + { + chainId: SupportedChainId.MAINNET, + address: '0xdac17f958d2ee523a2206206994597c13d831ec7', + name: 'Tether USD', + symbol: 'USDT', + decimals: 6, + logoURI: 'https://example.com/usdt.png', + }, + ] as TokenFromAPI[], + }; + + beforeEach(() => { + jest.clearAllMocks(); + mockFetch.mockClear(); + + setTokenCacheRepository(mockTokenCacheRepository); + + Object.values(mockTokenCacheRepository).forEach((mock) => mock.mockReset()); + }); 
+ + describe('initTokenList', () => { + it('should fetch tokens and cache them when not already cached', async () => { + // Setup mock responses + mockFetch.mockResolvedValueOnce({ + ok: true, + json: () => Promise.resolve(mockTokensResponse), + }); + + await initTokenList(SupportedChainId.MAINNET); + + expect(mockFetch).toHaveBeenCalledTimes(1); + expect(mockFetch).toHaveBeenCalledWith( + 'https://tokens.coingecko.com/ethereum/all.json' + ); + expect(mockTokenCacheRepository.initTokenList).toHaveBeenCalledWith( + SupportedChainId.MAINNET, + expect.arrayContaining([ + expect.objectContaining({ + chainId: SupportedChainId.MAINNET, + address: '0xa0b86a33e6441d08b8b3f4b89b0e1e9b1b1c1d1e', + name: 'Uniswap', + symbol: 'UNI', + }), + ]) + ); + }); + + it('should handle fetch errors gracefully', async () => { + mockFetch.mockRejectedValueOnce(new Error('Network error')); + + await expect(initTokenList(SupportedChainId.MAINNET)).rejects.toThrow( + 'Network error' + ); + + expect(mockTokenCacheRepository.initTokenList).not.toHaveBeenCalled(); + }); + + it('should handle invalid JSON responses', async () => { + mockFetch.mockResolvedValueOnce({ + ok: true, + json: () => Promise.reject(new Error('Invalid JSON')), + }); + + await expect(initTokenList(SupportedChainId.MAINNET)).rejects.toThrow( + 'Invalid JSON' + ); + + expect(mockTokenCacheRepository.initTokenList).not.toHaveBeenCalled(); + }); + + it('should handle responses without tokens array', async () => { + mockFetch.mockResolvedValueOnce({ + ok: true, + json: () => Promise.resolve({ invalidStructure: true }), + }); + + await expect(initTokenList(SupportedChainId.MAINNET)).rejects.toThrow( + 'Invalid token list format from https://tokens.coingecko.com/ethereum/all.json: missing or invalid tokens array' + ); + + expect(mockTokenCacheRepository.initTokenList).not.toHaveBeenCalled(); + }); + + it('should handle HTTP errors', async () => { + mockFetch.mockResolvedValueOnce({ + ok: false, + status: 404, + statusText: 'Not 
Found', + }); + + await expect(initTokenList(SupportedChainId.MAINNET)).rejects.toThrow( + 'Failed to fetch tokens from https://tokens.coingecko.com/ethereum/all.json: 404 Not Found' + ); + + expect(mockTokenCacheRepository.initTokenList).not.toHaveBeenCalled(); + }); + + it('should cache empty token arrays when fetch returns no tokens', async () => { + mockFetch.mockResolvedValue({ + ok: true, + json: () => Promise.resolve({ tokens: [] }), + }); + + await initTokenList(SupportedChainId.MAINNET); + + expect(mockTokenCacheRepository.initTokenList).toHaveBeenCalledWith( + SupportedChainId.MAINNET, + [] + ); + }); + }); + + describe('getTokenListBySearchParam', () => { + const mockSearchResults: TokenFromAPI[] = [ + { + chainId: SupportedChainId.MAINNET, + address: '0xa0b86a33e6441d08b8b3f4b89b0e1e9b1b1c1d1e', + name: 'Uniswap', + symbol: 'UNI', + decimals: 18, + logoURI: 'https://example.com/uni.png', + }, + ]; + + it('should delegate search to TokenCacheRepository', async () => { + mockTokenCacheRepository.searchTokens.mockResolvedValue( + mockSearchResults + ); + + const result = await getTokenListBySearchParam( + SupportedChainId.MAINNET, + 'uni' + ); + + expect(mockTokenCacheRepository.searchTokens).toHaveBeenCalledWith( + SupportedChainId.MAINNET, + 'uni' + ); + expect(result).toEqual(mockSearchResults); + }); + + it('should return results from cache repository', async () => { + mockTokenCacheRepository.searchTokens.mockResolvedValue( + mockSearchResults + ); + + const result = await getTokenListBySearchParam( + SupportedChainId.MAINNET, + 'test' + ); + + expect(result).toEqual(mockSearchResults); + }); + + it('should handle empty search results', async () => { + mockTokenCacheRepository.searchTokens.mockResolvedValue([]); + + const result = await getTokenListBySearchParam( + SupportedChainId.MAINNET, + 'nonexistent' + ); + + expect(result).toEqual([]); + }); + + it('should handle different chain IDs', async () => { + 
mockTokenCacheRepository.searchTokens.mockResolvedValue([]); + + await getTokenListBySearchParam(SupportedChainId.GNOSIS_CHAIN, 'wxdai'); + + expect(mockTokenCacheRepository.searchTokens).toHaveBeenCalledWith( + SupportedChainId.GNOSIS_CHAIN, + 'wxdai' + ); + }); + + it('should handle empty search parameters', async () => { + mockTokenCacheRepository.searchTokens.mockResolvedValue([]); + + await getTokenListBySearchParam(SupportedChainId.MAINNET, ''); + + expect(mockTokenCacheRepository.searchTokens).toHaveBeenCalledWith( + SupportedChainId.MAINNET, + '' + ); + }); + + it('should handle special characters in search parameters', async () => { + const searchParam = '0xabc123!@#$%^&*()'; + mockTokenCacheRepository.searchTokens.mockResolvedValue([]); + + await getTokenListBySearchParam(SupportedChainId.MAINNET, searchParam); + + expect(mockTokenCacheRepository.searchTokens).toHaveBeenCalledWith( + SupportedChainId.MAINNET, + searchParam + ); + }); + + it('should handle very long search parameters', async () => { + const longSearchParam = 'a'.repeat(1000); + mockTokenCacheRepository.searchTokens.mockResolvedValue([]); + + await getTokenListBySearchParam( + SupportedChainId.MAINNET, + longSearchParam + ); + + expect(mockTokenCacheRepository.searchTokens).toHaveBeenCalledWith( + SupportedChainId.MAINNET, + longSearchParam + ); + }); + }); + + describe('integration with real token data structure', () => { + it('should properly process token data with all required fields', async () => { + const completeTokenResponse = { + tokens: [ + { + address: '0xa0b86a33e6441d08b8b3f4b89b0e1e9b1b1c1d1e', + name: 'Uniswap', + symbol: 'UNI', + decimals: 18, + logoURI: 'https://example.com/uni.png', + }, + { + address: '0xdac17f958d2ee523a2206206994597c13d831ec7', + name: 'Tether USD', + symbol: 'USDT', + decimals: 6, + logoURI: 'https://example.com/usdt.png', + }, + ], + }; + + mockFetch.mockResolvedValue({ + ok: true, + json: () => Promise.resolve(completeTokenResponse), + }); + + await 
initTokenList(SupportedChainId.MAINNET); + + expect(mockTokenCacheRepository.initTokenList).toHaveBeenCalledWith( + SupportedChainId.MAINNET, + expect.arrayContaining([ + expect.objectContaining({ + chainId: SupportedChainId.MAINNET, + address: '0xa0b86a33e6441d08b8b3f4b89b0e1e9b1b1c1d1e', + name: 'Uniswap', + symbol: 'UNI', + decimals: 18, + logoURI: 'https://example.com/uni.png', + }), + expect.objectContaining({ + chainId: SupportedChainId.MAINNET, + address: '0xdac17f958d2ee523a2206206994597c13d831ec7', + name: 'Tether USD', + symbol: 'USDT', + decimals: 6, + logoURI: 'https://example.com/usdt.png', + }), + ]) + ); + }); + + it('should handle token data with optional fields missing', async () => { + const minimalTokenResponse = { + tokens: [ + { + address: '0xa0b86a33e6441d08b8b3f4b89b0e1e9b1b1c1d1e', + name: 'Minimal Token', + symbol: 'MIN', + decimals: 18, + // logoURI is optional and missing + }, + ], + }; + + mockFetch.mockResolvedValue({ + ok: true, + json: () => Promise.resolve(minimalTokenResponse), + }); + + await initTokenList(SupportedChainId.MAINNET); + + expect(mockTokenCacheRepository.initTokenList).toHaveBeenCalledWith( + SupportedChainId.MAINNET, + expect.arrayContaining([ + expect.objectContaining({ + chainId: SupportedChainId.MAINNET, + address: '0xa0b86a33e6441d08b8b3f4b89b0e1e9b1b1c1d1e', + name: 'Minimal Token', + symbol: 'MIN', + decimals: 18, + }), + ]) + ); + }); + }); +}); diff --git a/libs/repositories/src/datasources/tokenSearch/tokenList.ts b/libs/repositories/src/datasources/tokenSearch/tokenList.ts new file mode 100644 index 00000000..1fa8d8b7 --- /dev/null +++ b/libs/repositories/src/datasources/tokenSearch/tokenList.ts @@ -0,0 +1,55 @@ +import { SupportedChainId } from '@cowprotocol/cow-sdk'; +import { logger } from '@cowprotocol/shared'; +import { TokenCacheRepository } from '../../repos/TokenCacheRepository'; +import { SUPPORTED_COINGECKO_PLATFORMS } from '../coingecko'; +import { getTokensByChainName } from 
'./getTokensByChain';
import { TokenFromAPI } from './types';

// Module-level singleton; used by the functions below whenever the caller
// does not pass an explicit repository.
let tokenCacheRepository: TokenCacheRepository | null = null;

/**
 * Registers the module-level TokenCacheRepository singleton consumed by
 * initTokenList() and getTokenListBySearchParam().
 */
export function setTokenCacheRepository(
  repository: TokenCacheRepository
): void {
  tokenCacheRepository = repository;
}

/**
 * Fetches the token list for a chain and stores it in the cache repository.
 *
 * @param chainId - Chain to initialize the token list for
 * @param cacheRepository - Optional override; falls back to the singleton
 *   registered via setTokenCacheRepository()
 * @throws When no repository is configured, or the chain has no CoinGecko
 *   platform mapping
 */
export async function initTokenList(
  chainId: SupportedChainId,
  cacheRepository?: TokenCacheRepository
): Promise<void> {
  const repo = cacheRepository ?? tokenCacheRepository;

  if (!repo) {
    throw new Error(
      'TokenCacheRepository not configured. Please set it using setTokenCacheRepository()'
    );
  }

  // Resolve the CoinGecko platform name for this chain; chains without a
  // mapping cannot be initialized.
  const chainName = SUPPORTED_COINGECKO_PLATFORMS[chainId];

  if (!chainName) {
    throw new Error(`Chain ${chainId} is not supported by CoinGecko`);
  }

  logger.info(`Initializing token list for chain ${chainId}`);
  const tokens = await getTokensByChainName(chainName, chainId);

  await repo.initTokenList(chainId, tokens);
  logger.info(`Cached ${tokens.length} tokens for chain ${chainId}`);
}

/**
 * Searches the cached tokens of a chain by name, symbol, or address.
 *
 * @param chainId - Chain to search in
 * @param searchParam - Free-form search string
 * @param cacheRepository - Optional override; falls back to the singleton
 *   registered via setTokenCacheRepository()
 * @throws When no repository is configured
 */
export async function getTokenListBySearchParam(
  chainId: SupportedChainId,
  searchParam: string,
  cacheRepository?: TokenCacheRepository
): Promise<TokenFromAPI[]> {
  const repository = cacheRepository ?? tokenCacheRepository;

  if (!repository) {
    throw new Error(
      'TokenCacheRepository not configured.
Please set it using setTokenCacheRepository()' + ); + } + + return repository.searchTokens(chainId, searchParam); +} diff --git a/libs/repositories/src/datasources/tokenSearch/types.ts b/libs/repositories/src/datasources/tokenSearch/types.ts new file mode 100644 index 00000000..e8a4c453 --- /dev/null +++ b/libs/repositories/src/datasources/tokenSearch/types.ts @@ -0,0 +1,10 @@ +import { SupportedChainId } from '@cowprotocol/cow-sdk'; + +export type TokenFromAPI = { + chainId: SupportedChainId; + address: string; + name: string; + symbol: string; + decimals: number; + logoURI: string; +}; diff --git a/libs/repositories/src/index.ts b/libs/repositories/src/index.ts index debea97c..baf4b401 100644 --- a/libs/repositories/src/index.ts +++ b/libs/repositories/src/index.ts @@ -4,6 +4,7 @@ export * from './utils/isDbEnabled'; // Data-sources export * from './datasources/cms'; +export * from './datasources/tokenSearch'; export * from './datasources/cowApi'; export * from './datasources/orm/postgresOrm'; export * from './datasources/postgresPlain'; @@ -68,3 +69,6 @@ export * from './repos/ExpiredOrdersRepository/ExpiredOrdersRepositoryPostgres'; export * from './repos/PushNotificationsRepository/PushNotificationsRepository'; export * from './repos/PushSubscriptionsRepository/PushSubscriptionsRepository'; export * from './repos/PushSubscriptionsRepository/PushSubscriptionsRepositoryCms'; + +// Token cache repositories +export * from './repos/TokenCacheRepository'; diff --git a/libs/repositories/src/repos/TokenCacheRepository/TokenCacheRepository.ts b/libs/repositories/src/repos/TokenCacheRepository/TokenCacheRepository.ts new file mode 100644 index 00000000..c5dc73b3 --- /dev/null +++ b/libs/repositories/src/repos/TokenCacheRepository/TokenCacheRepository.ts @@ -0,0 +1,42 @@ +import { SupportedChainId } from '@cowprotocol/cow-sdk'; +import { TokenFromAPI } from '../../datasources/tokenSearch/types'; + +export const tokenCacheRepositorySymbol = 
Symbol.for('TokenCacheRepository'); + +export interface TokenCacheRepository { + /** + * Initialize token list for a specific chain + * @param chainId - The chain ID to initialize tokens for + * @param tokens - Array of tokens to cache + * @param ttlSeconds - Time to live in seconds (default: 24 hours) + */ + initTokenList( + chainId: SupportedChainId, + tokens: TokenFromAPI[], + ttlSeconds?: number + ): Promise; + + /** + * Get tokens for a specific chain + * @param chainId - The chain ID to get tokens for + * @returns Array of tokens or null if not cached + */ + getTokenList(chainId: SupportedChainId): Promise; + + /** + * Search tokens by parameter (name, symbol, or address) + * @param chainId - The chain ID to search in + * @param searchParam - Search parameter + * @returns Array of matching tokens + */ + searchTokens( + chainId: SupportedChainId, + searchParam: string + ): Promise; + + /** + * Clear token list for a specific chain + * @param chainId - The chain ID to clear + */ + clearTokenList(chainId: SupportedChainId): Promise; +} diff --git a/libs/repositories/src/repos/TokenCacheRepository/TokenCacheRepositoryRedis.test.ts b/libs/repositories/src/repos/TokenCacheRepository/TokenCacheRepositoryRedis.test.ts new file mode 100644 index 00000000..19e261d5 --- /dev/null +++ b/libs/repositories/src/repos/TokenCacheRepository/TokenCacheRepositoryRedis.test.ts @@ -0,0 +1,399 @@ +import { SupportedChainId } from '@cowprotocol/cow-sdk'; +import { Redis } from 'ioredis'; +import { TokenFromAPI } from '../../datasources/tokenSearch/types'; +import { TokenCacheRepositoryRedis } from './TokenCacheRepositoryRedis'; + +const mockTokens: TokenFromAPI[] = [ + { + chainId: SupportedChainId.MAINNET, + address: '0xA0b86a33E6441b8A7a6c5B3e8a8b7c9a0b86a33E', + name: 'Test Token', + symbol: 'TEST', + decimals: 18, + logoURI: 'https://example.com/test.png', + }, + { + chainId: SupportedChainId.MAINNET, + address: '0xB0b86a33E6441b8A7a6c5B3e8a8b7c9a0b86a33F', + name: 'Another 
Token', + symbol: 'ANOT', + decimals: 6, + logoURI: 'https://example.com/another.png', + }, + { + chainId: SupportedChainId.MAINNET, + address: '0xC0b86a33E6441b8A7a6c5B3e8a8b7c9a0b86a33G', + name: 'Ethereum Token', + symbol: 'ETH', + decimals: 18, + logoURI: 'https://example.com/eth.png', + }, +]; + +const mockRedisClient = { + del: jest.fn(), + hset: jest.fn(), + expire: jest.fn(), + hgetall: jest.fn(), + hdel: jest.fn(), +} as unknown as jest.Mocked; + +const consoleSpy = { + log: jest.spyOn(console, 'log').mockImplementation(), + warn: jest.spyOn(console, 'warn').mockImplementation(), +}; + +describe('TokenCacheRepositoryRedis', () => { + let repository: TokenCacheRepositoryRedis; + const chainId = SupportedChainId.MAINNET; + const expectedKey = `tokens:${chainId}`; + + beforeEach(() => { + repository = new TokenCacheRepositoryRedis(mockRedisClient); + jest.clearAllMocks(); + consoleSpy.log.mockClear(); + consoleSpy.warn.mockClear(); + }); + + afterAll(() => { + consoleSpy.log.mockRestore(); + consoleSpy.warn.mockRestore(); + }); + + describe('initTokenList', () => { + it('should initialize token list with default TTL', async () => { + mockRedisClient.del.mockResolvedValue(1); + mockRedisClient.hset.mockResolvedValue(0); + mockRedisClient.expire.mockResolvedValue(1); + + await repository.initTokenList(chainId, mockTokens); + + expect(mockRedisClient.del).toHaveBeenCalledWith(expectedKey); + expect(mockRedisClient.hset).toHaveBeenCalledWith(expectedKey, { + '0xa0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33e': JSON.stringify( + mockTokens[0] + ), + '0xb0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33f': JSON.stringify( + mockTokens[1] + ), + '0xc0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33g': JSON.stringify( + mockTokens[2] + ), + }); + expect(mockRedisClient.expire).toHaveBeenCalledWith( + expectedKey, + 24 * 60 * 60 + ); + }); + + it('should initialize token list with custom TTL', async () => { + mockRedisClient.del.mockResolvedValue(1); + mockRedisClient.hset.mockResolvedValue(0); + 
mockRedisClient.expire.mockResolvedValue(1); + + const customTtl = 60 * 60; // 1 hour + await repository.initTokenList(chainId, mockTokens, customTtl); + + expect(mockRedisClient.del).toHaveBeenCalledWith(expectedKey); + expect(mockRedisClient.hset).toHaveBeenCalledWith(expectedKey, { + '0xa0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33e': JSON.stringify( + mockTokens[0] + ), + '0xb0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33f': JSON.stringify( + mockTokens[1] + ), + '0xc0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33g': JSON.stringify( + mockTokens[2] + ), + }); + expect(mockRedisClient.expire).toHaveBeenCalledWith( + expectedKey, + customTtl + ); + }); + + it('should handle empty token list', async () => { + mockRedisClient.del.mockResolvedValue(1); + + await repository.initTokenList(chainId, []); + + expect(mockRedisClient.del).toHaveBeenCalledWith(expectedKey); + expect(mockRedisClient.hset).not.toHaveBeenCalled(); + expect(mockRedisClient.expire).not.toHaveBeenCalled(); + }); + + it('should normalize addresses to lowercase', async () => { + mockRedisClient.del.mockResolvedValue(1); + mockRedisClient.hset.mockResolvedValue(0); + mockRedisClient.expire.mockResolvedValue(1); + + const tokenWithUppercaseAddress: TokenFromAPI = { + ...mockTokens[0], + address: '0xA0B86A33E6441B8A7A6C5B3E8A8B7C9A0B86A33E', // Uppercase + }; + + await repository.initTokenList(chainId, [tokenWithUppercaseAddress]); + + expect(mockRedisClient.hset).toHaveBeenCalledWith(expectedKey, { + '0xa0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33e': JSON.stringify( + tokenWithUppercaseAddress + ), + }); + }); + }); + + describe('getTokenList', () => { + it('should return tokens from cache', async () => { + const hashData = { + '0xa0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33e': JSON.stringify( + mockTokens[0] + ), + '0xb0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33f': JSON.stringify( + mockTokens[1] + ), + }; + mockRedisClient.hgetall.mockResolvedValue(hashData); + + const result = await repository.getTokenList(chainId); + + 
expect(mockRedisClient.hgetall).toHaveBeenCalledWith(expectedKey); + expect(result).toHaveLength(2); + expect(result).toEqual( + expect.arrayContaining([mockTokens[0], mockTokens[1]]) + ); + }); + + it('should return null when cache is empty', async () => { + mockRedisClient.hgetall.mockResolvedValue({}); + + const result = await repository.getTokenList(chainId); + + expect(mockRedisClient.hgetall).toHaveBeenCalledWith(expectedKey); + expect(result).toBeNull(); + }); + + it('should return null when cache returns null', async () => { + mockRedisClient.hgetall.mockResolvedValue( + null as unknown as Record + ); + + const result = await repository.getTokenList(chainId); + + expect(mockRedisClient.hgetall).toHaveBeenCalledWith(expectedKey); + expect(result).toBeNull(); + }); + + it('should handle JSON parsing errors and remove invalid entries', async () => { + const hashData = { + '0xa0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33e': JSON.stringify( + mockTokens[0] + ), + '0xb0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33f': 'invalid-json', + '0xc0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33g': JSON.stringify( + mockTokens[2] + ), + }; + mockRedisClient.hgetall.mockResolvedValue(hashData); + mockRedisClient.hdel.mockResolvedValue(1); + + const result = await repository.getTokenList(chainId); + + expect(mockRedisClient.hgetall).toHaveBeenCalledWith(expectedKey); + expect(mockRedisClient.hdel).toHaveBeenCalledWith( + expectedKey, + '0xb0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33f' + ); + expect(consoleSpy.warn).toHaveBeenCalledWith( + 'Failed to parse token data for address 0xb0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33f, removed from cache' + ); + expect(result).toHaveLength(2); + expect(result).toEqual( + expect.arrayContaining([mockTokens[0], mockTokens[2]]) + ); + }); + + it('should return null when all entries are invalid JSON', async () => { + const hashData = { + '0xa0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33e': 'invalid-json-1', + '0xb0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33f': 'invalid-json-2', + }; 
+ mockRedisClient.hgetall.mockResolvedValue(hashData); + mockRedisClient.hdel.mockResolvedValue(1); + + const result = await repository.getTokenList(chainId); + + expect(mockRedisClient.hdel).toHaveBeenCalledTimes(2); + expect(result).toBeNull(); + }); + + it('should handle Redis errors and return null', async () => { + const error = new Error('Redis connection failed'); + mockRedisClient.hgetall.mockRejectedValue(error); + + const result = await repository.getTokenList(chainId); + + expect(mockRedisClient.hgetall).toHaveBeenCalledWith(expectedKey); + expect(consoleSpy.log).toHaveBeenCalledWith( + 'redis getTokenList 5 ==>', + error + ); + expect(result).toBeNull(); + }); + }); + + describe('searchTokens', () => { + beforeEach(() => { + const hashData = { + '0xa0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33e': JSON.stringify( + mockTokens[0] + ), + '0xb0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33f': JSON.stringify( + mockTokens[1] + ), + '0xc0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33g': JSON.stringify( + mockTokens[2] + ), + }; + mockRedisClient.hgetall.mockResolvedValue(hashData); + }); + + it('should search tokens by name (case insensitive)', async () => { + const result = await repository.searchTokens(chainId, 'test'); + + expect(result).toHaveLength(1); + expect(result[0]).toEqual(mockTokens[0]); + }); + + it('should search tokens by symbol (case insensitive)', async () => { + const result = await repository.searchTokens(chainId, 'eth'); + + expect(result).toHaveLength(1); + expect(result[0]).toEqual(mockTokens[2]); + }); + + it('should search tokens by address (case insensitive)', async () => { + const result = await repository.searchTokens( + chainId, + '0xA0b86a33E6441b8A7a6c5B3e8a8b7c9a0b86a33E' + ); + + expect(result).toHaveLength(1); + expect(result[0]).toEqual(mockTokens[0]); + }); + + it('should search tokens by partial address', async () => { + const result = await repository.searchTokens(chainId, '0xA0b86a33E'); + + expect(result).toHaveLength(1); + 
expect(result[0]).toEqual(mockTokens[0]); + }); + + it('should search tokens by partial name', async () => { + const result = await repository.searchTokens(chainId, 'token'); + + expect(result).toHaveLength(3); + expect(result).toEqual(expect.arrayContaining(mockTokens)); + }); + + it('should handle search with leading/trailing whitespace', async () => { + const result = await repository.searchTokens(chainId, ' TEST '); + + expect(result).toHaveLength(1); + expect(result[0]).toEqual(mockTokens[0]); + }); + + it('should return empty array when no tokens match', async () => { + const result = await repository.searchTokens(chainId, 'nonexistent'); + + expect(result).toHaveLength(0); + }); + + it('should return empty array when token list is null', async () => { + mockRedisClient.hgetall.mockResolvedValue({}); + + const result = await repository.searchTokens(chainId, 'test'); + + expect(result).toHaveLength(0); + }); + + it('should handle empty search parameter', async () => { + const result = await repository.searchTokens(chainId, ''); + + expect(result).toHaveLength(3); + expect(result).toEqual(expect.arrayContaining(mockTokens)); + }); + + it('should handle whitespace-only search parameter', async () => { + const result = await repository.searchTokens(chainId, ' '); + + expect(result).toHaveLength(3); + expect(result).toEqual(expect.arrayContaining(mockTokens)); + }); + }); + + describe('clearTokenList', () => { + it('should clear token list for specified chain', async () => { + mockRedisClient.del.mockResolvedValue(1); + + await repository.clearTokenList(chainId); + + expect(mockRedisClient.del).toHaveBeenCalledWith(expectedKey); + }); + + it('should handle Redis errors when clearing', async () => { + const error = new Error('Redis delete failed'); + mockRedisClient.del.mockRejectedValue(error); + + await expect(repository.clearTokenList(chainId)).rejects.toThrow( + 'Redis delete failed' + ); + }); + }); + + describe('key generation', () => { + it('should generate 
correct keys for different chain IDs', async () => { + const chains = [ + SupportedChainId.MAINNET, + SupportedChainId.GNOSIS_CHAIN, + SupportedChainId.ARBITRUM_ONE, + ]; + + mockRedisClient.del.mockResolvedValue(1); + + for (const chain of chains) { + await repository.clearTokenList(chain); + expect(mockRedisClient.del).toHaveBeenCalledWith(`tokens:${chain}`); + } + }); + }); + + describe('integration scenarios', () => { + it('should handle complete workflow: init -> get -> search -> clear', async () => { + mockRedisClient.del.mockResolvedValue(1); + mockRedisClient.hset.mockResolvedValue(0); + mockRedisClient.expire.mockResolvedValue(1); + await repository.initTokenList(chainId, mockTokens); + + const hashData = { + '0xa0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33e': JSON.stringify( + mockTokens[0] + ), + '0xb0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33f': JSON.stringify( + mockTokens[1] + ), + '0xc0b86a33e6441b8a7a6c5b3e8a8b7c9a0b86a33g': JSON.stringify( + mockTokens[2] + ), + }; + mockRedisClient.hgetall.mockResolvedValue(hashData); + const tokens = await repository.getTokenList(chainId); + expect(tokens).toHaveLength(3); + + const searchResults = await repository.searchTokens(chainId, 'test'); + expect(searchResults).toHaveLength(1); + + await repository.clearTokenList(chainId); + expect(mockRedisClient.del).toHaveBeenCalledTimes(2); + }); + }); +}); diff --git a/libs/repositories/src/repos/TokenCacheRepository/TokenCacheRepositoryRedis.ts b/libs/repositories/src/repos/TokenCacheRepository/TokenCacheRepositoryRedis.ts new file mode 100644 index 00000000..4421776f --- /dev/null +++ b/libs/repositories/src/repos/TokenCacheRepository/TokenCacheRepositoryRedis.ts @@ -0,0 +1,93 @@ +import { injectable } from 'inversify'; +import { Redis } from 'ioredis'; +import { SupportedChainId } from '@cowprotocol/cow-sdk'; +import { TokenFromAPI } from '../../datasources/tokenSearch/types'; +import { TokenCacheRepository } from './TokenCacheRepository'; + +@injectable() +export class 
TokenCacheRepositoryRedis implements TokenCacheRepository { + private readonly keyPrefix = 'tokens:'; + private readonly defaultTtl = 24 * 60 * 60; // 24 hours in seconds + + constructor(private redisClient: Redis) {} + + private getKey(chainId: SupportedChainId): string { + return `${this.keyPrefix}${chainId}`; + } + + async initTokenList( + chainId: SupportedChainId, + tokens: TokenFromAPI[], + ttlSeconds: number = this.defaultTtl + ): Promise { + const key = this.getKey(chainId); + + // Clear existing hash if it exists + await this.redisClient.del(key); + + if (tokens.length === 0) { + return; + } + + const hashData: Record = {}; + for (const token of tokens) { + hashData[token.address.toLowerCase()] = JSON.stringify(token); + } + + await this.redisClient.hset(key, hashData); + + await this.redisClient.expire(key, ttlSeconds); + } + + async getTokenList( + chainId: SupportedChainId + ): Promise { + const key = this.getKey(chainId); + const hashData = await this.redisClient.hgetall(key); + + if (!hashData || Object.keys(hashData).length === 0) { + return null; + } + + const tokens: TokenFromAPI[] = []; + + for (const [address, serializedToken] of Object.entries(hashData)) { + try { + const token = JSON.parse(serializedToken) as TokenFromAPI; + tokens.push(token); + } catch (error) { + await this.redisClient.hdel(key, address); + console.warn( + `Failed to parse token data for address ${address}, removed from cache` + ); + } + } + + return tokens.length > 0 ? 
tokens : null; + } + + async searchTokens( + chainId: SupportedChainId, + searchParam: string + ): Promise { + const tokens = await this.getTokenList(chainId); + + if (!tokens) { + return []; + } + + const trimmedSearchParam = searchParam.trim().toLowerCase(); + + return tokens.filter( + (token) => + token.name.toLowerCase().includes(trimmedSearchParam) || + token.symbol.toLowerCase().includes(trimmedSearchParam) || + token.address.toLowerCase().includes(trimmedSearchParam) + ); + } + + async clearTokenList(chainId: SupportedChainId): Promise { + const key = this.getKey(chainId); + await this.redisClient.del(key); + } +} diff --git a/libs/repositories/src/repos/TokenCacheRepository/index.ts b/libs/repositories/src/repos/TokenCacheRepository/index.ts new file mode 100644 index 00000000..f966f5d5 --- /dev/null +++ b/libs/repositories/src/repos/TokenCacheRepository/index.ts @@ -0,0 +1,2 @@ +export * from './TokenCacheRepository'; +export * from './TokenCacheRepositoryRedis'; diff --git a/libs/services/jest.config.ts b/libs/services/jest.config.ts index 6f179e25..b977bc5e 100644 --- a/libs/services/jest.config.ts +++ b/libs/services/jest.config.ts @@ -6,6 +6,9 @@ export default { transform: { '^.+\\.[tj]s$': ['ts-jest', { tsconfig: '/tsconfig.spec.json' }], }, + transformIgnorePatterns: [ + 'node_modules/(?!(node-fetch|data-uri-to-buffer|fetch-blob|formdata-polyfill|@cowprotocol|@uniswap)/)', + ], moduleFileExtensions: ['ts', 'js', 'html'], coverageDirectory: '../../coverage/libs/services', setupFilesAfterEnv: ['../../jest.setup.ts'], diff --git a/libs/services/src/factories.ts b/libs/services/src/factories.ts index 7275e32d..3696d37e 100644 --- a/libs/services/src/factories.ts +++ b/libs/services/src/factories.ts @@ -27,6 +27,8 @@ import { SimulationRepository, SimulationRepositoryTenderly, TelegramBot, + TokenCacheRepository, + TokenCacheRepositoryRedis, TokenBalancesRepository, TokenBalancesRepositoryMoralis, TokenHolderRepository, @@ -190,3 +192,19 @@ export 
function getTelegramBot(): TelegramBot { return telegramBot; } + +let tokenCacheRepository: TokenCacheRepository | null = null; + +export function getTokenCacheRepository(): TokenCacheRepository { + if (!redisClient) { + throw new Error( + 'Redis client is required for TokenCacheRepository. Please configure Redis.' + ); + } + + if (!tokenCacheRepository) { + tokenCacheRepository = new TokenCacheRepositoryRedis(redisClient); + } + + return tokenCacheRepository; +} diff --git a/package.json b/package.json index ad70775b..7cae14d6 100644 --- a/package.json +++ b/package.json @@ -14,6 +14,7 @@ "migration:revert": "nx run repositories:migration:revert", "producer": "nx run notification-producer:start", "telegram": "nx run telegram:start", + "token-list-updater": "nx run token-list-updater:start", "build": "nx run-many --all --target=build", "test": "nx run-many --all --target=test", "new:fastify": "nx generate @nx/node:application --framework=fastify --docker --directory=apps", @@ -109,5 +110,6 @@ "typescript": "^5.5.3", "vite-plugin-dts": "^3.0.3", "vite-tsconfig-paths": "^4.2.0" - } -} \ No newline at end of file + }, + "packageManager": "yarn@1.22.22+sha512.a6b2f7906b721bba3d67d4aff083df04dad64c399707841b7acf00f6b133b7ac24255f2652fa22ae3534329dc6180534e98d17432037ff6fd140556e2bb3137e" +}