
Replace run-with-docker.sh with testcontainer #301

Open · wants to merge 1 commit into main
10 changes: 8 additions & 2 deletions .ctirc
@@ -20,12 +20,18 @@
{
"project": "packages/entity-cache-adapter-redis/tsconfig.json",
"output": "packages/entity-cache-adapter-redis/src",
"exclude": [ "**/__testfixtures__/**" ]
"exclude": [
"**/__testfixtures__/**",
"**/__integration-tests__/**"
]
},
{
"project": "packages/entity-database-adapter-knex/tsconfig.json",
"output": "packages/entity-database-adapter-knex/src",
"exclude": [ "**/__testfixtures__/**" ]
"exclude": [
"**/__testfixtures__/**",
"**/__integration-tests__/**"
]
},
{
"project": "packages/entity-ip-address-field/tsconfig.json",
13 changes: 3 additions & 10 deletions .github/workflows/tests.yml
@@ -20,17 +20,10 @@ jobs:
- run: yarn lint --max-warnings=0
- name: Check if barrels are up to date
run: yarn ctix && [ -z "$(git status --porcelain)" ]
- run: yarn test --coverage
- run: yarn integration --coverage
- uses: SimenB/github-actions-cpu-cores@v2
id: cpu-cores
- run: yarn test:all --maxWorkers ${{ steps.cpu-cores.outputs.count }}
- uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./coverage
flags: unittest
fail_ci_if_error: true
- uses: codecov/codecov-action@v5
with:
token: ${{ secrets.CODECOV_TOKEN }}
directory: ./coverage-integration
flags: integration
fail_ci_if_error: true
7 changes: 1 addition & 6 deletions .gitignore
@@ -38,12 +38,7 @@ node_modules/
!**/.yarn/versions
**/.pnp.*

.nx/cache
.nx/workspace-data

# Entity-specific ignores

build/
coverage/
coverage-integration/
doc/
doc/
7 changes: 0 additions & 7 deletions jest-integration.config.js

This file was deleted.

13 changes: 12 additions & 1 deletion jest.config.js
@@ -1,5 +1,6 @@
module.exports = {
transform: { '\\.[jt]sx?$': ['babel-jest', { rootMode: 'upward' }] },
collectCoverage: true,
collectCoverageFrom: [
'packages/*/src/**',
'!packages/*/src/index.ts',
@@ -9,5 +10,15 @@ module.exports = {
'!**/__*test*__/**',
],
coverageProvider: 'v8',
testMatch: ['**/__tests__/**/*-test.ts'],
randomize: true,
testEnvironmentOptions: { globalsCleanupMode: 'on' },
workerThreads: true,
projects: [
{ displayName: 'unit', testMatch: ['**/__tests__/**/*-test.ts'] },
{
displayName: 'integration',
globalSetup: '<rootDir>/setup.mjs',
testMatch: ['**/__integration-tests__/**/*-test.ts'],
},
],
};
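
Note: the integration project's `globalSetup` references `<rootDir>/setup.mjs`, which is not among the files loaded in this diff view. As a rough sketch only (an assumption, not the PR's actual file), Jest's globalSetup contract is a module whose default export is an async function that runs once before the project's tests:

```js
// Hypothetical sketch — the real setup.mjs added by this PR is not visible here.
// Jest calls the default export once before running the integration project.
export default async function globalSetup() {
  // One-time preparation for the integration tests, e.g. verifying that a
  // Docker daemon is reachable so testcontainers can start containers later.
}
```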
16 changes: 0 additions & 16 deletions nx.json

This file was deleted.

10 changes: 6 additions & 4 deletions package.json
@@ -10,12 +10,13 @@
"clean": "yarn build --clean",
"lint": "eslint .",
"lint-fix": "yarn lint --fix",
"test": "yarn build --noCheck && jest",
"test:all": "yarn test",
"integration": "./resources/run-with-docker yarn test --config jest-integration.config.js --runInBand",
"test": "yarn test:unit",
"test:unit": "yarn test:all --selectProjects unit",
"test:all": "yarn build --noCheck && DEBUG=testcontainers:pull jest",
"integration": "yarn test:all --selectProjects integration",
"integration:all": "yarn integration",
"prepack": "yarn build",
"ctix": "ctix build && resources/prepend-barrels.sh",
"ctix": "ctix build && ./prepend-barrels.sh",
"typedoc": "yarn build && typedoc"
},
"devDependencies": {
@@ -35,6 +36,7 @@
"lerna": "^8.2.2",
"prettier": "^3.5.3",
"prettier-plugin-organize-imports": "^4.1.0",
"testcontainers": "^11.0.3",
"typedoc": "^0.28.4",
"typescript": "^5.8.3"
},
3 changes: 1 addition & 2 deletions packages/entity-cache-adapter-local-memory/package.json
@@ -13,8 +13,7 @@
"clean": "yarn build --clean",
"lint": "yarn run --top-level eslint src",
"lint-fix": "yarn lint --fix",
"test": "yarn test:all --rootDir $(pwd)",
"integration": "yarn integration:all --rootDir $(pwd)"
"test": "yarn test:unit --roots $(pwd)"
},
"engines": {
"node": ">=16"
5 changes: 3 additions & 2 deletions packages/entity-cache-adapter-redis/package.json
@@ -13,8 +13,8 @@
"clean": "yarn build --clean",
"lint": "yarn run --top-level eslint src",
"lint-fix": "yarn lint --fix",
"test": "yarn test:all --rootDir $(pwd)",
"integration": "yarn integration:all --rootDir $(pwd)"
"test": "yarn test:unit --roots $(pwd)",
"integration": "yarn test:integration --roots $(pwd)"
},
"engines": {
"node": ">=16"
@@ -34,6 +34,7 @@
"@expo/batcher": "^1.0.0",
"@expo/entity-testing-utils": "workspace:^",
"@jest/globals": "^30.0.0",
"@testcontainers/redis": "^11.0.3",
"ioredis": "^5.6.0",
"ts-mockito": "^2.6.1",
"typescript": "^5.8.3"
@@ -8,9 +8,7 @@ import {
} from '@expo/entity';
import { afterAll, beforeAll, beforeEach, describe, expect, it, jest } from '@jest/globals';
import invariant from 'invariant';
import Redis from 'ioredis';
import nullthrows from 'nullthrows';
import { URL } from 'url';
import { v4 as uuidv4 } from 'uuid';

import {
@@ -20,6 +18,7 @@ import {
IRedisTransaction,
RedisCacheInvalidationStrategy,
} from '../GenericRedisCacher';
import { Redis, StartedRedisContainer, startRedisAsync } from './testcontainer';
import { RedisTestEntity, RedisTestEntityFields } from '../__testfixtures__/RedisTestEntity';
import { createRedisIntegrationTestEntityCompanionProvider } from '../__testfixtures__/createRedisIntegrationTestEntityCompanionProvider';

@@ -69,14 +68,14 @@ class BatchedRedis implements IRedis {
}

describe(GenericRedisCacher, () => {
const redis = new Redis(new URL(process.env['REDIS_URL']!).toString());
const redisClient = new BatchedRedis(redis);

let container: StartedRedisContainer;
let redisClient: Redis;
let genericRedisCacheContext: GenericRedisCacheContext;

beforeAll(() => {
beforeAll(async () => {
({ container, redisClient } = await startRedisAsync());
genericRedisCacheContext = {
redisClient,
redisClient: new BatchedRedis(redisClient),
makeKeyFn(...parts: string[]): string {
const delimiter = ':';
const escapedParts = parts.map((part) =>
@@ -94,11 +93,12 @@
});

beforeEach(async () => {
await redis.flushdb();
await redisClient.flushdb();
});

afterAll(async () => {
redis.disconnect();
redisClient.disconnect();
await container.stop();
});

it('has correct caching behavior', async () => {
@@ -107,7 +107,7 @@
createRedisIntegrationTestEntityCompanionProvider(genericRedisCacheContext),
);

const mgetSpy = jest.spyOn(redis, 'mget');
const mgetSpy = jest.spyOn(redisClient, 'mget');

const genericCacher = viewerContext.entityCompanionProvider.getCompanionForEntity(
RedisTestEntity,
@@ -149,7 +149,7 @@
new SingleFieldHolder('id'),
new SingleFieldValueHolder(entity1Created.getID()),
);
const cachedJSON = await redis.get(cacheKeyEntity1);
const cachedJSON = await redisClient.get(cacheKeyEntity1);
const cachedValue = JSON.parse(cachedJSON!);
expect(cachedValue).toMatchObject({
id: entity1.getID(),
@@ -164,7 +164,7 @@
'name',
entity1Created.getField('name'),
);
await expect(redis.get(cacheKeyEntity1NameField)).resolves.toEqual(cachedJSON);
await expect(redisClient.get(cacheKeyEntity1NameField)).resolves.toEqual(cachedJSON);

// simulate non existent db fetch, should write negative result ('') to cache
const nonExistentId = uuidv4();
@@ -177,7 +177,7 @@
new SingleFieldHolder('id'),
new SingleFieldValueHolder(nonExistentId),
);
const nonExistentCachedValue = await redis.get(cacheKeyNonExistent);
const nonExistentCachedValue = await redisClient.get(cacheKeyNonExistent);
expect(nonExistentCachedValue).toEqual('');
// load again through entities framework to ensure it reads negative result
const entityNonExistentResult2 =
@@ -188,7 +188,7 @@

// invalidate from cache to ensure it invalidates correctly in both caches
await RedisTestEntity.loaderUtils(viewerContext).invalidateFieldsAsync(entity1.getAllFields());
await expect(redis.get(cacheKeyEntity1)).resolves.toBeNull();
await expect(redis.get(cacheKeyEntity1NameField)).resolves.toBeNull();
await expect(redisClient.get(cacheKeyEntity1)).resolves.toBeNull();
await expect(redisClient.get(cacheKeyEntity1NameField)).resolves.toBeNull();
});
});
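
The tests above import `Redis`, `StartedRedisContainer`, and `startRedisAsync` from a local `./testcontainer` helper that is not among the files loaded in this view. A plausible sketch of such a helper, assuming it wraps `@testcontainers/redis` (added to this package's devDependencies) and hands back an ioredis client connected to the started container:

```ts
// Hypothetical reconstruction of the __integration-tests__/testcontainer helper;
// the actual file is not shown in the loaded diff. Assumes @testcontainers/redis
// and ioredis, both present in this package's devDependencies.
import { RedisContainer, StartedRedisContainer } from '@testcontainers/redis';
import Redis from 'ioredis';

export { Redis, StartedRedisContainer };

export async function startRedisAsync(): Promise<{
  container: StartedRedisContainer;
  redisClient: Redis;
}> {
  // Start a throwaway Redis container; testcontainers binds it to a free host port.
  const container = await new RedisContainer('redis:7').start();
  // Connect an ioredis client using the container's redis:// connection URL.
  const redisClient = new Redis(container.getConnectionUrl());
  return { container, redisClient };
}
```

Each test file then disconnects the client and stops the container in afterAll, as the surrounding diffs show.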
@@ -8,26 +8,28 @@ import {
} from '@expo/entity';
import { enforceAsyncResult } from '@expo/results';
import { afterAll, beforeAll, beforeEach, describe, expect, it } from '@jest/globals';
import Redis from 'ioredis';
import { URL } from 'url';
import { v4 as uuidv4 } from 'uuid';

import {
GenericRedisCacheContext,
GenericRedisCacher,
RedisCacheInvalidationStrategy,
} from '../GenericRedisCacher';
import { Redis, StartedRedisContainer, startRedisAsync } from './testcontainer';
import { RedisTestEntity, RedisTestEntityFields } from '../__testfixtures__/RedisTestEntity';
import { createRedisIntegrationTestEntityCompanionProvider } from '../__testfixtures__/createRedisIntegrationTestEntityCompanionProvider';

class TestViewerContext extends ViewerContext {}

describe(GenericRedisCacher, () => {
let container: StartedRedisContainer;
let redisClient: Redis;
let genericRedisCacheContext: GenericRedisCacheContext;

beforeAll(() => {
beforeAll(async () => {
({ container, redisClient } = await startRedisAsync());
genericRedisCacheContext = {
redisClient: new Redis(new URL(process.env['REDIS_URL']!).toString()),
redisClient,
makeKeyFn(...parts: string[]): string {
const delimiter = ':';
const escapedParts = parts.map((part) =>
@@ -45,10 +47,12 @@
});

beforeEach(async () => {
await (genericRedisCacheContext.redisClient as Redis).flushdb();
await redisClient.flushdb();
});

afterAll(async () => {
(genericRedisCacheContext.redisClient as Redis).disconnect();
redisClient.disconnect();
await container.stop();
});

it('has correct caching behavior', async () => {
@@ -1,13 +1,12 @@
import { CacheStatus, ViewerContext } from '@expo/entity';
import { afterAll, beforeAll, beforeEach, describe, expect, it } from '@jest/globals';
import Redis from 'ioredis';
import { URL } from 'url';

import {
GenericRedisCacheContext,
GenericRedisCacher,
RedisCacheInvalidationStrategy,
} from '../GenericRedisCacher';
import { Redis, StartedRedisContainer, startRedisAsync } from './testcontainer';
import {
RedisTestEntity,
redisTestEntityConfiguration,
@@ -19,10 +18,13 @@ class TestViewerContext extends ViewerContext {}

describe(GenericRedisCacher, () => {
let genericRedisCacheContext: GenericRedisCacheContext;
let redisClient: Redis;
let container: StartedRedisContainer;

beforeAll(() => {
beforeAll(async () => {
({ redisClient, container } = await startRedisAsync());
genericRedisCacheContext = {
redisClient: new Redis(new URL(process.env['REDIS_URL']!).toString()),
redisClient,
makeKeyFn(...parts: string[]): string {
const delimiter = ':';
const escapedParts = parts.map((part) =>
@@ -40,10 +42,12 @@
});

beforeEach(async () => {
await (genericRedisCacheContext.redisClient as Redis).flushdb();
await redisClient.flushdb();
});

afterAll(async () => {
(genericRedisCacheContext.redisClient as Redis).disconnect();
redisClient.disconnect();
await container.stop();
});

it('has correct caching and loading behavior', async () => {
@@ -1,23 +1,24 @@
import { EntityCacheAdapterTransientError, ViewerContext } from '@expo/entity';
import { beforeAll, beforeEach, describe, expect, it } from '@jest/globals';
import Redis from 'ioredis';
import { URL } from 'url';
import { afterAll, beforeAll, beforeEach, describe, expect, it } from '@jest/globals';

import {
GenericRedisCacheContext,
GenericRedisCacher,
RedisCacheInvalidationStrategy,
} from '../GenericRedisCacher';
import { Redis, StartedRedisContainer, startRedisAsync } from './testcontainer';
import { RedisTestEntity } from '../__testfixtures__/RedisTestEntity';
import { createRedisIntegrationTestEntityCompanionProvider } from '../__testfixtures__/createRedisIntegrationTestEntityCompanionProvider';

class TestViewerContext extends ViewerContext {}

describe(GenericRedisCacher, () => {
const redisClient = new Redis(new URL(process.env['REDIS_URL']!).toString());
let container: StartedRedisContainer;
let redisClient: Redis;
let genericRedisCacheContext: GenericRedisCacheContext;

beforeAll(() => {
beforeAll(async () => {
({ container, redisClient } = await startRedisAsync());
genericRedisCacheContext = {
redisClient,
makeKeyFn(...parts: string[]): string {
@@ -40,6 +41,10 @@
await redisClient.flushdb();
});

afterAll(async () => {
await container.stop();
});

it('throws when redis is disconnected', async () => {
redisClient.disconnect();
