Mirror of https://github.com/roostorg/coop
1import { randomUUID } from 'crypto';
2import { Kysely, type CompiledQuery, type QueryResult } from 'kysely';
3
4import { type Dependencies } from '../../iocContainer/index.js';
5import { makeMockWarehouseDialect } from '../../test/stubs/makeMockWarehouseKyselyDialect.js';
6import { type MockedFn } from '../../test/mockHelpers/jestMocks.js';
7import { safePick } from '../../utils/misc.js';
8import { makeFetchUserSubmissionStatistics } from './fetchUserSubmissionStatistics.js';
9
10describe('fetchUserSubmissionStatistics', () => {
11 let warehouseMock: MockedFn<
12 (it: CompiledQuery) => Promise<QueryResult<unknown>>
13 >;
14 let sut: ReturnType<typeof makeFetchUserSubmissionStatistics>;
15
16 beforeEach(() => {
17 // This mutation is safe (while we're not running tests concurrently) as
18 // it's local to the test suite. Consider using the `makeTestWithFixture`
19 // helper instead to make a local copy of this state for each test.
20 // eslint-disable-next-line better-mutation/no-mutation
21 warehouseMock = jest.fn(async (_it) => Promise.resolve({ rows: [] }));
22
23 // This mutation is safe (while we're not running tests concurrently) as
24 // it's local to the test suite. Consider using the `makeTestWithFixture`
25 // helper instead to make a local copy of this state for each test.
26 // eslint-disable-next-line better-mutation/no-mutation
27 const kysely = new Kysely({
28 dialect: makeMockWarehouseDialect(warehouseMock),
29 });
30 const dialectMock: Dependencies['DataWarehouseDialect'] = {
31 getKyselyInstance: () => kysely,
32 destroy: jest.fn(async () => {}),
33 };
34
35 // eslint-disable-next-line better-mutation/no-mutation
36 sut = makeFetchUserSubmissionStatistics(dialectMock);
37 });
38
39 test('should generate proper query given org + user ids only', async () => {
40 await sut({ orgId: 'x', userItemIdentifiers: [{ id: '1', typeId: 'a' }] });
41 await sut({
42 orgId: 'x',
43 userItemIdentifiers: [
44 { id: '1', typeId: 'a' },
45 { id: '3', typeId: 'b' },
46 ],
47 });
48
49 expect(warehouseMock).toHaveBeenCalledTimes(2);
50
51 const queriesRan = warehouseMock.mock.calls.map((it) =>
52 safePick(it[0], ['parameters', 'sql']),
53 );
54
55 expect(queriesRan).toMatchInlineSnapshot(`
56 [
57 {
58 "parameters": [
59 "x",
60 "1",
61 "a",
62 ],
63 "sql": "select "USER_ID" as "userId", "USER_TYPE_ID" as "userTypeId", "ITEM_TYPE_ID" as "itemTypeId", sum("NUM_SUBMISSIONS") as "numSubmissions" from "USER_STATISTICS_SERVICE"."SUBMISSION_STATS" where "ORG_ID" = :1 and ("USER_ID" = :2 and "USER_TYPE_ID" = :3) group by "USER_ID", "USER_TYPE_ID", "ITEM_TYPE_ID"",
64 },
65 {
66 "parameters": [
67 "x",
68 "1",
69 "a",
70 "3",
71 "b",
72 ],
73 "sql": "select "USER_ID" as "userId", "USER_TYPE_ID" as "userTypeId", "ITEM_TYPE_ID" as "itemTypeId", sum("NUM_SUBMISSIONS") as "numSubmissions" from "USER_STATISTICS_SERVICE"."SUBMISSION_STATS" where "ORG_ID" = :1 and (("USER_ID" = :2 and "USER_TYPE_ID" = :3) or ("USER_ID" = :4 and "USER_TYPE_ID" = :5)) group by "USER_ID", "USER_TYPE_ID", "ITEM_TYPE_ID"",
74 },
75 ]
76 `);
77 });
78
79 test('should batch queries of more than 16,000 unique user ids', async () => {
80 const numUserIds = Math.floor(16_000 / Math.max(Math.random(), 0.05)); // some big int over 16,000
81 const largeUserIdList = Array.from({ length: numUserIds }, (_) => ({
82 id: randomUUID(),
83 typeId: randomUUID(),
84 }));
85
86 await sut({ orgId: 'x', userItemIdentifiers: largeUserIdList });
87 expect(warehouseMock.mock.calls.length).toBeGreaterThan(1);
88 });
89
90 test('should generate proper query given user/org ids + date filters', async () => {
91 await sut({
92 orgId: 'x',
93 userItemIdentifiers: [{ id: '1', typeId: 'a' }],
94 startTime: new Date('2020-01-01T00:00Z'),
95 });
96
97 await sut({
98 orgId: 'x',
99 userItemIdentifiers: [
100 { id: '1', typeId: 'a' },
101 { id: '3', typeId: 'b' },
102 ],
103 endTime: new Date('2020-01-01T00:00:00Z'),
104 });
105
106 await sut({
107 orgId: 'x',
108 userItemIdentifiers: [
109 { id: '1', typeId: 'a' },
110 { id: '3', typeId: 'b' },
111 ],
112 endTime: new Date('2020-01-01T00:00:00Z'),
113 startTime: new Date('2020-02-01T00:00:00Z'),
114 });
115
116 expect(warehouseMock).toHaveBeenCalledTimes(3);
117
118 const queriesRan = warehouseMock.mock.calls.map((it) =>
119 safePick(it[0], ['parameters', 'sql']),
120 );
121
122 expect(queriesRan).toMatchInlineSnapshot(`
123 [
124 {
125 "parameters": [
126 "x",
127 "1",
128 "a",
129 2020-01-01T00:00:00.000Z,
130 ],
131 "sql": "select "USER_ID" as "userId", "USER_TYPE_ID" as "userTypeId", "ITEM_TYPE_ID" as "itemTypeId", sum("NUM_SUBMISSIONS") as "numSubmissions" from "USER_STATISTICS_SERVICE"."SUBMISSION_STATS" where "ORG_ID" = :1 and ("USER_ID" = :2 and "USER_TYPE_ID" = :3) and "TS_START_INCLUSIVE" >= :4 group by "USER_ID", "USER_TYPE_ID", "ITEM_TYPE_ID"",
132 },
133 {
134 "parameters": [
135 "x",
136 "1",
137 "a",
138 "3",
139 "b",
140 2020-01-01T00:00:00.000Z,
141 ],
142 "sql": "select "USER_ID" as "userId", "USER_TYPE_ID" as "userTypeId", "ITEM_TYPE_ID" as "itemTypeId", sum("NUM_SUBMISSIONS") as "numSubmissions" from "USER_STATISTICS_SERVICE"."SUBMISSION_STATS" where "ORG_ID" = :1 and (("USER_ID" = :2 and "USER_TYPE_ID" = :3) or ("USER_ID" = :4 and "USER_TYPE_ID" = :5)) and "TS_END_EXCLUSIVE" <= :6 group by "USER_ID", "USER_TYPE_ID", "ITEM_TYPE_ID"",
143 },
144 {
145 "parameters": [
146 "x",
147 "1",
148 "a",
149 "3",
150 "b",
151 2020-02-01T00:00:00.000Z,
152 2020-01-01T00:00:00.000Z,
153 ],
154 "sql": "select "USER_ID" as "userId", "USER_TYPE_ID" as "userTypeId", "ITEM_TYPE_ID" as "itemTypeId", sum("NUM_SUBMISSIONS") as "numSubmissions" from "USER_STATISTICS_SERVICE"."SUBMISSION_STATS" where "ORG_ID" = :1 and (("USER_ID" = :2 and "USER_TYPE_ID" = :3) or ("USER_ID" = :4 and "USER_TYPE_ID" = :5)) and "TS_START_INCLUSIVE" >= :6 and "TS_END_EXCLUSIVE" <= :7 group by "USER_ID", "USER_TYPE_ID", "ITEM_TYPE_ID"",
155 },
156 ]
157 `);
158 });
159});