directus · payload.test.ts (348 lines · 8.5 KB)

import type { Knex } from 'knex';
import knex from 'knex';
import { MockClient, Tracker, createTracker } from 'knex-mock-client';
import type { MockedFunction } from 'vitest';
import { afterEach, beforeAll, beforeEach, describe, expect, test, vi } from 'vitest';
import type { Helpers } from '../database/helpers/index.js';
import { getHelpers } from '../database/helpers/index.js';
import { PayloadService } from './index.js';

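// The database client lookup is mocked to report 'postgres' so vendor-specific helpers resolve consistently in these tests.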
vi.mock('../../src/database/index', () => ({
	getDatabaseClient: vi.fn().mockReturnValue('postgres'),
}));

describe('Integration Tests', () => {
	let db: MockedFunction<Knex>;
	let tracker: Tracker;

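	// knex-mock-client backs the Knex instance; the tracker intercepts queries so no real database connection is needed.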
	beforeAll(async () => {
		db = vi.mocked(knex.default({ client: MockClient }));
		tracker = createTracker(db);
	});

	afterEach(() => {
		tracker.reset();
	});

	describe('Services / PayloadService', () => {
		describe('transformers', () => {
			let service: PayloadService;
			let helpers: Helpers;

			beforeEach(() => {
				service = new PayloadService('test', {
					knex: db,
					schema: { collections: {}, relations: [] },
				});

				helpers = getHelpers(db);
			});

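			// cast-csv transformer: on read, CSV strings are split into arrays (arrays pass through unchanged); on create, arrays are joined back into a comma-separated string.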
			describe('csv', () => {
				test('Returns undefined for illegal values', async () => {
					const result = await service.transformers['cast-csv']!({
						value: 123,
						action: 'read',
						payload: {},
						accountability: { role: null },
						specials: [],
						helpers,
					});

					expect(result).toBe(undefined);
				});

				test('Returns [] for empty strings', async () => {
					const result = await service.transformers['cast-csv']!({
						value: '',
						action: 'read',
						payload: {},
						accountability: { role: null },
						specials: [],
						helpers,
					});

					expect(result).toMatchObject([]);
				});

				test('Returns array values as is', async () => {
					const result = await service.transformers['cast-csv']!({
						value: ['test', 'directus'],
						action: 'read',
						payload: {},
						accountability: { role: null },
						specials: [],
						helpers,
					});

					expect(result).toEqual(['test', 'directus']);
				});

				test('Splits the CSV string', async () => {
					const result = await service.transformers['cast-csv']!({
						value: 'test,directus',
						action: 'read',
						payload: {},
						accountability: { role: null },
						specials: [],
						helpers,
					});

					expect(result).toMatchObject(['test', 'directus']);
				});

				test('Saves array values as joined string', async () => {
					const result = await service.transformers['cast-csv']!({
						value: ['test', 'directus'],
						action: 'create',
						payload: {},
						accountability: { role: null },
						specials: [],
						helpers,
					});

					expect(result).toBe('test,directus');
				});

				test('Saves string values as is', async () => {
					const result = await service.transformers['cast-csv']!({
						value: 'test,directus',
						action: 'create',
						payload: {},
						accountability: { role: null },
						specials: [],
						helpers,
					});

					expect(result).toBe('test,directus');
				});
			});
		});

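		// processDates: zero dates become null on read, and date / dateTime / timestamp values are cast to the output formats asserted below.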
		describe('processDates', () => {
			let service: PayloadService;

			const dateFieldId = 'date_field';
			const dateTimeFieldId = 'datetime_field';
			const timestampFieldId = 'timestamp_field';

			beforeEach(() => {
				service = new PayloadService('test', {
					knex: db,
					schema: {
						collections: {
							test: {
								collection: 'test',
								primary: 'id',
								singleton: false,
								sortField: null,
								note: null,
								accountability: null,
								fields: {
									[dateFieldId]: {
										field: dateFieldId,
										defaultValue: null,
										nullable: true,
										generated: false,
										type: 'date',
										dbType: 'date',
										precision: null,
										scale: null,
										special: [],
										note: null,
										validation: null,
										alias: false,
									},
									[dateTimeFieldId]: {
										field: dateTimeFieldId,
										defaultValue: null,
										nullable: true,
										generated: false,
										type: 'dateTime',
										dbType: 'datetime',
										precision: null,
										scale: null,
										special: [],
										note: null,
										validation: null,
										alias: false,
									},
									[timestampFieldId]: {
										field: timestampFieldId,
										defaultValue: null,
										nullable: true,
										generated: false,
										type: 'timestamp',
										dbType: 'timestamp',
										precision: null,
										scale: null,
										special: [],
										note: null,
										validation: null,
										alias: false,
									},
								},
							},
						},
						relations: [],
					},
				});
			});

			describe('processes dates', () => {
				test('with zero values', () => {
					const result = service.processDates(
						[
							{
								[dateFieldId]: '0000-00-00',
								[dateTimeFieldId]: '0000-00-00 00:00:00',
								[timestampFieldId]: '0000-00-00 00:00:00.000',
							},
						],
						'read',
					);

					expect(result).toMatchObject([
						{
							[dateFieldId]: null,
							[dateTimeFieldId]: null,
							[timestampFieldId]: null,
						},
					]);
				});

				test('with typical values', () => {
					const result = service.processDates(
						[
							{
								[dateFieldId]: '2022-01-10',
								[dateTimeFieldId]: '2021-09-31 12:34:56',
								[timestampFieldId]: '1980-12-08 00:11:22.333',
							},
						],
						'read',
					);

					expect(result).toMatchObject([
						{
							[dateFieldId]: '2022-01-10',
							[dateTimeFieldId]: '2021-10-01T12:34:56',
							[timestampFieldId]: new Date('1980-12-08 00:11:22.333').toISOString(),
						},
					]);
				});

				test('with date object values', () => {
					const result = service.processDates(
						[
							{
								[dateFieldId]: new Date(1666777777000),
								[dateTimeFieldId]: new Date(1666666666000),
								[timestampFieldId]: new Date(1666555444333),
							},
						],
						'read',
					);

					expect(result).toMatchObject([
						{
							[dateFieldId]: toLocalISOString(new Date(1666777777000)).slice(0, 10),
							[dateTimeFieldId]: toLocalISOString(new Date(1666666666000)),
							[timestampFieldId]: new Date(1666555444333).toISOString(),
						},
					]);
				});
			});
		});

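		// processValues: fields with the 'conceal' special are redacted on read, including when they are referenced through aliases.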
		describe('processValues', () => {
			let service: PayloadService;

			const concealedField = 'hidden';
			const stringField = 'string';
			const REDACT_STR = '**********';

			beforeEach(() => {
				service = new PayloadService('test', {
					knex: db,
					schema: {
						collections: {
							test: {
								collection: 'test',
								primary: 'id',
								singleton: false,
								sortField: null,
								note: null,
								accountability: null,
								fields: {
									[concealedField]: {
										field: concealedField,
										defaultValue: null,
										nullable: true,
										generated: false,
										type: 'hash',
										dbType: 'nvarchar',
										precision: null,
										scale: null,
										special: ['hash', 'conceal'],
										note: null,
										validation: null,
										alias: false,
									},
									[stringField]: {
										field: stringField,
										defaultValue: null,
										nullable: true,
										generated: false,
										type: 'string',
										dbType: 'nvarchar',
										precision: null,
										scale: null,
										special: [],
										note: null,
										validation: null,
										alias: false,
									},
								},
							},
						},
						relations: [],
					},
				});
			});

			test('processing special fields', async () => {
				const result = await service.processValues('read', {
					string: 'not-redacted',
					hidden: 'secret',
				});

				expect(result).toMatchObject({ string: 'not-redacted', hidden: REDACT_STR });
			});

			test('processing aliased special fields', async () => {
				const result = await service.processValues(
					'read',
					{
						other_string: 'not-redacted',
						other_hidden: 'secret',
					},
					{ other_string: 'string', other_hidden: 'hidden' },
				);

				expect(result).toMatchObject({ other_string: 'not-redacted', other_hidden: REDACT_STR });
			});
		});
	});
});

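// Formats a Date as an ISO-like string in local time (no timezone suffix); used to build the expected date and dateTime values above.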
function toLocalISOString(date: Date) {
	const year = String(date.getFullYear());
	const month = String(date.getMonth() + 1).padStart(2, '0');
	const day = String(date.getDate()).padStart(2, '0');
	const hours = String(date.getHours()).padStart(2, '0');
	const minutes = String(date.getMinutes()).padStart(2, '0');
	const seconds = String(date.getSeconds()).padStart(2, '0');

	return `${year}-${month}-${day}T${hours}:${minutes}:${seconds}`;
}
