directus

Форк
0
/
payload.ts 
852 строки · 26.2 Кб
1
import { ForbiddenError, InvalidPayloadError } from '@directus/errors';
2
import type {
3
	Accountability,
4
	Alterations,
5
	FieldOverview,
6
	Item,
7
	PrimaryKey,
8
	Query,
9
	SchemaOverview,
10
} from '@directus/types';
11
import { parseJSON, toArray } from '@directus/utils';
12
import { format, isValid, parseISO } from 'date-fns';
13
import { unflatten } from 'flat';
14
import Joi from 'joi';
15
import type { Knex } from 'knex';
16
import { clone, cloneDeep, isNil, isObject, isPlainObject, omit, pick } from 'lodash-es';
17
import { randomUUID } from 'node:crypto';
18
import { parse as wktToGeoJSON } from 'wellknown';
19
import type { Helpers } from '../database/helpers/index.js';
20
import { getHelpers } from '../database/helpers/index.js';
21
import getDatabase from '../database/index.js';
22
import type { AbstractServiceOptions, ActionEventParams, MutationOptions } from '../types/index.js';
23
import { generateHash } from '../utils/generate-hash.js';
24

25
/** The payload-processing phases: writes ('create' | 'update') and reads ('read'). */
type Action = 'create' | 'read' | 'update';
26

27
/**
 * Map of transformer functions keyed by a field's "special" flag
 * (e.g. `hash`, `uuid`, `cast-json`). Each transformer receives the current
 * value plus surrounding context and resolves to the value to store back.
 */
type Transformers = {
	[type: string]: (context: {
		// Which operation triggered the transform
		action: Action;
		// Current value of the field being transformed
		value: any;
		// The full (partial) item payload the field belongs to
		payload: Partial<Item>;
		// Accountability of the current request, if any
		accountability: Accountability | null;
		// All specials configured for this field
		specials: string[];
		// Vendor-specific database helpers
		helpers: Helpers;
	}) => Promise<any>;
};
37

38
/**
39
 * Process a given payload for a collection to ensure the special fields (hash, uuid, date etc) are
40
 * handled correctly.
41
 */
42
export class PayloadService {
43
	/** Accountability of the current request; null for internal/system calls */
	accountability: Accountability | null;
	/** Knex instance all queries run on */
	knex: Knex;
	/** Vendor-specific database helpers derived from the knex instance */
	helpers: Helpers;
	/** Name of the collection this service processes payloads for */
	collection: string;
	/** Schema snapshot used to look up fields and relations */
	schema: SchemaOverview;

	/**
	 * @param collection - Collection the payloads belong to
	 * @param options - Carries the accountability, knex instance, and schema
	 */
	constructor(collection: string, options: AbstractServiceOptions) {
		this.accountability = options.accountability || null;
		this.knex = options.knex || getDatabase();
		this.helpers = getHelpers(this.knex);
		this.collection = collection;
		this.schema = options.schema;
		// A constructor implicitly returns `this`; the previous explicit
		// `return this;` was redundant and has been removed.
	}
58

59
	/**
	 * Transformer functions for the supported "special" field flags. Each one
	 * returns the (possibly converted) value to be stored back on the payload.
	 */
	public transformers: Transformers = {
		async hash({ action, value }) {
			if (!value) return;

			// Only hash on write; reads pass the stored hash through untouched
			if (action !== 'create' && action !== 'update') return value;

			return await generateHash(String(value));
		},
		async uuid({ action, value }) {
			// Generate a fresh UUID only when creating without an explicit value
			if (action !== 'create' || value) return value;
			return randomUUID();
		},
		async 'cast-boolean'({ action, value }) {
			if (action !== 'read') return value;

			if (value === true || value === 1 || value === '1') return true;
			if (value === false || value === 0 || value === '0') return false;
			if (value === null || value === '') return null;

			return value;
		},
		async 'cast-json'({ action, value }) {
			// Some drivers return stored JSON as a string; parse it on read
			if (action !== 'read' || typeof value !== 'string') return value;

			try {
				return parseJSON(value);
			} catch {
				// Not valid JSON; hand back the raw string unchanged
				return value;
			}
		},
		async conceal({ action, value }) {
			// Mask concealed values on read; writes keep the real value
			if (action !== 'read') return value;
			return value ? '**********' : null;
		},
		async 'user-created'({ action, value, accountability }) {
			return action === 'create' ? accountability?.user || null : value;
		},
		async 'user-updated'({ action, value, accountability }) {
			return action === 'update' ? accountability?.user || null : value;
		},
		async 'role-created'({ action, value, accountability }) {
			return action === 'create' ? accountability?.role || null : value;
		},
		async 'role-updated'({ action, value, accountability }) {
			return action === 'update' ? accountability?.role || null : value;
		},
		async 'date-created'({ action, value, helpers }) {
			if (action !== 'create') return value;
			return new Date(helpers.date.writeTimestamp(new Date().toISOString()));
		},
		async 'date-updated'({ action, value, helpers }) {
			if (action !== 'update') return value;
			return new Date(helpers.date.writeTimestamp(new Date().toISOString()));
		},
		async 'cast-csv'({ action, value }) {
			// Anything that is neither an array nor a string becomes undefined
			if (!Array.isArray(value) && typeof value !== 'string') return;

			if (action === 'read') {
				if (Array.isArray(value)) return value;
				return value === '' ? [] : value.split(',');
			}

			return Array.isArray(value) ? value.join(',') : value;
		},
	};
148

149
	processValues(action: Action, payloads: Partial<Item>[]): Promise<Partial<Item>[]>;
	processValues(action: Action, payload: Partial<Item>): Promise<Partial<Item>>;
	processValues(action: Action, payloads: Partial<Item>[], aliasMap: Record<string, string>): Promise<Partial<Item>[]>;
	processValues(action: Action, payload: Partial<Item>, aliasMap: Record<string, string>): Promise<Partial<Item>>;
	/**
	 * Run every special-field transformer, geometry/date conversion, and (on
	 * write) object stringification over the given payload(s).
	 *
	 * @param action - Whether the payload is being created, read, or updated
	 * @param payload - A single record or array of records to process in place
	 * @param aliasMap - Maps alias field names to the real field they refer to,
	 *   so aliased fields inherit the real field's specials
	 * @returns The processed payload, with the same single/array shape as the input
	 */
	async processValues(
		action: Action,
		payload: Partial<Item> | Partial<Item>[],
		aliasMap: Record<string, string> = {},
	): Promise<Partial<Item> | Partial<Item>[]> {
		const processedPayload = toArray(payload);

		if (processedPayload.length === 0) return [];

		// NOTE: field presence is derived from the first record only; records
		// are presumably homogeneous (same selected fields) in a result set
		const fieldsInPayload = Object.keys(processedPayload[0]!);
		const fieldEntries = Object.entries(this.schema.collections[this.collection]!.fields);
		const aliasEntries = Object.entries(aliasMap);

		let specialFields: [string, FieldOverview][] = [];

		for (const [name, field] of fieldEntries) {
			if (field.special && field.special.length > 0) {
				specialFields.push([name, field]);

				// Aliased fields get a copy of the real field's overview with
				// the alias substituted as the field name
				for (const [aliasName, fieldName] of aliasEntries) {
					if (fieldName === name) {
						specialFields.push([aliasName, { ...field, field: aliasName }]);
					}
				}
			}
		}

		// On read, only transform fields that were actually selected
		if (action === 'read') {
			specialFields = specialFields.filter(([name]) => {
				return fieldsInPayload.includes(name);
			});
		}

		// Apply all special transformers; records are mutated in place
		await Promise.all(
			processedPayload.map(async (record: any) => {
				await Promise.all(
					specialFields.map(async ([name, field]) => {
						const newValue = await this.processField(field, record, action, this.accountability);
						if (newValue !== undefined) record[name] = newValue;
					}),
				);
			}),
		);

		this.processGeometries(processedPayload, action);
		this.processDates(processedPayload, action);

		// On write, serialize remaining objects/arrays to JSON strings — except
		// Dates and Knex.Raw geometry wrappers (flagged via isRawInstance)
		if (['create', 'update'].includes(action)) {
			processedPayload.forEach((record) => {
				for (const [key, value] of Object.entries(record)) {
					if (Array.isArray(value) || (typeof value === 'object' && !(value instanceof Date) && value !== null)) {
						if (!value.isRawInstance) {
							record[key] = JSON.stringify(value);
						}
					}
				}
			});
		}

		if (action === 'read') {
			this.processAggregates(processedPayload);
		}

		// Preserve the caller's single-record vs array shape
		if (Array.isArray(payload)) {
			return processedPayload;
		}

		return processedPayload[0]!;
	}
222

223
	/**
	 * Collapse flattened aggregate keys (e.g. `sum->price`) in a read payload
	 * back into nested objects, removing the flattened keys in place.
	 *
	 * @param payload - Array of records returned from the database
	 */
	processAggregates(payload: Partial<Item>[]) {
		// Guard against an empty result set: the previous code dereferenced
		// payload[0]! unconditionally and crashed on []
		if (payload.length === 0) return;

		const aggregateKeys = Object.keys(payload[0]!).filter((key) => key.includes('->'));

		if (aggregateKeys.length) {
			for (const item of payload) {
				Object.assign(item, unflatten(pick(item, aggregateKeys), { delimiter: '->' }));
				aggregateKeys.forEach((key) => delete item[key]);
			}
		}
	}
233

234
	/**
	 * Apply every registered transformer matching the field's specials, in
	 * order, and return the resulting value.
	 *
	 * @param field - Field overview (carries the specials to apply)
	 * @param payload - Record the field value is read from
	 * @param action - Operation being performed
	 * @param accountability - Accountability forwarded to the transformers
	 * @returns The transformed value (or the raw value if no specials apply)
	 */
	async processField(
		field: SchemaOverview['collections'][string]['fields'][string],
		payload: Partial<Item>,
		action: Action,
		accountability: Accountability | null,
	): Promise<any> {
		if (!field.special) return payload[field.field];

		const fieldSpecials = toArray(field.special);

		let value = clone(payload[field.field]);

		for (const special of fieldSpecials) {
			// Unknown specials are simply skipped
			if (!(special in this.transformers)) continue;

			value = await this.transformers[special]!({
				action,
				value,
				payload,
				accountability,
				specials: fieldSpecials,
				helpers: this.helpers,
			});
		}

		return value;
	}
260

261
	/**
	 * Native geometries are stored in a custom binary format. We need to insert
	 * them with the function st_geomfromtext. For this to work, that function
	 * call must not be escaped. It's therefore placed as a Knex.Raw object in
	 * the payload. Thus the need to check if the value is a raw instance before
	 * stringifying it in the next step.
	 */
	processGeometries<T extends Partial<Record<string, any>>[]>(payloads: T, action: Action): T {
		// On read, WKT strings are parsed into GeoJSON; on write, GeoJSON
		// (possibly still a JSON string) becomes a raw st_geomfromtext call
		const convert =
			action === 'read'
				? (value: any) => (typeof value === 'string' ? wktToGeoJSON(value) : value)
				: (value: any) => this.helpers.st.fromGeoJSON(typeof value === 'string' ? parseJSON(value) : value);

		const fields = Object.entries(this.schema.collections[this.collection]!.fields);

		for (const [name, field] of fields) {
			if (!field.type.startsWith('geometry')) continue;

			for (const payload of payloads) {
				if (payload[name]) {
					payload[name] = convert(payload[name]);
				}
			}
		}

		return payloads;
	}
286

287
	/**
	 * Knex returns `datetime` and `date` columns as Date. This is wrong for
	 * date / datetime, as those shouldn't return with time / timezone info
	 * respectively. Converts values in place and returns the same array.
	 *
	 * @param payloads - Records to convert in place
	 * @param action - 'read' formats DB values for output; writes parse/validate input
	 * @throws InvalidPayloadError on write when a date/datetime string fails ISO parsing
	 */
	processDates(payloads: Partial<Record<string, any>>[], action: Action): Partial<Record<string, any>>[] {
		const fieldsInCollection = Object.entries(this.schema.collections[this.collection]!.fields);

		const dateColumns = fieldsInCollection.filter(([_name, field]) =>
			['dateTime', 'date', 'timestamp'].includes(field.type),
		);

		const timeColumns = fieldsInCollection.filter(([_name, field]) => {
			return field.type === 'time';
		});

		if (dateColumns.length === 0 && timeColumns.length === 0) return payloads;

		for (const [name, dateColumn] of dateColumns) {
			for (const payload of payloads) {
				let value: number | string | Date = payload[name];

				// Normalize null and "zero date" strings (e.g. '0000-00-00 00:00:00',
				// which match this punctuation/zero-only pattern) to null
				if (value === null || (typeof value === 'string' && /^[.0 :-]{10,}$/.test(value))) {
					payload[name] = null;
					continue;
				}

				if (!value) continue;

				if (action === 'read') {
					if (typeof value === 'number' || typeof value === 'string') {
						value = new Date(value);
					}

					if (dateColumn.type === 'timestamp') {
						// Let the vendor helper decide the timestamp output string
						const newValue = this.helpers.date.readTimestampString(value.toISOString());
						payload[name] = newValue;
					}

					if (dateColumn.type === 'dateTime') {
						// Format as local date-time WITHOUT timezone information
						const year = String(value.getFullYear());
						const month = String(value.getMonth() + 1).padStart(2, '0');
						const day = String(value.getDate()).padStart(2, '0');
						const hours = String(value.getHours()).padStart(2, '0');
						const minutes = String(value.getMinutes()).padStart(2, '0');
						const seconds = String(value.getSeconds()).padStart(2, '0');

						const newValue = `${year}-${month}-${day}T${hours}:${minutes}:${seconds}`;
						payload[name] = newValue;
					}

					if (dateColumn.type === 'date') {
						const year = String(value.getFullYear());
						const month = String(value.getMonth() + 1).padStart(2, '0');
						const day = String(value.getDate()).padStart(2, '0');

						// Strip off the time / timezone information from a date-only value
						const newValue = `${year}-${month}-${day}`;
						payload[name] = newValue;
					}
				} else {
					// Write path: only strings need parsing; Dates pass through as-is
					if (value instanceof Date === false && typeof value === 'string') {
						if (dateColumn.type === 'date') {
							const parsedDate = parseISO(value);

							if (!isValid(parsedDate)) {
								throw new InvalidPayloadError({ reason: `Invalid Date format in field "${dateColumn.field}"` });
							}

							payload[name] = parsedDate;
						}

						if (dateColumn.type === 'dateTime') {
							const parsedDate = parseISO(value);

							if (!isValid(parsedDate)) {
								throw new InvalidPayloadError({ reason: `Invalid DateTime format in field "${dateColumn.field}"` });
							}

							payload[name] = parsedDate;
						}

						if (dateColumn.type === 'timestamp') {
							const newValue = this.helpers.date.writeTimestamp(value);
							payload[name] = newValue;
						}
					}
				}
			}
		}

		/**
		 * Some DB drivers (MS SQL f.e.) return time values as Date objects. For consistencies sake,
		 * we'll abstract those back to hh:mm:ss
		 */
		for (const [name] of timeColumns) {
			for (const payload of payloads) {
				const value = payload[name];

				if (!value) continue;

				if (action === 'read') {
					if (value instanceof Date) payload[name] = format(value, 'HH:mm:ss');
				}
			}
		}

		return payloads;
	}
395

396
	/**
	 * Recursively save/update all nested related Any-to-One items.
	 *
	 * Each nested object in the payload is created or updated through its own
	 * collection's service, then replaced by its primary key so the parent
	 * record can be saved with a plain foreign-key value.
	 *
	 * @param data - Parent payload possibly containing nested a2o objects
	 * @param opts - Mutation options forwarded to the nested services
	 * @returns The payload with nested objects collapsed to primary keys, the
	 *   revision PKs created along the way, and any buffered action events
	 * @throws InvalidPayloadError when the collection discriminator field is
	 *   missing or points at a collection not in the allow-list
	 */
	async processA2O(
		data: Partial<Item>,
		opts?: MutationOptions,
	): Promise<{ payload: Partial<Item>; revisions: PrimaryKey[]; nestedActionEvents: ActionEventParams[] }> {
		const relations = this.schema.relations.filter((relation) => {
			return relation.collection === this.collection;
		});

		const revisions: PrimaryKey[] = [];

		const nestedActionEvents: ActionEventParams[] = [];

		const payload = cloneDeep(data);

		// Only process related records that are actually in the payload
		const relationsToProcess = relations.filter((relation) => {
			return relation.field in payload && isPlainObject(payload[relation.field]);
		});

		for (const relation of relationsToProcess) {
			// If the required a2o configuration fields are missing, this is a m2o instead of an a2o
			if (!relation.meta?.one_collection_field || !relation.meta?.one_allowed_collections) continue;

			// The discriminator field tells us which collection this nested item belongs to
			const relatedCollection = payload[relation.meta.one_collection_field];

			if (!relatedCollection) {
				throw new InvalidPayloadError({
					reason: `Can't update nested record "${relation.collection}.${relation.field}" without field "${relation.collection}.${relation.meta.one_collection_field}" being set`,
				});
			}

			const allowedCollections = relation.meta.one_allowed_collections;

			if (allowedCollections.includes(relatedCollection) === false) {
				throw new InvalidPayloadError({
					reason: `"${relation.collection}.${relation.field}" can't be linked to collection "${relatedCollection}"`,
				});
			}

			// Dynamic import avoids a circular dependency with the service layer
			const { getService } = await import('../utils/get-service.js');

			const service = getService(relatedCollection, {
				accountability: this.accountability,
				knex: this.knex,
				schema: this.schema,
			});

			const relatedPrimary = this.schema.collections[relatedCollection]!.primary;
			const relatedRecord: Partial<Item> = payload[relation.field];

			// A bare primary key needs no processing; leave it as-is
			if (['string', 'number'].includes(typeof relatedRecord)) continue;

			const hasPrimaryKey = relatedPrimary in relatedRecord;

			let relatedPrimaryKey: PrimaryKey = relatedRecord[relatedPrimary];

			// Only treat this as an update when the PK actually exists in the DB
			const exists =
				hasPrimaryKey &&
				!!(await this.knex
					.select(relatedPrimary)
					.from(relatedCollection)
					.where({ [relatedPrimary]: relatedPrimaryKey })
					.first());

			if (exists) {
				const fieldsToUpdate = omit(relatedRecord, relatedPrimary);

				// Skip the update entirely when only the PK was provided
				if (Object.keys(fieldsToUpdate).length > 0) {
					await service.updateOne(relatedPrimaryKey, relatedRecord, {
						onRevisionCreate: (pk) => revisions.push(pk),
						bypassEmitAction: (params) =>
							opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
						emitEvents: opts?.emitEvents,
						mutationTracker: opts?.mutationTracker,
					});
				}
			} else {
				relatedPrimaryKey = await service.createOne(relatedRecord, {
					onRevisionCreate: (pk) => revisions.push(pk),
					bypassEmitAction: (params) =>
						opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
					emitEvents: opts?.emitEvents,
					mutationTracker: opts?.mutationTracker,
				});
			}

			// Overwrite the nested object with just the primary key, so the parent level can be saved correctly
			payload[relation.field] = relatedPrimaryKey;
		}

		return { payload, revisions, nestedActionEvents };
	}
491

492
	/**
	 * Save/update all nested related m2o items inside the payload.
	 *
	 * Mirrors processA2O, but the related collection is fixed by the relation
	 * (`related_collection`) instead of a discriminator field. Nested objects
	 * are created or updated and then collapsed to their primary key.
	 *
	 * @param data - Parent payload possibly containing nested m2o objects
	 * @param opts - Mutation options forwarded to the nested services
	 * @returns The payload with nested objects collapsed to primary keys, the
	 *   revision PKs created along the way, and any buffered action events
	 */
	async processM2O(
		data: Partial<Item>,
		opts?: MutationOptions,
	): Promise<{ payload: Partial<Item>; revisions: PrimaryKey[]; nestedActionEvents: ActionEventParams[] }> {
		const payload = cloneDeep(data);

		// All the revisions saved on this level
		const revisions: PrimaryKey[] = [];

		const nestedActionEvents: ActionEventParams[] = [];

		// Many to one relations that exist on the current collection
		const relations = this.schema.relations.filter((relation) => {
			return relation.collection === this.collection;
		});

		// Only process related records that are actually in the payload
		const relationsToProcess = relations.filter((relation) => {
			return relation.field in payload && isObject(payload[relation.field]);
		});

		for (const relation of relationsToProcess) {
			// If no "one collection" exists, this is a A2O, not a M2O
			if (!relation.related_collection) continue;
			const relatedPrimaryKeyField = this.schema.collections[relation.related_collection]!.primary;

			// Dynamic import avoids a circular dependency with the service layer
			const { getService } = await import('../utils/get-service.js');

			const service = getService(relation.related_collection, {
				accountability: this.accountability,
				knex: this.knex,
				schema: this.schema,
			});

			const relatedRecord: Partial<Item> = payload[relation.field];

			// A bare primary key needs no processing; leave it as-is
			if (['string', 'number'].includes(typeof relatedRecord)) continue;

			const hasPrimaryKey = relatedPrimaryKeyField in relatedRecord;

			let relatedPrimaryKey: PrimaryKey = relatedRecord[relatedPrimaryKeyField];

			// Only treat this as an update when the PK actually exists in the DB
			const exists =
				hasPrimaryKey &&
				!!(await this.knex
					.select(relatedPrimaryKeyField)
					.from(relation.related_collection)
					.where({ [relatedPrimaryKeyField]: relatedPrimaryKey })
					.first());

			if (exists) {
				const fieldsToUpdate = omit(relatedRecord, relatedPrimaryKeyField);

				// Skip the update entirely when only the PK was provided
				if (Object.keys(fieldsToUpdate).length > 0) {
					await service.updateOne(relatedPrimaryKey, relatedRecord, {
						onRevisionCreate: (pk) => revisions.push(pk),
						bypassEmitAction: (params) =>
							opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
						emitEvents: opts?.emitEvents,
						mutationTracker: opts?.mutationTracker,
					});
				}
			} else {
				relatedPrimaryKey = await service.createOne(relatedRecord, {
					onRevisionCreate: (pk) => revisions.push(pk),
					bypassEmitAction: (params) =>
						opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
					emitEvents: opts?.emitEvents,
					mutationTracker: opts?.mutationTracker,
				});
			}

			// Overwrite the nested object with just the primary key, so the parent level can be saved correctly
			payload[relation.field] = relatedPrimaryKey;
		}

		return { payload, revisions, nestedActionEvents };
	}
573

574
	/**
	 * Recursively save/update all nested related o2m items.
	 *
	 * The nested field can be either a plain array of children (full replace:
	 * items not listed are deselected per `one_deselect_action`) or an
	 * "alterations" object with explicit `create` / `update` / `delete` lists.
	 *
	 * @param data - Parent payload possibly containing nested o2m fields
	 * @param parent - Primary key of the parent record the children point back to
	 * @param opts - Mutation options forwarded to the nested services
	 * @returns Revision PKs created along the way and any buffered action events
	 * @throws ForbiddenError when a referenced child PK doesn't exist (or isn't visible)
	 * @throws InvalidPayloadError when the alterations object fails validation
	 */
	async processO2M(
		data: Partial<Item>,
		parent: PrimaryKey,
		opts?: MutationOptions,
	): Promise<{ revisions: PrimaryKey[]; nestedActionEvents: ActionEventParams[] }> {
		const revisions: PrimaryKey[] = [];

		const nestedActionEvents: ActionEventParams[] = [];

		// o2m means the relation points AT this collection from the child side
		const relations = this.schema.relations.filter((relation) => {
			return relation.related_collection === this.collection;
		});

		const payload = cloneDeep(data);

		// Only process related records that are actually in the payload
		const relationsToProcess = relations.filter((relation) => {
			if (!relation.meta?.one_field) return false;
			return relation.meta.one_field in payload;
		});

		// Shape check for the create/update/delete alterations object
		const nestedUpdateSchema = Joi.object({
			create: Joi.array().items(Joi.object().unknown()),
			update: Joi.array().items(Joi.object().unknown()),
			delete: Joi.array().items(Joi.string(), Joi.number()),
		});

		for (const relation of relationsToProcess) {
			if (!relation.meta) continue;

			const currentPrimaryKeyField = this.schema.collections[relation.related_collection!]!.primary;
			const relatedPrimaryKeyField = this.schema.collections[relation.collection]!.primary;

			// Dynamic import avoids a circular dependency with the service layer
			const { getService } = await import('../utils/get-service.js');

			const service = getService(relation.collection, {
				accountability: this.accountability,
				knex: this.knex,
				schema: this.schema,
			});

			const recordsToUpsert: Partial<Item>[] = [];
			const savedPrimaryKeys: PrimaryKey[] = [];

			// Nested array of individual items
			const field = payload[relation.meta!.one_field!];

			if (!field || Array.isArray(field)) {
				const updates = field || []; // treat falsey values as removing all children

				for (let i = 0; i < updates.length; i++) {
					const relatedRecord = updates[i];

					let record = cloneDeep(relatedRecord);

					// Children given as bare PKs are resolved to records first
					if (typeof relatedRecord === 'string' || typeof relatedRecord === 'number') {
						const existingRecord = await this.knex
							.select(relatedPrimaryKeyField, relation.field)
							.from(relation.collection)
							.where({ [relatedPrimaryKeyField]: record })
							.first();

						if (!!existingRecord === false) {
							throw new ForbiddenError();
						}

						// If the related item is already associated to the current item, and there's no
						// other updates (which is indicated by the fact that this is just the PK, we can
						// ignore updating this item. This makes sure we don't trigger any update logic
						// for items that aren't actually being updated. NOTE: We use == here, as the
						// primary key might be reported as a string instead of number, coming from the
						// http route, and or a bigInteger in the DB
						if (
							isNil(existingRecord[relation.field]) === false &&
							(existingRecord[relation.field] == parent ||
								existingRecord[relation.field] == payload[currentPrimaryKeyField])
						) {
							savedPrimaryKeys.push(existingRecord[relatedPrimaryKeyField]);
							continue;
						}

						record = {
							[relatedPrimaryKeyField]: relatedRecord,
						};
					}

					// Point the child back at the parent record
					recordsToUpsert.push({
						...record,
						[relation.field]: parent || payload[currentPrimaryKeyField],
					});
				}

				savedPrimaryKeys.push(
					...(await service.upsertMany(recordsToUpsert, {
						onRevisionCreate: (pk) => revisions.push(pk),
						bypassEmitAction: (params) =>
							opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
						emitEvents: opts?.emitEvents,
						mutationTracker: opts?.mutationTracker,
					})),
				);

				// All current children of the parent that were NOT in this payload
				const query: Query = {
					filter: {
						_and: [
							{
								[relation.field]: {
									_eq: parent,
								},
							},
							{
								[relatedPrimaryKeyField]: {
									_nin: savedPrimaryKeys,
								},
							},
						],
					},
				};

				// Nullify all related items that aren't included in the current payload
				if (relation.meta.one_deselect_action === 'delete') {
					// There's no revision for a deletion
					await service.deleteByQuery(query, {
						bypassEmitAction: (params) =>
							opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
						emitEvents: opts?.emitEvents,
						mutationTracker: opts?.mutationTracker,
					});
				} else {
					await service.updateByQuery(
						query,
						{ [relation.field]: null },
						{
							onRevisionCreate: (pk) => revisions.push(pk),
							bypassEmitAction: (params) =>
								opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
							emitEvents: opts?.emitEvents,
							mutationTracker: opts?.mutationTracker,
						},
					);
				}
			}
			// "Updates" object w/ create/update/delete
			else {
				const alterations = field as Alterations;
				const { error } = nestedUpdateSchema.validate(alterations);
				if (error) throw new InvalidPayloadError({ reason: `Invalid one-to-many update structure: ${error.message}` });

				if (alterations.create) {
					const sortField = relation.meta.sort_field;

					let createPayload: Alterations['create'];

					if (sortField !== null) {
						// New children are appended after the current highest sort value
						const highestOrderNumber: Record<'max', number | null> | undefined = await this.knex
							.from(relation.collection)
							.where({ [relation.field]: parent })
							.whereNotNull(sortField)
							.max(sortField, { as: 'max' })
							.first();

						createPayload = alterations.create.map((item, index) => {
							const record = cloneDeep(item);

							// add sort field value if it is not supplied in the item
							if (parent !== null && record[sortField] === undefined) {
								record[sortField] = highestOrderNumber?.max ? highestOrderNumber.max + index + 1 : index + 1;
							}

							return {
								...record,
								[relation.field]: parent || payload[currentPrimaryKeyField],
							};
						});
					} else {
						createPayload = alterations.create.map((item) => ({
							...item,
							[relation.field]: parent || payload[currentPrimaryKeyField],
						}));
					}

					await service.createMany(createPayload, {
						onRevisionCreate: (pk) => revisions.push(pk),
						bypassEmitAction: (params) =>
							opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
						emitEvents: opts?.emitEvents,
						mutationTracker: opts?.mutationTracker,
					});
				}

				if (alterations.update) {
					const primaryKeyField = this.schema.collections[relation.collection]!.primary;

					for (const item of alterations.update) {
						await service.updateOne(
							item[primaryKeyField],
							{
								...item,
								[relation.field]: parent || payload[currentPrimaryKeyField],
							},
							{
								onRevisionCreate: (pk) => revisions.push(pk),
								bypassEmitAction: (params) =>
									opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
								emitEvents: opts?.emitEvents,
								mutationTracker: opts?.mutationTracker,
							},
						);
					}
				}

				if (alterations.delete) {
					// Restrict the deselect to children that actually belong to this parent
					const query: Query = {
						filter: {
							_and: [
								{
									[relation.field]: {
										_eq: parent,
									},
								},
								{
									[relatedPrimaryKeyField]: {
										_in: alterations.delete,
									},
								},
							],
						},
					};

					if (relation.meta.one_deselect_action === 'delete') {
						await service.deleteByQuery(query, {
							bypassEmitAction: (params) =>
								opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
							emitEvents: opts?.emitEvents,
							mutationTracker: opts?.mutationTracker,
						});
					} else {
						await service.updateByQuery(
							query,
							{ [relation.field]: null },
							{
								onRevisionCreate: (pk) => revisions.push(pk),
								bypassEmitAction: (params) =>
									opts?.bypassEmitAction ? opts.bypassEmitAction(params) : nestedActionEvents.push(params),
								emitEvents: opts?.emitEvents,
								mutationTracker: opts?.mutationTracker,
							},
						);
					}
				}
			}
		}

		return { revisions, nestedActionEvents };
	}
832

833
	/**
834
	 * Transforms the input partial payload to match the output structure, to have consistency
835
	 * between delta and data
836
	 */
837
	async prepareDelta(data: Partial<Item>): Promise<string | null> {
838
		let payload = cloneDeep(data);
839

840
		for (const key in payload) {
841
			if (payload[key]?.isRawInstance) {
842
				payload[key] = payload[key].bindings[0];
843
			}
844
		}
845

846
		payload = await this.processValues('read', payload);
847

848
		if (Object.keys(payload).length === 0) return null;
849

850
		return JSON.stringify(payload);
851
	}
852
}
853

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.