import { useEnv } from '@directus/env';
import { ContentTooLargeError, ForbiddenError, InvalidPayloadError, ServiceUnavailableError } from '@directus/errors';
import formatTitle from '@directus/format-title';
import type { BusboyFileStream, File, PrimaryKey } from '@directus/types';
import { toArray } from '@directus/utils';
import type { AxiosResponse } from 'axios';
import encodeURL from 'encodeurl';
import exif, { type GPSInfoTags, type ImageTags, type IopTags, type PhotoTags } from 'exif-reader';
import type { IccProfile } from 'icc';
import { parse as parseIcc } from 'icc';
import { clone, pick } from 'lodash-es';
import { extension } from 'mime-types';
import type { Readable } from 'node:stream';
import { PassThrough as PassThroughStream, Transform as TransformStream } from 'node:stream';
import { pipeline } from 'node:stream/promises';
import zlib from 'node:zlib';
import path from 'path';
import sharp from 'sharp';
import url from 'url';
import { SUPPORTED_IMAGE_METADATA_FORMATS } from '../constants.js';
import emitter from '../emitter.js';
import { useLogger } from '../logger.js';
import { getAxios } from '../request/index.js';
import { getStorage } from '../storage/index.js';
import type { AbstractServiceOptions, MutationOptions } from '../types/index.js';
import { parseIptc, parseXmp } from '../utils/parse-image-metadata.js';
import { ItemsService } from './items.js';

const env = useEnv();
const logger = useLogger();

type Metadata = Partial<Pick<File, 'height' | 'width' | 'description' | 'title' | 'tags' | 'metadata'>>;

export class FilesService extends ItemsService {
	constructor(options: AbstractServiceOptions) {
		super('directus_files', options);
	}

	/**
	 * Upload a single new file to the configured storage adapter
	 */
	async uploadOne(
		stream: BusboyFileStream | Readable,
		data: Partial<File> & { storage: string },
		primaryKey?: PrimaryKey,
		opts?: MutationOptions,
	): Promise<PrimaryKey> {
		const storage = await getStorage();

		let existingFile: Record<string, any> | null = null;

		// If the payload contains a primary key, we'll check if the file already exists
		if (primaryKey !== undefined) {
			// If the file you're uploading already exists, we'll treat this upload as a replacement, so we fetch the existing file's folder and filename_download
			existingFile =
				(await this.knex
					.select('folder', 'filename_download', 'filename_disk', 'title', 'description', 'metadata')
					.from('directus_files')
					.where({ id: primaryKey })
					.first()) ?? null;
		}

		// Merge the existing file's folder and filename_download with the new payload
		const payload = { ...(existingFile ?? {}), ...clone(data) };

		const disk = storage.location(payload.storage);

		// If no folder is specified, we'll use the default folder from the settings if it exists
		if ('folder' in payload === false) {
			const settings = await this.knex.select('storage_default_folder').from('directus_settings').first();

			if (settings?.storage_default_folder) {
				payload.folder = settings.storage_default_folder;
			}
		}

		// This upload is a replacement if the file already exists and a primary key was provided
		const isReplacement = existingFile !== null && primaryKey !== undefined;

		// If this is a new file upload, we need to generate a new primary key and DB record
		if (isReplacement === false || primaryKey === undefined) {
			primaryKey = await this.createOne(payload, { emitEvents: false });
		}

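		// Prefer the extension from the original filename; fall back to one derived from the MIME type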
		const fileExtension =
			path.extname(payload.filename_download!) || (payload.type && '.' + extension(payload.type)) || '';

		// The filename_disk is the FINAL filename on disk
		payload.filename_disk ||= primaryKey + (fileExtension || '');

		// Temp filename is used for replacements
		const tempFilenameDisk = 'temp_' + payload.filename_disk;

		if (!payload.type) {
			payload.type = 'application/octet-stream';
		}

		// Used to clean up if something goes wrong
		const cleanUp = async () => {
			try {
				if (isReplacement === true) {
					// If this is a replacement that failed, we need to delete the temp file
					await disk.delete(tempFilenameDisk);
				} else {
					// If this is a new file that failed
					// delete the DB record
					await super.deleteMany([primaryKey!]);

					// delete the final file
					await disk.delete(payload.filename_disk!);
				}
			} catch (err: any) {
				if (isReplacement === true) {
					logger.warn(`Couldn't delete temp file ${tempFilenameDisk}`);
				} else {
					logger.warn(`Couldn't delete file ${payload.filename_disk}`);
				}

				logger.warn(err);
			}
		};

		try {
			// If this is a replacement, we'll write the file to a temp location first to ensure we don't overwrite the existing file if something goes wrong
			if (isReplacement === true) {
				await disk.write(tempFilenameDisk, stream, payload.type);
			} else {
				// If this is a new file upload, we'll write the file to the final location
				await disk.write(payload.filename_disk, stream, payload.type);
			}

			// Check if the file was truncated (if the stream ended early) and throw limit error if it was
			if ('truncated' in stream && stream.truncated === true) {
				throw new ContentTooLargeError();
			}
		} catch (err: any) {
			logger.warn(`Couldn't save file ${payload.filename_disk}`);
			logger.warn(err);

			await cleanUp();

			if (err instanceof ContentTooLargeError) {
				throw err;
			} else {
				throw new ServiceUnavailableError({ service: 'files', reason: `Couldn't save file ${payload.filename_disk}` });
			}
		}

		// If the file is a replacement, we need to update the DB record with the new payload, delete the old files, and promote the temp file
		if (isReplacement === true) {
			await this.updateOne(primaryKey, payload, { emitEvents: false });

			// delete the previously saved file and thumbnails to ensure they're generated fresh
			for await (const filepath of disk.list(String(primaryKey))) {
				await disk.delete(filepath);
			}

			// Promote the temp file to the final filename
			await disk.move(tempFilenameDisk, payload.filename_disk);
		}

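		// Stat the stored object so filesize reflects the bytes actually written to storage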
		const { size } = await storage.location(data.storage).stat(payload.filename_disk);
		payload.filesize = size;

		if (SUPPORTED_IMAGE_METADATA_FORMATS.includes(payload.type)) {
			const stream = await storage.location(data.storage).read(payload.filename_disk);
			const { height, width, description, title, tags, metadata } = await this.getMetadata(stream);

			if (!payload.height && height) {
				payload.height = height;
			}

			if (!payload.width && width) {
				payload.width = width;
			}

			if (!payload.metadata && metadata) {
				payload.metadata = metadata;
			}

			// Note that if this is a replace file upload, the below properties are fetched and included in the payload above
			// in the `existingFile` variable... so this will ONLY set the values if they're not already set

			if (!payload.description && description) {
				payload.description = description;
			}

			if (!payload.title && title) {
				payload.title = title;
			}

			if (!payload.tags && tags) {
				payload.tags = tags;
			}
		}

		// We do this in a service without accountability. Even if you don't have update permissions to the file,
		// we still want to be able to set the extracted values from the file on create
		const sudoService = new ItemsService('directus_files', {
			knex: this.knex,
			schema: this.schema,
		});

		await sudoService.updateOne(primaryKey, payload, { emitEvents: false });

		if (opts?.emitEvents !== false) {
			emitter.emitAction(
				'files.upload',
				{
					payload,
					key: primaryKey,
					collection: this.collection,
				},
				{
					database: this.knex,
					schema: this.schema,
					accountability: this.accountability,
				},
			);
		}

		return primaryKey;
	}

	/**
	 * Extract metadata from a file's content stream
	 */
	async getMetadata(
		stream: Readable,
		allowList: string | string[] = env['FILE_METADATA_ALLOW_LIST'] as string[],
	): Promise<Metadata> {
		return new Promise((resolve, reject) => {
			pipeline(
				stream,
				sharp().metadata(async (err, sharpMetadata) => {
					if (err) {
						reject(err);
						return;
					}

					const metadata: Metadata = {};

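					// EXIF orientations 5-8 encode 90°/270° rotations, so width and height are swapped to match the displayed image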
					if (sharpMetadata.orientation && sharpMetadata.orientation >= 5) {
						metadata.height = sharpMetadata.width ?? null;
						metadata.width = sharpMetadata.height ?? null;
					} else {
						metadata.width = sharpMetadata.width ?? null;
						metadata.height = sharpMetadata.height ?? null;
					}

					// Backward-compatible layout as it used to be with 'exifr'
					const fullMetadata: {
						ifd0?: Partial<ImageTags>;
						ifd1?: Partial<ImageTags>;
						exif?: Partial<PhotoTags>;
						gps?: Partial<GPSInfoTags>;
						interop?: Partial<IopTags>;
						icc?: IccProfile;
						iptc?: Record<string, unknown>;
						xmp?: Record<string, unknown>;
					} = {};

					if (sharpMetadata.exif) {
						try {
							const { Image, ThumbnailTags, Iop, GPSInfo, Photo } = (exif as unknown as typeof exif.default)(
								sharpMetadata.exif,
							);

							if (Image) {
								fullMetadata.ifd0 = Image;
							}

							if (ThumbnailTags) {
								fullMetadata.ifd1 = ThumbnailTags;
							}

							if (Iop) {
								fullMetadata.interop = Iop;
							}

							if (GPSInfo) {
								fullMetadata.gps = GPSInfo;
							}

							if (Photo) {
								fullMetadata.exif = Photo;
							}
						} catch (err) {
							logger.warn(`Couldn't extract Exif metadata from file`);
							logger.warn(err);
						}
					}

					if (sharpMetadata.icc) {
						try {
							fullMetadata.icc = parseIcc(sharpMetadata.icc);
						} catch (err) {
							logger.warn(`Couldn't extract ICC profile data from file`);
							logger.warn(err);
						}
					}

					if (sharpMetadata.iptc) {
						try {
							fullMetadata.iptc = parseIptc(sharpMetadata.iptc);
						} catch (err) {
							logger.warn(`Couldn't extract IPTC Photo Metadata from file`);
							logger.warn(err);
						}
					}

					if (sharpMetadata.xmp) {
						try {
							fullMetadata.xmp = parseXmp(sharpMetadata.xmp);
						} catch (err) {
							logger.warn(`Couldn't extract XMP data from file`);
							logger.warn(err);
						}
					}

					if (fullMetadata?.iptc?.['Caption'] && typeof fullMetadata.iptc['Caption'] === 'string') {
						metadata.description = fullMetadata.iptc?.['Caption'];
					}

					if (fullMetadata?.iptc?.['Headline'] && typeof fullMetadata.iptc['Headline'] === 'string') {
						metadata.title = fullMetadata.iptc['Headline'];
					}

					if (fullMetadata?.iptc?.['Keywords']) {
						metadata.tags = fullMetadata.iptc['Keywords'] as string;
					}

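					// '*' (as a string or first array element) keeps the full metadata object; otherwise only the listed top-level sections are kept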
					if (allowList === '*' || allowList?.[0] === '*') {
						metadata.metadata = fullMetadata;
					} else {
						metadata.metadata = pick(fullMetadata, allowList);
					}

					// Fix (incorrectly parsed?) values starting / ending with spaces,
					// limited to one level and string values only
					for (const section of Object.keys(metadata.metadata)) {
						for (const [key, value] of Object.entries(metadata.metadata[section])) {
							if (typeof value === 'string') {
								metadata.metadata[section][key] = value.trim();
							}
						}
					}

					resolve(metadata);
				}),
			);
		});
	}

	/**
	 * Import a single file from an external URL
	 */
	async importOne(importURL: string, body: Partial<File>): Promise<PrimaryKey> {
		const fileCreatePermissions = this.accountability?.permissions?.find(
			(permission) => permission.collection === 'directus_files' && permission.action === 'create',
		);

		if (this.accountability && this.accountability?.admin !== true && !fileCreatePermissions) {
			throw new ForbiddenError();
		}

		let fileResponse;

		try {
			const axios = await getAxios();

			fileResponse = await axios.get<Readable>(encodeURL(importURL), {
				responseType: 'stream',
				decompress: false,
			});
		} catch (error: any) {
			logger.warn(`Couldn't fetch file from URL "${importURL}"${error.message ? `: ${error.message}` : ''}`);
			logger.trace(error);

			throw new ServiceUnavailableError({
				service: 'external-file',
				reason: `Couldn't fetch file from URL "${importURL}"`,
			});
		}

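		// responseUrl is the final URL after any redirects, so the derived filename matches the resource actually fetched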
		const parsedURL = url.parse(fileResponse.request.res.responseUrl);
		const filename = decodeURI(path.basename(parsedURL.pathname as string));

		const payload = {
			filename_download: filename,
			storage: toArray(env['STORAGE_LOCATIONS'] as string)[0]!,
			type: fileResponse.headers['content-type'],
			title: formatTitle(filename),
			...(body || {}),
		};

		return await this.uploadOne(decompressResponse(fileResponse.data, fileResponse.headers), payload, payload.id);
	}

	/**
	 * Create a file (only applicable when it is not a multipart/form-data POST request)
	 * Useful for associating metadata with an existing file in storage
	 */
	override async createOne(data: Partial<File>, opts?: MutationOptions): Promise<PrimaryKey> {
		if (!data.type) {
			throw new InvalidPayloadError({ reason: `"type" is required` });
		}

		const key = await super.createOne(data, opts);
		return key;
	}

	/**
	 * Delete multiple files
	 */
	override async deleteMany(keys: PrimaryKey[]): Promise<PrimaryKey[]> {
		const storage = await getStorage();
		const files = await super.readMany(keys, { fields: ['id', 'storage', 'filename_disk'], limit: -1 });

		if (!files) {
			throw new ForbiddenError();
		}

		await super.deleteMany(keys);

		for (const file of files) {
			const disk = storage.location(file['storage']);
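			// Generated thumbnails are stored under the same name prefix as the original file, so listing by prefix catches both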
			const filePrefix = path.parse(file['filename_disk']).name;

			// Delete file + thumbnails
			for await (const filepath of disk.list(filePrefix)) {
				await disk.delete(filepath);
			}
		}

		return keys;
	}
}

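// importOne fetches with `decompress: false`, so the raw response body arrives here unmodified. The decompressor is
// attached lazily on the first chunk, which also lets empty compressed bodies end the stream without a zlib error.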
function decompressResponse(stream: Readable, headers: AxiosResponse['headers']) {
	const contentEncoding = (headers['content-encoding'] || '').toLowerCase();

	if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) {
		return stream;
	}

	let isEmpty = true;

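	// Pass-through transform whose only job is to trigger decompressor selection when the first chunk arrives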
	const checker = new TransformStream({
		transform(data, _encoding, callback) {
			if (isEmpty === false) {
				callback(null, data);
				return;
			}

			isEmpty = false;

			handleContentEncoding(data);

			callback(null, data);
		},

		flush(callback) {
			callback();
		},
	});

	const finalStream = new PassThroughStream({
		autoDestroy: false,
		destroy(error, callback) {
			stream.destroy();

			callback(error);
		},
	});

	stream.pipe(checker);

	return finalStream;

	function handleContentEncoding(data: any) {
		let decompressStream;

		if (contentEncoding === 'br') {
			decompressStream = zlib.createBrotliDecompress();
		} else if (contentEncoding === 'deflate' && isDeflateAlgorithm(data)) {
			decompressStream = zlib.createInflateRaw();
		} else {
			decompressStream = zlib.createUnzip();
		}

		decompressStream.once('error', (error) => {
			if (isEmpty && !stream.readable) {
				finalStream.end();
				return;
			}

			finalStream.destroy(error);
		});

		checker.pipe(decompressStream).pipe(finalStream);
	}

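	// Some servers send raw deflate data without the zlib wrapper. A zlib header's first byte (CMF) carries compression
	// method 8 in its low nibble, so bit 3 is set; when that bit is clear, the payload is treated as raw deflate.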
	function isDeflateAlgorithm(data: any) {
		const DEFLATE_ALGORITHM_HEADER = 0x08;

		return data.length > 0 && (data[0] & DEFLATE_ALGORITHM_HEADER) === 0;
	}
}