1
import { useEnv } from '@directus/env';
2
import { ContentTooLargeError, ForbiddenError, InvalidPayloadError, ServiceUnavailableError } from '@directus/errors';
3
import formatTitle from '@directus/format-title';
4
import type { BusboyFileStream, File, PrimaryKey } from '@directus/types';
5
import { toArray } from '@directus/utils';
6
import type { AxiosResponse } from 'axios';
7
import encodeURL from 'encodeurl';
8
import exif, { type GPSInfoTags, type ImageTags, type IopTags, type PhotoTags } from 'exif-reader';
9
import type { IccProfile } from 'icc';
10
import { parse as parseIcc } from 'icc';
11
import { clone, pick } from 'lodash-es';
12
import { extension } from 'mime-types';
13
import type { Readable } from 'node:stream';
14
import { PassThrough as PassThroughStream, Transform as TransformStream } from 'node:stream';
15
import { pipeline } from 'node:stream/promises';
16
import zlib from 'node:zlib';
17
import path from 'path';
18
import sharp from 'sharp';
20
import { SUPPORTED_IMAGE_METADATA_FORMATS } from '../constants.js';
21
import emitter from '../emitter.js';
22
import { useLogger } from '../logger.js';
23
import { getAxios } from '../request/index.js';
24
import { getStorage } from '../storage/index.js';
25
import type { AbstractServiceOptions, MutationOptions } from '../types/index.js';
26
import { parseIptc, parseXmp } from '../utils/parse-image-metadata.js';
27
import { ItemsService } from './items.js';
30
// Module-scoped logger shared by the service methods and helpers below.
const logger = useLogger();

/** Subset of `File` fields that image-metadata extraction (`getMetadata`) can populate. */
type Metadata = Partial<Pick<File, 'height' | 'width' | 'description' | 'title' | 'tags' | 'metadata'>>;
34
export class FilesService extends ItemsService {
35
// Files live in the system collection `directus_files`; every generic item
// operation (read/update/delete plumbing) is inherited from ItemsService.
constructor(options: AbstractServiceOptions) {
	super('directus_files', options);
}
40
/**
 * Upload a single new file to the configured storage adapter
 *
 * NOTE(review): this excerpt is truncated — the method signature line (presumably
 * `async uploadOne(`), try/catch framing, many closing braces, and the event-emission
 * call are missing from the visible source. Comments below describe only what the
 * visible lines demonstrate; missing code is NOT reconstructed here.
 */
stream: BusboyFileStream | Readable,
data: Partial<File> & { storage: string },
primaryKey?: PrimaryKey,
opts?: MutationOptions,
): Promise<PrimaryKey> {
const storage = await getStorage();

let existingFile: Record<string, any> | null = null;

// If the payload contains a primary key, we'll check if the file already exists
if (primaryKey !== undefined) {
// If the file you're uploading already exists, we'll consider this upload a replace so we'll fetch the existing file's folder and filename_download
// NOTE(review): the start of this query chain (presumably `existingFile = await this.knex`) is missing from the excerpt.
.select('folder', 'filename_download', 'filename_disk', 'title', 'description', 'metadata')
.from('directus_files')
.where({ id: primaryKey })

// Merge the existing file's folder and filename_download with the new payload
const payload = { ...(existingFile ?? {}), ...clone(data) };

const disk = storage.location(payload.storage);

// If no folder is specified, we'll use the default folder from the settings if it exists
if ('folder' in payload === false) {
const settings = await this.knex.select('storage_default_folder').from('directus_settings').first();

if (settings?.storage_default_folder) {
payload.folder = settings.storage_default_folder;

// Is this file a replacement? if the file data already exists and we have a primary key
const isReplacement = existingFile !== null && primaryKey !== undefined;

// If this is a new file upload, we need to generate a new primary key and DB record
if (isReplacement === false || primaryKey === undefined) {
primaryKey = await this.createOne(payload, { emitEvents: false });

// NOTE(review): the left-hand side of this expression (presumably `const fileExtension =`) is missing from the excerpt.
path.extname(payload.filename_download!) || (payload.type && '.' + extension(payload.type)) || '';

// The filename_disk is the FINAL filename on disk
payload.filename_disk ||= primaryKey + (fileExtension || '');

// Temp filename is used for replacements
const tempFilenameDisk = 'temp_' + payload.filename_disk;

// NOTE(review): the guard around this default is missing from the excerpt — presumably applied only when no `type` was provided; confirm against full source.
payload.type = 'application/octet-stream';

// Used to clean up if something goes wrong
const cleanUp = async () => {
if (isReplacement === true) {
// If this is a replacement that failed, we need to delete the temp file
await disk.delete(tempFilenameDisk);
// If this is a new file that failed
// delete the DB record
await super.deleteMany([primaryKey!]);
// delete the final file
await disk.delete(payload.filename_disk!);
// NOTE(review): the catch framing around the warnings below is missing from the excerpt.
if (isReplacement === true) {
logger.warn(`Couldn't delete temp file ${tempFilenameDisk}`);
logger.warn(`Couldn't delete file ${payload.filename_disk}`);

// If this is a replacement, we'll write the file to a temp location first to ensure we don't overwrite the existing file if something goes wrong
if (isReplacement === true) {
await disk.write(tempFilenameDisk, stream, payload.type);
// If this is a new file upload, we'll write the file to the final location
await disk.write(payload.filename_disk, stream, payload.type);

// Check if the file was truncated (if the stream ended early) and throw limit error if it was
if ('truncated' in stream && stream.truncated === true) {
throw new ContentTooLargeError();
logger.warn(`Couldn't save file ${payload.filename_disk}`);
// NOTE(review): `err` implies an enclosing catch block that is missing from the excerpt.
if (err instanceof ContentTooLargeError) {
throw new ServiceUnavailableError({ service: 'files', reason: `Couldn't save file ${payload.filename_disk}` });

// If the file is a replacement, we need to update the DB record with the new payload, delete the old files, and upgrade the temp file
if (isReplacement === true) {
await this.updateOne(primaryKey, payload, { emitEvents: false });

// delete the previously saved file and thumbnails to ensure they're generated fresh
for await (const filepath of disk.list(String(primaryKey))) {
await disk.delete(filepath);

// Upgrade the temp file to the final filename
await disk.move(tempFilenameDisk, payload.filename_disk);

// Record the actual on-disk size of what was written.
const { size } = await storage.location(data.storage).stat(payload.filename_disk);
payload.filesize = size;

// For supported image formats, re-read the stored file and extract dimensions/metadata.
if (SUPPORTED_IMAGE_METADATA_FORMATS.includes(payload.type)) {
const stream = await storage.location(data.storage).read(payload.filename_disk);
const { height, width, description, title, tags, metadata } = await this.getMetadata(stream);

if (!payload.height && height) {
payload.height = height;

if (!payload.width && width) {
payload.width = width;

if (!payload.metadata && metadata) {
payload.metadata = metadata;

// Note that if this is a replace file upload, the below properties are fetched and included in the payload above
// in the `existingFile` variable... so this will ONLY set the values if they're not already set

if (!payload.description && description) {
payload.description = description;

if (!payload.title && title) {
payload.title = title;

if (!payload.tags && tags) {
// NOTE(review): the `payload.tags = tags;` assignment implied by this guard is missing from the excerpt.

// We do this in a service without accountability. Even if you don't have update permissions to the file,
// we still want to be able to set the extracted values from the file on create
const sudoService = new ItemsService('directus_files', {

await sudoService.updateOne(primaryKey, payload, { emitEvents: false });

if (opts?.emitEvents !== false) {
// NOTE(review): the emitter call wrapping these properties is missing from the excerpt.
collection: this.collection,
accountability: this.accountability,
226
/**
 * Extract metadata from a buffer's content
 *
 * NOTE(review): this excerpt is truncated — the method signature line (presumably
 * `async getMetadata(stream, ...)`), the stream→sharp pipeline wiring, the
 * `resolve`/`reject` calls, and many closing braces are missing from the visible
 * source. Comments below describe only what the visible lines demonstrate.
 */
allowList: string | string[] = env['FILE_METADATA_ALLOW_LIST'] as string[],
): Promise<Metadata> {
return new Promise((resolve, reject) => {
sharp().metadata(async (err, sharpMetadata) => {
const metadata: Metadata = {};

// EXIF orientation values >= 5 describe a 90°-transposed image, so the stored
// width/height are swapped relative to how the image is displayed.
if (sharpMetadata.orientation && sharpMetadata.orientation >= 5) {
metadata.height = sharpMetadata.width ?? null;
metadata.width = sharpMetadata.height ?? null;
// NOTE(review): the `else` framing between these two assignment pairs is missing from the excerpt.
metadata.width = sharpMetadata.width ?? null;
metadata.height = sharpMetadata.height ?? null;

// Backward-compatible layout as it used to be with 'exifr'
const fullMetadata: {
ifd0?: Partial<ImageTags>;
ifd1?: Partial<ImageTags>;
exif?: Partial<PhotoTags>;
gps?: Partial<GPSInfoTags>;
interop?: Partial<IopTags>;
// NOTE(review): an `icc` member is assigned below but its declaration line (presumably `icc?: IccProfile;`) is missing from the excerpt.
iptc?: Record<string, unknown>;
xmp?: Record<string, unknown>;

if (sharpMetadata.exif) {
const { Image, ThumbnailTags, Iop, GPSInfo, Photo } = (exif as unknown as typeof exif.default)(
fullMetadata.ifd0 = Image;
fullMetadata.ifd1 = ThumbnailTags;
fullMetadata.interop = Iop;
fullMetadata.gps = GPSInfo;
fullMetadata.exif = Photo;
// Extraction is best-effort: failures are logged, not fatal.
logger.warn(`Couldn't extract Exif metadata from file`);

if (sharpMetadata.icc) {
fullMetadata.icc = parseIcc(sharpMetadata.icc);
logger.warn(`Couldn't extract ICC profile data from file`);

if (sharpMetadata.iptc) {
fullMetadata.iptc = parseIptc(sharpMetadata.iptc);
logger.warn(`Couldn't extract IPTC Photo Metadata from file`);

if (sharpMetadata.xmp) {
fullMetadata.xmp = parseXmp(sharpMetadata.xmp);
logger.warn(`Couldn't extract XMP data from file`);

// Promote well-known IPTC fields to first-class file attributes.
if (fullMetadata?.iptc?.['Caption'] && typeof fullMetadata.iptc['Caption'] === 'string') {
metadata.description = fullMetadata.iptc?.['Caption'];

if (fullMetadata?.iptc?.['Headline'] && typeof fullMetadata.iptc['Headline'] === 'string') {
metadata.title = fullMetadata.iptc['Headline'];

if (fullMetadata?.iptc?.['Keywords']) {
metadata.tags = fullMetadata.iptc['Keywords'] as string;

// `'*'` (as the whole value or the first array element) means "keep everything";
// otherwise only the allow-listed top-level sections survive.
if (allowList === '*' || allowList?.[0] === '*') {
metadata.metadata = fullMetadata;
metadata.metadata = pick(fullMetadata, allowList);

// Fix (incorrectly parsed?) values starting / ending with spaces,
// limited to one level and string values only
for (const section of Object.keys(metadata.metadata)) {
for (const [key, value] of Object.entries(metadata.metadata[section])) {
if (typeof value === 'string') {
metadata.metadata[section][key] = value.trim();
356
/**
 * Import a single file from an external URL
 *
 * NOTE(review): this excerpt is truncated — the `try {` opener, the `fileResponse`
 * declaration, the `payload` object framing, and closing braces are missing from the
 * visible source. Comments below describe only what the visible lines demonstrate.
 */
async importOne(importURL: string, body: Partial<File>): Promise<PrimaryKey> {
// Non-admin users need an explicit `create` permission on directus_files to import.
const fileCreatePermissions = this.accountability?.permissions?.find(
(permission) => permission.collection === 'directus_files' && permission.action === 'create',

if (this.accountability && this.accountability?.admin !== true && !fileCreatePermissions) {
throw new ForbiddenError();

const axios = await getAxios();

fileResponse = await axios.get<Readable>(encodeURL(importURL), {
responseType: 'stream',
} catch (error: any) {
logger.warn(`Couldn't fetch file from URL "${importURL}"${error.message ? `: ${error.message}` : ''}`);

throw new ServiceUnavailableError({
service: 'external-file',
reason: `Couldn't fetch file from URL "${importURL}"`,

// Derive the filename from the final (post-redirect) response URL.
// NOTE(review): `url` is not among the visible imports of this excerpt — presumably `node:url`; confirm against full source.
const parsedURL = url.parse(fileResponse.request.res.responseUrl);
const filename = decodeURI(path.basename(parsedURL.pathname as string));

filename_download: filename,
// Imported files always land in the first configured storage location.
storage: toArray(env['STORAGE_LOCATIONS'] as string)[0]!,
type: fileResponse.headers['content-type'],
title: formatTitle(filename),

// Transparently decompress gzip/deflate/br responses before handing off to uploadOne.
return await this.uploadOne(decompressResponse(fileResponse.data, fileResponse.headers), payload, payload.id);
401
* Create a file (only applicable when it is not a multipart/data POST request)
402
* Useful for associating metadata with existing file in storage
404
override async createOne(data: Partial<File>, opts?: MutationOptions): Promise<PrimaryKey> {
406
throw new InvalidPayloadError({ reason: `"type" is required` });
409
const key = await super.createOne(data, opts);
414
* Delete multiple files
416
override async deleteMany(keys: PrimaryKey[]): Promise<PrimaryKey[]> {
417
const storage = await getStorage();
418
const files = await super.readMany(keys, { fields: ['id', 'storage', 'filename_disk'], limit: -1 });
421
throw new ForbiddenError();
424
await super.deleteMany(keys);
426
for (const file of files) {
427
const disk = storage.location(file['storage']);
428
const filePrefix = path.parse(file['filename_disk']).name;
430
// Delete file + thumbnails
431
for await (const filepath of disk.list(filePrefix)) {
432
await disk.delete(filepath);
440
/**
 * Wrap a fetched response stream so gzip/deflate/br encoded bodies are transparently
 * decompressed before being consumed.
 *
 * NOTE(review): this excerpt is truncated — the early return for unencoded responses,
 * the `isEmpty` flag declaration/updates, the `destroy` body, `else` framings, and
 * closing braces are missing from the visible source. Comments below describe only
 * what the visible lines demonstrate.
 */
function decompressResponse(stream: Readable, headers: AxiosResponse['headers']) {
const contentEncoding = (headers['content-encoding'] || '').toLowerCase();

// Only these encodings are handled; anything else presumably passes through unchanged
// (the return for that branch is missing from the excerpt).
if (!['gzip', 'deflate', 'br'].includes(contentEncoding)) {

// Inspects the first chunk before deciding how (and whether) to set up decompression.
const checker = new TransformStream({
transform(data, _encoding, callback) {
if (isEmpty === false) {
callback(null, data);

handleContentEncoding(data);

callback(null, data);

const finalStream = new PassThroughStream({
destroy(error, callback) {

stream.pipe(checker);

// Chooses the zlib stream matching the declared content-encoding and splices it
// between `checker` and `finalStream`.
function handleContentEncoding(data: any) {
let decompressStream;

if (contentEncoding === 'br') {
decompressStream = zlib.createBrotliDecompress();
} else if (contentEncoding === 'deflate' && isDeflateAlgorithm(data)) {
// Raw deflate data (no zlib wrapper) requires InflateRaw; createUnzip would fail.
decompressStream = zlib.createInflateRaw();
// NOTE(review): `else` framing missing from excerpt — createUnzip handles gzip and zlib-wrapped deflate.
decompressStream = zlib.createUnzip();

decompressStream.once('error', (error) => {
// NOTE(review): presumably tolerates decode errors on empty, fully-consumed streams —
// the surrounding handling is missing from the excerpt; confirm against full source.
if (isEmpty && !stream.readable) {

finalStream.destroy(error);

checker.pipe(decompressStream).pipe(finalStream);

// A zlib-wrapped stream sets bit 0x08 (CM = deflate) in its first byte; when that bit
// is clear the payload is treated as raw deflate data.
function isDeflateAlgorithm(data: any) {
const DEFLATE_ALGORITHM_HEADER = 0x08;

return data.length > 0 && (data[0] & DEFLATE_ALGORITHM_HEADER) === 0;