// assets.ts — Directus AssetsService (216 lines · 7.3 KB)
1
import { useEnv } from '@directus/env';
2
import {
3
	ForbiddenError,
4
	IllegalAssetTransformationError,
5
	RangeNotSatisfiableError,
6
	ServiceUnavailableError,
7
} from '@directus/errors';
8
import type { Range, Stat } from '@directus/storage';
9
import type { Accountability, File } from '@directus/types';
10
import type { Knex } from 'knex';
11
import { clamp } from 'lodash-es';
12
import { contentType } from 'mime-types';
13
import type { Readable } from 'node:stream';
14
import hash from 'object-hash';
15
import path from 'path';
16
import type { FailOnOptions } from 'sharp';
17
import sharp from 'sharp';
18
import { SUPPORTED_IMAGE_TRANSFORM_FORMATS } from '../constants.js';
19
import getDatabase from '../database/index.js';
20
import { useLogger } from '../logger.js';
21
import { getStorage } from '../storage/index.js';
22
import type { AbstractServiceOptions, Transformation, TransformationSet } from '../types/index.js';
23
import { getMilliseconds } from '../utils/get-milliseconds.js';
24
import { isValidUuid } from '../utils/is-valid-uuid.js';
25
import * as TransformationUtils from '../utils/transformations.js';
26
import { AuthorizationService } from './authorization.js';
27
import { FilesService } from './files.js';
28

29
const env = useEnv();
30
const logger = useLogger();
31

32
export class AssetsService {
33
	knex: Knex;
34
	accountability: Accountability | null;
35
	authorizationService: AuthorizationService;
36
	filesService: FilesService;
37

38
	constructor(options: AbstractServiceOptions) {
39
		this.knex = options.knex || getDatabase();
40
		this.accountability = options.accountability || null;
41
		this.filesService = new FilesService({ ...options, accountability: null });
42
		this.authorizationService = new AuthorizationService(options);
43
	}
44

45
	async getAsset(
46
		id: string,
47
		transformation?: TransformationSet,
48
		range?: Range,
49
	): Promise<{ stream: Readable; file: any; stat: Stat }> {
50
		const storage = await getStorage();
51

52
		const publicSettings = await this.knex
53
			.select('project_logo', 'public_background', 'public_foreground', 'public_favicon')
54
			.from('directus_settings')
55
			.first();
56

57
		const systemPublicKeys = Object.values(publicSettings || {});
58

59
		/**
60
		 * This is a little annoying. Postgres will error out if you're trying to search in `where`
61
		 * with a wrong type. In case of directus_files where id is a uuid, we'll have to verify the
62
		 * validity of the uuid ahead of time.
63
		 */
64
		if (!isValidUuid(id)) throw new ForbiddenError();
65

66
		if (systemPublicKeys.includes(id) === false && this.accountability?.admin !== true) {
67
			await this.authorizationService.checkAccess('read', 'directus_files', id);
68
		}
69

70
		const file = (await this.filesService.readOne(id, { limit: 1 })) as File;
71

72
		const exists = await storage.location(file.storage).exists(file.filename_disk);
73

74
		if (!exists) throw new ForbiddenError();
75

76
		if (range) {
77
			const missingRangeLimits = range.start === undefined && range.end === undefined;
78
			const endBeforeStart = range.start !== undefined && range.end !== undefined && range.end <= range.start;
79
			const startOverflow = range.start !== undefined && range.start >= file.filesize;
80
			const endUnderflow = range.end !== undefined && range.end <= 0;
81

82
			if (missingRangeLimits || endBeforeStart || startOverflow || endUnderflow) {
83
				throw new RangeNotSatisfiableError({ range });
84
			}
85

86
			const lastByte = file.filesize - 1;
87

88
			if (range.end) {
89
				if (range.start === undefined) {
90
					// fetch chunk from tail
91
					range.start = file.filesize - range.end;
92
					range.end = lastByte;
93
				}
94

95
				if (range.end >= file.filesize) {
96
					// fetch entire file
97
					range.end = lastByte;
98
				}
99
			}
100

101
			if (range.start) {
102
				if (range.end === undefined) {
103
					// fetch entire file
104
					range.end = lastByte;
105
				}
106

107
				if (range.start < 0) {
108
					// fetch file from head
109
					range.start = 0;
110
				}
111
			}
112
		}
113

114
		const type = file.type;
115
		const transforms = transformation ? TransformationUtils.resolvePreset(transformation, file) : [];
116

117
		if (type && transforms.length > 0 && SUPPORTED_IMAGE_TRANSFORM_FORMATS.includes(type)) {
118
			const maybeNewFormat = TransformationUtils.maybeExtractFormat(transforms);
119

120
			const assetFilename =
121
				path.basename(file.filename_disk, path.extname(file.filename_disk)) +
122
				getAssetSuffix(transforms) +
123
				(maybeNewFormat ? `.${maybeNewFormat}` : path.extname(file.filename_disk));
124

125
			const exists = await storage.location(file.storage).exists(assetFilename);
126

127
			if (maybeNewFormat) {
128
				file.type = contentType(assetFilename) || null;
129
			}
130

131
			if (exists) {
132
				return {
133
					stream: await storage.location(file.storage).read(assetFilename, range),
134
					file,
135
					stat: await storage.location(file.storage).stat(assetFilename),
136
				};
137
			}
138

139
			// Check image size before transforming. Processing an image that's too large for the
140
			// system memory will kill the API. Sharp technically checks for this too in it's
141
			// limitInputPixels, but we should have that check applied before starting the read streams
142
			const { width, height } = file;
143

144
			if (
145
				!width ||
146
				!height ||
147
				width > (env['ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION'] as number) ||
148
				height > (env['ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION'] as number)
149
			) {
150
				logger.warn(`Image is too large to be transformed, or image size couldn't be determined.`);
151
				throw new IllegalAssetTransformationError({ invalidTransformations: ['width', 'height'] });
152
			}
153

154
			const { queue, process } = sharp.counters();
155

156
			if (queue + process > (env['ASSETS_TRANSFORM_MAX_CONCURRENT'] as number)) {
157
				throw new ServiceUnavailableError({
158
					service: 'files',
159
					reason: 'Server too busy',
160
				});
161
			}
162

163
			const readStream = await storage.location(file.storage).read(file.filename_disk, range);
164

165
			const transformer = sharp({
166
				limitInputPixels: Math.pow(env['ASSETS_TRANSFORM_IMAGE_MAX_DIMENSION'] as number, 2),
167
				sequentialRead: true,
168
				failOn: env['ASSETS_INVALID_IMAGE_SENSITIVITY_LEVEL'] as FailOnOptions,
169
			});
170

171
			transformer.timeout({
172
				seconds: clamp(Math.round(getMilliseconds(env['ASSETS_TRANSFORM_TIMEOUT'], 0) / 1000), 1, 3600),
173
			});
174

175
			if (transforms.find((transform) => transform[0] === 'rotate') === undefined) transformer.rotate();
176

177
			transforms.forEach(([method, ...args]) => (transformer[method] as any).apply(transformer, args));
178

179
			readStream.on('error', (e: Error) => {
180
				logger.error(e, `Couldn't transform file ${file.id}`);
181
				readStream.unpipe(transformer);
182
			});
183

184
			try {
185
				await storage.location(file.storage).write(assetFilename, readStream.pipe(transformer), type);
186
			} catch (error) {
187
				try {
188
					await storage.location(file.storage).delete(assetFilename);
189
				} catch {
190
					// Ignored to prevent original error from being overwritten
191
				}
192

193
				if ((error as Error)?.message?.includes('timeout')) {
194
					throw new ServiceUnavailableError({ service: 'assets', reason: `Transformation timed out` });
195
				} else {
196
					throw error;
197
				}
198
			}
199

200
			return {
201
				stream: await storage.location(file.storage).read(assetFilename, range),
202
				stat: await storage.location(file.storage).stat(assetFilename),
203
				file,
204
			};
205
		} else {
206
			const readStream = await storage.location(file.storage).read(file.filename_disk, range);
207
			const stat = await storage.location(file.storage).stat(file.filename_disk);
208
			return { stream: readStream, file, stat };
209
		}
210
	}
211
}
212

213
const getAssetSuffix = (transforms: Transformation[]) => {
214
	if (Object.keys(transforms).length === 0) return '';
215
	return `__${hash(transforms)}`;
216
};
217
