---
# Service config for the multilingual toxic-classification annotator
# (DeepPavlov Dream component; registered under group "annotators").
name: toxic_classification
display_name: Toxic Classification
component_type: null
model_type: NN-based
is_customizable: false
author: publisher@deeppavlov.ai
# Single-quoted because the value itself contains ": " — the scalar
# continues on the indented second line (multi-line plain-flow string).
description: 'classifies toxicity: identity_attack, insult, obscene, severe_toxicity,
  sexual_explicit, threat, toxicity'
ram_usage: 3G
gpu_usage: 2G
group: annotators
# How the agent reaches the service: plain HTTP POST with a 2-second timeout.
connector:
  protocol: http
  timeout: 2.0
  url: http://toxic-classification-multilingual:8013/respond
# module_path:function references resolved by the agent at runtime.
dialog_formatter: state_formatters.dp_formatters:preproc_last_human_utt_dialog
response_formatter: state_formatters.dp_formatters:simple_formatter_service
previous_services: null
required_previous_services: null
state_manager_method: add_annotation
tags: null
endpoint: respond
service: annotators/MultilingualToxicClassification/service_configs/toxic-classification-multilingual
# Quoted so the timestamp stays a string instead of a parser-typed datetime.
date_created: '2023-03-16T09:45:32'