# coding=utf-8
# Copyright 2024 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Contains commonly used loss functions."""

import functools

from flax.training import common_utils
import jax
import jax.numpy as jnp

def binary_logistic_loss(logit, label, num_classes=2):
  """Computes the logistic loss for one datapoint.

  Args:
    logit: logit predicted by the model.
    label: true class label: 0 or 1.
    num_classes: not used; kept for signature compatibility with other losses.

  Returns:
    loss: value of the loss.
  """
  del num_classes  # Unused: binary task, the class count is implicit.
  # softplus(z) - y*z is the numerically stable form of
  # -log(sigmoid(z)) for y=1 and -log(1 - sigmoid(z)) for y=0.
  elementwise = jax.nn.softplus(logit) - logit * label
  return jnp.sum(elementwise)
40
@functools.partial(jax.jit, static_argnums=(2,))
def cross_entropy_loss(logprobs, label,
                       num_classes):
  """Computes the cross entropy loss for one datapoint.

  Args:
    logprobs: log probabilities predicted by the model.
    label: true class label (integer index).
    num_classes: number of classes in the task; static under jit so each
      distinct value compiles once.

  Returns:
    loss: value of the loss.
  """
  # jax.nn.one_hot produces the same float one-hot encoding as flax's
  # common_utils.onehot; using it removes the flax dependency from this
  # computation.
  one_hot_labels = jax.nn.one_hot(label, num_classes)
  return -jnp.sum(one_hot_labels * logprobs)