# google-research
# 59 lines · 2.0 KB
# coding=utf-8
# Copyright 2024 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Functions for calculating masks of new cache items."""
import abc
from typing import Dict, Tuple

import tensorflow.compat.v2 as tf

from negative_cache import negative_cache


class CacheFilterFn(object, metaclass=abc.ABCMeta):
  """Interface for callables that compute a boolean mask over new items.

  Implementations receive the current cache and a batch of candidate new
  items and return a boolean mask over the batch — presumably True marks
  items to keep (see `IsInCacheFilterFn`, which returns True for items not
  already in the cache).
  """

  @abc.abstractmethod
  def __call__(self, cache: 'negative_cache.NegativeCache',
               new_items: 'Dict[str, tf.Tensor]') -> 'tf.Tensor':
    pass


class IsInCacheFilterFn(CacheFilterFn):
  """Creates a mask for items that are already in the cache.

  Given a tuple of keys, this class is a function that checks if there is a
  cache element that matches exactly on all keys. The returned mask is True
  for new items that are *not* already present in the cache (i.e. items that
  are safe to add).
  """

  def __init__(self, keys: Tuple[str, ...]):
    self.keys = keys

  def __call__(self, cache: 'negative_cache.NegativeCache',
               new_items: 'Dict[str, tf.Tensor]') -> 'tf.Tensor':
    """Returns a boolean mask; True where the new item is not in the cache.

    Args:
      cache: The current cache; `cache.data[key]` holds the cached values for
        each key in `self.keys`.
      new_items: Mapping from key to a batch of candidate values.

    Raises:
      NotImplementedError: If any inspected tensor has a floating dtype
        (exact equality matching is only defined for non-float data here).
    """
    datawise_matches = []
    for key in self.keys:
      cache_vals = cache.data[key]
      new_items_vals = new_items[key]
      if cache_vals.dtype.is_floating:
        raise NotImplementedError('Floating datatypes are not yet implemented.')
      # Broadcast cache to (1, cache_size, ...) against new items at
      # (batch, 1, ...) so `==` yields a (batch, cache_size, ...) grid.
      cache_vals = tf.expand_dims(cache_vals, axis=0)
      new_items_vals = tf.expand_dims(new_items_vals, axis=1)
      elementwise = cache_vals == new_items_vals
      # Collapse all trailing feature axes: a pair matches on this key only
      # if every element matches.
      # NOTE(review): Python range() over tf.rank works eagerly via
      # __index__ but would fail inside tf.function — confirm this is only
      # used in eager mode.
      datawise = tf.reduce_all(elementwise, axis=range(2, tf.rank(elementwise)))
      datawise_matches.append(datawise)
    # (batch, cache_size, num_keys): a cache entry matches only if it
    # matches on every key; an item is "in cache" if any entry matches.
    all_keys_datawise = tf.stack(datawise_matches, axis=2)
    all_keys_match = tf.reduce_all(all_keys_datawise, axis=2)
    in_cache = tf.reduce_any(all_keys_match, axis=1)
    return tf.logical_not(in_cache)