"""Accuracy metrics: elementwise, binary, and categorical accuracy.

Each metric is a plain function plus a `MeanMetricWrapper` subclass that
averages the per-sample values (optionally weighted by `sample_weight`).

NOTE(review): this file was recovered from a whitespace-mangled paste; the
token stream is unchanged but line breaks/indentation were reconstructed.
"""
from keras_core import backend
from keras_core import operations as ops
from keras_core.api_export import keras_core_export
from keras_core.losses.loss import squeeze_to_same_rank
from keras_core.metrics import reduction_metrics


def accuracy(y_true, y_pred):
    """Return 1.0 where `y_true == y_pred` elementwise, else 0.0.

    `y_true` is cast to `y_pred`'s dtype and both are squeezed to the same
    rank before comparison. The result is cast to `backend.floatx()`.
    """
    y_pred = ops.convert_to_tensor(y_pred)
    y_true = ops.convert_to_tensor(y_true, dtype=y_pred.dtype)
    y_true, y_pred = squeeze_to_same_rank(y_true, y_pred)
    return ops.cast(ops.equal(y_true, y_pred), dtype=backend.floatx())


@keras_core_export("keras_core.metrics.Accuracy")
class Accuracy(reduction_metrics.MeanMetricWrapper):
    """Calculates how often predictions equal labels.

    This metric creates two local variables, `total` and `count` that are
    used to compute the frequency with which `y_pred` matches `y_true`.
    This frequency is ultimately returned as `binary accuracy`: an
    idempotent operation that simply divides `total` by `count`.

    If `sample_weight` is `None`, weights default to 1.
    Use `sample_weight` of 0 to mask values.

    Args:
        name: (Optional) string name of the metric instance.
        dtype: (Optional) data type of the metric result.

    Standalone usage:

    >>> m = keras_core.metrics.Accuracy()
    >>> m.update_state([[1], [2], [3], [4]], [[0], [2], [3], [4]])
    >>> m.result()
    0.75

    >>> m.reset_state()
    >>> m.update_state([[1], [2], [3], [4]], [[0], [2], [3], [4]],
    ...                sample_weight=[1, 1, 0, 0])
    >>> m.result()
    0.5

    Usage with `compile()` API:

    ```python
    model.compile(optimizer='sgd',
                  loss='mse',
                  metrics=[keras_core.metrics.Accuracy()])
    ```
    """

    def __init__(self, name="accuracy", dtype=None):
        super().__init__(fn=accuracy, name=name, dtype=dtype)

    def get_config(self):
        return {"name": self.name, "dtype": self.dtype}


def binary_accuracy(y_true, y_pred, threshold=0.5):
    """Mean (over the last axis) agreement after thresholding `y_pred`.

    `y_pred` values strictly greater than `threshold` become 1, the rest 0,
    before comparison with `y_true`.
    """
    y_pred = ops.convert_to_tensor(y_pred)
    threshold = ops.cast(threshold, y_pred.dtype)
    y_pred = ops.cast(y_pred > threshold, y_pred.dtype)
    return ops.mean(
        ops.cast(ops.equal(y_true, y_pred), backend.floatx()),
        axis=-1,
    )


@keras_core_export("keras_core.metrics.BinaryAccuracy")
class BinaryAccuracy(reduction_metrics.MeanMetricWrapper):
    """Calculates how often predictions match binary labels.

    This metric creates two local variables, `total` and `count` that are
    used to compute the frequency with which `y_pred` matches `y_true`.
    This frequency is ultimately returned as `binary accuracy`: an
    idempotent operation that simply divides `total` by `count`.

    If `sample_weight` is `None`, weights default to 1.
    Use `sample_weight` of 0 to mask values.

    Args:
        name: (Optional) string name of the metric instance.
        dtype: (Optional) data type of the metric result.
        threshold: (Optional) Float representing the threshold for deciding
            whether prediction values are 1 or 0.

    Standalone usage:

    >>> m = keras_core.metrics.BinaryAccuracy()
    >>> m.update_state([[1], [1], [0], [0]], [[0.98], [1], [0], [0.6]])
    >>> m.result()
    0.75

    >>> m.reset_state()
    >>> m.update_state([[1], [1], [0], [0]], [[0.98], [1], [0], [0.6]],
    ...                sample_weight=[1, 0, 0, 1])
    >>> m.result()
    0.5

    Usage with `compile()` API:

    ```python
    model.compile(optimizer='sgd',
                  loss='mse',
                  metrics=[keras_core.metrics.BinaryAccuracy()])
    ```
    """

    def __init__(self, name="binary_accuracy", dtype=None):
        super().__init__(fn=binary_accuracy, name=name, dtype=dtype)

    def get_config(self):
        return {"name": self.name, "dtype": self.dtype}


def categorical_accuracy(y_true, y_pred):
    """Per-sample match between argmax of one-hot `y_true` and `y_pred`."""
    # Reduce one-hot/probability labels to class indices.
    y_true = ops.argmax(y_true, axis=-1)

    reshape_matches = False
    y_pred = ops.convert_to_tensor(y_pred)
    y_true = ops.convert_to_tensor(y_true, dtype=y_true.dtype)
    y_true_org_shape = ops.shape(y_true)
    y_pred_rank = len(y_pred.shape)
    y_true_rank = len(y_true.shape)

    # If the shape of y_true is (num_samples, 1), squeeze to (num_samples,)
    # NOTE(review): the condition compares the post-argmax y_true rank to
    # the full y_pred rank; confirm this matches the intended squeeze case.
    if (
        (y_true_rank is not None)
        and (y_pred_rank is not None)
        and (len(y_true.shape) == len(y_pred.shape))
    ):
        y_true = ops.squeeze(y_true, [-1])
        reshape_matches = True
    y_pred = ops.argmax(y_pred, axis=-1)

    # If the predicted output and actual output types don't match, force cast
    # them to match.
    if y_pred.dtype != y_true.dtype:
        y_pred = ops.cast(y_pred, dtype=y_true.dtype)
    matches = ops.cast(ops.equal(y_true, y_pred), backend.floatx())
    if reshape_matches:
        # Restore the original label shape when we squeezed above.
        matches = ops.reshape(matches, shape=y_true_org_shape)
    return matches


@keras_core_export("keras_core.metrics.CategoricalAccuracy")
class CategoricalAccuracy(reduction_metrics.MeanMetricWrapper):
    """Calculates how often predictions match one-hot labels.

    You can provide logits of classes as `y_pred`, since argmax of logits
    and probabilities are same.

    This metric creates two local variables, `total` and `count` that are
    used to compute the frequency with which `y_pred` matches `y_true`.
    This frequency is ultimately returned as `categorical accuracy`: an
    idempotent operation that simply divides `total` by `count`.

    `y_pred` and `y_true` should be passed in as vectors of probabilities,
    rather than as labels. If necessary, use `ops.one_hot` to expand
    `y_true` as a vector.

    If `sample_weight` is `None`, weights default to 1.
    Use `sample_weight` of 0 to mask values.

    Args:
        name: (Optional) string name of the metric instance.
        dtype: (Optional) data type of the metric result.

    Standalone usage:

    >>> m = keras_core.metrics.CategoricalAccuracy()
    >>> m.update_state([[0, 0, 1], [0, 1, 0]], [[0.1, 0.9, 0.8],
    ...                 [0.05, 0.95, 0]])
    >>> m.result()
    0.5

    >>> m.reset_state()
    >>> m.update_state([[0, 0, 1], [0, 1, 0]], [[0.1, 0.9, 0.8],
    ...                 [0.05, 0.95, 0]],
    ...                sample_weight=[0.7, 0.3])
    >>> m.result()
    0.3

    Usage with `compile()` API:

    ```python
    model.compile(optimizer='sgd',
                  loss='mse',
                  metrics=[keras_core.metrics.CategoricalAccuracy()])
    ```
    """

    def __init__(self, name="categorical_accuracy", dtype=None):
        super().__init__(fn=categorical_accuracy, name=name, dtype=dtype)

    def get_config(self):
        return {"name": self.name, "dtype": self.dtype}