View Code? Open in Web Editor
NEW
This project was forked from openvinotoolkit/anomalib.
An anomaly detection library comprising state-of-the-art algorithms and features such as experiment management, hyper-parameter optimization, and edge inference.
Home Page: https://openvinotoolkit.github.io/anomalib/
License: Apache License 2.0
Python 99.37%
Dockerfile 0.63%
anomalib's People
anomalib's Issues
feature extractor
self .backbone = backbone
self .layers = layers
self .feature_extractor = FeatureExtractor (backbone = self .backbone , layers = layers , pre_trained = pre_trained )
features = self .feature_extractor (input_tensor )
Generate Embedding
embeddings = features [self .layers [0 ]]
for layer in self .layers [1 :]:
layer_embedding = features [layer ]
layer_embedding = F .interpolate (layer_embedding , size = embeddings .shape [- 2 :], mode = "nearest" )
embeddings = torch .cat ((embeddings , layer_embedding ), 1 )
# subsample embeddings
idx = self .idx .to (embeddings .device )
embeddings = torch .index_select (embeddings , 1 , idx )
Generate Anomaly Map
self .anomaly_map_generator = AnomalyMapGenerator (image_size = input_size )
output = self .anomaly_map_generator (
embedding = embeddings , mean = self .gaussian .mean , inv_covariance = self .gaussian .inv_covariance
return self .compute_anomaly_map (embedding , mean , inv_covariance )
score_map = self .compute_distance (
embedding = embedding ,
stats = [mean .to (embedding .device ), inv_covariance .to (embedding .device )],
)
up_sampled_score_map = self .up_sample (score_map )
smoothed_anomaly_map = self .smooth_anomaly_map (up_sampled_score_map )
batch , channel , height , width = embedding .shape
embedding = embedding .reshape (batch , channel , height * width )
# calculate mahalanobis distances
mean , inv_covariance = stats
delta = (embedding - mean ).permute (2 , 0 , 1 )
distances = (torch .matmul (delta , inv_covariance ) * delta ).sum (2 ).permute (1 , 0 )
distances = distances .reshape (batch , 1 , height , width )
distances = distances .clamp (0 ).sqrt ()
score_map = F .interpolate (
distance ,
size = self .image_size ,
mode = "bilinear" ,
align_corners = False ,
)
blurred_anomaly_map = self .blur (anomaly_map )
FeatureExtractor
self .backbone = backbone
self .teacher_model = FeatureExtractor (backbone = self .backbone , pre_trained = True , layers = layers )
self .student_model = FeatureExtractor (
backbone = self .backbone , pre_trained = False , layers = layers , requires_grad = True
)
teacher_features : Dict [str , Tensor ] = self .teacher_model (images )
student_features : Dict [str , Tensor ] = self .student_model (images )
Generate Anomaly Map
self .anomaly_map_generator = AnomalyMapGenerator (image_size = tuple (image_size ))
output = self .anomaly_map_generator (teacher_features = teacher_features , student_features = student_features )
return self .compute_anomaly_map (teacher_features , student_features )
batch_size = list (teacher_features .values ())[0 ].shape [0 ]
anomaly_map = torch .ones (batch_size , 1 , self .image_size [0 ], self .image_size [1 ])
for layer in teacher_features .keys ():
layer_map = self .compute_layer_map (teacher_features [layer ], student_features [layer ])
anomaly_map = anomaly_map .to (layer_map .device )
anomaly_map *= layer_map
norm_teacher_features = F .normalize (teacher_features )
norm_student_features = F .normalize (student_features )
layer_map = 0.5 * torch .norm (norm_teacher_features - norm_student_features , p = 2 , dim = - 3 , keepdim = True ) ** 2
layer_map = F .interpolate (layer_map , size = self .image_size , align_corners = False , mode = "bilinear" )