Commit 1628ccb3 authored by Lorenzo Pantolini

fix cosine_sim_matrix input

parent d32a1794
@@ -3,7 +3,7 @@ import numba as nb
 
 MIN_FLOAT64 = np.finfo(np.float64).min
 
-@nb.njit(cache=False)
+@nb.njit
 def _make_dtw_matrix(
     score_matrix: np.ndarray,
     gap_open_penalty: float = 0.0,
@@ -78,7 +78,7 @@ def _make_dtw_matrix(
     return matrix, backtrack
 
-@nb.njit(cache=False)
+@nb.njit
 def _get_dtw_alignment(start_direction, backtrack: np.ndarray, n1, m1):
     """
     Finds optimal warping path from a backtrack matrix
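
Both hunks above only change how the numba decorator is written: numba's `cache` option already defaults to False, so the bare `@nb.njit` form is equivalent to the removed `@nb.njit(cache=False)`. A minimal standalone sketch of the bare-decorator usage (the function below is hypothetical, not part of the repository):

import numpy as np
import numba as nb

# Hypothetical helper, for illustration only: @nb.njit with no arguments
# compiles in nopython mode with numba's defaults (including cache=False),
# so it behaves the same as the removed @nb.njit(cache=False) form.
@nb.njit
def cumulative_row_sums(score_matrix: np.ndarray) -> np.ndarray:
    out = np.zeros_like(score_matrix)
    for i in range(score_matrix.shape[0]):
        running = 0.0
        for j in range(score_matrix.shape[1]):
            running += score_matrix[i, j]
            out[i, j] = running
    return out

print(cumulative_row_sums(np.random.rand(3, 4)))
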
 import torch
 import numpy as np
 from scipy import spatial
 
 def compute_similarity_matrix(embedding1, embedding2, l=1, p=2):
@@ -48,7 +49,7 @@ def compute_similarity_matrix_plain(embedding1, embedding2, l=1, p=2):
 
-def compute_cosine_similarity_matrix(embedding1, embedding2, l=1, p=2):
+def compute_cosine_similarity_matrix(embedding1, embedding2):
     """ Take as input 2 sequence embeddings (at a residue level) and returns the cosine similarity matrix
     with the signal enhancement based on Z-scores. The signal enhancement seems to be redundant
     when used with the cosine similarity score, therefore we don't recommend this version.
@@ -84,5 +85,5 @@ def compute_cosine_similarity_matrix_plain(embedding1, embedding2):
     :type embedding2: pytorch tensor
     """
 
-    return torch.tensor(1-spatial.distance.cdist(embedding1, embedding2, 'cosine'))
+    return torch.tensor(1-spatial.distance.cdist(embedding1.cpu().numpy(), embedding2.cpu().numpy(), 'cosine'))
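
The fix above converts the input tensors to NumPy arrays before calling scipy: `scipy.spatial.distance.cdist` expects array-like input, and PyTorch will not convert a CUDA tensor to NumPy implicitly, so GPU-resident embeddings have to be moved to the CPU first. A minimal standalone sketch of the same idea (the function name and the extra `detach()` call are illustrative additions, not the repository's code):

import torch
from scipy import spatial

# Illustrative sketch: move (possibly GPU-resident) embeddings to the CPU and
# convert them to NumPy before computing pairwise cosine similarity.
# detach() is an extra safeguard for tensors that still require gradients.
def cosine_similarity_matrix(emb1: torch.Tensor, emb2: torch.Tensor) -> torch.Tensor:
    a = emb1.detach().cpu().numpy()
    b = emb2.detach().cpu().numpy()
    return torch.tensor(1 - spatial.distance.cdist(a, b, 'cosine'))

sim = cosine_similarity_matrix(torch.rand(5, 8), torch.rand(7, 8))
print(sim.shape)  # torch.Size([5, 7])
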
@@ -20,7 +20,7 @@ print(emb1.shape)
 similarity_matrix = sm.compute_similarity_matrix(emb1, emb2)
 eba_results = methods.compute_eba(similarity_matrix)
 
 ### to return the alignment itself use:
-#eba_results = eba.EBA(similarity_matrix, extensive_output=True)
+#eba_results = methods.compute_eba(similarity_matrix, extensive_output=True)
 
 ### show results
 print('EBA raw: ', eba_results['EBA_raw'])
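
The last hunk updates a stale comment in the example script so that it points at `methods.compute_eba` instead of the old `eba.EBA` name. A rough sketch of how the example flow reads after the fix; the module file imported as `sm` and the embedding shapes are assumptions, with random tensors standing in for the real residue-level embeddings:

import torch
import similarity_matrices as sm  # assumed file name behind the `sm` alias used in the example
import methods

# Stand-in embeddings; in the real example these come from a protein language model.
emb1 = torch.rand(50, 1024)
emb2 = torch.rand(60, 1024)

similarity_matrix = sm.compute_similarity_matrix(emb1, emb2)

# extensive_output=True additionally returns the alignment itself.
eba_results = methods.compute_eba(similarity_matrix, extensive_output=True)
print('EBA raw: ', eba_results['EBA_raw'])
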