
from __future__ import annotations

from collections.abc import Iterable
from enum import Enum
from typing import Any

import torch.nn.functional as F
from torch import Tensor, nn

from sentence_transformers.SentenceTransformer import SentenceTransformer


class TripletDistanceMetric(Enum):
    """The metric for the triplet loss"""

    COSINE = lambda x, y: 1 - F.cosine_similarity(x, y)
    EUCLIDEAN = lambda x, y: F.pairwise_distance(x, y, p=2)
    MANHATTAN = lambda x, y: F.pairwise_distance(x, y, p=1)


class TripletLoss(nn.Module):
    def __init__(
        self,
        model: SentenceTransformer,
        distance_metric=TripletDistanceMetric.EUCLIDEAN,
        triplet_margin: float = 5,
    ) -> None:
        """
        This class implements triplet loss. Given a triplet of (anchor, positive, negative),
        the loss minimizes the distance between anchor and positive while it maximizes the distance
        between anchor and negative. It computes the following loss function:

        ``loss = max(||anchor - positive|| - ||anchor - negative|| + margin, 0)``.

        The margin is an important hyperparameter and needs to be tuned for the task at hand.
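
        For instance, assuming ``model`` and ``losses`` are set up as in the Example
        below, cosine distance is typically paired with a much smaller margin::

            loss = losses.TripletLoss(
                model=model,
                distance_metric=losses.TripletDistanceMetric.COSINE,
                triplet_margin=0.25,
            )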

        Args:
            model: SentenceTransformer model
            distance_metric: Function to compute distance between two
                embeddings. The class TripletDistanceMetric contains
                common distance metrics that can be used.
            triplet_margin: The negative should be at least this much
                further away from the anchor than the positive.

        References:
            - For further details, see: https://en.wikipedia.org/wiki/Triplet_loss

        Requirements:
            1. (anchor, positive, negative) triplets

        Inputs:
            +---------------------------------------+--------+
            | Texts                                 | Labels |
            +=======================================+========+
            | (anchor, positive, negative) triplets | none   |
            +---------------------------------------+--------+

        Example:
            ::

                from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer, losses
                from datasets import Dataset

                model = SentenceTransformer("microsoft/mpnet-base")
                train_dataset = Dataset.from_dict({
                    "anchor": ["It's nice weather outside today.", "He drove to work."],
                    "positive": ["It's so sunny.", "He took the car to the office."],
                    "negative": ["It's quite rainy, sadly.", "She walked to the store."],
                })
                loss = losses.TripletLoss(model=model)

                trainer = SentenceTransformerTrainer(
                    model=model,
                    train_dataset=train_dataset,
                    loss=loss,
                )
                trainer.train()
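
                # Illustrative only: after training, the model can embed new
                # sentences with the standard encode() method.
                embeddings = model.encode(["It's nice weather outside today."])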
        """
        super().__init__()
        self.model = model
        self.distance_metric = distance_metric
        self.triplet_margin = triplet_margin

    def forward(self, sentence_features: Iterable[dict[str, Tensor]], labels: Tensor) -> Tensor:
        reps = [self.model(sentence_feature)["sentence_embedding"] for sentence_feature in sentence_features]

        rep_anchor, rep_pos, rep_neg = reps
        distance_pos = self.distance_metric(rep_anchor, rep_pos)
        distance_neg = self.distance_metric(rep_anchor, rep_neg)

        losses = F.relu(distance_pos - distance_neg + self.triplet_margin)
        return losses.mean()

    def get_config_dict(self) -> dict[str, Any]:
        distance_metric_name = self.distance_metric.__name__
        for name, value in vars(TripletDistanceMetric).items():
            if value == self.distance_metric:
                distance_metric_name = f"TripletDistanceMetric.{name}"
                break

        return {"distance_metric": distance_metric_name, "triplet_margin": self.triplet_margin}

    @property
    def citation(self) -> str:
        return """
@misc{hermans2017defense,
    title={In Defense of the Triplet Loss for Person Re-Identification},
    author={Alexander Hermans and Lucas Beyer and Bastian Leibe},
    year={2017},
    eprint={1703.07737},
    archivePrefix={arXiv},
    primaryClass={cs.CV}
}
"""