from pathlib import Path

import numpy as np
import torch

from ultralytics.models.yolo.detect import DetectionValidator
from ultralytics.utils import LOGGER, ops
from ultralytics.utils.checks import check_requirements
from ultralytics.utils.metrics import OKS_SIGMA, PoseMetrics, box_iou, kpt_iou
from ultralytics.utils.plotting import output_to_target, plot_images


class PoseValidator(DetectionValidator):
    """
    A class extending the DetectionValidator class for validation based on a pose model.

    Example:
        ```python
        from ultralytics.models.yolo.pose import PoseValidator

        args = dict(model="yolov8n-pose.pt", data="coco8-pose.yaml")
        validator = PoseValidator(args=args)
        validator()
        ```
    """

    def __init__(self, dataloader=None, save_dir=None, pbar=None, args=None, _callbacks=None):
        """Initialize a 'PoseValidator' object with custom parameters and assigned attributes."""
        super().__init__(dataloader, save_dir, pbar, args, _callbacks)
        self.sigma = None
        self.kpt_shape = None
        self.args.task = "pose"
        self.metrics = PoseMetrics(save_dir=self.save_dir, on_plot=self.on_plot)
        if isinstance(self.args.device, str) and self.args.device.lower() == "mps":
            LOGGER.warning(
                "WARNING ⚠️ Apple MPS known Pose bug. Recommend 'device=cpu' for Pose models. "
                "See https://github.com/ultralytics/ultralytics/issues/4031."
            )

    def preprocess(self, batch):
        """Preprocesses the batch by converting the 'keypoints' data into a float and moving it to the device."""
        batch = super().preprocess(batch)
        batch["keypoints"] = batch["keypoints"].to(self.device).float()
        return batch

    def get_desc(self):
        """Returns description of evaluation metrics in string format."""
        return ("%22s" + "%11s" * 10) % (
            "Class", "Images", "Instances", "Box(P", "R", "mAP50", "mAP50-95)", "Pose(P", "R", "mAP50", "mAP50-95)"
        )

    def postprocess(self, preds):
        """Apply non-maximum suppression and return detections with high confidence scores."""
        return ops.non_max_suppression(
            preds,
            self.args.conf,
            self.args.iou,
            labels=self.lb,
            multi_label=True,
            agnostic=self.args.single_cls or self.args.agnostic_nms,
            max_det=self.args.max_det,
            nc=self.nc,
        )

    def init_metrics(self, model):
        """Initiate pose estimation metrics for YOLO model."""
        super().init_metrics(model)
        self.kpt_shape = self.data["kpt_shape"]
        is_pose = self.kpt_shape == [17, 3]
        nkpt = self.kpt_shape[0]
        self.sigma = OKS_SIGMA if is_pose else np.ones(nkpt) / nkpt
        self.stats = dict(tp_p=[], tp=[], conf=[], pred_cls=[], target_cls=[], target_img=[])

    def _prepare_batch(self, si, batch):
        """Prepares a batch for processing by converting keypoints to float and moving to device."""
        pbatch = super()._prepare_batch(si, batch)
        kpts = batch["keypoints"][batch["batch_idx"] == si]
        h, w = pbatch["imgsz"]
        kpts = kpts.clone()
        kpts[..., 0] *= w
        kpts[..., 1] *= h
        kpts = ops.scale_coords(pbatch["imgsz"], kpts, pbatch["ori_shape"], ratio_pad=pbatch["ratio_pad"])
        pbatch["kpts"] = kpts
        return pbatch

    def _prepare_pred(self, pred, pbatch):
        """Prepares and scales keypoints in a batch for pose processing."""
        predn = super()._prepare_pred(pred, pbatch)
        nk = pbatch["kpts"].shape[1]
        pred_kpts = predn[:, 6:].view(len(predn), nk, -1)
        ops.scale_coords(pbatch["imgsz"], pred_kpts, pbatch["ori_shape"], ratio_pad=pbatch["ratio_pad"])
        return predn, pred_kpts

    def update_metrics(self, preds, batch):
        """Metrics."""
        for si, pred in enumerate(preds):
            self.seen += 1
            npr = len(pred)
            stat = dict(
                conf=torch.zeros(0, device=self.device),
                pred_cls=torch.zeros(0, device=self.device),
                tp=torch.zeros(npr, self.niou, dtype=torch.bool, device=self.device),
                tp_p=torch.zeros(npr, self.niou, dtype=torch.bool, device=self.device),
            )
            pbatch = self._prepare_batch(si, batch)
            cls, bbox = pbatch.pop("cls"), pbatch.pop("bbox")
            nl = len(cls)
            stat["target_cls"] = cls
            stat["target_img"] = cls.unique()
            if npr == 0:
                if nl:
                    for k in self.stats.keys():
                        self.stats[k].append(stat[k])
                    if self.args.plots:
                        self.confusion_matrix.process_batch(detections=None, gt_bboxes=bbox, gt_cls=cls)
                continue

            # Predictions
            if self.args.single_cls:
                pred[:, 5] = 0
            predn, pred_kpts = self._prepare_pred(pred, pbatch)
            stat["conf"] = predn[:, 4]
            stat["pred_cls"] = predn[:, 5]

            # Evaluate
            if nl:
                stat["tp"] = self._process_batch(predn, bbox, cls)
                stat["tp_p"] = self._process_batch(predn, bbox, cls, pred_kpts, pbatch["kpts"])
                if self.args.plots:
                    self.confusion_matrix.process_batch(predn, bbox, cls)

            for k in self.stats.keys():
                self.stats[k].append(stat[k])

            # Save
            if self.args.save_json:
                self.pred_to_json(predn, batch["im_file"][si])
            if self.args.save_txt:
                self.save_one_txt(
                    predn,
                    pred_kpts,
                    self.args.save_conf,
                    pbatch["ori_shape"],
                    self.save_dir / "labels" / f'{Path(batch["im_file"][si]).stem}.txt',
                )

    def _process_batch(self, detections, gt_bboxes, gt_cls, pred_kpts=None, gt_kpts=None):
        """
        Return correct prediction matrix by computing Intersection over Union (IoU) between detections and ground truth.

        Args:
            detections (torch.Tensor): Tensor with shape (N, 6) representing detection boxes and scores, where each
                detection is of the format (x1, y1, x2, y2, conf, class).
            gt_bboxes (torch.Tensor): Tensor with shape (M, 4) representing ground truth bounding boxes, where each
                box is of the format (x1, y1, x2, y2).
            gt_cls (torch.Tensor): Tensor with shape (M,) representing ground truth class indices.
            pred_kpts (torch.Tensor | None): Optional tensor with shape (N, 51) representing predicted keypoints, where
                51 corresponds to 17 keypoints each having 3 values.
            gt_kpts (torch.Tensor | None): Optional tensor with shape (N, 51) representing ground truth keypoints.

        Returns:
            torch.Tensor: A tensor with shape (N, 10) representing the correct prediction matrix for 10 IoU levels,
                where N is the number of detections.

        Example:
            ```python
            detections = torch.rand(100, 6)  # 100 predictions: (x1, y1, x2, y2, conf, class)
            gt_bboxes = torch.rand(50, 4)  # 50 ground truth boxes: (x1, y1, x2, y2)
            gt_cls = torch.randint(0, 2, (50,))  # 50 ground truth class indices
            pred_kpts = torch.rand(100, 51)  # 100 predicted keypoints
            gt_kpts = torch.rand(50, 51)  # 50 ground truth keypoints
            correct_preds = _process_batch(detections, gt_bboxes, gt_cls, pred_kpts, gt_kpts)
            ```

        Note:
            `0.53` scale factor used in area computation is referenced from https://github.com/jin-s13/xtcocoapi/blob/master/xtcocotools/cocoeval.py#L384.
        """
        if pred_kpts is not None and gt_kpts is not None:
            # `0.53` scale factor from https://github.com/jin-s13/xtcocoapi/blob/master/xtcocotools/cocoeval.py#L384
            area = ops.xyxy2xywh(gt_bboxes)[:, 2:].prod(1) * 0.53
            iou = kpt_iou(gt_kpts, pred_kpts, sigma=self.sigma, area=area)
        else:  # boxes
            iou = box_iou(gt_bboxes, detections[:, :4])

        return self.match_predictions(detections[:, 5], gt_cls, iou)

    def plot_val_samples(self, batch, ni):
        """Plots and saves validation set samples with predicted bounding boxes and keypoints."""
        plot_images(
            batch["img"],
            batch["batch_idx"],
            batch["cls"].squeeze(-1),
            batch["bboxes"],
            kpts=batch["keypoints"],
            paths=batch["im_file"],
            fname=self.save_dir / f"val_batch{ni}_labels.jpg",
            names=self.names,
            on_plot=self.on_plot,
        )

    def plot_predictions(self, batch, preds, ni):
        """Plots predictions for YOLO model."""
        pred_kpts = torch.cat([p[:, 6:].view(-1, *self.kpt_shape) for p in preds], 0)
        plot_images(
            batch["img"],
            *output_to_target(preds, max_det=self.args.max_det),
            kpts=pred_kpts,
            paths=batch["im_file"],
            fname=self.save_dir / f"val_batch{ni}_pred.jpg",
            names=self.names,
            on_plot=self.on_plot,
        )  # pred

    def save_one_txt(self, predn, pred_kpts, save_conf, shape, file):
        """Save YOLO detections to a txt file in normalized coordinates in a specific format."""
        from ultralytics.engine.results import Results

        Results(
            np.zeros((shape[0], shape[1]), dtype=np.uint8),
            path=None,
            names=self.names,
            boxes=predn[:, :6],
            keypoints=pred_kpts,
        ).save_txt(file, save_conf=save_conf)

    def pred_to_json(self, predn, filename):
        """Converts YOLO predictions to COCO JSON format."""
        stem = Path(filename).stem
        image_id = int(stem) if stem.isnumeric() else stem
        box = ops.xyxy2xywh(predn[:, :4])  # xywh
        box[:, :2] -= box[:, 2:] / 2  # xy center to top-left corner
        for p, b in zip(predn.tolist(), box.tolist()):
            self.jdict.append(
                {
                    "image_id": image_id,
                    "category_id": self.class_map[int(p[5])],
                    "bbox": [round(x, 3) for x in b],
                    "keypoints": p[6:],
                    "score": round(p[4], 5),
                }
            )

    def eval_json(self, stats):
        """Evaluates object detection model using COCO JSON format."""
        if self.args.save_json and self.is_coco and len(self.jdict):
            anno_json = self.data["path"] / "annotations/person_keypoints_val2017.json"  # annotations
            pred_json = self.save_dir / "predictions.json"  # predictions
            LOGGER.info(f"\nEvaluating pycocotools mAP using {pred_json} and {anno_json}...")
            try:
                check_requirements("pycocotools>=2.0.6")
                from pycocotools.coco import COCO
                from pycocotools.cocoeval import COCOeval

                for x in anno_json, pred_json:
                    assert x.is_file(), f"{x} file not found"
                anno = COCO(str(anno_json))  # init annotations api
                pred = anno.loadRes(str(pred_json))  # init predictions api
                for i, eval in enumerate([COCOeval(anno, pred, "bbox"), COCOeval(anno, pred, "keypoints")]):
                    if self.is_coco:
                        eval.params.imgIds = [int(Path(x).stem) for x in self.dataloader.dataset.im_files]
                    eval.evaluate()
                    eval.accumulate()
                    eval.summarize()
                    idx = i * 4 + 2
                    # update mAP50-95 and then mAP50 for Box and Pose
                    stats[self.metrics.keys[idx + 1]], stats[self.metrics.keys[idx]] = eval.stats[:2]
            except Exception as e:
                LOGGER.warning(f"pycocotools unable to run: {e}")
        return stats