a
    Sic                     @   sj  d Z ddlZddlZddlmZ ddlmZ ddlZddl	m
Z
mZmZmZmZmZmZmZmZ ddlmZ dd	lmZmZmZmZmZmZ dd
l	mZ ddl	mZ ddl	mZ  ddl!m"Z" ddl#m$Z$ ddl%m&Z&m'Z' e(e)dZ*e
+ j,Z-d$ddZ.d%ddZ/G dd dZ0G dd dZ1G dd dZ2dd Z3e-rHG dd dZ4G d d! d!Z5G d"d# d#eZ6dS )&z7
    Implements support for high-level dataset access.
    N)warn)local   )	h5h5sh5th5rh5dh5ph5fdh5ds	_selector)H5pyDeprecationWarning   )HLObjectphil	with_philEmptycached_propertyfind_item_type)filters)
selections)selections2)Datatype)filename_decode)VDSmapvds_support
   Fc                 C   s  |dur,t |ts,ddlm} |j||d}|du r\|du rT|du rLtdt|}|j}nJt |trl|fnt|}|durt	j
|t	jdt	j
|jt	jdkrtdt |	tr|	f}	|	dur|	n|}t |trt |ts|f}t |trtdd	 t||D rd
||}t|t |tr4|j}|j}nT|du rT|du rTt	d}n&|du rp|durp|j}n
t	|}tj|dd}t||||	|fr|du rtd|du r|
du rd}
d}|tv r|
durtd|}
d}t|pttj|||||
|||	|||}|durjt|}|durVt|j}t	j||d}n
t	|}| | |du rxd}|dv r|!| ntd|du r|"tj#tj$B  n(|du r|"d n|durtd|	durtdd	 |	D }	|dus|dur|pttj%}|dur2|&| |durF|'| t |tr`t(t(j)}nt(*||	}t+j| j|||||d}|durt |ts|,t(j-t(j-| |S )z+ Return a new low-level dataset identifier Nr   )base)specified_dtypez-One of data, shape or dtype must be specifieddtypez%Shape tuple is incompatible with datac                 s   s"   | ]\}}|d ur||kV  qd S N ).0dimchunkr#   r#   L/var/www/html/django/DPS/env/lib/python3.9/site-packages/h5py/_hl/dataset.py	<genexpr>E   s   z make_new_dset.<locals>.<genexpr>z^Chunk shape must not be greater than data shape in any dimension. {} is not compatible with {}z=f4)logicalFz1Chunked format required for given storage optionsT   gzipzConflict in compression options)TFz(track_times must be either True or Falser   z(track_order must be either True or Falsec                 s   s    | ]}|d ur|nt jV  qd S r"   r   	UNLIMITED)r$   mr#   r#   r'   r(          )dcpldapl).
isinstancer    r   array_for_new_object	TypeErrorshapeinttuplenumpyproduct	ulonglong
ValueErrorboolanyzipformatr   idr!   r   	py_create_LEGACY_GZIP_COMPRESSION_VALSr   	fill_dcplr
   createDATASET_CREATEcheck_string_dtypestring_dtypeencodingarrayset_fill_valueset_obj_track_timesset_attr_creation_orderCRT_ORDER_TRACKEDCRT_ORDER_INDEXEDDATASET_ACCESSset_efile_prefixset_virtual_prefixr   NULLcreate_simpler	   writeALL)parentr6   r!   datanamechunkscompressionshuffle
fletcher32maxshapecompression_opts	fillvaluescaleoffsettrack_timesexternaltrack_orderr0   r1   efile_prefixvirtual_prefixallow_unknown_filterr   Z	tmp_shapeerrmsgtidstring_infosiddset_idr#   r#   r'   make_new_dset$   s    
*























rm   c                 K   sd   |dus|dur"|pt t j}n|p(d}|dur<|| |durN|| tj| j||d}|S )z1 Return an existing low-level dataset identifier N)r1   )r
   rE   rP   rQ   rR   r	   openrA   )rW   rY   r1   re   rf   kwdsrl   r#   r#   r'   	open_dset   s    

rp   c                   @   s8   e Zd ZdZdd Zdd Zdd Zdd	 Zd
d ZdS )AstypeWrapperz7Wrapper to convert data on reading from a dataset.
    c                 C   s   || _ t|| _d S r"   )_dsetr9   r!   _dtype)selfdsetr!   r#   r#   r'   __init__   s    zAstypeWrapper.__init__c                 C   s   | j j|| jdS N)	new_dtype)rr   __getitem__rs   rt   argsr#   r#   r'   ry      s    zAstypeWrapper.__getitem__c                 C   s   t dtdd | j| jj_| S )NztUsing astype() as a context manager is deprecated. Slice the returned object instead, like: ds.astype(np.int32)[:10]r   )category
stacklevel)r   r   rs   rr   _localastypert   r#   r#   r'   	__enter__   s    zAstypeWrapper.__enter__c                 G   s   d | j j_d S r"   )rr   r~   r   rz   r#   r#   r'   __exit__   s    zAstypeWrapper.__exit__c                 C   s
   t | jS )zb Get the length of the underlying dataset

        >>> length = len(dataset.astype('f8'))
        lenrr   r   r#   r#   r'   __len__   s    zAstypeWrapper.__len__N)	__name__
__module____qualname____doc__rv   ry   r   r   r   r#   r#   r#   r'   rq      s   
rq   c                   @   s*   e Zd ZdZd
ddZdd Zdd Zd	S )AsStrWrapperz0Wrapper to decode strings on reading the datasetstrictc                 C   s,   || _ |d u rt|jj}|| _|| _d S r"   )rr   r   rG   r!   rI   errors)rt   ru   rI   r   r#   r#   r'   rv      s
    zAsStrWrapper.__init__c                    sJ    j | }t|r$| j jS tj fdd|jD td	|j
S )Nc                    s   g | ]}|  j jqS r#   )decoderI   r   )r$   br   r#   r'   
<listcomp>   s   z,AsStrWrapper.__getitem__.<locals>.<listcomp>r    )rr   r9   isscalarr   rI   r   rJ   flatobjectreshaper6   )rt   r{   Z	bytes_arrr#   r   r'   ry      s    

zAsStrWrapper.__getitem__c                 C   s
   t | jS )z] Get the length of the underlying dataset

        >>> length = len(dataset.asstr())
        r   r   r#   r#   r'   r      s    zAsStrWrapper.__len__N)r   )r   r   r   r   rv   ry   r   r#   r#   r#   r'   r      s   
r   c                   @   s,   e Zd ZdZdZdd Zdd Zdd ZdS )	FieldsWrapperzBWrapper to extract named fields from a dataset with a struct dtypeNc                 C   s,   || _ t|tr|| _|g}t||| _d S r"   )rr   r2   strextract_fieldreadtime_dtype
read_dtype)rt   ru   Zprior_dtypenamesr#   r#   r'   rv      s
    
zFieldsWrapper.__init__c                 C   s*   | j j|| jd}| jd ur&|| j }|S rw   )rr   ry   r   r   )rt   r{   rX   r#   r#   r'   ry      s    

zFieldsWrapper.__getitem__c                 C   s
   t | jS )zh Get the length of the underlying dataset

        >>> length = len(dataset.fields(['x', 'y']))
        r   r   r#   r#   r'   r     s    zFieldsWrapper.__len__)r   r   r   r   r   rv   ry   r   r#   r#   r#   r'   r      s
   r   c                    sJ    j du rtd|D ]}| j vrtd| qt fdd|D S )z=Make a NumPy compound dtype with a subset of available fieldsNz+Field names only allowed for compound typesz&Field %s does not appear in this type.c                    s   g | ]}| j | d  fqS )r   )fields)r$   rY   basetyper#   r'   r     r/   z"readtime_dtype.<locals>.<listcomp>)r   r<   r9   r!   )r   r   rY   r#   r   r'   r     s    

r   c                   @   s(   e Zd ZdZdd Zdd Zdd ZdS )	CollectiveContextz$ Manages collective I/O in MPI mode c                 C   s
   || _ d S r"   )rr   )rt   ru   r#   r#   r'   rv   !  s    zCollectiveContext.__init__c                 C   s   | j jtj d S r"   )rr   _dxplset_dxpl_mpior   MPIO_COLLECTIVEr   r#   r#   r'   r   $  s    zCollectiveContext.__enter__c                 G   s   | j jtj d S r"   )rr   r   r   r   MPIO_INDEPENDENTrz   r#   r#   r'   r   (  s    zCollectiveContext.__exit__N)r   r   r   r   rv   r   r   r#   r#   r#   r'   r     s   r   c                   @   s*   e Zd ZdZd	ddZdd Zdd ZdS )
ChunkIteratorzD
    Class to iterate through list of chunks of a given dataset
    Nc                 C   s   |j | _t|j }|js td|j| _|d u rdg }t|D ]}|td| j|  q<t	|| _
nt|trx|f| _
n|| _
t| j
|krtdg | _t|D ]X}| j
| }|jdk s|j| j| ks|j|jkrtd|j| j|  }| j| qd S )NzChunked dataset requiredr   zCInvalid selection - selection region must have same rank as datasetzAInvalid selection - selection region must be within dataset space)r6   _shaper   rZ   r5   _layoutrangeappendslicer8   _selr2   r<   _chunk_indexstartstop)rt   ru   
source_selrankslicesr%   sindexr#   r#   r'   rv   1  s,    



&zChunkIterator.__init__c                 C   s   | S r"   r#   r   r#   r#   r'   __iter__O  s    zChunkIterator.__iter__c                 C   s2  t | j}g }|dks6| jd | jd  | jd jkr<t t|D ]p}| j| }| j| | j|  }| j| d | j|  }||jk r|j}||jkr|j}t	||d}|
| qD|d }|dkr*| j| }| j|  d7  < | j| | j|  }||jk rt|S |dkr d| j|< |d8 }qt|S )Nr   r   )r   r   r   r   r   r   StopIterationr   r   r   r   r8   )rt   r   r   r%   r   r   r   	chunk_endr#   r#   r'   __next__R  s2    
(







zChunkIterator.__next__)N)r   r   r   r   rv   r   r   r#   r#   r#   r'   r   -  s   
r   c                       st  e Zd ZdZdd Zd]ddZddd	d
Zer@ee	dd Z
edd Zee	dd Zedd Zeje	dd Zedd Zedd Zedd Zedd Zee	dd Zee	dd Zee	d d! Zee	d"d# Zee	d$d% Zee	d&d' Zee	d(d) Zee	d*d+ Zee	d,d- Zee	d.d/ Zee	d0d1 Zed2d3 Ze	d4d5 fd6d7
Z d^d8d9Z!e	d:d; Z"d<d= Z#e	d>d? Z$e	d_d@dAZ%edBdC Z&e	d`dDdEZ'e	dFdG Z(dadHdIZ)dbdJdKZ*e	dcdLdMZ+e	dNdO Z,e-e.j/dPr"e	dQdR Z0e-e.j/dSr<e	dTdU Z1e2r^ee	dVdW Z3e	dXdY Z4e	ddd[d\Z5  Z6S )eDatasetz(
        Represents an HDF5 dataset
    c                 C   s
   t | |S )z Get a wrapper allowing you to perform reads to a
        different destination type, e.g.:

        >>> double_precision = dataset.astype('f8')[0:100:2]
        )rq   )rt   r!   r#   r#   r'   r   |  s    zDataset.astypeNr   c                 C   s8   t | j}|du rtd|du r*|j}t| ||dS )a(  Get a wrapper to read string data as Python strings:

        >>> str_array = dataset.asstr()[:]

        The parameters have the same meaning as in ``bytes.decode()``.
        If ``encoding`` is unspecified, it will use the encoding in the HDF5
        datatype (either ascii or utf-8).
        NzFdset.asstr() can only be used on datasets with an HDF5 string datatype)r   )r   rG   r!   r5   rI   r   )rt   rI   r   rj   r#   r#   r'   asstr  s    	zDataset.asstr_prior_dtypec                C   s   |du r| j }t| ||S )aQ  Get a wrapper to read a subset of fields from a compound data type:

        >>> 2d_coords = dataset.fields(['x', 'y'])[:]

        If names is a string, a single field is extracted, and the resulting
        arrays will have that dtype. Otherwise, it should be an iterable,
        and the read data will have a compound dtype.
        N)r!   r   )rt   r   r   r#   r#   r'   r     s    	zDataset.fieldsc                 C   s   t | S )z3 Context manager for MPI collective reads & writes )r   r   r#   r#   r'   
collective  s    zDataset.collectivec                 C   s<   ddl m} t || W  d   S 1 s.0    Y  dS )z3 Access dimension scales attached to this dataset. r   )DimensionManagerN)dimsr   r   )rt   r   r#   r#   r'   r     s    zDataset.dimsc                 C   s   | j jS )z5Numpy-style attribute giving the number of dimensions)rA   r   r   r#   r#   r'   ndim  s    zDataset.ndimc                 C   sT   d| j v r| j d S t | jj}W d   n1 s60    Y  | jrP|| j d< |S )z1Numpy-style shape tuple giving dataset dimensionsr6   N)_cache_propsr   rA   r6   	_readonlyrt   r6   r#   r#   r'   r6     s    

&
zDataset.shapec                 C   s   |  | d S r"   )resizer   r#   r#   r'   r6     s    c                 C   sF   d| j v r| j d S | jr d}ntj| jtjd}| jrB|| j d< |S )z3Numpy-style attribute giving the total dataset sizesizeNr    )r   	_is_emptyr9   prodr6   intpr   rt   r   r#   r#   r'   r     s    


zDataset.sizec                 C   s   | j }|du rdS | jj| S )zHNumpy-style attribute giving the raw dataset size as the number of bytesNr   )r   r!   itemsizer   r#   r#   r'   nbytes  s    zDataset.nbytesc                 C   s8   d| j v r| j d S t| j }| jr4|| j d< |S )z/Internal object for optimised selection of datar   )r   r   ZSelectorrA   	get_spacer   )rt   Zslrr#   r#   r'   r     s    


zDataset._selectorc                 C   s4   d| j v r| j d S t| j}| jr0|| j d< |S )z-Internal object for optimised reading of data_fast_reader)r   r   ReaderrA   r   )rt   rdrr#   r#   r'   r     s    


zDataset._fast_readerc                 C   s   | j jS )z%Numpy dtype representing the datatype)rA   r!   r   r#   r#   r'   r!     s    zDataset.dtypec                 C   s    | j }| tjkr| S dS )zDataset chunks (or None)N)_dcpl
get_layoutr	   CHUNKED	get_chunk)rt   r0   r#   r#   r'   rZ     s    zDataset.chunksc                 C   s    dD ]}|| j v r|  S qdS )zCompression strategy (or None))r+   lzfszipN_filters)rt   xr#   r#   r'   r[     s    

zDataset.compressionc                 C   s   | j | jdS )z< Compression setting.  Int(0-9) for gzip, 2-tuple for szip. N)r   getr[   r   r#   r#   r'   r_     s    zDataset.compression_optsc                 C   s
   d| j v S )zShuffle filter present (T/F)r\   r   r   r#   r#   r'   r\      s    zDataset.shufflec                 C   s
   d| j v S )z"Fletcher32 filter is present (T/F)r]   r   r   r#   r#   r'   r]   &  s    zDataset.fletcher32c                 C   s*   z| j d d W S  ty$   Y dS 0 dS )a  Scale/offset filter settings. For integer data types, this is
        the number of bits stored, or 0 for auto-detected. For floating
        point data types, this is the number of decimal places retained.
        If the scale/offset filter is not in use, this is None.ra   r   N)r   KeyErrorr   r#   r#   r'   ra   ,  s    zDataset.scaleoffsetc                 C   sT   | j  }|dkrdS t }t|D ]*}| j |\}}}|t|||f q$|S )zExternal file settings. Returns a list of tuples of
        (name, offset, size) for each external file entry, or returns None
        if no external files are used.r   N)r   get_external_countlistr   get_externalr   r   )rt   countZext_listr   rY   offsetr   r#   r#   r'   rc   8  s    
zDataset.externalc                 C   s2   | j  }|d}|du r dS tdd |D S )zcShape up to which this dataset can be resized.  Axes with value
        None have no resize limit. TNc                 s   s    | ]}|t jkr|nd V  qd S r"   r,   r$   r   r#   r#   r'   r(   Q  r/   z#Dataset.maxshape.<locals>.<genexpr>)rA   r   get_simple_extent_dimsr8   )rt   spacer   r#   r#   r'   r^   G  s
    

zDataset.maxshapec                 C   s$   t jd| jd}| j| |d S )z*Fill value for this dataset (0 by default))r   r    r   )r9   zerosr!   r   get_fill_value)rt   arrr#   r#   r'   r`   S  s    zDataset.fillvaluec                 C   s   | j   S )z9Get extent type for this dataset - SIMPLE, SCALAR or NULL)rA   r   get_simple_extent_typer   r#   r#   r'   _extent_type[  s    zDataset._extent_typec                 C   s   | j tjkS )zCheck if extent type is empty)r   r   rS   r   r#   r#   r'   r   a  s    zDataset._is_emptyF)readonlyc                   sl   t |tjstd| t | | j | _t	
t	j| _t| j| _|| _i | _t | _d| j_dS )zJ Create a new Dataset object by binding to a low-level DatasetID.
        z%s is not a DatasetIDN)r2   r	   	DatasetIDr<   superrv   rA   get_create_plistr   r
   rE   DATASET_XFERr   r   get_filtersr   r   r   r   r~   r   )rt   bindr   	__class__r#   r'   rv   f  s    zDataset.__init__c              	   C   s   t  | jdu rtd|dur|dkr4|| jjk sHtd| jjd  zt|}W n tyn   tdY n0 t| j}|||< t	|}| j
| W d   n1 s0    Y  dS )aE   Resize the dataset, or the specified axis.

        The dataset must be stored in chunked format; it can be resized up to
        the "maximum shape" (keyword maxshape) specified at creation time.
        The rank of the dataset cannot be changed.

        "Size" should be a shape tuple, or if an axis is specified, an integer.

        BEWARE: This functions differently than the NumPy resize() method!
        The data is not "reshuffled" to fit in the new shape; each axis is
        grown or shrunk independently.  The coordinates of existing data are
        fixed.
        Nz$Only chunked datasets can be resizedr   zInvalid axis (0 to %s allowed)r   z2Argument must be a single int if axis is specified)r   rZ   r5   rA   r   r<   r7   r   r6   r8   
set_extent)rt   r   axisnewlenr#   r#   r'   r   v  s    

zDataset.resizec                 C   s   |   }|tjkrtd|S )z The size of the first axis.  TypeError if scalar.

        Limited to 2**32 on 32-bit systems; Dataset.len() is preferred.
        z>Value too big for Python's __len__; use Dataset.len() instead.)r   sysmaxsizeOverflowErrorr   r#   r#   r'   r     s    
zDataset.__len__c                 C   sJ   t 2 | j}t|dkr td|d W  d   S 1 s<0    Y  dS )z The size of the first axis.  TypeError if scalar.

        Use of this method is preferred to len(dset), as Python's built-in
        len() cannot handle values greater then 2**32 on 32-bit systems.
        r   z'Attempt to take len() of scalar datasetN)r   r6   r   r5   r   r#   r#   r'   r     s
    zDataset.lenc                 c   s:   | j }t|dkrtdt|d D ]}| | V  q&dS )z Iterate over the first axis.  TypeError if scalar.

        BEWARE: Modifications to the yielded data are *NOT* written to file.
        r   z#Can't iterate over a scalar datasetN)r6   r   r5   r   )rt   r6   ir#   r#   r'   r     s
    zDataset.__iter__c                 C   s
   t | |S )a   Return chunk iterator.  If set, the sel argument is a slice or
        tuple of slices that defines the region to be used. If not set, the
        entire dataspace will be used for the iterator.

        For each chunk within the given region, the iterator yields a tuple of
        slices that gives the intersection of the given chunk with the
        selection area.

        A TypeError will be raised if the dataset is not chunked.

        A ValueError will be raised if the selection region is invalid.

        )r   )rt   selr#   r#   r'   iter_chunks  s    zDataset.iter_chunksc                 C   s$   | j tjko"t| j tjtjfS )z+Is this dataset suitable for simple reading)	r   r   SIMPLEr2   rA   get_typer   TypeIntegerIDTypeFloatIDr   r#   r#   r'   _fast_read_ok  s    zDataset._fast_read_okc                 C   s  t |tr|n|f}|du r*t| jdd}| jrZ|du rZz| j|W S  tyX   Y n0 | jr|dkst	|dkr|d t
u rt| jS tdtdd |D }|rt	|dkr|d }td	d |D }| j||d
| S |du r| j}t|}t	|dkrt |d tjrt|d | j}|| jkr>tdt|d | j}t|}|du rlt|S tj||d}|jdkr|S t|}	|	  | j|	||| |S | jdkr|dkst	|dkr|d t
u rtj| j|dS | jdkr|| j }
t !|
|}|j"du r2tjd|d}ntj|j"|d}|D ]\}}
| j||
|| qF|j"du rx|d S |S tj#| j|| d}|j$dkrtj|j%|dS tj|j%|dd}t|j"}|j}
| jj||
||| j&d |jdkr|d S |S )a   Read a slice from the HDF5 dataset.

        Takes slices and recarray-style field names (more than one is
        allowed!) in any order.  Obeys basic NumPy rules, including
        broadcasting.

        Also supports:

        * Boolean "mask" array indexing
        Nr   r#   r   r   zEmpty datasets cannot be slicedc                 s   s   | ]}t |tr|V  qd S r"   r2   r   r   r#   r#   r'   r(     r/   z&Dataset.__getitem__.<locals>.<genexpr>c                 s   s   | ]}t |ts|V  qd S r"   r   r   r#   r#   r'   r(     r/   r   z+Region reference must point to this datasetr    datasetC)orderdxpl)'r2   r8   getattrr~   r   r   readr5   r   r   Ellipsisr   r!   r<   r   r   rB   r   RegionReferencedereferencerA   
get_regionr   guess_shaper9   r   r   r   rT   
select_allr6   r   sel2Zselect_readmshapeselectnselectarray_shaper   )rt   r{   rx   r   mtypeobjrk   r  outZsid_outfspace	selectionr   mspacer#   r#   r'   ry     sx     

 


&
zDataset.__getitem__c                    s  t |tr|n|f}tdd |D  tdd |D }tjdurJttfvrJztj|d}W nH t	y   z"tj
fdd|D jd}W n t	y   Y n0 Y n0 |jkr|jdkr*tj|jdd	 td
}dd |tj|jdd	 tjd|jd	 fD | dd< ntj
dgtd}||d< |}njjdksjjdkrJt |tjr|jjdkrJjjdu rJt dkrjjdur d jjvrt	d d  jj d  d }d}n
j}d}tj||jdd}|r|t d |fg}||jdt|jt|j  }njjdkrtjjdkrt|tu rtj|dtd}tj
dd |jD jd|j}n*t |tjrdnjj}tj|d|d}jjdurXjjd }|jt| d }	|	|kr"td|	|f tt|j|f}
|jdt|jt|  }n8t dkr|j}jjdu rtdfdd D }t|dkrd dd |D }t	d| t dkr|jjdu rt|j}t!tj"|# }
|
$% d d| nn fdd|jj&D }t!tj"|jj'}
|D ]@}t|jj| d }|jj| d }|
$%||| qBn
|j}d}
t(j)j|d}|j*dkrdS |d krH|j+d krHjjdurtd!j,r tj-j,tj.dtj-|j+tj.dkr tj|j+|jd}ntj|j+d	 |jd}||d"< |}|j}t/0|1|}|2|D ]}j3j4||||
j5d# qbdS )$z Write to the HDF5 dataset from a Numpy array.

        NumPy's broadcasting rules are honored, for "simple" indexing
        (slices and integers).  For advanced indexing, the shapes must
        match.
        c                 s   s   | ]}t |tr|V  qd S r"   r   r   r#   r#   r'   r(   J  r/   z&Dataset.__setitem__.<locals>.<genexpr>c                 s   s   | ]}t |ts|V  qd S r"   r   r   r#   r#   r'   r(   K  r/   Nr    c                    s   g | ]}t j| d qS )r    )r9   rJ   r   )vlenr#   r'   r   U  s   z'Dataset.__setitem__.<locals>.<listcomp>r   )r6   r!   c                 S   s   g | ]}|qS r#   r#   )r$   r   r#   r#   r'   r   \  r/   r   OVzNo such field for indexing: %sTFr   )r!   r   Sutf-8)r   r!   c                 S   s   g | ]}| d qS )r  )encode)r$   r   r#   r#   r'   r     s   zWWhen writing to array types, last N dimensions have to match (got %s, but should be %s)z1Illegal slicing argument (not a compound dataset)c                    s   g | ]}| j jvr|qS r#   )r!   r   r   r   r#   r'   r     r/   z, c                 s   s   | ]}d | V  qdS )"%s"Nr#   r   r#   r#   r'   r(     r/   z8Illegal slicing argument (fields %s not in dataset type)c                    s   g | ]}| v r|qS r#   r#   r   )r   r#   r'   r     r/   r   r#   z5Scalar broadcasting is not supported for array dtypes.r   )6r2   r8   r   check_vlen_dtyper!   bytesr   r9   asarrayr<   rJ   r   emptyr6   r   r   r:   r;   ravelkindndarraysubdtyper   r   r   viewrG   rI   r   r   r5   rB   joinrE   COMPOUNDget_sizeinsert_er   r   r   r  r  r  rZ   r   float64r   rT   Zexpand_shape	broadcastrA   rU   r   )rt   r{   valtmpr!   Zcast_compoundZ	str_arraydtshpZvalshpr  r  mismatchsubtype
fieldnames	fieldnamer   r  val2r  r  r#   )r   rt   r  r'   __setitem__?  s    

"



&



 zDataset.__setitem__c                 C   s   t  | jrtd|du r*t| j}nt| j|| }|j}|du rVt|j}nt|j|}||j	D ]}| jj
|||| jd qpW d   n1 s0    Y  dS )z Read data directly from HDF5 into an existing NumPy array.

        The destination array must be C-contiguous and writable.
        Selections must be the output of numpy.s_[<args>].

        Broadcasting is supported for simple indexing.
        z+Empty datasets have no numpy representationNr   )r   r   r5   r   SimpleSelectionr6   r  rA   r,  r  r  r   )rt   destr   dest_selr  r  r#   r#   r'   read_direct  s    zDataset.read_directc                 C   s   t  | jrtd|du r*t|j}nt|j|}|j}|du rTt| j}nt| j|| }||j	D ]}| jj
|||| jd qpW d   n1 s0    Y  dS )z Write data directly to HDF5 from a NumPy array.

        The source array must be C-contiguous.  Selections must be
        the output of numpy.s_[<args>].

        Broadcasting is supported for simple indexing.
        z#Empty datasets cannot be written toNr   )r   r   r5   r   r7  r6   r  rA   r,  r  rU   r   )rt   sourcer   r9  r  r  r#   r#   r'   write_direct  s    zDataset.write_directc                 C   sF   t j| j|du r| jn|d}t j| jt jddkr8|S | | |S )z Create a Numpy array containing the whole dataset.  DON'T THINK
        THIS MEANS DATASETS ARE INTERCHANGEABLE WITH ARRAYS.  For one thing,
        you have to read the whole dataset every time this method is called.
        Nr    r   )r9   r   r6   r!   r:   r;   r:  )rt   r!   r   r#   r#   r'   	__array__
  s
    
zDataset.__array__c                 C   sX   | s
d}nJ| j d u rd}n&tt| j }d|dkr:|nd }d|| j| jjf }|S )Nz<Closed HDF5 dataset>z("anonymous")r  r3   /z&<HDF5 dataset %s: shape %s, type "%s">)rY   ppbasenamenormpathr6   r!   r   )rt   rnamestrrY   r#   r#   r'   __repr__  s    
zDataset.__repr__refreshc                 C   s   | j   | j  dS )z Refresh the dataset metadata by reloading from the file.

            This is part of the SWMR features and only exist when the HDF5
            library version >=1.9.178
            N)_idrE  r   clearr   r#   r#   r'   rE  )  s    
zDataset.refreshflushc                 C   s   | j   dS )a    Flush the dataset data and metadata to the file.
            If the dataset is chunked, raw data chunks are written to the file.

            This is part of the SWMR features and only exist when the HDF5
            library version >=1.9.178
            N)rF  rH  r   r#   r#   r'   rH  4  s    zDataset.flushc                 C   s   | j  tjkS )z"Check if this is a virtual dataset)r   r   r	   VIRTUALr   r#   r#   r'   
is_virtual?  s    zDataset.is_virtualc                    s.   | j std| j  fddt  D S )z5Get a list of the data mappings for a virtual datasetzNot a virtual datasetc              	      s2   g | ]*}t  | | | |qS r#   )r   get_virtual_vspaceget_virtual_filenameget_virtual_dsetnameget_virtual_srcspace)r$   jr0   r#   r'   r   K  s   
z+Dataset.virtual_sources.<locals>.<listcomp>)rJ  RuntimeErrorr   r   get_virtual_countr   r#   rP  r'   virtual_sourcesE  s    

zDataset.virtual_sourcesr3   c                 C   s   t | j| | dS )zMake this dataset an HDF5 dimension scale.

        You can then attach it to dimensions of other datasets like this::

            other_ds.dims[0].attach_scale(ds)

        You can optionally pass a name to associate with this scale.
        N)r   	set_scalerF  r*  )rt   rY   r#   r#   r'   
make_scaleR  s    
zDataset.make_scale)Nr   )N)N)N)NN)NN)N)r3   )7r   r   r   r   r   r   r   MPIpropertyr   r   r   r   r6   setterr   r   r   r   r!   rZ   r[   r_   r\   r]   ra   rc   r^   r`   r   r   r   rv   r   r   r   r   r   r   ry   r6  r:  r<  r=  rD  hasattrr	   r   rE  rH  r   rJ  rS  rU  __classcell__r#   r#   r   r'   r   v  s   










 



l
 



	
	
r   )NNNNNNNNNNNNFNNNNNNF)NNN)7r   	posixpathr?  r   warningsr   	threadingr   r9   r3   r   r   r   r   r	   r
   r   r   r   h5py_warningsr   r   r   r   r   r   r   r   r   r   r   r   r
  datatyper   compatr   vdsr   r   	frozensetr   rC   
get_configmpirV  rm   rp   rq   r   r   r   r   r   r   r#   r#   r#   r'   <module>
   sB   , 
       

  I