from __future__ import annotations

import logging
import os
import time
import traceback
from collections.abc import Iterable
from glob import glob
from typing import TYPE_CHECKING, Any, ClassVar

import numpy as np

from xarray.conventions import cf_encoder
from xarray.core import indexing
from xarray.core.utils import FrozenDict, NdimSizeLenMixin, is_remote_uri
from xarray.namedarray.parallelcompat import get_chunked_array_type
from xarray.namedarray.pycompat import is_chunked_array

if TYPE_CHECKING:
    from io import BufferedIOBase

    from h5netcdf.legacyapi import Dataset as ncDatasetLegacyH5
    from netCDF4 import Dataset as ncDataset

    from xarray.core.dataset import Dataset
    from xarray.core.datatree import DataTree
    from xarray.core.types import NestedSequence

logger = logging.getLogger(__name__)


NONE_VAR_NAME = "__values__"


def _normalize_path(path):
    """
    Normalize pathlikes to string.

    Parameters
    ----------
    path :
        Path to file.

    Examples
    --------
    >>> from pathlib import Path

    >>> directory = Path(xr.backends.common.__file__).parent
    >>> paths_path = Path(directory).joinpath("comm*n.py")
    >>> paths_str = xr.backends.common._normalize_path(paths_path)
    >>> print([type(p) for p in (paths_str,)])
    [<class 'str'>]
    """
    if isinstance(path, os.PathLike):
        path = os.fspath(path)

    if isinstance(path, str) and not is_remote_uri(path):
        path = os.path.abspath(os.path.expanduser(path))

    return path


def _find_absolute_paths(
    paths: str | os.PathLike | NestedSequence[str | os.PathLike], **kwargs
) -> list[str]:
    """
    Find absolute paths from the pattern.

    Parameters
    ----------
    paths :
        Path(s) to file(s). Can include wildcards like * .
    **kwargs :
        Extra kwargs. Mainly for fsspec.

    Examples
    --------
    >>> from pathlib import Path

    >>> directory = Path(xr.backends.common.__file__).parent
    >>> paths = str(Path(directory).joinpath("comm*n.py"))  # Find common with wildcard
    >>> paths = xr.backends.common._find_absolute_paths(paths)
    >>> [Path(p).name for p in paths]
    ['common.py']
    """
    if isinstance(paths, str):
        if is_remote_uri(paths) and kwargs.get("engine", None) == "zarr":
            try:
                from fsspec.core import get_fs_token_paths
            except ImportError as e:
                raise ImportError(
                    "The use of remote URLs for opening zarr requires the package fsspec"
                ) from e

            fs, _, _ = get_fs_token_paths(
                paths,
                mode="rb",
                storage_options=kwargs.get("backend_kwargs", {}).get(
                    "storage_options", {}
                ),
                expand=False,
            )
            tmp_paths = fs.glob(fs._strip_protocol(paths))  # finds directories
            paths = [fs.get_mapper(path) for path in tmp_paths]
        elif is_remote_uri(paths):
            raise ValueError(
                "cannot do wild-card matching for paths that are remote URLs "
                f"unless engine='zarr' is specified. Got paths: {paths}. "
                "Instead, supply paths as an explicit list of strings."
            )
        else:
            paths = sorted(glob(_normalize_path(paths)))
    elif isinstance(paths, os.PathLike):
        paths = [os.fspath(paths)]
    else:
        paths = [os.fspath(p) if isinstance(p, os.PathLike) else p for p in paths]

    return paths


def _encode_variable_name(name):
    if name is None:
        name = NONE_VAR_NAME
    return name


def _decode_variable_name(name):
    if name == NONE_VAR_NAME:
        name = None
    return name


def _open_datatree_netcdf(
    ncDataset: ncDataset | ncDatasetLegacyH5,
    filename_or_obj: str | os.PathLike[Any] | BufferedIOBase | AbstractDataStore,
    **kwargs,
) -> DataTree:
    from xarray.backends.api import open_dataset
    from xarray.core.datatree import DataTree
    from xarray.core.treenode import NodePath

    # open the root group, then walk every netCDF group and attach it as a node
    ds = open_dataset(filename_or_obj, **kwargs)
    tree_root = DataTree.from_dict({"/": ds})
    with ncDataset(filename_or_obj, mode="r") as ncds:
        for path in _iter_nc_groups(ncds):
            subgroup_ds = open_dataset(filename_or_obj, group=path, **kwargs)

            node_name = NodePath(path).name
            new_node: DataTree = DataTree(name=node_name, data=subgroup_ds)
            tree_root._set_item(
                path,
                new_node,
                allow_overwrite=False,
                new_nodes_along_path=True,
            )
    return tree_root


def _iter_nc_groups(root, parent="/"):
    from xarray.core.treenode import NodePath

    parent = NodePath(parent)
    for path, group in root.groups.items():
        gpath = parent / path
        yield str(gpath)
        yield from _iter_nc_groups(group, parent=gpath)


def find_root_and_group(ds):
    """Find the root and group name of a netCDF4/h5netcdf dataset."""
    hierarchy = ()
    while ds.parent is not None:
        hierarchy = (ds.name.split("/")[-1],) + hierarchy
        ds = ds.parent
    group = "/" + "/".join(hierarchy)
    return ds, group


def robust_getitem(array, key, catch=Exception, max_retries=6, initial_delay=500):
    """
    Robustly index an array, using retry logic with exponential backoff if any
    of the errors ``catch`` are raised. The initial_delay is measured in ms.

    With the default settings, the maximum delay will be in the range of 32-64
    seconds.
    """
    assert max_retries >= 0

    for n in range(max_retries + 1):
        try:
            return array[key]
        except catch:
            if n == max_retries:
                raise
            base_delay = initial_delay * 2**n
            next_delay = base_delay + np.random.randint(base_delay)
            msg = (
                f"getitem failed, waiting {next_delay} ms before trying again "
                f"({max_retries - n} tries remaining). Full traceback: "
                f"{traceback.format_exc()}"
            )
            logger.debug(msg)
            time.sleep(1e-3 * next_delay)
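

# Illustrative usage sketch for ``robust_getitem`` (not part of the module; the
# ``remote_var`` object below is hypothetical): it retries transient read
# failures with exponential backoff instead of failing on the first error::
#
#     data = robust_getitem(remote_var, (slice(None), 0), catch=OSError)
#
# This is roughly equivalent to ``remote_var[(slice(None), 0)]`` but retries up
# to ``max_retries`` times, doubling the delay after each failed attempt.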


class BackendArray(NdimSizeLenMixin, indexing.ExplicitlyIndexed):
    __slots__ = ()

    def get_duck_array(self, dtype: np.typing.DTypeLike = None):
        key = indexing.BasicIndexer((slice(None),) * self.ndim)
        return self[key]


class AbstractDataStore:
    __slots__ = ()

    def get_dimensions(self):
        raise NotImplementedError()

    def get_attrs(self):
        raise NotImplementedError()

    def get_variables(self):
        raise NotImplementedError()

    def get_encoding(self):
        return {}

    def load(self):
        """
        This loads the variables and attributes simultaneously.
        A centralized loading function makes it easier to create
        data stores that do automatic encoding/decoding.

        For example::

            class SuffixAppendingDataStore(AbstractDataStore):

                def load(self):
                    variables, attributes = AbstractDataStore.load(self)
                    variables = {'%s_suffix' % k: v
                                 for k, v in variables.items()}
                    attributes = {'%s_suffix' % k: v
                                  for k, v in attributes.items()}
                    return variables, attributes

        This function will be called anytime variables or attributes
        are requested, so care should be taken to make sure it's fast.
        """
        variables = FrozenDict(
            (_decode_variable_name(k), v) for k, v in self.get_variables().items()
        )
        attributes = FrozenDict(self.get_attrs())
        return variables, attributes

    def close(self):
        pass

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        self.close()


class ArrayWriter:
    __slots__ = ("sources", "targets", "regions", "lock")

    def __init__(self, lock=None):
        self.sources = []
        self.targets = []
        self.regions = []
        self.lock = lock

    def add(self, source, target, region=None):
        # chunked (lazy) sources are queued and written together in sync();
        # in-memory sources are copied into the target immediately
        if is_chunked_array(source):
            self.sources.append(source)
            self.targets.append(target)
            self.regions.append(region)
        else:
            if region:
                target[region] = source
            else:
                target[...] = source

    def sync(self, compute=True, chunkmanager_store_kwargs=None):
        if self.sources:
            chunkmanager = get_chunked_array_type(*self.sources)

            if chunkmanager_store_kwargs is None:
                chunkmanager_store_kwargs = {}

            delayed_store = chunkmanager.store(
                self.sources,
                self.targets,
                lock=self.lock,
                compute=compute,
                flush=True,
                regions=self.regions,
                **chunkmanager_store_kwargs,
            )
            self.sources = []
            self.targets = []
            self.regions = []
            return delayed_store
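

# Illustrative sketch (not part of the module): an ``ArrayWriter`` collects
# chunked sources and flushes them in a single store operation, while copying
# in-memory arrays eagerly. The ``source_array`` and ``target`` objects below
# are hypothetical::
#
#     writer = ArrayWriter()
#     writer.add(source_array, target)  # chunked sources are only queued here
#     writer.sync(compute=True)         # queued writes are flushed together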


class AbstractWritableDataStore(AbstractDataStore):
    __slots__ = ()

    def encode(self, variables, attributes):
        """
        Encode the variables and attributes in this store

        Parameters
        ----------
        variables : dict-like
            Dictionary of key/value (variable name / xr.Variable) pairs
        attributes : dict-like
            Dictionary of key/value (attribute name / attribute) pairs

        Returns
        -------
        variables : dict-like
        attributes : dict-like

        """
        variables = {k: self.encode_variable(v) for k, v in variables.items()}
        attributes = {k: self.encode_attribute(v) for k, v in attributes.items()}
        return variables, attributes

    def encode_variable(self, v):
        """encode one variable"""
        return v

    def encode_attribute(self, a):
        """encode one attribute"""
        return a

    def set_dimension(self, dim, length):
        raise NotImplementedError()

    def set_attribute(self, k, v):
        raise NotImplementedError()

    def set_variable(self, k, v):
        raise NotImplementedError()

    def store_dataset(self, dataset):
        """
        in stores, variables are all variables AND coordinates
        in xarray.Dataset variables are variables NOT coordinates,
        so here we pass the whole dataset in instead of doing
        dataset.variables
        """
        self.store(dataset, dataset.attrs)

    def store(
        self,
        variables,
        attributes,
        check_encoding_set=frozenset(),
        writer=None,
        unlimited_dims=None,
    ):
        """
        Top level method for putting data on this store, this method:
          - encodes variables/attributes
          - sets dimensions
          - sets variables

        Parameters
        ----------
        variables : dict-like
            Dictionary of key/value (variable name / xr.Variable) pairs
        attributes : dict-like
            Dictionary of key/value (attribute name / attribute) pairs
        check_encoding_set : list-like
            List of variables that should be checked for invalid encoding
            values
        writer : ArrayWriter
        unlimited_dims : list-like
            List of dimension names that should be treated as unlimited
            dimensions.
        """
        if writer is None:
            writer = ArrayWriter()

        variables, attributes = self.encode(variables, attributes)

        self.set_attributes(attributes)
        self.set_dimensions(variables, unlimited_dims=unlimited_dims)
        self.set_variables(
            variables, check_encoding_set, writer, unlimited_dims=unlimited_dims
        )

    def set_attributes(self, attributes):
        """
        This provides a centralized method to set the dataset attributes on the
        data store.

        Parameters
        ----------
        attributes : dict-like
            Dictionary of key/value (attribute name / attribute) pairs
        """
        for k, v in attributes.items():
            self.set_attribute(k, v)

    def set_variables(self, variables, check_encoding_set, writer, unlimited_dims=None):
        """
        This provides a centralized method to set the variables on the data
        store.

        Parameters
        ----------
        variables : dict-like
            Dictionary of key/value (variable name / xr.Variable) pairs
        check_encoding_set : list-like
            List of variables that should be checked for invalid encoding
            values
        writer : ArrayWriter
        unlimited_dims : list-like
            List of dimension names that should be treated as unlimited
            dimensions.
        """
        for vn, v in variables.items():
            name = _encode_variable_name(vn)
            check = vn in check_encoding_set
            target, source = self.prepare_variable(
                name, v, check, unlimited_dims=unlimited_dims
            )

            writer.add(source, target)

    def set_dimensions(self, variables, unlimited_dims=None):
        """
        This provides a centralized method to set the dimensions on the data
        store.

        Parameters
        ----------
        variables : dict-like
            Dictionary of key/value (variable name / xr.Variable) pairs
        unlimited_dims : list-like
            List of dimension names that should be treated as unlimited
            dimensions.
        """
        if unlimited_dims is None:
            unlimited_dims = set()

        existing_dims = self.get_dimensions()

        dims = {}
        for v in unlimited_dims:  # put unlimited_dims first
            dims[v] = None
        for v in variables.values():
            dims.update(dict(zip(v.dims, v.shape)))

        for dim, length in dims.items():
            if dim in existing_dims and length != existing_dims[dim]:
                raise ValueError(
                    "Unable to update size for existing dimension"
                    f"{dim!r} ({length} != {existing_dims[dim]})"
                )
            elif dim not in existing_dims:
                is_unlimited = dim in unlimited_dims
                self.set_dimension(dim, length, is_unlimited)


class WritableCFDataStore(AbstractWritableDataStore):
    __slots__ = ()

    def encode(self, variables, attributes):
        # CF-encode variables and attributes before handing them to the
        # store-specific encoders
        variables, attributes = cf_encoder(variables, attributes)
        variables = {k: self.encode_variable(v) for k, v in variables.items()}
        attributes = {k: self.encode_attribute(v) for k, v in attributes.items()}
        return variables, attributes


class BackendEntrypoint:
    """
    ``BackendEntrypoint`` is a class container and it is the main interface
    for the backend plugins, see :ref:`RST backend_entrypoint`.
    It shall implement:

    - ``open_dataset`` method: it shall implement reading from file, variables
      decoding and it returns an instance of :py:class:`~xarray.Dataset`.
      It shall take in input at least ``filename_or_obj`` argument and
      ``drop_variables`` keyword argument.
      For more details see :ref:`RST open_dataset`.
    - ``guess_can_open`` method: it shall return ``True`` if the backend is able to open
      ``filename_or_obj``, ``False`` otherwise. The implementation of this
      method is not mandatory.
    - ``open_datatree`` method: it shall implement reading from file, variables
      decoding and it returns an instance of :py:class:`~datatree.DataTree`.
      It shall take in input at least ``filename_or_obj`` argument. The
      implementation of this method is not mandatory.  For more details see
      <reference to open_datatree documentation>.

    Attributes
    ----------

    open_dataset_parameters : tuple, default: None
        A list of ``open_dataset`` method parameters.
        The setting of this attribute is not mandatory.
    description : str, default: ""
        A short string describing the engine.
        The setting of this attribute is not mandatory.
    url : str, default: ""
        A string with the URL to the backend's documentation.
        The setting of this attribute is not mandatory.
    """

    open_dataset_parameters: ClassVar[tuple | None] = None
    description: ClassVar[str] = ""
    url: ClassVar[str] = ""

    def __repr__(self) -> str:
        txt = f"<{type(self).__name__}>"
        if self.description:
            txt += f"\n  {self.description}"
        if self.url:
            txt += f"\n  Learn more at {self.url}"
        return txt

    def open_dataset(
        self,
        filename_or_obj: str | os.PathLike[Any] | BufferedIOBase | AbstractDataStore,
        *,
        drop_variables: str | Iterable[str] | None = None,
        **kwargs: Any,
    ) -> Dataset:
        """
        Backend open_dataset method used by Xarray in :py:func:`~xarray.open_dataset`.
        """
        raise NotImplementedError()

    def guess_can_open(
        self,
        filename_or_obj: str | os.PathLike[Any] | BufferedIOBase | AbstractDataStore,
    ) -> bool:
        """
        Backend guess_can_open method used by Xarray in :py:func:`~xarray.open_dataset`.
        """
        return False

    def open_datatree(
        self,
        filename_or_obj: str | os.PathLike[Any] | BufferedIOBase | AbstractDataStore,
        **kwargs: Any,
    ) -> DataTree:
        """
        Backend open_datatree method used by Xarray in :py:func:`~xarray.open_datatree`.
        """
        raise NotImplementedError()
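

# Illustrative sketch (not part of the module): a third-party backend would
# subclass ``BackendEntrypoint`` roughly as below and register the class under
# the ``xarray.backends`` entry-point group. ``MyBackendEntrypoint`` and the
# ``.my`` suffix are hypothetical names used only for the example::
#
#     class MyBackendEntrypoint(BackendEntrypoint):
#         description = "Open .my files in Xarray"
#         url = "https://example.com/my-backend-docs"
#
#         def open_dataset(self, filename_or_obj, *, drop_variables=None, **kwargs):
#             ...  # read filename_or_obj and return an xarray.Dataset
#
#         def guess_can_open(self, filename_or_obj):
#             return str(filename_or_obj).endswith(".my")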


BACKEND_ENTRYPOINTS: dict[str, tuple[str | None, type[BackendEntrypoint]]] = {}