
    l$e?                        d dl Z d dlZd dlmZ d dlmZmZmZmZm	Z	m
Z
 d dlZddlmZmZ ddlmZmZ ddlmZ ddlmZmZmZ  e            rd dlZ ej        e          Z e	d	d
          Z G d d
          Z G d de          ZdS )    N)Path)DictListOptionalTypeTypeVarUnion   )CONFIG_NAMEPYTORCH_WEIGHTS_NAME)hf_hub_downloadis_torch_available)HfApi)SoftTemporaryDirectoryloggingvalidate_hf_hub_argsTModelHubMixin)boundc                      e Zd ZdZdddddeeef         dee         dee         de	d	ee         f
d
Z
ded	dfdZeedddddddddee         deeef         de	de	dee         deeee	f                  deeeef                  de	dee         d	efd                        Zedee         dedee         deeeef                  de	dee         de	de	deeee	f                  d	efd            Zeddddddddddd
dedee         dede	dee         dee         dee         dee	         d eeee         ef                  d!eeee         ef                  d"eeee         ef                  d	efd#            ZdS )$r   a  
    A generic mixin to integrate ANY machine learning framework with the Hub.

    To integrate your framework, your model class must inherit from this class. Custom logic for saving/loading models
    has to be overwritten in [`_from_pretrained`] and [`_save_pretrained`]. [`PyTorchModelHubMixin`] is a good example
    of mixin integration with the Hub. Check out our [integration guide](../guides/integrations) for more instructions.
    """

    def save_pretrained(
        self,
        save_directory: Union[str, Path],
        *,
        config: Optional[dict] = None,
        repo_id: Optional[str] = None,
        push_to_hub: bool = False,
        **kwargs,
    ) -> Optional[str]:
        """
        Save weights in local directory.

        Args:
            save_directory (`str` or `Path`):
                Path to directory in which the model weights and configuration will be saved.
            config (`dict`, *optional*):
                Model configuration specified as a key/value dictionary.
            push_to_hub (`bool`, *optional*, defaults to `False`):
                Whether or not to push your model to the Huggingface Hub after saving it.
            repo_id (`str`, *optional*):
                ID of your repository on the Hub. Used only if `push_to_hub=True`. Will default to the folder name if
                not provided.
            kwargs:
                Additional key word arguments passed along to the [`~ModelHubMixin.push_to_hub`] method.
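
        Example (an illustrative sketch; assumes a `MyModel` class mixing in [`PyTorchModelHubMixin`], as shown in
        that class's docstring below):

        ```python
        >>> model = MyModel()
        >>> # save weights and config.json to a local folder
        >>> model.save_pretrained("path/to/checkpoint", config={"num_layers": 1})
        >>> # save locally and push to the Hub in one call; the repo name defaults to the folder name
        >>> model.save_pretrained("my-awesome-model", config={"num_layers": 1}, push_to_hub=True)
        ```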
        T)parentsexist_okNr   r    )r   mkdir_save_pretrained
isinstancedictr   
write_textjsondumpscopynamer   )selfr   r   r   r   kwargss         9lib/python3.11/site-packages/huggingface_hub/hub_mixin.pysave_pretrainedzModelHubMixin.save_pretrained    s    2 n--TD999 	n--- fd## 	Jk)55dj6H6HIII 	?[[]]F!#)x (-#4#>>G>v>>>t    c                     t           )aD  
        Overwrite this method in subclass to define how to save your model.
        Check out our [integration guide](../guides/integrations) for instructions.

        Args:
            save_directory (`str` or `Path`):
                Path to directory in which the model weights and configuration will be saved.
        """
        raise NotImplementedError

    @classmethod
    @validate_hf_hub_args
    def from_pretrained(
        cls: Type[T],
        pretrained_model_name_or_path: Union[str, Path],
        *,
        force_download: bool = False,
        resume_download: bool = False,
        proxies: Optional[Dict] = None,
        token: Optional[Union[str, bool]] = None,
        cache_dir: Optional[Union[str, Path]] = None,
        local_files_only: bool = False,
        revision: Optional[str] = None,
        **model_kwargs,
    ) -> T:
        """
        Download a model from the Huggingface Hub and instantiate it.

        Args:
            pretrained_model_name_or_path (`str`, `Path`):
                - Either the `model_id` (string) of a model hosted on the Hub, e.g. `bigscience/bloom`.
                - Or a path to a `directory` containing model weights saved using
                    [`~transformers.PreTrainedModel.save_pretrained`], e.g., `../path/to/my_model_directory/`.
            revision (`str`, *optional*):
                Revision of the model on the Hub. Can be a branch name, a git tag or any commit id.
                Defaults to the latest commit on `main` branch.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding
                the existing cache.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether to delete incompletely received files. Will attempt to resume the download if such a file exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`. The proxies are used on every request.
            token (`str` or `bool`, *optional*):
                The token to use as HTTP bearer authorization for remote files. By default, it will use the token
                cached when running `huggingface-cli login`.
            cache_dir (`str`, `Path`, *optional*):
                Path to the folder where cached files are stored.
            local_files_only (`bool`, *optional*, defaults to `False`):
                If `True`, avoid downloading the file and return the path to the local cached file if it exists.
            model_kwargs (`Dict`, *optional*):
                Additional kwargs to pass to the model during initialization.
        """
        model_id = pretrained_model_name_or_path

        # resolve config.json, either from the local directory or from the Hub
        config_file: Optional[str] = None
        if os.path.isdir(model_id):
            if CONFIG_NAME in os.listdir(model_id):
                config_file = os.path.join(model_id, CONFIG_NAME)
            else:
                logger.warning(f"{CONFIG_NAME} not found in {Path(model_id).resolve()}")
        else:
            try:
                config_file = hf_hub_download(
                    repo_id=str(model_id),
                    filename=CONFIG_NAME,
                    revision=revision,
                    cache_dir=cache_dir,
                    force_download=force_download,
                    proxies=proxies,
                    resume_download=resume_download,
                    token=token,
                    local_files_only=local_files_only,
                )
            except requests.exceptions.RequestException:
                logger.warning(f"{CONFIG_NAME} not found in HuggingFace Hub.")

        # forward the parsed config to the model as a `config` kwarg
        if config_file is not None:
            with open(config_file, "r", encoding="utf-8") as f:
                config = json.load(f)
            model_kwargs.update({"config": config})

        return cls._from_pretrained(
            model_id=str(model_id),
            revision=revision,
            cache_dir=cache_dir,
            force_download=force_download,
            proxies=proxies,
            resume_download=resume_download,
            local_files_only=local_files_only,
            token=token,
            **model_kwargs,
        )

    @classmethod
    def _from_pretrained(
        cls: Type[T],
        *,
        model_id: str,
        revision: Optional[str],
        cache_dir: Optional[Union[str, Path]],
        force_download: bool,
        proxies: Optional[Dict],
        resume_download: bool,
        local_files_only: bool,
        token: Optional[Union[str, bool]],
        **model_kwargs,
    ) -> T:
        """Overwrite this method in subclass to define how to load your model from pretrained.
        Use [`hf_hub_download`] or [`snapshot_download`] to download files from the Hub before loading them. Most
        args taken as input can be directly passed to those 2 methods. If needed, you can add more arguments to this
        method using "model_kwargs". For example [`PyTorchModelHubMixin._from_pretrained`] takes as input a `map_location`
        parameter to set on which device the model should be loaded.

        Check out our [integration guide](../guides/integrations) for more instructions.

        Args:
            model_id (`str`):
                ID of the model to load from the Huggingface Hub (e.g. `bigscience/bloom`).
            revision (`str`, *optional*):
                Revision of the model on the Hub. Can be a branch name, a git tag or any commit id. Defaults to the
                latest commit on `main` branch.
            force_download (`bool`, *optional*, defaults to `False`):
                Whether to force (re-)downloading the model weights and configuration files from the Hub, overriding
                the existing cache.
            resume_download (`bool`, *optional*, defaults to `False`):
                Whether to delete incompletely received files. Will attempt to resume the download if such a file exists.
            proxies (`Dict[str, str]`, *optional*):
                A dictionary of proxy servers to use by protocol or endpoint (e.g., `{'http': 'foo.bar:3128',
                'http://hostname': 'foo.bar:4012'}`).
            token (`str` or `bool`, *optional*):
                The token to use as HTTP bearer authorization for remote files. By default, it will use the token
                cached when running `huggingface-cli login`.
            cache_dir (`str`, `Path`, *optional*):
                Path to the folder where cached files are stored.
            local_files_only (`bool`, *optional*, defaults to `False`):
                If `True`, avoid downloading the file and return the path to the local cached file if it exists.
            model_kwargs:
                Additional keyword arguments passed along to the [`~ModelHubMixin._from_pretrained`] method.
        r/   )
r8   r?   r7   r5   r1   r3   r2   r6   r4   rP   s
             r+   rO   zModelHubMixin._from_pretrained   s    ^ "!r-   z!Push model using huggingface_hub.)
r   commit_messageprivateapi_endpointr4   branch	create_prallow_patternsignore_patternsdelete_patternsrU   rV   rW   rX   rY   rZ   r[   r\   c       
         <   t          ||          }|                    ||d          j        }t                      5 }t	          |          |z  }|                     ||           |                    |d|||||	|
|	  	        cddd           S # 1 swxY w Y   dS )a  
        Upload model checkpoint to the Hub.

        Use `allow_patterns` and `ignore_patterns` to precisely filter which files should be pushed to the hub. Use
        `delete_patterns` to delete existing remote files in the same commit. See [`upload_folder`] reference for more
        details.


        Args:
            repo_id (`str`):
                ID of the repository to push to (example: `"username/my-model"`).
            config (`dict`, *optional*):
                Configuration object to be saved alongside the model weights.
            commit_message (`str`, *optional*):
                Message to commit while pushing.
            private (`bool`, *optional*, defaults to `False`):
                Whether the repository created should be private.
            api_endpoint (`str`, *optional*):
                The API endpoint to use when pushing the model to the hub.
            token (`str`, *optional*):
                The token to use as HTTP bearer authorization for remote files. By default, it will use the token
                cached when running `huggingface-cli login`.
            branch (`str`, *optional*):
                The git branch on which to push the model. This defaults to `"main"`.
            create_pr (`boolean`, *optional*):
                Whether or not to create a Pull Request from `branch` with that commit. Defaults to `False`.
            allow_patterns (`List[str]` or `str`, *optional*):
                If provided, only files matching at least one pattern are pushed.
            ignore_patterns (`List[str]` or `str`, *optional*):
                If provided, files matching any of the patterns are not pushed.
            delete_patterns (`List[str]` or `str`, *optional*):
                If provided, remote files matching any of the patterns will be deleted from the repo.

        Returns:
                The url of the commit of your model in the given repository.
        """
        api = HfApi(endpoint=api_endpoint, token=token)
        repo_id = api.create_repo(repo_id=repo_id, private=private, exist_ok=True).repo_id

        # push the files to the repo in a single commit, from a temporary working directory
        with SoftTemporaryDirectory() as tmp:
            saved_path = Path(tmp) / repo_id
            self.save_pretrained(saved_path, config=config)
            return api.upload_folder(
                repo_id=repo_id,
                repo_type="model",
                folder_path=saved_path,
                commit_message=commit_message,
                revision=branch,
                create_pr=create_pr,
                allow_patterns=allow_patterns,
                ignore_patterns=ignore_patterns,
                delete_patterns=delete_patterns,
            )


class PyTorchModelHubMixin(ModelHubMixin):
    """
    Implementation of [`ModelHubMixin`] to provide model Hub upload/download capabilities to PyTorch models. The model
    is set in evaluation mode by default using `model.eval()` (dropout modules are deactivated). To train the model,
    you should first set it back in training mode with `model.train()`.

    Example:

    ```python
    >>> import torch
    >>> import torch.nn as nn
    >>> from huggingface_hub import PyTorchModelHubMixin


    >>> class MyModel(nn.Module, PyTorchModelHubMixin):
    ...     def __init__(self):
    ...         super().__init__()
    ...         self.param = nn.Parameter(torch.rand(3, 4))
    ...         self.linear = nn.Linear(4, 5)

    ...     def forward(self, x):
    ...         return self.linear(x + self.param)
    >>> model = MyModel()

    # Save model weights to local directory
    >>> model.save_pretrained("my-awesome-model")

    # Push model weights to the Hub
    >>> model.push_to_hub("my-awesome-model")

    # Download and initialize weights from the Hub
    >>> model = MyModel.from_pretrained("username/my-awesome-model")
    ```
    """

    def _save_pretrained(self, save_directory: Path) -> None:
        """Save weights from a Pytorch model to a local directory."""
        model_to_save = self.module if hasattr(self, "module") else self  # unwrap distributed/DataParallel wrappers
        torch.save(model_to_save.state_dict(), save_directory / PYTORCH_WEIGHTS_NAME)

    @classmethod
    def _from_pretrained(
        cls,
        *,
        model_id: str,
        revision: Optional[str],
        cache_dir: Optional[Union[str, Path]],
        force_download: bool,
        proxies: Optional[Dict],
        resume_download: bool,
        local_files_only: bool,
        token: Union[str, bool, None],
        map_location: str = "cpu",
        strict: bool = False,
        **model_kwargs,
    ):
        """Load Pytorch pretrained weights and return the loaded model."""
        if os.path.isdir(model_id):
            print("Loading weights from local directory")
            model_file = os.path.join(model_id, PYTORCH_WEIGHTS_NAME)
        else:
            model_file = hf_hub_download(
                repo_id=model_id,
                filename=PYTORCH_WEIGHTS_NAME,
                revision=revision,
                cache_dir=cache_dir,
                force_download=force_download,
                proxies=proxies,
                resume_download=resume_download,
                token=token,
                local_files_only=local_files_only,
            )
        model = cls(**model_kwargs)

        state_dict = torch.load(model_file, map_location=torch.device(map_location))
        model.load_state_dict(state_dict, strict=strict)
        model.eval()

        return model
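

if __name__ == "__main__":
    # Illustrative sketch only (run with `python -m huggingface_hub.hub_mixin`): `JsonModel` is a hypothetical,
    # framework-agnostic class showing how the `ModelHubMixin` contract is used by overriding
    # `_save_pretrained` / `_from_pretrained`; it is not part of the library API.
    import tempfile

    class JsonModel(ModelHubMixin):
        def __init__(self, weights: Optional[dict] = None, **kwargs):
            self.weights = weights or {"w": [0.0, 1.0]}

        def _save_pretrained(self, save_directory: Path) -> None:
            # called by `save_pretrained` once the target directory exists
            (save_directory / "weights.json").write_text(json.dumps(self.weights))

        @classmethod
        def _from_pretrained(
            cls, *, model_id, revision, cache_dir, force_download, proxies, resume_download,
            local_files_only, token, **model_kwargs,
        ):
            # `from_pretrained` forwards every download argument here; only the local-directory case is
            # handled in this sketch (a real integration would call `hf_hub_download` for Hub repos)
            weights = json.loads((Path(model_id) / "weights.json").read_text())
            return cls(weights=weights, **model_kwargs)

    with tempfile.TemporaryDirectory() as tmp_dir:
        JsonModel().save_pretrained(tmp_dir, config={"version": 1})
        reloaded = JsonModel.from_pretrained(tmp_dir)
        print(reloaded.weights)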