# torch/_dynamo/backends/debugging.py
# TorchDynamo backends intended for debugging use.

import dataclasses
import functools
from importlib import import_module
from typing import Any, List, Optional

from functorch.compile import min_cut_rematerialization_partition

import torch
from torch import _guards
from torch._functorch.compilers import ts_compile

from .common import aot_autograd
from .registry import register_debug_backend as register_backend


@register_backend
def eager(gm, fake_tensor_inputs):
    # Run the captured graph as-is, without any compilation.
    return gm


@register_backend
def pre_dispatch_eager(gm, fake_tensor_inputs):
    from torch.fx.experimental.proxy_tensor import make_fx

    def runnable_gm(*args):
        return torch.fx.Interpreter(gm).run(*args)

    # Re-trace the graph at the pre-dispatch level and print it for inspection.
    pre_dispatch_gm = make_fx(runnable_gm, pre_dispatch=True)(*fake_tensor_inputs)
    pre_dispatch_gm.print_readable()

    return pre_dispatch_gm


@register_backend
def eager_debug(gm, fake_tensor_inputs):
    from torch._subclasses.schema_check_mode import SchemaCheckMode

    # Useful for checking for (and erroring on) ops with incorrect schemas.
    def inner(*args):
        with SchemaCheckMode():
            return torch.fx.Interpreter(gm).run(*args)

    return inner


@register_backend(name="ts")
def torchscript(gm, fake_tensor_inputs):
    return torch.jit.script(gm)


# Uses the boxed calling convention so inputs can be discarded as soon as they
# are no longer needed.
def boxed_nop(fx_g, example_inputs):
    def run(args):
        return torch.fx.Interpreter(fx_g).boxed_run(args)

    run._boxed_call = True
    return run


# aot_eager: AOT Autograd with a no-op compiler and the min-cut partitioner.
# Useful for debugging AOT Autograd without involving Inductor.
aot_eager = aot_autograd(
    fw_compiler=boxed_nop,
    partition_fn=min_cut_rematerialization_partition,
)
register_backend(name="aot_eager", compiler_fn=aot_eager)

# Same as aot_eager, but with the default partitioner.
aot_eager_default_partitioner = aot_autograd(fw_compiler=boxed_nop)
register_backend(
    name="aot_eager_default_partitioner", compiler_fn=aot_eager_default_partitioner
)

# Uses the TorchInductor decompositions and partitioner, but replaces the
# Inductor compiler with a no-op, to help isolate Inductor vs aot_eager errors.
aot_eager_decomp_partition = aot_autograd(
    fw_compiler=boxed_nop,
    bw_compiler=boxed_nop,
    # lambda to delay the import of Inductor
    decompositions=lambda: import_module(
        "torch._inductor.compile_fx"
    ).select_decomp_table(),
    partition_fn=functools.partial(
        min_cut_rematerialization_partition, compiler="inductor"
    ),
)
register_backend(
    name="aot_eager_decomp_partition", compiler_fn=aot_eager_decomp_partition
)

# AOT Autograd with the TorchScript compiler and the default partitioner.
aot_ts = aot_autograd(fw_compiler=ts_compile)
register_backend(name="aot_ts", compiler_fn=aot_ts)


# The backends below deliberately inject failures so that the repro-extraction
# and minifier machinery can be tested against them.


class ReluCompileError(Exception):
    pass


class TestingOnlyCompileError(Exception):
    pass


@register_backend
def relu_compile_error_TESTING_ONLY(gm: torch.fx.GraphModule, example_inputs):
    # Fail at compile time whenever the graph contains a relu.
    for node in gm.graph.nodes:
        if node.target == torch.relu:
            raise ReluCompileError()
    return gm


@register_backend
def relu_runtime_error_TESTING_ONLY(gm: torch.fx.GraphModule, example_inputs):
    # Rewrite every relu into a failing assert so the error surfaces at runtime.
    for node in gm.graph.nodes:
        if node.target == torch.relu:
            node.target = torch._assert
            node.args = (False, "ReluRuntimeError")
    gm.recompile()
    return gm


@register_backend
def relu_accuracy_error_TESTING_ONLY(gm: torch.fx.GraphModule, example_inputs):
    # Rewrite every relu into an add so results silently diverge from eager.
    for node in gm.graph.nodes:
        if node.target == torch.relu:
            node.target = torch.add
            node.args = (node.args[0], 1)
    gm.recompile()

    return gm


@register_backend
def non_leaf_compile_error_TESTING_ONLY(gm: torch.fx.GraphModule, example_inputs):
    # Only raise if the graph does something non-trivial.
    for node in gm.graph.nodes:
        if node.op == "call_function":
            break
    else:
        return gm
    # Raise when any example input is a non-leaf tensor.
    for t in example_inputs:
        if not t.is_leaf:
            raise TestingOnlyCompileError()
    return gm


@dataclasses.dataclass
class ExplainOutput:
    """
    This is the output of :func:`torch._dynamo.explain()`
    There is no reason to create this class directly.
    """

    graphs: List[torch.fx.GraphModule]
    graph_count: int
    graph_break_count: int
    break_reasons: List[Any]  # GraphCompileReason objects
    op_count: int
    ops_per_graph: Optional[List[torch.fx.Node]] = None
    out_guards: Optional[List[_guards.Guard]] = None
    compile_times: Optional[str] = None

    def __str__(self):
        output = f"Graph Count: {self.graph_count}\n"
        output += f"Graph Break Count: {self.graph_break_count}\n"
        output += f"Op Count: {self.op_count}\n"

        output += "Break Reasons:\n"
        for idx, break_reason in enumerate(self.break_reasons):
            output += f"  Break Reason {idx + 1}:\n"
            output += f"    Reason: {break_reason.reason}\n"
            output += "    User Stack:\n"
            for frame_summary in break_reason.user_stack:
                output += f"      {frame_summary}\n"

        if self.ops_per_graph is not None:
            output += "Ops per Graph:\n"
            for idx, ops in enumerate(self.ops_per_graph):
                output += f"  Ops {idx + 1}:\n"
                for op in ops:
                    output += f"    {op}\n"

        if self.out_guards is not None:
            output += "Out Guards:\n"
            for i, guard in enumerate(self.out_guards):
                output += f"  Guard {i + 1}:\n"
                output += f"    {str(guard)}"

        if self.compile_times is not None:
            output += f"Compile Times: {self.compile_times}\n"

        return output


def _explain_graph_detail(
    gm: torch.fx.GraphModule, graphs, op_count, ops_per_graph, break_reasons
):
    """
    This function is a utility which processes a torch.fx.GraphModule and
    accumulates information about its ops, graph breaks, and other details. It
    is intended to be used by the ExplainWithBackend class and
    `torch._dynamo.explain()` to provide details from Dynamo's graph capture.

    Parameters:
        gm (torch.fx.GraphModule): The GraphModule to be processed.
        graphs (list): A list that accumulates all the GraphModules processed.
        op_count (int): The total count of operations in all GraphModules processed so far.
        ops_per_graph (list): A list that accumulates the operations of each GraphModule.
        break_reasons (list): A list that accumulates the reasons for breaks in each GraphModule.

    Returns:
        tuple: A tuple containing the processed GraphModule, the updated lists of graphs,
               operations per graph, and break reasons, and the updated operation count.
    """
    graphs.append(gm)
    ops = [node.target for node in gm.graph.nodes if node.op == "call_function"]
    op_count += len(ops)
    ops_per_graph.append(ops)
    if gm.compile_subgraph_reason.graph_break:
        break_reasons.append(gm.compile_subgraph_reason)

    return gm, graphs, op_count, ops_per_graph, break_reasons


class ExplainWithBackend:
    """
    This class is intended to be used as a backend for `torch.compile`. It is
    composable with other backends. When used in this way, it accumulates
    information about graph breaks, ops, and other info and provides a string
    representation summarizing this information.

    Attributes:
        backend (str): The name of the backend to use for optimization.
        graphs (list): A list of the graphs captured by TorchDynamo.
        op_count (int): The total number of operations in all optimized graphs.
        break_reasons (list): A list of graph break reasons with stack traces.

    Example Usage:
        def fn(x):
            x = torch.sigmoid(x)
            return x

        torch._dynamo.reset()
        eb = ExplainWithBackend("inductor")
        optimized_fn = torch.compile(fn, backend=eb)
        result = optimized_fn(torch.randn(5))
        print(eb.output())
    """

    def __init__(self, backend):
        from .registry import lookup_backend

        self.backend = lookup_backend(backend)
        self.graphs = []
        self.op_count = 0
        self.break_reasons = []

    def __call__(self, gm: torch.fx.GraphModule, example_inputs):
        gm, self.graphs, self.op_count, _, self.break_reasons = _explain_graph_detail(
            gm, self.graphs, self.op_count, [], self.break_reasons
        )
        return self.backend(gm, example_inputs)

    def output(self) -> ExplainOutput:
        graph_count = len(self.graphs)
        output = ExplainOutput(
            self.graphs,
            graph_count,
            graph_count - 1,
            self.break_reasons,
            self.op_count,
        )

        return output
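
# Illustrative sketch (not part of the upstream module): one way to exercise
# ExplainWithBackend end to end. The helper name, the toy function, and the
# tensor shape below are hypothetical; only torch.compile, torch._dynamo.reset,
# and the registered "eager" backend name come from PyTorch itself.
def _demo_explain_with_backend():
    def fn(x):
        x = torch.sigmoid(x)
        return x * 2

    torch._dynamo.reset()
    eb = ExplainWithBackend("eager")
    opt_fn = torch.compile(fn, backend=eb)
    opt_fn(torch.randn(8))
    # ExplainOutput.__str__ renders graph/op counts and any break reasons.
    return str(eb.output())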