Server : Apache
System : Linux iad1-shared-b8-43 6.6.49-grsec-jammy+ #10 SMP Thu Sep 12 23:23:08 UTC 2024 x86_64
User : dh_edsupp (6597262)
PHP Version : 8.2.26
Disable Function : NONE
Directory : /lib/python3/dist-packages/boto/emr/__pycache__/

Current File : /lib/python3/dist-packages/boto/emr/__pycache__/step.cpython-310.pyc (compiled from /usr/lib/python3/dist-packages/boto/emr/step.py)
from boto.compat import six


class Step(object):
    """
    Jobflow Step base class
    """
    def jar(self):
        """
        :rtype: str
        :return: URI to the jar
        """
        raise NotImplemented()

    def args(self):
        """
        :rtype: list(str)
        :return: List of arguments for the step
        """
        raise NotImplemented()

    def main_class(self):
        """
        :rtype: str
        :return: The main class name
        """
        raise NotImplemented()


class JarStep(Step):
    """
    Custom jar step
    """
    def __init__(self, name, jar, main_class=None,
                 action_on_failure='TERMINATE_JOB_FLOW', step_args=None):
        """
        A elastic mapreduce step that executes a jar

        :type name: str
        :param name: The name of the step
        :type jar: str
        :param jar: S3 URI to the Jar file
        :type main_class: str
        :param main_class: The class to execute in the jar
        :type action_on_failure: str
        :param action_on_failure: An action, defined in the EMR docs to
            take on failure.
        :type step_args: list(str)
        :param step_args: A list of arguments to pass to the step
        """
        self.name = name
        self._jar = jar
        self._main_class = main_class
        self.action_on_failure = action_on_failure

        if isinstance(step_args, six.string_types):
            step_args = [step_args]

        self.step_args = step_args

    def jar(self):
        return self._jar

    def args(self):
        args = []

        if self.step_args:
            args.extend(self.step_args)

        return args

    def main_class(self):
        return self._main_class


class StreamingStep(Step):
    """
    Hadoop streaming step
    """
    def __init__(self, name, mapper, reducer=None, combiner=None,
                 action_on_failure='TERMINATE_JOB_FLOW',
                 cache_files=None, cache_archives=None, step_args=None,
                 input=None, output=None,
                 jar='/home/hadoop/contrib/streaming/hadoop-streaming.jar'):
        """
        A hadoop streaming elastic mapreduce step

        :type name: str
        :param name: The name of the step
        :type mapper: str
        :param mapper: The mapper URI
        :type reducer: str
        :param reducer: The reducer URI
        :type combiner: str
        :param combiner: The combiner URI. Only works for Hadoop 0.20
            and later!
        :type action_on_failure: str
        :param action_on_failure: An action, defined in the EMR docs to
            take on failure.
        :type cache_files: list(str)
        :param cache_files: A list of cache files to be bundled with the job
        :type cache_archives: list(str)
        :param cache_archives: A list of jar archives to be bundled with
            the job
        :type step_args: list(str)
        :param step_args: A list of arguments to pass to the step
        :type input: str or a list of str
        :param input: The input uri
        :type output: str
        :param output: The output uri
        :type jar: str
        :param jar: The hadoop streaming jar. This can be either a local
            path on the master node, or an s3:// URI.
        """
        self.name = name
        self.mapper = mapper
        self.reducer = reducer
        self.combiner = combiner
        self.action_on_failure = action_on_failure
        self.cache_files = cache_files
        self.cache_archives = cache_archives
        self.input = input
        self.output = output
        self._jar = jar

        if isinstance(step_args, six.string_types):
            step_args = [step_args]

        self.step_args = step_args

    def jar(self):
        return self._jar

    def main_class(self):
        return None

    def args(self):
        args = []

        if self.step_args:
            args.extend(self.step_args)

        args.extend(['-mapper', self.mapper])

        if self.combiner:
            args.extend(['-combiner', self.combiner])

        if self.reducer:
            args.extend(['-reducer', self.reducer])
        else:
            args.extend(['-jobconf', 'mapred.reduce.tasks=0'])

        if self.input:
            if isinstance(self.input, list):
                for input in self.input:
                    args.extend(('-input', input))
            else:
                args.extend(('-input', self.input))

        if self.output:
            args.extend(('-output', self.output))

        if self.cache_files:
            for cache_file in self.cache_files:
                args.extend(('-cacheFile', cache_file))

        if self.cache_archives:
            for cache_archive in self.cache_archives:
                args.extend(('-cacheArchive', cache_archive))

        return args

    def __repr__(self):
        return ('%s.%s(name=%r, mapper=%r, reducer=%r, action_on_failure=%r, '
                'cache_files=%r, cache_archives=%r, step_args=%r, input=%r, '
                'output=%r, jar=%r)' % (
                    self.__class__.__module__, self.__class__.__name__,
                    self.name, self.mapper, self.reducer,
                    self.action_on_failure, self.cache_files,
                    self.cache_archives, self.step_args, self.input,
                    self.output, self._jar))


class ScriptRunnerStep(JarStep):

    ScriptRunnerJar = 's3n://us-east-1.elasticmapreduce/libs/script-runner/script-runner.jar'

    def __init__(self, name, **kw):
        super(ScriptRunnerStep, self).__init__(name, self.ScriptRunnerJar, **kw)


class PigBase(ScriptRunnerStep):

    BaseArgs = ['s3n://us-east-1.elasticmapreduce/libs/pig/pig-script',
                '--base-path', 's3n://us-east-1.elasticmapreduce/libs/pig/']


class InstallPigStep(PigBase):
    """
    Install pig on emr step
    """

    InstallPigName = 'Install Pig'

    def __init__(self, pig_versions='latest'):
        step_args = []
        step_args.extend(self.BaseArgs)
        step_args.extend(['--install-pig'])
        step_args.extend(['--pig-versions', pig_versions])
        super(InstallPigStep, self).__init__(self.InstallPigName,
                                             step_args=step_args)


class PigStep(PigBase):
    """
    Pig script step
    """
    def __init__(self, name, pig_file, pig_versions='latest', pig_args=[]):
        step_args = []
        step_args.extend(self.BaseArgs)
        step_args.extend(['--pig-versions', pig_versions])
        step_args.extend(['--run-pig-script', '--args', '-f', pig_file])
        step_args.extend(pig_args)
        super(PigStep, self).__init__(name, step_args=step_args)


class HiveBase(ScriptRunnerStep):

    BaseArgs = ['s3n://us-east-1.elasticmapreduce/libs/hive/hive-script',
                '--base-path', 's3n://us-east-1.elasticmapreduce/libs/hive/']


class InstallHiveStep(HiveBase):
    """
    Install Hive on EMR step
    """
    InstallHiveName = 'Install Hive'

    def __init__(self, hive_versions='latest', hive_site=None):
        step_args = []
        step_args.extend(self.BaseArgs)
        step_args.extend(['--install-hive'])
        step_args.extend(['--hive-versions', hive_versions])
        if hive_site is not None:
            step_args.extend(['--hive-site=%s' % hive_site])
        super(InstallHiveStep, self).__init__(self.InstallHiveName,
                                              step_args=step_args)


class HiveStep(HiveBase):
    """
    Hive script step
    """
    def __init__(self, name, hive_file, hive_versions='latest',
                 hive_args=None):
        step_args = []
        step_args.extend(self.BaseArgs)
        step_args.extend(['--hive-versions', hive_versions])
        step_args.extend(['--run-hive-script', '--args', '-f', hive_file])
        if hive_args is not None:
            step_args.extend(hive_args)
        super(HiveStep, self).__init__(name, step_args=step_args)

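For reference, a minimal usage sketch of the step classes above, assuming boto's EMR connection API (boto.emr.connect_to_region and EmrConnection.run_jobflow). The region, bucket names, and script URIs are placeholders, not values taken from this file.

# Usage sketch: build EMR steps with the classes defined in step.py and
# submit them as a new job flow.  Placeholder region/bucket/script paths.
import boto.emr
from boto.emr.step import StreamingStep, InstallPigStep, PigStep

conn = boto.emr.connect_to_region('us-east-1')

# A Hadoop streaming step: mapper/reducer scripts and input/output in S3.
streaming = StreamingStep(
    name='Example streaming step',
    mapper='s3n://example-bucket/scripts/mapper.py',
    reducer='s3n://example-bucket/scripts/reducer.py',
    input='s3n://example-bucket/input/',
    output='s3n://example-bucket/output/')

# Install Pig on the cluster, then run a Pig script against it.
pig_steps = [
    InstallPigStep(),
    PigStep(name='Example Pig step',
            pig_file='s3n://example-bucket/scripts/report.pig'),
]

# Launch a job flow that runs the steps in order; returns the job flow id.
jobflow_id = conn.run_jobflow(
    name='Example job flow',
    log_uri='s3n://example-bucket/logs/',
    steps=[streaming] + pig_steps)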