# ezsmdeploy/__init__.py
# Readable reconstruction of this module from the garbled source dump. Method
# bodies that could not be recovered verbatim are summarized in docstrings and
# elided with `...`; reconstructed signatures and defaults are approximate.

import ast
import csv
import datetime
import errno
import glob
import json
import os
import pickle
import shutil
import subprocess
import tarfile
import time

import boto3
import pkg_resources
import sagemaker
import shortuuid
from sagemaker.model import Model
from sagemaker.multidatamodel import MultiDataModel
from yaspin import yaspin
from yaspin.spinners import Spinners


class Deploy(object):
    """Package one or more serialized models and deploy them to a SageMaker endpoint.

    The constructor validates its inputs and then calls :meth:`deploy`, which
    tars and uploads the model(s), builds a serving container, chooses an
    instance type against a cost budget, creates the SageMaker model and
    endpoint, and optionally configures autoscaling and data capture.
    """

    def __init__(
        self,
        model,
        script,
        framework=None,
        requirements=None,
        name=None,
        autoscale=False,
        autoscaletarget=1000,
        wait=True,
        bucket=None,
        prefix="",
        session=None,
        image=None,
        dockerfilepath=None,
        instance_type=None,
        instance_count=1,
        budget=100,
        ei=None,
        monitor=False,
    ):
        """Validate inputs, derive defaults, and launch the deployment.

        * ``framework`` must be one of 'tensorflow', 'pytorch', 'mxnet' or
          'sklearn' when ``requirements`` is not supplied.
        * ``model`` may be a single serialized file (like 'model.pkl'), a list
          of files (a multi-model endpoint), or ``None`` if the script or the
          container downloads/packages the model itself.
        * ``script`` must be a ``.py`` file that implements
          ``load_model(modelpath)`` returning a loaded model and a
          ``predict(inputdata)`` function returning a prediction; this is
          verified by parsing the file with ``ast``.
        * ``instance_type`` may be ``None`` (auto-selection against per-hour
          costs read from ``data/cost.csv``), 'local' / 'local_gpu' for local
          testing, or any instance type listed in the cost table; anything
          else raises ``ValueError``.
        * ``name`` must be lower case (it is used in S3 prefixes, model names
          and ECR repository names, which have various restrictions); if
          omitted, a lower-case shortuuid is generated.
        * ``bucket`` defaults to the session's default SageMaker bucket.

        The constructor ends by calling ``self.deploy()``.
        """
        ...

    def process_instance_types(self):
        """Load ``data/instancetypes.csv`` into ``self.instancedict``.

        For a concrete ``instance_type`` this records its per-hour cost in
        ``self.costperhour`` (adding the Elastic Inference accelerator cost
        for ``ml.eia*`` accelerators when ``ei`` is set); for
        ``instance_type=None`` it falls back to :meth:`choose_instance_type`.
        """
        ...

    def choose_instance_type(self):
        """Pick the cheapest instance that fits the models within the budget.

        Computes the total size of the uploaded model artifacts with
        :meth:`get_size` and selects the lowest-cost instance whose memory per
        worker can hold them while staying under ``self.budget`` per hour.
        Raises ``ValueError`` if no instance satisfies the budget, or, for
        very large models, advises choosing a high-memory GPU instance and
        launching without multiple models (if applicable).
        """
        ...

    def add_model(self, s3path, relativepath):
        """Register an additional model artifact with the multi-data model."""
        self.sagemakermodel.add_model(s3path, relativepath)

    def create_model(self):
        """Create the SageMaker ``Model`` or ``MultiDataModel``.

        A single model uses ``sagemaker.model.Model`` with the built or
        supplied container image; a list of models uses ``MultiDataModel``,
        with each tarball added under the ``serving/`` prefix via
        :meth:`add_model`.
        """
        ...

    def deploy_model(self):
        """Deploy to an endpoint named ``ezsmdeploy-endpoint-<name>``.

        When ``monitor=True`` a ``DataCaptureConfig`` is attached that
        captures 100% of requests to
        ``s3://<bucket>/[<prefix>/]ezsmdeploy/model-<name>/datacapture``.
        """
        ...

    def get_size(self, bucket, path):
        """Return the total size, in GB, of all S3 objects under a prefix."""
        s3 = boto3.resource("s3")
        my_bucket = s3.Bucket(bucket)
        total_size = 0.0
        for obj in my_bucket.objects.filter(Prefix=path):
            total_size += obj.size
        return total_size / 1e9

    def upload_model(self):
        """Upload each ``model<i>.tar.gz`` under
        ``s3://<bucket>/[<prefix>/]ezsmdeploy/model-<name>/`` and collect the
        returned S3 URIs in ``self.modelpath``."""
        ...

    def tar_model(self):
        """Produce ``model<i>.tar.gz`` for every model passed in.

        Existing ``.tar.gz`` artifacts (local or ``s3://``) are downloaded,
        extracted to ``extractedmodel/<i>/`` and re-packed; plain files are
        copied into a fresh tarball.
        """
        ...

    def makedir_safe(self, directory):
        """Recreate ``directory``, removing any previous contents."""
        try:
            shutil.rmtree(directory)
        except Exception:
            pass
        try:
            if not os.path.exists(directory):
                os.makedirs(directory)
        except OSError as err:
            if err.errno != errno.EEXIST:
                print(err.errno)

    def handle_requirements(self):
        """Materialize ``src/requirements.txt`` from a path or a list."""
        self.makedir_safe("src")
        if type(self.requirements) == str:
            if os.path.exists(self.requirements):
                shutil.copy(self.requirements, "src/requirements.txt")
            else:
                raise ValueError(self.requirements + " does not exist!")
        elif type(self.requirements) == list:
            with open("src/requirements.txt", "w") as f:
                f.writelines(map(lambda x: x + "\n", self.requirements))
        else:
            raise ValueError(
                "pass in a path/to/requirements.txt or a list of "
                "requirements ['scikit-learn',...,...]"
            )

    def build_docker(self):
        """Build and push the serving container with ``src/build-docker.sh``.

        The build runs in the background with output streamed to
        ``src/dockeroutput.txt``; the method polls for ``src/done.txt`` and
        sets ``self.image`` to
        ``<account>.dkr.ecr.<region>.amazonaws.com/ezsmdeploy-image-<name>``.
        """
        ...

    def autoscale_endpoint(self):
        """Enable autoscaling for the deployed endpoint variant.

        Registers ``endpoint/<name>/variant/<variant>`` as a scalable target
        for ``sagemaker:variant:DesiredInstanceCount`` and attaches a
        target-tracking policy on ``SageMakerVariantInvocationsPerInstance``
        with ``self.target`` (the ``autoscaletarget`` argument) as the target
        value.
        """
        ...

    def test(self, input_data, target_model=None, usercount=10, hatchrate=5,
             timeoutsecs=60):  # load-test defaults reconstructed approximately
        """Load test a deployed endpoint with Locust.

        Copies ``data/smlocust.py`` into ``src/``, pickles ``input_data`` to
        ``src/testdata.p`` and runs Locust headless; results are written to
        the ``src/locuststats*`` files. Multi-model endpoints must pass a
        ``target_model``. Raises ``ValueError`` if the model has not been
        deployed yet.
        """
        ...

    def deploy(self):
        """Run the end-to-end deployment and return a ``Predictor``.

        With a yaspin progress spinner, this tars and uploads the model(s),
        writes ``src/requirements.txt``, the serving sources and
        ``transformscript.py``, picks ``data/Dockerfile`` (multi-model) or
        ``data/Dockerfile_flask`` (single model), builds the container unless
        an ``image`` was supplied, sets Elastic Inference to ``None`` on GPU
        instances, chooses the instance type, creates and deploys the model,
        optionally enables autoscaling (skipped for local mode), reports the
        estimated cost per hour and the data-capture location, cleans up
        temporary files (``tmpmodel``, ``downloads``, ``extractedmodel``), and
        returns ``self.predictor``.
        """
        ...
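
# ---------------------------------------------------------------------------
# Usage sketch (illustrative, not part of the library): how Deploy is
# typically driven. File names, the requirements list and the test payload
# are placeholders, and the keyword arguments follow the reconstructed
# signature above rather than a verified release.
# ---------------------------------------------------------------------------
if __name__ == "__main__":  # pragma: no cover
    # modelscript.py must define load_model(modelpath) and predict(inputdata),
    # as checked in Deploy.__init__.
    ez = Deploy(
        model="model.pkl",                       # or a list of models, or None
        script="modelscript.py",
        requirements=["scikit-learn", "numpy"],  # or a path to requirements.txt
        instance_type=None,                      # auto-select against the cost budget
        autoscale=True,
        monitor=True,
    )

    # The constructor deploys immediately; the returned object exposes the
    # endpoint name, the SageMaker predictor and a Locust-based load test
    # (attribute names per the reconstruction above).
    print(ez.endpoint_name)
    ez.test(
        input_data=b"<serialized test payload>",  # placeholder payload
        usercount=10,
        hatchrate=5,
        timeoutsecs=60,
    )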