{ "cells": [ { "cell_type": "markdown", "id": "9b262721-9ba7-40c2-acc4-96c41cf9230a", "metadata": {}, "source": [ "# Dolly v2 SageMaker Finetuning\n", "\n", "This is a sample code to finetune and deploy Dolly v2 with LoRA on SageMaker." ] }, { "cell_type": "code", "execution_count": null, "id": "32904a97-bedb-451a-b2cc-ac691f7708de", "metadata": { "scrolled": true, "tags": [] }, "outputs": [], "source": [ "!pip install -U \"sagemaker>=2.143.0\"" ] }, { "cell_type": "code", "execution_count": null, "id": "2edc8a17-9a6b-4dd6-823e-878f6b950ea2", "metadata": { "tags": [] }, "outputs": [], "source": [ "import sagemaker, boto3, json\n", "from sagemaker import get_execution_role\n", "from sagemaker.pytorch.model import PyTorchModel\n", "from sagemaker.huggingface import HuggingFace\n", "\n", "role = get_execution_role()\n", "region = boto3.Session().region_name\n", "sess = sagemaker.Session()\n", "bucket = sess.default_bucket()\n", "\n", "sagemaker.__version__" ] }, { "cell_type": "markdown", "id": "bc1b5fe7-9ed9-4d24-b344-a40a781a24a1", "metadata": {}, "source": [ "## Upload Data\n", "\n", "We will use Databricks-dolly-15k as sample dataset to finetune the model. (License: [Creative Commons Attribution-ShareAlike 3.0 Unported License](https://creativecommons.org/licenses/by-sa/3.0/legalcode))\n", "\n", "You may also choose to use custom dataset." 
] }, { "cell_type": "code", "execution_count": null, "id": "2ef68da3-a2d6-4f18-9803-d87f4687c9dd", "metadata": { "scrolled": true, "tags": [] }, "outputs": [], "source": [ "!curl -L https://huggingface.co/datasets/databricks/databricks-dolly-15k/resolve/main/databricks-dolly-15k.jsonl --create-dirs -o data/databricks-dolly-15k.jsonl" ] }, { "cell_type": "code", "execution_count": null, "id": "ab6171f8-8e4b-4711-9c19-0681a6b953fb", "metadata": { "tags": [] }, "outputs": [], "source": [ "!head -n 2 data/databricks-dolly-15k.jsonl" ] }, { "cell_type": "code", "execution_count": null, "id": "ace3ac7d-d1c2-4872-90c9-34f844f70f6e", "metadata": { "tags": [] }, "outputs": [], "source": [ "# Convet .jsonl to .json\n", "import pandas as pd\n", "df = pd.read_json('data/databricks-dolly-15k.jsonl', orient='records', lines=True)\n", "df = df.rename(columns={\"context\": \"input\", \"response\": \"output\"})\n", "df.to_json(\"data/databricks-dolly-15k.json\", orient='records')" ] }, { "cell_type": "code", "execution_count": null, "id": "014e57f3-9435-490c-b9d9-0fa5a6a8a6a9", "metadata": { "tags": [] }, "outputs": [], "source": [ "input_train = sess.upload_data(\n", " path=\"./data/databricks-dolly-15k.json\",\n", " key_prefix=\"Dolly\"\n", ")\n", "input_train" ] }, { "cell_type": "markdown", "id": "07b5098b-1ea9-4b85-88ee-9297d5e00c09", "metadata": {}, "source": [ "## Fine-tuning" ] }, { "cell_type": "code", "execution_count": null, "id": "1cb98557-3a3f-40ef-baf3-538c4ee4aa8a", "metadata": { "tags": [] }, "outputs": [], "source": [ "hyperparameters={\n", " 'base_model':'databricks/dolly-v2-3b',\n", " 'load_in_8bit': False,\n", " 'data_path': '/opt/ml/input/data/train/databricks-dolly-15k.json',\n", " 'num_epochs': 3, # default 3\n", " 'cutoff_len': 512,\n", " 'group_by_length': True,\n", " 'output_dir': '/opt/ml/model',\n", " 'lora_target_modules': '[query_key_value]',\n", " 'lora_r': 16,\n", " 'batch_size': 32,\n", " 'micro_batch_size': 4,\n", " 'prompt_template_name': 
'alpaca',\n", "}" ] }, { "cell_type": "code", "execution_count": null, "id": "e4a8d836-d44f-4bdc-8d87-cff90f9f6ea7", "metadata": { "scrolled": true, "tags": [] }, "outputs": [], "source": [ "huggingface_estimator = HuggingFace(\n", " base_job_name=\"Dolly-v2\",\n", " role=role,\n", " entry_point='finetune.py',\n", " source_dir='./scripts/code',\n", " instance_type='ml.g5.2xlarge',\n", " instance_count=1,\n", " volume_size=200,\n", " transformers_version='4.26',\n", " pytorch_version='1.13',\n", " py_version='py39',\n", " use_spot_instances=True,\n", " max_wait=86400,\n", " hyperparameters=hyperparameters,\n", ")\n", "huggingface_estimator.fit({'train': input_train})" ] }, { "cell_type": "markdown", "id": "e8df141b-86f8-4eff-8ba5-129b78d3efd4", "metadata": { "tags": [] }, "source": [ "## Download and Extract Model" ] }, { "cell_type": "code", "execution_count": null, "id": "a02eb6aa-b4c3-4d60-a3fc-94f394df7ac1", "metadata": { "tags": [] }, "outputs": [], "source": [ "import boto3\n", "import sagemaker\n", "\n", "def get_latest_training_job_artifact(base_job_name):\n", " sagemaker_client = boto3.client('sagemaker')\n", " response = sagemaker_client.list_training_jobs(NameContains=base_job_name, SortBy='CreationTime', SortOrder='Descending')\n", " training_job_arn = response['TrainingJobSummaries'][0]['TrainingJobArn']\n", " training_job_description = sagemaker_client.describe_training_job(TrainingJobName=training_job_arn.split('/')[-1])\n", " return training_job_description['ModelArtifacts']['S3ModelArtifacts']\n", "\n", "try:\n", " model_data = huggingface_estimator.model_data\n", "except NameError:\n", " # Estimator variable is gone after a kernel restart; look up the latest job instead.\n", " # Catch NameError only so real failures (e.g. incomplete training job) surface.\n", " model_data = get_latest_training_job_artifact('Dolly-v2')\n", " \n", "!aws s3 cp {model_data} dolly-v2.tar.gz" ] }, { "cell_type": "code", "execution_count": null, "id": "8e23bbc7-6038-4dfd-a632-21b38be59e78", "metadata": { "tags": [] }, "outputs": [], "source": [ "!rm -rf scripts/model && mkdir scripts/model\n", "!tar 
-xvf dolly-v2.tar.gz -C scripts/model --no-same-owner --wildcards adapter_*" ] }, { "cell_type": "markdown", "id": "44bf1cde-57d4-43f6-be6b-b09bd942219f", "metadata": {}, "source": [ "## Package and Upload Model" ] }, { "cell_type": "code", "execution_count": null, "id": "916cee87-d530-4e67-8fa6-18550ff92532", "metadata": { "scrolled": true, "tags": [] }, "outputs": [], "source": [ "%cd scripts\n", "!tar -czvf ../package.tar.gz *\n", "%cd -" ] }, { "cell_type": "code", "execution_count": null, "id": "e252d1ba-c402-4352-a0f5-c59dc51d6fc9", "metadata": { "tags": [] }, "outputs": [], "source": [ "model_path = sess.upload_data('package.tar.gz', bucket=bucket, key_prefix=\"Dolly-v2\")\n", "model_path" ] }, { "cell_type": "markdown", "id": "cee0a96e-b185-4e2f-a8dd-75cbb086abfa", "metadata": {}, "source": [ "## Deploy Model" ] }, { "cell_type": "code", "execution_count": null, "id": "e8232665-f10e-4a74-84d6-d5c87640d1f2", "metadata": { "tags": [] }, "outputs": [], "source": [ "from sagemaker.async_inference import AsyncInferenceConfig\n", "from sagemaker.serializers import JSONSerializer\n", "\n", "endpoint_name = \"Dolly-v2\"\n", "\n", "huggingface_model = PyTorchModel(\n", " model_data=model_path,\n", " framework_version=\"1.13\",\n", " py_version='py39',\n", " role=role,\n", " name=endpoint_name,\n", " env={\n", " \"model_params\": json.dumps({\n", " \"base_model\": \"databricks/dolly-v2-3b\",\n", " \"lora_weights\": \"model\", # path relative to model package\n", " \"peft\": True,\n", " \"load_8bit\": True,\n", " \"prompt_template\": \"alpaca\",\n", " })\n", " }\n", ")\n", "\n", "# deploy model to SageMaker Inference\n", "predictor = huggingface_model.deploy(\n", " initial_instance_count=1,\n", " instance_type='ml.g5.2xlarge',\n", " endpoint_name=endpoint_name,\n", " serializer=JSONSerializer(),\n", " async_inference_config=AsyncInferenceConfig()\n", ")" ] }, { "cell_type": "markdown", "id": "fb4f6d9f-666d-40d6-b2c3-93ec553dded1", "metadata": {}, "source": [ "## Run 
Inference" ] }, { "cell_type": "code", "execution_count": null, "id": "be982e42-166c-47fb-bc35-cc20ee6a5c4e", "metadata": { "tags": [] }, "outputs": [], "source": [ "from sagemaker.predictor import Predictor\n", "from sagemaker.predictor_async import AsyncPredictor\n", "from sagemaker.serializers import JSONSerializer\n", "from sagemaker.deserializers import NumpyDeserializer\n", "\n", "predictor_client = AsyncPredictor(\n", " predictor=Predictor(\n", " endpoint_name=endpoint_name,\n", " sagemaker_session=sess,\n", " serializer=JSONSerializer(),\n", " deserializer=NumpyDeserializer()\n", " ),\n", " name=endpoint_name\n", ")\n", "data = {\n", " \"instruction\": \"When was George Washington president?\",\n", " \"input\": \"\"\"George Washington (February 22, 1732[b] – December 14, 1799) was an American military officer, statesman,\n", "and Founding Father who served as the first president of the United States from 1789 to 1797.\n", "\"\"\",\n", " \"max_new_tokens\": 64,\n", " \"temperature\": 0.7,\n", " \"do_sample\": True,\n", " \"stop_ids\": [50278, 50279, 50277, 1, 0],\n", "}\n", "response = predictor_client.predict(\n", " data=data\n", ")\n", "print(response)" ] }, { "cell_type": "markdown", "id": "39737593-7053-4dbb-b39b-112e27d228a1", "metadata": {}, "source": [ "## Delete Endpoint" ] }, { "cell_type": "code", "execution_count": null, "id": "82559f16-10f8-4573-91d7-e0b85cb7d0de", "metadata": { "tags": [] }, "outputs": [], "source": [ "predictor.delete_model()\n", "predictor.delete_endpoint()" ] }, { "cell_type": "code", "execution_count": null, "id": "c75d2062-a88e-4c2f-97e9-8778ee231d29", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "availableInstances": [ { "_defaultOrder": 0, "_isFastLaunch": true, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 4, "name": "ml.t3.medium", "vcpuNum": 2 }, { "_defaultOrder": 1, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, 
"memoryGiB": 8, "name": "ml.t3.large", "vcpuNum": 2 }, { "_defaultOrder": 2, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 16, "name": "ml.t3.xlarge", "vcpuNum": 4 }, { "_defaultOrder": 3, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 32, "name": "ml.t3.2xlarge", "vcpuNum": 8 }, { "_defaultOrder": 4, "_isFastLaunch": true, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 8, "name": "ml.m5.large", "vcpuNum": 2 }, { "_defaultOrder": 5, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 16, "name": "ml.m5.xlarge", "vcpuNum": 4 }, { "_defaultOrder": 6, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 32, "name": "ml.m5.2xlarge", "vcpuNum": 8 }, { "_defaultOrder": 7, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 64, "name": "ml.m5.4xlarge", "vcpuNum": 16 }, { "_defaultOrder": 8, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 128, "name": "ml.m5.8xlarge", "vcpuNum": 32 }, { "_defaultOrder": 9, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 192, "name": "ml.m5.12xlarge", "vcpuNum": 48 }, { "_defaultOrder": 10, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 256, "name": "ml.m5.16xlarge", "vcpuNum": 64 }, { "_defaultOrder": 11, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 384, "name": "ml.m5.24xlarge", "vcpuNum": 96 }, { "_defaultOrder": 12, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 8, "name": "ml.m5d.large", "vcpuNum": 2 }, { 
"_defaultOrder": 13, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 16, "name": "ml.m5d.xlarge", "vcpuNum": 4 }, { "_defaultOrder": 14, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 32, "name": "ml.m5d.2xlarge", "vcpuNum": 8 }, { "_defaultOrder": 15, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 64, "name": "ml.m5d.4xlarge", "vcpuNum": 16 }, { "_defaultOrder": 16, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 128, "name": "ml.m5d.8xlarge", "vcpuNum": 32 }, { "_defaultOrder": 17, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 192, "name": "ml.m5d.12xlarge", "vcpuNum": 48 }, { "_defaultOrder": 18, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 256, "name": "ml.m5d.16xlarge", "vcpuNum": 64 }, { "_defaultOrder": 19, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 384, "name": "ml.m5d.24xlarge", "vcpuNum": 96 }, { "_defaultOrder": 20, "_isFastLaunch": false, "category": "General purpose", "gpuNum": 0, "hideHardwareSpecs": true, "memoryGiB": 0, "name": "ml.geospatial.interactive", "supportedImageNames": [ "sagemaker-geospatial-v1-0" ], "vcpuNum": 0 }, { "_defaultOrder": 21, "_isFastLaunch": true, "category": "Compute optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 4, "name": "ml.c5.large", "vcpuNum": 2 }, { "_defaultOrder": 22, "_isFastLaunch": false, "category": "Compute optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 8, "name": "ml.c5.xlarge", "vcpuNum": 4 }, { "_defaultOrder": 23, "_isFastLaunch": false, "category": "Compute optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 16, "name": 
"ml.c5.2xlarge", "vcpuNum": 8 }, { "_defaultOrder": 24, "_isFastLaunch": false, "category": "Compute optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 32, "name": "ml.c5.4xlarge", "vcpuNum": 16 }, { "_defaultOrder": 25, "_isFastLaunch": false, "category": "Compute optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 72, "name": "ml.c5.9xlarge", "vcpuNum": 36 }, { "_defaultOrder": 26, "_isFastLaunch": false, "category": "Compute optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 96, "name": "ml.c5.12xlarge", "vcpuNum": 48 }, { "_defaultOrder": 27, "_isFastLaunch": false, "category": "Compute optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 144, "name": "ml.c5.18xlarge", "vcpuNum": 72 }, { "_defaultOrder": 28, "_isFastLaunch": false, "category": "Compute optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 192, "name": "ml.c5.24xlarge", "vcpuNum": 96 }, { "_defaultOrder": 29, "_isFastLaunch": true, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 16, "name": "ml.g4dn.xlarge", "vcpuNum": 4 }, { "_defaultOrder": 30, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 32, "name": "ml.g4dn.2xlarge", "vcpuNum": 8 }, { "_defaultOrder": 31, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 64, "name": "ml.g4dn.4xlarge", "vcpuNum": 16 }, { "_defaultOrder": 32, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 128, "name": "ml.g4dn.8xlarge", "vcpuNum": 32 }, { "_defaultOrder": 33, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 4, "hideHardwareSpecs": false, "memoryGiB": 192, "name": "ml.g4dn.12xlarge", "vcpuNum": 48 }, { "_defaultOrder": 34, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, 
"memoryGiB": 256, "name": "ml.g4dn.16xlarge", "vcpuNum": 64 }, { "_defaultOrder": 35, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 61, "name": "ml.p3.2xlarge", "vcpuNum": 8 }, { "_defaultOrder": 36, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 4, "hideHardwareSpecs": false, "memoryGiB": 244, "name": "ml.p3.8xlarge", "vcpuNum": 32 }, { "_defaultOrder": 37, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 8, "hideHardwareSpecs": false, "memoryGiB": 488, "name": "ml.p3.16xlarge", "vcpuNum": 64 }, { "_defaultOrder": 38, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 8, "hideHardwareSpecs": false, "memoryGiB": 768, "name": "ml.p3dn.24xlarge", "vcpuNum": 96 }, { "_defaultOrder": 39, "_isFastLaunch": false, "category": "Memory Optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 16, "name": "ml.r5.large", "vcpuNum": 2 }, { "_defaultOrder": 40, "_isFastLaunch": false, "category": "Memory Optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 32, "name": "ml.r5.xlarge", "vcpuNum": 4 }, { "_defaultOrder": 41, "_isFastLaunch": false, "category": "Memory Optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 64, "name": "ml.r5.2xlarge", "vcpuNum": 8 }, { "_defaultOrder": 42, "_isFastLaunch": false, "category": "Memory Optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 128, "name": "ml.r5.4xlarge", "vcpuNum": 16 }, { "_defaultOrder": 43, "_isFastLaunch": false, "category": "Memory Optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 256, "name": "ml.r5.8xlarge", "vcpuNum": 32 }, { "_defaultOrder": 44, "_isFastLaunch": false, "category": "Memory Optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 384, "name": "ml.r5.12xlarge", "vcpuNum": 48 }, { "_defaultOrder": 45, "_isFastLaunch": false, "category": "Memory Optimized", "gpuNum": 0, "hideHardwareSpecs": false, 
"memoryGiB": 512, "name": "ml.r5.16xlarge", "vcpuNum": 64 }, { "_defaultOrder": 46, "_isFastLaunch": false, "category": "Memory Optimized", "gpuNum": 0, "hideHardwareSpecs": false, "memoryGiB": 768, "name": "ml.r5.24xlarge", "vcpuNum": 96 }, { "_defaultOrder": 47, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 16, "name": "ml.g5.xlarge", "vcpuNum": 4 }, { "_defaultOrder": 48, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 32, "name": "ml.g5.2xlarge", "vcpuNum": 8 }, { "_defaultOrder": 49, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 64, "name": "ml.g5.4xlarge", "vcpuNum": 16 }, { "_defaultOrder": 50, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 128, "name": "ml.g5.8xlarge", "vcpuNum": 32 }, { "_defaultOrder": 51, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 1, "hideHardwareSpecs": false, "memoryGiB": 256, "name": "ml.g5.16xlarge", "vcpuNum": 64 }, { "_defaultOrder": 52, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 4, "hideHardwareSpecs": false, "memoryGiB": 192, "name": "ml.g5.12xlarge", "vcpuNum": 48 }, { "_defaultOrder": 53, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 4, "hideHardwareSpecs": false, "memoryGiB": 384, "name": "ml.g5.24xlarge", "vcpuNum": 96 }, { "_defaultOrder": 54, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 8, "hideHardwareSpecs": false, "memoryGiB": 768, "name": "ml.g5.48xlarge", "vcpuNum": 192 }, { "_defaultOrder": 55, "_isFastLaunch": false, "category": "Accelerated computing", "gpuNum": 8, "hideHardwareSpecs": false, "memoryGiB": 1152, "name": "ml.p4d.24xlarge", "vcpuNum": 96 }, { "_defaultOrder": 56, "_isFastLaunch": false, "category": "Accelerated computing", 
"gpuNum": 8, "hideHardwareSpecs": false, "memoryGiB": 1152, "name": "ml.p4de.24xlarge", "vcpuNum": 96 } ], "instance_type": "ml.t3.medium", "kernelspec": { "display_name": "Python 3 (Data Science)", "language": "python", "name": "python3__SAGEMAKER_INTERNAL__arn:aws:sagemaker:us-west-2:236514542706:image/datascience-1.0" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.7.10" } }, "nbformat": 4, "nbformat_minor": 5 }