{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Time series forecasting with DeepAR - Telecom data" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Introduction" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "Time series forecasasting with DeepAR is a supervised learning algorithm for forecasting scalar time series with Telecom data. This notebook demonstrates how to prepare a dataset of time series for training DeepAR with telecom Call Detail Record(CDR) data, classify Call Disconnect Reason and how to use the trained model for inference. The notebook uses a hybrid approach of Spark ML Random Forest Classifier and DeepAR.\n" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "%config IPCompleter.greedy=True" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "This demonstrates the use of sparkml RandomForestClassifier for classification and feeds as input to DeepAR for Time series Prediction" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "['Start_Time_HH_MM_SS_s_index', 'Called_Number_index', 'Call_Service_Duration_index', 'Accounting_ID_index', 'Calling_Number_index', 'Start_Time_MM_DD_YYYY_index']\n" ] } ], "source": [ "from pyspark.sql.types import *\n", "from pyspark.sql import SparkSession\n", "from sagemaker import get_execution_role\n", "import sagemaker_pyspark\n", "import pandas as pd\n", "import numpy as np\n", "\n", "role = get_execution_role()\n", "\n", "# Configure Spark to use the SageMaker Spark dependency jars\n", "jars = sagemaker_pyspark.classpath_jars()\n", "\n", "classpath = \":\".join(sagemaker_pyspark.classpath_jars())\n", "\n", "spark = SparkSession.builder.config(\"spark.driver.extraClassPath\", classpath)\\\n", " .master(\"local[*]\").getOrCreate()\n", "\n", "def getCdrDataframe():\n", " cdr_start_loc = \"<%CDRStartFile%>\"\n", " cdr_stop_loc = \"<%CDRStopFile%>\"\n", " cdr_start_sample_loc = \"<%CDRStartSampleFile%>\"\n", " cdr_stop_sample_loc = \"<%CDRStopSampleFile%>\"\n", " \n", " df = spark.read.format(\"s3select\").parquet(cdr_stop_sample_loc)\n", " df.createOrReplaceTempView(\"cdr\")\n", " return df\n", "\n", "getCdrDataframe()\n", "\n", "def build_schema():\n", " \"\"\"Build and return a schema to use for the sample data.\"\"\"\n", " schema = StructType(\n", " [\n", " StructField(\"Accounting_ID\", StringType(), True),\n", " StructField(\"Start_Time_MM_DD_YYYY\", StringType(), True),\n", " StructField(\"Start_Time_HH_MM_SS_s\", StringType(), True),\n", " StructField(\"Call_Service_Duration\", StringType(), True),\n", " StructField(\"Call_Disconnect_Reason\", StringType(), True),\n", " StructField(\"Calling_Number\", StringType(), True),\n", " StructField(\"Called_Number\", StringType(), True)\n", " ]\n", " )\n", " return schema\n", "\n", "import matplotlib.pyplot as plt\n", "dataDF = spark.sql(\"SELECT _c2,_c5,_c6,_c13,_c14,_c19,_c20 from cdr where _c0 = 'STOP'\")\n", "dataPanda = dataDF.toPandas()\n", "newDataDF = spark.createDataFrame(dataPanda.dropna(),build_schema())\n", "dataPd = newDataDF.toPandas()\n", "\n", "integerColumns = [\"Call_Service_Duration\" , \"Call_Disconnect_Reason\", \"Calling_Number\", \"Called_Number\"]\n", "for col in integerColumns:\n", " dataPd[col] = dataPd[col].astype(int)\n", " \n", "#Mock Data\n", "def mock_data():\n", " from pyspark.sql.functions import rand,when\n", " addDF = newDataDF\n", " unionDF = addDF.union(newDataDF)\n", " df = 
unionDF.drop('Call_Disconnect_Reason')\n", "    df1 = df.withColumn('Call_Disconnect_Reason', when(rand(seed=1234) > 0.5, 16).otherwise(17))\n", "    return df1\n", "\n", "df1 = mock_data()\n", "\n", "trainingFraction = 0.75\n", "testingFraction = 1 - trainingFraction\n", "seed = 1234\n", "trainData, testData = df1.randomSplit([trainingFraction, testingFraction], seed=seed)\n", "\n", "# Cache the train and test data\n", "trainData.cache()\n", "testData.cache()\n", "trainData.count(), testData.count()\n", "\n", "from pyspark.ml.feature import StringIndexer\n", "\n", "# Index every feature column except the target\n", "columns_list = list(set(newDataDF.columns) - set(['Call_Disconnect_Reason']))\n", "indexers = []\n", "for column in columns_list:\n", "    indexer = StringIndexer(inputCol=column, outputCol=column + \"_index\")\n", "    indexer.setHandleInvalid(\"skip\")\n", "    indexers.append(indexer)\n", "\n", "# Convert the target into numerical categories\n", "labelIndexer = StringIndexer(inputCol=\"Call_Disconnect_Reason\", outputCol=\"label\")\n", "labelIndexer.setHandleInvalid(\"skip\")\n", "\n", "from pyspark.ml.feature import VectorAssembler\n", "\n", "inputcolsIndexer = [col + \"_index\" for col in columns_list]\n", "print(inputcolsIndexer)\n", "\n", "vecAssembler = VectorAssembler(inputCols=inputcolsIndexer, outputCol=\"features\")\n", "\n", "from pyspark.ml.classification import RandomForestClassifier\n", "from pyspark.ml.evaluation import MulticlassClassificationEvaluator\n", "\n", "# Define a RandomForest classifier\n", "rf = RandomForestClassifier(labelCol=\"label\", featuresCol=\"features\", maxDepth=8, maxBins=2400000, numTrees=128, impurity=\"gini\")\n", "\n", "from pyspark.ml.feature import ChiSqSelector\n", "chisqSelector = ChiSqSelector(numTopFeatures=3, featuresCol=\"features\",\n", "                              outputCol=\"selectedFeatures\", labelCol=\"label\")\n", "\n", "from pyspark.ml import Pipeline\n", "# Note: chisqSelector runs after rf, so it only appends a selectedFeatures column to the predictions\n", "stages = []\n", "stages += indexers\n", "stages += [labelIndexer]\n", "stages += [vecAssembler]\n", "stages += [rf]\n", "stages += [chisqSelector]\n", "\n", "pipeline = Pipeline(stages=stages)\n" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(33607, 11219)" ] }, "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ "trainData.count(), testData.count()" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "(3403, 4487)" ] }, "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ "train_sdata = trainData.sample(False, 0.1)\n", "test_sdata = testData.sample(False, 0.4)\n", "train_sdata.count(), test_sdata.count()" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "356" ] }, "execution_count": 5, "metadata": {}, "output_type": "execute_result" } ], "source": [ "%%time\n", "model = pipeline.fit(train_sdata)\n", "predictions = model.transform(test_sdata)\n", "predictions.createOrReplaceTempView(\"predicted_table\")\n", "predictions.count()" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ 
"+------------------+---------------------+---------------------+---------------------+--------------+-------------+----------------------+---------------------------+-------------------+---------------------------+-------------------+--------------------+---------------------------+-----+--------------------+--------------------+--------------------+----------+----------------+\n", "| Accounting_ID|Start_Time_MM_DD_YYYY|Start_Time_HH_MM_SS_s|Call_Service_Duration|Calling_Number|Called_Number|Call_Disconnect_Reason|Start_Time_HH_MM_SS_s_index|Called_Number_index|Call_Service_Duration_index|Accounting_ID_index|Calling_Number_index|Start_Time_MM_DD_YYYY_index|label| features| rawPrediction| probability|prediction|selectedFeatures|\n", "+------------------+---------------------+---------------------+---------------------+--------------+-------------+----------------------+---------------------------+-------------------+---------------------------+-------------------+--------------------+---------------------------+-----+--------------------+--------------------+--------------------+----------+----------------+\n", "|0x00016E0F11780902| 08/10/2018| 12:57:43.1| 5| 9645000099| 3512000099| 16| 1339.0| 8.0| 277.0| 2840.0| 8.0| 0.0| 1.0|[1339.0,8.0,277.0...|[0.47079490632979...|[0.00367808520570...| 1.0| [8.0,277.0,8.0]|\n", "|0x00016E0F1240F35C| 08/10/2018| 12:49:03.1| 135| 9645000072| 3512000072| 16| 38.0| 95.0| 9.0| 2958.0| 93.0| 0.0| 1.0|[38.0,95.0,9.0,29...|[81.7280438800052...|[0.63850034281254...| 0.0| [95.0,9.0,93.0]|\n", "+------------------+---------------------+---------------------+---------------------+--------------+-------------+----------------------+---------------------------+-------------------+---------------------------+-------------------+--------------------+---------------------------+-----+--------------------+--------------------+--------------------+----------+----------------+\n", "only showing top 2 rows\n", "\n" ] } ], "source": [ "predictions.show(2)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "- _Call Disconnect Reason prediction count is computed to classify Normal Call Clearing(16) records and non Normal Call Clearing records as anomalous._" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "pred_sql = spark.sql(\"Select Start_Time_MM_DD_YYYY,Start_Time_HH_MM_SS_s,Call_Disconnect_Reason,prediction, CASE WHEN Call_Disconnect_Reason = 16 AND prediction = 0.0 THEN 0 ELSE 1 END AS anomaly from predicted_table\")\n", "dft = pred_sql.toPandas()" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "data": { "text/html": [ "
\n", " | Start_Time_MM_DD_YYYY | \n", "Start_Time_HH_MM_SS_s | \n", "Call_Disconnect_Reason | \n", "prediction | \n", "anomaly | \n", "
---|---|---|---|---|---|
Date | \n", "\n", " | \n", " | \n", " | \n", " | \n", " |
08/10/2018 11:37:28.1 | \n", "08/10/2018 | \n", "11:37:28.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 11:37:30.1 | \n", "08/10/2018 | \n", "11:37:30.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:37:37.1 | \n", "08/10/2018 | \n", "11:37:37.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 11:38:00.1 | \n", "08/10/2018 | \n", "11:38:00.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 11:38:33.1 | \n", "08/10/2018 | \n", "11:38:33.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:39:33.1 | \n", "08/10/2018 | \n", "11:39:33.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:39:43.1 | \n", "08/10/2018 | \n", "11:39:43.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 11:39:48.1 | \n", "08/10/2018 | \n", "11:39:48.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:41:13.1 | \n", "08/10/2018 | \n", "11:41:13.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:42:17.1 | \n", "08/10/2018 | \n", "11:42:17.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:42:21.1 | \n", "08/10/2018 | \n", "11:42:21.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:42:32.1 | \n", "08/10/2018 | \n", "11:42:32.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:42:38.1 | \n", "08/10/2018 | \n", "11:42:38.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:43:10.1 | \n", "08/10/2018 | \n", "11:43:10.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:43:20.1 | \n", "08/10/2018 | \n", "11:43:20.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 11:43:38.1 | \n", "08/10/2018 | \n", "11:43:38.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:44:35.1 | \n", "08/10/2018 | \n", "11:44:35.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:45:05.1 | \n", "08/10/2018 | \n", "11:45:05.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:45:13.1 | \n", "08/10/2018 | \n", "11:45:13.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:45:33.1 | \n", "08/10/2018 | \n", "11:45:33.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:46:59.1 | \n", "08/10/2018 | \n", "11:46:59.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:48:55.1 | \n", "08/10/2018 | \n", "11:48:55.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:48:56.1 | \n", "08/10/2018 | \n", "11:48:56.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 11:49:00.1 | \n", "08/10/2018 | \n", "11:49:00.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 11:49:07.1 | \n", "08/10/2018 | \n", "11:49:07.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 11:49:08.1 | \n", "08/10/2018 | \n", "11:49:08.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:49:16.1 | \n", "08/10/2018 | \n", "11:49:16.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 11:50:13.1 | \n", "08/10/2018 | \n", "11:50:13.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 11:50:22.1 | \n", "08/10/2018 | \n", "11:50:22.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 11:50:35.1 | \n", "08/10/2018 | \n", "11:50:35.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
... | \n", "... | \n", "... | \n", "... | \n", "... | \n", "... | \n", "
08/10/2018 13:54:28.1 | \n", "08/10/2018 | \n", "13:54:28.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 13:54:33.1 | \n", "08/10/2018 | \n", "13:54:33.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 13:55:19.1 | \n", "08/10/2018 | \n", "13:55:19.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 13:56:27.1 | \n", "08/10/2018 | \n", "13:56:27.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 13:56:36.1 | \n", "08/10/2018 | \n", "13:56:36.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
08/10/2018 13:58:12.1 | \n", "08/10/2018 | \n", "13:58:12.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 13:58:43.1 | \n", "08/10/2018 | \n", "13:58:43.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 13:58:54.1 | \n", "08/10/2018 | \n", "13:58:54.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 13:59:02.1 | \n", "08/10/2018 | \n", "13:59:02.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 13:59:40.1 | \n", "08/10/2018 | \n", "13:59:40.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 13:59:46.1 | \n", "08/10/2018 | \n", "13:59:46.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 13:59:47.1 | \n", "08/10/2018 | \n", "13:59:47.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:00:12.1 | \n", "08/10/2018 | \n", "14:00:12.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:00:32.1 | \n", "08/10/2018 | \n", "14:00:32.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:00:38.1 | \n", "08/10/2018 | \n", "14:00:38.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:01:12.1 | \n", "08/10/2018 | \n", "14:01:12.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 14:01:40.1 | \n", "08/10/2018 | \n", "14:01:40.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
08/10/2018 14:02:04.1 | \n", "08/10/2018 | \n", "14:02:04.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:02:07.1 | \n", "08/10/2018 | \n", "14:02:07.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:02:19.1 | \n", "08/10/2018 | \n", "14:02:19.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:03:02.1 | \n", "08/10/2018 | \n", "14:03:02.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:03:38.1 | \n", "08/10/2018 | \n", "14:03:38.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:04:18.1 | \n", "08/10/2018 | \n", "14:04:18.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:04:36.1 | \n", "08/10/2018 | \n", "14:04:36.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:04:46.1 | \n", "08/10/2018 | \n", "14:04:46.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:04:56.1 | \n", "08/10/2018 | \n", "14:04:56.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:05:34.1 | \n", "08/10/2018 | \n", "14:05:34.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:06:03.1 | \n", "08/10/2018 | \n", "14:06:03.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:06:43.1 | \n", "08/10/2018 | \n", "14:06:43.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
08/10/2018 14:06:53.1 | \n", "08/10/2018 | \n", "14:06:53.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
356 rows × 5 columns
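{ "cell_type": "markdown", "metadata": {}, "source": [ "_The pipeline imports `MulticlassClassificationEvaluator` but never applies it. The cell below is a minimal sketch of how the classifier could be scored on the test predictions; the choice of the `accuracy` metric is illustrative._" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from pyspark.ml.evaluation import MulticlassClassificationEvaluator\n", "\n", "# Sketch: score the Random Forest predictions against the indexed label.\n", "# Other metrics such as 'f1' or 'weightedPrecision' work the same way.\n", "evaluator = MulticlassClassificationEvaluator(labelCol=\"label\", predictionCol=\"prediction\", metricName=\"accuracy\")\n", "print(\"Test accuracy: %g\" % evaluator.evaluate(predictions))" ] },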
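{ "cell_type": "markdown", "metadata": {}, "source": [ "_The tables below index the predictions by a minute-level `Date` timestamp. A sketch of how such an index can be derived from the CDR date and time strings; the parsing format and rounding choice are assumptions, picked to match the values shown below:_" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Sketch: combine the CDR date and fractional-second time strings into one\n", "# timestamp, round it to the nearest minute, and use it as the index.\n", "dft['Date'] = pd.to_datetime(\n", "    dft['Start_Time_MM_DD_YYYY'] + ' ' + dft['Start_Time_HH_MM_SS_s'],\n", "    format='%m/%d/%Y %H:%M:%S.%f').dt.round('min')\n", "dft = dft.set_index('Date')\n", "dft.head(10)" ] },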
\n", "\n", " | Start_Time_MM_DD_YYYY | \n", "Start_Time_HH_MM_SS_s | \n", "Call_Disconnect_Reason | \n", "prediction | \n", "anomaly | \n", "
---|---|---|---|---|---|
Date | \n", "\n", " | \n", " | \n", " | \n", " | \n", " |
2018-08-10 11:37:00 | \n", "08/10/2018 | \n", "11:37:28.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:38:00 | \n", "08/10/2018 | \n", "11:37:30.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:38:00 | \n", "08/10/2018 | \n", "11:37:37.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:38:00 | \n", "08/10/2018 | \n", "11:38:00.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:39:00 | \n", "08/10/2018 | \n", "11:38:33.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:40:00 | \n", "08/10/2018 | \n", "11:39:33.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:40:00 | \n", "08/10/2018 | \n", "11:39:43.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:40:00 | \n", "08/10/2018 | \n", "11:39:48.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:41:00 | \n", "08/10/2018 | \n", "11:41:13.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:42:00 | \n", "08/10/2018 | \n", "11:42:17.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
\n", " | Start_Time_MM_DD_YYYY | \n", "Start_Time_HH_MM_SS_s | \n", "Call_Disconnect_Reason | \n", "prediction | \n", "anomaly | \n", "
---|---|---|---|---|---|
Date | \n", "\n", " | \n", " | \n", " | \n", " | \n", " |
2018-08-10 11:37:00 | \n", "08/10/2018 | \n", "11:37:28.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:38:00 | \n", "08/10/2018 | \n", "11:37:30.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:38:00 | \n", "08/10/2018 | \n", "11:37:37.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:38:00 | \n", "08/10/2018 | \n", "11:38:00.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:39:00 | \n", "08/10/2018 | \n", "11:38:33.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:40:00 | \n", "08/10/2018 | \n", "11:39:33.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:40:00 | \n", "08/10/2018 | \n", "11:39:43.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:40:00 | \n", "08/10/2018 | \n", "11:39:48.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:41:00 | \n", "08/10/2018 | \n", "11:41:13.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:42:00 | \n", "08/10/2018 | \n", "11:42:17.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:42:00 | \n", "08/10/2018 | \n", "11:42:21.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:43:00 | \n", "08/10/2018 | \n", "11:42:32.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:43:00 | \n", "08/10/2018 | \n", "11:42:38.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:43:00 | \n", "08/10/2018 | \n", "11:43:10.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:43:00 | \n", "08/10/2018 | \n", "11:43:20.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:44:00 | \n", "08/10/2018 | \n", "11:43:38.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:45:00 | \n", "08/10/2018 | \n", "11:44:35.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:45:00 | \n", "08/10/2018 | \n", "11:45:05.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:45:00 | \n", "08/10/2018 | \n", "11:45:13.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:46:00 | \n", "08/10/2018 | \n", "11:45:33.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:47:00 | \n", "08/10/2018 | \n", "11:46:59.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:49:00 | \n", "08/10/2018 | \n", "11:48:55.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:49:00 | \n", "08/10/2018 | \n", "11:48:56.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:49:00 | \n", "08/10/2018 | \n", "11:49:00.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:49:00 | \n", "08/10/2018 | \n", "11:49:07.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:49:00 | \n", "08/10/2018 | \n", "11:49:08.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:49:00 | \n", "08/10/2018 | \n", "11:49:16.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 11:50:00 | \n", "08/10/2018 | \n", "11:50:13.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 11:50:00 | \n", "08/10/2018 | \n", "11:50:22.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 11:51:00 | \n", "08/10/2018 | \n", "11:50:35.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
... | \n", "... | \n", "... | \n", "... | \n", "... | \n", "... | \n", "
2018-08-10 13:44:00 | \n", "08/10/2018 | \n", "13:44:08.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:45:00 | \n", "08/10/2018 | \n", "13:44:41.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 13:45:00 | \n", "08/10/2018 | \n", "13:44:59.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 13:45:00 | \n", "08/10/2018 | \n", "13:45:16.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 13:45:00 | \n", "08/10/2018 | \n", "13:45:25.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 13:46:00 | \n", "08/10/2018 | \n", "13:45:31.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:46:00 | \n", "08/10/2018 | \n", "13:46:09.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:46:00 | \n", "08/10/2018 | \n", "13:46:13.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:48:00 | \n", "08/10/2018 | \n", "13:47:59.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:49:00 | \n", "08/10/2018 | \n", "13:48:34.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 13:50:00 | \n", "08/10/2018 | \n", "13:49:32.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:50:00 | \n", "08/10/2018 | \n", "13:49:40.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:50:00 | \n", "08/10/2018 | \n", "13:50:07.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 13:50:00 | \n", "08/10/2018 | \n", "13:50:17.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 13:51:00 | \n", "08/10/2018 | \n", "13:50:30.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:51:00 | \n", "08/10/2018 | \n", "13:50:49.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 13:52:00 | \n", "08/10/2018 | \n", "13:51:44.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:52:00 | \n", "08/10/2018 | \n", "13:51:50.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:53:00 | \n", "08/10/2018 | \n", "13:52:54.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 13:54:00 | \n", "08/10/2018 | \n", "13:53:52.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:54:00 | \n", "08/10/2018 | \n", "13:54:28.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 13:55:00 | \n", "08/10/2018 | \n", "13:54:33.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
2018-08-10 13:55:00 | \n", "08/10/2018 | \n", "13:55:19.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 13:56:00 | \n", "08/10/2018 | \n", "13:56:27.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:57:00 | \n", "08/10/2018 | \n", "13:56:36.1 | \n", "17 | \n", "0.0 | \n", "1 | \n", "
2018-08-10 13:58:00 | \n", "08/10/2018 | \n", "13:58:12.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 13:59:00 | \n", "08/10/2018 | \n", "13:58:43.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 13:59:00 | \n", "08/10/2018 | \n", "13:58:54.1 | \n", "17 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 13:59:00 | \n", "08/10/2018 | \n", "13:59:02.1 | \n", "16 | \n", "1.0 | \n", "1 | \n", "
2018-08-10 14:00:00 | \n", "08/10/2018 | \n", "13:59:40.1 | \n", "16 | \n", "0.0 | \n", "0 | \n", "
336 rows × 5 columns
\n", "