From 050a2b6ab1dc6cac182a6f8b53a3624b4ac5e873 Mon Sep 17 00:00:00 2001 From: Zhipeng Zhao Date: Wed, 31 Jul 2019 11:16:22 -0700 Subject: [PATCH] Adding Anomaly Lookup notebook --- Notebooks/Get Started.ipynb | 16 +- ...uided Investigation - Anomaly Lookup.ipynb | 118 +++++++++ Notebooks/HowTos/CSharpMagic.ipynb | 85 ------ .../IntegratedWithGitHubAndLogAnalytics.ipynb | 107 -------- .../HowTos/ManageAzureSentinelBookmarks.ipynb | 242 ------------------ Notebooks/HowTos/PowerShell.ipynb | 82 ------ ...ningDSVM.ipynb => Provisioning DSVM.ipynb} | 174 ++++++++----- .../SentinelAnomalyLookup/anomaly_finder.py | 2 +- Notebooks/SentinelUtilities/setup.py | 2 +- 9 files changed, 240 insertions(+), 588 deletions(-) create mode 100644 Notebooks/Guided Investigation - Anomaly Lookup.ipynb delete mode 100644 Notebooks/HowTos/CSharpMagic.ipynb delete mode 100644 Notebooks/HowTos/IntegratedWithGitHubAndLogAnalytics.ipynb delete mode 100644 Notebooks/HowTos/ManageAzureSentinelBookmarks.ipynb delete mode 100644 Notebooks/HowTos/PowerShell.ipynb rename Notebooks/HowTos/{ProvisioningDSVM.ipynb => Provisioning DSVM.ipynb} (61%) diff --git a/Notebooks/Get Started.ipynb b/Notebooks/Get Started.ipynb index 0108b1dc90b..04be78daf25 100644 --- a/Notebooks/Get Started.ipynb +++ b/Notebooks/Get Started.ipynb @@ -17,7 +17,7 @@ "trusted": true }, "cell_type": "code", - "source": "# only run once, current version 0.1.2\n!pip install --upgrade Sentinel-Utilities", + "source": "# only run once\n!pip install --upgrade Azure-Sentinel-Utilities", "execution_count": null, "outputs": [] }, @@ -208,16 +208,16 @@ "language": "python" }, "language_info": { - "pygments_lexer": "ipython3", + "mimetype": "text/x-python", + "nbconvert_exporter": "python", "name": "python", + "pygments_lexer": "ipython3", + "version": "3.6.6", "file_extension": ".py", "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "nbconvert_exporter": "python", - "mimetype": "text/x-python", - "version": "3.6.7" + 
"version": 3, + "name": "ipython" + } }, "toc": { "base_numbering": 1, diff --git a/Notebooks/Guided Investigation - Anomaly Lookup.ipynb b/Notebooks/Guided Investigation - Anomaly Lookup.ipynb new file mode 100644 index 00000000000..5ad6530156f --- /dev/null +++ b/Notebooks/Guided Investigation - Anomaly Lookup.ipynb @@ -0,0 +1,118 @@ +{ + "cells": [ + { + "metadata": {}, + "cell_type": "markdown", + "source": "# Guided Investigation - Anomaly Lookup\n\n__Notebook Version:__ 1.0
\n__Python Version:__ Python 3.6 (including Python 3.6 - AzureML)
\n__Required Packages:__ azure 4.0.0, azure-cli-profile 2.1.4
\n__Platforms Supported:__
\n - Azure Notebooks Free Compute\n - Azure Notebook on DSVM\n \n__Data Source Required:__
\n - Log Analytics tables \n \n### Description\nGain insights into the possible root cause of an alert by searching for related anomalies on the corresponding entities around the alert’s time. This notebook will provide valuable leads for an alert’s investigation, listing all suspicious increase in event counts or their properties around the time of the alert, and linking to the corresponding raw records in Log Analytics for the investigator to focus on and interpret.\n\nWhen you switch between Azure Notebooks Free Compute and Data Science Virtual Machine (DSVM), you may need to select Python version: please select Python 3.6 for Free Compute, and Python 3.6 - AzureML for DSVM." + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": "## Table of Contents\n\n1. Initialize Azure Resource Management Clients\n2. Looking up for anomaly entities" + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": "## 1. Initialize Azure Resource Management Clients" + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "# only run once\n!pip install --upgrade Azure-Sentinel-Utilities", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "# User Input and Save to Environmental store\nfrom azure.mgmt.loganalytics import LogAnalyticsManagementClient\nimport SentinelAzure\nfrom SentinelWidgets import *\nfrom SentinelAnomalyLookup import *\n\nfrom pandas.io.json import json_normalize\nfrom azure.loganalytics import LogAnalyticsDataClient\nfrom azure.loganalytics.models import QueryBody\nimport sys\nimport timeit\nimport datetime as dt\nimport pandas as pd\nimport copy\nfrom IPython.display import HTML\n\n# User Input and Save to Environmental store\n# Set to True if you want to reset the environmental values\nimport os\nfrom SentinelWidgets import *\nreset_env_vars = False\nenv_dir = %env\nenv_list = ['tenant_domain', 'subscription_id', 'resource_group']\nenvs = 
{env_list[0]:'', env_list[1]:'', env_list[2]:''}\nenvs = WidgetViewHelper.set_env(reset_env_vars, env_dir, envs)\ntenant_domain = envs[env_list[0]]\nsubscription_id = envs[env_list[1]]\nresource_group = envs[env_list[2]]", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "# Authentication to Log Analytics\nfrom azure.common.client_factory import get_client_from_cli_profile\nfrom azure.common.credentials import get_azure_cli_credentials\n# please enter your tenant domain below, for Microsoft, using: microsoft.onmicrosoft.com\n!az login --tenant $tenant_domain\nla_client = get_client_from_cli_profile(LogAnalyticsManagementClient, subscription_id = subscription_id)\nla = SentinelAzure.azure_loganalytics_helper.LogAnalyticsHelper(la_client)\ncreds, _ = get_azure_cli_credentials(resource=\"https://api.loganalytics.io\")\nla_data_client = LogAnalyticsDataClient(creds)", + "execution_count": null, + "outputs": [] + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": "## 2. 
Looking up for anomaly entities" + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "# Select a workspace\nselected_workspace = WidgetViewHelper.select_log_analytics_workspace(la)\ndisplay(selected_workspace)", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "workspace_id = la.get_workspace_id(selected_workspace.value)\n#DateTime format: 2019-07-15T07:05:20.000\nq_timestamp = input('DateTime: ')\n#Entity format: computer\nq_entity = input('Entity for search: ')", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "anomaly_lookup = AnomalyFinder(workspace_id, la_data_client)\nselected_tables = WidgetViewHelper.select_multiple_tables(anomaly_lookup)\ndisplay(selected_tables)", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "# This action may take a few minutes or more, please be patient.\nstart = timeit.default_timer()\nanomalies, queries = anomaly_lookup.run(q_timestamp, q_entity, list(selected_tables.value))\ndisplay(anomalies)\n\nif queries is not None:\n url = WidgetViewHelper.construct_url_for_log_analytics_logs(tenant_domain, subscription_id, resource_group, selected_workspace.value)\n WidgetViewHelper.display_html(WidgetViewHelper.copy_to_clipboard(url, queries, 'Add queries to clipboard and go to Log Analytics'))\n\nprint('==================')\nprint('Elapsed time: ', timeit.default_timer() - start, ' seconds')", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "", + "execution_count": null, + "outputs": [] + } + ], + "metadata": { + "kernelspec": { + "name": "python36", + "display_name": "Python 3.6", + "language": "python" + }, + "language_info": { + "mimetype": "text/x-python", + "nbconvert_exporter": "python", + "name": 
"python", + "pygments_lexer": "ipython3", + "version": "3.6.6", + "file_extension": ".py", + "codemirror_mode": { + "version": 3, + "name": "ipython" + } + }, + "celltoolbar": "Tags" + }, + "nbformat": 4, + "nbformat_minor": 1 +} \ No newline at end of file diff --git a/Notebooks/HowTos/CSharpMagic.ipynb b/Notebooks/HowTos/CSharpMagic.ipynb deleted file mode 100644 index a37a00cd7b7..00000000000 --- a/Notebooks/HowTos/CSharpMagic.ipynb +++ /dev/null @@ -1,85 +0,0 @@ -{ - "cells": [ - { - "metadata": {}, - "cell_type": "markdown", - "source": "# Using C# on Local Jupyter Environment\n\n__Notebook Version:__ 1.0
\n__Python Version:__ Python 3.6
\n__Platforms Supported:__
\n\n- Local Jupyter installation on Windows 10\n\n__Data Source Required:__
\n- no
\n\n__Description__
\nThe notebook demonstrates how to run C# programs on Jupyter.
" - }, - { - "metadata": { - "trusted": false - }, - "cell_type": "code", - "source": "# Run only once\n!pip install clrmagic", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": false - }, - "cell_type": "code", - "source": "# Load CLR_Magic \n%reload_ext clrmagic", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": false - }, - "cell_type": "code", - "source": "# Display all magic commands\n%lsmagic", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": false - }, - "cell_type": "code", - "source": "%%CS Hello System.dll\npublic static string Hello(string name)\n{\n string temp = \"Hello {0}\";\n return string.Format(temp, name);\n}", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": false - }, - "cell_type": "code", - "source": "# Calling C# method\nHello('Your name')", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": false - }, - "cell_type": "code", - "source": "", - "execution_count": null, - "outputs": [] - } - ], - "metadata": { - "kernelspec": { - "name": "python36", - "display_name": "Python 3.6", - "language": "python" - }, - "language_info": { - "mimetype": "text/x-python", - "nbconvert_exporter": "python", - "name": "python", - "pygments_lexer": "ipython3", - "version": "3.5.4", - "file_extension": ".py", - "codemirror_mode": { - "version": 3, - "name": "ipython" - } - }, - "celltoolbar": "Tags" - }, - "nbformat": 4, - "nbformat_minor": 2 -} \ No newline at end of file diff --git a/Notebooks/HowTos/IntegratedWithGitHubAndLogAnalytics.ipynb b/Notebooks/HowTos/IntegratedWithGitHubAndLogAnalytics.ipynb deleted file mode 100644 index 0c4d1b18b13..00000000000 --- a/Notebooks/HowTos/IntegratedWithGitHubAndLogAnalytics.ipynb +++ /dev/null @@ -1,107 +0,0 @@ -{ - "cells": [ - { - "metadata": { - "collapsed": true - }, - "cell_type": "markdown", - "source": "# How To: Integarted with GitHub and Azure Log 
Analytics\n\n__Notebook Version:__ 1.0
\n__Python Version:__ Python 3.6 (including Python 3.6 - AzureML)
\n__Platforms Supported:__
\n - Azure Notebooks Free Compute\n - Azure Notebooks DSVM\n__Data Source Required:__
\n - no\n \n### Description\nThe sample notebook retrieves Kusto query (KQL) definition file from GitHub, parses the JSON object. Then it copies query to clipboard and opens Log Analytics window.\n\nWhen you switch between Azure Notebooks Free Compute and Data Science Virtual Machine (DSVM), you may need to select Python version: please select Python 3.6 for Free Compute, and Python 3.6 - AzureML for DSVM." - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## Prerequisite check" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# only run once, current version 0.1.2\n!pip install --upgrade Sentinel-Utilities", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "import SentinelUtils\n# checking Python version\ncheck = SentinelUtils.version_management.ModuleVersionCheck()\npy_check = check.validate_python('3.6.0')\nif py_check.requirement_met == False:\n print('Please select Python 3.6 or Python 3.6 - AzureML at the upper right corner')\nelse:\n print('Please continue')", - "execution_count": null, - "outputs": [] - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## Table of Contents\n\n1. Retrieve Log Analytics information \n2. Retrieve KQL from GitHub and go to Log Analytics" - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## 1. 
Retrieve Log Analytics Information" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "path = %env PATH\ndsvm = False\nif '/dsvm/' in path:\n dsvm = True\n \nif dsvm == False:\n # Run this if you are using Free Compute\n tenant_id = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[0]\n subscription_id = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[1]\n resource_group = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[2]\n workspace_id = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[3]\n workspace_name = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[4]\n print('Your Log Analytic Workspace: {}'.format(workspace_name))\nelse:\n # Run this if you are using DSVM. You need to copy the values from config.json, if the file has no value, then you need to go to Log Analytics Portal to get the information.\n tenant_id = input('tenant_id:')\n subscription_id = input('subscription_id:')\n resource_group = input('resource_group:')\n workspace_id = input('workspace_id:')\n workspace_name = input('workspace_name:')", - "execution_count": null, - "outputs": [] - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## 2. 
Retrieve KQL from GitHub and go to Log Analytics" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# Load queries from GitHub\ndef get_query_from_github(query_name):\n import requests\n url = \"https://raw.githubusercontent.com/Azure/Azure-Sentinel/master/Hunting%20Queries/Deployed/{}.json\".format(query_name)\n\n response = requests.get(url)\n response.encoding = response.apparent_encoding\n resJson = json.loads(response.text)\n if resJson != None:\n try:\n print(resJson['query'])\n return resJson['query']\n except Exception as e:\n print(e)\n \n# Copy the query to Clipboard\ndef copy_query_to_clipboard(query_text):\n url = 'https://ms.portal.azure.com/?feature.showassettypes=Microsoft_Azure_Security_Insights_SecurityInsightsDashboard#blade/Microsoft_Azure_Security_Insights/MainMenuBlade/7/subscriptionId/{}/resourceGroup/{}/workspaceName/{}'.format(subscription_id, resource_group, workspace_name)\n html_str = (\n \"\"\"\n \n \n Copy query to clipboard and go to Log Analytics\n \n \"\"\"\n )\n\n return html_str ", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "from IPython.display import HTML\nimport json\n# file name: Create Office Account.json\nHTML(copy_query_to_clipboard(get_query_from_github('Create Office Account')))", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "", - "execution_count": null, - "outputs": [] - } - ], - "metadata": { - "kernelspec": { - "name": "python36", - "display_name": "Python 3.6", - "language": "python" - }, - "language_info": { - "pygments_lexer": "ipython3", - "name": "python", - "file_extension": ".py", - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "nbconvert_exporter": "python", - "mimetype": "text/x-python", - "version": "3.6.7" - }, - "celltoolbar": "Tags" - }, - "nbformat": 4, - "nbformat_minor": 2 -} \ No newline at 
end of file diff --git a/Notebooks/HowTos/ManageAzureSentinelBookmarks.ipynb b/Notebooks/HowTos/ManageAzureSentinelBookmarks.ipynb deleted file mode 100644 index 6bad866cb29..00000000000 --- a/Notebooks/HowTos/ManageAzureSentinelBookmarks.ipynb +++ /dev/null @@ -1,242 +0,0 @@ -{ - "cells": [ - { - "metadata": {}, - "cell_type": "markdown", - "source": "# How To: Manage Azure Sentinel Bookmarks\n\n__Notebook Version:__ 1.0
\n__Python Version:__ Python 3.6 (including Python 3.6 - AzureML), Plotly 3.5
\n__Required Packages:__ Kqlmagic 0.1.90
\n__Platforms Supported:__
\n - Azure Notebooks Free Compute\n - Azure Notebooks DSVM\n__Data Source Required:__
\n - Log Analytics - Bookmarks\n \n### Description\nThe sample notebook get bookmarks from Azure Sentinel\n\nWhen you switch between Azure Notebooks Free Compute and Data Science Virtual Machine (DSVM), you may need to select Python version: please select Python 3.6 for Free Compute, and Python 3.6 - AzureML for DSVM." - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## Prerequisite check" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# only run once, current version 0.1.2\n!pip install --upgrade Sentinel-Utilities", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "import SentinelUtils\n# checking Python version\ncheck = SentinelUtils.version_management.ModuleVersionCheck()\npy_check = check.validate_python('3.6.0')\nif py_check.requirement_met == False:\n print('Please select Python 3.6 or Python 3.6 - AzureML at the upper right corner')\nelse:\n print('Please continue')", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# checking required packages\nmods_check = check.validate_installed_modules(['Kqlmagic>=0.1.90'])\nfor mod_info in mods_check:\n if mod_info.requirement_met == False:\n print('Please install {} {} at the following cell.'.format(mod_info.name, mod_info.required_version))", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# Please install required packages based on the check at last cellp\n#!pip install Kqlmagic --upgrade", - "execution_count": null, - "outputs": [] - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## Table of Contents\n\n1. Retrieve Log Analytics information\n3. Log into Log Analytics\n4. Retrieve Bookmark Data\n5. Bookmark service functions \n6. Bookmark management through service API\n7. Go to Azure Log Analytics\n8. 
Data Analysis - Timeline Chart" - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## 1. Retrieve Log Analytics Information" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "path = %env PATH\ndsvm = False\nif '/dsvm/' in path:\n dsvm = True\n \nif dsvm == False:\n # Run this if you are using Free Compute\n tenant_id = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[0]\n subscription_id = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[1]\n resource_group = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[2]\n workspace_id = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[3]\n workspace_name = SentinelUtils.config_reader.ConfigReader.read_config_values(\"../config.json\")[4]\n print('Your Log Analytic Workspace: {}'.format(workspace_name))\nelse:\n # Run this if you are using DSVM. You need to copy the values from config.json, if the file has no value, then you need to go to Log Analytics Portal to get the information.\n tenant_id = input('tenant_id:')\n subscription_id = input('subscription_id:')\n resource_group = input('resource_group:')\n workspace_id = input('workspace_id:')\n workspace_name = input('workspace_name:')", - "execution_count": null, - "outputs": [] - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## 2. Log into Log Analytics" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# You must run this cell to log into Log Analytics to continue\n# Make sure you have 0.1.90 or above, if not, run Kqlmagic installation again\n%reload_ext Kqlmagic\n%kql loganalytics://code;workspace=workspace_id;tenant=tenant_id;alias=\"SentinelDB\"", - "execution_count": null, - "outputs": [] - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## 3. 
Retrieve Bookmark Data" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# Checking required Data Sources\nrequired_data_sources = ['HuntingBookmark']\ndbSchema = %kql --schema \"SentinelDB@loganalytics\"\ntables = list(dbSchema.keys())\nfor source in required_data_sources:\n if source not in tables:\n print('You do not have required data source: {}'.format(source))\n else:\n print('Please continue')", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "%kql HuntingBookmark | take 100", - "execution_count": null, - "outputs": [] - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## 4. Bookmark service functions" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# head\nimport json\nimport requests\nimport pandas as pd\nfrom pandas.io.json import json_normalize\n\n# calling test site\ntest_subscription_id = input('test subscription id:')\ntest_resource_group = input('test resource_group:')\ntest_workspace_name = input('test workspace_name:')", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# functions, this is using testing environment\ndef generate_guid():\n import uuid\n return str(uuid.uuid4())\n\ndef construct_url(bookmark_id):\n base_url = 'https://resourceprovider.westus2.cloudapp.azure.com/subscriptions/{}/resourceGroups/{}/providers/Microsoft.OperationalInsights/workspaces/{}/providers/Microsoft.SecurityInsights/bookmarks{}?api-version=1.0'\n return base_url.format(test_subscription_id, test_resource_group, test_workspace_name, '/' + bookmark_id) if bookmark_id != None else base_url.format(test_subscription_id, test_resource_group, test_workspace_name, '')\n\ndef create_bookmark():\n import datetime\n # Sample bookmark data\n newbookmark = {\n \"properties\": {\n \"displayName\": \"Example bookmark\",\n \"notes\": 
datetime.datetime.now().isoformat(),\n \"labels\": [\"Azure Sentinel Notebooks\"],\n \"query\": \"SecurityEvent | take 5 | project Account, Computer\",\n \"queryResult\": \"'Account': 'WORKGROUP\\vm3$', 'Computer': 'vm3'\"\n }\n }\n \n url = construct_url(generate_guid())\n response = None\n headers = {\"Content-Type\" : \"application/json\"}\n try:\n response = requests.put(url, data=json.dumps(newbookmark), headers=headers)\n\n if response.status_code != 200:\n print(response.status_code)\n print(response.text)\n raise Exception('Recieved non 200 response while sending response to CFN.')\n return response\n except requests.exceptions.RequestException as e:\n if response != None:\n print(response.text)\n print(e)\n raise\n \ndef get_bookmarks():\n url = construct_url(None)\n response = requests.get(url)\n response.encoding = response.apparent_encoding\n resJson = json.loads(response.text)\n if resJson != None:\n try:\n return json_normalize(resJson['value'])\n except Exception as e:\n print(e)", - "execution_count": null, - "outputs": [] - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## 5. Bookmark management through service API" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# Create a new bookmark\nres = create_bookmark()\nprint(res)", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# get all bookamrks and filter by displayName\norigin_df = get_bookmarks()\ndf = origin_df[origin_df['properties.displayName'] == 'Example bookmark'][['properties.created', 'properties.notes', 'properties.displayName', 'properties.labels', 'properties.query', 'properties.queryResult']]\ndf", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "markdown", - "source": "## 6. 
Go to Azure Log Analytics" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# Utils functions\ndef copy_query_to_clipboard(query_text):\n import ipywidgets as widgets\n from IPython.display import display\n from IPython.display import HTML\n\n url = 'https://ms.portal.azure.com/?feature.showassettypes=Microsoft_Azure_Security_Insights_SecurityInsightsDashboard#blade/Microsoft_Azure_Security_Insights/MainMenuBlade/7/subscriptionId/{}/resourceGroup/{}/workspaceName/{}'.format(subscription_id, resource_group, workspace_name)\n html_str = (\n \"\"\"\n \n\n \n\n Go to Log Analytics\n \n \n \"\"\"\n )\n\n return html_str \n\ndef make_clickable(val):\n # target _blank to open new window\n return ''.format(val)", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "# appending link column to the dataframe\ndf = df.assign(GoToLogAnalytics=df['properties.query'].apply(lambda x: copy_query_to_clipboard(x)))\ndf.style.format({'': make_clickable})", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "markdown", - "source": "## 7. 
Data Analysis - Timeline Chart" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "origin_df['Start'] = origin_df.groupby('properties.displayName')['properties.created'].transform(min)\norigin_df['Finish'] = origin_df.groupby('properties.displayName')['properties.created'].transform(max)\n\nplot_df = origin_df[['properties.displayName', 'Start', 'Finish']].copy().rename(columns = {'properties.displayName' : 'Task'}).drop_duplicates('Task')\nplot_df = plot_df.reset_index().drop('index', 1)\nplot_df", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "import plotly\nimport plotly.plotly as pplotly\nimport plotly.figure_factory as factory\n\n# input your account info for Plotly, https://plot.ly\nyour_plotly_username = input('plotly username:')\nyour_api_key = input('plotly API key:')\nplotly.tools.set_credentials_file(username=your_plotly_username, api_key=your_api_key)", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "chart = factory.create_gantt(plot_df)\npplotly.iplot(chart, filename='Bookmark Time Line', title='Bookmark Time Line', world_readable=True)", - "execution_count": null, - "outputs": [] - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "", - "execution_count": null, - "outputs": [] - } - ], - "metadata": { - "kernelspec": { - "name": "python36", - "display_name": "Python 3.6", - "language": "python" - }, - "celltoolbar": "Tags", - "language_info": { - "pygments_lexer": "ipython3", - "name": "python", - "file_extension": ".py", - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "nbconvert_exporter": "python", - "mimetype": "text/x-python", - "version": "3.6.7" - } - }, - "nbformat": 4, - "nbformat_minor": 1 -} \ No newline at end of file diff --git a/Notebooks/HowTos/PowerShell.ipynb b/Notebooks/HowTos/PowerShell.ipynb 
deleted file mode 100644 index 430784f627a..00000000000 --- a/Notebooks/HowTos/PowerShell.ipynb +++ /dev/null @@ -1,82 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Using PowerShell on Local Jupyter Environment\n", - "\n", - "__Notebook Version:__ 1.0
\n", - "__Python Version:__ Python 3.6
\n", - "__Platforms Supported:__
\n", - "\n", - "- Local Jupyter installation on Windows 10\n", - "\n", - "__Data Source Required:__
\n", - "- no
\n", - "\n", - "__Description__
\n", - "The notebook demonstrates how to enable PowerShell on Jupyter.
" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import sys\n", - "sys.path.append('c:\\program files (x86)\\microsoft visual studio\\shared\\anaconda3_64\\lib\\dist-packages')\n", - "\n", - "!pip install powershellmagic" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%load_ext powershellmagic" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "%%powershell\n", - "Get-Command" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "name": "python36", - "display_name": "Python 3.6", - "language": "python" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.4" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/Notebooks/HowTos/ProvisioningDSVM.ipynb b/Notebooks/HowTos/Provisioning DSVM.ipynb similarity index 61% rename from Notebooks/HowTos/ProvisioningDSVM.ipynb rename to Notebooks/HowTos/Provisioning DSVM.ipynb index 0e47504c714..6b1603732f5 100644 --- a/Notebooks/HowTos/ProvisioningDSVM.ipynb +++ b/Notebooks/HowTos/Provisioning DSVM.ipynb @@ -1,63 +1,113 @@ -{ - "cells": [ - { - "metadata": {}, - "cell_type": "markdown", - "source": "# How To: Provisioning Data Science Virtual Machine (DSVM)\n\n__Notebook Version:__ 1.0
\n__Python Version:__ Python 3.6
\n__Platforms Supported:__
\n - Azure Notebooks Free Compute\n__Data Source Required:__
\n - no\n \n### Description\nThe sample notebook shows how to provision a Azure DSVM as an alternate computing resource for hosting Azure Notebooks.\n\nAzure Notebooks provides Free Compute as the default computing resource, which is free of charge. However, sometimes you do want to have a powerful computing environment, and you don't want to go through Direct Compute route which requires JupyterHub installation on Linux machines, then Data Science Virtual Machine (DSVM) becomes a vital choice.\n\nYou may reference this article for details. In a nutshell, you need to select Linux VM with Ubuntu flavor. And keep in mind that on Azure DSVM, if you want to use Python 3.6 which is required by Azure Sentinel notebooks, you need to select Python 3.6 - AzureML." - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## Table of Contents\n\n1. How to create a new DSVM \n2. How to use DSVM\n3. Things to know about using DSVM" - }, - { - "metadata": {}, - "cell_type": "markdown", - "source": "## 1. How to create a new DSVM\n\n0. First, please read this article for details\n1. Go to Azure portal\n2. Search for Data Science Virtual Machine under All Services
\n![select](images/select.png)
\n3. Select DSVM for Linux (Ubuntu), read the introduction, click Create button. On the following page shown below, following the instruction to complete the form. You need to use the same Azure subscription that you are using for your Azure Sentinel and Azure Log Analytics. And make sure you select Password and check 'Login with Azure Active Directory'.
\n![create](images/create.png)
\n4. Once a DSVM created, make sure you keep SSH public key and password in a safe place.\n5. If you want to remote into the VM using SSH, you can add inbound port rule for port 22." - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "markdown", - "source": "## 2. How to use DSVM\n\n1. Now that you have a DSVM, when you login to https://notebooks.azure.com, you can see you DSVM on the drop down list under Free Compute and Direct Compute.
\n![dropdown](images/dropdown.png)
\n2. Of course you will select DSVM, it will ask you to validate your JIT credentials.
\n![login](images/login.png)
\n3. Once you pick a notebook to run, you may encounter the following warning:
\n![warning](images/warning.png)
\nAs you may see, [Python 3.6 - AzureML] is the correct answer.\n" - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "markdown", - "source": "## 3. Things to know about using DSVM\n\n1. The most important thing to know about Azure Notebooks on DSVM is that: Azure Notebooks project home directory is not mounted on the DSVM. So any references to Azure Notebooks folders / files will incur File/folder not found exception. In other words, each ipynb notebook need to be independent of other files. \n2. There are work-around solutions:
\n a. Data files can be stored on Azure Blob storage and blobfufe
\n b. Python files can be added to the notebook by using the Jupyter magic, you can find an example here: %%writefile
\n c. Configuration files are a bit more complicated. Using our Azure Sentinel config.json as an example, it is generated when you import Azure Sentinel Jupyter project from GitHub repo through Azure portal. The configuration JSON is Azure Log Analytics workspace specific file, so you clone one project for one Log Analytics workspace. You can find the config.json file at the root of the project home directory. Get Start.jpynb section 1 demonstrates how to set the configuration settings manually. " - }, - { - "metadata": { - "trusted": true - }, - "cell_type": "code", - "source": "", - "execution_count": null, - "outputs": [] - } - ], - "metadata": { - "kernelspec": { - "name": "python36", - "display_name": "Python 3.6", - "language": "python" - }, - "language_info": { - "pygments_lexer": "ipython3", - "name": "python", - "file_extension": ".py", - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "nbconvert_exporter": "python", - "mimetype": "text/x-python", - "version": "3.6.7" - } - }, - "nbformat": 4, - "nbformat_minor": 1 +{ + "cells": [ + { + "metadata": {}, + "cell_type": "markdown", + "source": "# How To: Provisioning Data Science Virtual Machine (DSVM)\n\n__Notebook Version:__ 1.0
\n__Python Version:__ Python 3.6 (including Python 3.6 - AzureML)
\n__Required Packages:__ azure 4.0.0, azure-cli-profile 2.1.4
\n__Platforms Supported:__
\n - Azure Notebooks Free Compute\n - Azure Notebooks DSVM\n\n__Data Source Required:__
\n - no\n \n### Description\nThe sample notebook shows how to provision a Azure DSVM as an alternate computing resource for hosting Azure Notebooks.\n\nAzure Notebooks provides Free Compute as the default computing resource, which is free of charge. However, sometimes you do want to have a powerful computing environment, and you don't want to go through Direct Compute route which requires JupyterHub installation on Linux machines, then Data Science Virtual Machine (DSVM) becomes a vital choice.\n\nYou may reference this article for details. In a nutshell, you need to select Linux VM with Ubuntu flavor. And keep in mind that on Azure DSVM, if you want to use Python 3.6 which is required by Azure Sentinel notebooks, you need to select Python 3.6 - AzureML." + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": "## Table of Contents\n\n1. How to create a new DSVM \n2. How to use DSVM\n3. Things to know about using DSVM" + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": "## 1. 
How to create a new DSVM" + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "# only run once\n!pip install --upgrade Azure-Sentinel-Utilities", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "# please enter your tenant domain below, for Microsoft, using: microsoft.onmicrosoft.com\n!az login --tenant ''", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "# User Input for creating a new DSVM\nvm_size = 'Standard_DS3_v2'\n\n# replace [[your_subcription_id]] with 'real subscription id'\n!az account set --subscription [[your_subcription_id]]", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "# replace all [[your_stuff]] with 'real values'\n!az group deployment create \\\n --resource-group [[your_subcription_id]] \\\n --template-uri https://raw.githubusercontent.com/Azure/DataScienceVM/master/Scripts/CreateDSVM/Ubuntu/azuredeploy.json \\\n --parameters \\\n '{ \\\n \"$schema\": \"https://schema.management.azure.com/schemas/2015-01-01/deploymentParameters.json#\",\\\n \"contentVersion\": \"1.0.0.0\",\\\n \"parameters\": {\\\n \"adminUsername\": { \"value\" : \"[[your_admin_id]]\"},\\\n \"adminPassword\": { \"value\" : \"[[your_admin_password]]\"},\\\n \"vmName\": { \"value\" : \"[[vm_name]]\"},\\\n \"vmSize\": { \"value\" : \"Standard_DS3_v2\"}\\\n }\\\n }'", + "execution_count": null, + "outputs": [] + }, + { + "metadata": {}, + "cell_type": "markdown", + "source": "*** Please go to the project page to select the VM that you just created as your new computing platform (Run on ...) to continue ..." + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "markdown", + "source": "## 2. How to use DSVM\n\n1. 
Now that you have a DSVM, when you log in to https://notebooks.azure.com, you can see your DSVM on the drop-down list under Free Compute and Direct Compute.
\n![dropdown](images/dropdown.png)
\n2. Of course you will select DSVM, it will ask you to validate your JIT credentials.
\n![login](images/login.png)
\n3. Once you pick a notebook to run, you may encounter the following warning:
\n![warning](images/warning.png)
\nAs you may see, [Python 3.6 - AzureML] is the correct answer.\n" + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "", + "execution_count": null, + "outputs": [] + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "markdown", + "source": "## 3. Things to know about using DSVM\n\n1. The most important thing to know about Azure Notebooks on DSVM is that: Azure Notebooks project home directory is not mounted on the DSVM. So any references to Azure Notebooks folders / files will incur File/folder not found exception. In other words, each ipynb notebook need to be independent of other files. \n2. There are work-around solutions:
\n a. Data files can be stored on Azure Blob storage and mounted via blobfuse
\n b. Python files can be added to the notebook by using a Jupyter magic; you can find an example here: %%writefile
\n c. Configuration files are a bit more complicated. Using our Azure Sentinel config.json as an example, it is generated when you import Azure Sentinel Jupyter project from GitHub repo through Azure portal. The configuration JSON is Azure Log Analytics workspace specific file, so you clone one project for one Log Analytics workspace. You can find the config.json file at the root of the project home directory. Get Start.jpynb section 1 demonstrates how to set the configuration settings manually. " + }, + { + "metadata": { + "trusted": true + }, + "cell_type": "code", + "source": "", + "execution_count": null, + "outputs": [] + } + ], + "metadata": { + "kernelspec": { + "name": "python36", + "display_name": "Python 3.6", + "language": "python" + }, + "language_info": { + "mimetype": "text/x-python", + "nbconvert_exporter": "python", + "name": "python", + "pygments_lexer": "ipython3", + "version": "3.6.6", + "file_extension": ".py", + "codemirror_mode": { + "version": 3, + "name": "ipython" + } + } + }, + "nbformat": 4, + "nbformat_minor": 1 } \ No newline at end of file diff --git a/Notebooks/SentinelUtilities/SentinelAnomalyLookup/anomaly_finder.py b/Notebooks/SentinelUtilities/SentinelAnomalyLookup/anomaly_finder.py index 021492f744f..67b139fb150 100644 --- a/Notebooks/SentinelUtilities/SentinelAnomalyLookup/anomaly_finder.py +++ b/Notebooks/SentinelUtilities/SentinelAnomalyLookup/anomaly_finder.py @@ -106,7 +106,7 @@ def construct_related_queries(df_anomalies): ) query = query[:-2] # drop the last or - query += " | take 1000" # limit the output size + query += " | take 1000; " # limit the output size query = query.replace("\\", "\\\\") queries += query diff --git a/Notebooks/SentinelUtilities/setup.py b/Notebooks/SentinelUtilities/setup.py index 237d85a5616..682869e9e52 100644 --- a/Notebooks/SentinelUtilities/setup.py +++ b/Notebooks/SentinelUtilities/setup.py @@ -15,7 +15,7 @@ setuptools.setup( name="Azure-Sentinel-Utilities", - version="0.3.1", + 
version="0.3.2", author="Azure Sentinel Notebooks Devs", author_email="zhzhao@microsoft.com", description="AZURE SENTINEL NOTEBOOKS PYTHON TOOLS: \