Sunil Sattiraju (lordlinus)

// Databricks notebook source
import org.graphframes.GraphFrame
import org.apache.spark.sql.functions._
import org.apache.spark.sql.types._
import org.apache.spark.sql.{Row, SparkSession}
// GraphFrames' iterative algorithms (e.g. connectedComponents) require a Spark checkpoint directory
sc.setCheckpointDir("/dbfs/cp")
// COMMAND ----------
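For context (not part of the original notebook): GraphFrames' iterative algorithms such as connectedComponents refuse to run without a checkpoint directory, which is why sc.setCheckpointDir is called above. A minimal PySpark sketch with illustrative toy data, assuming the graphframes package is attached to the cluster and spark/sc are the notebook's built-in session and context:

from graphframes import GraphFrame

# Toy vertices and edges (illustrative only, not from the original notebook)
v = spark.createDataFrame([("a",), ("b",), ("c",)], ["id"])
e = spark.createDataFrame([("a", "b"), ("b", "c")], ["src", "dst"])

sc.setCheckpointDir("/dbfs/cp")   # same requirement as the Scala cell above
g = GraphFrame(v, e)
g.connectedComponents().show()    # errors out if no checkpoint directory is set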
// Fix for SQL query results coming back as literals: Spark's default JDBC dialect wraps
// column names in double quotes, which Databricks SQL parses as string literals, so each
// row returns the column name instead of its value. Register a dialect that leaves
// identifiers unquoted.
import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects}
JdbcDialects.registerDialect(new JdbcDialect() {
  override def canHandle(url: String): Boolean = url.toLowerCase.startsWith("jdbc:databricks:")
  override def quoteIdentifier(column: String): String = column
})
import time
from queue import Queue
from threading import Thread

table_list = [
    "table1",
    "table2",
    "table3",
    "table4",
    "table5",
]
import pandas as pd
import os

folder_path = "/os/folder/path"
summaryDF = pd.DataFrame()
# Read the "Data" sheet from every Excel workbook in the folder and stack them into one DataFrame
data = pd.concat(
    [
        pd.read_excel(os.path.join(folder_path, f), sheet_name="Data")
        for f in os.listdir(folder_path)
        if f.endswith((".xlsx", ".xls"))  # skip any non-Excel files in the folder
    ]
)
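summaryDF is created above but never filled in the visible part of the gist; purely as an illustrative assumption, one simple way the combined data could feed it:

# Hypothetical summary step (an assumption, not from the original gist)
summaryDF = data.describe(include="all")
print(summaryDF)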
# List Azure regions that publish a physical location, with each region's paired region
az account list-locations --query "[?not_null(metadata.latitude)].{RegionName:name, PairedRegion:metadata.pairedRegion[0].name}" --output json
#!/bin/bash -e
az config set extension.use_dynamic_install=yes_without_prompt
# az extension add -n azure-cli-ml -y
# Attach the AKS cluster to the Azure ML workspace as a compute target
null=$(az ml computetarget attach aks \
  --name $AKS_COMPUTE_NAME_IN_AML \
  --resource-group $RESOURCE_GROUP_NAME \
# Request body for the Databricks token API: PAT lifetime (seconds) and a comment
pat_token_config=$(jq -n -c \
  --arg ls "$PAT_LIFETIME" \
  --arg co "Example Token" \
  '{lifetime_seconds: ($ls|tonumber),
    comment: $co}')
# Azure AD tokens for the Databricks auth headers: one for the Azure Databricks resource app, one for the Azure management endpoint
adbGlobalToken=$(az account get-access-token --resource 2ff814a6-3304-4ab8-85cb-cd0e6f879c1d --output json | jq -r .accessToken)
azureApiToken=$(az account get-access-token --resource https://management.core.windows.net/ --output json | jq -r .accessToken)
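As a sketch of how these pieces are typically combined (not shown in the gist excerpt above), the two Azure AD tokens plus pat_token_config can be posted to the workspace's token-create API to mint a Databricks PAT. The workspace URL, resource ID, and environment-variable names below are placeholder assumptions:

import os
import requests

# Placeholders (assumptions, not from the original gist)
workspace_url = os.environ["DATABRICKS_WORKSPACE_URL"]        # e.g. adb-<id>.<n>.azuredatabricks.net
workspace_resource_id = os.environ["DATABRICKS_RESOURCE_ID"]  # ARM id of the Databricks workspace
adb_global_token = os.environ["ADB_GLOBAL_TOKEN"]             # AAD token for the Databricks resource
azure_api_token = os.environ["AZURE_API_TOKEN"]               # AAD token for management.core.windows.net

resp = requests.post(
    f"https://{workspace_url}/api/2.0/token/create",
    headers={
        "Authorization": f"Bearer {adb_global_token}",
        "X-Databricks-Azure-SP-Management-Token": azure_api_token,
        "X-Databricks-Azure-Workspace-Resource-Id": workspace_resource_id,
    },
    json={"lifetime_seconds": int(os.environ.get("PAT_LIFETIME", "3600")), "comment": "Example Token"},
)
resp.raise_for_status()
pat = resp.json()["token_value"]  # the newly minted Databricks personal access token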