
Commit fb5c4a4

Merge branch 'main' into adf_publish
2 parents 3371a91 + d1a4124 commit fb5c4a4

16 files changed: +166 −25 lines changed

.github/FUNDING.yml

Lines changed: 0 additions & 1 deletion
This file was deleted.

README.md

Lines changed: 61 additions & 1 deletion
@@ -1 +1,61 @@
-# timeseries-data-explorer
+# Timeseries Data Explorer
+
+## Instructions for deployment
+
+### Prerequisites
+
+Make sure the following are available on your device:
+
+- Azure CLI
+- An Azure subscription
+- Terraform
+
+### Azure CLI configuration
+
+Configure Azure CLI on your device by running the following commands:
+
+```sh
+# Log in to Azure
+az login
+
+# Set the Azure subscription
+az account set --subscription "<your-subscription-id>"
+
+# Allow CLI extensions to install without prompting
+az config set extension.use_dynamic_install=yes_without_prompt
+```
+
+### Update variables
+
+Open the [`code\infra\vars.tfvars`](code\infra\vars.tfvars) file and update the `prefix` and `location` parameters:
+
+```hcl
+location    = "<your-location>"
+environment = "dev"
+prefix      = "<your-prefix-value>"
+tags        = {}
+...
+```
+
+### Terraform deployment (local backend)
+
+Deploy the Terraform configuration using the following commands:
+
+```sh
+# Move the terraform_override.tf file into the infra folder
+move .\utilities\terraformConfigSamples\* .\code\infra\
+
+# Change directory
+cd .\code\infra\
+
+# Initialize Terraform
+terraform init
+
+# Review the planned changes
+terraform plan -var-file="vars.tfvars"
+
+# Apply the configuration
+terraform apply -var-file="vars.tfvars"
+```
+
+You have successfully deployed the setup!
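
As a quick sanity check before following the deployment steps added above, the listed prerequisites can be verified from a shell. These are standard Azure CLI and Terraform version/account commands, not part of the committed README:

```sh
# Confirm Azure CLI is installed and note its version
az --version

# Confirm you are logged in and see which subscription is active
az account show --output table

# Confirm Terraform is installed
terraform -version
```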

code/datafactory/dataflow/IptvCuratedToKusto.json

Lines changed: 2 additions & 1 deletion
@@ -54,7 +54,8 @@
 " Topology4 as string,",
 " Topology5 as string,",
 " TvModel as string,",
-" UserId as long",
+" UserId as long,",
+" EndTime as timestamp",
 " ),",
 " allowSchemaDrift: true,",
 " validateSchema: false,",

code/datafactory/dataflow/IptvRawToCurated.json

Lines changed: 3 additions & 1 deletion
@@ -50,6 +50,7 @@
 "source(output(",
 " app_version as string,",
 " country as string,",
+" end_time as timestamp 'yyyy-MM-dd\\'T\\'HH:mm:ss.SSS\\'Z\\'',",
 " happiness_score as float,",
 " hw_model as string,",
 " service as string,",
@@ -91,7 +92,8 @@
 " Topology4 = topology_4,",
 " Topology5 = topology_5,",
 " TvModel = tv_model,",
-" UserId = user_id",
+" UserId = user_id,",
+" EndTime = end_time",
 " ),",
 " skipDuplicateMapInputs: true,",
 " skipDuplicateMapOutputs: true) ~> RenameColumns",

code/datafactory/dataflow/OttCuratedToKusto.json

Lines changed: 2 additions & 1 deletion
@@ -52,7 +52,8 @@
 " CommercilizationType as string,",
 " DeviceVendor as string,",
 " HappinessScore as float,",
-" UserId as string",
+" UserId as long,",
+" EndTime as timestamp",
 " ),",
 " allowSchemaDrift: true,",
 " validateSchema: false,",

code/datafactory/dataflow/OttRawToCurated.json

Lines changed: 4 additions & 2 deletions
@@ -60,7 +60,8 @@
 " {COMMERCIALIZATION TYPE} as string,",
 " {Device Vendor} as string,",
 " {Happiness Score} as float,",
-" {User ID} as string",
+" {User ID} as long,",
+" {End Time} as timestamp",
 " ),",
 " useSchema: false,",
 " allowSchemaDrift: true,",
@@ -87,7 +88,8 @@
 " CommercilizationType = {COMMERCIALIZATION TYPE},",
 " DeviceVendor = {Device Vendor},",
 " HappinessScore = {Happiness Score},",
-" UserId = {User ID}",
+" UserId = {User ID},",
+" EndTime = {End Time}",
 " ),",
 " skipDuplicateMapInputs: true,",
 " skipDuplicateMapOutputs: true) ~> RenameColumns",

code/datafactory/pipeline/TestIptvRawToCurated.json

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@
 },
 "sourceFolderPath": {
 "type": "string",
-"defaultValue": "iptv"
+"defaultValue": "iptv/0156490b-efbe-4a27-af08-a237669f91cc"
 },
 "sourceFileName": {
 "type": "string",

code/datafactory/pipeline/TestOttRawToCurated.json

Lines changed: 1 addition & 1 deletion
@@ -113,7 +113,7 @@
 },
 "sourceFolderPath": {
 "type": "string",
-"defaultValue": "ott"
+"defaultValue": "ott/751cb63e-ce65-4f73-9894-6b1bdb8b7e1d"
 },
 "sourceFileName": {
 "type": "string",

code/datamodel/operationaldb.kql

Lines changed: 65 additions & 9 deletions
@@ -13,11 +13,13 @@
     CommercilizationType: string,
     DeviceVendor: string,
     HappinessScore: real,
-    UserId: string
+    UserId: string,
+    EndTime: datetime
 ),
 iptv_raw(
     AppVersion: string,
     Country: string,
+    EndTime: datetime,
     HappinessScore: real,
     HwModel: string,
     Service: string,
@@ -79,6 +81,16 @@
 )

 // Create function
+.create-or-alter function with (docstring = 'Function to get latest threshold value for named threshold', folder='thresholds') GetThreshold(valueName: string) {
+    toscalar(
+        thresholds
+        | where name == valueName
+        | order by timestamp desc
+        | limit 1
+        | summarize max(value)
+    )
+}
+
 .create-or-alter function with (docstring = 'Function to get average happiness scores for iptv data', folder='iptv') IptvGetAvgHappinessScore() {
     let currentTimestamp = now();
     let movingWindowInMinutes = GetThreshold('movingWindowInMinutes');
@@ -125,14 +137,58 @@
     DeviceVendor
 }

-.create-or-alter function with (docstring = 'Function to get latest threshold value for named threshold', folder='thresholds') GetThreshold(valueName: string) {
-    toscalar(
-        thresholds
-        | where name == valueName
-        | order by timestamp desc
-        | limit 1
-        | summarize max(value)
-    )
+.create-or-alter function with (docstring = 'Function to get average happiness scores for ott data for one column', folder='ott') OttGetAvgHappinessScoreForOneColumn(columnName: string) {
+    ott_happiness
+    | extend TotalHappiness = NumberOfUsers * AvgHappinessScore
+    | summarize
+        SumTotalHappiness = sum(TotalHappiness),
+        SumNumberOfUsers = sum(NumberOfUsers)
+        by
+        Timestamp,
+        column_ifexists(columnName, 'columnName does not exist')
+    | extend AvgHappinessScore = SumTotalHappiness / SumNumberOfUsers
+    | project-away SumTotalHappiness
+}
+
+.create-or-alter function with (docstring = 'Function to get average happiness scores for ott data for two columns', folder='ott') OttGetAvgHappinessScoreForTwoColumns(columnName1: string, columnName2: string) {
+    ott_happiness
+    | extend TotalHappiness = NumberOfUsers * AvgHappinessScore
+    | summarize
+        SumTotalHappiness = sum(TotalHappiness),
+        SumNumberOfUsers = sum(NumberOfUsers)
+        by
+        Timestamp,
+        column_ifexists(columnName1, 'columnName1 does not exist'),
+        column_ifexists(columnName2, 'columnName2 does not exist')
+    | extend AvgHappinessScore = SumTotalHappiness / SumNumberOfUsers
+    | project-away SumTotalHappiness
+}
+
+.create-or-alter function with (docstring = 'Function to get average happiness scores for iptv data for one column', folder='iptv') IptvGetAvgHappinessScoreForOneColumn(columnName: string) {
+    iptv_happiness
+    | extend TotalHappiness = NumberOfUsers * AvgHappinessScore
+    | summarize
+        SumTotalHappiness = sum(TotalHappiness),
+        SumNumberOfUsers = sum(NumberOfUsers)
+        by
+        Timestamp,
+        column_ifexists(columnName, 'columnName does not exist')
+    | extend AvgHappinessScore = SumTotalHappiness / SumNumberOfUsers
+    | project-away SumTotalHappiness
+}
+
+.create-or-alter function with (docstring = 'Function to get average happiness scores for iptv data for two columns', folder='iptv') IptvGetAvgHappinessScoreForTwoColumns(columnName1: string, columnName2: string) {
+    iptv_happiness
+    | extend TotalHappiness = NumberOfUsers * AvgHappinessScore
+    | summarize
+        SumTotalHappiness = sum(TotalHappiness),
+        SumNumberOfUsers = sum(NumberOfUsers)
+        by
+        Timestamp,
+        column_ifexists(columnName1, 'columnName1 does not exist'),
+        column_ifexists(columnName2, 'columnName2 does not exist')
+    | extend AvgHappinessScore = SumTotalHappiness / SumNumberOfUsers
+    | project-away SumTotalHappiness
 }

 ////////////////////////////////////////////////
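
For reference, the relocated `GetThreshold` helper and the four new per-column aggregation functions can be exercised with ad-hoc queries along the lines of the sketch below. This is only an illustrative sketch, not part of the commit; the column names passed in (`DeviceVendor`, `TvModel`, `Service`) are assumptions, and any column actually present in the `ott_happiness` / `iptv_happiness` tables would work the same way.

```kql
// Illustrative usage of the functions defined above (not part of the commit).

// Latest value of a named threshold
print movingWindowInMinutes = GetThreshold('movingWindowInMinutes')

// Average happiness per timestamp, broken down by a single (assumed) column
OttGetAvgHappinessScoreForOneColumn('DeviceVendor')
| order by Timestamp desc

// Average happiness broken down by two (assumed) columns over the last day;
// nonexistent columns fall back to the placeholder string via column_ifexists()
IptvGetAvgHappinessScoreForTwoColumns('TvModel', 'Service')
| where Timestamp > ago(1d)
```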

code/infra/datafactory_content.tf

Lines changed: 2 additions & 2 deletions
@@ -12,7 +12,7 @@ resource "azurerm_resource_group_template_deployment" "data_factory_content_depl
 }

 resource "null_resource" "data_factory_triggers_start" {
-  for_each = toset(var.data_factory_triggers_start)
+  for_each = var.data_factory_published_content.parameters_file != "" && var.data_factory_published_content.template_file != "" ? toset(var.data_factory_triggers_start) : toset([])

   provisioner "local-exec" {
     command = "az datafactory trigger start --resource-group ${azurerm_data_factory.data_factory.resource_group_name} --factory-name ${azurerm_data_factory.data_factory.name} --name ${each.value}"
@@ -24,7 +24,7 @@ resource "null_resource" "data_factory_triggers_start" {
 }

 resource "null_resource" "data_factory_pipelines_run" {
-  for_each = toset(var.data_factory_pipelines_run)
+  for_each = var.data_factory_published_content.parameters_file != "" && var.data_factory_published_content.template_file != "" ? toset(var.data_factory_pipelines_run) : toset([])

   provisioner "local-exec" {
     command = "az datafactory pipeline create-run --resource-group ${azurerm_data_factory.data_factory.resource_group_name} --factory-name ${azurerm_data_factory.data_factory.name} --name ${each.value}"
