Creating a pipeline with a scheduled trigger with ADFV2
I am trying to migrate a pipeline that already exists in ADF V1 to ADF V2, and I have some issues with the concept of triggers. My pipeline has two activities: the first is an Azure Data Lake Analytics activity and the second is a Copy activity.
The first activity runs a U-SQL script that reads data from the partitioned folder /yyyy/MM/dd/, processes it, and writes it to the folder /yyyy-MM-dd/.
Here are the relevant JSON definitions from my factory (pipeline, trigger, and datasets).
Pipeline:
"name": "StreamCompressionBlob2SQL",
"properties":
"activities": [
"name": "compress",
"type": "DataLakeAnalyticsU-SQL",
"policy":
"timeout": "7.00:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
,
"typeProperties":
"scriptPath": "d00044653/azure-configurations/usql-scripts/stream/compression.usql",
"scriptLinkedService":
"referenceName": "AzureBlobStorage",
"type": "LinkedServiceReference"
,
"parameters":
"Year":
"value": "@formatDateTime(pipeline().parameters.windowStartTime,'yyyy')",
"type": "Expression"
,
"Month":
"value": "@formatDateTime(pipeline().parameters.windowStartTime,'MM')",
"type": "Expression"
,
"Day":
"value": "@formatDateTime(pipeline().parameters.windowStartTime,'dd')",
"type": "Expression"
,
"linkedServiceName":
"referenceName": "AzureDataLakeAnalytics1",
"type": "LinkedServiceReference"
,
"name": "Blob2SQL",
"type": "Copy",
"dependsOn": [
"activity": "compress",
"dependencyConditions": [
"Succeeded"
]
],
"policy":
"timeout": "7.00:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
,
"typeProperties":
"source":
"type": "BlobSource",
"recursive": true
,
"sink":
"type": "SqlSink",
"writeBatchSize": 10000
,
"enableStaging": false,
"dataIntegrationUnits": 0,
"translator":
"type": "TabularTranslator",
"columnMappings":
"tag": "TAG",
"device_id": "DEVICE_ID",
"system_id": "SYSTEM_ID",
"utc": "UTC",
"ts": "TS",
"median": "MEDIAN",
"min": "MIN",
"max": "MAX",
"avg": "AVG",
"stdev": "STDEV",
"first_value": "FIRST_VALUE",
"last_value": "LAST_VALUE",
"message_count": "MESSAGE_COUNT"
,
"inputs": [
"referenceName": "AzureBlobDataset_COMPRESSED_ASA_v1",
"type": "DatasetReference"
],
"outputs": [
"referenceName": "AzureSQLDataset_T_ASSET_MONITORING_WARM_ASA_v1",
"type": "DatasetReference"
]
],
"parameters":
"windowStartTime":
"type": "String"
Trigger:
"name": "trigger1",
"properties":
"runtimeState": "Started",
"pipelines": [
"pipelineReference":
"referenceName": "StreamCompressionBlob2SQL",
"type": "PipelineReference"
,
"parameters":
"windowStartTime": "@trigger().scheduledTime"
],
"type": "ScheduleTrigger",
"typeProperties":
"recurrence":
"frequency": "Day",
"interval": 1,
"startTime": "2018-08-17T10:46:00.000Z",
"endTime": "2018-11-04T10:46:00.000Z",
"timeZone": "UTC"
Input Dataset for Copy Activity:
"name": "AzureBlobDataset_COMPRESSED_ASA_v1",
"properties":
"linkedServiceName":
"referenceName": "AzureBlobStorage",
"type": "LinkedServiceReference"
,
"parameters":
"Year":
"type": "String",
"defaultValue": "@formatDateTime(pipeline().parameters.windowStartTime,'yyyy')"
,
"Month":
"type": "String",
"defaultValue": "@formatDateTime(pipeline().parameters.windowStartTime,'yyyy')"
,
"Day":
"type": "String",
"defaultValue": "@formatDateTime(pipeline().parameters.windowStartTime,'yyyy')"
,
"type": "AzureBlob",
"structure": [
"name": "tag",
"type": "String"
,
"name": "device_id",
"type": "String"
,
"name": "system_id",
"type": "String"
,
"name": "utc",
"type": "DateTime"
,
"name": "ts",
"type": "DateTime"
,
"name": "median",
"type": "Double"
,
"name": "min",
"type": "Double"
,
"name": "max",
"type": "Double"
,
"name": "avg",
"type": "Double"
,
"name": "stdev",
"type": "Double"
,
"name": "first_value",
"type": "Double"
,
"name": "last_value",
"type": "Double"
,
"name": "message_count",
"type": "Int16"
],
"typeProperties":
"format":
"type": "TextFormat",
"columnDelimiter": ";",
"nullValue": "\N",
"treatEmptyAsNull": true,
"skipLineCount": 0,
"firstRowAsHeader": true
,
"fileName": "",
"folderPath":
"value": "@concat('d00044653/processed/stream/compressed',dataset().Year,'-',dataset().Month,'-',dataset().Day)",
"type": "Expression"
,
"type": "Microsoft.DataFactory/factories/datasets"
Output Dataset for Copy Activity:
"name": "AzureSQLDataset_T_ASSET_MONITORING_WARM_ASA_v1",
"properties":
"linkedServiceName":
"referenceName": "AzureSqlDatabase1",
"type": "LinkedServiceReference"
,
"type": "AzureSqlTable",
"structure": [
"name": "TAG",
"type": "String"
,
"name": "DEVICE_ID",
"type": "String"
,
"name": "SYSTEM_ID",
"type": "String"
,
"name": "UTC",
"type": "DateTime"
,
"name": "TS",
"type": "DateTime"
,
"name": "MEDIAN",
"type": "Decimal"
,
"name": "MIN",
"type": "Decimal"
,
"name": "MAX",
"type": "Decimal"
,
"name": "AVG",
"type": "Decimal"
,
"name": "STDEV",
"type": "Decimal"
,
"name": "FIRST_VALUE",
"type": "Decimal"
,
"name": "LAST_VALUE",
"type": "Decimal"
,
"name": "MESSAGE_COUNT",
"type": "Int32"
],
"typeProperties":
"tableName": "[dbo].[T_ASSET_MONITORING_WARM]"
,
"type": "Microsoft.DataFactory/factories/datasets"
My problem is that after publishing, nothing happens. Any suggestions?
Tags: azure, triggers, azure-pipelines, azure-data-factory
asked Nov 13 '18 at 14:57 by Veysel Ko (edited Nov 13 '18 at 15:12)
Did you trigger the pipeline, and when did you trigger it? "startTime": "2018-08-17T10:46:00.000Z", "endTime": "2018-11-04T10:46:00.000Z". It ended on Nov 4?
– Bo Xiao, Nov 14 '18 at 2:24
1 Answer
Schedule triggers do not support backfill scenarios (based on your trigger definition, you are starting from August 17th, 2018). With a schedule trigger, pipeline runs can be executed only for time periods from the current time onward.
In your case, for backfill scenarios, use a tumbling window trigger.
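For reference, a minimal sketch of what such a tumbling window trigger could look like for this pipeline. The trigger name, maxConcurrency, delay, and retryPolicy values below are illustrative placeholders, not taken from the question. Note that tumbling window triggers (at the time of writing) support only Minute and Hour frequencies, so a daily window is expressed as frequency Hour with interval 24, and the window start is exposed as @trigger().outputs.windowStartTime rather than @trigger().scheduledTime:

{
    "name": "TumblingWindowTrigger1",
    "properties": {
        "type": "TumblingWindowTrigger",
        "typeProperties": {
            "frequency": "Hour",
            "interval": 24,
            "startTime": "2018-08-17T00:00:00Z",
            "endTime": "2018-11-04T00:00:00Z",
            "delay": "00:00:00",
            "maxConcurrency": 4,
            "retryPolicy": {
                "count": 2,
                "intervalInSeconds": 30
            }
        },
        "pipeline": {
            "pipelineReference": {
                "referenceName": "StreamCompressionBlob2SQL",
                "type": "PipelineReference"
            },
            "parameters": {
                "windowStartTime": "@trigger().outputs.windowStartTime"
            }
        }
    }
}

Unlike a schedule trigger, a tumbling window trigger is bound to exactly one pipeline, and once started it creates runs for every past window between startTime and the current time, which is the backfill behavior missing from the original definition.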
answered Nov 14 '18 at 13:55 by databash
Thank you, that was it. I was not aware of that, and with tumbling windows it works.
– Veysel Ko, Nov 21 '18 at 9:03
Glad it helped. Could you please mark this as the answer if it solved your question?
– databash, Nov 21 '18 at 9:23