Commit d9f98b9a authored by Mitar

Merge branch 'distil-audio-pipelines' into 'master'

Adds pipelines and runs for audio primitives

See merge request !174
parents 0753ae62 643b8dc2
Pipeline #113830143 passed with stages in 91 minutes and 15 seconds
{
"id": "8c3a2db6-4449-4a7a-9830-1b9cf2b993d6",
"id": "88e43cbf-2cfa-49e9-9b50-9e1b3a224a2f",
"schema": "https://metadata.datadrivendiscovery.org/schemas/v0/pipeline.json",
"created": "2020-01-23T21:06:32.806107Z",
"created": "2020-01-31T02:19:18.401593Z",
"inputs": [
{
"name": "inputs"
@@ -9,7 +9,7 @@
],
"outputs": [
{
"data": "steps.6.produce",
"data": "steps.5.produce",
"name": "output"
}
],
@@ -17,10 +17,10 @@
{
"type": "PRIMITIVE",
"primitive": {
"id": "4b42ce1e-9b98-4a25-b68e-fad13311eb65",
"version": "0.3.0",
"python_path": "d3m.primitives.data_transformation.dataset_to_dataframe.Common",
"name": "Extract a DataFrame from a Dataset"
"id": "f2a0cf71-0f61-41a7-a0ad-b907083ae56c",
"version": "0.2.0",
"python_path": "d3m.primitives.data_preprocessing.audio_reader.DistilAudioDatasetLoader",
"name": "Load audio collection from dataset into a single dataframe"
},
"arguments": {
"inputs": {
@@ -31,26 +31,9 @@
"outputs": [
{
"id": "produce"
-}
-]
-},
-{
-"type": "PRIMITIVE",
-"primitive": {
-"id": "e193afa1-b45e-4d29-918f-5bb1fa3b88a7",
-"version": "0.2.0",
-"python_path": "d3m.primitives.schema_discovery.profiler.Common",
-"name": "Determine missing semantic types for columns automatically"
-},
-"arguments": {
-"inputs": {
-"type": "CONTAINER",
-"data": "steps.0.produce"
-}
-},
-"outputs": [
+},
{
-"id": "produce"
+"id": "produce_collection"
}
]
},
@@ -65,7 +48,7 @@
"arguments": {
"inputs": {
"type": "CONTAINER",
"data": "steps.1.produce"
"data": "steps.0.produce"
}
},
"outputs": [
@@ -96,7 +79,7 @@
"arguments": {
"inputs": {
"type": "CONTAINER",
"data": "steps.2.produce"
"data": "steps.1.produce"
}
},
"outputs": [
@@ -108,7 +91,8 @@
"semantic_types": {
"type": "VALUE",
"data": [
"https://metadata.datadrivendiscovery.org/types/Attribute"
"https://metadata.datadrivendiscovery.org/types/Target",
"https://metadata.datadrivendiscovery.org/types/TrueTarget"
]
}
}
@@ -116,39 +100,30 @@
{
"type": "PRIMITIVE",
"primitive": {
"id": "4503a4c6-42f7-45a1-a1d4-ed69699cf5e1",
"version": "0.3.0",
"python_path": "d3m.primitives.data_transformation.extract_columns_by_semantic_types.Common",
"name": "Extracts columns by semantic type"
"id": "f2f149c8-a984-4f5b-8a9b-2f13ee0cf16d",
"version": "0.1.2",
"python_path": "d3m.primitives.feature_extraction.audio_transfer.DistilAudioTransfer",
"name": "Audio Transfer"
},
"arguments": {
"inputs": {
"type": "CONTAINER",
"data": "steps.2.produce"
"data": "steps.0.produce_collection"
}
},
"outputs": [
{
"id": "produce"
}
-],
-"hyperparams": {
-"semantic_types": {
-"type": "VALUE",
-"data": [
-"https://metadata.datadrivendiscovery.org/types/Target",
-"https://metadata.datadrivendiscovery.org/types/TrueTarget"
-]
-}
-}
+]
},
{
"type": "PRIMITIVE",
"primitive": {
"id": "a242314d-7955-483f-aed6-c74cd2b880df",
"version": "0.1.4",
"python_path": "d3m.primitives.collaborative_filtering.collaborative_filtering_link_prediction.DistilCollaborativeFiltering",
"name": "Collaborative filtering"
"id": "e0ad06ce-b484-46b0-a478-c567e1ea7e02",
"version": "0.3.0",
"python_path": "d3m.primitives.learner.random_forest.DistilEnsembleForest",
"name": "EnsembleForest"
},
"arguments": {
"inputs": {
@@ -157,14 +132,20 @@
},
"outputs": {
"type": "CONTAINER",
"data": "steps.4.produce"
"data": "steps.2.produce"
}
},
"outputs": [
{
"id": "produce"
}
-]
+],
+"hyperparams": {
+"metric": {
+"type": "VALUE",
+"data": "accuracy"
+}
+}
},
{
"type": "PRIMITIVE",
@@ -177,11 +158,11 @@
"arguments": {
"inputs": {
"type": "CONTAINER",
"data": "steps.5.produce"
"data": "steps.4.produce"
},
"reference": {
"type": "CONTAINER",
"data": "steps.2.produce"
"data": "steps.1.produce"
}
},
"outputs": [
@@ -191,5 +172,5 @@
]
}
],
"digest": "778e6038785fdd2c98c2f85a2ad11d24dfebfd00d6d76e121930eff97251e568"
"digest": "fc5483015960c8f3f5755e4fd45ab616ade2e401b086e094400081291f4ac63d"
}
{
"id": "231b69dc-3740-4b91-b77e-6263b1e8fa2f",
"id": "88e43cbf-2cfa-49e9-9b50-9e1b3a224a2f",
"schema": "https://metadata.datadrivendiscovery.org/schemas/v0/pipeline.json",
"created": "2020-01-23T21:06:32.589639Z",
"created": "2020-01-31T02:19:18.401593Z",
"inputs": [
{
"name": "inputs"
@@ -9,7 +9,7 @@
],
"outputs": [
{
"data": "steps.6.produce",
"data": "steps.5.produce",
"name": "output"
}
],
@@ -17,10 +17,10 @@
{
"type": "PRIMITIVE",
"primitive": {
"id": "f31f8c1f-d1c5-43e5-a4b2-2ae4a761ef2e",
"id": "f2a0cf71-0f61-41a7-a0ad-b907083ae56c",
"version": "0.2.0",
"python_path": "d3m.primitives.data_transformation.denormalize.Common",
"name": "Denormalize datasets"
"python_path": "d3m.primitives.data_preprocessing.audio_reader.DistilAudioDatasetLoader",
"name": "Load audio collection from dataset into a single dataframe"
},
"arguments": {
"inputs": {
@@ -31,26 +31,9 @@
"outputs": [
{
"id": "produce"
-}
-]
-},
-{
-"type": "PRIMITIVE",
-"primitive": {
-"id": "4b42ce1e-9b98-4a25-b68e-fad13311eb65",
-"version": "0.3.0",
-"python_path": "d3m.primitives.data_transformation.dataset_to_dataframe.Common",
-"name": "Extract a DataFrame from a Dataset"
-},
-"arguments": {
-"inputs": {
-"type": "CONTAINER",
-"data": "steps.0.produce"
-}
-},
-"outputs": [
+},
{
-"id": "produce"
+"id": "produce_collection"
}
]
},
@@ -65,7 +48,7 @@
"arguments": {
"inputs": {
"type": "CONTAINER",
"data": "steps.1.produce"
"data": "steps.0.produce"
}
},
"outputs": [
@@ -96,7 +79,7 @@
"arguments": {
"inputs": {
"type": "CONTAINER",
"data": "steps.2.produce"
"data": "steps.1.produce"
}
},
"outputs": [
@@ -108,7 +91,8 @@
"semantic_types": {
"type": "VALUE",
"data": [
"https://metadata.datadrivendiscovery.org/types/Attribute"
"https://metadata.datadrivendiscovery.org/types/Target",
"https://metadata.datadrivendiscovery.org/types/TrueTarget"
]
}
}
@@ -116,39 +100,30 @@
{
"type": "PRIMITIVE",
"primitive": {
"id": "4503a4c6-42f7-45a1-a1d4-ed69699cf5e1",
"version": "0.3.0",
"python_path": "d3m.primitives.data_transformation.extract_columns_by_semantic_types.Common",
"name": "Extracts columns by semantic type"
"id": "f2f149c8-a984-4f5b-8a9b-2f13ee0cf16d",
"version": "0.1.2",
"python_path": "d3m.primitives.feature_extraction.audio_transfer.DistilAudioTransfer",
"name": "Audio Transfer"
},
"arguments": {
"inputs": {
"type": "CONTAINER",
"data": "steps.2.produce"
"data": "steps.0.produce_collection"
}
},
"outputs": [
{
"id": "produce"
}
-],
-"hyperparams": {
-"semantic_types": {
-"type": "VALUE",
-"data": [
-"https://metadata.datadrivendiscovery.org/types/Target",
-"https://metadata.datadrivendiscovery.org/types/TrueTarget"
-]
-}
-}
+]
},
{
"type": "PRIMITIVE",
"primitive": {
"id": "7c305f3a-442a-41ad-b9db-8c437753b119",
"version": "0.1.1",
"python_path": "d3m.primitives.classification.bert_classifier.DistilBertPairClassification",
"name": "BERT pair classification"
"id": "e0ad06ce-b484-46b0-a478-c567e1ea7e02",
"version": "0.3.0",
"python_path": "d3m.primitives.learner.random_forest.DistilEnsembleForest",
"name": "EnsembleForest"
},
"arguments": {
"inputs": {
@@ -157,7 +132,7 @@
},
"outputs": {
"type": "CONTAINER",
"data": "steps.4.produce"
"data": "steps.2.produce"
}
},
"outputs": [
@@ -166,17 +141,9 @@
}
],
"hyperparams": {
"doc_col_0": {
"type": "VALUE",
"data": 1
},
"doc_col_1": {
"type": "VALUE",
"data": 3
},
"batch_size": {
"metric": {
"type": "VALUE",
"data": 16
"data": "accuracy"
}
}
},
@@ -191,11 +158,11 @@
"arguments": {
"inputs": {
"type": "CONTAINER",
"data": "steps.5.produce"
"data": "steps.4.produce"
},
"reference": {
"type": "CONTAINER",
"data": "steps.2.produce"
"data": "steps.1.produce"
}
},
"outputs": [
@@ -205,5 +172,5 @@
]
}
],
"digest": "dc2f882d2dcef3682708243ee877008cc89b587a5d5f4f82ef2ba6ebd465a979"
"digest": "fc5483015960c8f3f5755e4fd45ab616ade2e401b086e094400081291f4ac63d"
}
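
For reference, pipeline documents like the two above are normally executed with the D3M reference runtime rather than by hand. The sketch below is illustrative only and is not part of this merge request: it assumes the d3m core package plus the Distil primitives are installed, uses placeholder paths (pipeline.json, datasetDoc.json), and recalls the 2020-era d3m runtime API from memory, so exact call signatures may need minor adjustment.

# Illustrative sketch (not from this MR): fit and run one of these pipeline
# documents with the d3m reference runtime. All file paths are placeholders.
from d3m.container.dataset import Dataset
from d3m.metadata import base as metadata_base
from d3m.metadata.pipeline import Pipeline
from d3m.runtime import Runtime

# Parse the pipeline description (a JSON document like the ones in this diff).
with open("pipeline.json") as pipeline_file:
    pipeline = Pipeline.from_json(pipeline_file)

# Load a D3M dataset containing an audio collection and its target column.
dataset = Dataset.load("file:///path/to/dataset_TRAIN/datasetDoc.json")

# Fit the full pipeline, then produce predictions on the same input.
runtime = Runtime(pipeline=pipeline, context=metadata_base.Context.TESTING)
fit_result = runtime.fit(inputs=[dataset])
fit_result.check_success()

produce_result = runtime.produce(inputs=[dataset])
produce_result.check_success()
print(produce_result.values["outputs.0"])  # predictions DataFrame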