Commit 0753ae62 authored by Mitar

Merge branch 'jpl-updates' into 'master'

updated annotations

See merge request !173
parents 252dfa00 734d2743
Pipeline #113785100 passed with stages in 95 minutes and 36 seconds
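This merge re-pins every annotation's `package_uri` to jpl-primitives commit `3c76a344dab48ac559370f16debfd4a02a67251f` (previously `4472172b10af09829e426c3cb7accd7ee0717cbf`) and refreshes each annotation's `digest` accordingly. As a hedged sketch of how the updated pin could be installed — the use of `pip` via `subprocess` in the current Python environment is an assumption, not part of this merge:

```python
# Sketch only: install jpl_primitives at the commit pinned by the updated annotations.
# Assumes pip is available in the running Python environment.
import subprocess
import sys

PACKAGE_URI = (
    "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git"
    "@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
)

subprocess.check_call([sys.executable, "-m", "pip", "install", PACKAGE_URI])
```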
@@ -18,7 +18,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"schema": "https://metadata.datadrivendiscovery.org/schemas/v0/primitive.json",
@@ -326,5 +326,5 @@
},
"structural_type": "jpl_primitives.tpot.SKOneHotEncoder.SKOneHotEncoder",
"description": "Primitive wrapping for sklearn OneHotEncoder\n`tpot documentation <https://github.com/EpistasisLab/tpot/blob/master/tpot/builtins/one_hot_encoder.py>`_\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "20f733c6f34dae24f90323a4efb8e78d61e94565a90a2fa4bd473b572a240633"
"digest": "2b9800205a8967c038426247ab4bd2c7df698d6b2ee1ad621e0733c7d65195c5"
}
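Each annotation in this merge request follows the same pattern as the file above: a `PIP` entry in `installation` whose `package_uri` pins the jpl-primitives commit, plus a top-level `digest` that changes whenever the metadata changes. A minimal sketch for spot-checking a locally saved annotation — the file name `annotation.json` is hypothetical, and no attempt is made to recompute the digest itself:

```python
# Sketch only: check that a saved primitive annotation carries the new commit pin.
# "annotation.json" is a hypothetical local copy of one of the files in this diff.
import json

EXPECTED_REF = "3c76a344dab48ac559370f16debfd4a02a67251f"

with open("annotation.json") as f:
    annotation = json.load(f)

pinned = any(
    EXPECTED_REF in entry.get("package_uri", "")
    for entry in annotation.get("installation", [])
    if entry.get("type") == "PIP"
)

print("pinned to expected commit:", pinned)
print("digest present:", bool(annotation.get("digest")))
```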
@@ -18,7 +18,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"schema": "https://metadata.datadrivendiscovery.org/schemas/v0/primitive.json",
@@ -261,5 +261,5 @@
},
"structural_type": "jpl_primitives.tpot.SKCategoricalSelector.SKCategoricalSelector",
"description": "Primitive wrapping for TPOT CategoricalSelector\nGenerated from TPOT commit: https://github.com/EpistasisLab/tpot/commit/1764731234d47849456e6a59c935bc9ec7c62c8e#diff-006b5a271ae3e6b6735939a2c66f0ef1\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "e99a871359dcda992615c9927d7f023a5cd96814c6b3d385fcc8d78ff35a4560"
"digest": "ce8acef3f9dc56d624205e942c6b5cd0df7c8823334579071a01e6465d7e17eb"
}
@@ -18,7 +18,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"schema": "https://metadata.datadrivendiscovery.org/schemas/v0/primitive.json",
@@ -261,5 +261,5 @@
},
"structural_type": "jpl_primitives.tpot.SKContinuousSelector.SKContinuousSelector",
"description": "Primitive wrapping for TPOT ContinuousSelector\nGenerated from TPOT commit: https://github.com/EpistasisLab/tpot/commit/1764731234d47849456e6a59c935bc9ec7c62c8e#diff-006b5a271ae3e6b6735939a2c66f0ef1\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "4362d27da03c6b14d2b8f53b4544018b766606cae9ca28682cfdbf844f518715"
"digest": "e0bb396f3237e48bc4fdae4fa5baa7b8c598cc95b9756960faf676fb03a45648"
}
@@ -18,7 +18,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"schema": "https://metadata.datadrivendiscovery.org/schemas/v0/primitive.json",
@@ -248,5 +248,5 @@
},
"structural_type": "jpl_primitives.tpot.SKZeroCount.SKZeroCount",
"description": "Primitive wrapping for TPOT ZeroCount\nGenerated from TPOT commit: https://github.com/EpistasisLab/tpot/commit/1764731234d47849456e6a59c935bc9ec7c62c8e#diff-006b5a271ae3e6b6735939a2c66f0ef1\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "db76428d63020232152cba944994c14a4c0c38eab7d2fa29dfba587202a7906c"
"digest": "2c0d3809f9f1e8447aabb80e31aba77838a24b78bcd3f21e20810eca784c8b1c"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.add.KerasWrap",
@@ -215,5 +215,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.add.Add",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nLayer that adds a list of inputs.\n\n It takes as input a list of tensors,\n all of the same shape, and returns\n a single tensor (also of the same shape).\n\n Examples:\n\n ```python\n import keras\n\n input1 = keras.layers.Input(shape=(16,))\n x1 = keras.layers.Dense(8, activation='relu')(input1)\n input2 = keras.layers.Input(shape=(32,))\n x2 = keras.layers.Dense(8, activation='relu')(input2)\n # equivalent to `added = keras.layers.add([x1, x2])`\n added = keras.layers.Add()([x1, x2])\n out = keras.layers.Dense(4)(added)\n model = keras.models.Model(inputs=[input1, input2], outputs=out)\n ```\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "8a585dd4f475367ae6ef9f72ea1745f58c2196888b4d113dd048713e4dac04d2"
"digest": "77985185ae9433ca52a776d8e21dd7743438b07ca8b2048996d742290492ced1"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.average_pooling_1d.KerasWrap",
@@ -276,5 +276,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.average_pooling_1d.AveragePooling1D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nAverage pooling for temporal data.\n\n Arguments:\n pool_size: Integer, size of the average pooling windows.\n strides: Integer, or None. Factor by which to downscale.\n E.g. 2 will halve the input.\n If None, it will default to `pool_size`.\n padding: One of `\"valid\"` or `\"same\"` (case-insensitive).\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, steps, features)` while `channels_first`\n corresponds to inputs with shape\n `(batch, features, steps)`.\n\n Input shape:\n - If `data_format='channels_last'`:\n 3D tensor with shape `(batch_size, steps, features)`.\n - If `data_format='channels_first'`:\n 3D tensor with shape `(batch_size, features, steps)`.\n\n Output shape:\n - If `data_format='channels_last'`:\n 3D tensor with shape `(batch_size, downsampled_steps, features)`.\n - If `data_format='channels_first'`:\n 3D tensor with shape `(batch_size, features, downsampled_steps)`.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "6fe22b69e0d12304bc81909dbd3b802c9e9282ef28f730260b5b1b200f9f3892"
"digest": "943d04a41be6a7c1097392600394e514917de2c354aada5072d55dcaa4284af9"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.average_pooling_2d.KerasWrap",
@@ -276,5 +276,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.average_pooling_2d.AveragePooling2D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nAverage pooling operation for spatial data.\n\n Arguments:\n pool_size: integer or tuple of 2 integers,\n factors by which to downscale (vertical, horizontal).\n `(2, 2)` will halve the input in both spatial dimension.\n If only one integer is specified, the same window length\n will be used for both dimensions.\n strides: Integer, tuple of 2 integers, or None.\n Strides values.\n If None, it will default to `pool_size`.\n padding: One of `\"valid\"` or `\"same\"` (case-insensitive).\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, height, width, channels)` while `channels_first`\n corresponds to inputs with shape\n `(batch, channels, height, width)`.\n It defaults to the `image_data_format` value found in your\n Keras config file at `~/.keras/keras.json`.\n If you never set it, then it will be \"channels_last\".\n\n Input shape:\n - If `data_format='channels_last'`:\n 4D tensor with shape `(batch_size, rows, cols, channels)`.\n - If `data_format='channels_first'`:\n 4D tensor with shape `(batch_size, channels, rows, cols)`.\n\n Output shape:\n - If `data_format='channels_last'`:\n 4D tensor with shape `(batch_size, pooled_rows, pooled_cols, channels)`.\n - If `data_format='channels_first'`:\n 4D tensor with shape `(batch_size, channels, pooled_rows, pooled_cols)`.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "e49e562350910ea41fbb2852ef979896960d9186da72cd7f173c52d10c9c7f26"
"digest": "65278dda9659d14060e2fb9c595b3409883b737aca9135ba9058c86e29a75c5f"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.average_pooling_3d.KerasWrap",
@@ -276,5 +276,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.average_pooling_3d.AveragePooling3D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nAverage pooling operation for 3D data (spatial or spatio-temporal).\n\n Arguments:\n pool_size: tuple of 3 integers,\n factors by which to downscale (dim1, dim2, dim3).\n `(2, 2, 2)` will halve the size of the 3D input in each dimension.\n strides: tuple of 3 integers, or None. Strides values.\n padding: One of `\"valid\"` or `\"same\"` (case-insensitive).\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, spatial_dim1, spatial_dim2, spatial_dim3, channels)`\n while `channels_first` corresponds to inputs with shape\n `(batch, channels, spatial_dim1, spatial_dim2, spatial_dim3)`.\n It defaults to the `image_data_format` value found in your\n Keras config file at `~/.keras/keras.json`.\n If you never set it, then it will be \"channels_last\".\n\n Input shape:\n - If `data_format='channels_last'`:\n 5D tensor with shape:\n `(batch_size, spatial_dim1, spatial_dim2, spatial_dim3, channels)`\n - If `data_format='channels_first'`:\n 5D tensor with shape:\n `(batch_size, channels, spatial_dim1, spatial_dim2, spatial_dim3)`\n\n Output shape:\n - If `data_format='channels_last'`:\n 5D tensor with shape:\n `(batch_size, pooled_dim1, pooled_dim2, pooled_dim3, channels)`\n - If `data_format='channels_first'`:\n 5D tensor with shape:\n `(batch_size, channels, pooled_dim1, pooled_dim2, pooled_dim3)`\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "986e06622d8ae6bad62571be631fe0700d02f18c8cf6c5377c79a0160d3e586f"
"digest": "57da812c7d983d3a9387641911660d7b6d64183094b68aaf72058ffdde11aafe"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.batch_normalization.KerasWrap",
@@ -1752,5 +1752,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.batch_normalization.BatchNormalization",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nBase class of Batch normalization layer (Ioffe and Szegedy, 2014).\n\n Normalize the activations of the previous layer at each batch,\n i.e. applies a transformation that maintains the mean activation\n close to 0 and the activation standard deviation close to 1.\n\n Arguments:\n axis: Integer, the axis that should be normalized\n (typically the features axis).\n For instance, after a `Conv2D` layer with\n `data_format=\"channels_first\"`,\n set `axis=1` in `BatchNormalization`.\n momentum: Momentum for the moving average.\n epsilon: Small float added to variance to avoid dividing by zero.\n center: If True, add offset of `beta` to normalized tensor.\n If False, `beta` is ignored.\n scale: If True, multiply by `gamma`.\n If False, `gamma` is not used.\n When the next layer is linear (also e.g. `nn.relu`),\n this can be disabled since the scaling\n will be done by the next layer.\n beta_initializer: Initializer for the beta weight.\n gamma_initializer: Initializer for the gamma weight.\n moving_mean_initializer: Initializer for the moving mean.\n moving_variance_initializer: Initializer for the moving variance.\n beta_regularizer: Optional regularizer for the beta weight.\n gamma_regularizer: Optional regularizer for the gamma weight.\n beta_constraint: Optional constraint for the beta weight.\n gamma_constraint: Optional constraint for the gamma weight.\n renorm: Whether to use Batch Renormalization\n (https://arxiv.org/abs/1702.03275). This adds extra variables during\n training. The inference is the same for either value of this parameter.\n renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to\n scalar `Tensors` used to clip the renorm correction. The correction\n `(r, d)` is used as `corrected_value = normalized_value * r + d`, with\n `r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax, rmin,\n dmax are set to inf, 0, inf, respectively.\n renorm_momentum: Momentum used to update the moving means and standard\n deviations with renorm. Unlike `momentum`, this affects training\n and should be neither too small (which would add noise) nor too large\n (which would give stale estimates). Note that `momentum` is still applied\n to get the means and variances for inference.\n fused: if `True`, use a faster, fused implementation, or raise a ValueError\n if the fused implementation cannot be used. If `None`, use the faster\n implementation if possible. If False, do not used the fused\n implementation.\n trainable: Boolean, if `True` the variables will be marked as trainable.\n virtual_batch_size: An `int`. By default, `virtual_batch_size` is `None`,\n which means batch normalization is performed across the whole batch. 
When\n `virtual_batch_size` is not `None`, instead perform \"Ghost Batch\n Normalization\", which creates virtual sub-batches which are each\n normalized separately (with shared gamma, beta, and moving statistics).\n Must divide the actual batch size during execution.\n adjustment: A function taking the `Tensor` containing the (dynamic) shape of\n the input tensor and returning a pair (scale, bias) to apply to the\n normalized values (before gamma and beta), only during training. For\n example, if axis==-1,\n `adjustment = lambda shape: (\n tf.random.uniform(shape[-1:], 0.93, 1.07),\n tf.random.uniform(shape[-1:], -0.1, 0.1))`\n will scale the normalized value by up to 7% up or down, then shift the\n result by up to 0.1 (with independent scaling and bias for each feature\n but shared across all examples), and finally apply gamma and/or beta. If\n `None`, no adjustment is applied. Cannot be specified if\n virtual_batch_size is specified.\n\n Call arguments:\n inputs: Input tensor (of any rank).\n training: Python boolean indicating whether the layer should behave in\n training mode or in inference mode.\n - `training=True`: The layer will normalize its inputs using the\n mean and variance of the current batch of inputs.\n - `training=False`: The layer will normalize its inputs using the\n mean and variance of its moving statistics, learned during training.\n\n Input shape:\n Arbitrary. Use the keyword argument `input_shape`\n (tuple of integers, does not include the samples axis)\n when using this layer as the first layer in a model.\n\n Output shape:\n Same shape as input.\n\n References:\n - [Batch Normalization: Accelerating Deep Network Training by Reducing\n Internal Covariate Shift](https://arxiv.org/abs/1502.03167)\n\n {{TRAINABLE_ATTRIBUTE_NOTE}}\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "08a811588de33934ca60baabfe74e5ae2ba95b775f8c9f2cdf99b8b010d2c5b2"
"digest": "14531dfc2e261f75afe17da10f8d80429560a54df00f90ed8e0236e73f164f09"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.concat.KerasWrap",
@@ -215,5 +215,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.concat.Concatenate",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nLayer that concatenates a list of inputs.\n\n It takes as input a list of tensors,\n all of the same shape except for the concatenation axis,\n and returns a single tensor, the concatenation of all inputs.\n\n Arguments:\n axis: Axis along which to concatenate.\n **kwargs: standard layer keyword arguments.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "1408f29b5035d549fd467f0dbe7b5f85070a448842c7161344689194a7c6564f"
"digest": "d2d578aa1052e547fd185e840b7dd4e12b9573def673ef3701215c8554c15979"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.convolution_1d.KerasWrap",
@@ -1362,5 +1362,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.convolution_1d.Conv1D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\n1D convolution layer (e.g. temporal convolution).\n\n This layer creates a convolution kernel that is convolved\n with the layer input over a single spatial (or temporal) dimension\n to produce a tensor of outputs.\n If `use_bias` is True, a bias vector is created and added to the outputs.\n Finally, if `activation` is not `None`,\n it is applied to the outputs as well.\n\n When using this layer as the first layer in a model,\n provide an `input_shape` argument\n (tuple of integers or `None`, e.g.\n `(10, 128)` for sequences of 10 vectors of 128-dimensional vectors,\n or `(None, 128)` for variable-length sequences of 128-dimensional vectors.\n\n Arguments:\n filters: Integer, the dimensionality of the output space\n (i.e. the number of output filters in the convolution).\n kernel_size: An integer or tuple/list of a single integer,\n specifying the length of the 1D convolution window.\n strides: An integer or tuple/list of a single integer,\n specifying the stride length of the convolution.\n Specifying any stride value != 1 is incompatible with specifying\n any `dilation_rate` value != 1.\n padding: One of `\"valid\"`, `\"causal\"` or `\"same\"` (case-insensitive).\n `\"causal\"` results in causal (dilated) convolutions, e.g. output[t]\n does not depend on input[t+1:]. Useful when modeling temporal data\n where the model should not violate the temporal order.\n See [WaveNet: A Generative Model for Raw Audio, section\n 2.1](https://arxiv.org/abs/1609.03499).\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n dilation_rate: an integer or tuple/list of a single integer, specifying\n the dilation rate to use for dilated convolution.\n Currently, specifying any `dilation_rate` value != 1 is\n incompatible with specifying any `strides` value != 1.\n activation: Activation function to use.\n If you don't specify anything, no activation is applied\n (ie. 
\"linear\" activation: `a(x) = x`).\n use_bias: Boolean, whether the layer uses a bias vector.\n kernel_initializer: Initializer for the `kernel` weights matrix.\n bias_initializer: Initializer for the bias vector.\n kernel_regularizer: Regularizer function applied to\n the `kernel` weights matrix.\n bias_regularizer: Regularizer function applied to the bias vector.\n activity_regularizer: Regularizer function applied to\n the output of the layer (its \"activation\")..\n kernel_constraint: Constraint function applied to the kernel matrix.\n bias_constraint: Constraint function applied to the bias vector.\n\n Examples:\n ```python\n # Small convolutional model for 128-length vectors with 6 timesteps\n # model.input_shape == (None, 6, 128)\n\n model = Sequential()\n model.add(Conv1D(32, 3,\n activation='relu',\n input_shape=(6, 128)))\n\n # now: model.output_shape == (None, 4, 32)\n ```\n\n Input shape:\n 3D tensor with shape: `(batch_size, steps, input_dim)`\n\n Output shape:\n 3D tensor with shape: `(batch_size, new_steps, filters)`\n `steps` value might have changed due to padding or strides.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "7a6cc31a674d22d8353ef5e3889bc80470998ac99cf6fbbe2b7b416f71ec6627"
"digest": "431ad20326636163b0df4d6172044e0c71fdac7e6838bf7b98ed83eff4dc4b74"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.convolution_2d.KerasWrap",
@@ -1362,5 +1362,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.convolution_2d.Conv2D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\n2D convolution layer (e.g. spatial convolution over images).\n\n This layer creates a convolution kernel that is convolved\n with the layer input to produce a tensor of\n outputs. If `use_bias` is True,\n a bias vector is created and added to the outputs. Finally, if\n `activation` is not `None`, it is applied to the outputs as well.\n\n When using this layer as the first layer in a model,\n provide the keyword argument `input_shape`\n (tuple of integers, does not include the sample axis),\n e.g. `input_shape=(128, 128, 3)` for 128x128 RGB pictures\n in `data_format=\"channels_last\"`.\n\n Arguments:\n filters: Integer, the dimensionality of the output space\n (i.e. the number of output filters in the convolution).\n kernel_size: An integer or tuple/list of 2 integers, specifying the\n height and width of the 2D convolution window.\n Can be a single integer to specify the same value for\n all spatial dimensions.\n strides: An integer or tuple/list of 2 integers,\n specifying the strides of the convolution along the height and width.\n Can be a single integer to specify the same value for\n all spatial dimensions.\n Specifying any stride value != 1 is incompatible with specifying\n any `dilation_rate` value != 1.\n padding: one of `\"valid\"` or `\"same\"` (case-insensitive).\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, height, width, channels)` while `channels_first`\n corresponds to inputs with shape\n `(batch, channels, height, width)`.\n It defaults to the `image_data_format` value found in your\n Keras config file at `~/.keras/keras.json`.\n If you never set it, then it will be \"channels_last\".\n dilation_rate: an integer or tuple/list of 2 integers, specifying\n the dilation rate to use for dilated convolution.\n Can be a single integer to specify the same value for\n all spatial dimensions.\n Currently, specifying any `dilation_rate` value != 1 is\n incompatible with specifying any stride value != 1.\n activation: Activation function to use.\n If you don't specify anything, no activation is applied\n (ie. 
\"linear\" activation: `a(x) = x`).\n use_bias: Boolean, whether the layer uses a bias vector.\n kernel_initializer: Initializer for the `kernel` weights matrix.\n bias_initializer: Initializer for the bias vector.\n kernel_regularizer: Regularizer function applied to\n the `kernel` weights matrix.\n bias_regularizer: Regularizer function applied to the bias vector.\n activity_regularizer: Regularizer function applied to\n the output of the layer (its \"activation\")..\n kernel_constraint: Constraint function applied to the kernel matrix.\n bias_constraint: Constraint function applied to the bias vector.\n\n Input shape:\n 4D tensor with shape:\n `(samples, channels, rows, cols)` if data_format='channels_first'\n or 4D tensor with shape:\n `(samples, rows, cols, channels)` if data_format='channels_last'.\n\n Output shape:\n 4D tensor with shape:\n `(samples, filters, new_rows, new_cols)` if data_format='channels_first'\n or 4D tensor with shape:\n `(samples, new_rows, new_cols, filters)` if data_format='channels_last'.\n `rows` and `cols` values might have changed due to padding.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "7636cf9a124ccd65cdf5f66d2cbc678d82715cbe541b68aa3631981516726cee"
"digest": "26e4f42118f63052ba752e2bf4de04e562fbaeb1fe63a4971cc9ed12cd56d2c5"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.convolution_3d.KerasWrap",
@@ -1362,5 +1362,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.convolution_3d.Conv3D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\n3D convolution layer (e.g. spatial convolution over volumes).\n\n This layer creates a convolution kernel that is convolved\n with the layer input to produce a tensor of\n outputs. If `use_bias` is True,\n a bias vector is created and added to the outputs. Finally, if\n `activation` is not `None`, it is applied to the outputs as well.\n\n When using this layer as the first layer in a model,\n provide the keyword argument `input_shape`\n (tuple of integers, does not include the sample axis),\n e.g. `input_shape=(128, 128, 128, 1)` for 128x128x128 volumes\n with a single channel,\n in `data_format=\"channels_last\"`.\n\n Arguments:\n filters: Integer, the dimensionality of the output space\n (i.e. the number of output filters in the convolution).\n kernel_size: An integer or tuple/list of 3 integers, specifying the\n depth, height and width of the 3D convolution window.\n Can be a single integer to specify the same value for\n all spatial dimensions.\n strides: An integer or tuple/list of 3 integers,\n specifying the strides of the convolution along each spatial\n dimension.\n Can be a single integer to specify the same value for\n all spatial dimensions.\n Specifying any stride value != 1 is incompatible with specifying\n any `dilation_rate` value != 1.\n padding: one of `\"valid\"` or `\"same\"` (case-insensitive).\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, spatial_dim1, spatial_dim2, spatial_dim3, channels)`\n while `channels_first` corresponds to inputs with shape\n `(batch, channels, spatial_dim1, spatial_dim2, spatial_dim3)`.\n It defaults to the `image_data_format` value found in your\n Keras config file at `~/.keras/keras.json`.\n If you never set it, then it will be \"channels_last\".\n dilation_rate: an integer or tuple/list of 3 integers, specifying\n the dilation rate to use for dilated convolution.\n Can be a single integer to specify the same value for\n all spatial dimensions.\n Currently, specifying any `dilation_rate` value != 1 is\n incompatible with specifying any stride value != 1.\n activation: Activation function to use.\n If you don't specify anything, no activation is applied\n (ie. 
\"linear\" activation: `a(x) = x`).\n use_bias: Boolean, whether the layer uses a bias vector.\n kernel_initializer: Initializer for the `kernel` weights matrix.\n bias_initializer: Initializer for the bias vector.\n kernel_regularizer: Regularizer function applied to\n the `kernel` weights matrix.\n bias_regularizer: Regularizer function applied to the bias vector.\n activity_regularizer: Regularizer function applied to\n the output of the layer (its \"activation\")..\n kernel_constraint: Constraint function applied to the kernel matrix.\n bias_constraint: Constraint function applied to the bias vector.\n\n Input shape:\n 5D tensor with shape:\n `(samples, channels, conv_dim1, conv_dim2, conv_dim3)` if\n data_format='channels_first'\n or 5D tensor with shape:\n `(samples, conv_dim1, conv_dim2, conv_dim3, channels)` if\n data_format='channels_last'.\n\n Output shape:\n 5D tensor with shape:\n `(samples, filters, new_conv_dim1, new_conv_dim2, new_conv_dim3)` if\n data_format='channels_first'\n or 5D tensor with shape:\n `(samples, new_conv_dim1, new_conv_dim2, new_conv_dim3, filters)` if\n data_format='channels_last'.\n `new_conv_dim1`, `new_conv_dim2` and `new_conv_dim3` values might have\n changed due to padding.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "5eb574cc350c4bb508a3b7b90c72f449d79adddc3a60e5e6f58621068caf8d3d"
"digest": "edbd6bac0d9fc1c3f14fca4515e600cf9d26d7bf6463a8c07612414da5ac7ed9"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.dense.KerasWrap",
@@ -1291,5 +1291,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.dense.Dense",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nJust your regular densely-connected NN layer.\n\n `Dense` implements the operation:\n `output = activation(dot(input, kernel) + bias)`\n where `activation` is the element-wise activation function\n passed as the `activation` argument, `kernel` is a weights matrix\n created by the layer, and `bias` is a bias vector created by the layer\n (only applicable if `use_bias` is `True`).\n\n Note: If the input to the layer has a rank greater than 2, then\n it is flattened prior to the initial dot product with `kernel`.\n\n Example:\n\n ```python\n # as first layer in a sequential model:\n model = Sequential()\n model.add(Dense(32, input_shape=(16,)))\n # now the model will take as input arrays of shape (*, 16)\n # and output arrays of shape (*, 32)\n\n # after the first layer, you don't need to specify\n # the size of the input anymore:\n model.add(Dense(32))\n ```\n\n Arguments:\n units: Positive integer, dimensionality of the output space.\n activation: Activation function to use.\n If you don't specify anything, no activation is applied\n (ie. \"linear\" activation: `a(x) = x`).\n use_bias: Boolean, whether the layer uses a bias vector.\n kernel_initializer: Initializer for the `kernel` weights matrix.\n bias_initializer: Initializer for the bias vector.\n kernel_regularizer: Regularizer function applied to\n the `kernel` weights matrix.\n bias_regularizer: Regularizer function applied to the bias vector.\n activity_regularizer: Regularizer function applied to\n the output of the layer (its \"activation\")..\n kernel_constraint: Constraint function applied to\n the `kernel` weights matrix.\n bias_constraint: Constraint function applied to the bias vector.\n\n Input shape:\n N-D tensor with shape: `(batch_size, ..., input_dim)`.\n The most common situation would be\n a 2D input with shape `(batch_size, input_dim)`.\n\n Output shape:\n N-D tensor with shape: `(batch_size, ..., units)`.\n For instance, for a 2D input with shape `(batch_size, input_dim)`,\n the output would have shape `(batch_size, units)`.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "b8a9cd8c0ac411dcb9261609d0dad847e59ede782212232bb2a36b4159294474"
"digest": "f9a5043f892d265792152d8d63623fdc1c3c327296132031e47dfa40755ec8b4"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.dropout.KerasWrap",
@@ -232,5 +232,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.dropout.Dropout",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nApplies Dropout to the input.\n\n Dropout consists in randomly setting\n a fraction `rate` of input units to 0 at each update during training time,\n which helps prevent overfitting.\n\n Arguments:\n rate: Float between 0 and 1. Fraction of the input units to drop.\n noise_shape: 1D integer tensor representing the shape of the\n binary dropout mask that will be multiplied with the input.\n For instance, if your inputs have shape\n `(batch_size, timesteps, features)` and\n you want the dropout mask to be the same for all timesteps,\n you can use `noise_shape=(batch_size, 1, features)`.\n seed: A Python integer to use as random seed.\n\n Call arguments:\n inputs: Input tensor (of any rank).\n training: Python boolean indicating whether the layer should behave in\n training mode (adding dropout) or in inference mode (doing nothing).\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "0d07c59d6385eb22c086713991ff3b6bc6721047f0e206ac8e32cc0846a31071"
"digest": "d7f3b3f30159a7454b6212b9d5867ed91cf4764765dab60b2f4e1a399a229876"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.flatten.KerasWrap",
@@ -219,5 +219,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.flatten.Flatten",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nFlattens the input. Does not affect the batch size.\n\n If inputs are shaped `(batch,)` without a channel dimension, then flattening\n adds an extra channel dimension and output shapes are `(batch, 1)`.\n\n Arguments:\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, ..., channels)` while `channels_first` corresponds to\n inputs with shape `(batch, channels, ...)`.\n It defaults to the `image_data_format` value found in your\n Keras config file at `~/.keras/keras.json`.\n If you never set it, then it will be \"channels_last\".\n\n Example:\n\n ```python\n model = Sequential()\n model.add(Convolution2D(64, 3, 3,\n border_mode='same',\n input_shape=(3, 32, 32)))\n # now: model.output_shape == (None, 64, 32, 32)\n\n model.add(Flatten())\n # now: model.output_shape == (None, 65536)\n ```\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "0c4f7a43dec862852e34470283c177b0ae7403e11d9332875da43851f55b985f"
"digest": "13f8d1125e659fbdb02f92ee3427c6c5fdd3e0942aaf2a119e09d702d382072c"
}
@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.global_average_pooling_1d.KerasWrap",
......@@ -233,5 +233,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.global_average_pooling_1d.GlobalAveragePooling1D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nGlobal average pooling operation for temporal data.\n\n Arguments:\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, steps, features)` while `channels_first`\n corresponds to inputs with shape\n `(batch, features, steps)`.\n\n Call arguments:\n inputs: A 3D tensor.\n mask: Binary tensor of shape `(batch_size, steps)` indicating whether\n a given step should be masked (excluded from the average).\n\n Input shape:\n - If `data_format='channels_last'`:\n 3D tensor with shape:\n `(batch_size, steps, features)`\n - If `data_format='channels_first'`:\n 3D tensor with shape:\n `(batch_size, features, steps)`\n\n Output shape:\n 2D tensor with shape `(batch_size, features)`.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "a6090ebb73d35dfda86a326b446f92cb5b307a4125270e2f99992247652004ba"
"digest": "c99cef702a96c0b67f861e52cecc19314f9e60ad56ce9ab95343ace0a43d1243"
}
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.global_average_pooling_2d.KerasWrap",
......@@ -233,5 +233,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.global_average_pooling_2d.GlobalAveragePooling2D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nGlobal average pooling operation for spatial data.\n\n Arguments:\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, height, width, channels)` while `channels_first`\n corresponds to inputs with shape\n `(batch, channels, height, width)`.\n It defaults to the `image_data_format` value found in your\n Keras config file at `~/.keras/keras.json`.\n If you never set it, then it will be \"channels_last\".\n\n Input shape:\n - If `data_format='channels_last'`:\n 4D tensor with shape `(batch_size, rows, cols, channels)`.\n - If `data_format='channels_first'`:\n 4D tensor with shape `(batch_size, channels, rows, cols)`.\n\n Output shape:\n 2D tensor with shape `(batch_size, channels)`.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "95a0c1d641ea9ac3e5579bb93896e114cb86332f0b16e72d7f212071c048009b"
"digest": "951d59bb939332cd27b0573a2d26f3882556c37aeb47b9e20edfa19c495f7bb2"
}
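As a quick sanity check of the input/output shapes quoted above, the wrapped layer can be exercised directly in tf.keras (standard Keras usage; the tensor sizes are arbitrary):

```python
# GlobalAveragePooling2D collapses the spatial dimensions: a 4D tensor
# (batch, rows, cols, channels) becomes a 2D tensor (batch, channels).
import numpy as np
import tensorflow as tf

x = np.random.rand(8, 32, 32, 64).astype('float32')   # channels_last input
y = tf.keras.layers.GlobalAveragePooling2D()(x)
print(y.shape)                                         # (8, 64)
```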
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.global_average_pooling_3d.KerasWrap",
......@@ -233,5 +233,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.global_average_pooling_3d.GlobalAveragePooling3D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nGlobal Average pooling operation for 3D data.\n\n Arguments:\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, spatial_dim1, spatial_dim2, spatial_dim3, channels)`\n while `channels_first` corresponds to inputs with shape\n `(batch, channels, spatial_dim1, spatial_dim2, spatial_dim3)`.\n It defaults to the `image_data_format` value found in your\n Keras config file at `~/.keras/keras.json`.\n If you never set it, then it will be \"channels_last\".\n\n Input shape:\n - If `data_format='channels_last'`:\n 5D tensor with shape:\n `(batch_size, spatial_dim1, spatial_dim2, spatial_dim3, channels)`\n - If `data_format='channels_first'`:\n 5D tensor with shape:\n `(batch_size, channels, spatial_dim1, spatial_dim2, spatial_dim3)`\n\n Output shape:\n 2D tensor with shape `(batch_size, channels)`.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "6161438a7847181a4ec71ee9e2c73c22dcb359c0097f445677c526e2218ebcbc"
"digest": "6e15b9d55f43cc4ff7aa3eee3095650c153a5d3020846355dc9e4727cf8a1a5a"
}
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.max_pooling_1d.KerasWrap",
......@@ -276,5 +276,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.max_pooling_1d.MaxPooling1D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nMax pooling operation for temporal data.\n\n Arguments:\n pool_size: Integer, size of the max pooling windows.\n strides: Integer, or None. Factor by which to downscale.\n E.g. 2 will halve the input.\n If None, it will default to `pool_size`.\n padding: One of `\"valid\"` or `\"same\"` (case-insensitive).\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, steps, features)` while `channels_first`\n corresponds to inputs with shape\n `(batch, features, steps)`.\n\n Input shape:\n - If `data_format='channels_last'`:\n 3D tensor with shape `(batch_size, steps, features)`.\n - If `data_format='channels_first'`:\n 3D tensor with shape `(batch_size, features, steps)`.\n\n Output shape:\n - If `data_format='channels_last'`:\n 3D tensor with shape `(batch_size, downsampled_steps, features)`.\n - If `data_format='channels_first'`:\n 3D tensor with shape `(batch_size, features, downsampled_steps)`.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "23ba9218f31e3dfb99c5dec64053969afe631db76fbafeccfa30aa69273b0362"
"digest": "fa35b6d074f08ccc9b645d0215b70529ba51569ec561bc189853c4be93a242a5"
}
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.max_pooling_2d.KerasWrap",
......@@ -287,5 +287,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.max_pooling_2d.MaxPooling2D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nMax pooling operation for spatial data.\n\n Arguments:\n pool_size: integer or tuple of 2 integers,\n factors by which to downscale (vertical, horizontal).\n `(2, 2)` will halve the input in both spatial dimension.\n If only one integer is specified, the same window length\n will be used for both dimensions.\n strides: Integer, tuple of 2 integers, or None.\n Strides values.\n If None, it will default to `pool_size`.\n padding: One of `\"valid\"` or `\"same\"` (case-insensitive).\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, height, width, channels)` while `channels_first`\n corresponds to inputs with shape\n `(batch, channels, height, width)`.\n It defaults to the `image_data_format` value found in your\n Keras config file at `~/.keras/keras.json`.\n If you never set it, then it will be \"channels_last\".\n\n Input shape:\n - If `data_format='channels_last'`:\n 4D tensor with shape `(batch_size, rows, cols, channels)`.\n - If `data_format='channels_first'`:\n 4D tensor with shape `(batch_size, channels, rows, cols)`.\n\n Output shape:\n - If `data_format='channels_last'`:\n 4D tensor with shape `(batch_size, pooled_rows, pooled_cols, channels)`.\n - If `data_format='channels_first'`:\n 4D tensor with shape `(batch_size, channels, pooled_rows, pooled_cols)`.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "782cebec9e675ea144b58516bd9805a156f74af628c346accb10a9c1f57c9a76"
"digest": "12750376b1e3699926360b76c931ff04992a8a8be80027e16e4543147c568dad"
}
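The pool_size, strides, and padding arguments listed above behave exactly as in plain Keras; a small shape check (standard Keras usage; the sizes are arbitrary):

```python
# With pool_size=(2, 2), strides defaulting to pool_size, and 'valid'
# padding, each spatial dimension is halved: (4, 32, 32, 16) -> (4, 16, 16, 16).
import numpy as np
import tensorflow as tf

x = np.random.rand(4, 32, 32, 16).astype('float32')
pooled = tf.keras.layers.MaxPooling2D(pool_size=(2, 2), padding='valid')(x)
print(pooled.shape)                                    # (4, 16, 16, 16)
```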
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.max_pooling_3d.KerasWrap",
......@@ -276,5 +276,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.max_pooling_3d.MaxPooling3D",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nMax pooling operation for 3D data (spatial or spatio-temporal).\n\n Arguments:\n pool_size: Tuple of 3 integers,\n factors by which to downscale (dim1, dim2, dim3).\n `(2, 2, 2)` will halve the size of the 3D input in each dimension.\n strides: tuple of 3 integers, or None. Strides values.\n padding: One of `\"valid\"` or `\"same\"` (case-insensitive).\n data_format: A string,\n one of `channels_last` (default) or `channels_first`.\n The ordering of the dimensions in the inputs.\n `channels_last` corresponds to inputs with shape\n `(batch, spatial_dim1, spatial_dim2, spatial_dim3, channels)`\n while `channels_first` corresponds to inputs with shape\n `(batch, channels, spatial_dim1, spatial_dim2, spatial_dim3)`.\n It defaults to the `image_data_format` value found in your\n Keras config file at `~/.keras/keras.json`.\n If you never set it, then it will be \"channels_last\".\n\n Input shape:\n - If `data_format='channels_last'`:\n 5D tensor with shape:\n `(batch_size, spatial_dim1, spatial_dim2, spatial_dim3, channels)`\n - If `data_format='channels_first'`:\n 5D tensor with shape:\n `(batch_size, channels, spatial_dim1, spatial_dim2, spatial_dim3)`\n\n Output shape:\n - If `data_format='channels_last'`:\n 5D tensor with shape:\n `(batch_size, pooled_dim1, pooled_dim2, pooled_dim3, channels)`\n - If `data_format='channels_first'`:\n 5D tensor with shape:\n `(batch_size, channels, pooled_dim1, pooled_dim2, pooled_dim3)`\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "805ad2a5a199802f613f3a8644687426b8092472936aabd6f102c33a9472ac1f"
"digest": "086e5009478413983269c350bfe9a983290e27e5a176b86011d3fe847909654a"
}
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.null.KerasWrap",
......@@ -189,5 +189,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.null.Null",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.This is a special Null primitive to avoid circular imports with defaults.",
"digest": "9fc27e7fefb363e369b6052d2aac6c85887842d0935ef8c4b05a11807717ba3b"
"digest": "e5d6fe7521dff50c02336510ee2cadd8af67a0950d0bdee842e0a119238d32d6"
}
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.layer.subtract.KerasWrap",
......@@ -245,5 +245,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.layers.subtract.Subtract",
"description": "A neural network layer that has been wrapped from Keras. You can assemble these layers togetherto form any architecture\nnerual network. To assemble, every layer has a hyperparameter 'previous_layer'. This hyperparameter takes in another Keras wrapped\nlayer primitive and you are allowed to chain your neural network together. This is chained until the very first layer you\nwant to serve as your initial input layer.\n\nPure Keras Documentation:\n\nLayer that subtracts two inputs.\n\n It takes as input a list of tensors of size 2,\n both of the same shape, and returns a single tensor, (inputs[0] - inputs[1]),\n also of the same shape.\n\n Examples:\n\n ```python\n import keras\n\n input1 = keras.layers.Input(shape=(16,))\n x1 = keras.layers.Dense(8, activation='relu')(input1)\n input2 = keras.layers.Input(shape=(32,))\n x2 = keras.layers.Dense(8, activation='relu')(input2)\n # Equivalent to subtracted = keras.layers.subtract([x1, x2])\n subtracted = keras.layers.Subtract()([x1, x2])\n\n out = keras.layers.Dense(4)(subtracted)\n model = keras.models.Model(inputs=[input1, input2], outputs=out)\n ```\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "5f8237217854d0f68f15d523e8a5a591233949d5df97ddf12dcfb0a4e6fbd31c"
"digest": "459c1d919a714c2a23e96a4068794d4f50e787fe1b1f7706d1c9b29c0dd3b6e4"
}
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.learner.model.KerasWrap",
......@@ -691,5 +691,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.model.Model",
"description": "The main learner primitive that is responsible for assembling the custom built neural network as part of Keras Wrap.\nIn order to use this primitive, you should assemble your architecture via your pipeline and pass the primitive reference\nto the last layer as your 'network_last_layer'. All Keras Wrap layers inherit from NeuralNetworkModuleMixin and follow the\nnaming convention 'd3m.primitives.layer.<layer_name>.KerasWrap'. You should also specify the proper loss function and any\naccompanying metrics that should be tracked during the training process as well. These primtives that are support as port of\nKeras wrap inherit from NeuralNetworkObjectMixin and follow the form 'd3m.primitives.loss_function.<loss_name>.KerasWrap'.\n\nThis primitive will infer the size and number of channels of the input images from the data (in w x h x c format), however,\nevery image has to be square as well as all images have to be scaled to the same size.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "85f0113ee7567b017e1b5ff08095502e3b19270a93e219d539c6874938a2ffa3"
"digest": "568efce4aad5703c27b1896843f81370d77e852dc81caa900432ef452ab98591"
}
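To make the wiring described above concrete, here is a heavily hedged sketch of how the layer, loss, and learner primitives might be assembled with the d3m pipeline API. It assumes the d3m core classes Pipeline, PrimitiveStep, and ArgumentType together with the python paths listed in these annotations; the step layout, the 'loss' hyperparameter name, and the omitted data-handling steps are illustrative assumptions, not a verified working pipeline.

```python
# Illustrative sketch only: chaining Keras-wrapped layers via 'previous_layer'
# and handing the end of the chain to the learner via 'network_last_layer'.
# Hyperparameter names other than those quoted in the descriptions are guesses.
from d3m import index
from d3m.metadata.base import ArgumentType
from d3m.metadata.pipeline import Pipeline, PrimitiveStep

pipeline = Pipeline()
pipeline.add_input(name='inputs')

# Step 0: first layer of the network chain.
step_0 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.layer.flatten.KerasWrap'))
step_0.add_output('produce')
pipeline.add_step(step_0)

# Step 1: a dropout layer chained to step 0 through 'previous_layer'.
step_1 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.layer.dropout.KerasWrap'))
step_1.add_hyperparameter(name='previous_layer', argument_type=ArgumentType.PRIMITIVE, data=0)
step_1.add_output('produce')
pipeline.add_step(step_1)

# Step 2: a loss-function object primitive for the learner.
step_2 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.loss_function.categorical_crossentropy.KerasWrap'))
step_2.add_output('produce')
pipeline.add_step(step_2)

# Step 3: the learner; 'network_last_layer' points at the end of the layer chain.
# The dataset/dataframe steps that would normally feed its 'inputs'/'outputs'
# arguments are omitted here for brevity.
step_3 = PrimitiveStep(primitive=index.get_primitive('d3m.primitives.learner.model.KerasWrap'))
step_3.add_hyperparameter(name='network_last_layer', argument_type=ArgumentType.PRIMITIVE, data=1)
step_3.add_hyperparameter(name='loss', argument_type=ArgumentType.PRIMITIVE, data=2)  # hypothetical name
step_3.add_output('produce')
pipeline.add_step(step_3)
```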
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.loss_function.binary_crossentropy.KerasWrap",
......@@ -183,5 +183,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.losses.binary_crossentropy.BinaryCrossentropy",
"description": "A neural network object that represents a reference to a function. These objects are used to specify\na particular type of loss or metric for the 'Learner' primitive, which constructs it's architecture from scratch.\n\nPure Keras Documentation:\n\nKeras documentation not available.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "231f85863e86ca6406dd0ac490f221b31a4b0e39c9e66c4c6be5400892c7fa29"
"digest": "16f56fb88244187c5a10f9cf20d03accbb429be310c924ed6b649df405592bc4"
}
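The Keras docstring is reported as unavailable above, so a brief illustration of the underlying function may help (standard tf.keras usage; the example tensors are arbitrary):

```python
# Binary crossentropy over probabilities in [0, 1]; returns one loss value
# per sample: -[y*log(p) + (1-y)*log(1-p)] averaged over the last axis.
import tensorflow as tf

y_true = tf.constant([[1.0], [0.0], [1.0]])
y_pred = tf.constant([[0.9], [0.2], [0.6]])
loss = tf.keras.losses.binary_crossentropy(y_true, y_pred)
print(loss.numpy())   # approximately [0.105, 0.223, 0.511]
```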
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.loss_function.categorical_accuracy.KerasWrap",
......@@ -183,5 +183,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.losses.categorical_accuracy.CategoricalAccuracy",
"description": "A neural network object that represents a reference to a function. These objects are used to specify\na particular type of loss or metric for the 'Learner' primitive, which constructs it's architecture from scratch.\n\nPure Keras Documentation:\n\nKeras documentation not available.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "dd9c6683be3427de2612679a7a3029d9eb041f4200c064a8ceddb8923ff3c313"
"digest": "23881324fda4a4976e04dc0cef1e06c4b7048511e9437590efd49d8e7d54c3bb"
}
......@@ -17,7 +17,7 @@
"installation": [
{
"type": "PIP",
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@4472172b10af09829e426c3cb7accd7ee0717cbf#egg=jpl_primitives"
"package_uri": "git+https://gitlab.com/datadrivendiscovery/jpl-primitives.git@3c76a344dab48ac559370f16debfd4a02a67251f#egg=jpl_primitives"
}
],
"python_path": "d3m.primitives.loss_function.categorical_crossentropy.KerasWrap",
......@@ -183,5 +183,5 @@
},
"structural_type": "jpl_primitives.keras_wrap.losses.categorical_crossentropy.CategoricalCrossentropy",
"description": "A neural network object that represents a reference to a function. These objects are used to specify\na particular type of loss or metric for the 'Learner' primitive, which constructs it's architecture from scratch.\n\nPure Keras Documentation:\n\nComputes the categorical crossentropy loss.\n\n Args:\n y_true: tensor of true targets.\n y_pred: tensor of predicted targets.\n from_logits: Whether `y_pred` is expected to be a logits tensor. By default,\n we assume that `y_pred` encodes a probability distribution.\n label_smoothing: Float in [0, 1]. If > `0` then smooth the labels.\n\n Returns:\n Categorical crossentropy loss value.\n\nAttributes\n----------\nmetadata : PrimitiveMetadata\n Primitive's metadata. Available as a class attribute.\nlogger : Logger\n Primitive's logger. Available as a class attribute.\nhyperparams : Hyperparams\n Hyperparams passed to the constructor.\nrandom_seed : int\n Random seed passed to the constructor.\ndocker_containers : Dict[str, DockerContainer]\n A dict mapping Docker image keys from primitive's metadata to (named) tuples containing\n container's address under which the container is accessible by the primitive, and a\n dict mapping exposed ports to ports on that address.\nvolumes : Dict[str, str]\n A dict mapping volume keys from primitive's metadata to file and directory paths\n where downloaded and extracted files are available to the primitive.\ntemporary_directory : str\n An absolute path to a temporary directory a primitive can use to store any files\n for the duration of the current pipeline run phase. Directory is automatically\n cleaned up after the current pipeline run phase finishes.",
"digest": "7df6a83102c30168ceeedcfe8d43842e1e46c1e029315566f95546d3631a4dd0"
"digest": "f8b78d8d16a7b96d7006bc74bb5dc09a48e63eef39eeda255645629bc2c0c58b"
}
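For reference, the function documented above can be exercised directly in tf.keras (standard usage; the tensors below are arbitrary):

```python
# Categorical crossentropy over one-hot targets; returns one loss value per
# sample. from_logits=False because y_pred already encodes a distribution.
import tensorflow as tf

y_true = tf.constant([[0.0, 1.0, 0.0], [0.0, 0.0, 1.0]])
y_pred = tf.constant([[0.05, 0.90, 0.05], [0.10, 0.20, 0.70]])
loss = tf.keras.losses.categorical_crossentropy(y_true, y_pred, from_logits=False)
print(loss.numpy())   # approximately [0.105, 0.357]
```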