Initial commit
@@ -0,0 +1,81 @@
{
  "_comment": "Template for defining the hyperparameter search space. This file should be used as a guide for creating your own hyperparameter configuration.",
  "algorithm": {
    "_comment": "The search algorithm to use. Options: 'grid', 'random', 'bayesian'.",
    "type": "string",
    "default": "random",
    "enum": ["grid", "random", "bayesian"]
  },
  "objective": {
    "_comment": "The metric to optimize. The plugin will attempt to maximize this metric.",
    "type": "string",
    "default": "val_loss"
  },
  "max_trials": {
    "_comment": "The maximum number of trials to run. Each trial will explore a different set of hyperparameters.",
    "type": "integer",
    "default": 10
  },
  "hyperparameters": {
    "_comment": "A dictionary of hyperparameters to search. Each key is the name of the hyperparameter, and the value is a dictionary defining the search space for that hyperparameter.",
    "type": "object",
    "properties": {
      "learning_rate": {
        "_comment": "Example: Learning rate for a neural network.",
        "type": "number",
        "distribution": "loguniform",
        "min": 0.0001,
        "max": 0.1
      },
      "num_layers": {
        "_comment": "Example: Number of layers in a neural network.",
        "type": "integer",
        "distribution": "uniform",
        "min": 2,
        "max": 6
      },
      "dropout_rate": {
        "_comment": "Example: Dropout rate for regularization.",
        "type": "number",
        "distribution": "uniform",
        "min": 0.0,
        "max": 0.5
      },
      "batch_size": {
        "_comment": "Example: Batch size for training.",
        "type": "integer",
        "distribution": "categorical",
        "values": [32, 64, 128, 256]
      },
      "optimizer": {
        "_comment": "Example: Optimization algorithm to use.",
        "type": "string",
        "distribution": "categorical",
        "values": ["adam", "sgd", "rmsprop"]
      }
    },
    "required": ["learning_rate", "num_layers"]
  },
  "early_stopping": {
    "_comment": "Parameters for early stopping. If enabled, the tuning process will stop if the objective metric does not improve for a specified number of epochs.",
    "type": "object",
    "properties": {
      "monitor": {
        "_comment": "The metric to monitor for early stopping.",
        "type": "string",
        "default": "val_loss"
      },
      "patience": {
        "_comment": "The number of epochs with no improvement after which training will be stopped.",
        "type": "integer",
        "default": 3
      },
      "enabled": {
        "_comment": "Whether early stopping is enabled.",
        "type": "boolean",
        "default": true
      }
    },
    "required": ["monitor", "patience", "enabled"]
  }
}
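
A minimal sketch of how a tuner could draw random trials from this template. The file name hyperparameter_template.json and the sample_trial helper are illustrative assumptions, not part of the plugin's actual API.

# Sketch only: assumes the template above is saved as hyperparameter_template.json.
import json
import math
import random

def sample_trial(space):
    """Draw one hyperparameter set from the search space defined in the template."""
    trial = {}
    for name, spec in space["hyperparameters"]["properties"].items():
        dist = spec["distribution"]
        if dist == "loguniform":
            # Sample uniformly in log space, then exponentiate.
            low, high = math.log(spec["min"]), math.log(spec["max"])
            trial[name] = math.exp(random.uniform(low, high))
        elif dist == "uniform" and spec["type"] == "integer":
            trial[name] = random.randint(spec["min"], spec["max"])
        elif dist == "uniform":
            trial[name] = random.uniform(spec["min"], spec["max"])
        elif dist == "categorical":
            trial[name] = random.choice(spec["values"])
    return trial

if __name__ == "__main__":
    with open("hyperparameter_template.json") as f:  # hypothetical file name
        space = json.load(f)
    # Run the default number of trials with the default 'random' algorithm.
    for _ in range(space["max_trials"]["default"]):
        print(sample_trial(space))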
|
||||