Default encoder activation to None
marcopeix committed Nov 21, 2024
1 parent 9fbf407 commit fcb1b75
Showing 2 changed files with 4 additions and 4 deletions.
nbs/models.gru.ipynb (4 changes: 2 additions & 2 deletions)
@@ -111,7 +111,7 @@
 "    `inference_input_size`: int, maximum sequence length for truncated inference. Default -1 uses all history.<br>\n",
 "    `encoder_n_layers`: int=2, number of layers for the GRU.<br>\n",
 "    `encoder_hidden_size`: int=200, units for the GRU's hidden state size.<br>\n",
-"    `encoder_activation`: str=`tanh`, type of GRU activation from `tanh` or `relu`.<br>\n",
+"    `encoder_activation`: Optional[str]=None, deprecated. The GRU activation function is fixed in PyTorch and cannot be changed.<br>\n",
 "    `encoder_bias`: bool=True, whether or not to use biases b_ih, b_hh within GRU units.<br>\n",
 "    `encoder_dropout`: float=0., dropout regularization applied to GRU outputs.<br>\n",
 "    `context_size`: int=10, size of context vector for each timestamp on the forecasting window.<br>\n",
@@ -153,7 +153,7 @@
 "        inference_input_size: int = -1,\n",
 "        encoder_n_layers: int = 2,\n",
 "        encoder_hidden_size: int = 200,\n",
-"        encoder_activation: str = 'tanh',\n",
+"        encoder_activation: Optional[str] = None,\n",
 "        encoder_bias: bool = True,\n",
 "        encoder_dropout: float = 0.,\n",
 "        context_size: int = 10,\n",
neuralforecast/models/gru.py (4 changes: 2 additions & 2 deletions)
@@ -29,7 +29,7 @@ class GRU(BaseRecurrent):
     `inference_input_size`: int, maximum sequence length for truncated inference. Default -1 uses all history.<br>
     `encoder_n_layers`: int=2, number of layers for the GRU.<br>
     `encoder_hidden_size`: int=200, units for the GRU's hidden state size.<br>
-    `encoder_activation`: str=`tanh`, type of GRU activation from `tanh` or `relu`.<br>
+    `encoder_activation`: Optional[str]=None, deprecated. The GRU activation function is fixed in PyTorch and cannot be changed.<br>
     `encoder_bias`: bool=True, whether or not to use biases b_ih, b_hh within GRU units.<br>
     `encoder_dropout`: float=0., dropout regularization applied to GRU outputs.<br>
     `context_size`: int=10, size of context vector for each timestamp on the forecasting window.<br>
@@ -73,7 +73,7 @@ def __init__(
         inference_input_size: int = -1,
         encoder_n_layers: int = 2,
         encoder_hidden_size: int = 200,
-        encoder_activation: str = "tanh",
+        encoder_activation: Optional[str] = None,
         encoder_bias: bool = True,
         encoder_dropout: float = 0.0,
         context_size: int = 10,
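
For context on why the default moved to None: PyTorch's `torch.nn.RNN` exposes a `nonlinearity` argument (`'tanh'` or `'relu'`), but `torch.nn.GRU` does not, so `encoder_activation` never actually changed the underlying module's behavior. A minimal sketch of that difference (illustration only, not part of this commit):

import torch
import torch.nn as nn

# nn.RNN lets you choose the recurrent activation.
rnn = nn.RNN(input_size=8, hidden_size=16, nonlinearity="relu")

# nn.GRU takes no such argument: its gate activations (sigmoid) and
# candidate activation (tanh) are hard-wired in the implementation.
gru = nn.GRU(input_size=8, hidden_size=16, num_layers=2)

x = torch.randn(5, 3, 8)  # (seq_len, batch, input_size); batch_first=False by default
out, h_n = gru(x)         # out: (5, 3, 16), h_n: (2, 3, 16)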

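After this change, callers simply omit the argument; passing a value is deprecated and has no effect. A hypothetical usage sketch, assuming the library's standard NeuralForecast workflow and its bundled AirPassengersDF sample dataset (max_steps is kept small purely to make the sketch cheap to run):

from neuralforecast import NeuralForecast
from neuralforecast.models import GRU
from neuralforecast.utils import AirPassengersDF

# encoder_activation now defaults to None and is deprecated,
# so it is left out of the constructor entirely.
model = GRU(
    h=12,               # forecast horizon
    input_size=-1,      # -1: use all available history
    encoder_n_layers=2,
    encoder_hidden_size=200,
    max_steps=10,       # tiny run, illustration only
)

nf = NeuralForecast(models=[model], freq="M")
nf.fit(df=AirPassengersDF)
forecasts = nf.predict()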