@@ -83,16 +83,48 @@ function params!(p::Zygote.Params, x, seen = IdSet())
   end
 end

+"""
+    params(model)
+
+Returns a `Zygote.Params` object containing all parameter arrays from the model.
+This is deprecated!
+
+This function was the cornerstone of how Flux used Zygote's implicit-mode gradients,
+but since Flux 0.13 we use the explicit mode `gradient(m -> loss(m, x, y), model)` instead.
+
+To collect all the parameter arrays for other purposes, use `Flux.trainables(model)`.
+"""
 function params(m...)
-  Base.depwarn("""
-    Flux.params(m...) is deprecated. Use `Flux.trainable(model)` for parameters' collection
-    and the explicit `gradient(m -> loss(m, x, y), model)` for gradient computation.
-    """, :params)
+  @warn """`Flux.params(m...)` is deprecated. Use `Flux.trainable(model)` for parameter collection,
+    and the explicit `gradient(m -> loss(m, x, y), model)` for gradient computation.""" maxlog=1
   ps = Params()
   params!(ps, m)
   return ps
 end

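For orientation, here is a minimal sketch of the migration this warning asks for; the model, data, and `Flux.mse` loss are illustrative choices, not part of this patch:

```julia
using Flux  # `gradient` is re-exported by Flux

model = Dense(2 => 1)
x, y = rand(Float32, 2, 8), rand(Float32, 1, 8)

# Old implicit style (what this deprecated method supports):
#   ps = Flux.params(model)
#   gs = gradient(() -> Flux.mse(model(x), y), ps)

# New explicit style, as the warning suggests:
gs = gradient(m -> Flux.mse(m(x), y), model)
```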
+
+"""
+    @functor MyLayer
+
+Flux used to require the use of `Functors.@functor` to mark any new layer-like struct.
+This allowed it to explore inside the struct, and update any trainable parameters within.
[email protected] removes this requirement. This is because [email protected] changed its behaviour
+to be opt-out instead of opt-in. Arbitrary structs will now be explored without special marking.
+Hence calling `@functor` is no longer required.
+
+Calling `Flux.@layer MyLayer` is, however, still recommended. This adds various convenience methods
+for your layer type, such as pretty printing, and use with Adapt.jl.
+"""
+macro functor(ex)
+  @warn """The use of `Flux.@functor` is deprecated.
+    Most likely, you should write `Flux.@layer MyLayer` which will add various convenience methods for your type,
+    such as pretty-printing, and use with Adapt.jl.
+    However, this is not required. Flux.jl v0.15 uses Functors.jl v0.5, which makes exploration of most nested `struct`s
+    opt-out instead of opt-in... so Flux will automatically see inside any custom struct definitions.
+    """ maxlog=1
+  _layer_macro(ex)
+end
+
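To illustrate the docstring's recommendation, a sketch of a custom layer under the new opt-out behaviour; the `Affine` type is hypothetical:

```julia
using Flux

# Hypothetical layer type, for illustration only.
struct Affine
  W
  b
end
Affine(in::Int, out::Int) = Affine(randn(Float32, out, in), zeros(Float32, out))
(a::Affine)(x) = a.W * x .+ a.b

# No `@functor Affine` needed: Functors.jl v0.5 explores structs by default.
# `@layer` is optional, adding pretty-printing and Adapt.jl support.
Flux.@layer Affine

Flux.trainables(Affine(2, 3))  # finds W and b automatically
```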
 # Allows caching of the parameters when params is called within gradient() to fix #2040.
 # @non_differentiable params(m...)  # https://github.com/FluxML/Flux.jl/pull/2054
 # That speeds up implicit use, and silently breaks explicit use.
@@ -101,6 +133,14 @@ Zygote._pullback(::Zygote.Context{true}, ::typeof(params), m...) = params(m), _

 include("optimise/Optimise.jl")  ## deprecated Module

+function Optimiser(rules...)
+  @warn "`Flux.Optimiser(...)` has been removed, please call `OptimiserChain(...)`, exported by Flux from Optimisers.jl" maxlog=1
+  OptimiserChain(rules...)
+end
+function ClipValue(val)
+  @warn "`Flux.ClipValue(...)` has been removed, please call `ClipGrad(...)`, exported by Flux from Optimisers.jl" maxlog=1
+  ClipGrad(val)
+end
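A sketch of the one-line replacement these shims point to, with illustrative clipping and learning-rate values:

```julia
using Flux  # OptimiserChain, ClipGrad and Adam are re-exported from Optimisers.jl

# Before: opt = Flux.Optimiser(Flux.ClipValue(1f0), Adam(1f-3))
opt = OptimiserChain(ClipGrad(1f0), Adam(1f-3))

model = Dense(2 => 1)              # illustrative model
opt_state = Flux.setup(opt, model)
```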

 # TODO this friendly error should go in Optimisers.jl.
 # remove after https://github.com/FluxML/Optimisers.jl/pull/181
@@ -119,9 +159,6 @@
 ### v0.16 deprecations ####################


-# Enable these when 0.16 is released, and delete const ClipGrad = Optimise.ClipValue etc:
-# Base.@deprecate_binding Optimiser OptimiserChain
-# Base.@deprecate_binding ClipValue ClipGrad

 # train!(loss::Function, ps::Zygote.Params, data, opt) = throw(ArgumentError(
 #   """On Flux 0.16, `train!` no longer accepts implicit `Zygote.Params`.
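The commented-out error above is truncated by this hunk, but the explicit `train!` form it refers to is the current documented API; a minimal sketch, assuming a toy model and dataset:

```julia
using Flux

model = Dense(1 => 1)                                  # toy model
data  = [(rand(Float32, 1, 4), rand(Float32, 1, 4))]   # one (x, y) batch
opt_state = Flux.setup(Adam(), model)

# Explicit form: the loss receives the model and one batch of data.
Flux.train!((m, x, y) -> Flux.mse(m(x), y), model, data, opt_state)
```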