X = T.tensor4()
y = T.ivector()
# training output
output_train = lasagne.layers.get_output(model, X, deterministic=False)
# evaluation output. Also includes output of transform for plotting
output_eval, transform_eval = lasagne.layers.get_output([model, l_transform], X, deterministic=True)
sh_lr = theano.shared(lasagne.utils.floatX(LEARNING_RATE))
cost = T.mean(T.nnet.categorical_crossentropy(output_train, y))
updates = lasagne.updates.adam(cost, model_params, learning_rate=sh_lr)
train = theano.function([X, y], [cost, output_train], updates=updates)
eval = theano.function([X], [output_eval, transform_eval])
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-22-c9e0c22f04d4> in <module>()
12 updates = lasagne.updates.adam(cost, model_params, learning_rate=sh_lr)
13
---> 14 train = theano.function([X, y], [cost, output_train], updates=updates)
15 eval = theano.function([X], [output_eval, transform_eval])
/usr/local/lib/python2.7/site-packages/theano/compile/function.pyc in function(inputs, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input)
315 on_unused_input=on_unused_input,
316 profile=profile,
--> 317 output_keys=output_keys)
318 # We need to add the flag check_aliased inputs if we have any mutable or
319 # borrowed used defined inputs
/usr/local/lib/python2.7/site-packages/theano/compile/pfunc.pyc in pfunc(params, outputs, mode, updates, givens, no_default_updates, accept_inplace, name, rebuild_strict, allow_input_downcast, profile, on_unused_input, output_keys)
487 rebuild_strict=rebuild_strict,
488 copy_inputs_over=True,
--> 489 no_default_updates=no_default_updates)
490 # extracting the arguments
491 input_variables, cloned_extended_outputs, other_stuff = output_vars
/usr/local/lib/python2.7/site-packages/theano/compile/pfunc.pyc in rebuild_collect_shared(outputs, inputs, replace, updates, rebuild_strict, copy_inputs_over, no_default_updates)
202 ' function to remove broadcastable dimensions.')
203
--> 204 raise TypeError(err_msg, err_sug)
205 assert update_val.type == store_into.type
206
TypeError: ('An update must have the same type as the original shared variable (shared_var=<CudaNdarrayType(float32, vector)>, shared_var.type=CudaNdarrayType(float32, vector), update_val=Elemwise{add,no_inplace}.0, update_val.type=TensorType(float64, vector)).', 'If the difference is related to the broadcast pattern, you can call the tensor.unbroadcast(var, axis_to_unbroadcast[, ...]) function to remove broadcastable dimensions.')