

Convolutional neural network – Theano

Here is a very simple and clear example (red_neuronal_convolucional.py) of how to build a convolutional neural network.

import theano
from theano import tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
import numpy as np
from Load import mnist
from theano.tensor.nnet.conv import conv2d
from theano.tensor.signal.downsample import max_pool_2d

srng = RandomStreams()

def floatX(X):
    # cast to Theano's configured float type (float32 when running on GPU)
    return np.asarray(X, dtype=theano.config.floatX)

def init_weights(shape):
    # small Gaussian initialization, stored as a shared variable
    return theano.shared(floatX(np.random.randn(*shape) * 0.01))

def rectify(X):
    # ReLU activation
    return T.maximum(X, 0.)

def softmax(X):
    # numerically stable softmax: subtract the row-wise max before exponentiating
    e_x = T.exp(X - X.max(axis=1).dimshuffle(0, 'x'))
    return e_x / e_x.sum(axis=1).dimshuffle(0, 'x')

def dropout(X, p=0.):
    # randomly zero units with probability p and rescale the survivors
    if p > 0:
        retain_prob = 1 - p
        X *= srng.binomial(X.shape, p=retain_prob, dtype=theano.config.floatX)
        X /= retain_prob
    return X

def RMSprop(cost, params, lr=0.001, rho=0.9, epsilon=1e-6):
    grads = T.grad(cost=cost, wrt=params)
    updates = []
    for p, g in zip(params, grads):
        acc = theano.shared(p.get_value() * 0.)
        acc_new = rho * acc + (1 - rho) * g ** 2
        gradient_scaling = T.sqrt(acc_new + epsilon)
        g = g / gradient_scaling
        updates.append((acc, acc_new))
        updates.append((p, p - lr * g))
    return updates
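The excerpt stops before the network itself, but the imports of conv2d and max_pool_2d already hint at its shape. As a rough sketch only (the layer widths, helper names, and dropout wiring here are illustrative assumptions, not taken from the original script), a single convolutional layer built from these pieces could look like this:

# Hypothetical sketch of a one-conv-layer model using the helpers above;
# shapes are assumed, not from the original red_neuronal_convolucional.py.
def conv_model(X, w_conv, w_hidden, w_out, p_drop):
    # X: (batch, 1, 28, 28) MNIST images; w_conv: (n_filters, 1, 3, 3) filters
    l1 = rectify(conv2d(X, w_conv))                      # convolve, then ReLU
    l1 = max_pool_2d(l1, (2, 2))                         # 2x2 max-pooling
    l1 = dropout(T.flatten(l1, outdim=2), p_drop)        # flatten feature maps
    l2 = dropout(rectify(T.dot(l1, w_hidden)), p_drop)   # fully connected layer
    return softmax(T.dot(l2, w_out))                     # class probabilities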

Modern neural network – Theano

Here is a very simple and clear example (red_neuronal_moderna.py) of how to build a modern neural network.

import theano
from theano import tensor as T
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
import numpy as np
from Load import mnist

srng = RandomStreams()

def floatX(X):
    return np.asarray(X, dtype=theano.config.floatX)

def init_weights(shape):
    return theano.shared(floatX(np.random.randn(*shape) * 0.01))

def rectify(X):
    return T.maximum(X, 0.)

def softmax(X):
    e_x = T.exp(X - X.max(axis=1).dimshuffle(0, 'x'))
    return e_x / e_x.sum(axis=1).dimshuffle(0, 'x')

def RMSprop(cost, params, lr=0.001, rho=0.9, epsilon=1e-6):
    grads = T.grad(cost=cost, wrt=params)
    updates = []
    for p, g in zip(params, grads):
        acc = theano.shared(p.get_value() * 0.)     # running average of squared gradients
        acc_new = rho * acc + (1 - rho) * g ** 2
        gradient_scaling = T.sqrt(acc_new + epsilon)
        g = g / gradient_scaling                    # normalize the gradient
        updates.append((acc, acc_new))
        updates.append((p, p - lr * g))
    return updates
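The excerpt ends here. To give an idea of how these updates are used, the following hypothetical snippet wires RMSprop into a compiled training function; the two-layer shape and the sizes (625 hidden units) are assumptions, not taken from the original script:

# Hypothetical continuation: a two-layer rectifier network trained with the
# RMSprop updates above. Layer sizes are assumed.
X = T.fmatrix()
Y = T.fmatrix()
w_h = init_weights((784, 625))    # 28x28 MNIST inputs -> hidden layer
w_o = init_weights((625, 10))     # hidden layer -> 10 digit classes
py_x = softmax(T.dot(rectify(T.dot(X, w_h)), w_o))
cost = T.mean(T.nnet.categorical_crossentropy(py_x, Y))
updates = RMSprop(cost, [w_h, w_o])
train = theano.function([X, Y], cost, updates=updates, allow_input_downcast=True)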

Classic neural network – Theano

Here is a very simple and clear example (red_neuronal_clasica.py) of how to build a classic neural network.

import theano
from theano import tensor as T
import numpy as np
from Load import mnist
from scipy.misc import imsave

def floatX(X):
    return np.asarray(X, dtype=theano.config.floatX)

def init_weights(shape):
    return theano.shared(floatX(np.random.randn(*shape) * 0.01))

def sgd(cost, params, lr=0.05):
    # plain stochastic gradient descent: step each parameter against its gradient
    grads = T.grad(cost=cost, wrt=params)
    updates = []
    for p, g in zip(params, grads):
        updates.append([p, p - g * lr])
    return updates

def model(X, w_h, w_o):
    # one sigmoid hidden layer followed by a softmax output layer
    h = T.nnet.sigmoid(T.dot(X, w_h))
    pyx = T.nnet.softmax(T.dot(h, w_o))
    return pyx

trX, teX, trY, teY = mnist(onehot=True)

X = T.fmatrix()
Y = T.fmatrix()

w_h = init_weights((784, 625))  # 784 inputs (28x28 pixels); the hidden size is cut off in the excerpt and assumed here
w_o = init_weights((625, 10))   # 10 output classes, one per MNIST digit
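From here the usual pattern is to compile the graph and loop over mini-batches. The original post is cut off, so the following is a hypothetical completion; the batch size, epoch count, and everything past this point are assumptions:

# Hypothetical completion: compile training/prediction functions and loop
# over mini-batches. Batch size (128) and epoch count (20) are assumptions.
py_x = model(X, w_h, w_o)
y_pred = T.argmax(py_x, axis=1)
cost = T.mean(T.nnet.categorical_crossentropy(py_x, Y))
updates = sgd(cost, [w_h, w_o])
train = theano.function([X, Y], cost, updates=updates, allow_input_downcast=True)
predict = theano.function([X], y_pred, allow_input_downcast=True)

for epoch in range(20):
    for start in range(0, len(trX), 128):
        train(trX[start:start + 128], trY[start:start + 128])
    # test-set accuracy after each epoch
    print(epoch, np.mean(np.argmax(teY, axis=1) == predict(teX)))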