mpozpnd / ctr.py
Last active March 24, 2019 15:39
import numpy as np
import numpy.linalg
from tqdm import tqdm

class CTRModel:
    """
    Original paper:
    Chong Wang, David M. Blei, "Collaborative Topic Modeling for Recommending Scientific Articles", KDD 2011
    """
mpozpnd / python
Last active January 20, 2019 13:30
Compressed Interaction Network (CIN)
import torch
import torch.nn
import numpy as np

class CIN(torch.nn.Module):
    def __init__(self, num_fields, embedding_dim, num_layers, Hk=None):
        super(CIN, self).__init__()
        self.num_fields = num_fields
        self.embedding_dim = embedding_dim
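The preview ends here. As a hedged sketch of what one CIN layer computes (following the xDeepFM formulation, not necessarily this gist's forward pass): each layer takes outer products between the previous layer's feature maps and the raw field embeddings along the embedding dimension, then compresses them with learned weights.

import torch

def cin_layer(x0, xk, weight):
    # x0: (B, m, D) raw field embeddings; xk: (B, Hk, D) previous layer;
    # weight: (H_next, Hk * m). All shapes and names here are illustrative.
    B, m, D = x0.shape
    Hk = xk.shape[1]
    z = torch.einsum('bhd,bmd->bhmd', xk, x0)      # pairwise interactions per dim
    z = z.reshape(B, Hk * m, D)
    return torch.einsum('hn,bnd->bhd', weight, z)  # compress to H_next feature maps

x0 = torch.randn(4, 5, 8)      # batch 4, 5 fields, embedding dim 8
w = torch.randn(6, 5 * 5)      # 6 feature maps in the next layer
x1 = cin_layer(x0, x0, w)      # first layer uses xk = x0
print(x1.shape)                # torch.Size([4, 6, 8])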
mpozpnd / test.py
Created December 21, 2016 16:16
Test of what happens when a softmax appears twice in a network
#!/usr/bin/env python
# vim: set fileencoding=utf-8 :
#
# Author: Graphium
# URL: http://tehutehu.com
# License: MIT License
# Created: 2016-12-22
#
import theano
import theano.tensor as T
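The preview stops after the imports. A minimal version of the check the description names, i.e. applying softmax twice in a row (a hedged reconstruction, not the gist's remaining code):

import numpy as np

x = T.matrix('x')
once = T.nnet.softmax(x)
twice = T.nnet.softmax(once)    # softmax of values already in [0, 1]
f = theano.function([x], [once, twice])
a, b = f(np.array([[1.0, 2.0, 3.0]], dtype=theano.config.floatX))
print(a)   # peaked distribution
print(b)   # much flatter: the second softmax squashes toward uniform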
# Run a conda command on host t2a006170 through the campus proxy
function t2conda () {
    ssh -t t2a006170 "export https_proxy=proxy.noc.titech.ac.jp:3128; conda $@"
}
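For example, t2conda install numpy opens an SSH session to t2a006170, exports the proxy, and runs conda install numpy there. One caveat: because $@ is expanded inside a double-quoted string that ssh joins into a single remote command, arguments containing spaces will not survive the round trip.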
from collections import OrderedDict
import numpy as np
import theano
import theano.tensor as T

def sgd_momentum(lr, momentum, cost, params, L1_rate, L2_rate):
    # Gradients of the L1/L2-regularized cost w.r.t. each parameter
    gparams = [T.grad(cost + L1_rate * param.norm(L=1) + L2_rate * param.norm(L=2), param) for param in params]
    # One velocity buffer per parameter, initialized to zero
    ex2 = [theano.shared(np.zeros_like(param.get_value(borrow=True))) for param in params]
    updates = OrderedDict()
    for gparam, param, ex2_ in zip(gparams, params, ex2):
        updates[ex2_] = T.cast(momentum * ex2_ - lr * gparam, theano.config.floatX)
        updates[param] = T.cast(param + momentum * ex2_ - lr * gparam, theano.config.floatX)
    return updates, ex2, gparams
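A hedged usage sketch; cost, params, and the inputs x, y are assumed to be defined by the surrounding model code:

updates, ex2, gparams = sgd_momentum(lr=0.01, momentum=0.9, cost=cost,
                                     params=params, L1_rate=0.0, L2_rate=1e-4)
train = theano.function([x, y], cost, updates=updates)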
mpozpnd / adam
Last active August 29, 2015 14:18
def adam(a, b1, b2, la, t, cost, params, L1_rate, L2_rate, eps_=1e-08):
    updates = OrderedDict()
    gparams = [T.grad(cost + L1_rate * param.norm(L=1) + L2_rate * param.norm(L=2), param) for param in params]
    b1 = b1 * (la ** t)   # decay the first-moment coefficient over time
    t = t + 1
    # First- and second-moment buffers, one per parameter
    mt = [theano.shared(np.zeros_like(param.get_value(borrow=True))) for param in params]
    vt = [theano.shared(np.zeros_like(param.get_value(borrow=True))) for param in params]
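The preview cuts off after the moment buffers. A hedged continuation following the standard Adam update of Kingma & Ba (2015), not the gist's own code:

    # Hypothetical continuation of adam(); mt, vt, b1, t as defined above
    for gparam, param, mt_, vt_ in zip(gparams, params, mt, vt):
        updates[mt_] = T.cast(b1 * mt_ + (1. - b1) * gparam, theano.config.floatX)
        updates[vt_] = T.cast(b2 * vt_ + (1. - b2) * gparam ** 2, theano.config.floatX)
        mhat = updates[mt_] / (1. - b1 ** t)   # bias correction
        vhat = updates[vt_] / (1. - b2 ** t)
        updates[param] = T.cast(param - a * mhat / (T.sqrt(vhat) + eps_), theano.config.floatX)
    return updates, mt, vt, gparams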
def adadelta(rho_, eps_, cost, params, L1_rate, L2_rate):
    updates = OrderedDict()
    gparams = [T.grad(cost + L1_rate * param.norm(L=1) + L2_rate * param.norm(L=2), param) for param in params]
    gparams_mean = [gparam.mean() for gparam in gparams]
    # Running averages of squared gradients (eg2) and squared updates (ex2)
    eg2 = [theano.shared(np.zeros_like(param.get_value(borrow=True))) for param in params]
    dxt = [theano.shared(np.zeros_like(param.get_value(borrow=True))) for param in params]
    ex2 = [theano.shared(np.zeros_like(param.get_value(borrow=True))) for param in params]
    for gparam, param, eg2_, ex2_ in zip(gparams, params, eg2, ex2):
        updates[eg2_] = T.cast(rho_ * eg2_ + (1. - rho_) * (gparam ** 2), theano.config.floatX)
        # RMS-scaled step direction
        dparam = -T.sqrt((ex2_ + eps_) / (updates[eg2_] + eps_)) * gparam
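The preview stops mid-loop. A hedged completion of the loop following Zeiler's ADADELTA (accumulate the squared step, then apply it), not the gist's own code:

        # Hypothetical continuation of the loop body in adadelta()
        updates[ex2_] = T.cast(rho_ * ex2_ + (1. - rho_) * dparam ** 2, theano.config.floatX)
        updates[param] = T.cast(param + dparam, theano.config.floatX)
    return updates, eg2, ex2, gparams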
def adagrad(lr, eps_, cost, params, L1_rate, L2_rate):
    updates = OrderedDict()
    gparams = [T.grad(cost + L1_rate * param.norm(L=1) + L2_rate * param.norm(L=2), param) for param in params]
    # Per-parameter accumulator of squared gradients
    gsum = [theano.shared(np.zeros_like(param.get_value(borrow=True))) for param in params]
    for gparam, param, gsum_ in zip(gparams, params, gsum):
        # Use eps_ (previously unused; the original hard-coded 1) to guard the sqrt
        updates[param] = T.cast(param - lr * gparam / T.sqrt(eps_ + gsum_), theano.config.floatX)
        updates[gsum_] = T.cast(gsum_ + gparam ** 2, theano.config.floatX)
    return updates, gsum, gparams
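All four optimizers follow the same convention: each returns an OrderedDict of updates plus its internal buffers and the gradients, so any of them can be passed straight to theano.function(..., updates=updates) and swapped for one another.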