pytorch
import numpy as np
import torch.nn as nn
import random
import os
import torch
import seaborn as sns
import matplotlib.pyplot as plt
def seed_torch(seed=42):
    # Seed every RNG that can affect PyTorch results
    random.seed(seed)
    os.environ['PYTHONHASHSEED'] = '0'
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.deterministic = True
    torch.backends.cudnn.enabled = False
seed_torch()
model = nn.Sequential(nn.Linear(100, 100))
nn.init.xavier_uniform_(model[0].weight)  # Xavier/Glorot uniform, same scheme as Keras glorot_uniform
nn.init.constant_(model[0].bias, 0)
plt.figure()
sns.kdeplot(model[0].weight.data.numpy().flatten())  # kernel density estimate of the weights
plt.figure()
plt.hist(model[0].weight.data.numpy().flatten())     # histogram of the same weights
The results are as follows (KDE plot and histogram of the weights):
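As a quick sanity check (a minimal sketch of my own, not from the original post; `build_model` is a hypothetical helper name), re-running the seeding and initialization should reproduce bit-identical weights:

def build_model():
    seed_torch()  # re-seed before every build so initialization is repeatable
    m = nn.Sequential(nn.Linear(100, 100))
    nn.init.xavier_uniform_(m[0].weight)
    nn.init.constant_(m[0].bias, 0)
    return m

m1, m2 = build_model(), build_model()
print(torch.equal(m1[0].weight, m2[0].weight))  # expected: True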
tensorflow
import os
import random
import numpy as np
import tensorflow as tf
import seaborn as sns

os.environ['PYTHONHASHSEED'] = '0'
random_seed = 42
random.seed(random_seed)
np.random.seed(random_seed)
# tf.set_random_seed(random_seed)  # TF 1.x API
tf.random.set_seed(random_seed)    # TF 2.x API
from tensorflow import keras
import tensorflow.keras.backend as K
from tensorflow.keras.callbacks import TensorBoard, ModelCheckpoint, EarlyStopping, ReduceLROnPlateau, History
from tensorflow.keras.layers import Dense, Input, Layer, InputSpec
from tensorflow.keras.layers import Activation
from tensorflow.keras import Sequential
from tensorflow.keras.models import Model, load_model
from tensorflow.keras.optimizers import SGD
from tensorflow.keras import callbacks
from tensorflow.keras.initializers import VarianceScaling
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"  # hide the GPU: run on CPU
tf.compat.v1.keras.backend.set_session(
    tf.compat.v1.Session(
        graph=tf.compat.v1.get_default_graph(),
        config=tf.compat.v1.ConfigProto(intra_op_parallelism_threads=1,
                                        inter_op_parallelism_threads=1)))  # single-threaded ops for determinism
model = Sequential()
# Glorot (Xavier) uniform initializer with a fixed seed
init_kernel = keras.initializers.glorot_uniform(seed=42)
model.add(Dense(units=100, input_dim=100, name="hidden", kernel_initializer=init_kernel))
# The commented-out lines below are from a 1-10-1 toy network (one input, one output, 10 hidden units):
# model.add(Activation('tanh'))  # tanh; this just adds an activation function, not a new layer
# model.add(Dense(units=1, name="output"))  # the 10-1 part; equivalent to model.add(Dense(units=1, input_dim=10))
import matplotlib.pyplot as plt

hidden_weight, hidden_bias = model.get_layer("hidden").get_weights()
# output_weight, output_bias = model.get_layer("output").get_weights()
# print(hidden_weight.shape)
# print(hidden_bias.shape)
# print(np.transpose(hidden_weight))
# print(np.transpose(hidden_bias))
plt.figure()
sns.kdeplot(hidden_weight.flatten())
plt.figure()
plt.hist(hidden_weight.flatten())
# print(output_weight)
# print(output_bias)
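Glorot/Xavier uniform draws from U(-limit, limit) with limit = sqrt(6 / (fan_in + fan_out)), which for this 100→100 layer gives sqrt(6/200) ≈ 0.173. A quick check of that bound on the Keras weights (my own addition, assuming the snippet above has been run):

limit = np.sqrt(6 / (100 + 100))             # ≈ 0.1732 for fan_in = fan_out = 100
print(np.abs(hidden_weight).max() <= limit)  # expected: True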
While I cannot guarantee that TensorFlow and PyTorch produce exactly identical results, this does guarantee that the weights follow the same distribution.
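One way to back that up empirically (a self-contained sketch of my own, assuming SciPy is installed) is a two-sample Kolmogorov-Smirnov test on weights freshly drawn from both initializers:

import numpy as np
import torch
import torch.nn as nn
import tensorflow as tf
from tensorflow import keras
from scipy.stats import ks_2samp

torch.manual_seed(42)
lin = nn.Linear(100, 100)
nn.init.xavier_uniform_(lin.weight)
torch_w = lin.weight.data.numpy().flatten()

init = keras.initializers.glorot_uniform(seed=42)
keras_w = init(shape=(100, 100)).numpy().flatten()

stat, p = ks_2samp(torch_w, keras_w)
print(stat, p)  # a large p-value is consistent with both samples coming from the same distribution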