AI

실습 - MNIST

✿(๑❛ڡ❛๑)✿ 2022. 10. 3. 15:32
728x90
SMALL
# Reproducibility: fix every RNG the script touches (Python, NumPy,
# PyTorch CPU and CUDA) to a single seed so runs are repeatable.
import random

import numpy as np  # was missing: np.random.seed below raised NameError
import torch

seed = 42
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed)
if torch.cuda.is_available():
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)  # also covers multi-GPU setups
# Trade cuDNN autotuning speed for deterministic kernel selection.
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
# Core PyTorch modules: optimizers and functional losses.
import torch
import torch.nn.functional as F
import torch.optim as optim

# Train on the GPU when one is available, otherwise fall back to the CPU.
if torch.cuda.is_available():
    device = 'cuda'
else:
    device = 'cpu'

# Load the Kaggle MNIST CSVs and move them onto the training device as tensors.
import numpy as np   # needed for np.array below
import pandas as pd  # was missing: pd.read_csv raised NameError

train = pd.read_csv('/kaggle/input/2022-ai-w5p1/train.csv')
test = pd.read_csv('/kaggle/input/2022-ai-w5p1/test.csv')
submit = pd.read_csv('/kaggle/input/2022-ai-w5p1/sample_submit.csv')

# Split the 784 pixel columns (features) from the target digit label.
x = train.drop(['label'], axis=1)
y = train['label']

# Float features for matmul; long (int64) labels as required by cross_entropy.
x = torch.FloatTensor(np.array(x)).to(device)
test = torch.FloatTensor(np.array(test)).to(device)
y = torch.LongTensor(y).to(device)

# --- Model training: multinomial logistic regression on raw pixels ---
# Single linear layer: 784 pixel inputs -> 10 digit classes.
w = torch.zeros((784, 10), requires_grad=True, device=device)
b = torch.zeros((1), requires_grad=True, device=device)
optimizer = optim.SGD([w, b], lr=0.00001)
nb_epochs = 100000

for epoch in range(nb_epochs + 1):
    # F.cross_entropy applies (log-)softmax itself, so feed it raw logits.
    logits = x.matmul(w) + b
    cost = F.cross_entropy(logits, y)

    # Standard update: clear old gradients, backprop, take one SGD step.
    optimizer.zero_grad()
    cost.backward()
    optimizer.step()

    # Log the loss once every 1000 epochs.
    if epoch % 1000 == 0:
        print(cost)
    
# Prediction on the test set
# Score the test set with the trained weights and write the submission file.
logits = test.matmul(w) + b
H = F.softmax(logits, dim=1)     # class probabilities for each test row
predict = H.argmax(dim=1)        # most probable digit per row
submit['label'] = predict.cpu()  # back to CPU before handing to pandas
submit.to_csv('submit.csv', index=False)

Note: F.cross_entropy already applies (log-)softmax internally, which is why the model feeds it raw logits during training.

 

728x90
LIST