1-dastur. Bir va ko’p qatlamli sun’iy neyron to’rlari bo’yicha dastur tuzish Dastur kodi


Download 450.51 Kb.
Sana17.06.2023
Hajmi450.51 Kb.
#1531004
Bog'liq
Suniy topshiriq


Fan: Sun’iy intellekt va neyron to’rli texnologiyalar 21.06-guruh

1-dastur. Bir va ko’p qatlamli sun’iy neyron to’rlari bo’yicha dastur tuzish
Dastur kodi:
from sklearn.linear_model import Perceptron
from sklearn.metrics import accuracy_score

# Training data: the logical AND function on two binary inputs.
X_train = [[0, 0], [0, 1], [1, 0], [1, 1]]
y_train = [0, 0, 0, 1]

# Build a single-layer perceptron classifier and fit it to the data.
clf = Perceptron(max_iter=1000)
clf.fit(X_train, y_train)

# Evaluate on the same four input patterns (AND is linearly separable,
# so the model can classify all of them correctly).
X_test = [[0, 0], [0, 1], [1, 0], [1, 1]]
y_test = [0, 0, 0, 1]
y_pred = clf.predict(X_test)

# Report the classification accuracy (1.0 means every pattern is correct).
print("natija:", accuracy_score(y_test, y_pred))
Natija:


2-dastur. Perseptron. Perseptron modeli bo’yicha dastur tuzish
2.1 Dasturning kod qismi:
import numpy as np

class Perceptron:
    """Single-layer perceptron with a Heaviside step activation.

    The weight vector ``W`` has ``input_size + 1`` entries; index 0 is the
    bias weight, so any vector passed to :meth:`predict` must already carry
    a leading 1 (``fit`` inserts it before each update).
    """

    def __init__(self, input_size, lr=1, epochs=10):
        # Weights (bias at index 0) start at zero.
        self.W = np.zeros(input_size + 1)
        self.epochs = epochs
        self.lr = lr  # learning rate

    def activation_fn(self, x):
        # Step function: fire (1) when the weighted sum is non-negative.
        return 1 if x >= 0 else 0

    def predict(self, x):
        # ``x`` must include the leading bias input (1) as its first element.
        z = self.W.T.dot(x)  # weighted sum
        a = self.activation_fn(z)
        return a

    def fit(self, X, d):
        """Train with the classic perceptron rule: W += lr * error * x."""
        for _ in range(self.epochs):
            for i in range(d.shape[0]):
                x = np.insert(X[i], 0, 1)  # prepend the bias input
                y = self.predict(x)
                e = d[i] - y  # signed error in {-1, 0, 1}
                self.W = self.W + self.lr * e * x

# XOR training data. NOTE: XOR is NOT linearly separable, so a single
# perceptron cannot learn it -- training will not converge and at least
# one of the predictions printed below will be wrong. (The original
# comment claiming the weights come out as [-3. 2. 2.] was incorrect.)
X = np.array([
    [0, 0],
    [0, 1],
    [1, 0],
    [1, 1],
])

d = np.array([0, 1, 1, 0])

perceptron = Perceptron(input_size=2)
perceptron.fit(X, d)

# Weights [bias, w1, w2] after the fixed number of epochs.
print(perceptron.W)

# Run the trained perceptron on each pattern (bias input prepended).
for i in range(X.shape[0]):
    x = np.insert(X[i], 0, 1)
    print(X[i], ":", perceptron.predict(x))
Natija:




2.2. Dasturning kod qismi:
# Numpy kutubxonasini yuklab olamiz
import numpy as np

#Perseptron sinfi


class Perceptron:
    """Perceptron that keeps the bias as a separate scalar.

    Weights start as ``None`` and are (re)initialized to zeros at the start
    of every :meth:`train` call.
    """

    def __init__(self):
        # BUG FIX: the constructor was named ``init`` and therefore never
        # ran -- ``weights``/``bias`` only came into existence inside
        # ``train``. Renamed to ``__init__`` so a fresh instance is always
        # in a defined state.
        self.weights = None
        self.bias = 0

    def initialize(self, n_features):
        # Set w and b to their initial zero values.
        self.weights = np.zeros(n_features)
        self.bias = 0
        return

    def predict(self, inputs):
        # Step activation on the affine combination; strict > 0 threshold.
        activation = np.dot(inputs, self.weights) + self.bias
        return 1 if activation > 0 else 0

    def train(self, X, y, epochs=100, learning_rate=0.1):
        """Fit with the perceptron learning rule over ``epochs`` passes."""
        self.initialize(X.shape[1])
        for epoch in range(epochs):
            for inputs, label in zip(X, y):
                y_pred = self.predict(inputs)
                error = label - y_pred  # delta error in {-1, 0, 1}
                # Update w and b proportionally to the error.
                self.weights += learning_rate * error * inputs
                self.bias += learning_rate * error
        return
# Sanity check on the logical AND function.
and_inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
and_labels = np.array([0, 0, 0, 1])

model = Perceptron()
model.train(and_inputs, and_labels, epochs=100, learning_rate=0.1)
# Query one pattern: AND(0, 0) should print 0.
print(model.predict(np.array([0, 0])))

# Sanity check on the logical OR function.
or_inputs = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
or_labels = np.array([0, 1, 1, 1])

model = Perceptron()
model.train(or_inputs, or_labels, epochs=100, learning_rate=0.1)
# Query one pattern: OR(0, 1) should print 1.
print(model.predict(np.array([0, 1])))
Natija:


3-dastur. Sun’iy neyron to’rini o’qituvchili va o’qituvchisiz o’rgatish bo’yicha dastur tuzish.
Dastur kodi:
import numpy as np
import matplotlib.pyplot as plt
def sigmoid(x):
    """Logistic sigmoid: maps any real (or NumPy array) input into (0, 1)."""
    neg_exp = np.exp(-x)
    return 1 / (1 + neg_exp)

class NeuralNetwork:
    """Minimal single-layer feed-forward network with sigmoid activation.

    Weights and biases are drawn from a standard normal distribution, so
    outputs differ between instances unless the NumPy RNG is seeded.
    """

    def __init__(self, input_size, output_size):
        self.input_size = input_size
        self.output_size = output_size
        # weights: (output_size, input_size); bias: (output_size, 1) column.
        self.weights = np.random.randn(output_size, input_size)
        self.bias = np.random.randn(output_size, 1)

    def feedforward(self, inputs):
        """Forward pass: sigmoid(W . inputs + b).

        NOTE(review): with a 1-D ``inputs`` of length ``input_size`` the dot
        product has shape (output_size,), and broadcasting against the
        (output_size, 1) bias yields an (output_size, output_size) result.
        The caller in this file uses output_size == 1, where that collapses
        to a (1, 1) array.
        """
        self.inputs = inputs
        self.output = sigmoid(np.dot(self.weights, inputs) + self.bias)
        return self.output
# Build a 2-input, 1-output network and run a single forward pass.
network = NeuralNetwork(2, 1)
print(network.feedforward(np.array([0.5, 0.5])))

Natija:



Bajardi: Abdujabborov Davronbek Qabul qildi: I.Tojimamatov

Download 450.51 Kb.

Do'stlaringiz bilan baham:




Ma'lumotlar bazasi mualliflik huquqi bilan himoyalangan ©fayllar.org 2024
ma'muriyatiga murojaat qiling