import numpy as np
import matplotlib.pyplot as plt
# Sigmoid activation function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
# Derivative of the sigmoid, expressed in terms of the sigmoid's output:
# if a = sigmoid(z), then sigmoid'(z) = a * (1 - a), so this function
# expects the activation value a, not the pre-activation z
def sigmoid_derivative(x):
    return x * (1 - x)
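# Quick sanity check (an illustrative addition, not part of the network itself):
# sigmoid(0) = 0.5, so the activation-based derivative gives 0.5 * (1 - 0.5) = 0.25,
# matching the analytic value sigmoid'(0) = 0.25
assert np.isclose(sigmoid_derivative(sigmoid(0.0)), 0.25)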
class NeuralNetwork:
    def __init__(self, input_size, hidden_size, output_size):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        # Weights and biases
        self.W1 = np.random.randn(self.input_size, self.hidden_size)  # Weights between input and hidden layer
        self.b1 = np.ones((1, self.hidden_size))  # Biases for the hidden layer
        self.W2 = np.random.randn(self.hidden_size, self.output_size)  # Weights between hidden and output layer
        self.b2 = np.ones((1, self.output_size))  # Biases for the output layer
    # Forward pass
    def forward(self, X):
        self.z1 = np.dot(X, self.W1) + self.b1  # Linear combination for hidden layer
        self.a1 = sigmoid(self.z1)  # Apply activation function to hidden layer
        self.z2 = np.dot(self.a1, self.W2) + self.b2  # Linear combination for output layer
        self.a2 = self.z2  # Output layer (no activation for regression)
        return self.a2
    # Backward pass (backpropagation)
    def backward(self, X, y, output, learning_rate):
        m = X.shape[0]  # Number of training examples
        # Error and delta calculations
        self.error = y - output  # Error at the output layer
        self.delta_output = self.error  # Delta for the output layer (linear output, so no activation derivative)
        self.error_hidden = np.dot(self.delta_output, self.W2.T)  # Error propagated back to the hidden layer
        self.delta_hidden = self.error_hidden * sigmoid_derivative(self.a1)  # Delta for the hidden layer
        # Gradient calculations; the deltas already carry the sign of (y - output),
        # so adding learning_rate * grad below performs gradient descent
        self.W2_grad = np.dot(self.a1.T, self.delta_output) / m
        self.b2_grad = np.sum(self.delta_output, axis=0, keepdims=True) / m
        self.W1_grad = np.dot(X.T, self.delta_hidden) / m
        self.b1_grad = np.sum(self.delta_hidden, axis=0, keepdims=True) / m
        # Update weights and biases
        self.W2 += learning_rate * self.W2_grad
        self.b2 += learning_rate * self.b2_grad
        self.W1 += learning_rate * self.W1_grad
        self.b1 += learning_rate * self.b1_grad
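# Optional numerical gradient check (a minimal illustrative sketch, not part of
# the original network; the helper name check_w2_gradient is made up here).
# The network stores W2_grad with the sign of (y - output), i.e. the *negative*
# gradient of the half-MSE loss 0.5 * mean((y - output)^2), which is why the
# update `W2 += learning_rate * W2_grad` performs gradient descent. A central
# finite difference on one entry of W2 should therefore match -W2_grad.
# Assumes a single output unit, as in the network below.
def check_w2_gradient(net, X, y, i=0, j=0, eps=1e-5):
    def half_mse():
        return 0.5 * np.mean((y - net.forward(X)) ** 2)
    output = net.forward(X)
    net.backward(X, y, output, learning_rate=0.0)  # learning_rate=0: compute gradients without updating
    analytic = -net.W2_grad[i, j]
    original = net.W2[i, j]
    net.W2[i, j] = original + eps
    loss_plus = half_mse()
    net.W2[i, j] = original - eps
    loss_minus = half_mse()
    net.W2[i, j] = original  # Restore the weight
    numerical = (loss_plus - loss_minus) / (2 * eps)
    return analytic, numerical
# Example (once `nn` and the normalized data below exist):
# print(check_w2_gradient(nn, X_normalized, y_normalized))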
# Create a network object
nn = NeuralNetwork(input_size=2, hidden_size=4, output_size=1)
# Define the training data
# [size in m², age in years]
X = np.array([
[100, 5], [120, 10], [80, 15], [150, 2], [90, 20],
[110, 7], [95, 12], [130, 8], [140, 5], [75, 18],
[85, 14], [125, 6], [100, 10], [135, 4], [105, 9],
[115, 11], [140, 3], [80, 20], [90, 22], [120, 14]
])
# Price in thousand euros
y = np.array([
[200], [220], [170], [280], [160],
[210], [175], [225], [270], [155],
[185], [230], [195], [265], [175],
[215], [275], [165], [185], [225]
])
# Normalize the data (zero mean, unit variance)
X_mean, X_std = X.mean(axis=0), X.std(axis=0)
y_mean, y_std = y.mean(), y.std()
X_normalized = (X - X_mean) / X_std
y_normalized = (y - y_mean) / y_std
# Training loop
epochs = 2000
learning_rate = 0.01
losses = []
for epoch in range(epochs):
    # Forward pass
    output = nn.forward(X_normalized)
    # Backward pass
    nn.backward(X_normalized, y_normalized, output, learning_rate)
    # Calculate and record the loss (printed every 100 epochs)
    mse = np.mean(np.square(y_normalized - output))
    losses.append(mse)
    if epoch % 100 == 0:
        print(f"Epoch {epoch}, Loss: {mse}")
# Prediction function
def predict(size, age):
    """
    Predicts the house price based on size and age.

    Args:
        size: Size of the house in m².
        age: Age of the house in years.

    Returns:
        The predicted price in thousand euros.
    """
    input_normalized = (np.array([[size, age]]) - X_mean) / X_std
    output_normalized = nn.forward(input_normalized)
    return output_normalized * y_std + y_mean
plt.plot(losses)
plt.xlabel("Epoch")
plt.ylabel("Loss")
plt.title("Training loss")
plt.show()
# Test Prediction
print("\nPredictions:")
print(f"House with 110 m² and 7 years: {predict(110, 7)[0][0]:.2f} t€")
print(f"House with 85 m² and 12 years: {predict(85, 12)[0][0]:.2f} t€")