122 lines
3.4 KiB
Python
Executable File
122 lines
3.4 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
|
|
|
|
"""
|
|
@authors: Yann & Sam'
|
|
"""
|
|
|
|
|
|
import random
|
|
|
|
from computeIterations import computeIteration
|
|
from computeIterations import computeIterationAvg
|
|
|
|
import numpy as np
|
|
|
|
|
|
"""
|
|
The goal of this program is to make a linear regression for any number of
|
|
parameters in the input.
|
|
"""
|
|
|
|
|
|
###############################################################################
|
|
|
|
|
|
# This function generates 'nbExamples' tuples of coordinates.
|
|
# The first member being the values of the input and the second member being
|
|
# the output corresponding to the input, to which is added some noise of
|
|
# variance 'intervalWidth'.
|
|
# The output is linear in each parameters of the input.
|
|
# 'leadingCoefficients' contains all the slopes.
|
|
def getRandomCoordinatesVectors(leadingCoefficients, nbExamples,
                                intervalWidth, intervalValues):
    """Generate 'nbExamples' noisy samples of a linear model.

    Each sample is a tuple ([x0, ..., xn], y): the inputs are drawn
    uniformly from 'intervalValues', and the output is the inner product
    of the inputs with 'leadingCoefficients' plus uniform noise drawn
    from [-|intervalWidth|, |intervalWidth|]. The output is linear in
    each parameter of the input.

    :param leadingCoefficients: slopes of the linear model, one per input
                                dimension
    :param nbExamples: number of (input, output) samples to generate
    :param intervalWidth: half-width of the uniform noise added to each
                          output (sign is ignored)
    :param intervalValues: (low, high) range each input value is drawn from
    :return: list shaped [([x0, ..., xn], y0), ..., ([x0, ..., xn], ym)]
             (with 'n' the number of parameters, 'm' the number of examples)
    """

    coordinates = []

    # Hoisted out of the loop; also tolerates a negative 'intervalWidth'.
    noiseBound = abs(intervalWidth)

    for _ in range(nbExamples):

        # Random input values, one per dimension of the model.
        X = [random.uniform(*intervalValues)
             for _ in range(len(leadingCoefficients))]

        # Noise-free output is the inner product of the input with the
        # slopes ('np.dot' on two 1-D vectors returns the inner product),
        # to which uniform noise is added.
        y = np.dot(X, leadingCoefficients) + \
            random.uniform(-noiseBound, noiseBound)

        coordinates.append((X, y))

    return coordinates
|
|
|
|
|
|
def main():
    """Run gradient-descent linear regression on synthetic data.

    Compares two strategies — example-by-example (stochastic) updates and
    averaged (batch) updates — and prints the parameters learned by each.
    """

    # ############################### Parameters ##############################

    nbIterations = 100
    nbIterationsAvg = 100

    # Learning rates for the stochastic and averaged strategies.
    alpha = 0.0001
    alphaAvg = 0.001

    # The leading coefficients of the target linear model, here:
    # y = x0 + 2 * x1 + 3 * x2 + 4 * x3
    leadingCoefficients = [1, 2, 3, 4]

    nbExamples = 25
    intervalWidth = 3          # half-width of the uniform noise added to y
    intervalValues = -10, 10   # range each input value is drawn from

    # ########################## Computing iterations #########################

    # Creates the function that makes an iteration of the gradient descent
    training, theta = computeIteration(alpha, len(leadingCoefficients))

    # Same stuff with an averaged gradient descent
    trainingAvg, thetaAvg = computeIterationAvg(alphaAvg,
                                                len(leadingCoefficients),
                                                nbExamples)

    # ################################# Begin #################################

    coordinates = getRandomCoordinatesVectors(leadingCoefficients,
                                              nbExamples,
                                              intervalWidth,
                                              intervalValues)

    # Stochastic descent: one parameter update per training example.
    for _ in range(nbIterations):

        for x, y in coordinates:

            training(x, y)

    # The three following lines compute the coordinates into the shape needed
    # by Theano for 'trainingAvg()': a list of input rows and a single row
    # holding all the outputs.
    inputs, outputs = zip(*coordinates)

    inputs = list(inputs)
    outputs = [list(outputs)]

    # Averaged (batch) descent: one parameter update per full pass.
    for _ in range(nbIterationsAvg):

        trainingAvg(inputs, outputs)

    print("Example by example: ", theta.get_value())
    print("Average: ", thetaAvg.get_value())

    # ################################## End ##################################
|
|
|
|
|
|
# Script entry point: run the demo only when executed directly, not on import.
if __name__ == '__main__':

    main()
|