This repository has been archived on 2023-11-03. You can view files and clone it, but cannot push or open issues or pull requests.
MINDLE/dev_null/Theano/LinearRegression/linearRegression.py

167 lines
4.1 KiB
Python
Executable File

#!/usr/bin/env python3
"""
@authors: Yann & Sam'
"""
import random
from computeIterations import computeIteration
from computeIterations import computeIterationAvg
import matplotlib.pyplot as plt
import numpy as np
###############################################################################
# Generates 'nbPoints' (x, y) coordinate tuples scattered around the straight
# line y = leadingCoeficient * x, each y offset by uniform noise drawn from
# [-|intervalWidth|, +|intervalWidth|].  Returns them as a list of tuples.
def getRandomCoordinates(leadingCoeficient, nbPoints, intervalWidth):
    halfWidth = abs(intervalWidth)
    return [(x, leadingCoeficient * x +
             random.uniform(-halfWidth, halfWidth))
            for x in range(nbPoints)]
# Plots the given (x, y) tuples in red so they appear in the next display.
def addCoordinates(coordinates):
    # Unzip the list of tuples into two parallel axes:
    # [(x0, y0), ..., (xn, yn)] -> [x0, ..., xn] and [y0, ..., yn]
    xs, ys = (list(axis) for axis in zip(*coordinates))
    plt.plot(xs, ys, 'r')
# Prepares the matplotlib window (title, axis labels, grid, axis limits)
# for the plots that follow.
def setDisplay(leadingCoeficient, nbPoints, intervalWidth, displayTitle):
    noise = abs(intervalWidth)
    # Creates the window used next
    plt.figure('Linear Regression')
    # Display parameters
    plt.title(displayTitle)
    plt.xlabel('Input')
    plt.ylabel('Output')
    plt.grid(True)
    # Axis range covers the whole noisy line: x over [0, nbPoints] and
    # y from the largest downward noise up to the line's endpoint plus noise.
    plt.axis([0, nbPoints, -noise, nbPoints * leadingCoeficient + noise])
# Draws the fitted line y = theta * x over [0, nbPoints) in the given color
# and refreshes the legend (upper-left corner) with its equation.
def displayWindow(nbPoints, theta, myColor):
    slope = round(float(theta), 3)
    # Sample points along the x axis
    xs = np.arange(0., nbPoints)
    plt.plot(xs, xs * theta, color=myColor,
             label="y = {} * x".format(slope))
    # Places the legend in the upper left corner
    plt.legend(loc="upper left", frameon=True)
def main():
    """Run the linear-regression demo and display the three fitted lines."""
    # ############################## Parameters ##############################
    nbIterations = 10
    nbIterationsAvg = 250
    # Slope of the generating line — the value the regressions try to recover
    leadingCoeficient = 2
    nbPoints = 25
    intervalWidth = 3
    alpha = 0.001
    alphaAvg = 0.001
    # ######################### Computing iterations ##########################
    # Builds the Theano function performing one gradient-descent step
    # (example by example), together with its shared parameter 'theta'
    training, theta = computeIteration(alpha, 1)
    # Same thing for the averaged (batch) gradient descent
    trainingAvg, thetaAvg = computeIterationAvg(alphaAvg, 1, nbPoints)
    # ################################ Begin #################################
    setDisplay(leadingCoeficient, nbPoints, intervalWidth,
               "Getting close by linear regression with "
               "(blue: ex by ex, red: average) and without (green) gradient")
    coordinates = getRandomCoordinates(leadingCoeficient,
                                       nbPoints,
                                       intervalWidth)
    addCoordinates(coordinates)
    # Linear regression with gradient (example-by-example method): each epoch
    # feeds every point to the training function once
    for _ in range(nbIterations):
        for x, y in coordinates:
            training(x, y)
    #################################
    # Linear regression with gradient (average method).
    # Reshape the coordinates into the [[x0, ..., xn]] / [[y0, ..., yn]]
    # layout that Theano expects for 'trainingAvg()'
    inputs, outputs = zip(*coordinates)
    inputs = [list(inputs)]
    outputs = [list(outputs)]
    for _ in range(nbIterationsAvg):
        trainingAvg(inputs, outputs)
    #################################
    # Linear regression without gradient: average the y/x ratios,
    # skipping x == 0 to avoid a division by zero
    ratios = [y / x for x, y in coordinates if x != 0]
    average = sum(ratios) / len(ratios)
    ####################################
    displayWindow(nbPoints, average, 'green')
    displayWindow(nbPoints, theta.get_value(), 'blue')
    displayWindow(nbPoints, thetaAvg.get_value(), 'red')
    plt.show(block=True)
    # ################################## End ##################################
# Script entry point: run the demo only when executed directly, not on import
if __name__ == '__main__':
    main()