Implement B&B method in affineRegression.cpp and linearRegression.cpp and remove useless loop

Yann Caumartin 2016-10-16 16:08:57 +02:00
parent 5ef8324403
commit 8826e304f7
2 changed files with 47 additions and 16 deletions
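Note on the method: the "B&B" of the commit title reads as the Barzilai-Borwein step-size rule (an interpretation of the abbreviation; the repository does not spell it out). Instead of keeping the fixed learning rate ALPHA, each iteration recomputes the step size from the latest parameter change s_k = theta_{k+1} - theta_k and gradient change y_k = g_{k+1} - g_k:

    alpha_{k+1} = (s_k . y_k) / (y_k . y_k)

Because the step taken in this code is s_k = -alpha_k * g_k, the rule becomes alpha_{k+1} = -alpha_k * (g_k . y_k) / (y_k . y_k), which is the expression added to affineRegression.cpp below; with a single parameter it collapses to alpha = -alpha * gradOld / (grad - gradOld) as in linearRegression.cpp. The "useless loop" that was removed is the inner sweep over NBPOINTS: getGradient already sums over all NBPOINTS samples, so that loop merely repeated the same full-batch step.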

affineRegression.cpp
@@ -10,9 +10,9 @@ using namespace std;
 #define ALPHA 0.001
-#define NBPOINTS 25
-#define NBITERATIONS 2088
-#define INTERNALWIDTH 0
+#define NBPOINTS 1000
+#define NBITERATIONS 1000
+#define INTERNALWIDTH 5
 double leadingCoeficient[] = {1, 2};
@@ -36,21 +36,39 @@ int main(int argc, char const *argv[])
     getRandomCoordinates(x, y);
     double theta[2] = {0};
-    double gradient[2] = {0};
+    double alpha(ALPHA);
+    double gradOld[2];
+    getGradient(x, y, theta, gradOld);
+    double grad[2];
+    double deltaGrad[2];
+    double i;
     /* Training ! */
-    for(short int i(0), j; i < NBITERATIONS; i++)
+    for(i=0; i < NBITERATIONS; i++)
     {
-        for(j = 0; j < NBPOINTS; j++)
-        {
-            getGradient(x, y, theta, gradient);
-            theta[0] -= ALPHA * gradient[0];
-            theta[1] -= ALPHA * gradient[1];
-        }
+        theta[0] -= alpha * gradOld[0];
+        theta[1] -= alpha * gradOld[1];
+        getGradient(x, y, theta, grad);
+        if(grad[0]!=gradOld[0] && grad[1]!=gradOld[1])
+        {
+            deltaGrad[0]=grad[0]-gradOld[0];
+            deltaGrad[1]=grad[1]-gradOld[1];
+            alpha=(-alpha * (gradOld[0] * deltaGrad[0] + gradOld[1] * deltaGrad[1]))
+                    / (deltaGrad[0]*deltaGrad[0] + deltaGrad[1]*deltaGrad[1]);
+        }
+        else
+            break;
+        gradOld[0]=grad[0];
+        gradOld[1]=grad[1];
     }
     cout << "Theta: [" << theta[0] << "; " << theta[1] << "]" << endl;
+    cout<<"Done in "<<i<<" iterations."<<endl;
     return 0;
 }
@@ -69,6 +87,8 @@ void getRandomCoordinates(double x[], double y[])
 void getGradient(double x[], double y[], double theta[], double gradient[])
 {
+    gradient[0]=0;
+    gradient[1]=0;
     for(short int i(0); i < NBPOINTS; i++)
     {
         gradient[0] += (theta[1] * x[i] + theta[0]) - y[i];

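The final affineRegression.cpp hunk above also zeroes gradient[0] and gradient[1] at the top of getGradient, so the two gradient evaluations per iteration no longer accumulate into the same array. For reference, here is a self-contained sketch of the new two-parameter loop; the synthetic data, the mean-squared-error cost and the 1/NBPOINTS scaling are illustrative assumptions, not the repository's exact code.

// Sketch only: Barzilai-Borwein step size for fitting y = theta1*x + theta0,
// mirroring the structure of the loop added to affineRegression.cpp.
// Constants, data and the cost function are assumptions for illustration.
#include <iostream>

const int    NBPOINTS     = 1000;
const int    NBITERATIONS = 1000;
const double ALPHA0       = 0.001;

// Gradient of the assumed cost 0.5/N * sum((theta1*x + theta0 - y)^2)
// with respect to (theta0, theta1); the array is reset before accumulating.
void getGradient(const double x[], const double y[],
                 const double theta[2], double gradient[2])
{
    gradient[0] = 0.0;
    gradient[1] = 0.0;
    for (int i = 0; i < NBPOINTS; ++i)
    {
        const double residual = theta[1] * x[i] + theta[0] - y[i];
        gradient[0] += residual;          // d/d theta0
        gradient[1] += residual * x[i];   // d/d theta1
    }
    gradient[0] /= NBPOINTS;
    gradient[1] /= NBPOINTS;
}

int main()
{
    // Noise-free synthetic points on y = 2x + 1 (illustrative data generator).
    double x[NBPOINTS], y[NBPOINTS];
    for (int i = 0; i < NBPOINTS; ++i)
    {
        x[i] = i / 100.0;
        y[i] = 2.0 * x[i] + 1.0;
    }

    double theta[2] = {0.0, 0.0};
    double alpha    = ALPHA0;
    double gradOld[2];
    getGradient(x, y, theta, gradOld);

    int it = 0;
    for (; it < NBITERATIONS; ++it)
    {
        // Gradient step with the current Barzilai-Borwein step size.
        theta[0] -= alpha * gradOld[0];
        theta[1] -= alpha * gradOld[1];

        double grad[2];
        getGradient(x, y, theta, grad);

        const double deltaGrad[2] = {grad[0] - gradOld[0], grad[1] - gradOld[1]};
        const double denom = deltaGrad[0] * deltaGrad[0] + deltaGrad[1] * deltaGrad[1];
        if (denom == 0.0)
            break;  // gradient no longer changes: stop early

        // alpha = (s . dg) / (dg . dg) with s = -alpha * gradOld and dg = deltaGrad.
        alpha = -alpha * (gradOld[0] * deltaGrad[0] + gradOld[1] * deltaGrad[1]) / denom;

        gradOld[0] = grad[0];
        gradOld[1] = grad[1];
    }

    std::cout << "Theta: [" << theta[0] << "; " << theta[1] << "]" << std::endl;
    std::cout << "Done in " << it << " iterations." << std::endl;
    return 0;
}

Compiled with something like g++ -O2 bb_affine_sketch.cpp, it should recover theta close to [1; 2] on this noise-free data. The linearRegression.cpp change below is the scalar version of the same update.
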
linearRegression.cpp
@@ -11,7 +11,7 @@ using namespace std;
 #define ALPHA 0.001
 #define NBPOINTS 25
-#define NBITERATIONS 50
+#define NBITERATIONS 1000
 #define INTERNALWIDTH 5
 #define LEADINGCOEFICIENT 2
@@ -35,17 +35,28 @@ int main(int argc, char const *argv[])
     getRandomCoordinates(x, y);
     double theta(0);
+    double alpha(ALPHA);
+    double gradOld(getGradient(x, y, theta));
+    double grad;
+    double i;
     /* Training ! */
-    for(short int i(0), j; i < NBITERATIONS; i++)
+    for(i=0; i < NBITERATIONS; i++)
     {
-        for(j = 0; j < NBPOINTS; j++)
-        {
-            theta -= ALPHA * getGradient(x, y, theta);
-        }
+        theta -= alpha * gradOld;
+        grad = getGradient(x, y, theta);
+        if (grad!=gradOld)
+            alpha = -alpha * gradOld / (grad - gradOld);
+        else
+            break;
+        gradOld = grad;
     }
     cout << "Theta: " << theta << endl;
+    cout<<"Done in "<<i<<" iterations."<<endl;
     return 0;
 }
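
And the scalar counterpart, mirroring the loop added to linearRegression.cpp; again the cost 0.5/N * sum((theta*x - y)^2), its gradient and the data generator are illustrative assumptions rather than the repository's code.

// Sketch only: scalar Barzilai-Borwein update for fitting y = theta*x,
// mirroring the loop added to linearRegression.cpp. Data, constants and
// the cost function are assumptions for illustration.
#include <iostream>

const int    NBPOINTS     = 25;
const int    NBITERATIONS = 1000;
const double ALPHA0       = 0.001;

// Gradient of the assumed cost 0.5/N * sum((theta*x - y)^2) with respect to theta.
double getGradient(const double x[], const double y[], double theta)
{
    double g = 0.0;
    for (int i = 0; i < NBPOINTS; ++i)
        g += (theta * x[i] - y[i]) * x[i];
    return g / NBPOINTS;
}

int main()
{
    // Noise-free synthetic points on y = 2x (illustrative data generator).
    double x[NBPOINTS], y[NBPOINTS];
    for (int i = 0; i < NBPOINTS; ++i)
    {
        x[i] = i;
        y[i] = 2.0 * x[i];
    }

    double theta   = 0.0;
    double alpha   = ALPHA0;
    double gradOld = getGradient(x, y, theta);

    int it = 0;
    for (; it < NBITERATIONS; ++it)
    {
        theta -= alpha * gradOld;               // step with the current alpha
        const double grad = getGradient(x, y, theta);
        if (grad == gradOld)
            break;                              // gradient unchanged: stop
        // Scalar Barzilai-Borwein: alpha = s*dg / (dg*dg) with s = -alpha*gradOld.
        alpha   = -alpha * gradOld / (grad - gradOld);
        gradOld = grad;
    }

    std::cout << "Theta: " << theta << std::endl;
    std::cout << "Done in " << it << " iterations." << std::endl;
    return 0;
}

With scalars the Barzilai-Borwein formula is simply alpha_{k+1} = s_k*y_k / (y_k*y_k); on a noise-free one-dimensional quadratic the first update already equals the exact inverse curvature, so this sketch usually stops after only a few iterations via the grad == gradOld break.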