I think the gsl_ran_gaussian function in the GNU Scientific Library is broken.

This function is supposed to return a random value from the Gaussian distribution with a mean = 0.0 and a user-specified standard deviation. However, consider the C code (below), in which the user-specified standard deviation value seems to have no effect on the random value.

### Code:

`#include`

#include "gsl/gsl_rng.h"

#include

int main( int argc, const char* argv[] )

{

printf( "\nHello World\n\n" );

gsl_rng *rng;

int random_seed = (int)time(NULL);

rng = gsl_rng_alloc(gsl_rng_mt19937);

gsl_rng_set(rng, random_seed);

`double rand_gauss;`

` `

` int reps = 5;`

int i = 0;

for (i = 0; i < reps; i++)

{

rand_gauss = gsl_ran_gaussian(rng, 0.001);

printf("sigma = 0.001, random value = %f\n", rand_gauss);

rand_gauss = gsl_ran_gaussian(rng, 0.01);

printf("sigma = 0.01, random value = %f\n", rand_gauss);

rand_gauss = gsl_ran_gaussian(rng, 0.1);

printf("sigma = 0.1, random value = %f\n", rand_gauss);

rand_gauss = gsl_ran_gaussian(rng, 1.0);

printf("sigma = 1.0, random value = %f\n", rand_gauss);

fflush(NULL);

}

}

### Result:

Hello World

sigma = 0.001, random value = 6304.000000

sigma = 0.01, random value = 3104.000000

sigma = 0.1, random value = 1952.000000

sigma = 1.0, random value = 1536.000000

sigma = 0.001, random value = 3712.000000

sigma = 0.01, random value = 1312.000000

sigma = 0.1, random value = 3424.000000

sigma = 1.0, random value = 6752.000000

sigma = 0.001, random value = 6016.000000

sigma = 0.01, random value = 8000.000000

sigma = 0.1, random value = 6752.000000

sigma = 1.0, random value = 7200.000000

sigma = 0.001, random value = 5408.000000

sigma = 0.01, random value = 7136.000000

sigma = 0.1, random value = 1056.000000

sigma = 1.0, random value = 4160.000000

sigma = 0.001, random value = 7744.000000

sigma = 0.01, random value = 5376.000000

sigma = 0.1, random value = 3072.000000

sigma = 1.0, random value = 4896.000000

WTF libGSL?