Here's my code. fillArray populates a 7x5 array with random values, stdDev is supposed to calculate the standard deviation of that array, and otherStats is supposed to find the largest and smallest values in it. So far, all I'm getting for the deviation, largest, and smallest is 0. Here's the code:
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

float fillArray (float array[7][5]);
float printArray (float array[7][5], float deviation, float largest, float smallest);
float stdDev (float array[7][5], float deviation, float average);
float otherStats (float array[7][5], float largest, float smallest);

int main ()
{
    float deviation, average, largest, smallest;
    float array[7][5];
    fillArray (array);
    stdDev (array, deviation, average);
    otherStats (array, largest, smallest);
    printArray (array, deviation, largest, smallest);
}

float fillArray (float array[7][5])
{
    int row, column;
    for (row = 0; row < 7; row++)
    {
        for (column = 0; column < 5; column++)
        {
            array[row][column] = (float) rand () / (float) RAND_MAX;
        }
    }
    return array[7][5];
}

float stdDev (float array[7][5], float deviation, float average)
{
    float number1, number2;
    array[7][5] = fillArray(array);
    int ROw, Col;
    for (ROw = 0; ROw < 7; ROw++)
    {
        for (Col = 0; Col < 5; Col++)
        {
            number1 = array[ROw][Col] + number1;
            average = number1 / 35;
        }
    }
    for (ROw = 0; ROw < 7; ROw++)
    {
        for (Col = 0; Col < 5; Col++)
        {
            number2 = average - array[ROw][Col];
            deviation = sqrt (number2 / 35);
        }
    }
    return deviation;
}

float otherStats (float array[7][5], float largest, float smallest)
{
    array[7][5] = fillArray(array);
    float num1, num2; //Check which ones largest or smallest.
    int ROW, COLUMN;
    for (ROW = 0; ROW < 7; ROW++)
    {
        for (COLUMN = 0; COLUMN < 5; COLUMN++)
        {
            num1 = array[ROW][COLUMN];
            num2 = array[1][1];
            largest = num2;
            smallest = num1;
            if (num1 > num2)
            {
                largest = num1;
            }
            else
            {
                smallest = num1;
            }
        }
    }
    return largest, smallest;
}

float printArray (float array[7][5], float deviation, float largest, float smallest)
{
    int Row, Column;
    printf("Column #: ");
    for (Column = 0; Column < 5; Column++)
    {
        printf ("%d ", Column);
    }
    printf("\nRow #|________________________________\n");
    for (Row = 0; Row < 7; Row++)
    {
        printf("%d | ", Row);
        for (Column = 0; Column < 5; Column++)
        {
            printf ("%4.2f ", array[Row][Column]);
        }
        printf ("\n");
    }
    printf("The standard deviation is %f, the largest is %f, the smallest is %f.\n",
           deviation, largest, smallest);
}
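For reference, what I want stdDev to give me is the population standard deviation over all 35 entries, i.e. sqrt( (sum of (x - mean)^2 over every element) / 35 ), and otherStats should report the single largest and smallest values anywhere in the array.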
Any help figuring out my error would be much appreciated. It compiles fine; it's just that my logic is messed up somewhere.
Thanks in advance.
This is the output:
Column #: 0 1 2 3 4
Row #|________________________________
0 | 0.53 0.04 0.44 0.93 0.93
1 | 0.72 0.28 0.74 0.64 0.35
2 | 0.69 0.17 0.44 0.88 0.83
3 | 0.33 0.23 0.89 0.35 0.69
4 | 0.96 0.59 0.66 0.86 0.44
5 | 0.92 0.40 0.81 0.68 0.91
6 | 0.48 0.22 0.95 0.92 0.15
The standard deviation is -0.000000, the largest is 0.000000, the smallest is 0.000000.
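In case it helps to see the numbers I'm expecting, here's a minimal standalone sketch of the calculation I'm aiming for (population standard deviation over the 35 values, plus the overall largest and smallest). It's only meant to illustrate the math: the function names mirror mine, the ROWS/COLS macros are just for the sketch, and it hands the results back to main through a return value and pointers, which isn't necessarily how my actual program has to be structured.

    /* Sketch of the intended calculation, not my actual assignment code. */
    #include <stdio.h>
    #include <stdlib.h>
    #include <math.h>

    #define ROWS 7
    #define COLS 5

    void fillArray(float array[ROWS][COLS])
    {
        for (int row = 0; row < ROWS; row++)
            for (int col = 0; col < COLS; col++)
                array[row][col] = (float) rand() / (float) RAND_MAX;
    }

    float stdDev(float array[ROWS][COLS])
    {
        float sum = 0.0f;                     /* running total of all entries  */
        for (int row = 0; row < ROWS; row++)
            for (int col = 0; col < COLS; col++)
                sum += array[row][col];
        float average = sum / (ROWS * COLS);  /* mean of the 35 values         */

        float sumSq = 0.0f;                   /* sum of squared deviations     */
        for (int row = 0; row < ROWS; row++)
            for (int col = 0; col < COLS; col++)
            {
                float diff = array[row][col] - average;
                sumSq += diff * diff;
            }
        return sqrtf(sumSq / (ROWS * COLS));  /* population standard deviation */
    }

    void otherStats(float array[ROWS][COLS], float *largest, float *smallest)
    {
        *largest = *smallest = array[0][0];   /* start from a real element     */
        for (int row = 0; row < ROWS; row++)
            for (int col = 0; col < COLS; col++)
            {
                if (array[row][col] > *largest)  *largest  = array[row][col];
                if (array[row][col] < *smallest) *smallest = array[row][col];
            }
    }

    int main(void)
    {
        float array[ROWS][COLS];
        float largest, smallest;

        fillArray(array);                     /* fill once, then reuse it      */
        float deviation = stdDev(array);
        otherStats(array, &largest, &smallest);

        printf("deviation %f largest %f smallest %f\n",
               deviation, largest, smallest);
        return 0;
    }

In that sketch the min/max start from array[0][0] rather than from a separate comparison value, and the division by ROWS * COLS (35) is what I was going for with the / 35 in my code.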