I am trying to improve a software RTC (SWRTC) by adjusting its definition of a second (long unsigned n_ticks_per_sec) each time I synchronize with a time server.
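In other words, at every sync I compute drift_per_sec_TICK = (posix_time - at_update_posix_time) / (at_update_posix_time - last_update_posix_time) and add it to n_ticks_per_sec. With the values below that is -60 / 14410, i.e. roughly -0.004, which should truncate to a correction of 0 ticks: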
#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
    int32_t total_drift_SEC;
    int32_t drift_per_sec_TICK;
    uint32_t at_update_posix_time = 1491265740;
    uint32_t posix_time = 1491265680;
    uint32_t last_update_posix_time = 1491251330;
    long unsigned n_ticks_per_sec = 1000;

    /* how far the clock drifted since the last update */
    total_drift_SEC = posix_time - at_update_posix_time;
    /* spread that drift over the interval between the two updates */
    drift_per_sec_TICK = ((float) total_drift_SEC) / (at_update_posix_time - last_update_posix_time);
    n_ticks_per_sec += drift_per_sec_TICK;

    printf("Total drift sec %" PRId32 "\r\n", total_drift_SEC);
    printf("Drift per sec in ticks %" PRId32 "\r\n", drift_per_sec_TICK);
    printf("n_ticks_per_second %lu\r\n", n_ticks_per_sec);
    return 0;
}
What I don't understand is why I have to cast total_drift_SEC to float to get the correct result, i.e. for n_ticks_per_sec to come out as 1000.
The output of this code is:
Total drift sec -60
Drift per sec in ticks 0
n_ticks_per_second 1000
Without the cast to float, the output is:
Total drift sec -60
Drift per sec in ticks 298054
n_ticks_per_second 299054
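To isolate the behaviour, here is a reduced version with just the division. This is only a sketch of what I suspect matters, namely mixing the signed numerator with the unsigned denominator; it reproduces both results with the same numbers:

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

int main(void) {
    int32_t num = -60;     /* same value as total_drift_SEC */
    uint32_t den = 14410;  /* 1491265740 - 1491251330 */

    /* plain integer division: num gets converted to uint32_t
       (4294967236) before dividing, and 4294967236 / 14410 == 298054 */
    printf("without cast: %" PRIu32 "\n", num / den);

    /* float division: -60.0f / 14410 is about -0.00416, which
       truncates to 0 when converted back to int32_t */
    printf("with cast: %" PRId32 "\n", (int32_t)((float) num / den));
    return 0;
}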