I got code from another user (link) and I connect to pool.ntp.org,
but I cannot tolerate any difference in time — I need a perfect sync
with the NTP server (then I will be happy).
My CODE:
time_t t = response.tx.to_time_t();  // NTP transmit timestamp truncated to whole seconds
char *s = ctime(&t);  // human-readable form; NOTE(review): 's' is never used afterwards
WSACleanup();  // tears down Winsock; NOTE(review): the UDP socket is never closed first (leak)
h_qtimeonStatusBar->setDateTime(QDateTime::fromTime_t(response.tx.to_time_t()));  // to_time_t() drops the sub-second fraction — up to ~1 s of visible offset comes from this line alone
But first I have this code:
getNTPTime(); //function above — sets the status-bar clock once from the NTP response
QTimer *timer = new QTimer(this);
connect(timer, SIGNAL(timeout()), this, SLOT(updateTime())); // update time = current time from 'getNTPTime()' + 1 s
// NOTE(review): two likely drift sources here:
// 1) QTimer defaults to Qt::CoarseTimer, which is allowed ~5% slack per
//    interval — each tick may fire late, and adding a fixed +1 s per tick
//    accumulates that error, making the clock run slow (the reported symptom).
// 2) start(0) starts the timer with a 0 ms interval; setInterval(1000)
//    afterwards restarts it — prefer a single start(1000).
timer->start(0);
timer->setInterval(1000);
My difference is in milliseconds (up to 1000 ms), but it is clearly visible:
my clock runs a little slower than the NTP server (this is reliable information).
How do I get rid of this difference? I tried this:
//func run after program start
{
getNTPTime();
QTimer *timer = new QTimer(this);
connect(timer, SIGNAL(timeout()), this, SLOT(updateTime()));
timer->start(0);
timer->setInterval(1000);
}
bool plemionabot1::getNTPTime(){
using namespace std::chrono;
WSADATA wsaData;
DWORD ret = WSAStartup(MAKEWORD(2, 0), &wsaData);
char *host = "pool.ntp.org"; /* Don't distribute stuff pointing here, it's not polite. */
//char *host = "time.nist.gov"; /* This one's probably ok, but can get grumpy about request rates during debugging. */
NTPMessage msg;
/* Important, if you don't set the version/mode, the server will ignore you. */
msg.clear();
msg.version = 3;
msg.mode = 3 /* client */;
NTPMessage response;
response.clear();
int sock = socket(PF_INET, SOCK_DGRAM, IPPROTO_UDP);
sockaddr_in srv_addr;
memset(&srv_addr, 0, sizeof(srv_addr));
msg.dns_lookup(host, &srv_addr); /* Helper function defined below. */
msg.sendto(sock, &srv_addr);
auto t0 = high_resolution_clock::now();
response.recv(sock);
time_t t = response.tx.to_time_t();
char *s = ctime(&t);
WSACleanup();
//QDateTime * tmp = new QDateTime;
//tmp->setMSecsSinceEpoch(response.tx.seconds); // time is too much
//h_qtimeonStatusBar->setDateTime(tmp->currentDateTime());
h_qtimeonStatusBar->setDateTime(QDateTime::fromTime_t(response.tx.to_time_t())); // tą opcją wychodzi za mało
auto t1 = high_resolution_clock::now();
h_qtimeonStatusBar->setTime(h_qtimeonStatusBar->time().addMSecs(duration_cast<milliseconds>(t1-t0).count())); // time not enough
return true;
}