0

The code below works great until i have around 25000-30000 objects in the Json array stored. Then the file isn't update anymore.
Does someone know what could be the issue?

foreach ($serials as $key => $serial_list) {
    foreach ($serial_list as $key => $serial) {
        $url  = 'json/voltage/voltage_' . $serial . '.json';
        $file = file_get_contents($url, true);
        if ($file != true) {
            $sql    = "SELECT * FROM realtimedata_V WHERE device_serial ='{$serial}' ORDER by timeStamp ASC limit 1200  ";
            $result = $db->mysqli->query($sql);
            while ($row = $result->fetch_assoc()) {
                $voltage_tmp[$serial][] = array(
                    (int) strtotime($row['timeStamp']) * 1000,
                    (int) $row['Va'],
                    (int) $row['Vb'],
                    (int) $row['Vc']
                );

            }
            echo '<hr>';
            echo 'new_datas_' . $serial . '= ' . json_encode($voltage_tmp[$serial]);
            echo '<hr>';
            file_put_contents($url, json_encode($voltage_tmp[$serial]));
        } else {
            $data = json_decode($file, true);
            if ($data === NULL)
                die('Unable to decode');
            unset($file);


         $AllKeys = array_keys($data);
         echo '<hr> Last_Key= <hr>';
          echo $last_index = end($AllKeys);
          echo '<hr>';
         $key = $data[$AllKeys[$last_index]];
         $key_unix = $key[0]/1000;
          $limit_sql = $last_index +2000;
            echo '<hr>';
            echo 'Last_date ' . $serial . '= ' . $last_date = date('Y-m-d H:i:s', $key_unix);
            echo '<hr>';
            echo $sql = "SELECT * FROM realtimedata_V WHERE device_serial ='{$serial}' AND timeStamp > '{$last_date}' ORDER by timeStamp ASC limit {$limit_sql}  ";
            $result  = $db->mysqli->query($sql);
            $row_cnt = $result->num_rows;
            if ($row_cnt === 0) {
                echo 'rows = ' . $row_cnt;
                echo '<hr>Any new datas for the serial N°: ' . $serial . '<hr>';
                continue;
            } else {
                while ($row = $result->fetch_assoc()) {

                    $voltage[$serial][] = array(
                        (int) strtotime($row['timeStamp']) * 1000,
                        (int) $row['Va'],
                        (int) $row['Vb'],
                        (int) $row['Vc']
                    );

                }

                $result = $data + $voltage[$serial];
                echo '<hr>';
                echo 'new_datas_' . $serial . '= ' . $new_data = json_encode($result);
                echo '<hr>';
                if (file_put_contents($url, $new_data) === false) {
                    die('unable to write file');
                }

                unset($result);
                file_put_contents($url, $new_data, LOCK_EX);
                //$result->free();
            }
        }
    }
}

I changed the configuration of the php.ini in case of memory limits. But nothing...
I tried in an another server but same issue. Does someone have an idea?

Thanks

llaid
  • 85
  • 1
  • 10
  • So which part is failing, encoding or decoding? From that spaghetti code, it's absolutely unclear what part you're having problems with. – N.B. Apr 24 '15 at 11:19
  • Hello, the code works perfectly but after 25000-30000 objects in the array, the file is not update. Encoding and decoding works. May be an idea to add an error message if file is not saved and the reason? – llaid Apr 24 '15 at 11:22
  • Did you check the php error log? Is there any info on what is wrong? – Stepashka Apr 24 '15 at 11:26
  • Good idea i do it = > error : 32767 ! :( – llaid Apr 24 '15 at 11:36
  • Not sure that really this code is an error, in fact. I have it also when the script is working and datas save on the file. – llaid Apr 24 '15 at 11:49

0 Answers0