I'm curious why I get two different results when I use numbers.length-- versus numbers.length - 1 in the loop condition. I would have thought the two would behave the same. For example:
const numbers = [10, 20, 40, 30, 50]
function solution(numbers) {
  for (let i = 0; i < numbers.length - 1; i++) {
    if (numbers[i] < numbers[i + 1]) {
      console.log('TRUE: ' + numbers[i] + ' is increasing to ' + numbers[i + 1])
    } else {
      return console.log('FALSE: ' + numbers[i] + ' is decreasing to ' + numbers[i + 1])
    }
  }
}

solution(numbers)
logs out as:
"TRUE: 10 is increasing to 20"
"TRUE: 20 is increasing to 40"
"FALSE: 40 is decreasing to 30"
but when I swap in a decrement:
for (let i = 0; i < numbers.length--; i++)
the else branch runs with undefined on both sides of the comparison:
"TRUE: 10 is increasing to 20"
"TRUE: 20 is increasing to 40"
"FALSE: undefined is decreasing to undefined"