Hi!
I am trying to figure out the app_timer_library but am running into some (to me) strange behaviour. For the first seconds it appears as if the LFCLK is running much faster than expected, before it starts to run as it should. I have the following snippet which when measuring pin 27 with oscilloscope produces a very rapid signal for approximately 10 seconds before going into 10Hz. If I print the time it behaves analogously.
In the sdk_config file, I have defined APP_TIMER_CONFIG_RTC_FREQUENCY 0, which I believe (although in my opinion inconcurrent with documentation?) would mean that the RTC would count up by 1000 Hz.
Could someone help me in the right direction of what I am doing wrong and how I should do to create a signal of 10 Hz already from start?
It is nrf52840DK.
Thank you!
#include "nrf_drv_spi.h" #include "app_util_platform.h" #include "nrf_gpio.h" #include "nrf_delay.h" #include "boards.h" #include "app_error.h" #include <string.h> #include "nrf_log.h" #include "nrf_log_ctrl.h" #include "nrf_log_default_backends.h" #include "app_timer.h" #include "nrf_drv_clock.h" #include "nrfx_clock.h" #define TEST_PIN NRF_GPIO_PIN_MAP(0,27) //TIMER uint32_t millis(void) { return(app_timer_cnt_get() ); } static void lfclk_request(void) { ret_code_t err_code = nrf_drv_clock_init(); APP_ERROR_CHECK(err_code); nrf_drv_clock_lfclk_request(NULL); } int main(void) { bsp_board_init(BSP_INIT_LEDS); APP_ERROR_CHECK(NRF_LOG_INIT(NULL)); NRF_LOG_DEFAULT_BACKENDS_INIT(); nrf_gpio_cfg_output(TEST_PIN); uint32_t currentMillis; uint32_t previousMillis; lfclk_request(); app_timer_init(); uint32_t freq; while (1) { //TIMER previousMillis=currentMillis; currentMillis=millis(); freq=freq+currentMillis-previousMillis; if (freq>(100)){ freq=0; nrf_gpio_pin_toggle(TEST_PIN); } //10 Hz //TIMER //NRF_LOG_INFO("task time (ticks: %d", currentMillis-previousMillis); //NRF_LOG_INFO("ms: %d", freq); //NRF_LOG_FLUSH(); //nrf_delay_ms(10); } }