bluetooth: controller: openisa/RV32M1: BLE Link Layer ULL/LLL split

This commit takes the Nordic LLL and adapts it to the RV32M1 SoC, using
the IP blocks specific to this SoC: GenFSK and LPTMR.

Signed-off-by: George Stefan <george.stefan@nxp.com>
Signed-off-by: Radu Alexe <radu.alexe@nxp.com>
Signed-off-by: Ionut Ursescu <ionut.ursescu@nxp.com>
George Stefan 2019-09-25 08:12:14 +03:00 committed by Carles Cufí
commit b063456015
23 changed files with 4788 additions and 0 deletions


@@ -0,0 +1,15 @@
/*
* Copyright (c) 2016 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#define BT_HCI_VS_HW_PLAT 0
#define BT_HCI_VS_HW_VAR 0
/* Map vendor command handler directly to common implementation */
inline int hci_vendor_cmd_handle(u16_t ocf, struct net_buf *cmd,
struct net_buf **evt)
{
return hci_vendor_cmd_handle_common(ocf, cmd, evt);
}


@@ -0,0 +1,599 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
* Copyright 2019 NXP
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <errno.h>
#include <zephyr/types.h>
#include <device.h>
#include <drivers/entropy.h>
#include <drivers/clock_control.h>
#include <soc.h>
#include "hal/swi.h"
#include "hal/ccm.h"
#include "hal/radio.h"
#include "hal/ticker.h"
#include "util/mem.h"
#include "util/memq.h"
#include "util/mayfly.h"
#include "ticker/ticker.h"
#include "lll.h"
#include "lll_vendor.h"
#include "lll_internal.h"
#define LOG_MODULE_NAME bt_ctlr_llsw_openisa_lll
#include "common/log.h"
#include "hal/debug.h"
static struct {
struct {
void *param;
lll_is_abort_cb_t is_abort_cb;
lll_abort_cb_t abort_cb;
} curr;
} event;
static struct {
struct device *clk_hf;
} lll;
/* Entropy device */
static struct device *dev_entropy;
static int init_reset(void);
static int prepare(lll_is_abort_cb_t is_abort_cb, lll_abort_cb_t abort_cb,
lll_prepare_cb_t prepare_cb, int prio,
struct lll_prepare_param *prepare_param, u8_t is_resume);
static int resume_enqueue(lll_prepare_cb_t resume_cb, int resume_prio);
#if !defined(CONFIG_BT_CTLR_LOW_LAT)
static void ticker_start_op_cb(u32_t status, void *param);
static void preempt_ticker_cb(u32_t ticks_at_expire, u32_t remainder,
u16_t lazy, void *param);
static void preempt(void *param);
#else /* CONFIG_BT_CTLR_LOW_LAT */
#if (CONFIG_BT_CTLR_LLL_PRIO == CONFIG_BT_CTLR_ULL_LOW_PRIO)
static void ticker_op_job_disable(u32_t status, void *op_context);
#endif
#endif /* CONFIG_BT_CTLR_LOW_LAT */
static void rtc0_rv32m1_isr(void *arg)
{
DEBUG_TICKER_ISR(1);
/* On compare0 run ticker worker instance0 */
if (LPTMR1->CSR & LPTMR_CSR_TCF(1)) {
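/* TCF is write-one-to-clear: writing the flag back below acknowledges
* the LPTMR compare interrupt before the ticker worker runs (RV32M1
* LPTMR behaviour, stated here as an assumption).
*/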
LPTMR1->CSR |= LPTMR_CSR_TCF(1);
ticker_trigger(0);
}
mayfly_run(TICKER_USER_ID_ULL_HIGH);
#if !defined(CONFIG_BT_CTLR_LOW_LAT) && \
(CONFIG_BT_CTLR_ULL_HIGH_PRIO == CONFIG_BT_CTLR_ULL_LOW_PRIO)
mayfly_run(TICKER_USER_ID_ULL_LOW);
#endif
DEBUG_TICKER_ISR(0);
}
static void swi_lll_rv32m1_isr(void *arg)
{
DEBUG_RADIO_ISR(1);
mayfly_run(TICKER_USER_ID_LLL);
DEBUG_RADIO_ISR(0);
}
#if defined(CONFIG_BT_CTLR_LOW_LAT) || \
(CONFIG_BT_CTLR_ULL_HIGH_PRIO != CONFIG_BT_CTLR_ULL_LOW_PRIO)
static void swi_ull_low_rv32m1_isr(void *arg)
{
DEBUG_TICKER_JOB(1);
mayfly_run(TICKER_USER_ID_ULL_LOW);
DEBUG_TICKER_JOB(0);
}
#endif
int lll_init(void)
{
struct device *clk_k32;
int err;
ARG_UNUSED(clk_k32);
/* Get reference to entropy device */
dev_entropy = device_get_binding(CONFIG_ENTROPY_NAME);
if (!dev_entropy) {
dev_entropy = NULL;
/* return -ENODEV; */
}
/* Initialise LLL internals */
event.curr.abort_cb = NULL;
/* Initialize HF CLK */
lll.clk_hf = NULL;
err = init_reset();
if (err) {
return err;
}
/* Initialize SW IRQ structure */
hal_swi_init();
/* Connect ISRs */
IRQ_CONNECT(LL_RADIO_IRQn, CONFIG_BT_CTLR_LLL_PRIO, isr_radio, NULL, 0);
IRQ_CONNECT(LL_RTC0_IRQn, CONFIG_BT_CTLR_ULL_HIGH_PRIO,
rtc0_rv32m1_isr, NULL, 0);
IRQ_CONNECT(HAL_SWI_RADIO_IRQ, CONFIG_BT_CTLR_LLL_PRIO,
swi_lll_rv32m1_isr, NULL, 0);
#if defined(CONFIG_BT_CTLR_LOW_LAT) || \
(CONFIG_BT_CTLR_ULL_HIGH_PRIO != CONFIG_BT_CTLR_ULL_LOW_PRIO)
IRQ_CONNECT(HAL_SWI_JOB_IRQ, CONFIG_BT_CTLR_ULL_LOW_PRIO,
swi_ull_low_rv32m1_isr, NULL, 0);
#endif
/* Enable IRQs */
irq_enable(LL_RADIO_IRQn);
irq_enable(LL_RTC0_IRQn);
irq_enable(HAL_SWI_RADIO_IRQ);
#if defined(CONFIG_BT_CTLR_LOW_LAT) || \
(CONFIG_BT_CTLR_ULL_HIGH_PRIO != CONFIG_BT_CTLR_ULL_LOW_PRIO)
irq_enable(HAL_SWI_JOB_IRQ);
#endif
/* Call it after IRQ enable to be able to measure ISR latency */
radio_setup();
return 0;
}
u8_t lll_entropy_get(u8_t len, void *rand)
{
/* entropy_get_entropy_isr(dev_entropy, rand, len, 0); */
return 0;
}
int lll_reset(void)
{
int err;
err = init_reset();
if (err) {
return err;
}
return 0;
}
int lll_prepare(lll_is_abort_cb_t is_abort_cb, lll_abort_cb_t abort_cb,
lll_prepare_cb_t prepare_cb, int prio,
struct lll_prepare_param *prepare_param)
{
return prepare(is_abort_cb, abort_cb, prepare_cb, prio, prepare_param,
0);
}
void lll_resume(void *param)
{
struct lll_event *next = param;
int ret;
ret = prepare(next->is_abort_cb, next->abort_cb, next->prepare_cb,
next->prio, &next->prepare_param, next->is_resume);
LL_ASSERT(!ret || ret == -EINPROGRESS);
}
void lll_disable(void *param)
{
/* LLL disable of current event, done is generated */
if (!param || (param == event.curr.param)) {
if (event.curr.abort_cb && event.curr.param) {
event.curr.abort_cb(NULL, event.curr.param);
} else {
LL_ASSERT(!param);
}
}
{
struct lll_event *next;
u8_t idx = UINT8_MAX;
next = ull_prepare_dequeue_iter(&idx);
while (next) {
if (!next->is_aborted &&
(!param || (param == next->prepare_param.param))) {
next->is_aborted = 1;
next->abort_cb(&next->prepare_param,
next->prepare_param.param);
}
next = ull_prepare_dequeue_iter(&idx);
}
}
}
int lll_prepare_done(void *param)
{
#if defined(CONFIG_BT_CTLR_LOW_LAT) && \
(CONFIG_BT_CTLR_LLL_PRIO == CONFIG_BT_CTLR_ULL_LOW_PRIO)
u32_t ret;
/* Ticker Job Silence */
ret = ticker_job_idle_get(TICKER_INSTANCE_ID_CTLR,
TICKER_USER_ID_LLL,
ticker_op_job_disable, NULL);
return ((ret == TICKER_STATUS_SUCCESS) ||
(ret == TICKER_STATUS_BUSY)) ? 0 : -EFAULT;
#else
return 0;
#endif /* CONFIG_BT_CTLR_LOW_LAT */
}
int lll_done(void *param)
{
struct lll_event *next = ull_prepare_dequeue_get();
struct ull_hdr *ull = NULL;
void *evdone;
int ret = 0;
/* Assert if param supplied without a pending prepare to cancel. */
LL_ASSERT(!param || next);
/* check if current LLL event is done */
if (!param) {
/* Reset current event instance */
LL_ASSERT(event.curr.abort_cb);
event.curr.abort_cb = NULL;
param = event.curr.param;
event.curr.param = NULL;
if (param) {
ull = HDR_ULL(((struct lll_hdr *)param)->parent);
}
if (IS_ENABLED(CONFIG_BT_CTLR_LOW_LAT) &&
(CONFIG_BT_CTLR_LLL_PRIO == CONFIG_BT_CTLR_ULL_LOW_PRIO)) {
mayfly_enable(TICKER_USER_ID_LLL,
TICKER_USER_ID_ULL_LOW,
1);
}
DEBUG_RADIO_CLOSE(0);
} else {
ull = HDR_ULL(((struct lll_hdr *)param)->parent);
}
/* Let ULL know about LLL event done */
evdone = ull_event_done(ull);
LL_ASSERT(evdone);
return ret;
}
bool lll_is_done(void *param)
{
/* FIXME: use param to check */
return !event.curr.abort_cb;
}
int lll_clk_on(void)
{
return 0;
}
int lll_clk_on_wait(void)
{
return 0;
}
int lll_clk_off(void)
{
return 0;
}
u32_t lll_evt_offset_get(struct evt_hdr *evt)
{
if (0) {
#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED)
} else if (evt->ticks_xtal_to_start & XON_BITMASK) {
return MAX(evt->ticks_active_to_start,
evt->ticks_preempt_to_start);
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */
} else {
return MAX(evt->ticks_active_to_start,
evt->ticks_xtal_to_start);
}
}
u32_t lll_preempt_calc(struct evt_hdr *evt, u8_t ticker_id,
u32_t ticks_at_event)
{
u32_t ticks_now = ticker_ticks_now_get();
u32_t diff;
diff = ticker_ticks_diff_get(ticks_now, ticks_at_event);
diff += HAL_TICKER_CNTR_CMP_OFFSET_MIN;
if (!(diff & BIT(HAL_TICKER_CNTR_MSBIT)) &&
(diff > HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US))) {
/* TODO: for Low Latency Feature with Advanced XTAL feature.
* 1. Release retained HF clock.
* 2. Advance the radio event to accommodate normal prepare
* duration.
* 3. Increase the preempt to start ticks for future events.
*/
return 1;
}
return 0;
}
void lll_chan_set(u32_t chan)
{
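/* Map the BLE channel index to the vendor HAL frequency channel,
* assumed here to be the offset in MHz from 2400 MHz: advertising
* channels 37/38/39 map to 2402/2426/2480 MHz, data channel 0 to
* 2404 MHz and data channel 11 to 2428 MHz.
*/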
switch (chan) {
case 37:
radio_freq_chan_set(2);
break;
case 38:
radio_freq_chan_set(26);
break;
case 39:
radio_freq_chan_set(80);
break;
default:
if (chan < 11) {
radio_freq_chan_set(4 + (chan * 2U));
} else if (chan < 40) {
radio_freq_chan_set(28 + ((chan - 11) * 2U));
} else {
LL_ASSERT(0);
}
break;
}
radio_whiten_iv_set(chan);
}
u32_t lll_radio_is_idle(void)
{
return radio_is_idle();
}
static int init_reset(void)
{
return 0;
}
static int prepare(lll_is_abort_cb_t is_abort_cb, lll_abort_cb_t abort_cb,
lll_prepare_cb_t prepare_cb, int prio,
struct lll_prepare_param *prepare_param, u8_t is_resume)
{
struct lll_event *p;
u8_t idx = UINT8_MAX;
/* Find the ready prepare in the pipeline */
p = ull_prepare_dequeue_iter(&idx);
while (p && (p->is_aborted || p->is_resume)) {
p = ull_prepare_dequeue_iter(&idx);
}
/* Current event active or another prepare is ready in the pipeline */
if (event.curr.abort_cb || (p && is_resume)) {
#if !defined(CONFIG_BT_CTLR_LOW_LAT)
u32_t preempt_anchor;
struct evt_hdr *evt;
u32_t preempt_to;
#else /* CONFIG_BT_CTLR_LOW_LAT */
lll_prepare_cb_t resume_cb;
struct lll_event *next;
int resume_prio;
#endif /* CONFIG_BT_CTLR_LOW_LAT */
int ret;
if (IS_ENABLED(CONFIG_BT_CTLR_LOW_LAT) && event.curr.param) {
/* early abort */
event.curr.abort_cb(NULL, event.curr.param);
}
/* Store the next prepare for deferred call */
ret = ull_prepare_enqueue(is_abort_cb, abort_cb, prepare_param,
prepare_cb, prio, is_resume);
LL_ASSERT(!ret);
#if !defined(CONFIG_BT_CTLR_LOW_LAT)
if (is_resume) {
return -EINPROGRESS;
}
/* Calc the preempt timeout */
evt = HDR_LLL2EVT(prepare_param->param);
preempt_anchor = prepare_param->ticks_at_expire;
preempt_to = MAX(evt->ticks_active_to_start,
evt->ticks_xtal_to_start) -
evt->ticks_preempt_to_start;
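/* NOTE: the preempt ticker expires just before the enqueued event
* would have to start; preempt() below then consults is_abort_cb()
* to either abort the current event or cancel the enqueued one.
*/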
/* Set up the preempt timeout */
ret = ticker_start(TICKER_INSTANCE_ID_CTLR,
TICKER_USER_ID_LLL,
TICKER_ID_LLL_PREEMPT,
preempt_anchor,
preempt_to,
TICKER_NULL_PERIOD,
TICKER_NULL_REMAINDER,
TICKER_NULL_LAZY,
TICKER_NULL_SLOT,
preempt_ticker_cb, NULL,
ticker_start_op_cb, NULL);
LL_ASSERT((ret == TICKER_STATUS_SUCCESS) ||
(ret == TICKER_STATUS_FAILURE) ||
(ret == TICKER_STATUS_BUSY));
#else /* CONFIG_BT_CTLR_LOW_LAT */
next = NULL;
while (p) {
if (!p->is_aborted) {
if (event.curr.param ==
p->prepare_param.param) {
p->is_aborted = 1;
p->abort_cb(&p->prepare_param,
p->prepare_param.param);
} else {
next = p;
}
}
p = ull_prepare_dequeue_iter(&idx);
}
if (next) {
/* check if resume requested by curr */
ret = event.curr.is_abort_cb(NULL, 0, event.curr.param,
&resume_cb, &resume_prio);
LL_ASSERT(ret);
if (ret == -EAGAIN) {
ret = resume_enqueue(resume_cb, resume_prio);
LL_ASSERT(!ret);
} else {
LL_ASSERT(ret == -ECANCELED);
}
}
#endif /* CONFIG_BT_CTLR_LOW_LAT */
return -EINPROGRESS;
}
event.curr.param = prepare_param->param;
event.curr.is_abort_cb = is_abort_cb;
event.curr.abort_cb = abort_cb;
return prepare_cb(prepare_param);
}
static int resume_enqueue(lll_prepare_cb_t resume_cb, int resume_prio)
{
struct lll_prepare_param prepare_param;
prepare_param.param = event.curr.param;
event.curr.param = NULL;
return ull_prepare_enqueue(event.curr.is_abort_cb, event.curr.abort_cb,
&prepare_param, resume_cb, resume_prio, 1);
}
#if !defined(CONFIG_BT_CTLR_LOW_LAT)
static void ticker_start_op_cb(u32_t status, void *param)
{
/* NOTE: this callback is present only for additional debug messages
* when needed, else it can be dispensed with.
*/
ARG_UNUSED(param);
LL_ASSERT((status == TICKER_STATUS_SUCCESS) ||
(status == TICKER_STATUS_FAILURE));
}
static void preempt_ticker_cb(u32_t ticks_at_expire, u32_t remainder,
u16_t lazy, void *param)
{
static memq_link_t link;
static struct mayfly mfy = {0, 0, &link, NULL, preempt};
u32_t ret;
ret = mayfly_enqueue(TICKER_USER_ID_ULL_HIGH, TICKER_USER_ID_LLL,
0, &mfy);
LL_ASSERT(!ret);
}
static void preempt(void *param)
{
struct lll_event *next = ull_prepare_dequeue_get();
lll_prepare_cb_t resume_cb;
u8_t idx = UINT8_MAX;
int resume_prio;
int ret;
next = ull_prepare_dequeue_iter(&idx);
if (!next || !event.curr.abort_cb || !event.curr.param) {
return;
}
while (next && (next->is_aborted || next->is_resume)) {
next = ull_prepare_dequeue_iter(&idx);
}
if (!next) {
return;
}
ret = event.curr.is_abort_cb(next->prepare_param.param, next->prio,
event.curr.param,
&resume_cb, &resume_prio);
if (!ret) {
/* Let LLL know about the cancelled prepare */
next->is_aborted = 1;
next->abort_cb(&next->prepare_param, next->prepare_param.param);
return;
}
event.curr.abort_cb(NULL, event.curr.param);
if (ret == -EAGAIN) {
struct lll_event *iter;
u8_t idx = UINT8_MAX;
iter = ull_prepare_dequeue_iter(&idx);
while (iter) {
if (!iter->is_aborted &&
event.curr.param == iter->prepare_param.param) {
iter->is_aborted = 1;
iter->abort_cb(&iter->prepare_param,
iter->prepare_param.param);
}
iter = ull_prepare_dequeue_iter(&idx);
}
ret = resume_enqueue(resume_cb, resume_prio);
LL_ASSERT(!ret);
} else {
LL_ASSERT(ret == -ECANCELED);
}
}
#else /* CONFIG_BT_CTLR_LOW_LAT */
#if (CONFIG_BT_CTLR_LLL_PRIO == CONFIG_BT_CTLR_ULL_LOW_PRIO)
static void ticker_op_job_disable(u32_t status, void *op_context)
{
ARG_UNUSED(status);
ARG_UNUSED(op_context);
/* FIXME: */
if (1 /* _radio.state != STATE_NONE */) {
mayfly_enable(TICKER_USER_ID_ULL_LOW,
TICKER_USER_ID_ULL_LOW, 0);
}
}
#endif
#endif /* CONFIG_BT_CTLR_LOW_LAT */


@@ -0,0 +1,871 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <stdbool.h>
#include <stddef.h>
#include <zephyr/types.h>
#include <sys/byteorder.h>
#include <toolchain.h>
#include <bluetooth/hci.h>
#include "hal/ccm.h"
#include "hal/radio.h"
#include "hal/ticker.h"
#include "util/util.h"
#include "util/memq.h"
#include "ticker/ticker.h"
#include "pdu.h"
#include "lll.h"
#include "lll_vendor.h"
#include "lll_adv.h"
#include "lll_chan.h"
#include "lll_filter.h"
#include "lll_internal.h"
#include "lll_tim_internal.h"
#include "lll_adv_internal.h"
#include "lll_prof_internal.h"
#define LOG_MODULE_NAME bt_ctlr_llsw_openisa_lll_adv
#include "common/log.h"
#include <soc.h>
#include "hal/debug.h"
static int init_reset(void);
static int prepare_cb(struct lll_prepare_param *prepare_param);
static int is_abort_cb(void *next, int prio, void *curr,
lll_prepare_cb_t *resume_cb, int *resume_prio);
static void abort_cb(struct lll_prepare_param *prepare_param, void *param);
static void isr_tx(void *param);
static void isr_rx(void *param);
static void isr_done(void *param);
static void isr_abort(void *param);
static void isr_cleanup(void *param);
static void isr_race(void *param);
static void chan_prepare(struct lll_adv *lll);
static inline int isr_rx_pdu(struct lll_adv *lll,
u8_t devmatch_ok, u8_t devmatch_id,
u8_t irkmatch_ok, u8_t irkmatch_id,
u8_t rssi_ready);
static inline bool isr_rx_sr_check(struct lll_adv *lll, struct pdu_adv *adv,
struct pdu_adv *sr, u8_t devmatch_ok,
u8_t *rl_idx);
static inline bool isr_rx_sr_adva_check(struct pdu_adv *adv,
struct pdu_adv *sr);
#if defined(CONFIG_BT_CTLR_SCAN_REQ_NOTIFY)
static inline int isr_rx_sr_report(struct pdu_adv *pdu_adv_rx,
u8_t rssi_ready);
#endif /* CONFIG_BT_CTLR_SCAN_REQ_NOTIFY */
static inline bool isr_rx_ci_check(struct lll_adv *lll, struct pdu_adv *adv,
struct pdu_adv *ci, u8_t devmatch_ok,
u8_t *rl_idx);
static inline bool isr_rx_ci_tgta_check(struct lll_adv *lll,
struct pdu_adv *adv, struct pdu_adv *ci,
u8_t rl_idx);
static inline bool isr_rx_ci_adva_check(struct pdu_adv *adv,
struct pdu_adv *ci);
int lll_adv_init(void)
{
int err;
err = init_reset();
if (err) {
return err;
}
return 0;
}
int lll_adv_reset(void)
{
int err;
err = init_reset();
if (err) {
return err;
}
return 0;
}
void lll_adv_prepare(void *param)
{
struct lll_prepare_param *p = param;
int err;
err = lll_clk_on();
LL_ASSERT(!err || err == -EINPROGRESS);
err = lll_prepare(is_abort_cb, abort_cb, prepare_cb, 0, p);
LL_ASSERT(!err || err == -EINPROGRESS);
}
static int init_reset(void)
{
return 0;
}
static int prepare_cb(struct lll_prepare_param *prepare_param)
{
struct lll_adv *lll = prepare_param->param;
u32_t aa = sys_cpu_to_le32(0x8e89bed6);
u32_t ticks_at_event, ticks_at_start;
struct evt_hdr *evt;
u32_t remainder_us;
u32_t remainder;
DEBUG_RADIO_START_A(1);
/* Check if stopped (on connection establishment race between LLL and
* ULL).
*/
if (lll_is_stop(lll)) {
int err;
err = lll_clk_off();
LL_ASSERT(!err || err == -EBUSY);
lll_done(NULL);
DEBUG_RADIO_START_A(0);
return 0;
}
radio_reset();
/* TODO: other Tx Power settings */
radio_tx_power_set(RADIO_TXP_DEFAULT);
#if defined(CONFIG_BT_CTLR_ADV_EXT)
/* TODO: if coded we use S8? */
radio_phy_set(lll->phy_p, 1);
radio_pkt_configure(8, PDU_AC_PAYLOAD_SIZE_MAX, (lll->phy_p << 1));
#else /* !CONFIG_BT_CTLR_ADV_EXT */
radio_phy_set(0, 0);
radio_pkt_configure(8, PDU_AC_PAYLOAD_SIZE_MAX, 0);
#endif /* !CONFIG_BT_CTLR_ADV_EXT */
radio_aa_set((u8_t *)&aa);
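/* Advertising channel parameters per the Bluetooth LE specification:
* access address 0x8E89BED6, CRC polynomial 0x00065B with preset
* 0x555555.
*/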
radio_crc_configure(((0x5bUL) | ((0x06UL) << 8) | ((0x00UL) << 16)),
0x555555);
lll->chan_map_curr = lll->chan_map;
chan_prepare(lll);
#if defined(CONFIG_BT_HCI_MESH_EXT)
_radio.mesh_adv_end_us = 0;
#endif /* CONFIG_BT_HCI_MESH_EXT */
#if defined(CONFIG_BT_CTLR_PRIVACY)
if (ull_filter_lll_rl_enabled()) {
struct lll_filter *filter =
ull_filter_lll_get(!!(lll->filter_policy));
radio_filter_configure(filter->enable_bitmask,
filter->addr_type_bitmask,
(u8_t *)filter->bdaddr);
} else
#endif /* CONFIG_BT_CTLR_PRIVACY */
if (IS_ENABLED(CONFIG_BT_CTLR_FILTER) && lll->filter_policy) {
/* Setup Radio Filter */
struct lll_filter *wl = ull_filter_lll_get(true);
radio_filter_configure(wl->enable_bitmask,
wl->addr_type_bitmask,
(u8_t *)wl->bdaddr);
}
ticks_at_event = prepare_param->ticks_at_expire;
evt = HDR_LLL2EVT(lll);
ticks_at_event += lll_evt_offset_get(evt);
ticks_at_start = ticks_at_event;
ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US);
remainder = prepare_param->remainder;
remainder_us = radio_tmr_start(1, ticks_at_start, remainder);
/* capture end of Tx-ed PDU, used to calculate HCTO. */
radio_tmr_end_capture();
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
radio_gpio_pa_setup();
radio_gpio_pa_lna_enable(remainder_us +
radio_tx_ready_delay_get(0, 0) -
CONFIG_BT_CTLR_GPIO_PA_OFFSET);
#else /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
ARG_UNUSED(remainder_us);
#endif /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
/* check if preempt to start has changed */
if (lll_preempt_calc(evt, (TICKER_ID_ADV_BASE +
ull_adv_lll_handle_get(lll)),
ticks_at_event)) {
radio_isr_set(isr_abort, lll);
radio_disable();
} else
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */
{
u32_t ret;
ret = lll_prepare_done(lll);
LL_ASSERT(!ret);
}
DEBUG_RADIO_START_A(1);
return 0;
}
#if defined(CONFIG_BT_PERIPHERAL)
static int resume_prepare_cb(struct lll_prepare_param *p)
{
struct evt_hdr *evt = HDR_LLL2EVT(p->param);
p->ticks_at_expire = ticker_ticks_now_get() - lll_evt_offset_get(evt);
p->remainder = 0;
p->lazy = 0;
return prepare_cb(p);
}
#endif /* CONFIG_BT_PERIPHERAL */
static int is_abort_cb(void *next, int prio, void *curr,
lll_prepare_cb_t *resume_cb, int *resume_prio)
{
#if defined(CONFIG_BT_PERIPHERAL)
struct lll_adv *lll = curr;
struct pdu_adv *pdu;
#endif /* CONFIG_BT_PERIPHERAL */
/* TODO: prio check */
if (next != curr) {
if (0) {
#if defined(CONFIG_BT_PERIPHERAL)
} else if (lll->is_hdcd) {
int err;
/* wrap back after the preempting event */
*resume_cb = resume_prepare_cb;
*resume_prio = 0; /* TODO: */
/* Retain HF clk */
err = lll_clk_on();
LL_ASSERT(!err || err == -EINPROGRESS);
return -EAGAIN;
#endif /* CONFIG_BT_PERIPHERAL */
} else {
return -ECANCELED;
}
}
#if defined(CONFIG_BT_PERIPHERAL)
pdu = lll_adv_data_curr_get(lll);
if (pdu->type == PDU_ADV_TYPE_DIRECT_IND) {
return 0;
}
#endif /* CONFIG_BT_PERIPHERAL */
return -ECANCELED;
}
static void abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
int err;
/* NOTE: This is not a prepare being cancelled */
if (!prepare_param) {
/* Perform event abort here.
* After event has been cleanly aborted, clean up resources
* and dispatch event done.
*/
radio_isr_set(isr_abort, param);
radio_disable();
return;
}
/* NOTE: Else clean the top half preparations of the aborted event
* currently in the preparation pipeline.
*/
err = lll_clk_off();
LL_ASSERT(!err || err == -EBUSY);
lll_done(param);
}
static void isr_tx(void *param)
{
u32_t hcto;
/* TODO: MOVE to a common interface, isr_lll_radio_status? */
if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
lll_prof_latency_capture();
}
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
if (IS_ENABLED(CONFIG_BT_CTLR_GPIO_PA_PIN) ||
IS_ENABLED(CONFIG_BT_CTLR_GPIO_LNA_PIN)) {
radio_gpio_pa_lna_disable();
}
/* TODO: MOVE ^^ */
radio_isr_set(isr_rx, param);
radio_tmr_tifs_set(EVENT_IFS_US);
radio_switch_complete_and_tx(0, 0, 0, 0);
radio_pkt_rx_set(radio_pkt_scratch_get());
/* assert if radio packet ptr is not set and radio started rx */
LL_ASSERT(!radio_is_ready());
if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
lll_prof_cputime_capture();
}
#if defined(CONFIG_BT_CTLR_PRIVACY)
if (ull_filter_lll_rl_enabled()) {
u8_t count, *irks = ull_filter_lll_irks_get(&count);
radio_ar_configure(count, irks);
}
#endif /* CONFIG_BT_CTLR_PRIVACY */
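/* hcto is the header-complete time-out for the Rx window that follows:
* if no access address is received within it, the Rx is aborted
* (terminology carried over from the Nordic LLL, noted as an
* assumption).
*/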
/* +/- 2us active clock jitter, +1 us hcto compensation */
hcto = radio_tmr_tifs_base_get() + EVENT_IFS_US + 4 + 1;
hcto += radio_rx_chain_delay_get(0, 0);
hcto += addr_us_get(0);
hcto -= radio_tx_chain_delay_get(0, 0);
radio_tmr_hcto_configure(hcto);
/* capture end of CONNECT_IND PDU, used for calculating first
* slave event.
*/
radio_tmr_end_capture();
if (IS_ENABLED(CONFIG_BT_CTLR_SCAN_REQ_RSSI)) {
radio_rssi_measure();
}
#if defined(CONFIG_BT_CTLR_GPIO_LNA_PIN)
if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
/* PA/LNA enable is overwriting packet end used in ISR
* profiling, hence back it up for later use.
*/
lll_prof_radio_end_backup();
}
radio_gpio_lna_setup();
radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + EVENT_IFS_US - 4 -
radio_tx_chain_delay_get(0, 0) -
CONFIG_BT_CTLR_GPIO_LNA_OFFSET);
#endif /* CONFIG_BT_CTLR_GPIO_LNA_PIN */
if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
/* NOTE: as scratch packet is used to receive, it is safe to
* generate profile event using rx nodes.
*/
lll_prof_send();
}
}
static void isr_rx(void *param)
{
u8_t trx_done;
u8_t crc_ok;
u8_t devmatch_ok;
u8_t devmatch_id;
u8_t irkmatch_ok;
u8_t irkmatch_id;
u8_t rssi_ready;
if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
lll_prof_latency_capture();
}
/* Read radio status and events */
trx_done = radio_is_done();
if (trx_done) {
crc_ok = radio_crc_is_valid();
devmatch_ok = radio_filter_has_match();
devmatch_id = radio_filter_match_get();
irkmatch_ok = radio_ar_has_match();
irkmatch_id = radio_ar_match_get();
rssi_ready = radio_rssi_is_ready();
} else {
crc_ok = devmatch_ok = irkmatch_ok = rssi_ready = 0U;
devmatch_id = irkmatch_id = 0xFF;
}
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
radio_filter_status_reset();
radio_ar_status_reset();
radio_rssi_status_reset();
if (IS_ENABLED(CONFIG_BT_CTLR_GPIO_PA_PIN) ||
IS_ENABLED(CONFIG_BT_CTLR_GPIO_LNA_PIN)) {
radio_gpio_pa_lna_disable();
}
if (!trx_done) {
goto isr_rx_do_close;
}
if (crc_ok) {
int err;
err = isr_rx_pdu(param, devmatch_ok, devmatch_id, irkmatch_ok,
irkmatch_id, rssi_ready);
if (!err) {
if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
lll_prof_send();
}
return;
}
}
isr_rx_do_close:
radio_isr_set(isr_done, param);
radio_disable();
}
static void isr_done(void *param)
{
struct node_rx_hdr *node_rx;
struct lll_adv *lll = param;
/* TODO: MOVE to a common interface, isr_lll_radio_status? */
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
radio_filter_status_reset();
radio_ar_status_reset();
radio_rssi_status_reset();
if (IS_ENABLED(CONFIG_BT_CTLR_GPIO_PA_PIN) ||
IS_ENABLED(CONFIG_BT_CTLR_GPIO_LNA_PIN)) {
radio_gpio_pa_lna_disable();
}
/* TODO: MOVE ^^ */
#if defined(CONFIG_BT_HCI_MESH_EXT)
if (_radio.advertiser.is_mesh &&
!_radio.mesh_adv_end_us) {
_radio.mesh_adv_end_us = radio_tmr_end_get();
}
#endif /* CONFIG_BT_HCI_MESH_EXT */
#if defined(CONFIG_BT_PERIPHERAL)
if (!lll->chan_map_curr && lll->is_hdcd) {
lll->chan_map_curr = lll->chan_map;
}
#endif /* CONFIG_BT_PERIPHERAL */
if (lll->chan_map_curr) {
u32_t start_us;
chan_prepare(lll);
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
start_us = radio_tmr_start_now(1);
radio_gpio_pa_setup();
radio_gpio_pa_lna_enable(start_us +
radio_tx_ready_delay_get(0, 0) -
CONFIG_BT_CTLR_GPIO_PA_OFFSET);
#else /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
ARG_UNUSED(start_us);
radio_tx_enable();
#endif /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
/* capture end of Tx-ed PDU, used to calculate HCTO. */
radio_tmr_end_capture();
return;
}
radio_filter_disable();
#if defined(CONFIG_BT_PERIPHERAL)
if (!lll->is_hdcd)
#endif /* CONFIG_BT_PERIPHERAL */
{
#if defined(CONFIG_BT_HCI_MESH_EXT)
if (_radio.advertiser.is_mesh) {
u32_t err;
err = isr_close_adv_mesh();
if (err) {
return;
}
}
#endif /* CONFIG_BT_HCI_MESH_EXT */
}
#if defined(CONFIG_BT_CTLR_ADV_INDICATION)
node_rx = ull_pdu_rx_alloc_peek(3);
if (node_rx) {
ull_pdu_rx_alloc();
/* TODO: add other info by defining a payload struct */
node_rx->type = NODE_RX_TYPE_ADV_INDICATION;
ull_rx_put(node_rx->link, node_rx);
ull_rx_sched();
}
#else /* !CONFIG_BT_CTLR_ADV_INDICATION */
ARG_UNUSED(node_rx);
#endif /* !CONFIG_BT_CTLR_ADV_INDICATION */
isr_cleanup(param);
}
static void isr_abort(void *param)
{
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
radio_filter_status_reset();
radio_ar_status_reset();
radio_rssi_status_reset();
if (IS_ENABLED(CONFIG_BT_CTLR_GPIO_PA_PIN) ||
IS_ENABLED(CONFIG_BT_CTLR_GPIO_LNA_PIN)) {
radio_gpio_pa_lna_disable();
}
radio_filter_disable();
isr_cleanup(param);
}
static void isr_cleanup(void *param)
{
int err;
radio_isr_set(isr_race, param);
radio_tmr_stop();
err = lll_clk_off();
LL_ASSERT(!err || err == -EBUSY);
lll_done(NULL);
}
static void isr_race(void *param)
{
/* NOTE: lll_disable could have a race with ... */
radio_status_reset();
}
static void chan_prepare(struct lll_adv *lll)
{
struct pdu_adv *pdu;
struct pdu_adv *scan_pdu;
u8_t chan;
u8_t upd = 0U;
pdu = lll_adv_data_latest_get(lll, &upd);
scan_pdu = lll_adv_scan_rsp_latest_get(lll, &upd);
#if defined(CONFIG_BT_CTLR_PRIVACY)
if (upd) {
/* Copy the address from the adv packet we will send into the
* scan response.
*/
memcpy(&scan_pdu->scan_rsp.addr[0],
&pdu->adv_ind.addr[0], BDADDR_SIZE);
}
#else
ARG_UNUSED(scan_pdu);
ARG_UNUSED(upd);
#endif /* !CONFIG_BT_CTLR_PRIVACY */
radio_pkt_tx_set(pdu);
if ((pdu->type != PDU_ADV_TYPE_NONCONN_IND) &&
(!IS_ENABLED(CONFIG_BT_CTLR_ADV_EXT) ||
(pdu->type != PDU_ADV_TYPE_EXT_IND))) {
radio_isr_set(isr_tx, lll);
radio_tmr_tifs_set(EVENT_IFS_US);
radio_switch_complete_and_rx(0);
} else {
radio_isr_set(isr_done, lll);
radio_switch_complete_and_disable();
}
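/* chan_map_curr bits 0..2 select advertising channels 37..39;
* find_lsb_set() is 1-based, hence the 36 + chan mapping below.
*/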
chan = find_lsb_set(lll->chan_map_curr);
LL_ASSERT(chan);
lll->chan_map_curr &= (lll->chan_map_curr - 1);
lll_chan_set(36 + chan);
}
static inline int isr_rx_pdu(struct lll_adv *lll,
u8_t devmatch_ok, u8_t devmatch_id,
u8_t irkmatch_ok, u8_t irkmatch_id,
u8_t rssi_ready)
{
struct pdu_adv *pdu_rx, *pdu_adv;
#if defined(CONFIG_BT_CTLR_PRIVACY)
/* An IRK match implies address resolution enabled */
u8_t rl_idx = irkmatch_ok ? ull_filter_lll_rl_irk_idx(irkmatch_id) :
FILTER_IDX_NONE;
#else
u8_t rl_idx = FILTER_IDX_NONE;
#endif /* CONFIG_BT_CTLR_PRIVACY */
pdu_rx = (void *)radio_pkt_scratch_get();
pdu_adv = lll_adv_data_curr_get(lll);
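/* SCAN_REQ addressed to us and allowed by the filter/resolving-list
* checks: reply with the current SCAN_RSP PDU after tIFS.
*/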
if ((pdu_rx->type == PDU_ADV_TYPE_SCAN_REQ) &&
(pdu_rx->len == sizeof(struct pdu_adv_scan_req)) &&
(pdu_adv->type != PDU_ADV_TYPE_DIRECT_IND) &&
isr_rx_sr_check(lll, pdu_adv, pdu_rx, devmatch_ok, &rl_idx)) {
radio_isr_set(isr_done, lll);
radio_switch_complete_and_disable();
radio_pkt_tx_set(lll_adv_scan_rsp_curr_get(lll));
/* assert if radio packet ptr is not set and radio started tx */
LL_ASSERT(!radio_is_ready());
if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
lll_prof_cputime_capture();
}
#if defined(CONFIG_BT_CTLR_SCAN_REQ_NOTIFY)
if (!IS_ENABLED(CONFIG_BT_CTLR_ADV_EXT) ||
0 /* TODO: extended adv. scan req notification enabled */) {
u32_t err;
/* Generate the scan request event */
err = isr_rx_sr_report(pdu_rx, rssi_ready);
if (err) {
/* Scan Response will not be transmitted */
return err;
}
}
#endif /* CONFIG_BT_CTLR_SCAN_REQ_NOTIFY */
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
/* PA/LNA enable is overwriting packet end used in ISR
* profiling, hence back it up for later use.
*/
lll_prof_radio_end_backup();
}
radio_gpio_pa_setup();
radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() +
EVENT_IFS_US -
radio_rx_chain_delay_get(0, 0) -
CONFIG_BT_CTLR_GPIO_PA_OFFSET);
#endif /* CONFIG_BT_CTLR_GPIO_PA_PIN */
return 0;
#if defined(CONFIG_BT_PERIPHERAL)
} else if ((pdu_rx->type == PDU_ADV_TYPE_CONNECT_IND) &&
(pdu_rx->len == sizeof(struct pdu_adv_connect_ind)) &&
isr_rx_ci_check(lll, pdu_adv, pdu_rx, devmatch_ok,
&rl_idx) &&
lll->conn) {
struct node_rx_ftr *ftr;
struct node_rx_pdu *rx;
int ret;
if (IS_ENABLED(CONFIG_BT_CTLR_CHAN_SEL_2)) {
rx = ull_pdu_rx_alloc_peek(4);
} else {
rx = ull_pdu_rx_alloc_peek(3);
}
if (!rx) {
return -ENOBUFS;
}
radio_isr_set(isr_abort, lll);
radio_disable();
/* assert if radio started tx */
LL_ASSERT(!radio_is_ready());
if (IS_ENABLED(CONFIG_BT_CTLR_PROFILE_ISR)) {
lll_prof_cputime_capture();
}
/* Stop further LLL radio events */
ret = lll_stop(lll);
LL_ASSERT(!ret);
rx = ull_pdu_rx_alloc();
rx->hdr.type = NODE_RX_TYPE_CONNECTION;
rx->hdr.handle = 0xffff;
memcpy(rx->pdu, pdu_rx, (offsetof(struct pdu_adv, connect_ind) +
sizeof(struct pdu_adv_connect_ind)));
ftr = &(rx->hdr.rx_ftr);
ftr->param = lll;
ftr->ticks_anchor = radio_tmr_start_get();
ftr->us_radio_end = radio_tmr_end_get() -
radio_tx_chain_delay_get(0, 0);
ftr->us_radio_rdy = radio_rx_ready_delay_get(0, 0);
#if defined(CONFIG_BT_CTLR_PRIVACY)
ftr->rl_idx = irkmatch_ok ? rl_idx : FILTER_IDX_NONE;
#endif /* CONFIG_BT_CTLR_PRIVACY */
if (IS_ENABLED(CONFIG_BT_CTLR_CHAN_SEL_2)) {
ftr->extra = ull_pdu_rx_alloc();
}
ull_rx_put(rx->hdr.link, rx);
ull_rx_sched();
return 0;
#endif /* CONFIG_BT_PERIPHERAL */
}
return -EINVAL;
}
static inline bool isr_rx_sr_check(struct lll_adv *lll, struct pdu_adv *adv,
struct pdu_adv *sr, u8_t devmatch_ok,
u8_t *rl_idx)
{
#if defined(CONFIG_BT_CTLR_PRIVACY)
return ((((lll->filter_policy & 0x01) == 0) &&
ull_filter_lll_rl_addr_allowed(sr->tx_addr,
sr->scan_req.scan_addr,
rl_idx)) ||
(((lll->filter_policy & 0x01) != 0) &&
(devmatch_ok || ull_filter_lll_irk_whitelisted(*rl_idx)))) &&
isr_rx_sr_adva_check(adv, sr);
#else
return (((lll->filter_policy & 0x01) == 0U) || devmatch_ok) &&
isr_rx_sr_adva_check(adv, sr);
#endif /* CONFIG_BT_CTLR_PRIVACY */
}
static inline bool isr_rx_sr_adva_check(struct pdu_adv *adv,
struct pdu_adv *sr)
{
return (adv->tx_addr == sr->rx_addr) &&
!memcmp(adv->adv_ind.addr, sr->scan_req.adv_addr, BDADDR_SIZE);
}
#if defined(CONFIG_BT_CTLR_SCAN_REQ_NOTIFY)
static inline int isr_rx_sr_report(struct pdu_adv *pdu_adv_rx,
u8_t rssi_ready)
{
struct node_rx_pdu *node_rx;
struct pdu_adv *pdu_adv;
u8_t pdu_len;
node_rx = ull_pdu_rx_alloc_peek(3);
if (!node_rx) {
return -ENOBUFS;
}
ull_pdu_rx_alloc();
/* Prepare the report (scan req) */
node_rx->hdr.type = NODE_RX_TYPE_SCAN_REQ;
node_rx->hdr.handle = 0xffff;
/* Make a copy of PDU into Rx node (as the received PDU is in the
* scratch buffer), and save the RSSI value.
*/
pdu_adv = (void *)node_rx->pdu;
pdu_len = offsetof(struct pdu_adv, payload) + pdu_adv_rx->len;
memcpy(pdu_adv, pdu_adv_rx, pdu_len);
node_rx->hdr.rx_ftr.rssi = (rssi_ready) ? (radio_rssi_get() & 0x7f) :
0x7f;
ull_rx_put(node_rx->hdr.link, node_rx);
ull_rx_sched();
return 0;
}
#endif /* CONFIG_BT_CTLR_SCAN_REQ_NOTIFY */
static inline bool isr_rx_ci_check(struct lll_adv *lll, struct pdu_adv *adv,
struct pdu_adv *ci, u8_t devmatch_ok,
u8_t *rl_idx)
{
/* LL 4.3.2: filter policy shall be ignored for directed adv */
if (adv->type == PDU_ADV_TYPE_DIRECT_IND) {
#if defined(CONFIG_BT_CTLR_PRIVACY)
return ull_filter_lll_rl_addr_allowed(ci->tx_addr,
ci->connect_ind.init_addr,
rl_idx) &&
#else
return (1) &&
#endif
isr_rx_ci_adva_check(adv, ci) &&
isr_rx_ci_tgta_check(lll, adv, ci, *rl_idx);
}
#if defined(CONFIG_BT_CTLR_PRIVACY)
return ((((lll->filter_policy & 0x02) == 0) &&
ull_filter_lll_rl_addr_allowed(ci->tx_addr,
ci->connect_ind.init_addr,
rl_idx)) ||
(((lll->filter_policy & 0x02) != 0) &&
(devmatch_ok || ull_filter_lll_irk_whitelisted(*rl_idx)))) &&
isr_rx_ci_adva_check(adv, ci);
#else
return (((lll->filter_policy & 0x02) == 0) ||
(devmatch_ok)) &&
isr_rx_ci_adva_check(adv, ci);
#endif /* CONFIG_BT_CTLR_PRIVACY */
}
static inline bool isr_rx_ci_tgta_check(struct lll_adv *lll,
struct pdu_adv *adv, struct pdu_adv *ci,
u8_t rl_idx)
{
#if defined(CONFIG_BT_CTLR_PRIVACY)
if (rl_idx != FILTER_IDX_NONE) {
return rl_idx == lll->rl_idx;
}
#endif /* CONFIG_BT_CTLR_PRIVACY */
return (adv->rx_addr == ci->tx_addr) &&
!memcmp(adv->direct_ind.tgt_addr, ci->connect_ind.init_addr,
BDADDR_SIZE);
}
static inline bool isr_rx_ci_adva_check(struct pdu_adv *adv,
struct pdu_adv *ci)
{
return (adv->tx_addr == ci->rx_addr) &&
(((adv->type == PDU_ADV_TYPE_DIRECT_IND) &&
!memcmp(adv->direct_ind.adv_addr, ci->connect_ind.adv_addr,
BDADDR_SIZE)) ||
(!memcmp(adv->adv_ind.addr, ci->connect_ind.adv_addr,
BDADDR_SIZE)));
}


@@ -0,0 +1,105 @@
/*
* Copyright (c) 2017-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
struct lll_adv_pdu {
u8_t first;
u8_t last;
/* TODO: use,
* struct pdu_adv *pdu[DOUBLE_BUFFER_SIZE];
*/
u8_t pdu[DOUBLE_BUFFER_SIZE][PDU_AC_SIZE_MAX];
};
struct lll_adv {
struct lll_hdr hdr;
#if defined(CONFIG_BT_PERIPHERAL)
/* NOTE: conn context has to be after lll_hdr */
struct lll_conn *conn;
u8_t is_hdcd:1;
#endif /* CONFIG_BT_PERIPHERAL */
u8_t chan_map:3;
u8_t chan_map_curr:3;
u8_t filter_policy:2;
#if defined(CONFIG_BT_CTLR_ADV_EXT)
u8_t phy_p:3;
#endif /* CONFIG_BT_CTLR_ADV_EXT */
#if defined(CONFIG_BT_HCI_MESH_EXT)
u8_t is_mesh:1;
#endif /* CONFIG_BT_HCI_MESH_EXT */
#if defined(CONFIG_BT_CTLR_PRIVACY)
u8_t rl_idx;
#endif /* CONFIG_BT_CTLR_PRIVACY */
struct lll_adv_pdu adv_data;
struct lll_adv_pdu scan_rsp;
};
int lll_adv_init(void);
int lll_adv_reset(void);
void lll_adv_prepare(void *param);
static inline struct pdu_adv *lll_adv_pdu_alloc(struct lll_adv_pdu *pdu,
u8_t *idx)
{
u8_t last;
if (pdu->first == pdu->last) {
last = pdu->last + 1;
if (last == DOUBLE_BUFFER_SIZE) {
last = 0U;
}
} else {
last = pdu->last;
}
*idx = last;
return (void *)pdu->pdu[last];
}
static inline void lll_adv_pdu_enqueue(struct lll_adv_pdu *pdu, u8_t idx)
{
pdu->last = idx;
}
static inline struct pdu_adv *lll_adv_data_alloc(struct lll_adv *lll, u8_t *idx)
{
return lll_adv_pdu_alloc(&lll->adv_data, idx);
}
static inline void lll_adv_data_enqueue(struct lll_adv *lll, u8_t idx)
{
lll_adv_pdu_enqueue(&lll->adv_data, idx);
}
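/* Illustrative ULL-side usage of the double buffer (a sketch, not code
* from this commit): allocate the idle buffer, fill it, then publish it
* so the LLL picks up the latest copy at its next event via
* lll_adv_data_latest_get():
*
*	u8_t idx;
*	struct pdu_adv *pdu = lll_adv_data_alloc(lll, &idx);
*
*	pdu->type = PDU_ADV_TYPE_ADV_IND;  (remaining fields elided)
*	lll_adv_data_enqueue(lll, idx);
*/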
static inline struct pdu_adv *lll_adv_data_peek(struct lll_adv *lll)
{
return (void *)lll->adv_data.pdu[lll->adv_data.last];
}
static inline struct pdu_adv *lll_adv_scan_rsp_alloc(struct lll_adv *lll,
u8_t *idx)
{
return lll_adv_pdu_alloc(&lll->scan_rsp, idx);
}
static inline void lll_adv_scan_rsp_enqueue(struct lll_adv *lll, u8_t idx)
{
lll_adv_pdu_enqueue(&lll->scan_rsp, idx);
}
static inline struct pdu_adv *lll_adv_scan_rsp_peek(struct lll_adv *lll)
{
return (void *)lll->scan_rsp.pdu[lll->scan_rsp.last];
}
extern u16_t ull_adv_lll_handle_get(struct lll_adv *lll);


@@ -0,0 +1,45 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
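/* Consumer side of the double-buffered PDU: 'first' trails 'last', so a
* differing 'last' means a newly enqueued PDU is pending; advance
* 'first' to it and report the swap through is_modified.
*/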
static inline struct pdu_adv *lll_adv_pdu_latest_get(struct lll_adv_pdu *pdu,
u8_t *is_modified)
{
u8_t first;
first = pdu->first;
if (first != pdu->last) {
first += 1U;
if (first == DOUBLE_BUFFER_SIZE) {
first = 0U;
}
pdu->first = first;
*is_modified = 1U;
}
return (void *)pdu->pdu[first];
}
static inline struct pdu_adv *lll_adv_data_latest_get(struct lll_adv *lll,
u8_t *is_modified)
{
return lll_adv_pdu_latest_get(&lll->adv_data, is_modified);
}
static inline struct pdu_adv *lll_adv_scan_rsp_latest_get(struct lll_adv *lll,
u8_t *is_modified)
{
return lll_adv_pdu_latest_get(&lll->scan_rsp, is_modified);
}
static inline struct pdu_adv *lll_adv_data_curr_get(struct lll_adv *lll)
{
return (void *)lll->adv_data.pdu[lll->adv_data.first];
}
static inline struct pdu_adv *lll_adv_scan_rsp_curr_get(struct lll_adv *lll)
{
return (void *)lll->scan_rsp.pdu[lll->scan_rsp.first];
}


@@ -0,0 +1,19 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <zephyr/types.h>
#include <soc.h>
#include <device.h>
#include <drivers/clock_control.h>
#define LOG_MODULE_NAME bt_ctlr_llsw_openisa_lll_clock
#include "common/log.h"
#include "hal/debug.h"
void lll_clock_wait(void)
{
}


@@ -0,0 +1,7 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
void lll_clock_wait(void);


@@ -0,0 +1,866 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <stdbool.h>
#include <stddef.h>
#include <toolchain.h>
#include <zephyr/types.h>
#include <sys/util.h>
#include "util/mem.h"
#include "util/memq.h"
#include "util/mfifo.h"
#include "hal/ccm.h"
#include "hal/radio.h"
#include "pdu.h"
#include "lll.h"
#include "lll_conn.h"
#include "lll_internal.h"
#include "lll_tim_internal.h"
#include "lll_prof_internal.h"
#define LOG_MODULE_NAME bt_ctlr_llsw_openisa_lll_conn
#include "common/log.h"
#include <soc.h>
#include "hal/debug.h"
static int init_reset(void);
static void isr_done(void *param);
static void isr_cleanup(void *param);
static void isr_race(void *param);
static int isr_rx_pdu(struct lll_conn *lll, struct pdu_data *pdu_data_rx,
struct node_tx **tx_release, u8_t *is_rx_enqueue);
static struct pdu_data *empty_tx_enqueue(struct lll_conn *lll);
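/* Worst-case ppm for each Sleep Clock Accuracy (SCA) field value 0..7,
* per the Bluetooth LE specification.
*/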
static u16_t const sca_ppm_lut[] = {500, 250, 150, 100, 75, 50, 30, 20};
static u8_t crc_expire;
static u8_t crc_valid;
static u16_t trx_cnt;
#if defined(CONFIG_BT_CTLR_LE_ENC)
static u8_t mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */
int lll_conn_init(void)
{
int err;
err = init_reset();
if (err) {
return err;
}
return 0;
}
int lll_conn_reset(void)
{
int err;
err = init_reset();
if (err) {
return err;
}
return 0;
}
u8_t lll_conn_sca_local_get(void)
{
return 0;
}
u32_t lll_conn_ppm_local_get(void)
{
return sca_ppm_lut[0];
}
u32_t lll_conn_ppm_get(u8_t sca)
{
return sca_ppm_lut[sca];
}
void lll_conn_prepare_reset(void)
{
trx_cnt = 0U;
crc_expire = 0U;
crc_valid = 0U;
#if defined(CONFIG_BT_CTLR_LE_ENC)
mic_state = LLL_CONN_MIC_NONE;
#endif /* CONFIG_BT_CTLR_LE_ENC */
}
int lll_conn_is_abort_cb(void *next, int prio, void *curr,
lll_prepare_cb_t *resume_cb, int *resume_prio)
{
return -ECANCELED;
}
void lll_conn_abort_cb(struct lll_prepare_param *prepare_param, void *param)
{
int err;
/* NOTE: This is not a prepare being cancelled */
if (!prepare_param) {
/* Perform event abort here.
* After event has been cleanly aborted, clean up resources
* and dispatch event done.
*/
radio_isr_set(isr_done, param);
radio_disable();
return;
}
/* NOTE: Else clean the top half preparations of the aborted event
* currently in the preparation pipeline.
*/
err = lll_clk_off();
LL_ASSERT(!err || err == -EBUSY);
lll_done(param);
}
void lll_conn_isr_rx(void *param)
{
struct node_tx *tx_release = NULL;
struct lll_conn *lll = param;
struct pdu_data *pdu_data_rx;
struct pdu_data *pdu_data_tx;
struct node_rx_pdu *node_rx;
u8_t is_empty_pdu_tx_retry;
u8_t is_crc_backoff = 0U;
u8_t is_rx_enqueue = 0U;
u8_t is_ull_rx = 0U;
u8_t rssi_ready;
u8_t trx_done;
u8_t is_done;
u8_t crc_ok;
#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
lll_prof_latency_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */
/* Read radio status and events */
trx_done = radio_is_done();
if (trx_done) {
crc_ok = radio_crc_is_valid();
rssi_ready = radio_rssi_is_ready();
} else {
crc_ok = rssi_ready = 0U;
}
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
radio_rssi_status_reset();
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN) || \
defined(CONFIG_BT_CTLR_GPIO_LNA_PIN)
radio_gpio_pa_lna_disable();
#endif /* CONFIG_BT_CTLR_GPIO_PA_PIN || CONFIG_BT_CTLR_GPIO_LNA_PIN */
if (!trx_done) {
radio_isr_set(isr_done, param);
radio_disable();
return;
}
trx_cnt++;
node_rx = ull_pdu_rx_alloc_peek(1);
LL_ASSERT(node_rx);
pdu_data_rx = (void *)node_rx->pdu;
if (crc_ok) {
u32_t err;
err = isr_rx_pdu(lll, pdu_data_rx, &tx_release, &is_rx_enqueue);
if (err) {
goto lll_conn_isr_rx_exit;
}
/* Reset CRC expiry counter */
crc_expire = 0U;
/* CRC valid flag used to detect supervision timeout */
crc_valid = 1U;
} else {
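/* Two consecutive packets received with CRC errors close the
* connection event, per the Link Layer specification.
*/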
/* Start CRC error countdown, if not already started */
if (crc_expire == 0U) {
crc_expire = 2U;
}
/* CRC error countdown */
crc_expire--;
is_crc_backoff = (crc_expire == 0U);
}
/* prepare tx packet */
is_empty_pdu_tx_retry = lll->empty;
lll_conn_pdu_tx_prep(lll, &pdu_data_tx);
/* Decide on event continuation and hence Radio Shorts to use */
is_done = is_crc_backoff || ((crc_ok) && (pdu_data_rx->md == 0) &&
(pdu_data_tx->len == 0));
if (is_done) {
radio_isr_set(isr_done, param);
if (0) {
#if defined(CONFIG_BT_CENTRAL)
/* Event done for master */
} else if (!lll->role) {
radio_disable();
/* assert if radio packet ptr is not set and radio
* started tx.
*/
LL_ASSERT(!radio_is_ready());
/* Restore state if last transmitted was empty PDU */
lll->empty = is_empty_pdu_tx_retry;
goto lll_conn_isr_rx_exit;
#endif /* CONFIG_BT_CENTRAL */
#if defined(CONFIG_BT_PERIPHERAL)
/* Event done for slave */
} else {
radio_switch_complete_and_disable();
#endif /* CONFIG_BT_PERIPHERAL */
}
} else {
radio_isr_set(lll_conn_isr_tx, param);
radio_tmr_tifs_set(EVENT_IFS_US);
#if defined(CONFIG_BT_CTLR_PHY)
radio_switch_complete_and_rx(lll->phy_rx);
#else /* !CONFIG_BT_CTLR_PHY */
radio_switch_complete_and_rx(0);
#endif /* !CONFIG_BT_CTLR_PHY */
/* capture end of Tx-ed PDU, used to calculate HCTO. */
radio_tmr_end_capture();
}
/* Fill sn and nesn */
pdu_data_tx->sn = lll->sn;
pdu_data_tx->nesn = lll->nesn;
/* setup the radio tx packet buffer */
lll_conn_tx_pkt_set(lll, pdu_data_tx);
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
/* PA enable is overwriting packet end used in ISR profiling, hence
* back it up for later use.
*/
lll_prof_radio_end_backup();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */
radio_gpio_pa_setup();
#if defined(CONFIG_BT_CTLR_PHY)
radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + EVENT_IFS_US -
radio_rx_chain_delay_get(lll->phy_rx, 1) -
CONFIG_BT_CTLR_GPIO_PA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + EVENT_IFS_US -
radio_rx_chain_delay_get(0, 0) -
CONFIG_BT_CTLR_GPIO_PA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* CONFIG_BT_CTLR_GPIO_PA_PIN */
/* assert if radio packet ptr is not set and radio started tx */
LL_ASSERT(!radio_is_ready());
lll_conn_isr_rx_exit:
/* Save the AA captured for the first Rx in connection event */
if (!radio_tmr_aa_restore()) {
radio_tmr_aa_save(radio_tmr_aa_get());
}
#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
lll_prof_cputime_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */
if (tx_release) {
LL_ASSERT(lll->handle != 0xFFFF);
ull_conn_lll_ack_enqueue(lll->handle, tx_release);
is_ull_rx = 1U;
}
if (is_rx_enqueue) {
ull_pdu_rx_alloc();
node_rx->hdr.type = NODE_RX_TYPE_DC_PDU;
node_rx->hdr.handle = lll->handle;
ull_rx_put(node_rx->hdr.link, node_rx);
is_ull_rx = 1U;
}
if (is_ull_rx) {
ull_rx_sched();
}
#if defined(CONFIG_BT_CTLR_CONN_RSSI)
/* Collect RSSI for connection */
if (rssi_ready) {
u8_t rssi = radio_rssi_get();
lll->rssi_latest = rssi;
if (((lll->rssi_reported - rssi) & 0xFF) >
LLL_CONN_RSSI_THRESHOLD) {
if (lll->rssi_sample_count) {
lll->rssi_sample_count--;
}
} else {
lll->rssi_sample_count = LLL_CONN_RSSI_SAMPLE_COUNT;
}
}
#else /* !CONFIG_BT_CTLR_CONN_RSSI */
ARG_UNUSED(rssi_ready);
#endif /* !CONFIG_BT_CTLR_CONN_RSSI */
#if defined(CONFIG_BT_CTLR_PROFILE_ISR)
lll_prof_send();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR */
}
void lll_conn_isr_tx(void *param)
{
struct lll_conn *lll = (void *)param;
u32_t hcto;
/* TODO: MOVE to a common interface, isr_lll_radio_status? */
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN) || \
defined(CONFIG_BT_CTLR_GPIO_LNA_PIN)
radio_gpio_pa_lna_disable();
#endif /* CONFIG_BT_CTLR_GPIO_PA_PIN || CONFIG_BT_CTLR_GPIO_LNA_PIN */
/* TODO: MOVE ^^ */
radio_isr_set(lll_conn_isr_rx, param);
radio_tmr_tifs_set(EVENT_IFS_US);
#if defined(CONFIG_BT_CTLR_PHY)
radio_switch_complete_and_tx(lll->phy_rx, 0,
lll->phy_tx,
lll->phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
radio_switch_complete_and_tx(0, 0, 0, 0);
#endif /* !CONFIG_BT_CTLR_PHY */
lll_conn_rx_pkt_set(lll);
/* assert if radio packet ptr is not set and radio started rx */
LL_ASSERT(!radio_is_ready());
/* +/- 2us active clock jitter, +1 us hcto compensation */
hcto = radio_tmr_tifs_base_get() + EVENT_IFS_US + 4 +
RANGE_DELAY_US + 1;
#if defined(CONFIG_BT_CTLR_PHY)
hcto += radio_rx_chain_delay_get(lll->phy_rx, 1);
hcto += addr_us_get(lll->phy_rx);
hcto -= radio_tx_chain_delay_get(lll->phy_tx, lll->phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
hcto += radio_rx_chain_delay_get(0, 0);
hcto += addr_us_get(0);
hcto -= radio_tx_chain_delay_get(0, 0);
#endif /* !CONFIG_BT_CTLR_PHY */
radio_tmr_hcto_configure(hcto);
#if defined(CONFIG_BT_CENTRAL) && defined(CONFIG_BT_CTLR_CONN_RSSI)
if (!trx_cnt && !lll->role) {
radio_rssi_measure();
}
#endif /* CONFIG_BT_CENTRAL && CONFIG_BT_CTLR_CONN_RSSI */
#if defined(CONFIG_BT_CTLR_PROFILE_ISR) || \
defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
radio_tmr_end_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR || CONFIG_BT_CTLR_GPIO_PA_PIN */
#if defined(CONFIG_BT_CTLR_GPIO_LNA_PIN)
radio_gpio_lna_setup();
#if defined(CONFIG_BT_CTLR_PHY)
radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + EVENT_IFS_US - 4 -
radio_tx_chain_delay_get(lll->phy_tx,
lll->phy_flags) -
CONFIG_BT_CTLR_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
radio_gpio_pa_lna_enable(radio_tmr_tifs_base_get() + EVENT_IFS_US - 4 -
radio_tx_chain_delay_get(0, 0) -
CONFIG_BT_CTLR_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* CONFIG_BT_CTLR_GPIO_LNA_PIN */
}
void lll_conn_isr_abort(void *param)
{
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
radio_filter_status_reset();
radio_ar_status_reset();
radio_rssi_status_reset();
if (IS_ENABLED(CONFIG_BT_CTLR_GPIO_PA_PIN) ||
IS_ENABLED(CONFIG_BT_CTLR_GPIO_LNA_PIN)) {
radio_gpio_pa_lna_disable();
}
isr_cleanup(param);
}
void lll_conn_rx_pkt_set(struct lll_conn *lll)
{
struct node_rx_pdu *node_rx;
u16_t max_rx_octets;
u8_t phy;
node_rx = ull_pdu_rx_alloc_peek(1);
LL_ASSERT(node_rx);
#if defined(CONFIG_BT_CTLR_DATA_LENGTH)
max_rx_octets = lll->max_rx_octets;
#else /* !CONFIG_BT_CTLR_DATA_LENGTH */
max_rx_octets = PDU_DC_PAYLOAD_SIZE_MIN;
#endif /* !CONFIG_BT_CTLR_DATA_LENGTH */
#if defined(CONFIG_BT_CTLR_PHY)
phy = lll->phy_rx;
#else /* !CONFIG_BT_CTLR_PHY */
phy = 0U;
#endif /* !CONFIG_BT_CTLR_PHY */
radio_phy_set(phy, 0);
if (0) {
#if defined(CONFIG_BT_CTLR_LE_ENC)
} else if (lll->enc_rx) {
radio_pkt_configure(8, (max_rx_octets + 4), (phy << 1) | 0x01);
radio_pkt_rx_set(radio_ccm_rx_pkt_set(&lll->ccm_rx, phy,
node_rx->pdu));
#endif /* CONFIG_BT_CTLR_LE_ENC */
} else {
radio_pkt_configure(8, max_rx_octets, (phy << 1) | 0x01);
radio_pkt_rx_set(node_rx->pdu);
}
}
void lll_conn_tx_pkt_set(struct lll_conn *lll, struct pdu_data *pdu_data_tx)
{
u16_t max_tx_octets;
u8_t phy, flags;
#if defined(CONFIG_BT_CTLR_DATA_LENGTH)
max_tx_octets = lll->max_tx_octets;
#else /* !CONFIG_BT_CTLR_DATA_LENGTH */
max_tx_octets = PDU_DC_PAYLOAD_SIZE_MIN;
#endif /* !CONFIG_BT_CTLR_DATA_LENGTH */
#if defined(CONFIG_BT_CTLR_PHY)
phy = lll->phy_tx;
flags = lll->phy_flags;
#else /* !CONFIG_BT_CTLR_PHY */
phy = 0U;
flags = 0U;
#endif /* !CONFIG_BT_CTLR_PHY */
radio_phy_set(phy, flags);
if (0) {
#if defined(CONFIG_BT_CTLR_LE_ENC)
} else if (lll->enc_tx) {
radio_pkt_configure(8, (max_tx_octets + 4U),
(phy << 1) | 0x01);
radio_pkt_tx_set(radio_ccm_tx_pkt_set(&lll->ccm_tx,
pdu_data_tx));
#endif /* CONFIG_BT_CTLR_LE_ENC */
} else {
radio_pkt_configure(8, max_tx_octets, (phy << 1) | 0x01);
radio_pkt_tx_set(pdu_data_tx);
}
}
void lll_conn_pdu_tx_prep(struct lll_conn *lll, struct pdu_data **pdu_data_tx)
{
struct node_tx *tx;
struct pdu_data *p;
memq_link_t *link;
if (lll->empty) {
*pdu_data_tx = empty_tx_enqueue(lll);
return;
}
link = memq_peek(lll->memq_tx.head, lll->memq_tx.tail, (void **)&tx);
if (!link) {
p = empty_tx_enqueue(lll);
} else {
u16_t max_tx_octets;
p = (void *)(tx->pdu + lll->packet_tx_head_offset);
if (!lll->packet_tx_head_len) {
lll->packet_tx_head_len = p->len;
}
if (lll->packet_tx_head_offset) {
p->ll_id = PDU_DATA_LLID_DATA_CONTINUE;
}
p->len = lll->packet_tx_head_len - lll->packet_tx_head_offset;
p->md = 0;
#if defined(CONFIG_BT_CTLR_DATA_LENGTH)
#if defined(CONFIG_BT_CTLR_PHY)
switch (lll->phy_tx_time) {
default:
case BIT(0):
/* 1M PHY, 1us = 1 bit, hence divide by 8.
* Deduct 10 bytes for preamble (1), access address (4),
* header (2), and CRC (3).
*/
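/* Worked example (illustrative): with the spec maximum max_tx_time of
* 2120 us this yields (2120 / 8) - 10 = 255 octets, i.e. 251 payload
* octets plus a 4-octet MIC when encryption is in use.
*/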
max_tx_octets = (lll->max_tx_time >> 3) - 10;
break;
case BIT(1):
/* 2M PHY, 1us = 2 bits, hence divide by 4.
* Deduct 11 bytes for preamble (2), access address (4),
* header (2), and CRC (3).
*/
max_tx_octets = (lll->max_tx_time >> 2) - 11;
break;
#if defined(CONFIG_BT_CTLR_PHY_CODED)
case BIT(2):
if (lll->phy_flags & 0x01) {
/* S8 Coded PHY, 8us = 1 bit, hence divide by
* 64.
* Subtract time for preamble (80), AA (256),
* CI (16), TERM1 (24), CRC (192) and
* TERM2 (24), total 592 us.
* Subtract 2 bytes for header.
*/
max_tx_octets = ((lll->max_tx_time - 592) >>
6) - 2;
} else {
/* S2 Coded PHY, 2us = 1 bit, hence divide by
* 16.
* Subtract time for preamble (80), AA (256),
* CI (16), TERM1 (24), CRC (48) and
* TERM2 (6), total 430 us.
* Subtract 2 bytes for header.
*/
max_tx_octets = ((lll->max_tx_time - 430) >>
4) - 2;
}
break;
#endif /* CONFIG_BT_CTLR_PHY_CODED */
}
#if defined(CONFIG_BT_CTLR_LE_ENC)
if (lll->enc_tx) {
/* deduct the MIC */
max_tx_octets -= 4U;
}
#endif /* CONFIG_BT_CTLR_LE_ENC */
if (max_tx_octets > lll->max_tx_octets) {
max_tx_octets = lll->max_tx_octets;
}
#else /* !CONFIG_BT_CTLR_PHY */
max_tx_octets = lll->max_tx_octets;
#endif /* !CONFIG_BT_CTLR_PHY */
#else /* !CONFIG_BT_CTLR_DATA_LENGTH */
max_tx_octets = PDU_DC_PAYLOAD_SIZE_MIN;
#endif /* !CONFIG_BT_CTLR_DATA_LENGTH */
if (p->len > max_tx_octets) {
p->len = max_tx_octets;
p->md = 1;
}
if (link->next) {
p->md = 1;
}
}
p->rfu = 0U;
#if !defined(CONFIG_SOC_OPENISA_RV32M1_RISCV32)
#if !defined(CONFIG_BT_CTLR_DATA_LENGTH_CLEAR)
p->resv = 0U;
#endif /* !CONFIG_BT_CTLR_DATA_LENGTH_CLEAR */
#endif /* !CONFIG_SOC_OPENISA_RV32M1_RISCV32 */
*pdu_data_tx = p;
}
static int init_reset(void)
{
return 0;
}
static void isr_done(void *param)
{
struct event_done_extra *e;
/* TODO: MOVE to a common interface, isr_lll_radio_status? */
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
radio_filter_status_reset();
radio_ar_status_reset();
radio_rssi_status_reset();
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN) || \
defined(CONFIG_BT_CTLR_GPIO_LNA_PIN)
radio_gpio_pa_lna_disable();
#endif /* CONFIG_BT_CTLR_GPIO_PA_PIN || CONFIG_BT_CTLR_GPIO_LNA_PIN */
/* TODO: MOVE ^^ */
e = ull_event_done_extra_get();
LL_ASSERT(e);
e->type = EVENT_DONE_EXTRA_TYPE_CONN;
e->trx_cnt = trx_cnt;
e->crc_valid = crc_valid;
#if defined(CONFIG_BT_CTLR_LE_ENC)
e->mic_state = mic_state;
#endif /* CONFIG_BT_CTLR_LE_ENC */
#if defined(CONFIG_BT_PERIPHERAL)
if (trx_cnt) {
struct lll_conn *lll = param;
if (lll->role) {
u32_t preamble_to_addr_us;
#if defined(CONFIG_BT_CTLR_PHY)
preamble_to_addr_us =
addr_us_get(lll->phy_rx);
#else /* !CONFIG_BT_CTLR_PHY */
preamble_to_addr_us =
addr_us_get(0);
#endif /* !CONFIG_BT_CTLR_PHY */
e->slave.start_to_address_actual_us =
radio_tmr_aa_restore() - radio_tmr_ready_get();
e->slave.window_widening_event_us =
lll->slave.window_widening_event_us;
e->slave.preamble_to_addr_us = preamble_to_addr_us;
/* Reset window widening, as anchor point sync-ed */
lll->slave.window_widening_event_us = 0;
lll->slave.window_size_event_us = 0;
}
}
#endif /* CONFIG_BT_PERIPHERAL */
isr_cleanup(param);
}
static void isr_cleanup(void *param)
{
int err;
radio_isr_set(isr_race, param);
radio_tmr_stop();
err = lll_clk_off();
LL_ASSERT(!err || err == -EBUSY);
lll_done(NULL);
}
static void isr_race(void *param)
{
/* NOTE: lll_disable could have a race with ... */
radio_status_reset();
}
static inline bool ctrl_pdu_len_check(u8_t len)
{
return len <= (offsetof(struct pdu_data, llctrl) +
sizeof(struct pdu_data_llctrl));
}
static int isr_rx_pdu(struct lll_conn *lll, struct pdu_data *pdu_data_rx,
struct node_tx **tx_release, u8_t *is_rx_enqueue)
{
/* Ack for tx-ed data */
if (pdu_data_rx->nesn != lll->sn) {
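/* Peer acknowledged our last PDU: a received NESN that differs from
* our SN means the previous Tx PDU can be released and SN advanced.
*/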
/* Increment serial number */
lll->sn++;
#if defined(CONFIG_BT_PERIPHERAL)
/* First ack (and redundantly any other ack) enables use of
* slave latency.
*/
if (lll->role) {
lll->slave.latency_enabled = 1;
}
#endif /* CONFIG_BT_PERIPHERAL */
if (!lll->empty) {
struct pdu_data *pdu_data_tx;
u8_t pdu_data_tx_len;
struct node_tx *tx;
memq_link_t *link;
link = memq_peek(lll->memq_tx.head, lll->memq_tx.tail,
(void **)&tx);
LL_ASSERT(link);
pdu_data_tx = (void *)(tx->pdu +
lll->packet_tx_head_offset);
pdu_data_tx_len = pdu_data_tx->len;
#if defined(CONFIG_BT_CTLR_LE_ENC)
if (pdu_data_tx_len != 0U) {
/* if encrypted increment tx counter */
if (lll->enc_tx) {
lll->ccm_tx.counter++;
}
}
#endif /* CONFIG_BT_CTLR_LE_ENC */
lll->packet_tx_head_offset += pdu_data_tx_len;
if (lll->packet_tx_head_offset ==
lll->packet_tx_head_len) {
lll->packet_tx_head_len = 0;
lll->packet_tx_head_offset = 0;
memq_dequeue(lll->memq_tx.tail,
&lll->memq_tx.head, NULL);
/* TX node UPSTREAM, i.e. Tx node ack path */
link->next = tx->next; /* Indicates ctrl or data
* pool.
*/
tx->next = link;
*tx_release = tx;
}
} else {
lll->empty = 0;
}
}
/* process received data */
if ((pdu_data_rx->sn == lll->nesn) &&
/* check that we never use the rx buffer reserved for the empty
* packet and internal control enqueue
*/
(ull_pdu_rx_alloc_peek(3) != 0)) {
/* Increment next expected serial number */
lll->nesn++;
if (pdu_data_rx->len != 0) {
#if defined(CONFIG_BT_CTLR_LE_ENC)
/* If required, wait for CCM to finish */
if (lll->enc_rx) {
u32_t done;
done = radio_ccm_is_done();
LL_ASSERT(done);
bool mic_failure = !radio_ccm_mic_is_valid();
if (mic_failure &&
lll->ccm_rx.counter == 0 &&
(pdu_data_rx->ll_id ==
PDU_DATA_LLID_CTRL)) {
/* Received an LL control packet in the
* middle of the LL encryption procedure
* with MIC failure.
* This could be an unencrypted packet
*/
struct pdu_data *scratch_pkt =
radio_pkt_scratch_get();
if (ctrl_pdu_len_check(
scratch_pkt->len)) {
memcpy(pdu_data_rx,
scratch_pkt,
scratch_pkt->len +
offsetof(struct pdu_data,
llctrl));
mic_failure = false;
lll->ccm_rx.counter--;
}
}
if (mic_failure) {
/* Record MIC invalid */
mic_state = LLL_CONN_MIC_FAIL;
return -EINVAL;
}
/* Increment counter */
lll->ccm_rx.counter++;
/* Record MIC valid */
mic_state = LLL_CONN_MIC_PASS;
}
#endif /* CONFIG_BT_CTLR_LE_ENC */
/* Enqueue non-empty PDU */
*is_rx_enqueue = 1U;
}
}
return 0;
}
static struct pdu_data *empty_tx_enqueue(struct lll_conn *lll)
{
struct pdu_data *p;
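/* Nothing queued by the host: send the zero-length 'empty PDU'
* (LLID 0b01) so the connection event can still carry acknowledgements.
*/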
lll->empty = 1;
p = (void *)radio_pkt_empty_get();
p->ll_id = PDU_DATA_LLID_DATA_CONTINUE;
p->len = 0;
if (memq_peek(lll->memq_tx.head, lll->memq_tx.tail, NULL)) {
p->md = 1;
} else {
p->md = 0;
}
return p;
}
void lll_conn_flush(struct lll_conn *lll)
{
/* Nothing to be flushed */
}


@@ -0,0 +1,11 @@
/*
* Copyright (c) 2019 Demant
*
* SPDX-License-Identifier: Apache-2.0
*/
#if defined(CONFIG_BT_CTLR_CONN_META)
/*
* struct lll_conn_meta { };
*/
#error Please define struct lll_conn_meta when enabling BT_CTLR_CONN_META
#endif /* CONFIG_BT_CTLR_CONN_META */


@@ -0,0 +1,17 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
int lll_prepare_done(void *param);
int lll_done(void *param);
bool lll_is_done(void *param);
int lll_clk_on(void);
int lll_clk_on_wait(void);
int lll_clk_off(void);
u32_t lll_evt_offset_get(struct evt_hdr *evt);
u32_t lll_preempt_calc(struct evt_hdr *evt, u8_t ticker_id,
u32_t ticks_at_event);
void lll_chan_set(u32_t chan);
u8_t lll_entropy_get(u8_t len, void *rand);


@@ -0,0 +1,212 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <stdbool.h>
#include <toolchain.h>
#include <zephyr/types.h>
#include <sys/util.h>
#include "hal/ccm.h"
#include "hal/radio.h"
#include "hal/ticker.h"
#include "util/memq.h"
#include "pdu.h"
#include "lll.h"
#include "lll_vendor.h"
#include "lll_conn.h"
#include "lll_master.h"
#include "lll_chan.h"
#include "lll_internal.h"
#include "lll_tim_internal.h"
#define LOG_MODULE_NAME bt_ctlr_llsw_openisa_lll_master
#include "common/log.h"
#include <soc.h>
#include "hal/debug.h"
static int init_reset(void);
static int prepare_cb(struct lll_prepare_param *prepare_param);
int lll_master_init(void)
{
int err;
err = init_reset();
if (err) {
return err;
}
return 0;
}
int lll_master_reset(void)
{
int err;
err = init_reset();
if (err) {
return err;
}
return 0;
}
void lll_master_prepare(void *param)
{
struct lll_prepare_param *p = param;
int err;
err = lll_clk_on();
LL_ASSERT(!err || err == -EINPROGRESS);
err = lll_prepare(lll_conn_is_abort_cb, lll_conn_abort_cb, prepare_cb,
0, p);
LL_ASSERT(!err || err == -EINPROGRESS);
}
static int init_reset(void)
{
return 0;
}
static int prepare_cb(struct lll_prepare_param *prepare_param)
{
struct lll_conn *lll = prepare_param->param;
u32_t ticks_at_event, ticks_at_start;
struct pdu_data *pdu_data_tx;
struct evt_hdr *evt;
u16_t event_counter;
u32_t remainder_us;
u8_t data_chan_use = 0;
u32_t remainder;
u16_t lazy;
DEBUG_RADIO_START_M(1);
/* TODO: Do the below in ULL ? */
lazy = prepare_param->lazy;
/* save the latency for use in event */
lll->latency_prepare += lazy;
/* calc current event counter value */
event_counter = lll->event_counter + lll->latency_prepare;
/* store the next event counter value */
lll->event_counter = event_counter + 1;
/* TODO: Do the above in ULL ? */
/* Reset connection event global variables */
lll_conn_prepare_reset();
/* TODO: can we do something in ULL? */
lll->latency_event = lll->latency_prepare;
lll->latency_prepare = 0;
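/* Select the data channel for this connection event: Channel Selection
 * Algorithm #2 when in use on this connection, otherwise the legacy
 * CSA #1 hop sequence.
 */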
if (lll->data_chan_sel) {
#if defined(CONFIG_BT_CTLR_CHAN_SEL_2)
data_chan_use = lll_chan_sel_2(lll->event_counter - 1,
lll->data_chan_id,
&lll->data_chan_map[0],
lll->data_chan_count);
#else /* !CONFIG_BT_CTLR_CHAN_SEL_2 */
data_chan_use = 0;
LL_ASSERT(0);
#endif /* !CONFIG_BT_CTLR_CHAN_SEL_2 */
} else {
data_chan_use = lll_chan_sel_1(&lll->data_chan_use,
lll->data_chan_hop,
lll->latency_event,
&lll->data_chan_map[0],
lll->data_chan_count);
}
/* Prepare the Tx PDU */
lll_conn_pdu_tx_prep(lll, &pdu_data_tx);
pdu_data_tx->sn = lll->sn;
pdu_data_tx->nesn = lll->nesn;
/* Start setting up of Radio h/w */
radio_reset();
/* TODO: other Tx Power settings */
radio_tx_power_set(RADIO_TXP_DEFAULT);
radio_aa_set(lll->access_addr);
radio_crc_configure(((0x5bUL) | ((0x06UL) << 8) | ((0x00UL) << 16)),
(((u32_t)lll->crc_init[2] << 16) |
((u32_t)lll->crc_init[1] << 8) |
((u32_t)lll->crc_init[0])));
lll_chan_set(data_chan_use);
/* setup the radio tx packet buffer */
lll_conn_tx_pkt_set(lll, pdu_data_tx);
radio_isr_set(lll_conn_isr_tx, lll);
radio_tmr_tifs_set(EVENT_IFS_US);
#if defined(CONFIG_BT_CTLR_PHY)
radio_switch_complete_and_rx(lll->phy_rx);
#else /* !CONFIG_BT_CTLR_PHY */
radio_switch_complete_and_rx(0);
#endif /* !CONFIG_BT_CTLR_PHY */
ticks_at_event = prepare_param->ticks_at_expire;
evt = HDR_LLL2EVT(lll);
ticks_at_event += lll_evt_offset_get(evt);
ticks_at_start = ticks_at_event;
ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US);
remainder = prepare_param->remainder;
remainder_us = radio_tmr_start(1, ticks_at_start, remainder);
/* capture end of Tx-ed PDU, used to calculate HCTO. */
radio_tmr_end_capture();
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
radio_gpio_pa_setup();
#if defined(CONFIG_BT_CTLR_PHY)
radio_gpio_pa_lna_enable(remainder_us +
radio_tx_ready_delay_get(lll->phy_tx,
lll->phy_flags) -
CONFIG_BT_CTLR_GPIO_PA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
radio_gpio_pa_lna_enable(remainder_us +
radio_tx_ready_delay_get(0, 0) -
CONFIG_BT_CTLR_GPIO_PA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#else /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
ARG_UNUSED(remainder_us);
#endif /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
/* check if preempt to start has changed */
if (lll_preempt_calc(evt, (TICKER_ID_CONN_BASE + lll->handle),
ticks_at_event)) {
radio_isr_set(lll_conn_isr_abort, lll);
radio_disable();
} else
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */
{
u32_t ret;
ret = lll_prepare_done(lll);
LL_ASSERT(!ret);
}
DEBUG_RADIO_START_M(1);
return 0;
}

View file

@ -0,0 +1,9 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
int lll_master_init(void);
int lll_master_reset(void);
void lll_master_prepare(void *param);

View file

@ -0,0 +1,13 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#if defined(CONFIG_BT_CTLR_RX_PDU_META)
/*
* typedef struct { } lll_rx_pdu_meta_t;
*/
#error Please define typedef struct lll_rx_pdu_meta_t when enabling \
CONFIG_BT_CTLR_RX_PDU_META
#endif /* CONFIG_BT_CTLR_RX_PDU_META */

View file

@ -0,0 +1,140 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <toolchain.h>
#include <zephyr/types.h>
#include "hal/ccm.h"
#include "hal/radio.h"
#include "util/memq.h"
#include "pdu.h"
#include "lll.h"
static u8_t latency_min = (u8_t) -1;
static u8_t latency_max;
static u8_t latency_prev;
static u8_t cputime_min = (u8_t) -1;
static u8_t cputime_max;
static u8_t cputime_prev;
static u32_t timestamp_latency;
void lll_prof_latency_capture(void)
{
/* sample the packet timer, use it to calculate ISR latency
* and generate the profiling event at the end of the ISR.
*/
radio_tmr_sample();
}
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
static u32_t timestamp_radio_end;
u32_t lll_prof_radio_end_backup(void)
{
/* PA enable overwrites the packet end timestamp used in ISR profiling,
 * hence back it up for later use.
 */
timestamp_radio_end = radio_tmr_end_get();
return timestamp_radio_end;
}
#endif /* CONFIG_BT_CTLR_GPIO_PA_PIN */
void lll_prof_cputime_capture(void)
{
/* get the ISR latency sample */
timestamp_latency = radio_tmr_sample_get();
/* sample the packet timer again, use it to calculate ISR execution time
* and use it in profiling event
*/
radio_tmr_sample();
}
void lll_prof_send(void)
{
u8_t latency, cputime, prev;
u8_t chg = 0U;
/* calculate the elapsed time in us from the on-air radio packet end
 * to ISR entry
 */
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
latency = timestamp_latency - timestamp_radio_end;
#else /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
latency = timestamp_latency - radio_tmr_end_get();
#endif /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
/* check changes in min, avg and max of latency */
if (latency > latency_max) {
latency_max = latency;
chg = 1U;
}
if (latency < latency_min) {
latency_min = latency;
chg = 1U;
}
/* check for +/- 1us change */
prev = ((u16_t)latency_prev + latency) >> 1;
if (prev != latency_prev) {
latency_prev = latency;
chg = 1U;
}
/* calculate the elapsed time in us since ISR entry */
cputime = radio_tmr_sample_get() - timestamp_latency;
/* check changes in min, avg and max */
if (cputime > cputime_max) {
cputime_max = cputime;
chg = 1U;
}
if (cputime < cputime_min) {
cputime_min = cputime;
chg = 1U;
}
/* check for +/- 1us change */
prev = ((u16_t)cputime_prev + cputime) >> 1;
if (prev != cputime_prev) {
cputime_prev = cputime;
chg = 1U;
}
/* generate event if any change */
if (chg) {
struct node_rx_pdu *rx;
/* NOTE: enqueue only if rx buffer available, else ignore */
rx = ull_pdu_rx_alloc_peek(3);
if (rx) {
struct pdu_data *pdu;
struct profile *p;
ull_pdu_rx_alloc();
rx->hdr.type = NODE_RX_TYPE_PROFILE;
rx->hdr.handle = 0xFFFF;
pdu = (void *)rx->pdu;
p = &pdu->profile;
p->lcur = latency;
p->lmin = latency_min;
p->lmax = latency_max;
p->cur = cputime;
p->min = cputime_min;
p->max = cputime_max;
ull_rx_put(rx->hdr.link, rx);
ull_rx_sched();
}
}
}

View file

@ -0,0 +1,10 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
void lll_prof_latency_capture(void);
u32_t lll_prof_radio_end_backup(void);
void lll_prof_cputime_capture(void);
void lll_prof_send(void);

File diff suppressed because it is too large

View file

@ -0,0 +1,47 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
struct lll_scan {
struct lll_hdr hdr;
#if defined(CONFIG_BT_CENTRAL)
/* NOTE: conn context has to be after lll_hdr */
struct lll_conn *conn;
u32_t conn_ticks_slot;
u32_t conn_win_offset_us;
u16_t conn_timeout;
#endif /* CONFIG_BT_CENTRAL */
u8_t state:1;
u8_t chan:2;
u8_t filter_policy:2;
u8_t adv_addr_type:1;
u8_t init_addr_type:1;
u8_t type:1;
#if defined(CONFIG_BT_CTLR_ADV_EXT)
u8_t phy:3;
#endif /* CONFIG_BT_CTLR_ADV_EXT */
#if defined(CONFIG_BT_CTLR_PRIVACY)
u8_t rpa_gen:1;
/* initiator only */
u8_t rl_idx;
#endif /* CONFIG_BT_CTLR_PRIVACY */
u8_t init_addr[BDADDR_SIZE];
u8_t adv_addr[BDADDR_SIZE];
u16_t interval;
u32_t ticks_window;
};
int lll_scan_init(void);
int lll_scan_reset(void);
void lll_scan_prepare(void *param);
extern u16_t ull_scan_lll_handle_get(struct lll_scan *lll);

View file

@ -0,0 +1,257 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <stdbool.h>
#include <toolchain.h>
#include <zephyr/types.h>
#include <sys/util.h>
#include "hal/ccm.h"
#include "hal/radio.h"
#include "hal/ticker.h"
#include "util/memq.h"
#include "pdu.h"
#include "lll.h"
#include "lll_vendor.h"
#include "lll_conn.h"
#include "lll_slave.h"
#include "lll_chan.h"
#include "lll_internal.h"
#include "lll_tim_internal.h"
#define LOG_MODULE_NAME bt_ctlr_llsw_openisa_lll_slave
#include "common/log.h"
#include <soc.h>
#include "hal/debug.h"
static int init_reset(void);
static int prepare_cb(struct lll_prepare_param *prepare_param);
int lll_slave_init(void)
{
int err;
err = init_reset();
if (err) {
return err;
}
return 0;
}
int lll_slave_reset(void)
{
int err;
err = init_reset();
if (err) {
return err;
}
return 0;
}
void lll_slave_prepare(void *param)
{
struct lll_prepare_param *p = param;
int err;
err = lll_clk_on();
LL_ASSERT(!err || err == -EINPROGRESS);
err = lll_prepare(lll_conn_is_abort_cb, lll_conn_abort_cb, prepare_cb,
0, p);
LL_ASSERT(!err || err == -EINPROGRESS);
}
static int init_reset(void)
{
return 0;
}
static int prepare_cb(struct lll_prepare_param *prepare_param)
{
struct lll_conn *lll = prepare_param->param;
u32_t ticks_at_event, ticks_at_start;
struct evt_hdr *evt;
u16_t event_counter;
u32_t remainder_us;
u8_t data_chan_use = 0;
u32_t remainder;
u32_t hcto;
u16_t lazy;
DEBUG_RADIO_START_S(1);
/* TODO: Do the below in ULL ? */
lazy = prepare_param->lazy;
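/* Window widening compensates for the combined sleep clock drift of
 * master and slave over the (1 + lazy) elapsed connection intervals;
 * it accumulates below and is capped at the configured maximum.
 */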
/* Calc window widening */
if (lll->role) {
lll->slave.window_widening_prepare_us +=
lll->slave.window_widening_periodic_us * (lazy + 1);
if (lll->slave.window_widening_prepare_us >
lll->slave.window_widening_max_us) {
lll->slave.window_widening_prepare_us =
lll->slave.window_widening_max_us;
}
}
/* save the latency for use in event */
lll->latency_prepare += lazy;
/* calc current event counter value */
event_counter = lll->event_counter + lll->latency_prepare;
/* store the next event counter value */
lll->event_counter = event_counter + 1;
/* TODO: Do the above in ULL ? */
/* Reset connection event global variables */
lll_conn_prepare_reset();
/* TODO: can we do something in ULL? */
lll->latency_event = lll->latency_prepare;
lll->latency_prepare = 0;
if (lll->data_chan_sel) {
#if defined(CONFIG_BT_CTLR_CHAN_SEL_2)
data_chan_use = lll_chan_sel_2(lll->event_counter - 1,
lll->data_chan_id,
&lll->data_chan_map[0],
lll->data_chan_count);
#else /* !CONFIG_BT_CTLR_CHAN_SEL_2 */
data_chan_use = 0;
LL_ASSERT(0);
#endif /* !CONFIG_BT_CTLR_CHAN_SEL_2 */
} else {
data_chan_use = lll_chan_sel_1(&lll->data_chan_use,
lll->data_chan_hop,
lll->latency_event,
&lll->data_chan_map[0],
lll->data_chan_count);
}
/* current window widening */
lll->slave.window_widening_event_us +=
lll->slave.window_widening_prepare_us;
lll->slave.window_widening_prepare_us = 0;
if (lll->slave.window_widening_event_us >
lll->slave.window_widening_max_us) {
lll->slave.window_widening_event_us =
lll->slave.window_widening_max_us;
}
/* current window size */
lll->slave.window_size_event_us +=
lll->slave.window_size_prepare_us;
lll->slave.window_size_prepare_us = 0;
/* Start setting up Radio h/w */
radio_reset();
/* TODO: other Tx Power settings */
radio_tx_power_set(RADIO_TXP_DEFAULT);
lll_conn_rx_pkt_set(lll);
radio_aa_set(lll->access_addr);
radio_crc_configure(((0x5bUL) | ((0x06UL) << 8) | ((0x00UL) << 16)),
(((u32_t)lll->crc_init[2] << 16) |
((u32_t)lll->crc_init[1] << 8) |
((u32_t)lll->crc_init[0])));
lll_chan_set(data_chan_use);
radio_isr_set(lll_conn_isr_rx, lll);
radio_tmr_tifs_set(EVENT_IFS_US);
#if defined(CONFIG_BT_CTLR_PHY)
radio_switch_complete_and_tx(lll->phy_rx, 0, lll->phy_tx,
lll->phy_flags);
#else /* !CONFIG_BT_CTLR_PHY */
radio_switch_complete_and_tx(0, 0, 0, 0);
#endif /* !CONFIG_BT_CTLR_PHY */
ticks_at_event = prepare_param->ticks_at_expire;
evt = HDR_LLL2EVT(lll);
ticks_at_event += lll_evt_offset_get(evt);
ticks_at_start = ticks_at_event;
ticks_at_start += HAL_TICKER_US_TO_TICKS(EVENT_OVERHEAD_START_US);
remainder = prepare_param->remainder;
remainder_us = radio_tmr_start(0, ticks_at_start, remainder);
radio_tmr_aa_capture();
radio_tmr_aa_save(0);
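/* Header complete timeout: open the Rx window around the expected
 * anchor point, widened by a start-time jitter allowance, twice the
 * accumulated window widening and the transmit window size;
 * PHY-dependent ready, address and chain delays are added below.
 */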
hcto = remainder_us + EVENT_JITTER_US + (EVENT_JITTER_US << 2) +
(lll->slave.window_widening_event_us << 1) +
lll->slave.window_size_event_us;
#if defined(CONFIG_BT_CTLR_PHY)
hcto += radio_rx_ready_delay_get(lll->phy_rx, 1);
hcto += addr_us_get(lll->phy_rx);
hcto += radio_rx_chain_delay_get(lll->phy_rx, 1);
#else /* !CONFIG_BT_CTLR_PHY */
hcto += radio_rx_ready_delay_get(0, 0);
hcto += addr_us_get(0);
hcto += radio_rx_chain_delay_get(0, 0);
#endif /* !CONFIG_BT_CTLR_PHY */
radio_tmr_hcto_configure(hcto);
#if defined(CONFIG_BT_CTLR_GPIO_LNA_PIN)
radio_gpio_lna_setup();
#if defined(CONFIG_BT_CTLR_PHY)
radio_gpio_pa_lna_enable(remainder_us +
radio_rx_ready_delay_get(lll->phy_rx, 1) -
CONFIG_BT_CTLR_GPIO_LNA_OFFSET);
#else /* !CONFIG_BT_CTLR_PHY */
radio_gpio_pa_lna_enable(remainder_us +
radio_rx_ready_delay_get(0, 0) -
CONFIG_BT_CTLR_GPIO_LNA_OFFSET);
#endif /* !CONFIG_BT_CTLR_PHY */
#endif /* CONFIG_BT_CTLR_GPIO_LNA_PIN */
#if defined(CONFIG_BT_CTLR_PROFILE_ISR) || \
defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
radio_tmr_end_capture();
#endif /* CONFIG_BT_CTLR_PROFILE_ISR || CONFIG_BT_CTLR_GPIO_PA_PIN */
#if defined(CONFIG_BT_CTLR_CONN_RSSI)
radio_rssi_measure();
#endif /* CONFIG_BT_CTLR_CONN_RSSI */
#if defined(CONFIG_BT_CTLR_XTAL_ADVANCED) && \
(EVENT_OVERHEAD_PREEMPT_US <= EVENT_OVERHEAD_PREEMPT_MIN_US)
/* check if preempt to start has changed */
if (lll_preempt_calc(evt, (TICKER_ID_CONN_BASE + lll->handle),
ticks_at_event)) {
radio_isr_set(lll_conn_isr_abort, lll);
radio_disable();
} else
#endif /* CONFIG_BT_CTLR_XTAL_ADVANCED */
{
u32_t ret;
ret = lll_prepare_done(lll);
LL_ASSERT(!ret);
}
DEBUG_RADIO_START_S(1);
return 0;
}

View file

@ -0,0 +1,9 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
int lll_slave_init(void);
int lll_slave_reset(void);
void lll_slave_prepare(void *param);

View file

@ -0,0 +1,348 @@
/*
* Copyright (c) 2017-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <stddef.h>
#include <string.h>
#include <toolchain.h>
#include <zephyr/types.h>
#include <soc.h>
#include <drivers/clock_control.h>
#include "hal/cpu.h"
#include "hal/cntr.h"
#include "hal/ccm.h"
#include "hal/radio.h"
#include "util/memq.h"
#include "lll.h"
#include "lll_internal.h"
#include "ll_test.h"
#include "hal/debug.h"
#include "common/log.h"
#define CNTR_MIN_DELTA 3
/* Access address used by Direct Test Mode packets
 * (BT Core Spec, Vol 6, Part F)
 */
static const u32_t test_sync_word = 0x71764129;
static u8_t test_phy;
static u8_t test_phy_flags;
static u16_t test_num_rx;
static bool started;
/* NOTE: The PRBS9 sequence used as packet payload.
 * The bytes in the sequence are in the right order, but the bits of each
 * byte in the array are reversed relative to the raw PRBS9 algorithm
 * output; this is done so that the MSbit is transmitted first on air.
 * An illustrative generator sketch follows the table below.
 */
static const u8_t prbs9[] = {
0xFF, 0xC1, 0xFB, 0xE8, 0x4C, 0x90, 0x72, 0x8B,
0xE7, 0xB3, 0x51, 0x89, 0x63, 0xAB, 0x23, 0x23,
0x02, 0x84, 0x18, 0x72, 0xAA, 0x61, 0x2F, 0x3B,
0x51, 0xA8, 0xE5, 0x37, 0x49, 0xFB, 0xC9, 0xCA,
0x0C, 0x18, 0x53, 0x2C, 0xFD, 0x45, 0xE3, 0x9A,
0xE6, 0xF1, 0x5D, 0xB0, 0xB6, 0x1B, 0xB4, 0xBE,
0x2A, 0x50, 0xEA, 0xE9, 0x0E, 0x9C, 0x4B, 0x5E,
0x57, 0x24, 0xCC, 0xA1, 0xB7, 0x59, 0xB8, 0x87,
0xFF, 0xE0, 0x7D, 0x74, 0x26, 0x48, 0xB9, 0xC5,
0xF3, 0xD9, 0xA8, 0xC4, 0xB1, 0xD5, 0x91, 0x11,
0x01, 0x42, 0x0C, 0x39, 0xD5, 0xB0, 0x97, 0x9D,
0x28, 0xD4, 0xF2, 0x9B, 0xA4, 0xFD, 0x64, 0x65,
0x06, 0x8C, 0x29, 0x96, 0xFE, 0xA2, 0x71, 0x4D,
0xF3, 0xF8, 0x2E, 0x58, 0xDB, 0x0D, 0x5A, 0x5F,
0x15, 0x28, 0xF5, 0x74, 0x07, 0xCE, 0x25, 0xAF,
0x2B, 0x12, 0xE6, 0xD0, 0xDB, 0x2C, 0xDC, 0xC3,
0x7F, 0xF0, 0x3E, 0x3A, 0x13, 0xA4, 0xDC, 0xE2,
0xF9, 0x6C, 0x54, 0xE2, 0xD8, 0xEA, 0xC8, 0x88,
0x00, 0x21, 0x86, 0x9C, 0x6A, 0xD8, 0xCB, 0x4E,
0x14, 0x6A, 0xF9, 0x4D, 0xD2, 0x7E, 0xB2, 0x32,
0x03, 0xC6, 0x14, 0x4B, 0x7F, 0xD1, 0xB8, 0xA6,
0x79, 0x7C, 0x17, 0xAC, 0xED, 0x06, 0xAD, 0xAF,
0x0A, 0x94, 0x7A, 0xBA, 0x03, 0xE7, 0x92, 0xD7,
0x15, 0x09, 0x73, 0xE8, 0x6D, 0x16, 0xEE, 0xE1,
0x3F, 0x78, 0x1F, 0x9D, 0x09, 0x52, 0x6E, 0xF1,
0x7C, 0x36, 0x2A, 0x71, 0x6C, 0x75, 0x64, 0x44,
0x80, 0x10, 0x43, 0x4E, 0x35, 0xEC, 0x65, 0x27,
0x0A, 0xB5, 0xFC, 0x26, 0x69, 0x3F, 0x59, 0x99,
0x01, 0x63, 0x8A, 0xA5, 0xBF, 0x68, 0x5C, 0xD3,
0x3C, 0xBE, 0x0B, 0xD6, 0x76, 0x83, 0xD6, 0x57,
0x05, 0x4A, 0x3D, 0xDD, 0x81, 0x73, 0xC9, 0xEB,
0x8A, 0x84, 0x39, 0xF4, 0x36, 0x0B, 0xF7};
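/* Illustrative sketch only (not part of this commit; prbs9_generate is a
 * hypothetical helper name): one way to produce the bit-reversed table
 * above, using a Fibonacci LFSR for x^9 + x^5 + 1 seeded with all ones.
 * Packing the output bit stream LSbit-first into bytes directly yields
 * the stored values (0xFF, 0xC1, 0xFB, ...).
 */
#if 0
static void prbs9_generate(u8_t *out, u16_t len)
{
	u16_t lfsr = 0x1FF; /* 9-bit state d1..d9, all ones */

	while (len--) {
		u8_t byte = 0U;
		u8_t bit;

		for (bit = 0U; bit < 8U; bit++) {
			u8_t d9 = (lfsr >> 8) & 1U;
			u8_t d5 = (lfsr >> 4) & 1U;

			/* Output taken from d9, packed LSbit-first */
			byte |= d9 << bit;
			/* Feedback d9 XOR d5 shifts in at d1 */
			lfsr = ((lfsr << 1) | (d9 ^ d5)) & 0x1FF;
		}
		*out++ = byte;
	}
}
#endif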
/* TODO: fill correct prbs15 */
static const u8_t prbs15[255] = { 0x00, };
static u8_t tx_req;
static u8_t volatile tx_ack;
static void isr_tx(void *param)
{
u32_t l, i, s, t;
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
radio_gpio_pa_lna_disable();
#endif /* CONFIG_BT_CTLR_GPIO_PA_PIN */
/* Exit if radio disabled */
if (((tx_req - tx_ack) & 0x01) == 0U) {
tx_ack = tx_req;
return;
}
/* LE Test Packet Interval I(L): the on-air packet duration L rounded up
 * per I(L) = ceil((L + 249) / 625) * 625 us (BT Core Spec, Vol 6,
 * Part F).
 */
l = radio_tmr_end_get() - radio_tmr_ready_get();
i = ((l + 249 + 624) / 625) * 625U;
t = radio_tmr_end_get() - l + i;
t -= radio_tx_ready_delay_get(test_phy, test_phy_flags);
/* Set timer capture in the future; if the computed start time has
 * already passed, advance it in 625 us steps until it is ahead of the
 * sampled timer value.
 */
radio_tmr_sample();
s = radio_tmr_sample_get();
while (t < s) {
t += 625U;
}
/* Setup next Tx */
radio_switch_complete_and_disable();
radio_tmr_start_us(1, t);
radio_tmr_aa_capture();
radio_tmr_end_capture();
/* TODO: check for probable stale timer capture being set */
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
radio_gpio_pa_setup();
radio_gpio_pa_lna_enable(t + radio_tx_ready_delay_get(test_phy,
test_phy_flags) -
CONFIG_BT_CTLR_GPIO_PA_OFFSET);
#endif /* CONFIG_BT_CTLR_GPIO_PA_PIN */
}
static void isr_rx(void *param)
{
u8_t crc_ok = 0U;
u8_t trx_done;
/* Read radio status and events */
trx_done = radio_is_done();
if (trx_done) {
crc_ok = radio_crc_is_valid();
}
/* Clear radio status and events */
radio_status_reset();
radio_tmr_status_reset();
/* Exit if radio disabled */
if (!trx_done) {
return;
}
/* Setup next Rx */
radio_switch_complete_and_rx(test_phy);
/* Count Rx-ed packets */
if (crc_ok) {
test_num_rx++;
}
}
static u32_t init(u8_t chan, u8_t phy, void (*isr)(void *))
{
int err;
if (started) {
return 1;
}
/* start coarse timer */
cntr_start();
/* Setup resources required by Radio */
err = lll_clk_on_wait();
/* Reset Radio h/w */
radio_reset();
radio_isr_set(isr, NULL);
/* Store value needed in Tx/Rx ISR */
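/* The HCI transmitter/receiver test PHY parameter (0x01 = 1M,
 * 0x02 = 2M, 0x03/0x04 = Coded) is mapped here to the radio PHY bit
 * mask, with the coding scheme carried in test_phy_flags.
 */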
if (phy < 0x04) {
test_phy = BIT(phy - 1);
test_phy_flags = 1U;
} else {
test_phy = BIT(2);
test_phy_flags = 0U;
}
/* Setup Radio in Tx/Rx */
/* NOTE: No whitening in test mode. */
radio_phy_set(test_phy, test_phy_flags);
radio_tmr_tifs_set(150);
radio_tx_power_max_set();
/* DTM RF channel index N is centered at (2402 + 2 * N) MHz */
radio_freq_chan_set((chan << 1) + 2);
radio_aa_set((u8_t *)&test_sync_word);
/* CRC-24 polynomial 0x00065B with preset 0x555555, per Direct Test
 * Mode
 */
radio_crc_configure(0x65b, 0x555555);
radio_pkt_configure(8, 255, (test_phy << 1));
return 0;
}
u32_t ll_test_tx(u8_t chan, u8_t len, u8_t type, u8_t phy)
{
u32_t start_us;
u8_t *payload;
u8_t *pdu;
u32_t err;
if ((type > 0x07) || !phy || (phy > 0x04)) {
return 1;
}
err = init(chan, phy, isr_tx);
if (err) {
return err;
}
tx_req++;
pdu = radio_pkt_scratch_get();
payload = &pdu[2];
switch (type) {
case 0x00:
memcpy(payload, prbs9, len);
break;
case 0x01:
memset(payload, 0x0f, len);
break;
case 0x02:
memset(payload, 0x55, len);
break;
case 0x03:
memcpy(payload, prbs15, len);
break;
case 0x04:
memset(payload, 0xff, len);
break;
case 0x05:
memset(payload, 0x00, len);
break;
case 0x06:
memset(payload, 0xf0, len);
break;
case 0x07:
memset(payload, 0xaa, len);
break;
}
pdu[0] = type;
pdu[1] = len;
radio_pkt_tx_set(pdu);
radio_switch_complete_and_disable();
start_us = radio_tmr_start(1, cntr_cnt_get() + CNTR_MIN_DELTA, 0);
radio_tmr_aa_capture();
radio_tmr_end_capture();
#if defined(CONFIG_BT_CTLR_GPIO_PA_PIN)
radio_gpio_pa_setup();
radio_gpio_pa_lna_enable(start_us +
radio_tx_ready_delay_get(test_phy,
test_phy_flags) -
CONFIG_BT_CTLR_GPIO_PA_OFFSET);
#else /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
ARG_UNUSED(start_us);
#endif /* !CONFIG_BT_CTLR_GPIO_PA_PIN */
started = true;
return 0;
}
u32_t ll_test_rx(u8_t chan, u8_t phy, u8_t mod_idx)
{
u32_t err;
if (!phy || (phy > 0x03)) {
return 1;
}
err = init(chan, phy, isr_rx);
if (err) {
return err;
}
radio_pkt_rx_set(radio_pkt_scratch_get());
radio_switch_complete_and_rx(test_phy);
radio_tmr_start(0, cntr_cnt_get() + CNTR_MIN_DELTA, 0);
#if defined(CONFIG_BT_CTLR_GPIO_LNA_PIN)
radio_gpio_lna_on();
#endif /* CONFIG_BT_CTLR_GPIO_LNA_PIN */
started = true;
return 0;
}
u32_t ll_test_end(u16_t *num_rx)
{
int err;
u8_t ack;
if (!started) {
return 1;
}
/* Return packets Rx-ed/Completed */
*num_rx = test_num_rx;
test_num_rx = 0U;
/* Disable Radio, if in Rx test */
ack = tx_ack;
if (tx_req == ack) {
radio_disable();
} else {
/* Wait for Tx to complete */
tx_req = ack + 2;
while (tx_req != tx_ack) {
cpu_sleep();
}
}
/* Stop packet timer */
radio_tmr_stop();
/* Release resources acquired for Radio */
err = lll_clk_off();
LL_ASSERT(!err || err == -EBUSY);
/* Stop coarse timer */
cntr_stop();
#if defined(CONFIG_BT_CTLR_GPIO_LNA_PIN)
radio_gpio_lna_off();
#endif /* CONFIG_BT_CTLR_GPIO_LNA_PIN */
started = false;
return 0;
}

View file

@ -0,0 +1,26 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
/* Range Delay
* Refer to BT Spec v5.1 Vol.6, Part B, Section 4.2.3 Range Delay
* "4 / 1000" is an approximation of the propagation time in us of the
* signal to travel 1 meter.
*/
#define RANGE_DISTANCE 1000 /* meters */
#define RANGE_DELAY_US (2 * RANGE_DISTANCE * 4 / 1000)
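/* For example, with the default 1000 m above:
 * RANGE_DELAY_US = 2 * 1000 * 4 / 1000 = 8 us of extra round-trip
 * propagation allowance.
 */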
static inline u32_t addr_us_get(u8_t phy)
{
switch (phy) {
default:
case BIT(0):
/* 1M PHY: 1 octet preamble + 4 octet AA = 5 octets at 8 us each */
return 40;
case BIT(1):
/* 2M PHY: 2 octet preamble + 4 octet AA = 6 octets at 4 us each */
return 24;
case BIT(2):
/* Coded PHY: 80 us preamble + 256 us AA + 16 us CI + 24 us TERM1 */
return 376;
}
}

View file

@ -0,0 +1,30 @@
/*
* Copyright (c) 2018-2019 Nordic Semiconductor ASA
*
* SPDX-License-Identifier: Apache-2.0
*/
#define EVENT_OVERHEAD_XTAL_US 1500
#define EVENT_OVERHEAD_PREEMPT_US 0 /* if <= min, then dynamic preempt */
#define EVENT_OVERHEAD_PREEMPT_MIN_US 0
#define EVENT_OVERHEAD_PREEMPT_MAX_US EVENT_OVERHEAD_XTAL_US
#define EVENT_OVERHEAD_START_US 300
/* Worst-case time margin needed after event end-time in the air
* (done/preempt race margin + power-down/chain delay)
*/
#define EVENT_OVERHEAD_END_US 40
#define EVENT_JITTER_US 64
/* Ticker resolution margin
 * Needed due to the lack of fine timing resolution in ticker_start
 * and ticker_update, i.e. to absorb the granularity of the low-power
 * timer ticks; set to 128 us here.
 */
#define EVENT_TICKER_RES_MARGIN_US 128
#define EVENT_RX_JITTER_US(phy) 16 /* Radio Rx timing uncertainty */
#define EVENT_RX_TO_US(phy) ((((((phy)&0x03) + 4)<<3)/BIT((((phy)&0x3)>>1))) + \
EVENT_RX_JITTER_US(phy))
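/* For example, EVENT_RX_TO_US(BIT(0)) = ((5 << 3) / 1) + 16 = 56 us on
 * 1M PHY, and EVENT_RX_TO_US(BIT(1)) = ((6 << 3) / 2) + 16 = 40 us on
 * 2M PHY, i.e. the preamble plus access address duration plus the Rx
 * jitter margin.
 */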
/* TODO - fix up numbers re. HW */
#define EVENT_RX_TX_TURNARROUND(phy) ((phy) == 1 ? 100 : ((phy) == 2 ? 80 : 900))

View file

@ -527,9 +527,11 @@ struct pdu_data {
u8_t len;
#if !defined(CONFIG_SOC_OPENISA_RV32M1_RISCV32)
#if !defined(CONFIG_BT_CTLR_DATA_LENGTH_CLEAR)
u8_t resv:8; /* TODO: remove nRF specific code */
#endif /* !CONFIG_BT_CTLR_DATA_LENGTH_CLEAR */
#endif /* !CONFIG_SOC_OPENISA_RV32M1_RISCV32 */
union {
struct pdu_data_llctrl llctrl;