Utility functions for non-atomic but volatile access.
These functions intentionally have the same signatures as the utility functions for atomic access, but they guarantee neither atomic access nor sequential consistency (nor any other consistency). They only guarantee that no compiler optimization is applied to the operation and that the compiler will not reorder these operations with respect to each other or to other volatile accesses.
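As a sketch of what these guarantees buy in practice, the busy-wait below uses volatile_load_u8 so that the flag is re-read from memory on every iteration; with a plain load the compiler could hoist the read out of the loop. The flag variable and the surrounding function are illustrative, not part of this header.

```c
#include <stdint.h>
#include "volatile_utils.h"

static uint8_t event_flag; /* illustrative: set to 1 from an interrupt handler */

void wait_for_event(void)
{
    /* volatile_load_u8() forces a fresh memory read on every
     * iteration, so the compiler cannot cache event_flag in a
     * register and spin forever on a stale value. */
    while (volatile_load_u8(&event_flag) == 0) {
        /* busy-wait: optimization-proof, but NOT atomic */
    }
    volatile_store_u8(&event_flag, 0); /* consume the event */
}
```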
Definition in file volatile_utils.h.
#include <stdint.h>
Functions

Loads

| Return type | Function | Description |
| --- | --- | --- |
| `static uint8_t` | `volatile_load_u8 (const volatile uint8_t *var)` | Load an 8 bit value completely unoptimized. |
| `static uint16_t` | `volatile_load_u16 (const volatile uint16_t *var)` | Load a 16 bit value completely unoptimized. |
| `static uint32_t` | `volatile_load_u32 (const volatile uint32_t *var)` | Load a 32 bit value completely unoptimized. |
| `static uint64_t` | `volatile_load_u64 (const volatile uint64_t *var)` | Load a 64 bit value completely unoptimized. |
Stores

| Return type | Function | Description |
| --- | --- | --- |
| `static void` | `volatile_store_u8 (volatile uint8_t *dest, uint8_t val)` | Store an 8 bit value completely unoptimized. |
| `static void` | `volatile_store_u16 (volatile uint16_t *dest, uint16_t val)` | Store a 16 bit value completely unoptimized. |
| `static void` | `volatile_store_u32 (volatile uint32_t *dest, uint32_t val)` | Store a 32 bit value completely unoptimized. |
| `static void` | `volatile_store_u64 (volatile uint64_t *dest, uint64_t val)` | Store a 64 bit value completely unoptimized. |
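Each load and store above boils down to a single access through a volatile-qualified pointer. A minimal sketch of the 8 bit pair, assuming the straightforward one-line implementation (the actual header may differ in detail):

```c
#include <stdint.h>

static inline uint8_t volatile_load_u8(const volatile uint8_t *var)
{
    /* The volatile qualifier forbids the compiler from eliding or
     * caching this read, and keeps it in program order relative to
     * other volatile accesses. It does NOT make the read atomic. */
    return *var;
}

static inline void volatile_store_u8(volatile uint8_t *dest, uint8_t val)
{
    /* Likewise, this write is always emitted and never reordered
     * across other volatile accesses. */
    *dest = val;
}
```

The wider variants (u16, u32, u64) follow the same pattern with the corresponding types.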
Fetch-and-modify operations

| Return type | Function | Description |
| --- | --- | --- |
| `static uint8_t` | `volatile_fetch_add_u8 (volatile uint8_t *dest, uint8_t val)` | Unoptimized version of `*dest += val`. |
| `static uint8_t` | `volatile_fetch_sub_u8 (volatile uint8_t *dest, uint8_t val)` | Unoptimized version of `*dest -= val`. |
| `static uint8_t` | `volatile_fetch_or_u8 (volatile uint8_t *dest, uint8_t val)` | Unoptimized version of `*dest \|= val`. |
| `static uint8_t` | `volatile_fetch_xor_u8 (volatile uint8_t *dest, uint8_t val)` | Unoptimized version of `*dest ^= val`. |
| `static uint8_t` | `volatile_fetch_and_u8 (volatile uint8_t *dest, uint8_t val)` | Unoptimized version of `*dest &= val`. |
| `static uint16_t` | `volatile_fetch_add_u16 (volatile uint16_t *dest, uint16_t val)` | Unoptimized version of `*dest += val`. |
| `static uint16_t` | `volatile_fetch_sub_u16 (volatile uint16_t *dest, uint16_t val)` | Unoptimized version of `*dest -= val`. |
| `static uint16_t` | `volatile_fetch_or_u16 (volatile uint16_t *dest, uint16_t val)` | Unoptimized version of `*dest \|= val`. |
| `static uint16_t` | `volatile_fetch_xor_u16 (volatile uint16_t *dest, uint16_t val)` | Unoptimized version of `*dest ^= val`. |
| `static uint16_t` | `volatile_fetch_and_u16 (volatile uint16_t *dest, uint16_t val)` | Unoptimized version of `*dest &= val`. |
| `static uint32_t` | `volatile_fetch_add_u32 (volatile uint32_t *dest, uint32_t val)` | Unoptimized version of `*dest += val`. |
| `static uint32_t` | `volatile_fetch_sub_u32 (volatile uint32_t *dest, uint32_t val)` | Unoptimized version of `*dest -= val`. |
| `static uint32_t` | `volatile_fetch_or_u32 (volatile uint32_t *dest, uint32_t val)` | Unoptimized version of `*dest \|= val`. |
| `static uint32_t` | `volatile_fetch_xor_u32 (volatile uint32_t *dest, uint32_t val)` | Unoptimized version of `*dest ^= val`. |
| `static uint32_t` | `volatile_fetch_and_u32 (volatile uint32_t *dest, uint32_t val)` | Unoptimized version of `*dest &= val`. |
| `static uint64_t` | `volatile_fetch_add_u64 (volatile uint64_t *dest, uint64_t val)` | Unoptimized version of `*dest += val`. |
| `static uint64_t` | `volatile_fetch_sub_u64 (volatile uint64_t *dest, uint64_t val)` | Unoptimized version of `*dest -= val`. |
| `static uint64_t` | `volatile_fetch_or_u64 (volatile uint64_t *dest, uint64_t val)` | Unoptimized version of `*dest \|= val`. |
| `static uint64_t` | `volatile_fetch_xor_u64 (volatile uint64_t *dest, uint64_t val)` | Unoptimized version of `*dest ^= val`. |
| `static uint64_t` | `volatile_fetch_and_u64 (volatile uint64_t *dest, uint64_t val)` | Unoptimized version of `*dest &= val`. |
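The fetch-and-modify helpers are plain read-modify-write sequences: a volatile load, the arithmetic or bitwise operation, and a volatile store. Because the load and the store are separate accesses, an interrupt can fire in between and an update can be lost; only the individual accesses are optimization-proof, not the sequence. A sketch of volatile_fetch_add_u8, assuming it mirrors `*dest += val` and returns the new value (verify the return semantics against the actual header, since a `fetch_` prefix elsewhere often denotes the old value):

```c
#include <stdint.h>

static inline uint8_t volatile_fetch_add_u8(volatile uint8_t *dest, uint8_t val)
{
    /* Volatile load, add, volatile store: each access is emitted
     * exactly as written, but the sequence as a whole is NOT atomic. */
    uint8_t result = (uint8_t)(*dest + val);
    *dest = result;
    return result; /* assumed: the new value, mirroring *dest += val */
}
```

Callers that need the whole read-modify-write to be interrupt-safe must add their own protection (for example, disabling interrupts around the call) or use the atomic counterparts instead.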