/* Source: mirror of https://gitee.com/Lamdonn/varch.git (synced 2025-12-06 16:56:42 +08:00).
 * Upstream change note: add feature to bind CoTask and CoTimer to the specified CoScheduler;
 * improve functional safety. TODO: 1. compatibility with coroutine API for highly intensive
 * IO, sockets, etc.; 2. fix running failure on arm64.
 */
/*********************************************************************************************************
* ------------------------------------------------------------------------------------------------------
* file description
* ------------------------------------------------------------------------------------------------------
* \file coroutine_cfg.h
* \unit coroutine
* \brief This is a C language coroutine library
* \author Lamdonn
* \version v0.2.0
* \license GPL-2.0
* \copyright Copyright (C) 2025 Lamdonn.
********************************************************************************************************/
#ifndef __coroutine_cfg_H
#define __coroutine_cfg_H

/**
* \brief Coroutine scheduler number
* \note This macro defines the number of coroutine schedulers
*/
#define COROUTINE_SCHEDULER_NUMBER 1

/**
* \brief Coroutine static task max number
* \note This macro defines the maximum number of coroutine tasks that can be created statically
*/
#define COROUTINE_STATIC_TASK_MAX_NUMBER 8

/**
* \brief Coroutine static timer max number
* \note This macro defines the maximum number of coroutine timers that can be created statically
*/
#define COROUTINE_STATIC_TIMER_MAX_NUMBER 8

/**
* \brief Coroutine static stack max number
* \note This macro defines the maximum number of coroutine stacks that can be created statically
*/
#define COROUTINE_STATIC_STACK_MAX_NUMBER 8

/**
* \brief Coroutine stack default size (bytes)
* \note This macro defines the default size of the coroutine stack
* \note If the coroutine stack size is not specified, this value will be used
*/
#define COROUTINE_STACK_DEFAULT_SIZE 10240

/**
* \brief Coroutine enable stack calculate
* \note This macro defines whether to enable the stack calculate feature
* \note If enabled, the coroutine stack usage will be calculated automatically
*/
#define COROUTINE_ENABLE_STACK_CALCULATE 1

/**
* \brief Coroutine enable loading calculate
* \note This macro defines whether to enable the loading calculate feature
* \note If enabled, the coroutine loading will be calculated automatically
*/
#define COROUTINE_ENABLE_LOADING_CALCULATE 1

/**
* \brief Coroutine loading queue size
* \note This macro defines the size of the coroutine loading queue
*/
#define COROUTINE_LOADING_CALCULATE_QSIZE 10

/**
* \brief Coroutine loading calculate period
* \note This macro defines the period of the coroutine loading calculate
* \note The unit is ms
*/
#define COROUTINE_LOADING_CALCULATE_PERIOD 100

/**
* \brief Stack get and set pointers depending on the platform definition
* \param p Pointer-sized lvalue that receives (GET) or supplies (SET) the stack pointer
* \return None
* \note This macro is used to get and set the stack pointer depending on the platform definition
* \note For x86_32 the stack pointer is esp; for x86_64 it is rsp; for arm/aarch64 it is sp
* \note Common platforms have been implemented; platforms not implemented here must be
*       implemented by the user (see the #error fallback at the end of this chain)
* \note Branch ordering matters: 64-bit variants that also define the 32-bit macro
*       (e.g. __s390x__ also defines __s390__) must be tested first
*/
#if defined(__i386__)
#define COROUTINE_GET_SP(p) __asm__ volatile("mov %%esp, %0" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mov %0, %%esp" : : "r" (p) : "memory")
#elif defined(__x86_64__)
#define COROUTINE_GET_SP(p) __asm__ volatile("mov %%rsp, %0" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mov %0, %%rsp" : : "r" (p) : "memory")
#elif defined(__arm__)
#define COROUTINE_GET_SP(p) __asm__ volatile("mov %0, sp" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mov sp, %0" : : "r" (p) : "memory")
#elif defined(__aarch64__)
/* NOTE(review): `mov sp, %0` needs a 64-bit Xn source, so `p` must be a pointer or
 * 64-bit integer lvalue. The AAPCS64 also requires sp to stay 16-byte aligned at all
 * times — a coroutine stack that is not 16-byte aligned is a plausible cause of the
 * known "running fail on arm64"; verify the stack allocator's alignment. */
#define COROUTINE_GET_SP(p) __asm__ volatile("mov %0, sp" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mov sp, %0" : : "r" (p) : "memory")
#elif defined(__mips__)
/* __mips__ is defined for both 32-bit and 64-bit MIPS; `move` works for both,
 * so a separate __mips64__ branch is unnecessary. */
#define COROUTINE_GET_SP(p) __asm__ volatile(".set noreorder\n\t" "move %0, $sp\n\t" ".set reorder" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile(".set noreorder\n\t" "move $sp, %0\n\t" ".set reorder" : : "r" (p) : "memory")
#elif defined(__riscv)
/* Fixed: the RISC-V C API predefines __riscv (no trailing underscores), not __riscv__;
 * the old test never matched, so RISC-V builds fell through to the #error below.
 * `mv` is the same mnemonic for RV32 and RV64, so no __riscv_xlen split is needed. */
#define COROUTINE_GET_SP(p) __asm__ volatile("mv %0, sp" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mv sp, %0" : : "r" (p) : "memory")
#elif defined(__powerpc__) || defined(__powerpc64__)
/* PowerPC stack pointer is r1. Bare register numbers are used because plain GNU as
 * does not accept the `r1` symbolic name without -mregnames. __powerpc__ is also
 * defined on ppc64, so both are handled here. */
#define COROUTINE_GET_SP(p) __asm__ volatile("mr %0, 1" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mr 1, %0" : : "r" (p) : "memory")
#elif defined(__s390x__)
/* Fixed: `mr` is a PowerPC mnemonic, not s390; use lgr (64-bit load register) and the
 * %r15 register syntax. Tested before __s390__ because s390x defines both macros.
 * NOTE(review): not exercised on real hardware here — confirm on z/Architecture. */
#define COROUTINE_GET_SP(p) __asm__ volatile("lgr %0, %%r15" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("lgr %%r15, %0" : : "r" (p) : "memory")
#elif defined(__s390__)
/* 31-bit s390: lr is the 32-bit load-register form. NOTE(review): unverified on hardware. */
#define COROUTINE_GET_SP(p) __asm__ volatile("lr %0, %%r15" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("lr %%r15, %0" : : "r" (p) : "memory")
#elif defined(__sparc__)
/* __sparc__ is defined for both v8 and v9, so no separate sparcv9 branch is needed
 * (the old __sparcv9__ spelling is not a standard predefined macro anyway). */
#define COROUTINE_GET_SP(p) __asm__ volatile("mov %%sp, %0" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mov %0, %%sp" : : "r" (p) : "memory")
#elif defined(__tilegx__) || defined(__tile__)
/* Fixed: TILE/TILE-Gx assembly uses `move` and plain `sp`, not the x86-style `mov %%sp`.
 * NOTE(review): obsolete architecture, unverified. */
#define COROUTINE_GET_SP(p) __asm__ volatile("move %0, sp" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("move sp, %0" : : "r" (p) : "memory")
#elif defined(__hppa__) || defined(__hppa64__) || defined(__parisc__) || defined(__parisc64__)
/* Fixed: the old branches used the PowerPC `mr r1` form; on PA-RISC the stack pointer
 * is %r30 (sp) and `copy` is the register-move pseudo-op.
 * NOTE(review): unverified on hardware — confirm against the PA-RISC assembler manual. */
#define COROUTINE_GET_SP(p) __asm__ volatile("copy %%r30, %0" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("copy %0, %%r30" : : "r" (p) : "memory")
#elif defined(__ia64__)
/* Itanium: sp is an alias of r12. NOTE(review): switching sp alone does not switch the
 * register-stack backing store (bsp); a full ia64 coroutine switch needs more than this. */
#define COROUTINE_GET_SP(p) __asm__ volatile("mov %0=sp" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mov sp=%0" : : "r" (p) : "memory")
#elif defined(__loongarch__)
/* Fixed: LoongArch assembly uses `move` and the `$sp` register name. */
#define COROUTINE_GET_SP(p) __asm__ volatile("move %0, $sp" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("move $sp, %0" : : "r" (p) : "memory")
#elif defined(__m68k__)
/* Fixed: m68k uses `move.l` with %sp (a7); the old `mov %0, sp` is not valid 68k asm. */
#define COROUTINE_GET_SP(p) __asm__ volatile("move.l %%sp, %0" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("move.l %0, %%sp" : : "r" (p) : "memory")
#elif defined(__sh__)
/* Fixed: SuperH `mov` is src,dst order and the stack pointer is r15; the old form used
 * dst,src order with a nonexistent `sp` register name. Covers sh64 too (__sh__ is
 * defined there as well). NOTE(review): unverified on hardware. */
#define COROUTINE_GET_SP(p) __asm__ volatile("mov r15, %0" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mov %0, r15" : : "r" (p) : "memory")
#elif defined(__xtensa__)
/* Fixed: a1 is the stack pointer but it is an address register, not a special register,
 * so rsr/wsr (special-register access) do not apply; a plain register move is used.
 * NOTE(review): with the windowed ABI, raw sp swaps also require window handling. */
#define COROUTINE_GET_SP(p) __asm__ volatile("mov %0, a1" : "=r" (p) : : "memory")
#define COROUTINE_SET_SP(p) __asm__ volatile("mov a1, %0" : : "r" (p) : "memory")
#else
#error "Unsupported platform, please implement `COROUTINE_GET_SP` and `COROUTINE_SET_SP`"
#endif

#endif // !__coroutine_cfg_H
|