1 | /* |
1 | /* |
2 | * Copyright (c) 2001-2012 Marc Alexander Lehmann <schmorp@schmorp.de> |
2 | * Copyright (c) 2001-2012,2015 Marc Alexander Lehmann <schmorp@schmorp.de> |
3 | * |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without modifica- |
4 | * Redistribution and use in source and binary forms, with or without modifica- |
5 | * tion, are permitted provided that the following conditions are met: |
5 | * tion, are permitted provided that the following conditions are met: |
6 | * |
6 | * |
7 | * 1. Redistributions of source code must retain the above copyright notice, |
7 | * 1. Redistributions of source code must retain the above copyright notice, |
… | |
… | |
76 | * 2011-07-03 rely on __GCC_HAVE_DWARF2_CFI_ASM for cfi detection. |
76 | * 2011-07-03 rely on __GCC_HAVE_DWARF2_CFI_ASM for cfi detection. |
77 | * 2011-08-08 cygwin trashes stacks, use pthreads with double stack on cygwin. |
77 | * 2011-08-08 cygwin trashes stacks, use pthreads with double stack on cygwin. |
78 | * 2012-12-04 reduce misprediction penalty for x86/amd64 assembly switcher. |
78 | * 2012-12-04 reduce misprediction penalty for x86/amd64 assembly switcher. |
79 | * 2012-12-05 experimental fiber backend (allocates stack twice). |
79 | * 2012-12-05 experimental fiber backend (allocates stack twice). |
80 | * 2012-12-07 API version 3 - add coro_stack_alloc/coro_stack_free. |
80 | * 2012-12-07 API version 3 - add coro_stack_alloc/coro_stack_free. |
|
|
81 | * 2012-12-21 valgrind stack registering was broken. |
|
|
82 | * 2015-12-05 experimental asm be for arm7, based on a patch by Nick Zavaritsky. |
|
|
83 | * use __name__ for predefined symbols, as in libecb. |
|
|
84 | * enable guard pages on arm, aarch64 and mips. |
81 | */ |
85 | */ |
82 | |
86 | |
83 | #ifndef CORO_H |
87 | #ifndef CORO_H |
84 | #define CORO_H |
88 | #define CORO_H |
85 | |
89 | |
… | |
… | |
137 | * For SGI's version of Microsoft's NT ;) |
141 | * For SGI's version of Microsoft's NT ;) |
138 | * |
142 | * |
139 | * -DCORO_ASM |
143 | * -DCORO_ASM |
140 | * |
144 | * |
141 | * Hand coded assembly, known to work only on a few architectures/ABI: |
145 | * Hand coded assembly, known to work only on a few architectures/ABI: |
142 | * GCC + x86/IA32 and amd64/x86_64 + GNU/Linux and a few BSDs. Fastest choice, |
146 | * GCC + arm7/x86/IA32/amd64/x86_64 + GNU/Linux and a few BSDs. Fastest |
143 | * if it works. |
147 | * choice, if it works. |
144 | * |
148 | * |
145 | * -DCORO_PTHREAD |
149 | * -DCORO_PTHREAD |
146 | * |
150 | * |
147 | * Use the pthread API. You have to provide <pthread.h> and -lpthread. |
151 | * Use the pthread API. You have to provide <pthread.h> and -lpthread. |
148 | * This is likely the slowest backend, and it also does not support fork(), |
152 | * This is likely the slowest backend, and it also does not support fork(), |
… | |
… | |
296 | |
300 | |
297 | #if !defined CORO_LOSER && !defined CORO_UCONTEXT \ |
301 | #if !defined CORO_LOSER && !defined CORO_UCONTEXT \ |
298 | && !defined CORO_SJLJ && !defined CORO_LINUX \ |
302 | && !defined CORO_SJLJ && !defined CORO_LINUX \ |
299 | && !defined CORO_IRIX && !defined CORO_ASM \ |
303 | && !defined CORO_IRIX && !defined CORO_ASM \ |
300 | && !defined CORO_PTHREAD && !defined CORO_FIBER |
304 | && !defined CORO_PTHREAD && !defined CORO_FIBER |
301 | # if defined WINDOWS && (defined __x86 || defined __amd64 || defined _M_IX86 || defined _M_AMD64) |
305 | # if defined WINDOWS && (defined __i386__ || (__x86_64__ && !__ILP32__) || defined _M_IX86 || defined _M_AMD64) |
302 | # define CORO_ASM 1 |
306 | # define CORO_ASM 1 |
303 | # elif defined WINDOWS || defined _WIN32 |
307 | # elif defined WINDOWS || defined _WIN32 |
304 | # define CORO_LOSER 1 /* you don't win with windoze */ |
308 | # define CORO_LOSER 1 /* you don't win with windoze */ |
305 | # elif defined __linux && (defined __x86 || defined __amd64) |
309 | # elif __linux && (__i386__ || (__x86_64__ && !__ILP32__) || (__arm__ && __ARM_ARCH == 7)) |
306 | # define CORO_ASM 1 |
310 | # define CORO_ASM 1 |
307 | # elif defined HAVE_UCONTEXT_H |
311 | # elif defined HAVE_UCONTEXT_H |
308 | # define CORO_UCONTEXT 1 |
312 | # define CORO_UCONTEXT 1 |
309 | # elif defined HAVE_SETJMP_H && defined HAVE_SIGALTSTACK |
313 | # elif defined HAVE_SETJMP_H && defined HAVE_SIGALTSTACK |
310 | # define CORO_SJLJ 1 |
314 | # define CORO_SJLJ 1 |
… | |
… | |
372 | struct coro_context |
376 | struct coro_context |
373 | { |
377 | { |
374 | void **sp; /* must be at offset 0 */ |
378 | void **sp; /* must be at offset 0 */ |
375 | }; |
379 | }; |
376 | |
380 | |
|
|
381 | #if __i386__ || __x86_64__ |
377 | void __attribute__ ((__noinline__, __regparm__(2))) |
382 | void __attribute__ ((__noinline__, __regparm__(2))) |
|
|
383 | #else |
|
|
384 | void __attribute__ ((__noinline__)) |
|
|
385 | #endif |
378 | coro_transfer (coro_context *prev, coro_context *next); |
386 | coro_transfer (coro_context *prev, coro_context *next); |
379 | |
387 | |
380 | # define coro_destroy(ctx) (void *)(ctx) |
388 | # define coro_destroy(ctx) (void *)(ctx) |
381 | |
389 | |
382 | #elif CORO_PTHREAD |
390 | #elif CORO_PTHREAD |