1 | /* |
1 | /* |
2 | * Copyright (c) 2001-2012 Marc Alexander Lehmann <schmorp@schmorp.de> |
2 | * Copyright (c) 2001-2012,2015 Marc Alexander Lehmann <schmorp@schmorp.de> |
3 | * |
3 | * |
4 | * Redistribution and use in source and binary forms, with or without modifica- |
4 | * Redistribution and use in source and binary forms, with or without modifica- |
5 | * tion, are permitted provided that the following conditions are met: |
5 | * tion, are permitted provided that the following conditions are met: |
6 | * |
6 | * |
7 | * 1. Redistributions of source code must retain the above copyright notice, |
7 | * 1. Redistributions of source code must retain the above copyright notice, |
… | |
… | |
76 | * 2011-07-03 rely on __GCC_HAVE_DWARF2_CFI_ASM for cfi detection. |
76 | * 2011-07-03 rely on __GCC_HAVE_DWARF2_CFI_ASM for cfi detection. |
77 | * 2011-08-08 cygwin trashes stacks, use pthreads with double stack on cygwin. |
77 | * 2011-08-08 cygwin trashes stacks, use pthreads with double stack on cygwin. |
78 | * 2012-12-04 reduce misprediction penalty for x86/amd64 assembly switcher. |
78 | * 2012-12-04 reduce misprediction penalty for x86/amd64 assembly switcher. |
79 | * 2012-12-05 experimental fiber backend (allocates stack twice). |
79 | * 2012-12-05 experimental fiber backend (allocates stack twice). |
80 | * 2012-12-07 API version 3 - add coro_stack_alloc/coro_stack_free. |
80 | * 2012-12-07 API version 3 - add coro_stack_alloc/coro_stack_free. |
|
|
81 | * 2012-12-21 valgrind stack registering was broken. |
|
|
82 | * 2015-12-05 experimental asm be for arm7, based on a patch by Nick Zavaritsky. |
|
|
83 | * use __name__ for predefined symbols, as in libecb. |
|
|
84 | * enable guard pages on arm, aarch64 and mips. |
|
|
85 | * 2016-08-27 try to disable _FORTIFY_SOURCE with CORO_SJLJ, as it |
|
|
86 | * breaks setjmp/longjmp. |
81 | */ |
87 | */ |
82 | |
88 | |
83 | #ifndef CORO_H |
89 | #ifndef CORO_H |
84 | #define CORO_H |
90 | #define CORO_H |
85 | |
91 | |
… | |
… | |
137 | * For SGI's version of Microsoft's NT ;) |
143 | * For SGI's version of Microsoft's NT ;) |
138 | * |
144 | * |
139 | * -DCORO_ASM |
145 | * -DCORO_ASM |
140 | * |
146 | * |
141 | * Hand coded assembly, known to work only on a few architectures/ABI: |
147 | * Hand coded assembly, known to work only on a few architectures/ABI: |
142 | * GCC + x86/IA32 and amd64/x86_64 + GNU/Linux and a few BSDs. Fastest choice, |
148 | * GCC + arm7/x86/IA32/amd64/x86_64 + GNU/Linux and a few BSDs. Fastest |
143 | * if it works. |
149 | * choice, if it works. |
144 | * |
150 | * |
145 | * -DCORO_PTHREAD |
151 | * -DCORO_PTHREAD |
146 | * |
152 | * |
147 | * Use the pthread API. You have to provide <pthread.h> and -lpthread. |
153 | * Use the pthread API. You have to provide <pthread.h> and -lpthread. |
148 | * This is likely the slowest backend, and it also does not support fork(), |
154 | * This is likely the slowest backend, and it also does not support fork(), |
… | |
… | |
296 | |
302 | |
297 | #if !defined CORO_LOSER && !defined CORO_UCONTEXT \ |
303 | #if !defined CORO_LOSER && !defined CORO_UCONTEXT \ |
298 | && !defined CORO_SJLJ && !defined CORO_LINUX \ |
304 | && !defined CORO_SJLJ && !defined CORO_LINUX \ |
299 | && !defined CORO_IRIX && !defined CORO_ASM \ |
305 | && !defined CORO_IRIX && !defined CORO_ASM \ |
300 | && !defined CORO_PTHREAD && !defined CORO_FIBER |
306 | && !defined CORO_PTHREAD && !defined CORO_FIBER |
301 | # if defined WINDOWS && (defined __x86 || defined __amd64 || defined _M_IX86 || defined _M_AMD64) |
307 | # if defined WINDOWS && (__i386__ || (__x86_64__ && !__ILP32__) || defined _M_IX86 || defined _M_AMD64) |
302 | # define CORO_ASM 1 |
308 | # define CORO_ASM 1 |
303 | # elif defined WINDOWS || defined _WIN32 |
309 | # elif defined WINDOWS || defined _WIN32 |
304 | # define CORO_LOSER 1 /* you don't win with windoze */ |
310 | # define CORO_LOSER 1 /* you don't win with windoze */ |
305 | # elif defined __linux && (defined __x86 || defined __amd64) |
311 | # elif __linux && (__i386__ || (__x86_64__ && !__ILP32__) || (__arm__ && __ARM_ARCH == 7)) |
306 | # define CORO_ASM 1 |
312 | # define CORO_ASM 1 |
307 | # elif defined HAVE_UCONTEXT_H |
313 | # elif defined HAVE_UCONTEXT_H |
308 | # define CORO_UCONTEXT 1 |
314 | # define CORO_UCONTEXT 1 |
309 | # elif defined HAVE_SETJMP_H && defined HAVE_SIGALTSTACK |
315 | # elif defined HAVE_SETJMP_H && defined HAVE_SIGALTSTACK |
310 | # define CORO_SJLJ 1 |
316 | # define CORO_SJLJ 1 |
… | |
… | |
329 | |
335 | |
330 | #elif CORO_SJLJ || CORO_LOSER || CORO_LINUX || CORO_IRIX |
336 | #elif CORO_SJLJ || CORO_LOSER || CORO_LINUX || CORO_IRIX |
331 | |
337 | |
332 | # if defined(CORO_LINUX) && !defined(_GNU_SOURCE) |
338 | # if defined(CORO_LINUX) && !defined(_GNU_SOURCE) |
333 | # define _GNU_SOURCE /* for glibc */ |
339 | # define _GNU_SOURCE /* for glibc */ |
|
|
340 | # endif |
|
|
341 | |
|
|
342 | /* try to disable well-meant but buggy checks in some libcs */ |
|
|
343 | # ifdef _FORTIFY_SOURCE |
|
|
344 | # undef _FORTIFY_SOURCE |
|
|
345 | # undef __USE_FORTIFY_LEVEL /* helps some more when too much has been included already */ |
334 | # endif |
346 | # endif |
335 | |
347 | |
336 | # if !CORO_LOSER |
348 | # if !CORO_LOSER |
337 | # include <unistd.h> |
349 | # include <unistd.h> |
338 | # endif |
350 | # endif |
… | |
… | |
372 | struct coro_context |
384 | struct coro_context |
373 | { |
385 | { |
374 | void **sp; /* must be at offset 0 */ |
386 | void **sp; /* must be at offset 0 */ |
375 | }; |
387 | }; |
376 | |
388 | |
|
|
389 | #if __i386__ || __x86_64__ |
377 | void __attribute__ ((__noinline__, __regparm__(2))) |
390 | void __attribute__ ((__noinline__, __regparm__(2))) |
|
|
391 | #else |
|
|
392 | void __attribute__ ((__noinline__)) |
|
|
393 | #endif |
378 | coro_transfer (coro_context *prev, coro_context *next); |
394 | coro_transfer (coro_context *prev, coro_context *next); |
379 | |
395 | |
380 | # define coro_destroy(ctx) (void *)(ctx) |
396 | # define coro_destroy(ctx) (void *)(ctx) |
381 | |
397 | |
382 | #elif CORO_PTHREAD |
398 | #elif CORO_PTHREAD |