Qucs-S S-parameter Viewer & RF Synthesis Tools
Loading...
Searching...
No Matches
docs
help
help-venv
lib
python3.12
site-packages
greenlet
platform
switch_amd64_unix.h
1
/*
 * this is the internal transfer function.
 *
 * HISTORY
 * 3-May-13  Ralf Schmitt  <ralf@systemexit.de>
 *      Add support for strange GCC caller-save decisions
 *      (ported from switch_aarch64_gcc.h)
 * 18-Aug-11  Alexey Borzenkov  <snaury@gmail.com>
 *      Correctly save rbp, csr and cw
 * 01-Apr-04  Hye-Shik Chang  <perky@FreeBSD.org>
 *      Ported from i386 to amd64.
 * 24-Nov-02  Christian Tismer  <tismer@tismer.com>
 *      needed to add another magic constant to ensure
 *      that f in slp_eval_frame(PyFrameObject *f)
 *      gets included into the saved stack area.
 *      STACK_REFPLUS will probably be 1 in most cases.
 * 17-Sep-02  Christian Tismer  <tismer@tismer.com>
 *      after virtualizing stack save/restore, the
 *      stack size shrunk a bit. Needed to introduce
 *      an adjustment STACK_MAGIC per platform.
 * 15-Sep-02  Gerd Woetzel  <gerd.woetzel@GMD.DE>
 *      slightly changed framework for spark
 * 31-Avr-02  Armin Rigo  <arigo@ulb.ac.be>
 *      Added ebx, esi and edi register-saves.
 * 01-Mar-02  Samual M. Rushing  <rushing@ironport.com>
 *      Ported from i386.
 */
/* Extra slot counted into the saved stack area so that the frame argument
   of slp_eval_frame() is included (see HISTORY above). */
#define STACK_REFPLUS 1

#ifdef SLP_EVAL

/* #define STACK_MAGIC 3 */
/* the above works fine with gcc 2.96, but 2.95.3 wants this */
#define STACK_MAGIC 0

/* Registers named in the asm clobber lists below so the compiler itself
   spills and reloads them around the switch (rbp/rbx are instead saved
   by hand in slp_switch). */
#define REGS_TO_SAVE "r12", "r13", "r14", "r15"
/* The internal transfer function: switch the machine stack from the
 * current greenlet's stack to the target's, preserving everything the
 * compiler and FPU/SSE state depend on.
 *
 * Returns 0 on a successful switch (err is zeroed via xorq below).
 * NOTE(review): SLP_SAVE_STATE / SLP_RESTORE_STATE are project macros
 * defined elsewhere; SLP_SAVE_STATE is assumed to copy the stack away
 * and yield the rsp adjustment in stsizediff — confirm against the
 * slp_* framework headers.
 */
static int
slp_switch(void)
{
    int err;
    void* rbp;
    void* rbx;
    unsigned int csr;
    unsigned short cw;
    /* This used to be declared 'register', but that does nothing in
       modern compilers and is explicitly forbidden in some new
       standards. */
    long *stackref, stsizediff;
    /* Empty asm with REGS_TO_SAVE as clobbers: forces the compiler to
       save r12-r15 before we start moving the stack. */
    __asm__ volatile ("" : : : REGS_TO_SAVE);
    /* Save the x87 FPU control word and the SSE control/status register;
       both survive in locals across the stack move. */
    __asm__ volatile ("fstcw %0" : "=m" (cw));
    __asm__ volatile ("stmxcsr %0" : "=m" (csr));
    /* Save the frame and base registers by hand (they are not in the
       clobber list above). */
    __asm__ volatile ("movq %%rbp, %0" : "=m" (rbp));
    __asm__ volatile ("movq %%rbx, %0" : "=m" (rbx));
    /* Capture the current stack pointer for the save/restore macros. */
    __asm__ ("movq %%rsp, %0" : "=g" (stackref));
    {
        SLP_SAVE_STATE(stackref, stsizediff);
        /* Shift rsp and rbp by the difference between the old and new
           stack locations; after this we are running on the target stack. */
        __asm__ volatile (
            "addq %0, %%rsp\n"
            "addq %0, %%rbp\n"
            :
            : "r" (stsizediff)
            );
        SLP_RESTORE_STATE();
        /* err = 0, set via rax so the compiler cannot hoist the store
           across the stack switch. */
        __asm__ volatile ("xorq %%rax, %%rax" : "=a" (err));
    }
    /* Restore everything in reverse order of the saves above. */
    __asm__ volatile ("movq %0, %%rbx" : : "m" (rbx));
    __asm__ volatile ("movq %0, %%rbp" : : "m" (rbp));
    __asm__ volatile ("ldmxcsr %0" : : "m" (csr));
    __asm__ volatile ("fldcw %0" : : "m" (cw));
    __asm__ volatile ("" : : : REGS_TO_SAVE);
    return err;
}
#endif /* SLP_EVAL */
/*
 * further self-processing support
 */

/*
 * if you want to add self-inspection tools, place them
 * here. See the x86_msvc for the necessary defines.
 * These features are highly experimental and not
 * essential yet.
 */
Generated by
1.9.8