Qucs-S S-parameter Viewer & RF Synthesis Tools
Loading...
Searching...
No Matches
docs
help
help-venv
lib
python3.12
site-packages
greenlet
platform
switch_aarch64_gcc.h
1
/*
2
* this is the internal transfer function.
3
*
4
* HISTORY
5
* 07-Sep-16 Add clang support using x register naming. Fredrik Fornwall
6
* 13-Apr-13 Add support for strange GCC caller-save decisions
7
* 08-Apr-13 File creation. Michael Matz
8
*
9
* NOTES
10
*
11
* Simply save all callee saved registers
12
*
13
*/
14
15
/* Extra offset applied when locating the reference point on the stack. */
#define STACK_REFPLUS 1

#ifdef SLP_EVAL
/* No additional slack needed when computing the stack bounds. */
#define STACK_MAGIC 0

/* Registers listed as clobbers around the switch so the compiler spills
 * and reloads anything live in them: the general-purpose registers
 * x19-x28, the link register x30, and the SIMD/FP registers v8-v15.
 * (x29, the frame pointer, is handled explicitly in slp_switch below.)
 */
#define REGS_TO_SAVE "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", \
                     "x27", "x28", "x30" /* aka lr */, \
                     "v8", "v9", "v10", "v11", \
                     "v12", "v13", "v14", "v15"
23
24
/*
25
* Recall:
26
asm asm-qualifiers ( AssemblerTemplate
27
: OutputOperands
28
[ : InputOperands
29
[ : Clobbers ] ])
30
31
or (if asm-qualifiers contains 'goto')
32
33
asm asm-qualifiers ( AssemblerTemplate
34
: OutputOperands
35
: InputOperands
36
: Clobbers
37
: GotoLabels)
38
39
and OutputOperands are
40
41
[ [asmSymbolicName] ] constraint (cvariablename)
42
43
When a name is given, refer to it as ``%[the name]``.
44
When not given, ``%i`` where ``i`` is the zero-based index.
45
46
constraints starting with ``=`` means only writing; ``+`` means
47
reading and writing.
48
49
This is followed by ``r`` (must be register) or ``m`` (must be memory)
50
and these can be combined.
51
52
The ``cvariablename`` is actually an lvalue expression.
53
54
In AArch64, 31 general purpose registers. If named X0... they are
55
64-bit. If named W0... they are the bottom 32 bits of the
56
corresponding 64 bit register.
57
58
XZR and WZR are hardcoded to 0, and ignore writes.
59
60
Arguments are in X0..X7. C++ uses X0 for ``this``. X0 holds simple return
61
values (?)
62
63
Whenever a W register is written, the top half of the X register is zeroed.
64
*/
65
66
/* Perform the actual stack switch.
 *
 * Forces all callee-saved registers to the stack (via the clobber list),
 * saves the frame pointer (x29) and captures the stack pointer, then lets
 * SLP_SAVE_STATE / SLP_RESTORE_STATE copy the stack contents while sp and
 * x29 are rebased by ``stsizediff``.  Returns 0 on the fall-through path;
 * SLP_SAVE_STATE itself contains the -1/+1 early returns.
 */
static int
slp_switch(void)
{
    int err;
    void *fp;
    /* Windows uses a 32-bit long on a 64-bit platform, unlike the rest of
       the world, and in theory we can be compiled with GCC/llvm on 64-bit
       windows.  So we need a fixed-width type.
    */
    int64_t *stackref, stsizediff;
    /* Empty asm with REGS_TO_SAVE as clobbers: makes the compiler spill
       every callee-saved register it is using onto the stack. */
    __asm__ volatile ("" : : : REGS_TO_SAVE);
    /* Save the frame pointer (x29) to memory; it is restored by hand at
       the end because it is not in the clobber list above. */
    __asm__ volatile ("str x29, %0" : "=m" (fp) : : );
    /* Capture the current stack pointer into stackref. */
    __asm__ ("mov %0, sp" : "=r" (stackref));
    {
        /* May return -1 or 1 directly; falls through only on success,
           setting stsizediff to the offset between old and new stacks. */
        SLP_SAVE_STATE(stackref, stsizediff);
        /* Rebase both the stack pointer and the frame pointer by the
           difference between the two stacks' positions. */
        __asm__ volatile (
            "add sp,sp,%0\n"
            "add x29,x29,%0\n"
            :
            : "r" (stsizediff)
            );
        SLP_RESTORE_STATE();
        /* SLP_SAVE_STATE macro contains some return statements
           (of -1 and 1).  It falls through only when
           the return value of slp_save_state() is zero, which
           is placed in x0.
           In that case we (slp_switch) also want to return zero
           (also in x0 of course).
           Now, some GCC versions (seen with 4.8) think it's a
           good idea to save/restore x0 around the call to
           slp_restore_state(), instead of simply zeroing it
           at the return below.  But slp_restore_state
           writes random values to the stack slot used for this
           save/restore (from when it once was saved above in
           SLP_SAVE_STATE, when it was still uninitialized), so
           "restoring" that precious zero actually makes us
           return random values.  There are some ways to make
           GCC not use that zero value in the normal return path
           (e.g. making err volatile, but that costs a little
           stack space), and the simplest is to call a function
           that returns an unknown value (which happens to be zero),
           so the saved/restored value is unused.

           Thus, this line stores a 0 into the ``err`` variable
           (which must be held in a register for this instruction,
           of course).  The ``w`` qualifier causes the instruction
           to use W0 instead of X0, otherwise we get a warning
           about a value size mismatch (because err is an int,
           and aarch64 platforms are LP64: 32-bit int, 64 bit long
           and pointer).
        */
        __asm__ volatile ("mov %w0, #0" : "=r" (err));
    }
    /* Restore the frame pointer saved before the switch. */
    __asm__ volatile ("ldr x29, %0" : : "m" (fp) :);
    /* Force the compiler to reload every callee-saved register from the
       (now switched) stack. */
    __asm__ volatile ("" : : : REGS_TO_SAVE);
    return err;
}
123
124
#endif
Generated by Doxygen
1.9.8