toolchain: Remove vestigial COFF assembler symbol mangling support

The toolchain headers included an abstraction for defining symbol
names in assembly context, for the case where the build uses a
DOS-style assembler that automatically prepends an underscore to
symbol names.

We aren't.  Zephyr is an ELF platform.  None of our toolchains do
this.  Nothing sets the "TOOL_PREPENDS_UNDERSCORE" macro from within
the project, and it surely isn't an industry standard.  Yank it out.
Now we can write assembler labels in natural syntax, and a few other
things fall out to simplify too.

(NOTE: these headers contain assembly code and will fail checkpatch.
That is an expected false positive.)

Change-Id: Ic89e74422b52fe50b3b7306a0347d7a560259581
Signed-off-by: Andy Ross <andrew.j.ross@intel.com>
This commit is contained in:
Andy Ross 2016-08-18 09:25:00 -07:00 committed by Andrew Boie
parent a3d3659c75
commit 9f628943a8
8 changed files with 60 additions and 83 deletions

View file

@ -186,7 +186,7 @@ SECTION_FUNC(TEXT, __svc)
/* exception return is done in _IntExit(), including _GDB_STUB_EXC_EXIT */
b _IntExit
BRANCH_LABEL(_context_switch);
_context_switch:
#endif
/*

View file

@ -90,7 +90,7 @@ SECTION_FUNC(exception.entry, _exception)
andi r15, et, 1
beq r15, zero, not_interrupt
BRANCH_LABEL(is_interrupt)
is_interrupt:
/* If we get here, this is an interrupt */
/* Grab a reference to _nanokernel in r10 so we can determine the
@ -111,7 +111,7 @@ BRANCH_LABEL(is_interrupt)
addi sp, sp, -4
stw r12, 0(sp)
BRANCH_LABEL(on_irq_stack)
on_irq_stack:
/* Enter C interrupt handling code. Value of ipending will be the
* function parameter since we put it in r4
@ -166,7 +166,7 @@ BRANCH_LABEL(on_irq_stack)
call _Swap
jmpi _exception_exit
BRANCH_LABEL(not_interrupt)
not_interrupt:
/* Since this wasn't an interrupt we're not going to restart the
* faulting instruction.
@ -185,7 +185,7 @@ BRANCH_LABEL(not_interrupt)
bne r11, zero, is_interrupt
#endif
BRANCH_LABEL(_exception_enter_fault)
_exception_enter_fault:
/* If we get here, the exception wasn't in interrupt or an
* invocation of irq_offload(). Let _Fault() handle it in
* C domain
@ -195,7 +195,7 @@ BRANCH_LABEL(_exception_enter_fault)
call _Fault
jmpi _exception_exit
BRANCH_LABEL(no_reschedule)
no_reschedule:
/* We put the thread stack pointer on top of the IRQ stack before
* we switched stacks. Restore it to go back to thread stack
@ -204,7 +204,7 @@ BRANCH_LABEL(no_reschedule)
/* Fall through */
BRANCH_LABEL(_exception_exit)
_exception_exit:
/* We are on the thread stack. Restore all saved registers
* and return to the interrupted context */

View file

@ -80,11 +80,11 @@ SECTION_FUNC(exception.other, _Swap)
stw r14, __tNANO_fiber_OFFSET(r10)
br next_chosen
BRANCH_LABEL(not_fiber)
not_fiber:
/* Fiber was NULL, we'll choose nanokernel.task */
ldw r11, __tNANO_task_OFFSET(r10)
BRANCH_LABEL(next_chosen)
next_chosen:
/* Set _nanokernel.current to value we chose for r11 */
stw r11, __tNANO_current_OFFSET(r10)
@ -123,7 +123,7 @@ BRANCH_LABEL(next_chosen)
ori r3, r3, NIOS2_STATUS_PIE_MSK
wrctl status, r3
BRANCH_LABEL(no_unlock)
no_unlock:
#else
wrctl status, r3
#endif

View file

@ -122,7 +122,7 @@ SECTION_FUNC(TEXT, _ExcEnt)
*/
xchgl %ecx, (%esp)
BRANCH_LABEL(_ExcEntSetupDone)
_ExcEntSetupDone:
/* By the time we get here, the stack should look like this:
* ESP -> ECX (excepting task)
@ -175,7 +175,7 @@ BRANCH_LABEL(_ExcEntSetupDone)
jne alreadyInException
movl %esp, __tTCS_esfPtr_OFFSET(%edx)
BRANCH_LABEL(alreadyInException)
alreadyInException:
#endif /* CONFIG_GDB_INFO */
@ -209,7 +209,7 @@ BRANCH_LABEL(alreadyInException)
je allDone
sti
BRANCH_LABEL(allDone)
allDone:
#if CONFIG_X86_IAMCU
movl %esp, %eax /* NANO_ESF * parameter */
#else
@ -271,7 +271,7 @@ SECTION_FUNC(TEXT, _ExcExit)
andl $~EXC_ACTIVE, __tTCS_flags_OFFSET (%ecx)
BRANCH_LABEL(nestedException)
nestedException:
#endif /* CONFIG_FP_SHARING || CONFIG_GDB_INFO */
/*
@ -299,7 +299,7 @@ BRANCH_LABEL(nestedException)
#if ALL_DYN_EXC_STUBS > 0
BRANCH_LABEL(_DynExcStubCommon)
_DynExcStubCommon:
call _common_dynamic_exc_handler
#ifndef CONFIG_X86_IAMCU
/* Cleanse the stack of stub_num */
@ -314,7 +314,7 @@ BRANCH_LABEL(_DynExcStubCommon)
*/
.altmacro
.macro __EXC_STUB_NUM id
BRANCH_LABEL(_DynExcStub\id)
_DynExcStub\id:
.endm
.macro EXC_STUB_NUM id
__EXC_STUB_NUM %id

View file

@ -223,7 +223,7 @@ SECTION_FUNC(TEXT, _IntEnt)
#ifdef CONFIG_DEBUG_INFO
jmp alreadyOnIntStack
BRANCH_LABEL(nested_save_isf)
nested_save_isf:
movl __tNANO_isf_OFFSET(%ecx), %edx /* Get old ISF */
movl %esp, __tNANO_isf_OFFSET(%ecx) /* Save new ISF */
pushl %edx /* Save old ISF */
@ -231,7 +231,7 @@ BRANCH_LABEL(nested_save_isf)
/* fall through to nested case */
BRANCH_LABEL(alreadyOnIntStack)
alreadyOnIntStack:
#ifdef CONFIG_INT_LATENCY_BENCHMARK
/* preserve eax which contain stub return address */
pushl %eax
@ -244,7 +244,7 @@ BRANCH_LABEL(alreadyOnIntStack)
jmp *%eax /* "return" back to stub */
#ifdef CONFIG_SYS_POWER_MANAGEMENT
BRANCH_LABEL(_HandleIdle)
_HandleIdle:
/* Preserve eax which contains stub return address */
#if defined(CONFIG_NANOKERNEL) && defined(CONFIG_TICKLESS_IDLE)
pushl %eax
@ -326,11 +326,11 @@ SECTION_FUNC(TEXT, _IntExitWithEoi)
*
* void _IntExit (void);
*/
BRANCH_LABEL(_IntExit)
_IntExit:
cli /* disable interrupts */
BRANCH_LABEL(_IntExitWithCli)
_IntExitWithCli:
#ifdef CONFIG_INT_LATENCY_BENCHMARK
call _int_latency_start
#endif
@ -434,7 +434,7 @@ BRANCH_LABEL(_IntExitWithCli)
iret
BRANCH_LABEL(noReschedule)
noReschedule:
/*
* A thread reschedule is not required; switch back to the
@ -453,7 +453,7 @@ BRANCH_LABEL(noReschedule)
* not be performed.
*/
BRANCH_LABEL(nestedInterrupt)
nestedInterrupt:
#ifdef CONFIG_INT_LATENCY_BENCHMARK
call _int_latency_stop
#endif
@ -538,7 +538,7 @@ SECTION_FUNC(TEXT, _SpuriousIntHandler)
mov %esp, %edx
#endif
BRANCH_LABEL(finishSpuriousInt)
finishSpuriousInt:
/* re-enable interrupts */
@ -551,7 +551,7 @@ BRANCH_LABEL(finishSpuriousInt)
movl $_NANO_ERR_SPURIOUS_INT, %eax
#endif
BRANCH_LABEL(callFatalHandler)
callFatalHandler:
/* call the fatal error handler */
call _NanoFatalErrorHandler
@ -559,7 +559,7 @@ BRANCH_LABEL(callFatalHandler)
/* handler doesn't return */
#if ALL_DYN_IRQ_STUBS > 0
BRANCH_LABEL(_DynIntStubCommon)
_DynIntStubCommon:
call _common_dynamic_irq_handler
/* Clean up and call IRET */
jmp _IntExitWithEoi
@ -577,7 +577,7 @@ BRANCH_LABEL(_DynIntStubCommon)
*/
.altmacro
.macro __INT_STUB_NUM id
BRANCH_LABEL(_DynIntStub\id)
_DynIntStub\id:
.endm
.macro INT_STUB_NUM id
__INT_STUB_NUM %id

View file

@ -158,7 +158,7 @@ SECTION_FUNC(TEXT, _Swap)
* (_nanokernel.task). The 'task' field will _never_ be NULL.
*/
BRANCH_LABEL(swapTask)
swapTask:
movl __tNANO_task_OFFSET (%eax), %ecx
/* fall through to 'restoreContext' */
@ -170,7 +170,7 @@ BRANCH_LABEL(swapTask)
* contains &_nanokernel.
*/
BRANCH_LABEL(restoreContext)
restoreContext:
#ifdef CONFIG_FP_SHARING
/*
@ -250,7 +250,7 @@ BRANCH_LABEL(restoreContext)
fninit
jmp floatSaveDone
BRANCH_LABEL(x87FloatSave)
x87FloatSave:
#endif /* CONFIG_SSE */
/* 'fnsave' performs an implicit 'fninit' after saving state! */
@ -259,8 +259,8 @@ BRANCH_LABEL(x87FloatSave)
/* fall through to 'floatSaveDone' */
BRANCH_LABEL(floatSaveDone)
BRANCH_LABEL(restoreContext_NoFloatSave)
floatSaveDone:
restoreContext_NoFloatSave:
/*********************************************************
* Restore floating point context of the incoming thread.
@ -282,7 +282,7 @@ BRANCH_LABEL(restoreContext_NoFloatSave)
fxrstor __tTCS_preempFloatReg_OFFSET (%ecx)
jmp floatRestoreDone
BRANCH_LABEL(x87FloatRestore)
x87FloatRestore:
#endif /* CONFIG_SSE */
@ -290,8 +290,8 @@ BRANCH_LABEL(x87FloatRestore)
/* fall through to 'floatRestoreDone' */
BRANCH_LABEL(floatRestoreDone)
BRANCH_LABEL(restoreContext_NoFloatRestore)
floatRestoreDone:
restoreContext_NoFloatRestore:
/* record that the incoming thread "owns" the non-integer registers */
@ -305,7 +305,7 @@ BRANCH_LABEL(restoreContext_NoFloatRestore)
* the last thread that utilized the non-integer registers.
*/
BRANCH_LABEL(restoreContext_NoFloatSwap)
restoreContext_NoFloatSwap:
/*
* Leave CR0[TS] clear if incoming thread utilizes "floating point"
@ -326,7 +326,7 @@ BRANCH_LABEL(restoreContext_NoFloatSwap)
orl $0x8, %edx
movl %edx, %cr0
BRANCH_LABEL(CROHandlingDone)
CROHandlingDone:
#endif /* CONFIG_FP_SHARING */
@ -379,7 +379,7 @@ BRANCH_LABEL(CROHandlingDone)
/* restore _Swap's %eax */
popl %eax
BRANCH_LABEL(skipIntLatencyStop)
skipIntLatencyStop:
#endif
popfl
#if CONFIG_X86_IAMCU

View file

@ -55,26 +55,6 @@
#define SECTION .section
#endif
/*
* The following definitions are used for symbol name compatibility.
*
* When #if 1, sources are assembled assuming the compiler
* you are using does not generate global symbols prefixed by "_".
* (e.g. elf/dwarf)
*
* When #if 0, sources are assembled assuming the compiler
* you are using generates global symbols prefixed by "_".
* (e.g. coff/stabs)
*/
#ifdef _ASMLANGUAGE
#ifndef TOOL_PREPENDS_UNDERSCORE
#define FUNC(sym) sym
#else
#define FUNC(sym) _##sym
#endif
#endif
/*
* If the project is being built for speed (i.e. not for minimum size) then
* align functions and branches in executable sections to improve performance.
@ -112,10 +92,7 @@
#endif
#define GC_SECTION(sym) SECTION .text.FUNC(sym), "ax"
#define BRANCH_LABEL(sym) FUNC(sym) :
#define VAR(sym) FUNC(sym)
#define GC_SECTION(sym) SECTION .text.##sym, "ax"
#endif /* _ASMLANGUAGE */

View file

@ -134,10 +134,10 @@ A##a:
#if defined(_ASMLANGUAGE) && !defined(_LINKER)
#if defined(CONFIG_ARM) || defined(CONFIG_NIOS2)
#define GTEXT(sym) .global FUNC(sym); .type FUNC(sym), %function
#define GDATA(sym) .global FUNC(sym); .type FUNC(sym), %object
#define WTEXT(sym) .weak FUNC(sym); .type FUNC(sym), %function
#define WDATA(sym) .weak FUNC(sym); .type FUNC(sym), %object
#define GTEXT(sym) .global sym; .type sym, %function
#define GDATA(sym) .global sym; .type sym, %object
#define WTEXT(sym) .weak sym; .type sym, %function
#define WDATA(sym) .weak sym; .type sym, %object
#elif defined(CONFIG_ARC)
/*
* Need to use assembly macros because ';' is interpreted as the start of
@ -145,18 +145,18 @@ A##a:
*/
.macro glbl_text symbol
.globl FUNC(\symbol)
.type FUNC(\symbol), %function
.globl \symbol
.type \symbol, %function
.endm
.macro glbl_data symbol
.globl FUNC(\symbol)
.type FUNC(\symbol), %object
.globl \symbol
.type \symbol, %object
.endm
.macro weak_data symbol
.weak FUNC(\symbol)
.type FUNC(\symbol), %object
.weak \symbol
.type \symbol, %object
.endm
#define GTEXT(sym) glbl_text sym
@ -164,8 +164,8 @@ A##a:
#define WDATA(sym) weak_data sym
#else /* !CONFIG_ARM && !CONFIG_ARC */
#define GTEXT(sym) .globl FUNC(sym); .type FUNC(sym), @function
#define GDATA(sym) .globl FUNC(sym); .type FUNC(sym), @object
#define GTEXT(sym) .globl sym; .type sym, @function
#define GDATA(sym) .globl sym; .type sym, @object
#endif
/*
@ -188,22 +188,22 @@ A##a:
*/
.macro section_var section, symbol
.section .\section\().FUNC(\symbol)
FUNC(\symbol) :
.section .\section\().\symbol
\symbol :
.endm
.macro section_func section, symbol
.section .\section\().FUNC(\symbol), "ax"
.section .\section\().\symbol, "ax"
FUNC_CODE()
PERFOPT_ALIGN
FUNC(\symbol) :
\symbol :
FUNC_INSTR(\symbol)
.endm
.macro section_subsec_func section, subsection, symbol
.section .\section\().\subsection, "ax"
PERFOPT_ALIGN
FUNC(\symbol) :
\symbol :
.endm
#define SECTION_VAR(sect, sym) section_var sect, sym
@ -212,14 +212,14 @@ A##a:
section_subsec_func sect, subsec, sym
#else /* !CONFIG_ARC */
#define SECTION_VAR(sect, sym) .section .sect.FUNC(sym); FUNC(sym) :
#define SECTION_VAR(sect, sym) .section .sect.##sym; sym :
#define SECTION_FUNC(sect, sym) \
.section .sect.FUNC(sym), "ax"; \
.section .sect.sym, "ax"; \
FUNC_CODE() \
PERFOPT_ALIGN; FUNC(sym) : \
PERFOPT_ALIGN; sym : \
FUNC_INSTR(sym)
#define SECTION_SUBSEC_FUNC(sect, subsec, sym) \
.section .sect.subsec, "ax"; PERFOPT_ALIGN; FUNC(sym) :
.section .sect.subsec, "ax"; PERFOPT_ALIGN; sym :
#endif /* CONFIG_ARC */