tc-arm.c 974 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
18222182321824218252182621827218282182921830218312183221833218342183521836218372183821839218402184121842218432184421845218462184721848218492185021851218522185321854218552185621857218582185921860218612186221863218642186521866218672186821869218702187121872218732187421875218762187721878218792188021881218822188321884218852188621887218882188921890218912189221893218942189521896218972189821899219002190121902219032190421905219062190721908219092191021911219122191321914219152191621917219182191921920219212192221923219242192521926219272192821929219302193121932219332193421935219362193721938219392194021941219422194321944219452194621947219482194921950219512195221953219542195521956219572195821959219602196121962219632196421965219662196721968219692197021971219722197321974219752197621977219782197921980219812198221983219842198521986219872198821989219902199121992219932199421995219962199721998219992200022001220022200322004220052200622007220082200922010220112201222013220142201522016220172201822019220202202122022220232202422025220262202722028220292203022031220322203322034220352203622037220382203922040220412204222043220442204522046220472204822049220502205122052220532205422055220562205722058220592206022061220622206322064220652206622067220682206922070220712207222073220742207522076220772207822079220802208122082220832208422085220862208722088220892209022091220922209322094220952209622097220982209922100221012210222103221042210522106221072210822109221102211122112221132211422115221162211722118221192212022121221222212322124221252212622127221282212922130221312213222133221342213522136221372213822139221402214122142221432214422145221462214722148221492215022151221522215322154221552215622157221582215922160221612216222163221642216522166221672216822169221702217122172221732217422175221762217722178221792218022181221822218322184221852218622187221882218922190221912219222193221942219522196221972219822199222002220122202222032220422205222062220722208222092221022211222122221322214222152221622217222182221922220222212
22222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212
26222262322624226252262622627226282262922630226312263222633226342263522636226372263822639226402264122642226432264422645226462264722648226492265022651226522265322654226552265622657226582265922660226612266222663226642266522666226672266822669226702267122672226732267422675226762267722678226792268022681226822268322684226852268622687226882268922690226912269222693226942269522696226972269822699227002270122702227032270422705227062270722708227092271022711227122271322714227152271622717227182271922720227212272222723227242272522726227272272822729227302273122732227332273422735227362273722738227392274022741227422274322744227452274622747227482274922750227512275222753227542275522756227572275822759227602276122762227632276422765227662276722768227692277022771227722277322774227752277622777227782277922780227812278222783227842278522786227872278822789227902279122792227932279422795227962279722798227992280022801228022280322804228052280622807228082280922810228112281222813228142281522816228172281822819228202282122822228232282422825228262282722828228292283022831228322283322834228352283622837228382283922840228412284222843228442284522846228472284822849228502285122852228532285422855228562285722858228592286022861228622286322864228652286622867228682286922870228712287222873228742287522876228772287822879228802288122882228832288422885228862288722888228892289022891228922289322894228952289622897228982289922900229012290222903229042290522906229072290822909229102291122912229132291422915229162291722918229192292022921229222292322924229252292622927229282292922930229312293222933229342293522936229372293822939229402294122942229432294422945229462294722948229492295022951229522295322954229552295622957229582295922960229612296222963229642296522966229672296822969229702297122972229732297422975229762297722978229792298022981229822298322984229852298622987229882298922990229912299222993229942299522996229972299822999230002300123002230032300423005230062300723008230092301023011230122301323014230152301623017230182301923020230212
30222302323024230252302623027230282302923030230312303223033230342303523036230372303823039230402304123042230432304423045230462304723048230492305023051230522305323054230552305623057230582305923060230612306223063230642306523066230672306823069230702307123072230732307423075230762307723078230792308023081230822308323084230852308623087230882308923090230912309223093230942309523096230972309823099231002310123102231032310423105231062310723108231092311023111231122311323114231152311623117231182311923120231212312223123231242312523126231272312823129231302313123132231332313423135231362313723138231392314023141231422314323144231452314623147231482314923150231512315223153231542315523156231572315823159231602316123162231632316423165231662316723168231692317023171231722317323174231752317623177231782317923180231812318223183231842318523186231872318823189231902319123192231932319423195231962319723198231992320023201232022320323204232052320623207232082320923210232112321223213232142321523216232172321823219232202322123222232232322423225232262322723228232292323023231232322323323234232352323623237232382323923240232412324223243232442324523246232472324823249232502325123252232532325423255232562325723258232592326023261232622326323264232652326623267232682326923270232712327223273232742327523276232772327823279232802328123282232832328423285232862328723288232892329023291232922329323294232952329623297232982329923300233012330223303233042330523306233072330823309233102331123312233132331423315233162331723318233192332023321233222332323324233252332623327233282332923330233312333223333233342333523336233372333823339233402334123342233432334423345233462334723348233492335023351233522335323354233552335623357233582335923360233612336223363233642336523366233672336823369233702337123372233732337423375233762337723378233792338023381233822338323384233852338623387233882338923390233912339223393233942339523396233972339823399234002340123402234032340423405234062340723408234092341023411234122341323414234152341623417234182341923420234212
34222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212
38222382323824238252382623827238282382923830238312383223833238342383523836238372383823839238402384123842238432384423845238462384723848238492385023851238522385323854238552385623857238582385923860238612386223863238642386523866238672386823869238702387123872238732387423875238762387723878238792388023881238822388323884238852388623887238882388923890238912389223893238942389523896238972389823899239002390123902239032390423905239062390723908239092391023911239122391323914239152391623917239182391923920239212392223923239242392523926239272392823929239302393123932239332393423935239362393723938239392394023941239422394323944239452394623947239482394923950239512395223953239542395523956239572395823959239602396123962239632396423965239662396723968239692397023971239722397323974239752397623977239782397923980239812398223983239842398523986239872398823989239902399123992239932399423995239962399723998239992400024001240022400324004240052400624007240082400924010240112401224013240142401524016240172401824019240202402124022240232402424025240262402724028240292403024031240322403324034240352403624037240382403924040240412404224043240442404524046240472404824049240502405124052240532405424055240562405724058240592406024061240622406324064240652406624067240682406924070240712407224073240742407524076240772407824079240802408124082240832408424085240862408724088240892409024091240922409324094240952409624097240982409924100241012410224103241042410524106241072410824109241102411124112241132411424115241162411724118241192412024121241222412324124241252412624127241282412924130241312413224133241342413524136241372413824139241402414124142241432414424145241462414724148241492415024151241522415324154241552415624157241582415924160241612416224163241642416524166241672416824169241702417124172241732417424175241762417724178241792418024181241822418324184241852418624187241882418924190241912419224193241942419524196241972419824199242002420124202242032420424205242062420724208242092421024211242122421324214242152421624217242182421924220242212
42222422324224242252422624227242282422924230242312423224233242342423524236242372423824239242402424124242242432424424245242462424724248242492425024251242522425324254242552425624257242582425924260242612426224263242642426524266242672426824269242702427124272242732427424275242762427724278242792428024281242822428324284242852428624287242882428924290242912429224293242942429524296242972429824299243002430124302243032430424305243062430724308243092431024311243122431324314243152431624317243182431924320243212432224323243242432524326243272432824329243302433124332243332433424335243362433724338243392434024341243422434324344243452434624347243482434924350243512435224353243542435524356243572435824359243602436124362243632436424365243662436724368243692437024371243722437324374243752437624377243782437924380243812438224383243842438524386243872438824389243902439124392243932439424395243962439724398243992440024401244022440324404244052440624407244082440924410244112441224413244142441524416244172441824419244202442124422244232442424425244262442724428244292443024431244322443324434244352443624437244382443924440244412444224443244442444524446244472444824449244502445124452244532445424455244562445724458244592446024461244622446324464244652446624467244682446924470244712447224473244742447524476244772447824479244802448124482244832448424485244862448724488244892449024491244922449324494244952449624497244982449924500245012450224503245042450524506245072450824509245102451124512245132451424515245162451724518245192452024521245222452324524245252452624527245282452924530245312453224533245342453524536245372453824539245402454124542245432454424545245462454724548245492455024551245522455324554245552455624557245582455924560245612456224563245642456524566245672456824569245702457124572245732457424575245762457724578245792458024581245822458324584245852458624587245882458924590245912459224593245942459524596245972459824599246002460124602246032460424605246062460724608246092461024611246122461324614246152461624617246182461924620246212
46222462324624246252462624627246282462924630246312463224633246342463524636246372463824639246402464124642246432464424645246462464724648246492465024651246522465324654246552465624657246582465924660246612466224663246642466524666246672466824669246702467124672246732467424675246762467724678246792468024681246822468324684246852468624687246882468924690246912469224693246942469524696246972469824699247002470124702247032470424705247062470724708247092471024711247122471324714247152471624717247182471924720247212472224723247242472524726247272472824729247302473124732247332473424735247362473724738247392474024741247422474324744247452474624747247482474924750247512475224753247542475524756247572475824759247602476124762247632476424765247662476724768247692477024771247722477324774247752477624777247782477924780247812478224783247842478524786247872478824789247902479124792247932479424795247962479724798247992480024801248022480324804248052480624807248082480924810248112481224813248142481524816248172481824819248202482124822248232482424825248262482724828248292483024831248322483324834248352483624837248382483924840248412484224843248442484524846248472484824849248502485124852248532485424855248562485724858248592486024861248622486324864248652486624867248682486924870248712487224873248742487524876248772487824879248802488124882248832488424885248862488724888248892489024891248922489324894248952489624897248982489924900249012490224903249042490524906249072490824909249102491124912249132491424915249162491724918249192492024921249222492324924249252492624927249282492924930249312493224933249342493524936249372493824939249402494124942249432494424945249462494724948249492495024951249522495324954249552495624957249582495924960249612496224963249642496524966249672496824969249702497124972249732497424975249762497724978249792498024981249822498324984249852498624987249882498924990249912499224993249942499524996249972499824999250002500125002250032500425005250062500725008250092501025011250122501325014250152501625017250182501925020250212
50222502325024250252502625027250282502925030250312503225033250342503525036250372503825039250402504125042250432504425045250462504725048250492505025051250522505325054250552505625057250582505925060250612506225063250642506525066250672506825069250702507125072250732507425075250762507725078250792508025081250822508325084250852508625087250882508925090250912509225093250942509525096250972509825099251002510125102251032510425105251062510725108251092511025111251122511325114251152511625117251182511925120251212512225123251242512525126251272512825129251302513125132251332513425135251362513725138251392514025141251422514325144251452514625147251482514925150251512515225153251542515525156251572515825159251602516125162251632516425165251662516725168251692517025171251722517325174251752517625177251782517925180251812518225183251842518525186251872518825189251902519125192251932519425195251962519725198251992520025201252022520325204252052520625207252082520925210252112521225213252142521525216252172521825219252202522125222252232522425225252262522725228252292523025231252322523325234252352523625237252382523925240252412524225243252442524525246252472524825249252502525125252252532525425255252562525725258252592526025261252622526325264252652526625267252682526925270252712527225273252742527525276252772527825279252802528125282252832528425285252862528725288252892529025291252922529325294252952529625297252982529925300253012530225303253042530525306253072530825309253102531125312253132531425315253162531725318253192532025321253222532325324253252532625327253282532925330253312533225333253342533525336253372533825339253402534125342253432534425345253462534725348253492535025351253522535325354253552535625357253582535925360253612536225363253642536525366253672536825369253702537125372253732537425375253762537725378253792538025381253822538325384253852538625387253882538925390253912539225393253942539525396253972539825399254002540125402254032540425405254062540725408254092541025411254122541325414254152541625417254182541925420254212
54222542325424254252542625427254282542925430254312543225433254342543525436254372543825439254402544125442254432544425445254462544725448254492545025451254522545325454254552545625457254582545925460254612546225463254642546525466254672546825469254702547125472254732547425475254762547725478254792548025481254822548325484254852548625487254882548925490254912549225493254942549525496254972549825499255002550125502255032550425505255062550725508255092551025511255122551325514255152551625517255182551925520255212552225523255242552525526255272552825529255302553125532255332553425535255362553725538255392554025541255422554325544255452554625547255482554925550255512555225553255542555525556255572555825559255602556125562255632556425565255662556725568255692557025571255722557325574255752557625577255782557925580255812558225583255842558525586255872558825589255902559125592255932559425595255962559725598255992560025601256022560325604256052560625607256082560925610256112561225613256142561525616256172561825619256202562125622256232562425625256262562725628256292563025631256322563325634256352563625637256382563925640256412564225643256442564525646256472564825649256502565125652256532565425655256562565725658256592566025661256622566325664256652566625667256682566925670256712567225673256742567525676256772567825679256802568125682256832568425685256862568725688256892569025691256922569325694256952569625697256982569925700257012570225703257042570525706257072570825709257102571125712257132571425715257162571725718257192572025721257222572325724257252572625727257282572925730257312573225733257342573525736257372573825739257402574125742257432574425745257462574725748257492575025751257522575325754257552575625757257582575925760257612576225763257642576525766257672576825769257702577125772257732577425775257762577725778257792578025781257822578325784257852578625787257882578925790257912579225793257942579525796257972579825799258002580125802258032580425805258062580725808258092581025811258122581325814258152581625817258182581925820258212
58222582325824258252582625827258282582925830258312583225833258342583525836258372583825839258402584125842258432584425845258462584725848258492585025851258522585325854258552585625857258582585925860258612586225863258642586525866258672586825869258702587125872258732587425875258762587725878258792588025881258822588325884258852588625887258882588925890258912589225893258942589525896258972589825899259002590125902259032590425905259062590725908259092591025911259122591325914259152591625917259182591925920259212592225923259242592525926259272592825929259302593125932259332593425935259362593725938259392594025941259422594325944259452594625947259482594925950259512595225953259542595525956259572595825959259602596125962259632596425965259662596725968259692597025971259722597325974259752597625977259782597925980259812598225983259842598525986259872598825989259902599125992259932599425995259962599725998259992600026001260022600326004260052600626007260082600926010260112601226013260142601526016260172601826019260202602126022260232602426025260262602726028260292603026031260322603326034260352603626037260382603926040260412604226043260442604526046260472604826049260502605126052260532605426055260562605726058260592606026061260622606326064260652606626067260682606926070260712607226073260742607526076260772607826079260802608126082260832608426085260862608726088260892609026091260922609326094260952609626097260982609926100261012610226103261042610526106261072610826109261102611126112261132611426115261162611726118261192612026121261222612326124261252612626127261282612926130261312613226133261342613526136261372613826139261402614126142261432614426145261462614726148261492615026151261522615326154261552615626157261582615926160261612616226163261642616526166261672616826169261702617126172261732617426175261762617726178261792618026181261822618326184261852618626187261882618926190261912619226193261942619526196261972619826199262002620126202262032620426205262062620726208262092621026211262122621326214262152621626217262182621926220262212
62222622326224262252622626227262282622926230262312623226233262342623526236262372623826239262402624126242262432624426245262462624726248262492625026251262522625326254262552625626257262582625926260262612626226263262642626526266262672626826269262702627126272262732627426275262762627726278262792628026281262822628326284262852628626287262882628926290262912629226293262942629526296262972629826299263002630126302263032630426305263062630726308263092631026311263122631326314263152631626317263182631926320263212632226323263242632526326263272632826329263302633126332263332633426335263362633726338263392634026341263422634326344263452634626347263482634926350263512635226353263542635526356263572635826359263602636126362263632636426365263662636726368263692637026371263722637326374263752637626377263782637926380263812638226383263842638526386263872638826389263902639126392263932639426395263962639726398263992640026401264022640326404264052640626407264082640926410264112641226413264142641526416264172641826419264202642126422264232642426425264262642726428264292643026431264322643326434264352643626437264382643926440264412644226443264442644526446264472644826449264502645126452264532645426455264562645726458264592646026461264622646326464264652646626467264682646926470264712647226473264742647526476264772647826479264802648126482264832648426485264862648726488264892649026491264922649326494264952649626497264982649926500265012650226503265042650526506265072650826509265102651126512265132651426515265162651726518265192652026521265222652326524265252652626527265282652926530265312653226533265342653526536265372653826539265402654126542265432654426545265462654726548265492655026551265522655326554265552655626557265582655926560265612656226563265642656526566265672656826569265702657126572265732657426575265762657726578265792658026581265822658326584265852658626587265882658926590265912659226593265942659526596265972659826599266002660126602266032660426605266062660726608266092661026611266122661326614266152661626617266182661926620266212
66222662326624266252662626627266282662926630266312663226633266342663526636266372663826639266402664126642266432664426645266462664726648266492665026651266522665326654266552665626657266582665926660266612666226663266642666526666266672666826669266702667126672266732667426675266762667726678266792668026681266822668326684266852668626687266882668926690266912669226693266942669526696266972669826699267002670126702267032670426705267062670726708267092671026711267122671326714267152671626717267182671926720267212672226723267242672526726267272672826729267302673126732267332673426735267362673726738267392674026741267422674326744267452674626747267482674926750267512675226753267542675526756267572675826759267602676126762267632676426765267662676726768267692677026771267722677326774267752677626777267782677926780267812678226783267842678526786267872678826789267902679126792267932679426795267962679726798267992680026801268022680326804268052680626807268082680926810268112681226813268142681526816268172681826819268202682126822268232682426825268262682726828268292683026831268322683326834268352683626837268382683926840268412684226843268442684526846268472684826849268502685126852268532685426855268562685726858268592686026861268622686326864268652686626867268682686926870268712687226873268742687526876268772687826879268802688126882268832688426885268862688726888268892689026891268922689326894268952689626897268982689926900269012690226903269042690526906269072690826909269102691126912269132691426915269162691726918269192692026921269222692326924269252692626927269282692926930269312693226933269342693526936269372693826939269402694126942269432694426945269462694726948269492695026951269522695326954269552695626957269582695926960269612696226963269642696526966269672696826969269702697126972269732697426975269762697726978269792698026981269822698326984269852698626987269882698926990269912699226993269942699526996269972699826999270002700127002270032700427005270062700727008270092701027011270122701327014270152701627017270182701927020270212
70222702327024270252702627027270282702927030270312703227033270342703527036270372703827039270402704127042270432704427045270462704727048270492705027051270522705327054270552705627057270582705927060270612706227063270642706527066270672706827069270702707127072270732707427075270762707727078270792708027081270822708327084270852708627087270882708927090270912709227093270942709527096270972709827099271002710127102271032710427105271062710727108271092711027111271122711327114271152711627117271182711927120271212712227123271242712527126271272712827129271302713127132271332713427135271362713727138271392714027141271422714327144271452714627147271482714927150271512715227153271542715527156271572715827159271602716127162271632716427165271662716727168271692717027171271722717327174271752717627177271782717927180271812718227183271842718527186271872718827189271902719127192271932719427195271962719727198271992720027201272022720327204272052720627207272082720927210272112721227213272142721527216272172721827219272202722127222272232722427225272262722727228272292723027231272322723327234272352723627237272382723927240272412724227243272442724527246272472724827249272502725127252272532725427255272562725727258272592726027261272622726327264272652726627267272682726927270272712727227273272742727527276272772727827279272802728127282272832728427285272862728727288272892729027291272922729327294272952729627297272982729927300273012730227303273042730527306273072730827309273102731127312273132731427315273162731727318273192732027321273222732327324273252732627327273282732927330273312733227333273342733527336273372733827339273402734127342273432734427345273462734727348273492735027351273522735327354273552735627357273582735927360273612736227363273642736527366273672736827369273702737127372273732737427375273762737727378273792738027381273822738327384273852738627387273882738927390273912739227393273942739527396273972739827399274002740127402274032740427405274062740727408274092741027411274122741327414274152741627417274182741927420274212
74222742327424274252742627427274282742927430274312743227433274342743527436274372743827439274402744127442274432744427445274462744727448274492745027451274522745327454274552745627457274582745927460274612746227463274642746527466274672746827469274702747127472274732747427475274762747727478274792748027481274822748327484274852748627487274882748927490274912749227493274942749527496274972749827499275002750127502275032750427505275062750727508275092751027511275122751327514275152751627517275182751927520275212752227523275242752527526275272752827529275302753127532275332753427535275362753727538275392754027541275422754327544275452754627547275482754927550275512755227553275542755527556275572755827559275602756127562275632756427565275662756727568275692757027571275722757327574275752757627577275782757927580275812758227583275842758527586275872758827589275902759127592275932759427595275962759727598275992760027601276022760327604276052760627607276082760927610276112761227613276142761527616276172761827619276202762127622276232762427625276262762727628276292763027631276322763327634276352763627637276382763927640276412764227643276442764527646276472764827649276502765127652276532765427655276562765727658276592766027661276622766327664276652766627667276682766927670276712767227673276742767527676276772767827679276802768127682276832768427685276862768727688276892769027691276922769327694276952769627697276982769927700277012770227703277042770527706277072770827709277102771127712277132771427715277162771727718277192772027721277222772327724277252772627727277282772927730277312773227733277342773527736277372773827739277402774127742277432774427745277462774727748277492775027751277522775327754277552775627757277582775927760277612776227763277642776527766277672776827769277702777127772277732777427775277762777727778277792778027781277822778327784277852778627787277882778927790277912779227793277942779527796277972779827799278002780127802278032780427805278062780727808278092781027811278122781327814278152781627817278182781927820278212
78222782327824278252782627827278282782927830278312783227833278342783527836278372783827839278402784127842278432784427845278462784727848278492785027851278522785327854278552785627857278582785927860278612786227863278642786527866278672786827869278702787127872278732787427875278762787727878278792788027881278822788327884278852788627887278882788927890278912789227893278942789527896278972789827899279002790127902279032790427905279062790727908279092791027911279122791327914279152791627917279182791927920279212792227923279242792527926279272792827929279302793127932279332793427935279362793727938279392794027941279422794327944279452794627947279482794927950279512795227953279542795527956279572795827959279602796127962279632796427965279662796727968279692797027971279722797327974279752797627977279782797927980279812798227983279842798527986279872798827989279902799127992279932799427995279962799727998279992800028001280022800328004280052800628007280082800928010280112801228013280142801528016280172801828019280202802128022280232802428025280262802728028280292803028031280322803328034280352803628037280382803928040280412804228043280442804528046280472804828049280502805128052280532805428055280562805728058280592806028061280622806328064280652806628067280682806928070280712807228073280742807528076280772807828079280802808128082280832808428085280862808728088280892809028091280922809328094280952809628097280982809928100281012810228103281042810528106281072810828109281102811128112281132811428115281162811728118281192812028121281222812328124281252812628127281282812928130281312813228133281342813528136281372813828139281402814128142281432814428145281462814728148281492815028151281522815328154281552815628157281582815928160281612816228163281642816528166281672816828169281702817128172281732817428175281762817728178281792818028181281822818328184281852818628187281882818928190281912819228193281942819528196281972819828199282002820128202282032820428205282062820728208282092821028211282122821328214282152821628217282182821928220282212
82222822328224282252822628227282282822928230282312823228233282342823528236282372823828239282402824128242282432824428245282462824728248282492825028251282522825328254282552825628257282582825928260282612826228263282642826528266282672826828269282702827128272282732827428275282762827728278282792828028281282822828328284282852828628287282882828928290282912829228293282942829528296282972829828299283002830128302283032830428305283062830728308283092831028311283122831328314283152831628317283182831928320283212832228323283242832528326283272832828329283302833128332283332833428335283362833728338283392834028341283422834328344283452834628347283482834928350283512835228353283542835528356283572835828359283602836128362283632836428365283662836728368283692837028371283722837328374283752837628377283782837928380283812838228383283842838528386283872838828389283902839128392283932839428395283962839728398283992840028401284022840328404284052840628407284082840928410284112841228413284142841528416284172841828419284202842128422284232842428425284262842728428284292843028431284322843328434284352843628437284382843928440284412844228443284442844528446284472844828449284502845128452284532845428455284562845728458284592846028461284622846328464284652846628467284682846928470284712847228473284742847528476284772847828479284802848128482284832848428485284862848728488284892849028491284922849328494284952849628497284982849928500285012850228503285042850528506285072850828509285102851128512285132851428515285162851728518285192852028521285222852328524285252852628527285282852928530285312853228533285342853528536285372853828539285402854128542285432854428545285462854728548285492855028551285522855328554285552855628557285582855928560285612856228563285642856528566285672856828569285702857128572285732857428575285762857728578285792858028581285822858328584285852858628587285882858928590285912859228593285942859528596285972859828599286002860128602286032860428605286062860728608286092861028611286122861328614286152861628617286182861928620286212
86222862328624286252862628627286282862928630286312863228633286342863528636286372863828639286402864128642286432864428645286462864728648286492865028651286522865328654286552865628657286582865928660286612866228663286642866528666286672866828669286702867128672286732867428675286762867728678286792868028681286822868328684286852868628687286882868928690286912869228693286942869528696286972869828699287002870128702287032870428705287062870728708287092871028711287122871328714287152871628717287182871928720287212872228723287242872528726287272872828729287302873128732287332873428735287362873728738287392874028741287422874328744287452874628747287482874928750287512875228753287542875528756287572875828759287602876128762287632876428765287662876728768287692877028771287722877328774287752877628777287782877928780287812878228783287842878528786287872878828789287902879128792287932879428795287962879728798287992880028801288022880328804288052880628807288082880928810288112881228813288142881528816288172881828819288202882128822288232882428825288262882728828288292883028831288322883328834288352883628837288382883928840288412884228843288442884528846288472884828849288502885128852288532885428855288562885728858288592886028861288622886328864288652886628867288682886928870288712887228873288742887528876288772887828879288802888128882288832888428885288862888728888288892889028891288922889328894288952889628897288982889928900289012890228903289042890528906289072890828909289102891128912289132891428915289162891728918289192892028921289222892328924289252892628927289282892928930289312893228933289342893528936289372893828939289402894128942289432894428945289462894728948289492895028951289522895328954289552895628957289582895928960289612896228963289642896528966289672896828969289702897128972289732897428975289762897728978289792898028981289822898328984289852898628987289882898928990289912899228993289942899528996289972899828999290002900129002290032900429005290062900729008290092901029011290122901329014290152901629017290182901929020290212
90222902329024290252902629027290282902929030290312903229033290342903529036290372903829039290402904129042290432904429045290462904729048290492905029051290522905329054290552905629057290582905929060290612906229063290642906529066290672906829069290702907129072290732907429075290762907729078290792908029081290822908329084290852908629087290882908929090290912909229093290942909529096290972909829099291002910129102291032910429105291062910729108291092911029111291122911329114291152911629117291182911929120291212912229123291242912529126291272912829129291302913129132291332913429135291362913729138291392914029141291422914329144291452914629147291482914929150291512915229153291542915529156291572915829159291602916129162291632916429165291662916729168291692917029171291722917329174291752917629177291782917929180291812918229183291842918529186291872918829189291902919129192291932919429195291962919729198291992920029201292022920329204292052920629207292082920929210292112921229213292142921529216292172921829219292202922129222292232922429225292262922729228292292923029231292322923329234292352923629237292382923929240292412924229243292442924529246292472924829249292502925129252292532925429255292562925729258292592926029261292622926329264292652926629267292682926929270292712927229273292742927529276292772927829279292802928129282292832928429285292862928729288292892929029291292922929329294292952929629297292982929929300293012930229303293042930529306293072930829309293102931129312293132931429315293162931729318293192932029321293222932329324293252932629327293282932929330293312933229333293342933529336293372933829339293402934129342293432934429345293462934729348293492935029351293522935329354293552935629357293582935929360293612936229363293642936529366293672936829369293702937129372293732937429375293762937729378293792938029381293822938329384293852938629387293882938929390293912939229393293942939529396293972939829399294002940129402294032940429405294062940729408294092941029411294122941329414294152941629417294182941929420294212
94222942329424294252942629427294282942929430294312943229433294342943529436294372943829439294402944129442294432944429445294462944729448294492945029451294522945329454294552945629457294582945929460294612946229463294642946529466294672946829469294702947129472294732947429475294762947729478294792948029481294822948329484294852948629487294882948929490294912949229493294942949529496294972949829499295002950129502295032950429505295062950729508295092951029511295122951329514295152951629517295182951929520295212952229523295242952529526295272952829529295302953129532295332953429535295362953729538295392954029541295422954329544295452954629547295482954929550295512955229553295542955529556295572955829559295602956129562295632956429565295662956729568295692957029571295722957329574295752957629577295782957929580295812958229583295842958529586295872958829589295902959129592295932959429595295962959729598295992960029601296022960329604296052960629607296082960929610296112961229613296142961529616296172961829619296202962129622296232962429625296262962729628296292963029631296322963329634296352963629637296382963929640296412964229643296442964529646296472964829649296502965129652296532965429655296562965729658296592966029661296622966329664296652966629667296682966929670296712967229673296742967529676296772967829679296802968129682296832968429685296862968729688296892969029691296922969329694296952969629697296982969929700297012970229703297042970529706297072970829709297102971129712297132971429715297162971729718297192972029721297222972329724297252972629727297282972929730297312973229733297342973529736297372973829739297402974129742297432974429745297462974729748297492975029751297522975329754297552975629757297582975929760297612976229763297642976529766297672976829769297702977129772297732977429775297762977729778297792978029781297822978329784297852978629787297882978929790297912979229793297942979529796297972979829799298002980129802298032980429805298062980729808298092981029811298122981329814298152981629817298182981929820298212
98222982329824298252982629827298282982929830298312983229833298342983529836298372983829839298402984129842298432984429845298462984729848298492985029851298522985329854298552985629857298582985929860298612986229863298642986529866298672986829869298702987129872298732987429875298762987729878298792988029881298822988329884298852988629887298882988929890298912989229893298942989529896298972989829899299002990129902299032990429905299062990729908299092991029911299122991329914299152991629917299182991929920299212992229923299242992529926299272992829929299302993129932299332993429935299362993729938299392994029941299422994329944299452994629947299482994929950299512995229953299542995529956299572995829959299602996129962299632996429965299662996729968299692997029971299722997329974299752997629977299782997929980299812998229983299842998529986299872998829989299902999129992299932999429995299962999729998299993000030001300023000330004300053000630007300083000930010300113001230013300143001530016300173001830019300203002130022300233002430025300263002730028300293003030031300323003330034300353003630037300383003930040300413004230043300443004530046300473004830049300503005130052300533005430055300563005730058300593006030061300623006330064300653006630067300683006930070300713007230073300743007530076300773007830079300803008130082300833008430085300863008730088300893009030091300923009330094300953009630097300983009930100301013010230103301043010530106301073010830109301103011130112301133011430115301163011730118301193012030121301223012330124301253012630127301283012930130301313013230133301343013530136301373013830139301403014130142301433014430145301463014730148301493015030151301523015330154301553015630157301583015930160301613016230163301643016530166301673016830169301703017130172301733017430175301763017730178301793018030181301823018330184301853018630187301883018930190301913019230193301943019530196301973019830199302003020130202302033020430205302063020730208302093021030211302123021330214302153021630217302183021930220302213
02223022330224302253022630227302283022930230302313023230233302343023530236302373023830239302403024130242302433024430245302463024730248302493025030251302523025330254302553025630257302583025930260302613026230263302643026530266302673026830269302703027130272302733027430275302763027730278302793028030281302823028330284302853028630287302883028930290302913029230293302943029530296302973029830299303003030130302303033030430305303063030730308303093031030311303123031330314303153031630317303183031930320303213032230323303243032530326303273032830329303303033130332303333033430335303363033730338303393034030341303423034330344303453034630347303483034930350303513035230353303543035530356303573035830359303603036130362303633036430365303663036730368303693037030371303723037330374303753037630377303783037930380303813038230383303843038530386303873038830389303903039130392303933039430395303963039730398303993040030401304023040330404304053040630407304083040930410304113041230413304143041530416304173041830419304203042130422304233042430425304263042730428304293043030431304323043330434304353043630437304383043930440304413044230443304443044530446304473044830449304503045130452304533045430455304563045730458304593046030461304623046330464304653046630467304683046930470304713047230473304743047530476304773047830479304803048130482304833048430485304863048730488304893049030491304923049330494304953049630497304983049930500305013050230503305043050530506305073050830509305103051130512305133051430515305163051730518305193052030521305223052330524305253052630527305283052930530305313053230533305343053530536305373053830539305403054130542305433054430545305463054730548305493055030551305523055330554305553055630557305583055930560305613056230563305643056530566305673056830569305703057130572305733057430575305763057730578305793058030581305823058330584305853058630587305883058930590305913059230593305943059530596305973059830599306003060130602306033060430605306063060730608306093061030611306123061330614306153061630617306183061930620306213
06223062330624306253062630627306283062930630306313063230633306343063530636306373063830639306403064130642306433064430645306463064730648306493065030651306523065330654306553065630657306583065930660306613066230663306643066530666306673066830669306703067130672306733067430675306763067730678306793068030681306823068330684306853068630687306883068930690306913069230693306943069530696306973069830699307003070130702307033070430705307063070730708307093071030711307123071330714307153071630717307183071930720307213072230723307243072530726307273072830729307303073130732307333073430735307363073730738307393074030741307423074330744307453074630747307483074930750307513075230753307543075530756307573075830759307603076130762307633076430765307663076730768307693077030771307723077330774307753077630777307783077930780307813078230783307843078530786307873078830789307903079130792307933079430795307963079730798307993080030801308023080330804308053080630807308083080930810308113081230813308143081530816308173081830819308203082130822308233082430825308263082730828308293083030831308323083330834308353083630837308383083930840308413084230843308443084530846308473084830849308503085130852308533085430855308563085730858308593086030861308623086330864308653086630867308683086930870308713087230873308743087530876308773087830879308803088130882308833088430885308863088730888308893089030891308923089330894308953089630897308983089930900309013090230903309043090530906309073090830909309103091130912309133091430915309163091730918309193092030921309223092330924309253092630927309283092930930309313093230933309343093530936309373093830939309403094130942309433094430945309463094730948309493095030951309523095330954309553095630957309583095930960309613096230963309643096530966309673096830969309703097130972309733097430975309763097730978309793098030981309823098330984309853098630987309883098930990309913099230993309943099530996309973099830999310003100131002310033100431005310063100731008310093101031011310123101331014310153101631017310183101931020310213
10223102331024310253102631027310283102931030310313103231033310343103531036310373103831039310403104131042310433104431045310463104731048310493105031051310523105331054310553105631057310583105931060310613106231063310643106531066310673106831069310703107131072310733107431075310763107731078310793108031081310823108331084310853108631087310883108931090310913109231093310943109531096310973109831099311003110131102311033110431105311063110731108311093111031111311123111331114311153111631117311183111931120311213112231123311243112531126311273112831129311303113131132311333113431135311363113731138311393114031141311423114331144311453114631147311483114931150311513115231153311543115531156311573115831159311603116131162311633116431165311663116731168311693117031171311723117331174311753117631177311783117931180311813118231183311843118531186311873118831189311903119131192311933119431195311963119731198311993120031201312023120331204312053120631207312083120931210312113121231213312143121531216312173121831219312203122131222312233122431225312263122731228312293123031231312323123331234312353123631237312383123931240312413124231243312443124531246312473124831249312503125131252312533125431255312563125731258312593126031261312623126331264312653126631267312683126931270312713127231273312743127531276312773127831279312803128131282312833128431285312863128731288312893129031291312923129331294312953129631297312983129931300313013130231303313043130531306313073130831309313103131131312313133131431315313163131731318313193132031321313223132331324313253132631327313283132931330313313133231333313343133531336313373133831339313403134131342313433134431345313463134731348313493135031351313523135331354313553135631357313583135931360313613136231363313643136531366313673136831369313703137131372313733137431375313763137731378313793138031381313823138331384313853138631387313883138931390313913139231393313943139531396313973139831399314003140131402314033140431405314063140731408314093141031411314123141331414314153141631417314183141931420314213
14223142331424314253142631427314283142931430314313143231433314343143531436314373143831439314403144131442314433144431445314463144731448314493145031451314523145331454314553145631457314583145931460314613146231463314643146531466314673146831469314703147131472314733147431475314763147731478314793148031481314823148331484314853148631487314883148931490314913149231493314943149531496314973149831499315003150131502315033150431505315063150731508315093151031511315123151331514315153151631517315183151931520315213152231523315243152531526315273152831529315303153131532315333153431535315363153731538315393154031541315423154331544315453154631547315483154931550315513155231553315543155531556315573155831559315603156131562315633156431565315663156731568315693157031571315723157331574315753157631577315783157931580315813158231583315843158531586315873158831589315903159131592315933159431595315963159731598315993160031601316023160331604316053160631607316083160931610316113161231613316143161531616316173161831619316203162131622316233162431625316263162731628316293163031631316323163331634316353163631637316383163931640316413164231643316443164531646316473164831649316503165131652316533165431655316563165731658316593166031661316623166331664316653166631667316683166931670316713167231673316743167531676316773167831679316803168131682316833168431685316863168731688316893169031691316923169331694316953169631697316983169931700317013170231703317043170531706317073170831709317103171131712317133171431715317163171731718317193172031721317223172331724317253172631727317283172931730317313173231733317343173531736317373173831739317403174131742317433174431745317463174731748317493175031751317523175331754317553175631757317583175931760317613176231763317643176531766317673176831769317703177131772317733177431775317763177731778317793178031781317823178331784317853178631787317883178931790317913179231793317943179531796317973179831799318003180131802318033180431805318063180731808318093181031811318123181331814318153181631817318183181931820318213
18223182331824318253182631827318283182931830318313183231833318343183531836318373183831839318403184131842318433184431845318463184731848318493185031851318523185331854318553185631857318583185931860318613186231863318643186531866318673186831869318703187131872318733187431875318763187731878318793188031881318823188331884318853188631887318883188931890318913189231893318943189531896318973189831899319003190131902319033190431905319063190731908319093191031911319123191331914319153191631917319183191931920319213192231923319243192531926319273192831929319303193131932319333193431935319363193731938319393194031941319423194331944319453194631947319483194931950319513195231953319543195531956319573195831959319603196131962319633196431965319663196731968319693197031971319723197331974319753197631977319783197931980319813198231983319843198531986319873198831989319903199131992319933199431995319963199731998319993200032001320023200332004320053200632007320083200932010320113201232013320143201532016320173201832019320203202132022320233202432025320263202732028320293203032031320323203332034320353203632037320383203932040320413204232043320443204532046320473204832049320503205132052320533205432055320563205732058320593206032061320623206332064320653206632067320683206932070320713207232073320743207532076320773207832079320803208132082320833208432085320863208732088320893209032091320923209332094320953209632097320983209932100321013210232103321043210532106321073210832109321103211132112321133211432115321163211732118321193212032121321223212332124321253212632127321283212932130321313213232133321343213532136321373213832139321403214132142321433214432145321463214732148321493215032151321523215332154321553215632157321583215932160321613216232163321643216532166321673216832169321703217132172321733217432175321763217732178321793218032181321823218332184321853218632187321883218932190321913219232193321943219532196321973219832199322003220132202322033220432205322063220732208322093221032211322123221332214322153221632217322183221932220322213
22223222332224322253222632227322283222932230322313223232233322343223532236322373223832239322403224132242322433224432245322463224732248322493225032251322523225332254322553225632257322583225932260322613226232263322643226532266322673226832269322703227132272322733227432275322763227732278322793228032281322823228332284322853228632287322883228932290322913229232293322943229532296322973229832299323003230132302323033230432305323063230732308323093231032311323123231332314323153231632317323183231932320323213232232323323243232532326323273232832329323303233132332323333233432335323363233732338323393234032341323423234332344323453234632347323483234932350323513235232353323543235532356323573235832359323603236132362323633236432365323663236732368323693237032371323723237332374323753237632377323783237932380323813238232383323843238532386323873238832389323903239132392323933239432395323963239732398323993240032401324023240332404324053240632407324083240932410324113241232413324143241532416324173241832419324203242132422324233242432425324263242732428324293243032431324323243332434324353243632437324383243932440324413244232443324443244532446324473244832449324503245132452324533245432455324563245732458324593246032461324623246332464324653246632467324683246932470324713247232473324743247532476324773247832479324803248132482324833248432485324863248732488324893249032491324923249332494324953249632497324983249932500325013250232503325043250532506325073250832509325103251132512325133251432515325163251732518325193252032521325223252332524325253252632527325283252932530325313253232533325343253532536325373253832539325403254132542325433254432545325463254732548325493255032551325523255332554325553255632557325583255932560325613256232563325643256532566325673256832569325703257132572325733257432575325763257732578325793258032581325823258332584325853258632587325883258932590325913259232593325943259532596325973259832599326003260132602326033260432605326063260732608326093261032611326123261332614326153261632617326183261932620326213
26223262332624326253262632627326283262932630326313263232633326343263532636326373263832639326403264132642326433264432645326463264732648326493265032651326523265332654326553265632657326583265932660326613266232663326643266532666326673266832669326703267132672326733267432675326763267732678326793268032681326823268332684326853268632687326883268932690326913269232693326943269532696326973269832699327003270132702327033270432705327063270732708327093271032711327123271332714327153271632717327183271932720327213272232723327243272532726327273272832729327303273132732327333273432735327363273732738327393274032741327423274332744327453274632747327483274932750327513275232753327543275532756327573275832759327603276132762327633276432765327663276732768327693277032771327723277332774327753277632777327783277932780327813278232783327843278532786327873278832789327903279132792327933279432795327963279732798327993280032801328023280332804328053280632807328083280932810328113281232813328143281532816328173281832819328203282132822328233282432825328263282732828328293283032831328323283332834328353283632837328383283932840328413284232843328443284532846328473284832849328503285132852328533285432855328563285732858328593286032861328623286332864328653286632867328683286932870328713287232873328743287532876328773287832879328803288132882328833288432885328863288732888328893289032891328923289332894328953289632897328983289932900329013290232903329043290532906329073290832909329103291132912329133291432915329163291732918329193292032921329223292332924329253292632927329283292932930329313293232933329343293532936329373293832939329403294132942329433294432945329463294732948329493295032951329523295332954329553295632957329583295932960329613296232963329643296532966329673296832969329703297132972329733297432975329763297732978329793298032981329823298332984329853298632987329883298932990329913299232993329943299532996329973299832999330003300133002330033300433005330063300733008330093301033011330123301333014330153301633017330183301933020330213
30223302333024330253302633027330283302933030330313303233033330343303533036330373303833039330403304133042330433304433045330463304733048330493305033051330523305333054330553305633057330583305933060330613306233063330643306533066330673306833069330703307133072330733307433075330763307733078330793308033081330823308333084330853308633087330883308933090330913309233093330943309533096330973309833099331003310133102331033310433105331063310733108331093311033111331123311333114331153311633117331183311933120331213312233123331243312533126331273312833129331303313133132331333313433135331363313733138331393314033141331423314333144331453314633147331483314933150331513315233153331543315533156331573315833159331603316133162331633316433165331663316733168331693317033171331723317333174331753317633177331783317933180331813318233183331843318533186331873318833189331903319133192331933319433195331963319733198331993320033201332023320333204332053320633207332083320933210332113321233213332143321533216332173321833219332203322133222332233322433225332263322733228332293323033231332323323333234332353323633237332383323933240332413324233243332443324533246332473324833249332503325133252332533325433255332563325733258332593326033261332623326333264332653326633267332683326933270332713327233273332743327533276332773327833279332803328133282332833328433285332863328733288332893329033291332923329333294332953329633297332983329933300333013330233303333043330533306333073330833309333103331133312333133331433315333163331733318333193332033321333223332333324333253332633327333283332933330333313333233333333343333533336333373333833339333403334133342333433334433345333463334733348333493335033351333523335333354333553335633357333583335933360333613336233363333643336533366333673336833369333703337133372333733337433375333763337733378333793338033381333823338333384333853338633387333883338933390333913339233393333943339533396333973339833399334003340133402334033340433405334063340733408334093341033411334123341333414334153341633417334183341933420334213
34223342333424334253342633427334283342933430334313343233433334343343533436334373343833439334403344133442334433344433445334463344733448334493345033451334523345333454334553345633457334583345933460334613346233463334643346533466334673346833469334703347133472334733347433475334763347733478334793348033481334823348333484334853348633487334883348933490334913349233493334943349533496334973349833499335003350133502335033350433505335063350733508335093351033511335123351333514335153351633517335183351933520335213352233523335243352533526335273352833529335303353133532335333353433535335363353733538335393354033541335423354333544335453354633547335483354933550335513355233553335543355533556335573355833559335603356133562335633356433565335663356733568335693357033571335723357333574335753357633577335783357933580335813358233583335843358533586335873358833589335903359133592335933359433595335963359733598335993360033601336023360333604336053360633607336083360933610336113361233613336143361533616336173361833619336203362133622336233362433625336263362733628336293363033631336323363333634336353363633637336383363933640336413364233643336443364533646336473364833649336503365133652336533365433655336563365733658336593366033661336623366333664336653366633667336683366933670336713367233673336743367533676
  1. /* tc-arm.c -- Assemble for the ARM
  2. Copyright (C) 1994-2022 Free Software Foundation, Inc.
  3. Contributed by Richard Earnshaw (rwe@pegasus.esprit.ec.org)
  4. Modified by David Taylor (dtaylor@armltd.co.uk)
  5. Cirrus coprocessor mods by Aldy Hernandez (aldyh@redhat.com)
  6. Cirrus coprocessor fixes by Petko Manolov (petkan@nucleusys.com)
  7. Cirrus coprocessor fixes by Vladimir Ivanov (vladitx@nucleusys.com)
  8. This file is part of GAS, the GNU Assembler.
  9. GAS is free software; you can redistribute it and/or modify
  10. it under the terms of the GNU General Public License as published by
  11. the Free Software Foundation; either version 3, or (at your option)
  12. any later version.
  13. GAS is distributed in the hope that it will be useful,
  14. but WITHOUT ANY WARRANTY; without even the implied warranty of
  15. MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  16. GNU General Public License for more details.
  17. You should have received a copy of the GNU General Public License
  18. along with GAS; see the file COPYING. If not, write to the Free
  19. Software Foundation, 51 Franklin Street - Fifth Floor, Boston, MA
  20. 02110-1301, USA. */
  21. #include "as.h"
  22. #include <limits.h>
  23. #include <stdarg.h>
  24. #define NO_RELOC 0
  25. #include "safe-ctype.h"
  26. #include "subsegs.h"
  27. #include "obstack.h"
  28. #include "libiberty.h"
  29. #include "opcode/arm.h"
  30. #include "cpu-arm.h"
  31. #ifdef OBJ_ELF
  32. #include "elf/arm.h"
  33. #include "dw2gencfi.h"
  34. #endif
  35. #include "dwarf2dbg.h"
#ifdef OBJ_ELF
/* Must be at least the size of the largest unwind opcode (currently two).  */
#define ARM_OPCODE_CHUNK_SIZE 8

/* This structure holds the unwinding state.  */

static struct
{
  /* Symbol at the start of the function being unwound; presumably set by
     the .fnstart handling and NULL otherwise -- NOTE(review): confirm at
     the directive handlers.  */
  symbolS * proc_start;
  /* Symbol for this function's unwind table entry.  */
  symbolS * table_entry;
  /* Explicitly named personality routine, when one was given.  */
  symbolS * personality_routine;
  /* Index of the personality routine to use -- NOTE(review): the sentinel
     for "none selected" is assigned elsewhere; confirm at use sites.  */
  int personality_index;
  /* The segment containing the function.  */
  segT saved_seg;
  subsegT saved_subseg;
  /* Opcodes generated from this function: a heap buffer of opcode_count
     used bytes within an opcode_alloc-byte allocation.  */
  unsigned char * opcodes;
  int opcode_count;
  int opcode_alloc;
  /* The number of bytes pushed to the stack.  */
  offsetT frame_size;
  /* We don't add stack adjustment opcodes immediately so that we can merge
     multiple adjustments.  We can also omit the final adjustment
     when using a frame pointer.  */
  offsetT pending_offset;
  /* These two fields are set by both unwind_movsp and unwind_setfp.  They
     hold the reg+offset to use when restoring sp from a frame pointer.  */
  offsetT fp_offset;
  int fp_reg;
  /* Nonzero if an unwind_setfp directive has been seen.  */
  unsigned fp_used:1;
  /* Nonzero if the last opcode restores sp from fp_reg.  */
  unsigned sp_restored:1;
} unwind;

/* Whether --fdpic was given.  */
static int arm_fdpic;
#endif /* OBJ_ELF */
/* Results from operand parsing worker functions.  */
typedef enum
{
  PARSE_OPERAND_SUCCESS,
  /* Parsing failed; the caller may backtrack and try another form.  */
  PARSE_OPERAND_FAIL,
  /* Parsing failed in a way that rules out any alternative parse, so
     the caller must not backtrack.  */
  PARSE_OPERAND_FAIL_NO_BACKTRACK
} parse_operand_result;
/* The possible floating point ABIs; see mfloat_abi_opt below.  */
enum arm_float_abi
{
  ARM_FLOAT_ABI_HARD,
  ARM_FLOAT_ABI_SOFTFP,
  ARM_FLOAT_ABI_SOFT
};
  84. /* Types of processor to assemble for. */
  85. #ifndef CPU_DEFAULT
  86. /* The code that was here used to select a default CPU depending on compiler
  87. pre-defines which were only present when doing native builds, thus
  88. changing gas' default behaviour depending upon the build host.
   If you have a target that requires a default CPU option then you
  90. should define CPU_DEFAULT here. */
  91. #endif
  92. /* Perform range checks on positive and negative overflows by checking if the
  93. VALUE given fits within the range of an BITS sized immediate. */
  94. static bool out_of_range_p (offsetT value, offsetT bits)
  95. {
  96. gas_assert (bits < (offsetT)(sizeof (value) * 8));
  97. return (value & ~((1 << bits)-1))
  98. && ((value & ~((1 << bits)-1)) != ~((1 << bits)-1));
  99. }
  100. #ifndef FPU_DEFAULT
  101. # ifdef TE_LINUX
  102. # define FPU_DEFAULT FPU_ARCH_FPA
  103. # elif defined (TE_NetBSD)
  104. # ifdef OBJ_ELF
  105. # define FPU_DEFAULT FPU_ARCH_VFP /* Soft-float, but VFP order. */
  106. # else
  107. /* Legacy a.out format. */
  108. # define FPU_DEFAULT FPU_ARCH_FPA /* Soft-float, but FPA order. */
  109. # endif
  110. # elif defined (TE_VXWORKS)
  111. # define FPU_DEFAULT FPU_ARCH_VFP /* Soft-float, VFP order. */
  112. # else
  113. /* For backwards compatibility, default to FPA. */
  114. # define FPU_DEFAULT FPU_ARCH_FPA
  115. # endif
  116. #endif /* ifndef FPU_DEFAULT */
  117. #define streq(a, b) (strcmp (a, b) == 0)
  118. /* Current set of feature bits available (CPU+FPU). Different from
  119. selected_cpu + selected_fpu in case of autodetection since the CPU
  120. feature bits are then all set. */
  121. static arm_feature_set cpu_variant;
  122. /* Feature bits used in each execution state. Used to set build attribute
  123. (in particular Tag_*_ISA_use) in CPU autodetection mode. */
  124. static arm_feature_set arm_arch_used;
  125. static arm_feature_set thumb_arch_used;
  126. /* Flags stored in private area of BFD structure. */
  127. static int uses_apcs_26 = false;
  128. static int atpcs = false;
  129. static int support_interwork = false;
  130. static int uses_apcs_float = false;
  131. static int pic_code = false;
  132. static int fix_v4bx = false;
  133. /* Warn on using deprecated features. */
  134. static int warn_on_deprecated = true;
  135. static int warn_on_restrict_it = false;
  136. /* Understand CodeComposer Studio assembly syntax. */
  137. bool codecomposer_syntax = false;
  138. /* Variables that we set while parsing command-line options. Once all
  139. options have been read we re-process these values to set the real
  140. assembly flags. */
  141. /* CPU and FPU feature bits set for legacy CPU and FPU options (eg. -marm1
  142. instead of -mcpu=arm1). */
  143. static const arm_feature_set *legacy_cpu = NULL;
  144. static const arm_feature_set *legacy_fpu = NULL;
  145. /* CPU, extension and FPU feature bits selected by -mcpu. */
  146. static const arm_feature_set *mcpu_cpu_opt = NULL;
  147. static arm_feature_set *mcpu_ext_opt = NULL;
  148. static const arm_feature_set *mcpu_fpu_opt = NULL;
  149. /* CPU, extension and FPU feature bits selected by -march. */
  150. static const arm_feature_set *march_cpu_opt = NULL;
  151. static arm_feature_set *march_ext_opt = NULL;
  152. static const arm_feature_set *march_fpu_opt = NULL;
  153. /* Feature bits selected by -mfpu. */
  154. static const arm_feature_set *mfpu_opt = NULL;
  155. /* Constants for known architecture features. */
  156. static const arm_feature_set fpu_default = FPU_DEFAULT;
  157. static const arm_feature_set fpu_arch_vfp_v1 ATTRIBUTE_UNUSED = FPU_ARCH_VFP_V1;
  158. static const arm_feature_set fpu_arch_vfp_v2 = FPU_ARCH_VFP_V2;
  159. static const arm_feature_set fpu_arch_vfp_v3 ATTRIBUTE_UNUSED = FPU_ARCH_VFP_V3;
  160. static const arm_feature_set fpu_arch_neon_v1 ATTRIBUTE_UNUSED = FPU_ARCH_NEON_V1;
  161. static const arm_feature_set fpu_arch_fpa = FPU_ARCH_FPA;
  162. static const arm_feature_set fpu_any_hard = FPU_ANY_HARD;
  163. #ifdef OBJ_ELF
  164. static const arm_feature_set fpu_arch_maverick = FPU_ARCH_MAVERICK;
  165. #endif
  166. static const arm_feature_set fpu_endian_pure = FPU_ARCH_ENDIAN_PURE;
  167. #ifdef CPU_DEFAULT
  168. static const arm_feature_set cpu_default = CPU_DEFAULT;
  169. #endif
  170. static const arm_feature_set arm_ext_v1 = ARM_FEATURE_CORE_LOW (ARM_EXT_V1);
  171. static const arm_feature_set arm_ext_v2 = ARM_FEATURE_CORE_LOW (ARM_EXT_V2);
  172. static const arm_feature_set arm_ext_v2s = ARM_FEATURE_CORE_LOW (ARM_EXT_V2S);
  173. static const arm_feature_set arm_ext_v3 = ARM_FEATURE_CORE_LOW (ARM_EXT_V3);
  174. static const arm_feature_set arm_ext_v3m = ARM_FEATURE_CORE_LOW (ARM_EXT_V3M);
  175. static const arm_feature_set arm_ext_v4 = ARM_FEATURE_CORE_LOW (ARM_EXT_V4);
  176. static const arm_feature_set arm_ext_v4t = ARM_FEATURE_CORE_LOW (ARM_EXT_V4T);
  177. static const arm_feature_set arm_ext_v5 = ARM_FEATURE_CORE_LOW (ARM_EXT_V5);
  178. static const arm_feature_set arm_ext_v4t_5 =
  179. ARM_FEATURE_CORE_LOW (ARM_EXT_V4T | ARM_EXT_V5);
  180. static const arm_feature_set arm_ext_v5t = ARM_FEATURE_CORE_LOW (ARM_EXT_V5T);
  181. static const arm_feature_set arm_ext_v5e = ARM_FEATURE_CORE_LOW (ARM_EXT_V5E);
  182. static const arm_feature_set arm_ext_v5exp = ARM_FEATURE_CORE_LOW (ARM_EXT_V5ExP);
  183. static const arm_feature_set arm_ext_v5j = ARM_FEATURE_CORE_LOW (ARM_EXT_V5J);
  184. static const arm_feature_set arm_ext_v6 = ARM_FEATURE_CORE_LOW (ARM_EXT_V6);
  185. static const arm_feature_set arm_ext_v6k = ARM_FEATURE_CORE_LOW (ARM_EXT_V6K);
  186. static const arm_feature_set arm_ext_v6t2 = ARM_FEATURE_CORE_LOW (ARM_EXT_V6T2);
/* Only for compatibility of hint instructions.  */
  188. static const arm_feature_set arm_ext_v6k_v6t2 =
  189. ARM_FEATURE_CORE_LOW (ARM_EXT_V6K | ARM_EXT_V6T2);
  190. static const arm_feature_set arm_ext_v6_notm =
  191. ARM_FEATURE_CORE_LOW (ARM_EXT_V6_NOTM);
  192. static const arm_feature_set arm_ext_v6_dsp =
  193. ARM_FEATURE_CORE_LOW (ARM_EXT_V6_DSP);
  194. static const arm_feature_set arm_ext_barrier =
  195. ARM_FEATURE_CORE_LOW (ARM_EXT_BARRIER);
  196. static const arm_feature_set arm_ext_msr =
  197. ARM_FEATURE_CORE_LOW (ARM_EXT_THUMB_MSR);
  198. static const arm_feature_set arm_ext_div = ARM_FEATURE_CORE_LOW (ARM_EXT_DIV);
  199. static const arm_feature_set arm_ext_v7 = ARM_FEATURE_CORE_LOW (ARM_EXT_V7);
  200. static const arm_feature_set arm_ext_v7a = ARM_FEATURE_CORE_LOW (ARM_EXT_V7A);
  201. static const arm_feature_set arm_ext_v7r = ARM_FEATURE_CORE_LOW (ARM_EXT_V7R);
  202. static const arm_feature_set arm_ext_v8r = ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8R);
  203. #ifdef OBJ_ELF
  204. static const arm_feature_set ATTRIBUTE_UNUSED arm_ext_v7m = ARM_FEATURE_CORE_LOW (ARM_EXT_V7M);
  205. #endif
  206. static const arm_feature_set arm_ext_v8 = ARM_FEATURE_CORE_LOW (ARM_EXT_V8);
  207. static const arm_feature_set arm_ext_m =
  208. ARM_FEATURE_CORE (ARM_EXT_V6M | ARM_EXT_V7M,
  209. ARM_EXT2_V8M | ARM_EXT2_V8M_MAIN);
  210. static const arm_feature_set arm_ext_mp = ARM_FEATURE_CORE_LOW (ARM_EXT_MP);
  211. static const arm_feature_set arm_ext_sec = ARM_FEATURE_CORE_LOW (ARM_EXT_SEC);
  212. static const arm_feature_set arm_ext_os = ARM_FEATURE_CORE_LOW (ARM_EXT_OS);
  213. static const arm_feature_set arm_ext_adiv = ARM_FEATURE_CORE_LOW (ARM_EXT_ADIV);
  214. static const arm_feature_set arm_ext_virt = ARM_FEATURE_CORE_LOW (ARM_EXT_VIRT);
  215. static const arm_feature_set arm_ext_pan = ARM_FEATURE_CORE_HIGH (ARM_EXT2_PAN);
  216. static const arm_feature_set arm_ext_v8m = ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8M);
  217. static const arm_feature_set arm_ext_v8m_main =
  218. ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8M_MAIN);
  219. static const arm_feature_set arm_ext_v8_1m_main =
  220. ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8_1M_MAIN);
  221. /* Instructions in ARMv8-M only found in M profile architectures. */
  222. static const arm_feature_set arm_ext_v8m_m_only =
  223. ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8M | ARM_EXT2_V8M_MAIN);
  224. static const arm_feature_set arm_ext_v6t2_v8m =
  225. ARM_FEATURE_CORE_HIGH (ARM_EXT2_V6T2_V8M);
  226. /* Instructions shared between ARMv8-A and ARMv8-M. */
  227. static const arm_feature_set arm_ext_atomics =
  228. ARM_FEATURE_CORE_HIGH (ARM_EXT2_ATOMICS);
  229. #ifdef OBJ_ELF
  230. /* DSP instructions Tag_DSP_extension refers to. */
  231. static const arm_feature_set arm_ext_dsp =
  232. ARM_FEATURE_CORE_LOW (ARM_EXT_V5E | ARM_EXT_V5ExP | ARM_EXT_V6_DSP);
  233. #endif
  234. static const arm_feature_set arm_ext_ras =
  235. ARM_FEATURE_CORE_HIGH (ARM_EXT2_RAS);
  236. /* FP16 instructions. */
  237. static const arm_feature_set arm_ext_fp16 =
  238. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST);
  239. static const arm_feature_set arm_ext_fp16_fml =
  240. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_FML);
  241. static const arm_feature_set arm_ext_v8_2 =
  242. ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8_2A);
  243. static const arm_feature_set arm_ext_v8_3 =
  244. ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8_3A);
  245. static const arm_feature_set arm_ext_sb =
  246. ARM_FEATURE_CORE_HIGH (ARM_EXT2_SB);
  247. static const arm_feature_set arm_ext_predres =
  248. ARM_FEATURE_CORE_HIGH (ARM_EXT2_PREDRES);
  249. static const arm_feature_set arm_ext_bf16 =
  250. ARM_FEATURE_CORE_HIGH (ARM_EXT2_BF16);
  251. static const arm_feature_set arm_ext_i8mm =
  252. ARM_FEATURE_CORE_HIGH (ARM_EXT2_I8MM);
  253. static const arm_feature_set arm_ext_crc =
  254. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC);
  255. static const arm_feature_set arm_ext_cde =
  256. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE);
  257. static const arm_feature_set arm_ext_cde0 =
  258. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE0);
  259. static const arm_feature_set arm_ext_cde1 =
  260. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE1);
  261. static const arm_feature_set arm_ext_cde2 =
  262. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE2);
  263. static const arm_feature_set arm_ext_cde3 =
  264. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE3);
  265. static const arm_feature_set arm_ext_cde4 =
  266. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE4);
  267. static const arm_feature_set arm_ext_cde5 =
  268. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE5);
  269. static const arm_feature_set arm_ext_cde6 =
  270. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE6);
  271. static const arm_feature_set arm_ext_cde7 =
  272. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE7);
  273. static const arm_feature_set arm_arch_any = ARM_ANY;
  274. static const arm_feature_set fpu_any = FPU_ANY;
  275. static const arm_feature_set arm_arch_full ATTRIBUTE_UNUSED = ARM_FEATURE (-1, -1, -1);
  276. static const arm_feature_set arm_arch_t2 = ARM_ARCH_THUMB2;
  277. static const arm_feature_set arm_arch_none = ARM_ARCH_NONE;
  278. static const arm_feature_set arm_cext_iwmmxt2 =
  279. ARM_FEATURE_COPROC (ARM_CEXT_IWMMXT2);
  280. static const arm_feature_set arm_cext_iwmmxt =
  281. ARM_FEATURE_COPROC (ARM_CEXT_IWMMXT);
  282. static const arm_feature_set arm_cext_xscale =
  283. ARM_FEATURE_COPROC (ARM_CEXT_XSCALE);
  284. static const arm_feature_set arm_cext_maverick =
  285. ARM_FEATURE_COPROC (ARM_CEXT_MAVERICK);
  286. static const arm_feature_set fpu_fpa_ext_v1 =
  287. ARM_FEATURE_COPROC (FPU_FPA_EXT_V1);
  288. static const arm_feature_set fpu_fpa_ext_v2 =
  289. ARM_FEATURE_COPROC (FPU_FPA_EXT_V2);
  290. static const arm_feature_set fpu_vfp_ext_v1xd =
  291. ARM_FEATURE_COPROC (FPU_VFP_EXT_V1xD);
  292. static const arm_feature_set fpu_vfp_ext_v1 =
  293. ARM_FEATURE_COPROC (FPU_VFP_EXT_V1);
  294. static const arm_feature_set fpu_vfp_ext_v2 =
  295. ARM_FEATURE_COPROC (FPU_VFP_EXT_V2);
  296. static const arm_feature_set fpu_vfp_ext_v3xd =
  297. ARM_FEATURE_COPROC (FPU_VFP_EXT_V3xD);
  298. static const arm_feature_set fpu_vfp_ext_v3 =
  299. ARM_FEATURE_COPROC (FPU_VFP_EXT_V3);
  300. static const arm_feature_set fpu_vfp_ext_d32 =
  301. ARM_FEATURE_COPROC (FPU_VFP_EXT_D32);
  302. static const arm_feature_set fpu_neon_ext_v1 =
  303. ARM_FEATURE_COPROC (FPU_NEON_EXT_V1);
  304. static const arm_feature_set fpu_vfp_v3_or_neon_ext =
  305. ARM_FEATURE_COPROC (FPU_NEON_EXT_V1 | FPU_VFP_EXT_V3);
  306. static const arm_feature_set mve_ext =
  307. ARM_FEATURE_CORE_HIGH (ARM_EXT2_MVE);
  308. static const arm_feature_set mve_fp_ext =
  309. ARM_FEATURE_CORE_HIGH (ARM_EXT2_MVE_FP);
  310. /* Note: This has more than one bit set, which means using it with
  311. mark_feature_used (which returns if *any* of the bits are set in the current
  312. cpu variant) can give surprising results. */
  313. static const arm_feature_set armv8m_fp =
  314. ARM_FEATURE_COPROC (FPU_VFP_V5_SP_D16);
  315. #ifdef OBJ_ELF
  316. static const arm_feature_set fpu_vfp_fp16 =
  317. ARM_FEATURE_COPROC (FPU_VFP_EXT_FP16);
  318. static const arm_feature_set fpu_neon_ext_fma =
  319. ARM_FEATURE_COPROC (FPU_NEON_EXT_FMA);
  320. #endif
  321. static const arm_feature_set fpu_vfp_ext_fma =
  322. ARM_FEATURE_COPROC (FPU_VFP_EXT_FMA);
  323. static const arm_feature_set fpu_vfp_ext_armv8 =
  324. ARM_FEATURE_COPROC (FPU_VFP_EXT_ARMV8);
  325. static const arm_feature_set fpu_vfp_ext_armv8xd =
  326. ARM_FEATURE_COPROC (FPU_VFP_EXT_ARMV8xD);
  327. static const arm_feature_set fpu_neon_ext_armv8 =
  328. ARM_FEATURE_COPROC (FPU_NEON_EXT_ARMV8);
  329. static const arm_feature_set fpu_crypto_ext_armv8 =
  330. ARM_FEATURE_COPROC (FPU_CRYPTO_EXT_ARMV8);
  331. static const arm_feature_set fpu_neon_ext_v8_1 =
  332. ARM_FEATURE_COPROC (FPU_NEON_EXT_RDMA);
  333. static const arm_feature_set fpu_neon_ext_dotprod =
  334. ARM_FEATURE_COPROC (FPU_NEON_EXT_DOTPROD);
  335. static const arm_feature_set pacbti_ext =
  336. ARM_FEATURE_CORE_HIGH_HIGH (ARM_EXT3_PACBTI);
  337. static int mfloat_abi_opt = -1;
  338. /* Architecture feature bits selected by the last -mcpu/-march or .cpu/.arch
  339. directive. */
  340. static arm_feature_set selected_arch = ARM_ARCH_NONE;
  341. /* Extension feature bits selected by the last -mcpu/-march or .arch_extension
  342. directive. */
  343. static arm_feature_set selected_ext = ARM_ARCH_NONE;
  344. /* Feature bits selected by the last -mcpu/-march or by the combination of the
  345. last .cpu/.arch directive .arch_extension directives since that
  346. directive. */
  347. static arm_feature_set selected_cpu = ARM_ARCH_NONE;
  348. /* FPU feature bits selected by the last -mfpu or .fpu directive. */
  349. static arm_feature_set selected_fpu = FPU_NONE;
  350. /* Feature bits selected by the last .object_arch directive. */
  351. static arm_feature_set selected_object_arch = ARM_ARCH_NONE;
  352. /* Must be long enough to hold any of the names in arm_cpus. */
  353. static const struct arm_ext_table * selected_ctx_ext_table = NULL;
  354. static char selected_cpu_name[20];
  355. extern FLONUM_TYPE generic_floating_point_number;
  356. /* Return if no cpu was selected on command-line. */
  357. static bool
  358. no_cpu_selected (void)
  359. {
  360. return ARM_FEATURE_EQUAL (selected_cpu, arm_arch_none);
  361. }
  362. #ifdef OBJ_ELF
  363. # ifdef EABI_DEFAULT
  364. static int meabi_flags = EABI_DEFAULT;
  365. # else
  366. static int meabi_flags = EF_ARM_EABI_UNKNOWN;
  367. # endif
  368. static int attributes_set_explicitly[NUM_KNOWN_OBJ_ATTRIBUTES];
  369. bool
  370. arm_is_eabi (void)
  371. {
  372. return (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4);
  373. }
  374. #endif
  375. #ifdef OBJ_ELF
  376. /* Pre-defined "_GLOBAL_OFFSET_TABLE_" */
  377. symbolS * GOT_symbol;
  378. #endif
  379. /* 0: assemble for ARM,
  380. 1: assemble for Thumb,
  381. 2: assemble for Thumb even though target CPU does not support thumb
  382. instructions. */
  383. static int thumb_mode = 0;
  384. /* A value distinct from the possible values for thumb_mode that we
  385. can use to record whether thumb_mode has been copied into the
  386. tc_frag_data field of a frag. */
  387. #define MODE_RECORDED (1 << 4)
  388. /* Specifies the intrinsic IT insn behavior mode. */
  389. enum implicit_it_mode
  390. {
  391. IMPLICIT_IT_MODE_NEVER = 0x00,
  392. IMPLICIT_IT_MODE_ARM = 0x01,
  393. IMPLICIT_IT_MODE_THUMB = 0x02,
  394. IMPLICIT_IT_MODE_ALWAYS = (IMPLICIT_IT_MODE_ARM | IMPLICIT_IT_MODE_THUMB)
  395. };
  396. static int implicit_it_mode = IMPLICIT_IT_MODE_ARM;
  397. /* If unified_syntax is true, we are processing the new unified
  398. ARM/Thumb syntax. Important differences from the old ARM mode:
  399. - Immediate operands do not require a # prefix.
  400. - Conditional affixes always appear at the end of the
  401. instruction. (For backward compatibility, those instructions
  402. that formerly had them in the middle, continue to accept them
  403. there.)
  404. - The IT instruction may appear, and if it does is validated
  405. against subsequent conditional affixes. It does not generate
  406. machine code.
  407. Important differences from the old Thumb mode:
  408. - Immediate operands do not require a # prefix.
  409. - Most of the V6T2 instructions are only available in unified mode.
  410. - The .N and .W suffixes are recognized and honored (it is an error
  411. if they cannot be honored).
  412. - All instructions set the flags if and only if they have an 's' affix.
  413. - Conditional affixes may be used. They are validated against
  414. preceding IT instructions. Unlike ARM mode, you cannot use a
  415. conditional affix except in the scope of an IT instruction. */
  416. static bool unified_syntax = false;
  417. /* An immediate operand can start with #, and ld*, st*, pld operands
  418. can contain [ and ]. We need to tell APP not to elide whitespace
  419. before a [, which can appear as the first operand for pld.
  420. Likewise, a { can appear as the first operand for push, pop, vld*, etc. */
  421. const char arm_symbol_chars[] = "#[]{}";
/* Kind of element named by a Neon/MVE type suffix (e.g. ".f32", ".u8").  */
enum neon_el_type
{
  NT_invtype,
  NT_untyped,
  NT_integer,
  NT_float,
  NT_poly,
  NT_signed,
  NT_bfloat,
  NT_unsigned
};
/* One parsed element type: its kind and its size.  */
struct neon_type_el
{
  enum neon_el_type type;
  /* Element size -- NOTE(review): width in bits where the suffixes are
     parsed; confirm against the type-parsing code.  */
  unsigned size;
};
#define NEON_MAX_TYPE_ELS 5

/* The set of element types parsed from one mnemonic's suffixes.  */
struct neon_type
{
  struct neon_type_el el[NEON_MAX_TYPE_ELS];
  /* Number of valid entries in EL.  */
  unsigned elems;
};
/* Where an instruction may sit relative to an IT (Thumb if-then) or
   VPT (MVE vector predication) block.  */
enum pred_instruction_type
{
  OUTSIDE_PRED_INSN,
  INSIDE_VPT_INSN,
  INSIDE_IT_INSN,
  INSIDE_IT_LAST_INSN,
  IF_INSIDE_IT_LAST_INSN,  /* Either outside or inside;
			      if inside, should be the last one.  */
  NEUTRAL_IT_INSN,	   /* This could be either inside or outside,
			      i.e. BKPT and NOP.  */
  IT_INSN,		   /* The IT insn has been parsed.  */
  VPT_INSN,		   /* The VPT/VPST insn has been parsed.  */
  MVE_OUTSIDE_PRED_INSN,   /* Instruction to indicate a MVE instruction without
			      a predication code.  */
  MVE_UNPREDICABLE_INSN,   /* MVE instruction that is non-predicable.  */
};
/* The maximum number of operands we need.  */
#define ARM_IT_MAX_OPERANDS 6
#define ARM_IT_MAX_RELOCS 3

/* Scratch state describing the instruction currently being assembled;
   filled in by the operand parsers and consumed by the encoders (see
   the global INST below).  */
struct arm_it
{
  /* Diagnostic to report when assembly fails; typically one of the
     BAD_* strings defined below.  */
  const char *	error;
  /* The instruction's encoding.  */
  unsigned long instruction;
  /* Instruction size in bytes (cf. INSN_SIZE / THUMB_SIZE).  */
  unsigned int	size;
  /* Explicitly requested size, if any -- NOTE(review): presumably from
     the .N/.W width suffixes; confirm against the suffix handling.  */
  unsigned int	size_req;
  /* Condition code for the instruction.  */
  unsigned int	cond;
  /* "uncond_value" is set to the value in place of the conditional field in
     unconditional versions of the instruction, or -1u if nothing is
     appropriate.  */
  unsigned int	uncond_value;
  /* Element types parsed from the mnemonic's type suffixes.  */
  struct neon_type vectype;
  /* This does not indicate an actual NEON instruction, only that
     the mnemonic accepts neon-style type suffixes.  */
  int		is_neon;
  /* Set to the opcode if the instruction needs relaxation.
     Zero if the instruction is not relaxed.  */
  unsigned long	relax;
  /* Relocations attached to the instruction's operands.  */
  struct
  {
    bfd_reloc_code_real_type type;
    expressionS		     exp;
    int			     pc_rel;
  } relocs[ARM_IT_MAX_RELOCS];

  /* Position of this instruction relative to IT/VPT blocks.  */
  enum pred_instruction_type pred_insn_type;

  struct
  {
    unsigned reg;
    signed int imm;
    struct neon_type_el vectype;
    unsigned present	: 1;  /* Operand present.  */
    unsigned isreg	: 1;  /* Operand was a register.  */
    unsigned immisreg	: 2;  /* .imm field is a second register.
				 0: imm, 1: gpr, 2: MVE Q-register.  */
    unsigned isscalar	: 2;  /* Operand is a (SIMD) scalar:
				 0) not scalar,
				 1) Neon scalar,
				 2) MVE scalar.  */
    unsigned immisalign	: 1;  /* Immediate is an alignment specifier.  */
    unsigned immisfloat : 1;  /* Immediate was parsed as a float.  */
    /* Note: we abuse "regisimm" to mean "is Neon register" in VMOV
       instructions.  This allows us to disambiguate ARM <-> vector insns.  */
    unsigned regisimm	: 1;  /* 64-bit immediate, reg forms high 32 bits.  */
    unsigned isvec	: 1;  /* Is a single, double or quad VFP/Neon reg.  */
    unsigned isquad	: 1;  /* Operand is SIMD quad register.  */
    unsigned issingle	: 1;  /* Operand is VFP single-precision register.  */
    unsigned iszr	: 1;  /* Operand is ZR register.  */
    unsigned hasreloc	: 1;  /* Operand has relocation suffix.  */
    unsigned writeback	: 1;  /* Operand has trailing !  */
    unsigned preind	: 1;  /* Preindexed address.  */
    unsigned postind	: 1;  /* Postindexed address.  */
    unsigned negative	: 1;  /* Index register was negated.  */
    unsigned shifted	: 1;  /* Shift applied to operation.  */
    unsigned shift_kind : 3;  /* Shift operation (enum shift_kind).  */
  } operands[ARM_IT_MAX_OPERANDS];
};

/* The instruction currently being assembled.  */
static struct arm_it inst;
/* Number of rows in fp_values below.  */
#define NUM_FLOAT_VALS 8

/* Floating point constants, as strings, terminated by a null pointer --
   NOTE(review): presumably the set of immediates the legacy FPA
   encodings accept; confirm against the FPA encoding code.  */
const char * fp_const[] =
{
  "0.0", "1.0", "2.0", "3.0", "4.0", "5.0", "0.5", "10.0", 0
};

/* The constants above in LITTLENUM (generic float) form.  */
LITTLENUM_TYPE fp_values[NUM_FLOAT_VALS][MAX_LITTLENUMS];
  526. #define FAIL (-1)
  527. #define SUCCESS (0)
  528. #define SUFF_S 1
  529. #define SUFF_D 2
  530. #define SUFF_E 3
  531. #define SUFF_P 4
  532. #define CP_T_X 0x00008000
  533. #define CP_T_Y 0x00400000
  534. #define CONDS_BIT 0x00100000
  535. #define LOAD_BIT 0x00100000
  536. #define DOUBLE_LOAD_FLAG 0x00000001
/* Maps a condition-code mnemonic onto its encoding value
   (cf. COND_ALWAYS below).  */
struct asm_cond
{
  const char *	 template_name;
  unsigned long	 value;
};
  542. #define COND_ALWAYS 0xE
/* Maps a PSR name/flags suffix onto its field encoding (see the
   SPSR_BIT and PSR_* definitions below).  */
struct asm_psr
{
  const char *	 template_name;
  unsigned long	 field;
};
/* A named barrier option together with its encoding value and the
   architecture feature set that provides it.  */
struct asm_barrier_opt
{
  const char *	  template_name;
  unsigned long	  value;
  const arm_feature_set arch;
};
  554. /* The bit that distinguishes CPSR and SPSR. */
  555. #define SPSR_BIT (1 << 22)
  556. /* The individual PSR flag bits. */
  557. #define PSR_c (1 << 16)
  558. #define PSR_x (1 << 17)
  559. #define PSR_s (1 << 18)
  560. #define PSR_f (1 << 19)
/* Maps a relocation suffix name onto a BFD relocation code.  */
struct reloc_entry
{
  const char *		     name;
  bfd_reloc_code_real_type   reloc;
};
/* Positions a VFP register field can occupy in an instruction encoding:
   single (S) or double (D) precision, in the d/m/n operand slots.  */
enum vfp_reg_pos
{
  VFP_REG_Sd, VFP_REG_Sm, VFP_REG_Sn,
  VFP_REG_Dd, VFP_REG_Dm, VFP_REG_Dn
};
/* Addressing modes for the VFP load/store-multiple instructions:
   increment-after, decrement-before, and their "X" variants.  */
enum vfp_ldstm_type
{
  VFP_LDSTMIA, VFP_LDSTMDB, VFP_LDSTMIAX, VFP_LDSTMDBX
};
/* Bits for DEFINED field in neon_typed_alias.  */
#define NTA_HASTYPE  1
#define NTA_HASINDEX 2

/* Extra information attached to a register alias created with .dn/.qn:
   an optional element type and/or a scalar index.  */
struct neon_typed_alias
{
  /* NTA_HASTYPE / NTA_HASINDEX bits saying which fields are valid.  */
  unsigned char	       defined;
  /* Scalar index, valid when NTA_HASINDEX is set.  */
  unsigned char	       index;
  /* Element type, valid when NTA_HASTYPE is set.  */
  struct neon_type_el  eltype;
};
/* ARM register categories.  This includes coprocessor numbers and various
   architecture extensions' registers.  Each entry should have an error message
   in reg_expected_msgs below.  */
enum arm_reg_type
{
  REG_TYPE_RN,		/* ARM core register.  */
  REG_TYPE_CP,		/* Co-processor number.  */
  REG_TYPE_CN,		/* Co-processor register.  */
  REG_TYPE_FN,		/* FPA register.  */
  REG_TYPE_VFS,		/* VFP single precision register.  */
  REG_TYPE_VFD,		/* VFP/Neon double precision register.  */
  REG_TYPE_NQ,		/* Neon quad precision register.  */
  REG_TYPE_VFSD,	/* VFP single or double precision register.  */
  REG_TYPE_NDQ,		/* Neon double or quad precision register.  */
  REG_TYPE_NSD,		/* Neon single or double precision register.  */
  REG_TYPE_NSDQ,	/* VFP single, double or Neon quad precision.  */
  REG_TYPE_VFC,		/* VFP system register.  */
  REG_TYPE_MVF,		/* Maverick MVF register.  */
  REG_TYPE_MVD,		/* Maverick MVD register.  */
  REG_TYPE_MVFX,	/* Maverick MVFX register.  */
  REG_TYPE_MVDX,	/* Maverick MVDX register.  */
  REG_TYPE_MVAX,	/* Maverick MVAX register.  */
  REG_TYPE_MQ,		/* MVE vector register.  */
  REG_TYPE_DSPSC,	/* Maverick DSPSC register.  */
  REG_TYPE_MMXWR,	/* iWMMXt data register.  */
  REG_TYPE_MMXWC,	/* iWMMXt control register.  */
  REG_TYPE_MMXWCG,	/* iWMMXt scalar register.  */
  REG_TYPE_XSCALE,	/* XScale accumulator register.  */
  REG_TYPE_RNB,		/* Core register; has an empty diagnostic entry.  */
  REG_TYPE_ZR,		/* ZR register.  */
  REG_TYPE_PSEUDO	/* Pseudo register.  */
};
/* Structure for a hash table entry for a register.
   If TYPE is REG_TYPE_VFD or REG_TYPE_NQ, the NEON field can point to extra
   information which states whether a vector type or index is specified (for a
   register alias created with .dn or .qn).  Otherwise NEON should be NULL.  */
struct reg_entry
{
  /* Register name as written in the source.  */
  const char *		     name;
  /* Register number/encoding.  */
  unsigned int		     number;
  /* An enum arm_reg_type value.  */
  unsigned char		     type;
  /* Nonzero for built-in entries as opposed to user aliases --
     NOTE(review): confirm against the register table construction.  */
  unsigned char		     builtin;
  /* See the comment above this struct.  */
  struct neon_typed_alias *  neon;
};
/* Diagnostics used when we don't get a register of the expected type.
   Keep this table in sync with enum arm_reg_type above.  */
const char * const reg_expected_msgs[] =
{
  [REG_TYPE_RN]	    = N_("ARM register expected"),
  [REG_TYPE_CP]	    = N_("bad or missing co-processor number"),
  [REG_TYPE_CN]	    = N_("co-processor register expected"),
  [REG_TYPE_FN]	    = N_("FPA register expected"),
  [REG_TYPE_VFS]    = N_("VFP single precision register expected"),
  [REG_TYPE_VFD]    = N_("VFP/Neon double precision register expected"),
  [REG_TYPE_NQ]	    = N_("Neon quad precision register expected"),
  [REG_TYPE_VFSD]   = N_("VFP single or double precision register expected"),
  [REG_TYPE_NDQ]    = N_("Neon double or quad precision register expected"),
  [REG_TYPE_NSD]    = N_("Neon single or double precision register expected"),
  [REG_TYPE_NSDQ]   = N_("VFP single, double or Neon quad precision register"
			 " expected"),
  [REG_TYPE_VFC]    = N_("VFP system register expected"),
  [REG_TYPE_MVF]    = N_("Maverick MVF register expected"),
  [REG_TYPE_MVD]    = N_("Maverick MVD register expected"),
  [REG_TYPE_MVFX]   = N_("Maverick MVFX register expected"),
  [REG_TYPE_MVDX]   = N_("Maverick MVDX register expected"),
  [REG_TYPE_MVAX]   = N_("Maverick MVAX register expected"),
  [REG_TYPE_DSPSC]  = N_("Maverick DSPSC register expected"),
  [REG_TYPE_MMXWR]  = N_("iWMMXt data register expected"),
  [REG_TYPE_MMXWC]  = N_("iWMMXt control register expected"),
  [REG_TYPE_MMXWCG] = N_("iWMMXt scalar register expected"),
  [REG_TYPE_XSCALE] = N_("XScale accumulator register expected"),
  [REG_TYPE_MQ]	    = N_("MVE vector register expected"),
  /* REG_TYPE_RNB deliberately has no message text.  */
  [REG_TYPE_RNB]    = "",
  [REG_TYPE_ZR]	    = N_("ZR register expected"),
  [REG_TYPE_PSEUDO] = N_("Pseudo register expected"),
};
  659. /* Some well known registers that we refer to directly elsewhere. */
  660. #define REG_R12 12
  661. #define REG_SP 13
  662. #define REG_LR 14
  663. #define REG_PC 15
/* ARM instructions take 4 bytes in the object file, Thumb instructions
   take 2:  */
#define INSN_SIZE	4

/* An entry in the mnemonic table.  */
struct asm_opcode
{
  /* Basic string to match.  */
  const char * template_name;

  /* Parameters to instruction.  */
  unsigned int operands[8];

  /* Conditional tag - see opcode_lookup.  */
  unsigned int tag : 4;

  /* Basic instruction code.  */
  unsigned int avalue;

  /* Thumb-format instruction code.  */
  unsigned int tvalue;

  /* Which architecture variant provides this instruction.  */
  const arm_feature_set * avariant;
  const arm_feature_set * tvariant;

  /* Function to call to encode instruction in ARM format.  */
  void (* aencode) (void);

  /* Function to call to encode instruction in Thumb format.  */
  void (* tencode) (void);

  /* Indicates whether this instruction may be vector predicated.  */
  unsigned int mayBeVecPred : 1;
};
  689. /* Defines for various bits that we will want to toggle. */
  690. #define INST_IMMEDIATE 0x02000000
  691. #define OFFSET_REG 0x02000000
  692. #define HWOFFSET_IMM 0x00400000
  693. #define SHIFT_BY_REG 0x00000010
  694. #define PRE_INDEX 0x01000000
  695. #define INDEX_UP 0x00800000
  696. #define WRITE_BACK 0x00200000
  697. #define LDM_TYPE_2_OR_3 0x00400000
  698. #define CPSI_MMOD 0x00020000
  699. #define LITERAL_MASK 0xf000f000
  700. #define OPCODE_MASK 0xfe1fffff
  701. #define V4_STR_BIT 0x00000020
  702. #define VLDR_VMOV_SAME 0x0040f000
  703. #define T2_SUBS_PC_LR 0xf3de8f00
  704. #define DATA_OP_SHIFT 21
  705. #define SBIT_SHIFT 20
  706. #define T2_OPCODE_MASK 0xfe1fffff
  707. #define T2_DATA_OP_SHIFT 21
  708. #define T2_SBIT_SHIFT 20
  709. #define A_COND_MASK 0xf0000000
  710. #define A_PUSH_POP_OP_MASK 0x0fff0000
/* Opcodes for pushing/popping registers to/from the stack.  */
  712. #define A1_OPCODE_PUSH 0x092d0000
  713. #define A2_OPCODE_PUSH 0x052d0004
  714. #define A2_OPCODE_POP 0x049d0004
  715. /* Codes to distinguish the arithmetic instructions. */
  716. #define OPCODE_AND 0
  717. #define OPCODE_EOR 1
  718. #define OPCODE_SUB 2
  719. #define OPCODE_RSB 3
  720. #define OPCODE_ADD 4
  721. #define OPCODE_ADC 5
  722. #define OPCODE_SBC 6
  723. #define OPCODE_RSC 7
  724. #define OPCODE_TST 8
  725. #define OPCODE_TEQ 9
  726. #define OPCODE_CMP 10
  727. #define OPCODE_CMN 11
  728. #define OPCODE_ORR 12
  729. #define OPCODE_MOV 13
  730. #define OPCODE_BIC 14
  731. #define OPCODE_MVN 15
  732. #define T2_OPCODE_AND 0
  733. #define T2_OPCODE_BIC 1
  734. #define T2_OPCODE_ORR 2
/* Thumb-2 data-processing opcode values.  */
#define T2_OPCODE_ORN 3
#define T2_OPCODE_EOR 4
#define T2_OPCODE_ADD 8
#define T2_OPCODE_ADC 10
#define T2_OPCODE_SBC 11
#define T2_OPCODE_SUB 13
#define T2_OPCODE_RSB 14

/* 16-bit Thumb instruction opcode templates; operand fields are
   OR-ed into these fixed base encodings when instructions are
   assembled.  */
#define T_OPCODE_MUL 0x4340
#define T_OPCODE_TST 0x4200
#define T_OPCODE_CMN 0x42c0
#define T_OPCODE_NEG 0x4240
#define T_OPCODE_MVN 0x43c0

#define T_OPCODE_ADD_R3 0x1800
#define T_OPCODE_SUB_R3 0x1a00
#define T_OPCODE_ADD_HI 0x4400
#define T_OPCODE_ADD_ST 0xb000
#define T_OPCODE_SUB_ST 0xb080
#define T_OPCODE_ADD_SP 0xa800
#define T_OPCODE_ADD_PC 0xa000
#define T_OPCODE_ADD_I8 0x3000
#define T_OPCODE_SUB_I8 0x3800
#define T_OPCODE_ADD_I3 0x1c00
#define T_OPCODE_SUB_I3 0x1e00

#define T_OPCODE_ASR_R 0x4100
#define T_OPCODE_LSL_R 0x4080
#define T_OPCODE_LSR_R 0x40c0
#define T_OPCODE_ROR_R 0x41c0
#define T_OPCODE_ASR_I 0x1000
#define T_OPCODE_LSL_I 0x0000
#define T_OPCODE_LSR_I 0x0800

#define T_OPCODE_MOV_I8 0x2000
#define T_OPCODE_CMP_I8 0x2800
#define T_OPCODE_CMP_LR 0x4280
#define T_OPCODE_MOV_HR 0x4600
#define T_OPCODE_CMP_HR 0x4500

#define T_OPCODE_LDR_PC 0x4800
#define T_OPCODE_LDR_SP 0x9800
#define T_OPCODE_STR_SP 0x9000
#define T_OPCODE_LDR_IW 0x6800
#define T_OPCODE_STR_IW 0x6000
#define T_OPCODE_LDR_IH 0x8800
#define T_OPCODE_STR_IH 0x8000
#define T_OPCODE_LDR_IB 0x7800
#define T_OPCODE_STR_IB 0x7000
#define T_OPCODE_LDR_RW 0x5800
#define T_OPCODE_STR_RW 0x5000
#define T_OPCODE_LDR_RH 0x5a00
#define T_OPCODE_STR_RH 0x5200
#define T_OPCODE_LDR_RB 0x5c00
#define T_OPCODE_STR_RB 0x5400

#define T_OPCODE_PUSH 0xb400
#define T_OPCODE_POP 0xbc00
#define T_OPCODE_BRANCH 0xe000

#define THUMB_SIZE 2 /* Size of thumb instruction.  */
#define THUMB_PP_PC_LR 0x0100
#define THUMB_LOAD_BIT 0x0800
#define THUMB2_LOAD_BIT 0x00100000

/* Diagnostic messages.  Wrapped in _() so gettext picks them up for
   translation; several are multi-line string-literal concatenations.  */
#define BAD_SYNTAX _("syntax error")
#define BAD_ARGS _("bad arguments to instruction")
#define BAD_SP _("r13 not allowed here")
#define BAD_PC _("r15 not allowed here")
#define BAD_ODD _("Odd register not allowed here")
#define BAD_EVEN _("Even register not allowed here")
#define BAD_COND _("instruction cannot be conditional")
#define BAD_OVERLAP _("registers may not be the same")
#define BAD_HIREG _("lo register required")
#define BAD_THUMB32 _("instruction not supported in Thumb16 mode")
#define BAD_ADDR_MODE _("instruction does not accept this addressing mode")
#define BAD_BRANCH _("branch must be last instruction in IT block")
#define BAD_BRANCH_OFF _("branch out of range or not a multiple of 2")
#define BAD_NO_VPT _("instruction not allowed in VPT block")
#define BAD_NOT_IT _("instruction not allowed in IT block")
#define BAD_NOT_VPT _("instruction missing MVE vector predication code")
#define BAD_FPU _("selected FPU does not support instruction")
#define BAD_OUT_IT _("thumb conditional instruction should be in IT block")
#define BAD_OUT_VPT \
	_("vector predicated instruction should be in VPT/VPST block")
#define BAD_IT_COND _("incorrect condition in IT block")
#define BAD_VPT_COND _("incorrect condition in VPT/VPST block")
#define BAD_IT_IT _("IT falling in the range of a previous IT block")
#define MISSING_FNSTART _("missing .fnstart before unwinding directive")
#define BAD_PC_ADDRESSING \
	_("cannot use register index with PC-relative addressing")
#define BAD_PC_WRITEBACK \
	_("cannot use writeback with PC-relative addressing")
#define BAD_RANGE _("branch out of range")
#define BAD_FP16 _("selected processor does not support fp16 instruction")
#define BAD_BF16 _("selected processor does not support bf16 instruction")
#define BAD_CDE _("selected processor does not support cde instruction")
#define BAD_CDE_COPROC _("coprocessor for insn is not enabled for cde")
#define UNPRED_REG(R) _("using " R " results in unpredictable behaviour")
#define THUMB1_RELOC_ONLY _("relocation valid in thumb1 code only")
#define MVE_NOT_IT _("Warning: instruction is UNPREDICTABLE in an IT " \
		     "block")
#define MVE_NOT_VPT _("Warning: instruction is UNPREDICTABLE in a VPT " \
		      "block")
#define MVE_BAD_PC _("Warning: instruction is UNPREDICTABLE with PC" \
		     " operand")
#define MVE_BAD_SP _("Warning: instruction is UNPREDICTABLE with SP" \
		     " operand")
#define BAD_SIMD_TYPE _("bad type in SIMD instruction")
#define BAD_MVE_AUTO \
  _("GAS auto-detection mode and -march=all is deprecated for MVE, please" \
    " use a valid -march or -mcpu option.")
#define BAD_MVE_SRCDEST _("Warning: 32-bit element size and same destination "\
			  "and source operands makes instruction UNPREDICTABLE")
#define BAD_EL_TYPE _("bad element type for instruction")
#define MVE_BAD_QREG _("MVE vector register Q[0..7] expected")
  843. #define BAD_PACBTI _("selected processor does not support PACBTI extention")
/* Hash tables populated during assembler initialisation (outside this
   fragment).  arm_reg_hsh maps register names to reg_entry structures
   (see arm_reg_parse_multi); the others, going by their names, cover
   instruction mnemonics, condition codes, vector condition codes,
   shift names, PSR names, v7-M PSR names, relocation names and
   barrier options -- confirm against their setup code.  */
static htab_t arm_ops_hsh;
static htab_t arm_cond_hsh;
static htab_t arm_vcond_hsh;
static htab_t arm_shift_hsh;
static htab_t arm_psr_hsh;
static htab_t arm_v7m_psr_hsh;
static htab_t arm_reg_hsh;
static htab_t arm_reloc_hsh;
static htab_t arm_barrier_opt_hsh;
  853. /* Stuff needed to resolve the label ambiguity
  854. As:
  855. ...
  856. label: <insn>
  857. may differ from:
  858. ...
  859. label:
  860. <insn> */
  861. symbolS * last_label_seen;
  862. static int label_is_thumb_function_name = false;
/* Literal pool structure.  Held on a per-section
   and per-sub-section basis.  */

#define MAX_LITERAL_POOL_SIZE 1024
typedef struct literal_pool
{
  /* Constants waiting to be emitted.  */
  expressionS literals [MAX_LITERAL_POOL_SIZE];
  /* Index of the first unused slot in LITERALS.  */
  unsigned int next_free_entry;
  /* Identifier for this pool.  */
  unsigned int id;
  /* Symbol marking the pool's location.  */
  symbolS * symbol;
  /* Section and sub-section this pool belongs to.  */
  segT section;
  subsegT sub_section;
#ifdef OBJ_ELF
  /* Per-entry source locations for DWARF line info (ELF only).  */
  struct dwarf2_line_info locs [MAX_LITERAL_POOL_SIZE];
#endif
  /* Next pool in the singly-linked list of pools.  */
  struct literal_pool * next;
  unsigned int alignment;
} literal_pool;

/* Pointer to a linked list of literal pools.  */
literal_pool * list_of_pools = NULL;
/* State machine for the .asmfunc/.endasmfunc directive pair (names of
   the states describe what is expected next).  */
typedef enum asmfunc_states
{
  OUTSIDE_ASMFUNC,
  WAITING_ASMFUNC_NAME,
  WAITING_ENDASMFUNC
} asmfunc_states;

static asmfunc_states asmfunc_state = OUTSIDE_ASMFUNC;

/* Current predication (IT/VPT) state.  For ELF it is kept in the
   per-segment info so each section tracks its own block; otherwise a
   single file-scope variable suffices.  */
#ifdef OBJ_ELF
#  define now_pred seg_info (now_seg)->tc_segment_info_data.current_pred
#else
static struct current_pred now_pred;
#endif
  894. static inline int
  895. now_pred_compatible (int cond)
  896. {
  897. return (cond & ~1) == (now_pred.cc & ~1);
  898. }
  899. static inline int
  900. conditional_insn (void)
  901. {
  902. return inst.cond != COND_ALWAYS;
  903. }
  904. static int in_pred_block (void);
  905. static int handle_pred_state (void);
  906. static void force_automatic_it_block_close (void);
  907. static void it_fsm_post_encode (void);
  908. #define set_pred_insn_type(type) \
  909. do \
  910. { \
  911. inst.pred_insn_type = type; \
  912. if (handle_pred_state () == FAIL) \
  913. return; \
  914. } \
  915. while (0)
  916. #define set_pred_insn_type_nonvoid(type, failret) \
  917. do \
  918. { \
  919. inst.pred_insn_type = type; \
  920. if (handle_pred_state () == FAIL) \
  921. return failret; \
  922. } \
  923. while(0)
  924. #define set_pred_insn_type_last() \
  925. do \
  926. { \
  927. if (inst.cond == COND_ALWAYS) \
  928. set_pred_insn_type (IF_INSIDE_IT_LAST_INSN); \
  929. else \
  930. set_pred_insn_type (INSIDE_IT_LAST_INSN); \
  931. } \
  932. while (0)
  933. /* Toggle value[pos]. */
  934. #define TOGGLE_BIT(value, pos) (value ^ (1 << pos))
/* Pure syntax.  */

/* This array holds the chars that always start a comment.  If the
   pre-processor is disabled, these aren't very useful.  */
char arm_comment_chars[] = "@";

/* This array holds the chars that only start a comment at the beginning of
   a line.  If the line seems to have the form '# 123 filename'
   .line and .file directives will appear in the pre-processed output.  */
/* Note that input_file.c hand checks for '#' at the beginning of the
   first line of the input file.  This is because the compiler outputs
   #NO_APP at the beginning of its output.  */
/* Also note that comments like this one will always work.  */
const char line_comment_chars[] = "#";

char arm_line_separator_chars[] = ";";

/* Chars that can be used to separate mant
   from exp in floating point numbers.  */
const char EXP_CHARS[] = "eE";

/* Chars that mean this number is a floating point constant.  */
/* As in 0f12.456  */
/* or    0d1.2345e12  */
const char FLT_CHARS[] = "rRsSfFdDxXeEpPHh";

/* Prefix characters that indicate the start of an immediate
   value.  */
#define is_immediate_prefix(C) ((C) == '#' || (C) == '$')

/* Separator character handling.  Note: deliberately skips at most ONE
   leading space.  */
#define skip_whitespace(str)  do { if (*(str) == ' ') ++(str); } while (0)

/* Encoding used for 16-bit floating-point literals.  */
enum fp_16bit_format
{
  ARM_FP16_FORMAT_IEEE = 0x1,
  ARM_FP16_FORMAT_ALTERNATIVE = 0x2,
  ARM_FP16_FORMAT_DEFAULT = 0x3
};

/* Currently selected fp16 format; presumably adjustable elsewhere
   (directive/option handling is outside this fragment).  */
static enum fp_16bit_format fp16_format = ARM_FP16_FORMAT_DEFAULT;
  967. static inline int
  968. skip_past_char (char ** str, char c)
  969. {
  970. /* PR gas/14987: Allow for whitespace before the expected character. */
  971. skip_whitespace (*str);
  972. if (**str == c)
  973. {
  974. (*str)++;
  975. return SUCCESS;
  976. }
  977. else
  978. return FAIL;
  979. }
  980. #define skip_past_comma(str) skip_past_char (str, ',')
  981. /* Arithmetic expressions (possibly involving symbols). */
  982. /* Return TRUE if anything in the expression is a bignum. */
  983. static bool
  984. walk_no_bignums (symbolS * sp)
  985. {
  986. if (symbol_get_value_expression (sp)->X_op == O_big)
  987. return true;
  988. if (symbol_get_value_expression (sp)->X_add_symbol)
  989. {
  990. return (walk_no_bignums (symbol_get_value_expression (sp)->X_add_symbol)
  991. || (symbol_get_value_expression (sp)->X_op_symbol
  992. && walk_no_bignums (symbol_get_value_expression (sp)->X_op_symbol)));
  993. }
  994. return false;
  995. }
/* Set while my_get_expression() is executing, so that md_operand()
   knows to mark unparseable operands as O_illegal.  */
static bool in_my_get_expression = false;

/* Third argument to my_get_expression.  */
#define GE_NO_PREFIX 0
#define GE_IMM_PREFIX 1
#define GE_OPT_PREFIX 2

/* This is a bit of a hack. Use an optional prefix, and also allow big (64-bit)
   immediates, as can be used in Neon VMVN and VMOV immediate instructions.  */
#define GE_OPT_PREFIX_BIG 3
/* Parse an expression at *STR into EP, applying the immediate-prefix
   rules selected by PREFIX_MODE (one of the GE_* values above).
   On success returns SUCCESS and advances *STR past the expression.
   On failure sets inst.error (if not already set) and returns a
   non-zero value: FAIL when a required '#'/'$' prefix is missing, 1
   otherwise.  NOTE(review): the two failure values differ; callers
   presumably test only zero/non-zero -- confirm before relying on the
   distinction.  */
static int
my_get_expression (expressionS * ep, char ** str, int prefix_mode)
{
  char * save_in;

  /* In unified syntax, all prefixes are optional.  */
  if (unified_syntax)
    prefix_mode = (prefix_mode == GE_OPT_PREFIX_BIG) ? prefix_mode
		  : GE_OPT_PREFIX;

  switch (prefix_mode)
    {
    case GE_NO_PREFIX: break;
    case GE_IMM_PREFIX:
      if (!is_immediate_prefix (**str))
	{
	  inst.error = _("immediate expression requires a # prefix");
	  return FAIL;
	}
      (*str)++;
      break;
    case GE_OPT_PREFIX:
    case GE_OPT_PREFIX_BIG:
      if (is_immediate_prefix (**str))
	(*str)++;
      break;
    default:
      abort ();
    }

  memset (ep, 0, sizeof (expressionS));

  /* The generic expression code reads from input_line_pointer; point
     it at our text for the duration and restore it afterwards.  */
  save_in = input_line_pointer;
  input_line_pointer = *str;
  in_my_get_expression = true;
  expression (ep);
  in_my_get_expression = false;

  if (ep->X_op == O_illegal || ep->X_op == O_absent)
    {
      /* We found a bad or missing expression in md_operand().  */
      *str = input_line_pointer;
      input_line_pointer = save_in;
      if (inst.error == NULL)
	inst.error = (ep->X_op == O_absent
		      ? _("missing expression") :_("bad expression"));
      return 1;
    }

  /* Get rid of any bignums now, so that we don't generate an error for which
     we can't establish a line number later on.  Big numbers are never valid
     in instructions, which is where this routine is always called.  */
  if (prefix_mode != GE_OPT_PREFIX_BIG
      && (ep->X_op == O_big
	  || (ep->X_add_symbol
	      && (walk_no_bignums (ep->X_add_symbol)
		  || (ep->X_op_symbol
		      && walk_no_bignums (ep->X_op_symbol))))))
    {
      inst.error = _("invalid constant");
      *str = input_line_pointer;
      input_line_pointer = save_in;
      return 1;
    }

  *str = input_line_pointer;
  input_line_pointer = save_in;
  return SUCCESS;
}
/* Turn a string in input_line_pointer into a floating point constant
   of type TYPE, and store the appropriate bytes in *LITP.  The number
   of LITTLENUMS emitted is stored in *SIZEP.  An error message is
   returned, or NULL on OK.

   Note that fp constants aren't represent in the normal way on the ARM.
   In big endian mode, things are as expected.  However, in little endian
   mode fp constants are big-endian word-wise, and little-endian byte-wise
   within the words.  For example, (double) 1.1 in big endian mode is
   the byte sequence 3f f1 99 99 99 99 99 9a, and in little endian mode is
   the byte sequence 99 99 f1 3f 9a 99 99 99.

   ??? The format of 12 byte floats is uncertain according to gcc's arm.h.  */
const char *
md_atof (int type, char * litP, int * sizeP)
{
  int prec;
  LITTLENUM_TYPE words[MAX_LITTLENUMS];
  char *t;
  int i;

  /* PREC is the number of LITTLENUMs for the requested width.  */
  switch (type)
    {
    case 'H':
    case 'h':
    /* bfloat16, despite not being part of the IEEE specification, can also
       be handled by atof_ieee().  */
    case 'b':
      prec = 1;
      break;

    case 'f':
    case 'F':
    case 's':
    case 'S':
      prec = 2;
      break;

    case 'd':
    case 'D':
    case 'r':
    case 'R':
      prec = 4;
      break;

    case 'x':
    case 'X':
      prec = 5;
      break;

    case 'p':
    case 'P':
      prec = 5;
      break;

    default:
      *sizeP = 0;
      return _("Unrecognized or unsupported floating point constant");
    }

  t = atof_ieee (input_line_pointer, type, words);
  if (t)
    input_line_pointer = t;
  *sizeP = prec * sizeof (LITTLENUM_TYPE);

  if (target_big_endian || prec == 1)
    /* Big-endian target (or a single littlenum): emit in order.  */
    for (i = 0; i < prec; i++)
      {
	md_number_to_chars (litP, (valueT) words[i], sizeof (LITTLENUM_TYPE));
	litP += sizeof (LITTLENUM_TYPE);
      }
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_endian_pure))
    /* Pure little-endian FP format: emit littlenums in reverse.  */
    for (i = prec - 1; i >= 0; i--)
      {
	md_number_to_chars (litP, (valueT) words[i], sizeof (LITTLENUM_TYPE));
	litP += sizeof (LITTLENUM_TYPE);
      }
  else
    /* For a 4 byte float the order of elements in `words' is 1 0.
       For an 8 byte float the order is 1 0 3 2.  */
    for (i = 0; i < prec; i += 2)
      {
	md_number_to_chars (litP, (valueT) words[i + 1],
			    sizeof (LITTLENUM_TYPE));
	md_number_to_chars (litP + sizeof (LITTLENUM_TYPE),
			    (valueT) words[i], sizeof (LITTLENUM_TYPE));
	litP += 2 * sizeof (LITTLENUM_TYPE);
      }

  return NULL;
}
  1146. /* We handle all bad expressions here, so that we can report the faulty
  1147. instruction in the error message. */
  1148. void
  1149. md_operand (expressionS * exp)
  1150. {
  1151. if (in_my_get_expression)
  1152. exp->X_op = O_illegal;
  1153. }
  1154. /* Immediate values. */
  1155. #ifdef OBJ_ELF
  1156. /* Generic immediate-value read function for use in directives.
  1157. Accepts anything that 'expression' can fold to a constant.
  1158. *val receives the number. */
  1159. static int
  1160. immediate_for_directive (int *val)
  1161. {
  1162. expressionS exp;
  1163. exp.X_op = O_illegal;
  1164. if (is_immediate_prefix (*input_line_pointer))
  1165. {
  1166. input_line_pointer++;
  1167. expression (&exp);
  1168. }
  1169. if (exp.X_op != O_constant)
  1170. {
  1171. as_bad (_("expected #constant"));
  1172. ignore_rest_of_line ();
  1173. return FAIL;
  1174. }
  1175. *val = exp.X_add_number;
  1176. return SUCCESS;
  1177. }
  1178. #endif
/* Register parsing.  */

/* Generic register parser.  CCP points to what should be the
   beginning of a register name.  If it is indeed a valid register
   name, advance CCP over it and return the reg_entry structure;
   otherwise return NULL.  Does not issue diagnostics.  */
static struct reg_entry *
arm_reg_parse_multi (char **ccp)
{
  char *start = *ccp;
  char *p;
  struct reg_entry *reg;

  skip_whitespace (start);

#ifdef REGISTER_PREFIX
  /* Targets defining REGISTER_PREFIX require it before every
     register name.  */
  if (*start != REGISTER_PREFIX)
    return NULL;
  start++;
#endif
#ifdef OPTIONAL_REGISTER_PREFIX
  if (*start == OPTIONAL_REGISTER_PREFIX)
    start++;
#endif

  /* A register name starts with a letter and continues with
     alphanumerics or '_'.  */
  p = start;
  if (!ISALPHA (*p) || !is_name_beginner (*p))
    return NULL;

  do
    p++;
  while (ISALPHA (*p) || ISDIGIT (*p) || *p == '_');

  /* Look the candidate name up in the register hash table.  */
  reg = (struct reg_entry *) str_hash_find_n (arm_reg_hsh, start, p - start);

  if (!reg)
    return NULL;

  *ccp = p;
  return reg;
}
  1212. static int
  1213. arm_reg_alt_syntax (char **ccp, char *start, struct reg_entry *reg,
  1214. enum arm_reg_type type)
  1215. {
  1216. /* Alternative syntaxes are accepted for a few register classes. */
  1217. switch (type)
  1218. {
  1219. case REG_TYPE_MVF:
  1220. case REG_TYPE_MVD:
  1221. case REG_TYPE_MVFX:
  1222. case REG_TYPE_MVDX:
  1223. /* Generic coprocessor register names are allowed for these. */
  1224. if (reg && reg->type == REG_TYPE_CN)
  1225. return reg->number;
  1226. break;
  1227. case REG_TYPE_CP:
  1228. /* For backward compatibility, a bare number is valid here. */
  1229. {
  1230. unsigned long processor = strtoul (start, ccp, 10);
  1231. if (*ccp != start && processor <= 15)
  1232. return processor;
  1233. }
  1234. /* Fall through. */
  1235. case REG_TYPE_MMXWC:
  1236. /* WC includes WCG. ??? I'm not sure this is true for all
  1237. instructions that take WC registers. */
  1238. if (reg && reg->type == REG_TYPE_MMXWCG)
  1239. return reg->number;
  1240. break;
  1241. default:
  1242. break;
  1243. }
  1244. return FAIL;
  1245. }
  1246. /* As arm_reg_parse_multi, but the register must be of type TYPE, and the
  1247. return value is the register number or FAIL. */
  1248. static int
  1249. arm_reg_parse (char **ccp, enum arm_reg_type type)
  1250. {
  1251. char *start = *ccp;
  1252. struct reg_entry *reg = arm_reg_parse_multi (ccp);
  1253. int ret;
  1254. /* Do not allow a scalar (reg+index) to parse as a register. */
  1255. if (reg && reg->neon && (reg->neon->defined & NTA_HASINDEX))
  1256. return FAIL;
  1257. if (reg && reg->type == type)
  1258. return reg->number;
  1259. if ((ret = arm_reg_alt_syntax (ccp, start, reg, type)) != FAIL)
  1260. return ret;
  1261. *ccp = start;
  1262. return FAIL;
  1263. }
  1264. /* Parse a Neon type specifier. *STR should point at the leading '.'
  1265. character. Does no verification at this stage that the type fits the opcode
  1266. properly. E.g.,
  1267. .i32.i32.s16
  1268. .s32.f32
  1269. .u16
  1270. Can all be legally parsed by this function.
  1271. Fills in neon_type struct pointer with parsed information, and updates STR
  1272. to point after the parsed type specifier. Returns SUCCESS if this was a legal
  1273. type, FAIL if not. */
  1274. static int
  1275. parse_neon_type (struct neon_type *type, char **str)
  1276. {
  1277. char *ptr = *str;
  1278. if (type)
  1279. type->elems = 0;
  1280. while (type->elems < NEON_MAX_TYPE_ELS)
  1281. {
  1282. enum neon_el_type thistype = NT_untyped;
  1283. unsigned thissize = -1u;
  1284. if (*ptr != '.')
  1285. break;
  1286. ptr++;
  1287. /* Just a size without an explicit type. */
  1288. if (ISDIGIT (*ptr))
  1289. goto parsesize;
  1290. switch (TOLOWER (*ptr))
  1291. {
  1292. case 'i': thistype = NT_integer; break;
  1293. case 'f': thistype = NT_float; break;
  1294. case 'p': thistype = NT_poly; break;
  1295. case 's': thistype = NT_signed; break;
  1296. case 'u': thistype = NT_unsigned; break;
  1297. case 'd':
  1298. thistype = NT_float;
  1299. thissize = 64;
  1300. ptr++;
  1301. goto done;
  1302. case 'b':
  1303. thistype = NT_bfloat;
  1304. switch (TOLOWER (*(++ptr)))
  1305. {
  1306. case 'f':
  1307. ptr += 1;
  1308. thissize = strtoul (ptr, &ptr, 10);
  1309. if (thissize != 16)
  1310. {
  1311. as_bad (_("bad size %d in type specifier"), thissize);
  1312. return FAIL;
  1313. }
  1314. goto done;
  1315. case '0': case '1': case '2': case '3': case '4':
  1316. case '5': case '6': case '7': case '8': case '9':
  1317. case ' ': case '.':
  1318. as_bad (_("unexpected type character `b' -- did you mean `bf'?"));
  1319. return FAIL;
  1320. default:
  1321. break;
  1322. }
  1323. break;
  1324. default:
  1325. as_bad (_("unexpected character `%c' in type specifier"), *ptr);
  1326. return FAIL;
  1327. }
  1328. ptr++;
  1329. /* .f is an abbreviation for .f32. */
  1330. if (thistype == NT_float && !ISDIGIT (*ptr))
  1331. thissize = 32;
  1332. else
  1333. {
  1334. parsesize:
  1335. thissize = strtoul (ptr, &ptr, 10);
  1336. if (thissize != 8 && thissize != 16 && thissize != 32
  1337. && thissize != 64)
  1338. {
  1339. as_bad (_("bad size %d in type specifier"), thissize);
  1340. return FAIL;
  1341. }
  1342. }
  1343. done:
  1344. if (type)
  1345. {
  1346. type->el[type->elems].type = thistype;
  1347. type->el[type->elems].size = thissize;
  1348. type->elems++;
  1349. }
  1350. }
  1351. /* Empty/missing type is not a successful parse. */
  1352. if (type->elems == 0)
  1353. return FAIL;
  1354. *str = ptr;
  1355. return SUCCESS;
  1356. }
  1357. /* Errors may be set multiple times during parsing or bit encoding
  1358. (particularly in the Neon bits), but usually the earliest error which is set
  1359. will be the most meaningful. Avoid overwriting it with later (cascading)
  1360. errors by calling this function. */
  1361. static void
  1362. first_error (const char *err)
  1363. {
  1364. if (!inst.error)
  1365. inst.error = err;
  1366. }
  1367. /* Parse a single type, e.g. ".s32", leading period included. */
  1368. static int
  1369. parse_neon_operand_type (struct neon_type_el *vectype, char **ccp)
  1370. {
  1371. char *str = *ccp;
  1372. struct neon_type optype;
  1373. if (*str == '.')
  1374. {
  1375. if (parse_neon_type (&optype, &str) == SUCCESS)
  1376. {
  1377. if (optype.elems == 1)
  1378. *vectype = optype.el[0];
  1379. else
  1380. {
  1381. first_error (_("only one type should be specified for operand"));
  1382. return FAIL;
  1383. }
  1384. }
  1385. else
  1386. {
  1387. first_error (_("vector type expected"));
  1388. return FAIL;
  1389. }
  1390. }
  1391. else
  1392. return FAIL;
  1393. *ccp = str;
  1394. return SUCCESS;
  1395. }
/* Special meanings for indices (which have a range of 0-7), which will fit into
   a 4-bit integer.  */
#define NEON_ALL_LANES 15
#define NEON_INTERLEAVE_LANES 14

/* Record a use of the given feature.  The use is merged into the
   per-ISA "used" feature set (Thumb or ARM) according to the current
   assembly mode.  */
static void
record_feature_use (const arm_feature_set *feature)
{
  if (thumb_mode)
    ARM_MERGE_FEATURE_SETS (thumb_arch_used, thumb_arch_used, *feature);
  else
    ARM_MERGE_FEATURE_SETS (arm_arch_used, arm_arch_used, *feature);
}
  1409. /* If the given feature available in the selected CPU, mark it as used.
  1410. Returns TRUE iff feature is available. */
  1411. static bool
  1412. mark_feature_used (const arm_feature_set *feature)
  1413. {
  1414. /* Do not support the use of MVE only instructions when in auto-detection or
  1415. -march=all. */
  1416. if (((feature == &mve_ext) || (feature == &mve_fp_ext))
  1417. && ARM_CPU_IS_ANY (cpu_variant))
  1418. {
  1419. first_error (BAD_MVE_AUTO);
  1420. return false;
  1421. }
  1422. /* Ensure the option is valid on the current architecture. */
  1423. if (!ARM_CPU_HAS_FEATURE (cpu_variant, *feature))
  1424. return false;
  1425. /* Add the appropriate architecture feature for the barrier option used.
  1426. */
  1427. record_feature_use (feature);
  1428. return true;
  1429. }
/* Parse either a register or a scalar, with an optional type. Return the
   register number, and optionally fill in the actual type of the register
   when multiple alternatives were given (NEON_TYPE_NDQ) in *RTYPE, and
   type/index information in *TYPEINFO.  Returns FAIL (setting
   first_error for the diagnosable cases) otherwise.  */
static int
parse_typed_reg_or_scalar (char **ccp, enum arm_reg_type type,
			   enum arm_reg_type *rtype,
			   struct neon_typed_alias *typeinfo)
{
  char *str = *ccp;
  struct reg_entry *reg = arm_reg_parse_multi (&str);
  struct neon_typed_alias atype;
  struct neon_type_el parsetype;

  /* Start from "no type, no index".  */
  atype.defined = 0;
  atype.index = -1;
  atype.eltype.type = NT_invtype;
  atype.eltype.size = -1;

  /* Try alternate syntax for some types of register. Note these are mutually
     exclusive with the Neon syntax extensions.  */
  if (reg == NULL)
    {
      int altreg = arm_reg_alt_syntax (&str, *ccp, reg, type);
      if (altreg != FAIL)
	*ccp = str;
      if (typeinfo)
	*typeinfo = atype;
      return altreg;
    }

  /* Undo polymorphism when a set of register types may be accepted.  */
  if ((type == REG_TYPE_NDQ
       && (reg->type == REG_TYPE_NQ || reg->type == REG_TYPE_VFD))
      || (type == REG_TYPE_VFSD
	  && (reg->type == REG_TYPE_VFS || reg->type == REG_TYPE_VFD))
      || (type == REG_TYPE_NSDQ
	  && (reg->type == REG_TYPE_VFS || reg->type == REG_TYPE_VFD
	      || reg->type == REG_TYPE_NQ))
      || (type == REG_TYPE_NSD
	  && (reg->type == REG_TYPE_VFS || reg->type == REG_TYPE_VFD))
      || (type == REG_TYPE_MMXWC
	  && (reg->type == REG_TYPE_MMXWCG)))
    type = (enum arm_reg_type) reg->type;

  if (type == REG_TYPE_MQ)
    {
      /* MVE vector registers are spelt like Neon Q registers; numbers
	 above 14 additionally require the D32 feature to be usable.  */
      if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	return FAIL;

      /* NOTE(review): REG is already known non-NULL here; the !reg
	 test is redundant but retained.  */
      if (!reg || reg->type != REG_TYPE_NQ)
	return FAIL;

      if (reg->number > 14 && !mark_feature_used (&fpu_vfp_ext_d32))
	{
	  first_error (_("expected MVE register [q0..q7]"));
	  return FAIL;
	}
      type = REG_TYPE_NQ;
    }
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
	   && (type == REG_TYPE_NQ))
    return FAIL;

  if (type != reg->type)
    return FAIL;

  /* Pick up any type/index the register alias itself carries.  */
  if (reg->neon)
    atype = *reg->neon;

  /* Optional ".type" suffix; rejected if the alias already had one.  */
  if (parse_neon_operand_type (&parsetype, &str) == SUCCESS)
    {
      if ((atype.defined & NTA_HASTYPE) != 0)
	{
	  first_error (_("can't redefine type for operand"));
	  return FAIL;
	}
      atype.defined |= NTA_HASTYPE;
      atype.eltype = parsetype;
    }

  /* Optional "[index]" or "[]" (all lanes) scalar suffix.  */
  if (skip_past_char (&str, '[') == SUCCESS)
    {
      if (type != REG_TYPE_VFD
	  && !(type == REG_TYPE_VFS
	       && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8_2))
	  && !(type == REG_TYPE_NQ
	       && ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)))
	{
	  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	    first_error (_("only D and Q registers may be indexed"));
	  else
	    first_error (_("only D registers may be indexed"));
	  return FAIL;
	}

      if ((atype.defined & NTA_HASINDEX) != 0)
	{
	  first_error (_("can't change index for operand"));
	  return FAIL;
	}

      atype.defined |= NTA_HASINDEX;

      if (skip_past_char (&str, ']') == SUCCESS)
	atype.index = NEON_ALL_LANES;
      else
	{
	  expressionS exp;

	  my_get_expression (&exp, &str, GE_NO_PREFIX);

	  if (exp.X_op != O_constant)
	    {
	      first_error (_("constant expression required"));
	      return FAIL;
	    }

	  if (skip_past_char (&str, ']') == FAIL)
	    return FAIL;

	  atype.index = exp.X_add_number;
	}
    }

  if (typeinfo)
    *typeinfo = atype;

  if (rtype)
    *rtype = type;

  *ccp = str;

  return reg->number;
}
  1544. /* Like arm_reg_parse, but also allow the following extra features:
  1545. - If RTYPE is non-zero, return the (possibly restricted) type of the
  1546. register (e.g. Neon double or quad reg when either has been requested).
  1547. - If this is a Neon vector type with additional type information, fill
  1548. in the struct pointed to by VECTYPE (if non-NULL).
  1549. This function will fault on encountering a scalar. */
  1550. static int
  1551. arm_typed_reg_parse (char **ccp, enum arm_reg_type type,
  1552. enum arm_reg_type *rtype, struct neon_type_el *vectype)
  1553. {
  1554. struct neon_typed_alias atype;
  1555. char *str = *ccp;
  1556. int reg = parse_typed_reg_or_scalar (&str, type, rtype, &atype);
  1557. if (reg == FAIL)
  1558. return FAIL;
  1559. /* Do not allow regname(... to parse as a register. */
  1560. if (*str == '(')
  1561. return FAIL;
  1562. /* Do not allow a scalar (reg+index) to parse as a register. */
  1563. if ((atype.defined & NTA_HASINDEX) != 0)
  1564. {
  1565. first_error (_("register operand expected, but got scalar"));
  1566. return FAIL;
  1567. }
  1568. if (vectype)
  1569. *vectype = atype.eltype;
  1570. *ccp = str;
  1571. return reg;
  1572. }
/* A parsed scalar is encoded as (register << 4) | lane-index; these
   macros unpack that encoding.  */
#define NEON_SCALAR_REG(X)	((X) >> 4)
#define NEON_SCALAR_INDEX(X)	((X) & 15)
  1575. /* Parse a Neon scalar. Most of the time when we're parsing a scalar, we don't
  1576. have enough information to be able to do a good job bounds-checking. So, we
  1577. just do easy checks here, and do further checks later. */
  1578. static int
  1579. parse_scalar (char **ccp, int elsize, struct neon_type_el *type, enum
  1580. arm_reg_type reg_type)
  1581. {
  1582. int reg;
  1583. char *str = *ccp;
  1584. struct neon_typed_alias atype;
  1585. unsigned reg_size;
  1586. reg = parse_typed_reg_or_scalar (&str, reg_type, NULL, &atype);
  1587. switch (reg_type)
  1588. {
  1589. case REG_TYPE_VFS:
  1590. reg_size = 32;
  1591. break;
  1592. case REG_TYPE_VFD:
  1593. reg_size = 64;
  1594. break;
  1595. case REG_TYPE_MQ:
  1596. reg_size = 128;
  1597. break;
  1598. default:
  1599. gas_assert (0);
  1600. return FAIL;
  1601. }
  1602. if (reg == FAIL || (atype.defined & NTA_HASINDEX) == 0)
  1603. return FAIL;
  1604. if (reg_type != REG_TYPE_MQ && atype.index == NEON_ALL_LANES)
  1605. {
  1606. first_error (_("scalar must have an index"));
  1607. return FAIL;
  1608. }
  1609. else if (atype.index >= reg_size / elsize)
  1610. {
  1611. first_error (_("scalar index out of range"));
  1612. return FAIL;
  1613. }
  1614. if (type)
  1615. *type = atype.eltype;
  1616. *ccp = str;
  1617. return reg * 16 + atype.index;
  1618. }
/* Types of registers in a list.  */
enum reg_list_els
{
  /* Core registers; parse_reg_list also accepts a bare constant mask
     for this kind.  */
  REGLIST_RN,
  REGLIST_PSEUDO,
  /* CLRM operand: r0-r12, lr and APSR (see parse_reg_list).  */
  REGLIST_CLRM,
  REGLIST_VFP_S,
  REGLIST_VFP_S_VPR,
  REGLIST_VFP_D,
  REGLIST_VFP_D_VPR,
  REGLIST_NEON_D
};
  1631. /* Parse an ARM register list. Returns the bitmask, or FAIL. */
  1632. static long
  1633. parse_reg_list (char ** strp, enum reg_list_els etype)
  1634. {
  1635. char *str = *strp;
  1636. long range = 0;
  1637. int another_range;
  1638. gas_assert (etype == REGLIST_RN || etype == REGLIST_CLRM
  1639. || etype == REGLIST_PSEUDO);
  1640. /* We come back here if we get ranges concatenated by '+' or '|'. */
  1641. do
  1642. {
  1643. skip_whitespace (str);
  1644. another_range = 0;
  1645. if (*str == '{')
  1646. {
  1647. int in_range = 0;
  1648. int cur_reg = -1;
  1649. str++;
  1650. do
  1651. {
  1652. int reg;
  1653. const char apsr_str[] = "apsr";
  1654. int apsr_str_len = strlen (apsr_str);
  1655. enum arm_reg_type rt;
  1656. if (etype == REGLIST_RN || etype == REGLIST_CLRM)
  1657. rt = REG_TYPE_RN;
  1658. else
  1659. rt = REG_TYPE_PSEUDO;
  1660. reg = arm_reg_parse (&str, rt);
  1661. if (etype == REGLIST_CLRM)
  1662. {
  1663. if (reg == REG_SP || reg == REG_PC)
  1664. reg = FAIL;
  1665. else if (reg == FAIL
  1666. && !strncasecmp (str, apsr_str, apsr_str_len)
  1667. && !ISALPHA (*(str + apsr_str_len)))
  1668. {
  1669. reg = 15;
  1670. str += apsr_str_len;
  1671. }
  1672. if (reg == FAIL)
  1673. {
  1674. first_error (_("r0-r12, lr or APSR expected"));
  1675. return FAIL;
  1676. }
  1677. }
  1678. else if (etype == REGLIST_PSEUDO)
  1679. {
  1680. if (reg == FAIL)
  1681. {
  1682. first_error (_(reg_expected_msgs[REG_TYPE_PSEUDO]));
  1683. return FAIL;
  1684. }
  1685. }
  1686. else /* etype == REGLIST_RN. */
  1687. {
  1688. if (reg == FAIL)
  1689. {
  1690. first_error (_(reg_expected_msgs[REGLIST_RN]));
  1691. return FAIL;
  1692. }
  1693. }
  1694. if (in_range)
  1695. {
  1696. int i;
  1697. if (reg <= cur_reg)
  1698. {
  1699. first_error (_("bad range in register list"));
  1700. return FAIL;
  1701. }
  1702. for (i = cur_reg + 1; i < reg; i++)
  1703. {
  1704. if (range & (1 << i))
  1705. as_tsktsk
  1706. (_("Warning: duplicated register (r%d) in register list"),
  1707. i);
  1708. else
  1709. range |= 1 << i;
  1710. }
  1711. in_range = 0;
  1712. }
  1713. if (range & (1 << reg))
  1714. as_tsktsk (_("Warning: duplicated register (r%d) in register list"),
  1715. reg);
  1716. else if (reg <= cur_reg)
  1717. as_tsktsk (_("Warning: register range not in ascending order"));
  1718. range |= 1 << reg;
  1719. cur_reg = reg;
  1720. }
  1721. while (skip_past_comma (&str) != FAIL
  1722. || (in_range = 1, *str++ == '-'));
  1723. str--;
  1724. if (skip_past_char (&str, '}') == FAIL)
  1725. {
  1726. first_error (_("missing `}'"));
  1727. return FAIL;
  1728. }
  1729. }
  1730. else if (etype == REGLIST_RN)
  1731. {
  1732. expressionS exp;
  1733. if (my_get_expression (&exp, &str, GE_NO_PREFIX))
  1734. return FAIL;
  1735. if (exp.X_op == O_constant)
  1736. {
  1737. if (exp.X_add_number
  1738. != (exp.X_add_number & 0x0000ffff))
  1739. {
  1740. inst.error = _("invalid register mask");
  1741. return FAIL;
  1742. }
  1743. if ((range & exp.X_add_number) != 0)
  1744. {
  1745. int regno = range & exp.X_add_number;
  1746. regno &= -regno;
  1747. regno = (1 << regno) - 1;
  1748. as_tsktsk
  1749. (_("Warning: duplicated register (r%d) in register list"),
  1750. regno);
  1751. }
  1752. range |= exp.X_add_number;
  1753. }
  1754. else
  1755. {
  1756. if (inst.relocs[0].type != 0)
  1757. {
  1758. inst.error = _("expression too complex");
  1759. return FAIL;
  1760. }
  1761. memcpy (&inst.relocs[0].exp, &exp, sizeof (expressionS));
  1762. inst.relocs[0].type = BFD_RELOC_ARM_MULTI;
  1763. inst.relocs[0].pc_rel = 0;
  1764. }
  1765. }
  1766. if (*str == '|' || *str == '+')
  1767. {
  1768. str++;
  1769. another_range = 1;
  1770. }
  1771. }
  1772. while (another_range);
  1773. *strp = str;
  1774. return range;
  1775. }
/* Parse a VFP register list.  If the string is invalid return FAIL.
   Otherwise return the number of registers, and set PBASE to the first
   register.  Parses registers of type ETYPE.
   If REGLIST_NEON_D is used, several syntax enhancements are enabled:
     - Q registers can be used to specify pairs of D registers
     - { } can be omitted from around a singleton register list
	 FIXME: This is not implemented, as it would require backtracking in
	 some cases, e.g.:
	   vtbl.8 d3,d4,d5
	 This could be done (the meaning isn't really ambiguous), but doesn't
	 fit in well with the current parsing framework.
     - 32 D registers may be used (also true for VFPv3).
   FIXME: Types are ignored in these register lists, which is probably a
   bug.  */

static int
parse_vfp_reg_list (char **ccp, unsigned int *pbase, enum reg_list_els etype,
		    bool *partial_match)
{
  char *str = *ccp;
  int base_reg;			/* Lowest register number seen so far.  */
  int new_base;			/* Register just parsed.  */
  enum arm_reg_type regtype = (enum arm_reg_type) 0;
  int max_regs = 0;		/* One past the highest register allowed.  */
  int count = 0;		/* Number of registers accumulated.  */
  int warned = 0;		/* Emit the ordering warning only once.  */
  unsigned long mask = 0;	/* Bitmask of the registers seen.  */
  int i;
  bool vpr_seen = false;
  bool expect_vpr =
    (etype == REGLIST_VFP_S_VPR) || (etype == REGLIST_VFP_D_VPR);

  if (skip_past_char (&str, '{') == FAIL)
    {
      inst.error = _("expecting {");
      return FAIL;
    }

  /* Select the register type and count limit for the list kind.  */
  switch (etype)
    {
    case REGLIST_VFP_S:
    case REGLIST_VFP_S_VPR:
      regtype = REG_TYPE_VFS;
      max_regs = 32;
      break;

    case REGLIST_VFP_D:
    case REGLIST_VFP_D_VPR:
      regtype = REG_TYPE_VFD;
      break;

    case REGLIST_NEON_D:
      regtype = REG_TYPE_NDQ;
      break;

    default:
      gas_assert (0);
    }

  if (etype != REGLIST_VFP_S && etype != REGLIST_VFP_S_VPR)
    {
      /* VFPv3 allows 32 D registers, except for the VFPv3-D16 variant.  */
      if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_d32))
	{
	  max_regs = 32;
	  /* Record that the D32 feature was actually used.  */
	  if (thumb_mode)
	    ARM_MERGE_FEATURE_SETS (thumb_arch_used, thumb_arch_used,
				    fpu_vfp_ext_d32);
	  else
	    ARM_MERGE_FEATURE_SETS (arm_arch_used, arm_arch_used,
				    fpu_vfp_ext_d32);
	}
      else
	max_regs = 16;
    }

  base_reg = max_regs;
  *partial_match = false;

  do
    {
      unsigned int setmask = 1, addregs = 1;
      const char vpr_str[] = "vpr";
      size_t vpr_str_len = strlen (vpr_str);

      new_base = arm_typed_reg_parse (&str, regtype, &regtype, NULL);

      if (expect_vpr)
	{
	  /* "vpr" is accepted at most once, and only as the last
	     element of the list.  */
	  if (new_base == FAIL
	      && !strncasecmp (str, vpr_str, vpr_str_len)
	      && !ISALPHA (*(str + vpr_str_len))
	      && !vpr_seen)
	    {
	      vpr_seen = true;
	      str += vpr_str_len;
	      if (count == 0)
		base_reg = 0; /* Canonicalize VPR only on d0 with 0 regs.  */
	    }
	  else if (vpr_seen)
	    {
	      first_error (_("VPR expected last"));
	      return FAIL;
	    }
	  else if (new_base == FAIL)
	    {
	      if (regtype == REG_TYPE_VFS)
		first_error (_("VFP single precision register or VPR "
			       "expected"));
	      else /* regtype == REG_TYPE_VFD.  */
		first_error (_("VFP/Neon double precision register or VPR "
			       "expected"));
	      return FAIL;
	    }
	}
      else if (new_base == FAIL)
	{
	  first_error (_(reg_expected_msgs[regtype]));
	  return FAIL;
	}

      *partial_match = true;

      if (vpr_seen)
	continue;

      if (new_base >= max_regs)
	{
	  first_error (_("register out of range in list"));
	  return FAIL;
	}

      /* Note: a value of 2 * n is returned for the register Q<n>.  */
      if (regtype == REG_TYPE_NQ)
	{
	  setmask = 3;
	  addregs = 2;
	}

      if (new_base < base_reg)
	base_reg = new_base;

      if (mask & (setmask << new_base))
	{
	  first_error (_("invalid register list"));
	  return FAIL;
	}

      if ((mask >> new_base) != 0 && ! warned && !vpr_seen)
	{
	  as_tsktsk (_("register list not in ascending order"));
	  warned = 1;
	}

      mask |= setmask << new_base;
      count += addregs;

      if (*str == '-') /* We have the start of a range expression */
	{
	  int high_range;

	  str++;

	  if ((high_range = arm_typed_reg_parse (&str, regtype, NULL, NULL))
	      == FAIL)
	    {
	      inst.error = gettext (reg_expected_msgs[regtype]);
	      return FAIL;
	    }

	  if (high_range >= max_regs)
	    {
	      first_error (_("register out of range in list"));
	      return FAIL;
	    }

	  /* Q<n> covers D registers 2n and 2n+1.  */
	  if (regtype == REG_TYPE_NQ)
	    high_range = high_range + 1;

	  if (high_range <= new_base)
	    {
	      inst.error = _("register range not in ascending order");
	      return FAIL;
	    }

	  for (new_base += addregs; new_base <= high_range; new_base += addregs)
	    {
	      if (mask & (setmask << new_base))
		{
		  inst.error = _("invalid register list");
		  return FAIL;
		}

	      mask |= setmask << new_base;
	      count += addregs;
	    }
	}
    }
  while (skip_past_comma (&str) != FAIL);

  /* NOTE(review): this skips the character that terminated the list;
     presumably the closing '}' — there is no explicit check for it
     here.  Confirm callers rely on the sanity checks below.  */
  str++;

  /* Sanity check -- should have raised a parse error above.  */
  if ((!vpr_seen && count == 0) || count > max_regs)
    abort ();

  *pbase = base_reg;

  if (expect_vpr && !vpr_seen)
    {
      first_error (_("VPR expected last"));
      return FAIL;
    }

  /* Final test -- the registers must be consecutive.  */
  mask >>= base_reg;
  for (i = 0; i < count; i++)
    {
      if ((mask & (1u << i)) == 0)
	{
	  inst.error = _("non-contiguous register range");
	  return FAIL;
	}
    }

  *ccp = str;

  return count;
}
  1971. /* True if two alias types are the same. */
  1972. static bool
  1973. neon_alias_types_same (struct neon_typed_alias *a, struct neon_typed_alias *b)
  1974. {
  1975. if (!a && !b)
  1976. return true;
  1977. if (!a || !b)
  1978. return false;
  1979. if (a->defined != b->defined)
  1980. return false;
  1981. if ((a->defined & NTA_HASTYPE) != 0
  1982. && (a->eltype.type != b->eltype.type
  1983. || a->eltype.size != b->eltype.size))
  1984. return false;
  1985. if ((a->defined & NTA_HASINDEX) != 0
  1986. && (a->index != b->index))
  1987. return false;
  1988. return true;
  1989. }
/* Parse element/structure lists for Neon VLD<n> and VST<n> instructions.
   The base register is put in *PBASE.
   The lane (or one of the NEON_*_LANES constants) is placed in bits [3:0] of
   the return value.
   The register stride (minus one) is put in bit 4 of the return value.
   Bits [6:5] encode the list length (minus one).
   The type of the list elements is put in *ELTYPE, if non-NULL.  */

/* Accessors for the encoded return value described above.  */
#define NEON_LANE(X)		((X) & 0xf)
#define NEON_REG_STRIDE(X)	((((X) >> 4) & 1) + 1)
#define NEON_REGLIST_LENGTH(X)	((((X) >> 5) & 3) + 1)

static int
parse_neon_el_struct_list (char **str, unsigned *pbase,
			   int mve,
			   struct neon_type_el *eltype)
{
  char *ptr = *str;
  int base_reg = -1;	/* First register in the list; -1 = none yet.  */
  int reg_incr = -1;	/* Stride between registers; -1 = not yet known.  */
  int count = 0;	/* Number of D registers covered so far.  */
  int lane = -1;	/* Lane index, or one of the NEON_*_LANES values.  */
  int leading_brace = 0;
  enum arm_reg_type rtype = REG_TYPE_NDQ;
  const char *const incr_error = mve ? _("register stride must be 1") :
    _("register stride must be 1 or 2");
  const char *const type_error = _("mismatched element/structure types in list");
  struct neon_typed_alias firsttype;
  firsttype.defined = 0;
  firsttype.eltype.type = NT_invtype;
  firsttype.eltype.size = -1;
  firsttype.index = -1;

  if (skip_past_char (&ptr, '{') == SUCCESS)
    leading_brace = 1;

  do
    {
      struct neon_typed_alias atype;
      if (mve)
	rtype = REG_TYPE_MQ;
      int getreg = parse_typed_reg_or_scalar (&ptr, rtype, &rtype, &atype);

      if (getreg == FAIL)
	{
	  first_error (_(reg_expected_msgs[rtype]));
	  return FAIL;
	}

      if (base_reg == -1)
	{
	  /* First register fixes the base and the element type.  */
	  base_reg = getreg;
	  if (rtype == REG_TYPE_NQ)
	    {
	      reg_incr = 1;
	    }
	  firsttype = atype;
	}
      else if (reg_incr == -1)
	{
	  /* Second register fixes the stride.  */
	  reg_incr = getreg - base_reg;
	  if (reg_incr < 1 || reg_incr > 2)
	    {
	      first_error (_(incr_error));
	      return FAIL;
	    }
	}
      else if (getreg != base_reg + reg_incr * count)
	{
	  /* Subsequent registers must continue the established stride.  */
	  first_error (_(incr_error));
	  return FAIL;
	}

      if (! neon_alias_types_same (&atype, &firsttype))
	{
	  first_error (_(type_error));
	  return FAIL;
	}

      /* Handle Dn-Dm or Qn-Qm syntax.  Can only be used with non-indexed list
	 modes.  */
      if (ptr[0] == '-')
	{
	  struct neon_typed_alias htype;
	  int hireg, dregs = (rtype == REG_TYPE_NQ) ? 2 : 1;
	  if (lane == -1)
	    lane = NEON_INTERLEAVE_LANES;
	  else if (lane != NEON_INTERLEAVE_LANES)
	    {
	      first_error (_(type_error));
	      return FAIL;
	    }
	  if (reg_incr == -1)
	    reg_incr = 1;
	  else if (reg_incr != 1)
	    {
	      first_error (_("don't use Rn-Rm syntax with non-unit stride"));
	      return FAIL;
	    }
	  ptr++;
	  hireg = parse_typed_reg_or_scalar (&ptr, rtype, NULL, &htype);
	  if (hireg == FAIL)
	    {
	      first_error (_(reg_expected_msgs[rtype]));
	      return FAIL;
	    }
	  if (! neon_alias_types_same (&htype, &firsttype))
	    {
	      first_error (_(type_error));
	      return FAIL;
	    }
	  /* Count every D register from GETREG to HIREG inclusive
	     (DREGS D registers per parsed register).  */
	  count += hireg + dregs - getreg;
	  continue;
	}

      /* If we're using Q registers, we can't use [] or [n] syntax.  */
      if (rtype == REG_TYPE_NQ)
	{
	  count += 2;
	  continue;
	}

      if ((atype.defined & NTA_HASINDEX) != 0)
	{
	  /* All indexed elements must use the same lane.  */
	  if (lane == -1)
	    lane = atype.index;
	  else if (lane != atype.index)
	    {
	      first_error (_(type_error));
	      return FAIL;
	    }
	}
      else if (lane == -1)
	lane = NEON_INTERLEAVE_LANES;
      else if (lane != NEON_INTERLEAVE_LANES)
	{
	  first_error (_(type_error));
	  return FAIL;
	}
      count++;
    }
  while ((count != 1 || leading_brace) && skip_past_comma (&ptr) != FAIL);

  /* No lane set by [x].  We must be interleaving structures.  */
  if (lane == -1)
    lane = NEON_INTERLEAVE_LANES;

  /* Sanity check.  */
  if (lane == -1 || base_reg == -1 || count < 1 || (!mve && count > 4)
      || (count > 1 && reg_incr == -1))
    {
      first_error (_("error parsing element/structure list"));
      return FAIL;
    }

  if ((count > 1 || leading_brace) && skip_past_char (&ptr, '}') == FAIL)
    {
      first_error (_("expected }"));
      return FAIL;
    }

  if (reg_incr == -1)
    reg_incr = 1;

  if (eltype)
    *eltype = firsttype.eltype;

  *pbase = base_reg;
  *str = ptr;

  /* Pack lane, stride and length as documented above.  */
  return lane | ((reg_incr - 1) << 4) | ((count - 1) << 5);
}
  2145. /* Parse an explicit relocation suffix on an expression. This is
  2146. either nothing, or a word in parentheses. Note that if !OBJ_ELF,
  2147. arm_reloc_hsh contains no entries, so this function can only
  2148. succeed if there is no () after the word. Returns -1 on error,
  2149. BFD_RELOC_UNUSED if there wasn't any suffix. */
  2150. static int
  2151. parse_reloc (char **str)
  2152. {
  2153. struct reloc_entry *r;
  2154. char *p, *q;
  2155. if (**str != '(')
  2156. return BFD_RELOC_UNUSED;
  2157. p = *str + 1;
  2158. q = p;
  2159. while (*q && *q != ')' && *q != ',')
  2160. q++;
  2161. if (*q != ')')
  2162. return -1;
  2163. if ((r = (struct reloc_entry *)
  2164. str_hash_find_n (arm_reloc_hsh, p, q - p)) == NULL)
  2165. return -1;
  2166. *str = q + 1;
  2167. return r->reloc;
  2168. }
  2169. /* Directives: register aliases. */
  2170. static struct reg_entry *
  2171. insert_reg_alias (char *str, unsigned number, int type)
  2172. {
  2173. struct reg_entry *new_reg;
  2174. const char *name;
  2175. if ((new_reg = (struct reg_entry *) str_hash_find (arm_reg_hsh, str)) != 0)
  2176. {
  2177. if (new_reg->builtin)
  2178. as_warn (_("ignoring attempt to redefine built-in register '%s'"), str);
  2179. /* Only warn about a redefinition if it's not defined as the
  2180. same register. */
  2181. else if (new_reg->number != number || new_reg->type != type)
  2182. as_warn (_("ignoring redefinition of register alias '%s'"), str);
  2183. return NULL;
  2184. }
  2185. name = xstrdup (str);
  2186. new_reg = XNEW (struct reg_entry);
  2187. new_reg->name = name;
  2188. new_reg->number = number;
  2189. new_reg->type = type;
  2190. new_reg->builtin = false;
  2191. new_reg->neon = NULL;
  2192. str_hash_insert (arm_reg_hsh, name, new_reg, 0);
  2193. return new_reg;
  2194. }
  2195. static void
  2196. insert_neon_reg_alias (char *str, int number, int type,
  2197. struct neon_typed_alias *atype)
  2198. {
  2199. struct reg_entry *reg = insert_reg_alias (str, number, type);
  2200. if (!reg)
  2201. {
  2202. first_error (_("attempt to redefine typed alias"));
  2203. return;
  2204. }
  2205. if (atype)
  2206. {
  2207. reg->neon = XNEW (struct neon_typed_alias);
  2208. *reg->neon = *atype;
  2209. }
  2210. }
/* Look for the .req directive.	 This is of the form:

	new_register_name .req existing_register_name

   If we find one, or if it looks sufficiently like one that we want to
   handle any error here, return TRUE.  Otherwise return FALSE.  */

static bool
create_register_alias (char * newname, char *p)
{
  struct reg_entry *old;
  char *oldname, *nbuf;
  size_t nlen;

  /* The input scrubber ensures that whitespace after the mnemonic is
     collapsed to single spaces.  */
  oldname = p;
  if (!startswith (oldname, " .req "))
    return false;

  oldname += 6;
  if (*oldname == '\0')
    return false;

  old = (struct reg_entry *) str_hash_find (arm_reg_hsh, oldname);
  if (!old)
    {
      as_warn (_("unknown register '%s' -- .req ignored"), oldname);
      return true;
    }

  /* If TC_CASE_SENSITIVE is defined, then newname already points to
     the desired alias name, and p points to its end.  If not, then
     the desired alias name is in the global original_case_string.  */
#ifdef TC_CASE_SENSITIVE
  nlen = p - newname;
#else
  newname = original_case_string;
  nlen = strlen (newname);
#endif

  nbuf = xmemdup0 (newname, nlen);

  /* Create aliases under the new name as stated; an all-lowercase
     version of the new name; and an all-uppercase version of the new
     name.  */
  if (insert_reg_alias (nbuf, old->number, old->type) != NULL)
    {
      for (p = nbuf; *p; p++)
	*p = TOUPPER (*p);

      /* Only add the case variant when it differs from the name as
	 given.  */
      if (strncmp (nbuf, newname, nlen))
	{
	  /* If this attempt to create an additional alias fails, do not bother
	     trying to create the all-lower case alias.  We will fail and issue
	     a second, duplicate error message.  This situation arises when the
	     programmer does something like:
	       foo .req r0
	       Foo .req r1
	     The second .req creates the "Foo" alias but then fails to create
	     the artificial FOO alias because it has already been created by the
	     first .req.  */
	  if (insert_reg_alias (nbuf, old->number, old->type) == NULL)
	    {
	      free (nbuf);
	      return true;
	    }
	}

      for (p = nbuf; *p; p++)
	*p = TOLOWER (*p);

      if (strncmp (nbuf, newname, nlen))
	insert_reg_alias (nbuf, old->number, old->type);
    }

  free (nbuf);
  return true;
}
/* Create a Neon typed/indexed register alias using directives, e.g.:
     X .dn d5.s32[1]
     Y .qn 6.s16
     Z .dn d7
     T .dn Z[0]
   These typed registers can be used instead of the types specified after the
   Neon mnemonic, so long as all operands given have types.  Types can also be
   specified directly, e.g.:
     vadd d0.s32, d1.s32, d2.s32  */

static bool
create_neon_reg_alias (char *newname, char *p)
{
  enum arm_reg_type basetype;
  struct reg_entry *basereg;
  struct reg_entry mybasereg;
  struct neon_type ntype;
  struct neon_typed_alias typeinfo;
  char *namebuf, *nameend ATTRIBUTE_UNUSED;
  int namelen;

  typeinfo.defined = 0;
  typeinfo.eltype.type = NT_invtype;
  typeinfo.eltype.size = -1;
  typeinfo.index = -1;

  nameend = p;

  /* ".dn" creates a D register alias, ".qn" a Q register alias.  */
  if (startswith (p, " .dn "))
    basetype = REG_TYPE_VFD;
  else if (startswith (p, " .qn "))
    basetype = REG_TYPE_NQ;
  else
    return false;

  p += 5;

  if (*p == '\0')
    return false;

  basereg = arm_reg_parse_multi (&p);

  if (basereg && basereg->type != basetype)
    {
      as_bad (_("bad type for register"));
      return false;
    }

  if (basereg == NULL)
    {
      expressionS exp;
      /* Try parsing as an integer.  */
      my_get_expression (&exp, &p, GE_NO_PREFIX);
      if (exp.X_op != O_constant)
	{
	  as_bad (_("expression must be constant"));
	  return false;
	}
      basereg = &mybasereg;
      /* Q<n> is represented internally as D register number 2*n.  */
      basereg->number = (basetype == REG_TYPE_NQ) ? exp.X_add_number * 2
						  : exp.X_add_number;
      basereg->neon = 0;
    }

  /* Inherit the type/index info of the base alias, if any.  */
  if (basereg->neon)
    typeinfo = *basereg->neon;

  if (parse_neon_type (&ntype, &p) == SUCCESS)
    {
      /* We got a type.  */
      if (typeinfo.defined & NTA_HASTYPE)
	{
	  as_bad (_("can't redefine the type of a register alias"));
	  return false;
	}

      typeinfo.defined |= NTA_HASTYPE;
      if (ntype.elems != 1)
	{
	  as_bad (_("you must specify a single type only"));
	  return false;
	}

      typeinfo.eltype = ntype.el[0];
    }

  if (skip_past_char (&p, '[') == SUCCESS)
    {
      expressionS exp;
      /* We got a scalar index.  */
      if (typeinfo.defined & NTA_HASINDEX)
	{
	  as_bad (_("can't redefine the index of a scalar alias"));
	  return false;
	}

      my_get_expression (&exp, &p, GE_NO_PREFIX);

      if (exp.X_op != O_constant)
	{
	  as_bad (_("scalar index must be constant"));
	  return false;
	}

      typeinfo.defined |= NTA_HASINDEX;
      typeinfo.index = exp.X_add_number;

      if (skip_past_char (&p, ']') == FAIL)
	{
	  as_bad (_("expecting ]"));
	  return false;
	}
    }

  /* If TC_CASE_SENSITIVE is defined, then newname already points to
     the desired alias name, and p points to its end.  If not, then
     the desired alias name is in the global original_case_string.  */
#ifdef TC_CASE_SENSITIVE
  namelen = nameend - newname;
#else
  newname = original_case_string;
  namelen = strlen (newname);
#endif

  namebuf = xmemdup0 (newname, namelen);

  insert_neon_reg_alias (namebuf, basereg->number, basetype,
			 typeinfo.defined != 0 ? &typeinfo : NULL);

  /* Insert name in all uppercase.  */
  for (p = namebuf; *p; p++)
    *p = TOUPPER (*p);

  if (strncmp (namebuf, newname, namelen))
    insert_neon_reg_alias (namebuf, basereg->number, basetype,
			   typeinfo.defined != 0 ? &typeinfo : NULL);

  /* Insert name in all lowercase.  */
  for (p = namebuf; *p; p++)
    *p = TOLOWER (*p);

  if (strncmp (namebuf, newname, namelen))
    insert_neon_reg_alias (namebuf, basereg->number, basetype,
			   typeinfo.defined != 0 ? &typeinfo : NULL);

  free (namebuf);
  return true;
}
/* Should never be called, as .req goes between the alias and the
   register name, not at the beginning of the line.  */

static void
s_req (int a ATTRIBUTE_UNUSED)
{
  as_bad (_("invalid syntax for .req directive"));
}
/* Like s_req: .dn belongs after the alias name, so reaching this
   handler means the directive was misplaced.  */

static void
s_dn (int a ATTRIBUTE_UNUSED)
{
  as_bad (_("invalid syntax for .dn directive"));
}
/* Like s_req: .qn belongs after the alias name, so reaching this
   handler means the directive was misplaced.  */

static void
s_qn (int a ATTRIBUTE_UNUSED)
{
  as_bad (_("invalid syntax for .qn directive"));
}
  2416. /* The .unreq directive deletes an alias which was previously defined
  2417. by .req. For example:
  2418. my_alias .req r11
  2419. .unreq my_alias */
  2420. static void
  2421. s_unreq (int a ATTRIBUTE_UNUSED)
  2422. {
  2423. char * name;
  2424. char saved_char;
  2425. name = input_line_pointer;
  2426. while (*input_line_pointer != 0
  2427. && *input_line_pointer != ' '
  2428. && *input_line_pointer != '\n')
  2429. ++input_line_pointer;
  2430. saved_char = *input_line_pointer;
  2431. *input_line_pointer = 0;
  2432. if (!*name)
  2433. as_bad (_("invalid syntax for .unreq directive"));
  2434. else
  2435. {
  2436. struct reg_entry *reg
  2437. = (struct reg_entry *) str_hash_find (arm_reg_hsh, name);
  2438. if (!reg)
  2439. as_bad (_("unknown register alias '%s'"), name);
  2440. else if (reg->builtin)
  2441. as_warn (_("ignoring attempt to use .unreq on fixed register name: '%s'"),
  2442. name);
  2443. else
  2444. {
  2445. char * p;
  2446. char * nbuf;
  2447. str_hash_delete (arm_reg_hsh, name);
  2448. free ((char *) reg->name);
  2449. free (reg->neon);
  2450. free (reg);
  2451. /* Also locate the all upper case and all lower case versions.
  2452. Do not complain if we cannot find one or the other as it
  2453. was probably deleted above. */
  2454. nbuf = strdup (name);
  2455. for (p = nbuf; *p; p++)
  2456. *p = TOUPPER (*p);
  2457. reg = (struct reg_entry *) str_hash_find (arm_reg_hsh, nbuf);
  2458. if (reg)
  2459. {
  2460. str_hash_delete (arm_reg_hsh, nbuf);
  2461. free ((char *) reg->name);
  2462. free (reg->neon);
  2463. free (reg);
  2464. }
  2465. for (p = nbuf; *p; p++)
  2466. *p = TOLOWER (*p);
  2467. reg = (struct reg_entry *) str_hash_find (arm_reg_hsh, nbuf);
  2468. if (reg)
  2469. {
  2470. str_hash_delete (arm_reg_hsh, nbuf);
  2471. free ((char *) reg->name);
  2472. free (reg->neon);
  2473. free (reg);
  2474. }
  2475. free (nbuf);
  2476. }
  2477. }
  2478. *input_line_pointer = saved_char;
  2479. demand_empty_rest_of_line ();
  2480. }
/* Directives: Instruction set selection.  */

#ifdef OBJ_ELF
/* This code is to handle mapping symbols as defined in the ARM ELF spec.
   (See "Mapping symbols", section 4.5.5, ARM AAELF version 1.0).
   Note that previously, $a and $t has type STT_FUNC (BSF_OBJECT flag),
   and $d has type STT_OBJECT (BSF_OBJECT flag).  Now all three are untyped.  */

/* Create a new mapping symbol for the transition to STATE, at offset
   VALUE within FRAG.  */

static void
make_mapping_symbol (enum mstate state, valueT value, fragS *frag)
{
  symbolS * symbolP;
  const char * symname;
  int type;

  /* Pick the symbol name: $a = ARM code, $t = Thumb code, $d = data.  */
  switch (state)
    {
    case MAP_DATA:
      symname = "$d";
      type = BSF_NO_FLAGS;
      break;
    case MAP_ARM:
      symname = "$a";
      type = BSF_NO_FLAGS;
      break;
    case MAP_THUMB:
      symname = "$t";
      type = BSF_NO_FLAGS;
      break;
    default:
      abort ();
    }

  symbolP = symbol_new (symname, now_seg, frag, value);
  symbol_get_bfdsym (symbolP)->flags |= type | BSF_LOCAL;

  /* Mark code-mapping symbols with the appropriate ARM/Thumb
     attributes; data symbols need none.  */
  switch (state)
    {
    case MAP_ARM:
      THUMB_SET_FUNC (symbolP, 0);
      ARM_SET_THUMB (symbolP, 0);
      ARM_SET_INTERWORK (symbolP, support_interwork);
      break;

    case MAP_THUMB:
      THUMB_SET_FUNC (symbolP, 1);
      ARM_SET_THUMB (symbolP, 1);
      ARM_SET_INTERWORK (symbolP, support_interwork);
      break;

    case MAP_DATA:
    default:
      break;
    }

  /* Save the mapping symbols for future reference.  Also check that
     we do not place two mapping symbols at the same offset within a
     frag.  We'll handle overlap between frags in
     check_mapping_symbols.

     If .fill or other data filling directive generates zero sized data,
     the mapping symbol for the following code will have the same value
     as the one generated for the data filling directive.  In this case,
     we replace the old symbol with the new one at the same address.  */
  if (value == 0)
    {
      if (frag->tc_frag_data.first_map != NULL)
	{
	  know (S_GET_VALUE (frag->tc_frag_data.first_map) == 0);
	  symbol_remove (frag->tc_frag_data.first_map, &symbol_rootP, &symbol_lastP);
	}
      frag->tc_frag_data.first_map = symbolP;
    }

  if (frag->tc_frag_data.last_map != NULL)
    {
      know (S_GET_VALUE (frag->tc_frag_data.last_map) <= S_GET_VALUE (symbolP));
      /* Two mapping symbols at the same address: the newer one wins.  */
      if (S_GET_VALUE (frag->tc_frag_data.last_map) == S_GET_VALUE (symbolP))
	symbol_remove (frag->tc_frag_data.last_map, &symbol_rootP, &symbol_lastP);
    }
  frag->tc_frag_data.last_map = symbolP;
}
  2554. /* We must sometimes convert a region marked as code to data during
  2555. code alignment, if an odd number of bytes have to be padded. The
  2556. code mapping symbol is pushed to an aligned address. */
  2557. static void
  2558. insert_data_mapping_symbol (enum mstate state,
  2559. valueT value, fragS *frag, offsetT bytes)
  2560. {
  2561. /* If there was already a mapping symbol, remove it. */
  2562. if (frag->tc_frag_data.last_map != NULL
  2563. && S_GET_VALUE (frag->tc_frag_data.last_map) == frag->fr_address + value)
  2564. {
  2565. symbolS *symp = frag->tc_frag_data.last_map;
  2566. if (value == 0)
  2567. {
  2568. know (frag->tc_frag_data.first_map == symp);
  2569. frag->tc_frag_data.first_map = NULL;
  2570. }
  2571. frag->tc_frag_data.last_map = NULL;
  2572. symbol_remove (symp, &symbol_rootP, &symbol_lastP);
  2573. }
  2574. make_mapping_symbol (MAP_DATA, value, frag);
  2575. make_mapping_symbol (state, value + bytes, frag);
  2576. }
static void mapping_state_2 (enum mstate state, int max_chars);

/* Set the mapping state to STATE.  Only call this when about to
   emit some STATE bytes to the file.  */

/* True when the current segment's mapping state is FROM and we are
   switching to TO.  */
#define TRANSITION(from, to) (mapstate == (from) && state == (to))

void
mapping_state (enum mstate state)
{
  enum mstate mapstate = seg_info (now_seg)->tc_segment_info_data.mapstate;

  if (mapstate == state)
    /* The mapping symbol has already been emitted.
       There is nothing else to do.  */
    return;

  /* Record the alignment requirement first: it must happen even when
     the MAP_UNDEFINED -> MAP_DATA transition returns early below.  */
  if (state == MAP_ARM || state == MAP_THUMB)
    /* PR gas/12931
       All ARM instructions require 4-byte alignment.
       (Almost) all Thumb instructions require 2-byte alignment.

       When emitting instructions into any section, mark the section
       appropriately.

       Some Thumb instructions are alignment-sensitive modulo 4 bytes,
       but themselves require 2-byte alignment; this applies to some
       PC- relative forms.  However, these cases will involve implicit
       literal pool generation or an explicit .align >=2, both of
       which will cause the section to me marked with sufficient
       alignment.  Thus, we don't handle those cases here.  */
    record_alignment (now_seg, state == MAP_ARM ? 2 : 1);

  if (TRANSITION (MAP_UNDEFINED, MAP_DATA))
    /* This case will be evaluated later.  */
    return;

  mapping_state_2 (state, 0);
}
/* Same as mapping_state, but MAX_CHARS bytes have already been
   allocated.  Put the mapping symbol that far back.  */

static void
mapping_state_2 (enum mstate state, int max_chars)
{
  enum mstate mapstate = seg_info (now_seg)->tc_segment_info_data.mapstate;

  if (!SEG_NORMAL (now_seg))
    return;

  if (mapstate == state)
    /* The mapping symbol has already been emitted.
       There is nothing else to do.  */
    return;

  if (TRANSITION (MAP_UNDEFINED, MAP_ARM)
      || TRANSITION (MAP_UNDEFINED, MAP_THUMB))
    {
      struct frag * const frag_first = seg_info (now_seg)->frchainP->frch_root;
      const int add_symbol = (frag_now != frag_first) || (frag_now_fix () > 0);

      /* Emitting code into a section that was previously undefined:
	 anything already present before this point must be data.  */
      if (add_symbol)
	make_mapping_symbol (MAP_DATA, (valueT) 0, frag_first);
    }

  seg_info (now_seg)->tc_segment_info_data.mapstate = state;
  make_mapping_symbol (state, (valueT) frag_now_fix () - max_chars, frag_now);
}
  2630. #undef TRANSITION
  2631. #else
  2632. #define mapping_state(x) ((void)0)
  2633. #define mapping_state_2(x, y) ((void)0)
  2634. #endif
  2635. /* Find the real, Thumb encoded start of a Thumb function. */
  2636. #ifdef OBJ_COFF
  2637. static symbolS *
  2638. find_real_start (symbolS * symbolP)
  2639. {
  2640. char * real_start;
  2641. const char * name = S_GET_NAME (symbolP);
  2642. symbolS * new_target;
  2643. /* This definition must agree with the one in gcc/config/arm/thumb.c. */
  2644. #define STUB_NAME ".real_start_of"
  2645. if (name == NULL)
  2646. abort ();
  2647. /* The compiler may generate BL instructions to local labels because
  2648. it needs to perform a branch to a far away location. These labels
  2649. do not have a corresponding ".real_start_of" label. We check
  2650. both for S_IS_LOCAL and for a leading dot, to give a way to bypass
  2651. the ".real_start_of" convention for nonlocal branches. */
  2652. if (S_IS_LOCAL (symbolP) || name[0] == '.')
  2653. return symbolP;
  2654. real_start = concat (STUB_NAME, name, NULL);
  2655. new_target = symbol_find (real_start);
  2656. free (real_start);
  2657. if (new_target == NULL)
  2658. {
  2659. as_warn (_("Failed to find real start of function: %s\n"), name);
  2660. new_target = symbolP;
  2661. }
  2662. return new_target;
  2663. }
  2664. #endif
  2665. static void
  2666. opcode_select (int width)
  2667. {
  2668. switch (width)
  2669. {
  2670. case 16:
  2671. if (! thumb_mode)
  2672. {
  2673. if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v4t))
  2674. as_bad (_("selected processor does not support THUMB opcodes"));
  2675. thumb_mode = 1;
  2676. /* No need to force the alignment, since we will have been
  2677. coming from ARM mode, which is word-aligned. */
  2678. record_alignment (now_seg, 1);
  2679. }
  2680. break;
  2681. case 32:
  2682. if (thumb_mode)
  2683. {
  2684. if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v1))
  2685. as_bad (_("selected processor does not support ARM opcodes"));
  2686. thumb_mode = 0;
  2687. if (!need_pass_2)
  2688. frag_align (2, 0, 0);
  2689. record_alignment (now_seg, 1);
  2690. }
  2691. break;
  2692. default:
  2693. as_bad (_("invalid instruction size selected (%d)"), width);
  2694. }
  2695. }
/* Implement the ".arm" directive: select 32-bit ARM encoding.  */
static void
s_arm (int ignore ATTRIBUTE_UNUSED)
{
  opcode_select (32);
  demand_empty_rest_of_line ();
}
/* Implement the ".thumb" directive: select 16-bit Thumb encoding.  */
static void
s_thumb (int ignore ATTRIBUTE_UNUSED)
{
  opcode_select (16);
  demand_empty_rest_of_line ();
}
  2708. static void
  2709. s_code (int unused ATTRIBUTE_UNUSED)
  2710. {
  2711. int temp;
  2712. temp = get_absolute_expression ();
  2713. switch (temp)
  2714. {
  2715. case 16:
  2716. case 32:
  2717. opcode_select (temp);
  2718. break;
  2719. default:
  2720. as_bad (_("invalid operand to .code directive (%d) (expecting 16 or 32)"), temp);
  2721. }
  2722. }
/* Implement ".force_thumb": switch to Thumb mode unconditionally.  */
static void
s_force_thumb (int ignore ATTRIBUTE_UNUSED)
{
  /* If we are not already in thumb mode go into it, EVEN if
     the target processor does not support thumb instructions.
     This is used by gcc/config/arm/lib1funcs.asm for example
     to compile interworking support functions even if the
     target processor should not support interworking.  */
  if (! thumb_mode)
    {
      /* NOTE(review): the value 2 (rather than 1) appears to record that
	 Thumb was forced — confirm against other users of thumb_mode.  */
      thumb_mode = 2;
      record_alignment (now_seg, 1);
    }

  demand_empty_rest_of_line ();
}
/* Implement ".thumb_func": switch to Thumb mode and flag the next
   label as the start of a Thumb function.  */
static void
s_thumb_func (int ignore ATTRIBUTE_UNUSED)
{
  s_thumb (0);

  /* The following label is the name/address of the start of a Thumb function.
     We need to know this for the interworking support.  */
  label_is_thumb_function_name = true;
}
/* Perform a .set directive, but also mark the alias as
   being a thumb function.  */

static void
s_thumb_set (int equiv)
{
  /* XXX the following is a duplicate of the code for s_set() in read.c
     We cannot just call that code as we need to get at the symbol that
     is created.  */
  char * name;
  char delim;
  char * end_name;
  symbolS * symbolP;

  /* Especial apologies for the random logic:
     This just grew, and could be parsed much more simply!
     Dean - in haste.  */
  delim = get_symbol_name (& name);
  end_name = input_line_pointer;
  (void) restore_line_pointer (delim);

  if (*input_line_pointer != ',')
    {
      /* Temporarily NUL-terminate the name for the error message, then
	 put the delimiter character back.  */
      *end_name = 0;
      as_bad (_("expected comma after name \"%s\""), name);
      *end_name = delim;
      ignore_rest_of_line ();
      return;
    }

  input_line_pointer++;
  *end_name = 0;

  if (name[0] == '.' && name[1] == '\0')
    {
      /* XXX - this should not happen to .thumb_set.  */
      abort ();
    }

  /* Look the symbol up; if it does not exist yet, create it.  */
  if ((symbolP = symbol_find (name)) == NULL
      && (symbolP = md_undefined_symbol (name)) == NULL)
    {
#ifndef NO_LISTING
      /* When doing symbol listings, play games with dummy fragments living
	 outside the normal fragment chain to record the file and line info
	 for this symbol.  */
      if (listing & LISTING_SYMBOLS)
	{
	  extern struct list_info_struct * listing_tail;
	  fragS * dummy_frag = (fragS * ) xmalloc (sizeof (fragS));

	  memset (dummy_frag, 0, sizeof (fragS));
	  dummy_frag->fr_type = rs_fill;
	  dummy_frag->line = listing_tail;
	  symbolP = symbol_new (name, undefined_section, dummy_frag, 0);
	  dummy_frag->fr_symbol = symbolP;
	}
      else
#endif
	symbolP = symbol_new (name, undefined_section, &zero_address_frag, 0);

#ifdef OBJ_COFF
      /* "set" symbols are local unless otherwise specified.  */
      SF_SET_LOCAL (symbolP);
#endif /* OBJ_COFF */
    }				/* Make a new symbol.  */

  symbol_table_insert (symbolP);

  /* Undo the NUL-termination applied above.  */
  * end_name = delim;

  /* EQUIV (i.e. .thumb_set used as an equiv-style directive) forbids
     redefining an already-defined symbol, except register aliases.  */
  if (equiv
      && S_IS_DEFINED (symbolP)
      && S_GET_SEGMENT (symbolP) != reg_section)
    as_bad (_("symbol `%s' already defined"), S_GET_NAME (symbolP));

  /* Parse the value expression and assign it to the symbol.  */
  pseudo_set (symbolP);

  demand_empty_rest_of_line ();

  /* XXX Now we come to the Thumb specific bit of code.  */

  THUMB_SET_FUNC (symbolP, 1);
  ARM_SET_THUMB (symbolP, 1);
#if defined OBJ_ELF || defined OBJ_COFF
  ARM_SET_INTERWORK (symbolP, support_interwork);
#endif
}
  2819. /* Directives: Mode selection. */
  2820. /* .syntax [unified|divided] - choose the new unified syntax
  2821. (same for Arm and Thumb encoding, modulo slight differences in what
  2822. can be represented) or the old divergent syntax for each mode. */
  2823. static void
  2824. s_syntax (int unused ATTRIBUTE_UNUSED)
  2825. {
  2826. char *name, delim;
  2827. delim = get_symbol_name (& name);
  2828. if (!strcasecmp (name, "unified"))
  2829. unified_syntax = true;
  2830. else if (!strcasecmp (name, "divided"))
  2831. unified_syntax = false;
  2832. else
  2833. {
  2834. as_bad (_("unrecognized syntax mode \"%s\""), name);
  2835. return;
  2836. }
  2837. (void) restore_line_pointer (delim);
  2838. demand_empty_rest_of_line ();
  2839. }
/* Directives: sectioning and alignment.  */

/* Implement the ".bss" directive: switch to the BSS section.  */
static void
s_bss (int ignore ATTRIBUTE_UNUSED)
{
  /* We don't support putting frags in the BSS segment, we fake it by
     marking in_bss, then looking at s_skip for clues.  */
  subseg_set (bss_section, 0);
  demand_empty_rest_of_line ();

#ifdef md_elf_section_change_hook
  md_elf_section_change_hook ();
#endif
}
/* Implement ".even": align the location counter to a 2-byte boundary.  */
static void
s_even (int ignore ATTRIBUTE_UNUSED)
{
  /* Never make frag if expect extra pass.  */
  if (!need_pass_2)
    frag_align (1, 0, 0);

  record_alignment (now_seg, 1);

  demand_empty_rest_of_line ();
}
  2861. /* Directives: CodeComposer Studio. */
  2862. /* .ref (for CodeComposer Studio syntax only). */
  2863. static void
  2864. s_ccs_ref (int unused ATTRIBUTE_UNUSED)
  2865. {
  2866. if (codecomposer_syntax)
  2867. ignore_rest_of_line ();
  2868. else
  2869. as_bad (_(".ref pseudo-op only available with -mccs flag."));
  2870. }
  2871. /* If name is not NULL, then it is used for marking the beginning of a
  2872. function, whereas if it is NULL then it means the function end. */
  2873. static void
  2874. asmfunc_debug (const char * name)
  2875. {
  2876. static const char * last_name = NULL;
  2877. if (name != NULL)
  2878. {
  2879. gas_assert (last_name == NULL);
  2880. last_name = name;
  2881. if (debug_type == DEBUG_STABS)
  2882. stabs_generate_asm_func (name, name);
  2883. }
  2884. else
  2885. {
  2886. gas_assert (last_name != NULL);
  2887. if (debug_type == DEBUG_STABS)
  2888. stabs_generate_asm_endfunc (last_name, last_name);
  2889. last_name = NULL;
  2890. }
  2891. }
  2892. static void
  2893. s_ccs_asmfunc (int unused ATTRIBUTE_UNUSED)
  2894. {
  2895. if (codecomposer_syntax)
  2896. {
  2897. switch (asmfunc_state)
  2898. {
  2899. case OUTSIDE_ASMFUNC:
  2900. asmfunc_state = WAITING_ASMFUNC_NAME;
  2901. break;
  2902. case WAITING_ASMFUNC_NAME:
  2903. as_bad (_(".asmfunc repeated."));
  2904. break;
  2905. case WAITING_ENDASMFUNC:
  2906. as_bad (_(".asmfunc without function."));
  2907. break;
  2908. }
  2909. demand_empty_rest_of_line ();
  2910. }
  2911. else
  2912. as_bad (_(".asmfunc pseudo-op only available with -mccs flag."));
  2913. }
  2914. static void
  2915. s_ccs_endasmfunc (int unused ATTRIBUTE_UNUSED)
  2916. {
  2917. if (codecomposer_syntax)
  2918. {
  2919. switch (asmfunc_state)
  2920. {
  2921. case OUTSIDE_ASMFUNC:
  2922. as_bad (_(".endasmfunc without a .asmfunc."));
  2923. break;
  2924. case WAITING_ASMFUNC_NAME:
  2925. as_bad (_(".endasmfunc without function."));
  2926. break;
  2927. case WAITING_ENDASMFUNC:
  2928. asmfunc_state = OUTSIDE_ASMFUNC;
  2929. asmfunc_debug (NULL);
  2930. break;
  2931. }
  2932. demand_empty_rest_of_line ();
  2933. }
  2934. else
  2935. as_bad (_(".endasmfunc pseudo-op only available with -mccs flag."));
  2936. }
/* Implement the CCS ".def" directive, which forwards to s_globl.  */
static void
s_ccs_def (int name)
{
  if (codecomposer_syntax)
    s_globl (name);
  else
    as_bad (_(".def pseudo-op only available with -mccs flag."));
}
  2945. /* Directives: Literal pools. */
  2946. static literal_pool *
  2947. find_literal_pool (void)
  2948. {
  2949. literal_pool * pool;
  2950. for (pool = list_of_pools; pool != NULL; pool = pool->next)
  2951. {
  2952. if (pool->section == now_seg
  2953. && pool->sub_section == now_subseg)
  2954. break;
  2955. }
  2956. return pool;
  2957. }
  2958. static literal_pool *
  2959. find_or_make_literal_pool (void)
  2960. {
  2961. /* Next literal pool ID number. */
  2962. static unsigned int latest_pool_num = 1;
  2963. literal_pool * pool;
  2964. pool = find_literal_pool ();
  2965. if (pool == NULL)
  2966. {
  2967. /* Create a new pool. */
  2968. pool = XNEW (literal_pool);
  2969. if (! pool)
  2970. return NULL;
  2971. pool->next_free_entry = 0;
  2972. pool->section = now_seg;
  2973. pool->sub_section = now_subseg;
  2974. pool->next = list_of_pools;
  2975. pool->symbol = NULL;
  2976. pool->alignment = 2;
  2977. /* Add it to the list. */
  2978. list_of_pools = pool;
  2979. }
  2980. /* New pools, and emptied pools, will have a NULL symbol. */
  2981. if (pool->symbol == NULL)
  2982. {
  2983. pool->symbol = symbol_create (FAKE_LABEL_NAME, undefined_section,
  2984. &zero_address_frag, 0);
  2985. pool->id = latest_pool_num ++;
  2986. }
  2987. /* Done. */
  2988. return pool;
  2989. }
/* Add the literal in the global 'inst'
   structure to the relevant literal pool.  */

static int
add_to_lit_pool (unsigned int nbytes)
{
#define PADDING_SLOT 0x1
#define LIT_ENTRY_SIZE_MASK 0xFF
  literal_pool * pool;
  unsigned int entry, pool_size = 0;
  bool padding_slot_p = false;
  unsigned imm1 = 0;
  unsigned imm2 = 0;

  /* An 8-byte literal is stored as two 32-bit words; compute both
     halves up front, swapping them for big-endian targets.  */
  if (nbytes == 8)
    {
      imm1 = inst.operands[1].imm;
      imm2 = (inst.operands[1].regisimm ? inst.operands[1].reg
	      : inst.relocs[0].exp.X_unsigned ? 0
	      : ((bfd_int64_t) inst.operands[1].imm) >> 32);
      if (target_big_endian)
	{
	  imm1 = imm2;
	  imm2 = inst.operands[1].imm;
	}
    }

  pool = find_or_make_literal_pool ();

  /* Check if this literal value is already in the pool.  */
  for (entry = 0; entry < pool->next_free_entry; entry ++)
    {
      if (nbytes == 4)
	{
	  /* 4-byte case: match either an identical constant, or an
	     identical symbolic expression, of the same entry size.  */
	  if ((pool->literals[entry].X_op == inst.relocs[0].exp.X_op)
	      && (inst.relocs[0].exp.X_op == O_constant)
	      && (pool->literals[entry].X_add_number
		  == inst.relocs[0].exp.X_add_number)
	      && (pool->literals[entry].X_md == nbytes)
	      && (pool->literals[entry].X_unsigned
		  == inst.relocs[0].exp.X_unsigned))
	    break;

	  if ((pool->literals[entry].X_op == inst.relocs[0].exp.X_op)
	      && (inst.relocs[0].exp.X_op == O_symbol)
	      && (pool->literals[entry].X_add_number
		  == inst.relocs[0].exp.X_add_number)
	      && (pool->literals[entry].X_add_symbol
		  == inst.relocs[0].exp.X_add_symbol)
	      && (pool->literals[entry].X_op_symbol
		  == inst.relocs[0].exp.X_op_symbol)
	      && (pool->literals[entry].X_md == nbytes))
	    break;
	}
      /* 8-byte case: the value lives in two consecutive 4-byte slots
	 starting on an 8-byte boundary; both halves must match.  */
      else if ((nbytes == 8)
	       && !(pool_size & 0x7)
	       && ((entry + 1) != pool->next_free_entry)
	       && (pool->literals[entry].X_op == O_constant)
	       && (pool->literals[entry].X_add_number == (offsetT) imm1)
	       && (pool->literals[entry].X_unsigned
		   == inst.relocs[0].exp.X_unsigned)
	       && (pool->literals[entry + 1].X_op == O_constant)
	       && (pool->literals[entry + 1].X_add_number == (offsetT) imm2)
	       && (pool->literals[entry + 1].X_unsigned
		   == inst.relocs[0].exp.X_unsigned))
	break;

      /* A padding slot previously inserted for 8-byte alignment may be
	 reclaimed by a new 4-byte literal.  */
      padding_slot_p = ((pool->literals[entry].X_md >> 8) == PADDING_SLOT);
      if (padding_slot_p && (nbytes == 4))
	break;

      pool_size += 4;
    }

  /* Do we need to create a new entry?  */
  if (entry == pool->next_free_entry)
    {
      if (entry >= MAX_LITERAL_POOL_SIZE)
	{
	  inst.error = _("literal pool overflow");
	  return FAIL;
	}

      if (nbytes == 8)
	{
	  /* For 8-byte entries, we align to an 8-byte boundary,
	     and split it into two 4-byte entries, because on 32-bit
	     host, 8-byte constants are treated as big num, thus
	     saved in "generic_bignum" which will be overwritten
	     by later assignments.

	     We also need to make sure there is enough space for
	     the split.

	     We also check to make sure the literal operand is a
	     constant number.  */
	  if (!(inst.relocs[0].exp.X_op == O_constant
		|| inst.relocs[0].exp.X_op == O_big))
	    {
	      inst.error = _("invalid type for literal pool");
	      return FAIL;
	    }
	  else if (pool_size & 0x7)
	    {
	      /* Pool end is not 8-byte aligned: emit a 4-byte padding
		 slot first, marked with PADDING_SLOT so a later 4-byte
		 literal can reuse it.  */
	      if ((entry + 2) >= MAX_LITERAL_POOL_SIZE)
		{
		  inst.error = _("literal pool overflow");
		  return FAIL;
		}

	      pool->literals[entry] = inst.relocs[0].exp;
	      pool->literals[entry].X_op = O_constant;
	      pool->literals[entry].X_add_number = 0;
	      pool->literals[entry++].X_md = (PADDING_SLOT << 8) | 4;
	      pool->next_free_entry += 1;
	      pool_size += 4;
	    }
	  else if ((entry + 1) >= MAX_LITERAL_POOL_SIZE)
	    {
	      inst.error = _("literal pool overflow");
	      return FAIL;
	    }

	  /* First 32-bit word of the pair.  */
	  pool->literals[entry] = inst.relocs[0].exp;
	  pool->literals[entry].X_op = O_constant;
	  pool->literals[entry].X_add_number = imm1;
	  pool->literals[entry].X_unsigned = inst.relocs[0].exp.X_unsigned;
	  pool->literals[entry++].X_md = 4;

	  /* Second 32-bit word of the pair.  */
	  pool->literals[entry] = inst.relocs[0].exp;
	  pool->literals[entry].X_op = O_constant;
	  pool->literals[entry].X_add_number = imm2;
	  pool->literals[entry].X_unsigned = inst.relocs[0].exp.X_unsigned;
	  pool->literals[entry].X_md = 4;

	  pool->alignment = 3;
	  pool->next_free_entry += 1;
	}
      else
	{
	  pool->literals[entry] = inst.relocs[0].exp;
	  pool->literals[entry].X_md = 4;
	}

#ifdef OBJ_ELF
      /* PR ld/12974: Record the location of the first source line to reference
	 this entry in the literal pool.  If it turns out during linking that the
	 symbol does not exist we will be able to give an accurate line number for
	 the (first use of the) missing reference.  */
      if (debug_type == DEBUG_DWARF2)
	dwarf2_where (pool->locs + entry);
#endif
      pool->next_free_entry += 1;
    }
  else if (padding_slot_p)
    {
      /* Reuse an existing padding slot for this 4-byte literal.  */
      pool->literals[entry] = inst.relocs[0].exp;
      pool->literals[entry].X_md = nbytes;
    }

  /* Rewrite the instruction's operand as a reference into the pool:
     the pool symbol plus the byte offset of the chosen entry.  */
  inst.relocs[0].exp.X_op = O_symbol;
  inst.relocs[0].exp.X_add_number = pool_size;
  inst.relocs[0].exp.X_add_symbol = pool->symbol;

  return SUCCESS;
}
/* Hook called for a label written without a trailing colon.  In CCS
   mode, a label seen while waiting after .asmfunc supplies the
   function name.  Returns false when the label is rejected.  */
bool
tc_start_label_without_colon (void)
{
  bool ret = true;

  if (codecomposer_syntax && asmfunc_state == WAITING_ASMFUNC_NAME)
    {
      const char *label = input_line_pointer;

      /* Walk backwards to the start of the label, i.e. just past the
	 preceding end-of-line character.  */
      while (!is_end_of_line[(int) label[-1]])
	--label;

      if (*label == '.')
	{
	  as_bad (_("Invalid label '%s'"), label);
	  ret = false;
	}

      asmfunc_debug (label);

      asmfunc_state = WAITING_ENDASMFUNC;
    }

  return ret;
}
/* Can't use symbol_new here, so have to create a symbol and then at
   a later date assign it a value.  That's what these functions do.  */

static void
symbol_locate (symbolS * symbolP,
	       const char * name, /* It is copied, the caller can modify.  */
	       segT segment, /* Segment identifier (SEG_<something>).  */
	       valueT valu, /* Symbol value.  */
	       fragS * frag) /* Associated fragment.  */
{
  size_t name_length;
  char * preserved_copy_of_name;

  /* Copy NAME onto the notes obstack so the symbol owns stable storage
     independent of the caller's buffer.  */
  name_length = strlen (name) + 1; /* +1 for \0.  */
  obstack_grow (&notes, name, name_length);
  preserved_copy_of_name = (char *) obstack_finish (&notes);

#ifdef tc_canonicalize_symbol_name
  preserved_copy_of_name =
    tc_canonicalize_symbol_name (preserved_copy_of_name);
#endif

  S_SET_NAME (symbolP, preserved_copy_of_name);

  S_SET_SEGMENT (symbolP, segment);
  S_SET_VALUE (symbolP, valu);
  symbol_clear_list_pointers (symbolP);

  symbol_set_frag (symbolP, frag);

  /* Link to end of symbol chain.  */
  {
    extern int symbol_table_frozen;

    if (symbol_table_frozen)
      abort ();
  }

  symbol_append (symbolP, symbol_lastP, & symbol_rootP, & symbol_lastP);

  /* Run the per-object-format and per-target new-symbol hooks.  */
  obj_symbol_new_hook (symbolP);

#ifdef tc_symbol_new_hook
  tc_symbol_new_hook (symbolP);
#endif

#ifdef DEBUG_SYMS
  verify_symbol_chain (symbol_rootP, symbol_lastP);
#endif /* DEBUG_SYMS */
}
/* Implement ".ltorg": dump the current literal pool into the output at
   this point and mark the pool as empty.  */
static void
s_ltorg (int ignored ATTRIBUTE_UNUSED)
{
  unsigned int entry;
  literal_pool * pool;
  char sym_name[20];

  pool = find_literal_pool ();
  if (pool == NULL
      || pool->symbol == NULL
      || pool->next_free_entry == 0)
    return;

  /* Align pool as you have word accesses.
     Only make a frag if we have to.  */
  if (!need_pass_2)
    frag_align (pool->alignment, 0, 0);

  record_alignment (now_seg, 2);

#ifdef OBJ_ELF
  /* Pool contents are data: emit a MAP_DATA mapping symbol here.  */
  seg_info (now_seg)->tc_segment_info_data.mapstate = MAP_DATA;
  make_mapping_symbol (MAP_DATA, (valueT) frag_now_fix (), frag_now);
#endif

  /* The pool symbol gets a generated name containing a control
     character (\002) and the pool id.  */
  sprintf (sym_name, "$$lit_\002%x", pool->id);

  /* Give the previously-created pool symbol its final location.  */
  symbol_locate (pool->symbol, sym_name, now_seg,
		 (valueT) frag_now_fix (), frag_now);
  symbol_table_insert (pool->symbol);

  ARM_SET_THUMB (pool->symbol, thumb_mode);

#if defined OBJ_COFF || defined OBJ_ELF
  ARM_SET_INTERWORK (pool->symbol, support_interwork);
#endif

  for (entry = 0; entry < pool->next_free_entry; entry ++)
    {
#ifdef OBJ_ELF
      if (debug_type == DEBUG_DWARF2)
	dwarf2_gen_line_info (frag_now_fix (), pool->locs + entry);
#endif
      /* First output the expression in the instruction to the pool.  */
      emit_expr (&(pool->literals[entry]),
		 pool->literals[entry].X_md & LIT_ENTRY_SIZE_MASK);
    }

  /* Mark the pool as empty.  */
  pool->next_free_entry = 0;
  pool->symbol = NULL;
}
  3237. #ifdef OBJ_ELF
  3238. /* Forward declarations for functions below, in the MD interface
  3239. section. */
  3240. static void fix_new_arm (fragS *, int, short, expressionS *, int, int);
  3241. static valueT create_unwind_entry (int);
  3242. static void start_unwind_section (const segT, int);
  3243. static void add_unwind_opcode (valueT, int);
  3244. static void flush_pending_unwind (void);
/* Directives: Data.  */

/* Implement data directives (.word etc.) for ELF, additionally
   accepting a relocation-specifier suffix on symbolic operands.
   NBYTES is the size of each emitted value.  */
static void
s_arm_elf_cons (int nbytes)
{
  expressionS exp;

#ifdef md_flush_pending_output
  md_flush_pending_output ();
#endif

  if (is_it_end_of_statement ())
    {
      demand_empty_rest_of_line ();
      return;
    }

#ifdef md_cons_align
  md_cons_align (nbytes);
#endif

  /* These directives emit data; record that in the mapping state.  */
  mapping_state (MAP_DATA);
  do
    {
      int reloc;
      char *base = input_line_pointer;

      expression (& exp);

      if (exp.X_op != O_symbol)
	emit_expr (&exp, (unsigned int) nbytes);
      else
	{
	  /* A symbolic operand may carry a relocation suffix.  */
	  char *before_reloc = input_line_pointer;
	  reloc = parse_reloc (&input_line_pointer);
	  if (reloc == -1)
	    {
	      as_bad (_("unrecognized relocation suffix"));
	      ignore_rest_of_line ();
	      return;
	    }
	  else if (reloc == BFD_RELOC_UNUSED)
	    emit_expr (&exp, (unsigned int) nbytes);
	  else
	    {
	      reloc_howto_type *howto = (reloc_howto_type *)
		bfd_reloc_type_lookup (stdoutput,
				       (bfd_reloc_code_real_type) reloc);
	      int size = bfd_get_reloc_size (howto);

	      if (reloc == BFD_RELOC_ARM_PLT32)
		{
		  as_bad (_("(plt) is only valid on branch targets"));
		  reloc = BFD_RELOC_UNUSED;
		  size = 0;
		}

	      if (size > nbytes)
		as_bad (ngettext ("%s relocations do not fit in %d byte",
				  "%s relocations do not fit in %d bytes",
				  nbytes),
			howto->name, nbytes);
	      else
		{
		  /* We've parsed an expression stopping at O_symbol.
		     But there may be more expression left now that we
		     have parsed the relocation marker.  Parse it again.
		     XXX Surely there is a cleaner way to do this.  */
		  char *p = input_line_pointer;
		  int offset;
		  char *save_buf = XNEWVEC (char, input_line_pointer - base);

		  /* Save the operand text, splice out the relocation
		     marker, re-parse the full expression, then restore
		     the original buffer contents.  */
		  memcpy (save_buf, base, input_line_pointer - base);
		  memmove (base + (input_line_pointer - before_reloc),
			   base, before_reloc - base);

		  input_line_pointer = base + (input_line_pointer-before_reloc);
		  expression (&exp);
		  memcpy (base, save_buf, p - base);

		  offset = nbytes - size;
		  p = frag_more (nbytes);
		  memset (p, 0, nbytes);
		  fix_new_exp (frag_now, p - frag_now->fr_literal + offset,
			       size, &exp, 0, (enum bfd_reloc_code_real) reloc);
		  free (save_buf);
		}
	    }
	}
    }
  while (*input_line_pointer++ == ',');

  /* Put terminator back into stream.  */
  input_line_pointer --;
  demand_empty_rest_of_line ();
}
  3328. /* Emit an expression containing a 32-bit thumb instruction.
  3329. Implementation based on put_thumb32_insn. */
  3330. static void
  3331. emit_thumb32_expr (expressionS * exp)
  3332. {
  3333. expressionS exp_high = *exp;
  3334. exp_high.X_add_number = (unsigned long)exp_high.X_add_number >> 16;
  3335. emit_expr (& exp_high, (unsigned int) THUMB_SIZE);
  3336. exp->X_add_number &= 0xffff;
  3337. emit_expr (exp, (unsigned int) THUMB_SIZE);
  3338. }
  3339. /* Guess the instruction size based on the opcode. */
  3340. static int
  3341. thumb_insn_size (int opcode)
  3342. {
  3343. if ((unsigned int) opcode < 0xe800u)
  3344. return 2;
  3345. else if ((unsigned int) opcode >= 0xe8000000u)
  3346. return 4;
  3347. else
  3348. return 0;
  3349. }
/* Emit one .inst operand EXP as a raw instruction of NBYTES bytes
   (NBYTES == 0 means deduce the size from the opcode value).  Returns
   true when an instruction was emitted.  */
static bool
emit_insn (expressionS *exp, int nbytes)
{
  int size = 0;

  if (exp->X_op == O_constant)
    {
      size = nbytes;
      if (size == 0)
	size = thumb_insn_size (exp->X_add_number);

      if (size != 0)
	{
	  /* An explicit 16-bit width cannot hold a 32-bit value.  */
	  if (size == 2 && (unsigned int)exp->X_add_number > 0xffffu)
	    {
	      as_bad (_(".inst.n operand too big. "\
			"Use .inst.w instead"));
	      size = 0;
	    }
	  else
	    {
	      /* Keep the predication (IT) state machine in step: a
		 hand-encoded instruction still occupies a slot.  */
	      if (now_pred.state == AUTOMATIC_PRED_BLOCK)
		set_pred_insn_type_nonvoid (OUTSIDE_PRED_INSN, 0);
	      else
		set_pred_insn_type_nonvoid (NEUTRAL_IT_INSN, 0);

	      /* 32-bit Thumb instructions on little-endian targets are
		 stored as two half-words, high half first.  */
	      if (thumb_mode && (size > THUMB_SIZE) && !target_big_endian)
		emit_thumb32_expr (exp);
	      else
		emit_expr (exp, (unsigned int) size);

	      it_fsm_post_encode ();
	    }
	}
      else
	as_bad (_("cannot determine Thumb instruction size. " \
		  "Use .inst.n/.inst.w instead"));
    }
  else
    as_bad (_("constant expression required"));

  return (size != 0);
}
  3388. /* Like s_arm_elf_cons but do not use md_cons_align and
  3389. set the mapping state to MAP_ARM/MAP_THUMB. */
  3390. static void
  3391. s_arm_elf_inst (int nbytes)
  3392. {
  3393. if (is_it_end_of_statement ())
  3394. {
  3395. demand_empty_rest_of_line ();
  3396. return;
  3397. }
  3398. /* Calling mapping_state () here will not change ARM/THUMB,
  3399. but will ensure not to be in DATA state. */
  3400. if (thumb_mode)
  3401. mapping_state (MAP_THUMB);
  3402. else
  3403. {
  3404. if (nbytes != 0)
  3405. {
  3406. as_bad (_("width suffixes are invalid in ARM mode"));
  3407. ignore_rest_of_line ();
  3408. return;
  3409. }
  3410. nbytes = 4;
  3411. mapping_state (MAP_ARM);
  3412. }
  3413. dwarf2_emit_insn (0);
  3414. do
  3415. {
  3416. expressionS exp;
  3417. expression (& exp);
  3418. if (! emit_insn (& exp, nbytes))
  3419. {
  3420. ignore_rest_of_line ();
  3421. return;
  3422. }
  3423. }
  3424. while (*input_line_pointer++ == ',');
  3425. /* Put terminator back into stream. */
  3426. input_line_pointer --;
  3427. demand_empty_rest_of_line ();
  3428. }
  3429. /* Parse a .rel31 directive. */
  3430. static void
  3431. s_arm_rel31 (int ignored ATTRIBUTE_UNUSED)
  3432. {
  3433. expressionS exp;
  3434. char *p;
  3435. valueT highbit;
  3436. highbit = 0;
  3437. if (*input_line_pointer == '1')
  3438. highbit = 0x80000000;
  3439. else if (*input_line_pointer != '0')
  3440. as_bad (_("expected 0 or 1"));
  3441. input_line_pointer++;
  3442. if (*input_line_pointer != ',')
  3443. as_bad (_("missing comma"));
  3444. input_line_pointer++;
  3445. #ifdef md_flush_pending_output
  3446. md_flush_pending_output ();
  3447. #endif
  3448. #ifdef md_cons_align
  3449. md_cons_align (4);
  3450. #endif
  3451. mapping_state (MAP_DATA);
  3452. expression (&exp);
  3453. p = frag_more (4);
  3454. md_number_to_chars (p, highbit, 4);
  3455. fix_new_arm (frag_now, p - frag_now->fr_literal, 4, &exp, 1,
  3456. BFD_RELOC_ARM_PREL31);
  3457. demand_empty_rest_of_line ();
  3458. }
/* Directives: AEABI stack-unwind tables.  */

/* Parse an unwind_fnstart directive.  Simply records the current location.  */

static void
s_arm_unwind_fnstart (int ignored ATTRIBUTE_UNUSED)
{
  demand_empty_rest_of_line ();

  /* Nested .fnstart directives are rejected.  */
  if (unwind.proc_start)
    {
      as_bad (_("duplicate .fnstart directive"));
      return;
    }

  /* Mark the start of the function.  */
  unwind.proc_start = expr_build_dot ();

  /* Reset the rest of the unwind info.  */
  unwind.opcode_count = 0;
  unwind.table_entry = NULL;
  unwind.personality_routine = NULL;
  unwind.personality_index = -1;
  unwind.frame_size = 0;
  unwind.fp_offset = 0;
  unwind.fp_reg = REG_SP;
  unwind.fp_used = 0;
  unwind.sp_restored = 0;
}
/* Parse a handlerdata directive.  Creates the exception handling table entry
   for the function.  */

static void
s_arm_unwind_handlerdata (int ignored ATTRIBUTE_UNUSED)
{
  demand_empty_rest_of_line ();

  if (!unwind.proc_start)
    as_bad (MISSING_FNSTART);

  /* Only one .handlerdata per function.  */
  if (unwind.table_entry)
    as_bad (_("duplicate .handlerdata directive"));

  create_unwind_entry (1);
}
/* Parse an unwind_fnend directive.  Generates the index table entry.  */

static void
s_arm_unwind_fnend (int ignored ATTRIBUTE_UNUSED)
{
  long where;
  char *ptr;
  valueT val;
  unsigned int marked_pr_dependency;

  demand_empty_rest_of_line ();

  if (!unwind.proc_start)
    {
      as_bad (_(".fnend directive without .fnstart"));
      return;
    }

  /* Add eh table entry.  */
  if (unwind.table_entry == NULL)
    val = create_unwind_entry (0);
  else
    val = 0;

  /* Add index table entry.  This is two words.  */
  start_unwind_section (unwind.saved_seg, 1);
  frag_align (2, 0, 0);
  record_alignment (now_seg, 2);

  ptr = frag_more (8);
  memset (ptr, 0, 8);
  where = frag_now_fix () - 8;

  /* Self relative offset of the function start.  */
  fix_new (frag_now, where, 4, unwind.proc_start, 0, 1,
	   BFD_RELOC_ARM_PREL31);

  /* Indicate dependency on EHABI-defined personality routines to the
     linker, if it hasn't been done already.  */
  marked_pr_dependency
    = seg_info (now_seg)->tc_segment_info_data.marked_pr_dependency;
  if (unwind.personality_index >= 0 && unwind.personality_index < 3
      && !(marked_pr_dependency & (1 << unwind.personality_index)))
    {
      static const char *const name[] =
	{
	  "__aeabi_unwind_cpp_pr0",
	  "__aeabi_unwind_cpp_pr1",
	  "__aeabi_unwind_cpp_pr2"
	};
      /* A zero-size BFD_RELOC_NONE fix records the reference without
	 emitting any bytes.  */
      symbolS *pr = symbol_find_or_make (name[unwind.personality_index]);
      fix_new (frag_now, where, 0, pr, 0, 1, BFD_RELOC_NONE);
      seg_info (now_seg)->tc_segment_info_data.marked_pr_dependency
	|= 1 << unwind.personality_index;
    }

  if (val)
    /* Inline exception table entry.  */
    md_number_to_chars (ptr + 4, val, 4);
  else
    /* Self relative offset of the table entry.  */
    fix_new (frag_now, where + 4, 4, unwind.table_entry, 0, 1,
	     BFD_RELOC_ARM_PREL31);

  /* Restore the original section.  */
  subseg_set (unwind.saved_seg, unwind.saved_subseg);

  unwind.proc_start = NULL;
}
/* Parse an unwind_cantunwind directive.  */

static void
s_arm_unwind_cantunwind (int ignored ATTRIBUTE_UNUSED)
{
  demand_empty_rest_of_line ();

  if (!unwind.proc_start)
    as_bad (MISSING_FNSTART);

  /* cantunwind is mutually exclusive with an explicit personality.  */
  if (unwind.personality_routine || unwind.personality_index != -1)
    as_bad (_("personality routine specified for cantunwind frame"));

  /* Index -2 is the internal marker for a cantunwind frame.  */
  unwind.personality_index = -2;
}
  3564. /* Parse a personalityindex directive. */
  3565. static void
  3566. s_arm_unwind_personalityindex (int ignored ATTRIBUTE_UNUSED)
  3567. {
  3568. expressionS exp;
  3569. if (!unwind.proc_start)
  3570. as_bad (MISSING_FNSTART);
  3571. if (unwind.personality_routine || unwind.personality_index != -1)
  3572. as_bad (_("duplicate .personalityindex directive"));
  3573. expression (&exp);
  3574. if (exp.X_op != O_constant
  3575. || exp.X_add_number < 0 || exp.X_add_number > 15)
  3576. {
  3577. as_bad (_("bad personality routine number"));
  3578. ignore_rest_of_line ();
  3579. return;
  3580. }
  3581. unwind.personality_index = exp.X_add_number;
  3582. demand_empty_rest_of_line ();
  3583. }
  3584. /* Parse a personality directive. */
  3585. static void
  3586. s_arm_unwind_personality (int ignored ATTRIBUTE_UNUSED)
  3587. {
  3588. char *name, *p, c;
  3589. if (!unwind.proc_start)
  3590. as_bad (MISSING_FNSTART);
  3591. if (unwind.personality_routine || unwind.personality_index != -1)
  3592. as_bad (_("duplicate .personality directive"));
  3593. c = get_symbol_name (& name);
  3594. p = input_line_pointer;
  3595. if (c == '"')
  3596. ++ input_line_pointer;
  3597. unwind.personality_routine = symbol_find_or_make (name);
  3598. *p = c;
  3599. demand_empty_rest_of_line ();
  3600. }
  3601. /* Parse a directive saving pseudo registers. */
  3602. static void
  3603. s_arm_unwind_save_pseudo (void)
  3604. {
  3605. valueT op;
  3606. long range;
  3607. range = parse_reg_list (&input_line_pointer, REGLIST_PSEUDO);
  3608. if (range == FAIL)
  3609. {
  3610. as_bad (_("expected pseudo register list"));
  3611. ignore_rest_of_line ();
  3612. return;
  3613. }
  3614. demand_empty_rest_of_line ();
  3615. if (range & (1 << 9))
  3616. {
  3617. /* Opcode for restoring RA_AUTH_CODE. */
  3618. op = 0xb4;
  3619. add_unwind_opcode (op, 1);
  3620. }
  3621. }
/* Parse a directive saving core registers.  Emits EHABI pop opcodes for
   the registers named in the list and accounts for the pushed bytes in
   unwind.frame_size.  */

static void
s_arm_unwind_save_core (void)
{
  valueT op;
  long range;   /* Bit N set means rN is in the register list.  */
  int n;

  range = parse_reg_list (&input_line_pointer, REGLIST_RN);
  if (range == FAIL)
    {
      as_bad (_("expected register list"));
      ignore_rest_of_line ();
      return;
    }

  demand_empty_rest_of_line ();

  /* Turn .unwind_movsp ip followed by .unwind_save {..., ip, ...}
     into .unwind_save {..., sp...}.  We aren't bothered about the value of
     ip because it is clobbered by calls.  */
  if (unwind.sp_restored && unwind.fp_reg == 12
      && (range & 0x3000) == 0x1000)
    {
      /* Drop the opcode emitted for the earlier movsp and swap the
	 ip (bit 12) entry for sp (bit 13).  */
      unwind.opcode_count--;
      unwind.sp_restored = 0;
      range = (range | 0x2000) & ~0x1000;
      unwind.pending_offset = 0;
    }

  /* Pop r4-r15.  */
  if (range & 0xfff0)
    {
      /* See if we can use the short opcodes.  These pop a block of up to 8
	 registers starting with r4, plus maybe r14.  */
      for (n = 0; n < 8; n++)
	{
	  /* Break at the first non-saved register.  */
	  if ((range & (1 << (n + 4))) == 0)
	    break;
	}

      /* See if there are any other bits set (i.e. registers outside a
	 contiguous r4..r4+n-1 block, ignoring r14 which the short form
	 can encode).  */
      if (n == 0 || (range & (0xfff0 << n) & 0xbff0) != 0)
	{
	  /* Use the long form.  */
	  op = 0x8000 | ((range >> 4) & 0xfff);
	  add_unwind_opcode (op, 2);
	}
      else
	{
	  /* Use the short form.  */
	  if (range & 0x4000)
	    op = 0xa8; /* Pop r14.  */
	  else
	    op = 0xa0; /* Do not pop r14.  */
	  op |= (n - 1);
	  add_unwind_opcode (op, 1);
	}
    }

  /* Pop r0-r3.  */
  if (range & 0xf)
    {
      op = 0xb100 | (range & 0xf);
      add_unwind_opcode (op, 2);
    }

  /* Record the number of bytes pushed.  */
  for (n = 0; n < 16; n++)
    {
      if (range & (1 << n))
	unwind.frame_size += 4;
    }
}
  3690. /* Parse a directive saving FPA registers. */
  3691. static void
  3692. s_arm_unwind_save_fpa (int reg)
  3693. {
  3694. expressionS exp;
  3695. int num_regs;
  3696. valueT op;
  3697. /* Get Number of registers to transfer. */
  3698. if (skip_past_comma (&input_line_pointer) != FAIL)
  3699. expression (&exp);
  3700. else
  3701. exp.X_op = O_illegal;
  3702. if (exp.X_op != O_constant)
  3703. {
  3704. as_bad (_("expected , <constant>"));
  3705. ignore_rest_of_line ();
  3706. return;
  3707. }
  3708. num_regs = exp.X_add_number;
  3709. if (num_regs < 1 || num_regs > 4)
  3710. {
  3711. as_bad (_("number of registers must be in the range [1:4]"));
  3712. ignore_rest_of_line ();
  3713. return;
  3714. }
  3715. demand_empty_rest_of_line ();
  3716. if (reg == 4)
  3717. {
  3718. /* Short form. */
  3719. op = 0xb4 | (num_regs - 1);
  3720. add_unwind_opcode (op, 1);
  3721. }
  3722. else
  3723. {
  3724. /* Long form. */
  3725. op = 0xc800 | (reg << 4) | (num_regs - 1);
  3726. add_unwind_opcode (op, 2);
  3727. }
  3728. unwind.frame_size += num_regs * 12;
  3729. }
/* Parse a directive saving VFP registers for ARMv6 and above.  Splits
   the list at D16: D16-D31 get one opcode, D0-D15 another.  */

static void
s_arm_unwind_save_vfp_armv6 (void)
{
  int count;
  unsigned int start;      /* First D register in the parsed list.  */
  valueT op;
  int num_vfpv3_regs = 0;  /* How many of the registers are D16-D31.  */
  int num_regs_below_16;
  bool partial_match;

  count = parse_vfp_reg_list (&input_line_pointer, &start, REGLIST_VFP_D,
			      &partial_match);
  if (count == FAIL)
    {
      as_bad (_("expected register list"));
      ignore_rest_of_line ();
      return;
    }

  demand_empty_rest_of_line ();

  /* We always generate FSTMD/FLDMD-style unwinding opcodes (rather
     than FSTMX/FLDMX-style ones).  */

  /* Generate opcode for (VFPv3) registers numbered in the range 16 .. 31.  */
  if (start >= 16)
    num_vfpv3_regs = count;
  else if (start + count > 16)
    num_vfpv3_regs = start + count - 16;

  if (num_vfpv3_regs > 0)
    {
      /* The opcode encodes the first register relative to D16.  */
      int start_offset = start > 16 ? start - 16 : 0;
      op = 0xc800 | (start_offset << 4) | (num_vfpv3_regs - 1);
      add_unwind_opcode (op, 2);
    }

  /* Generate opcode for registers numbered in the range 0 .. 15.  */
  num_regs_below_16 = num_vfpv3_regs > 0 ? 16 - (int) start : count;
  gas_assert (num_regs_below_16 + num_vfpv3_regs == count);
  if (num_regs_below_16 > 0)
    {
      op = 0xc900 | (start << 4) | (num_regs_below_16 - 1);
      add_unwind_opcode (op, 2);
    }

  /* Each D register occupies 8 bytes on the stack.  */
  unwind.frame_size += count * 8;
}
  3772. /* Parse a directive saving VFP registers for pre-ARMv6. */
  3773. static void
  3774. s_arm_unwind_save_vfp (void)
  3775. {
  3776. int count;
  3777. unsigned int reg;
  3778. valueT op;
  3779. bool partial_match;
  3780. count = parse_vfp_reg_list (&input_line_pointer, &reg, REGLIST_VFP_D,
  3781. &partial_match);
  3782. if (count == FAIL)
  3783. {
  3784. as_bad (_("expected register list"));
  3785. ignore_rest_of_line ();
  3786. return;
  3787. }
  3788. demand_empty_rest_of_line ();
  3789. if (reg == 8)
  3790. {
  3791. /* Short form. */
  3792. op = 0xb8 | (count - 1);
  3793. add_unwind_opcode (op, 1);
  3794. }
  3795. else
  3796. {
  3797. /* Long form. */
  3798. op = 0xb300 | (reg << 4) | (count - 1);
  3799. add_unwind_opcode (op, 2);
  3800. }
  3801. unwind.frame_size += count * 8 + 4;
  3802. }
  3803. /* Parse a directive saving iWMMXt data registers. */
  3804. static void
  3805. s_arm_unwind_save_mmxwr (void)
  3806. {
  3807. int reg;
  3808. int hi_reg;
  3809. int i;
  3810. unsigned mask = 0;
  3811. valueT op;
  3812. if (*input_line_pointer == '{')
  3813. input_line_pointer++;
  3814. do
  3815. {
  3816. reg = arm_reg_parse (&input_line_pointer, REG_TYPE_MMXWR);
  3817. if (reg == FAIL)
  3818. {
  3819. as_bad ("%s", _(reg_expected_msgs[REG_TYPE_MMXWR]));
  3820. goto error;
  3821. }
  3822. if (mask >> reg)
  3823. as_tsktsk (_("register list not in ascending order"));
  3824. mask |= 1 << reg;
  3825. if (*input_line_pointer == '-')
  3826. {
  3827. input_line_pointer++;
  3828. hi_reg = arm_reg_parse (&input_line_pointer, REG_TYPE_MMXWR);
  3829. if (hi_reg == FAIL)
  3830. {
  3831. as_bad ("%s", _(reg_expected_msgs[REG_TYPE_MMXWR]));
  3832. goto error;
  3833. }
  3834. else if (reg >= hi_reg)
  3835. {
  3836. as_bad (_("bad register range"));
  3837. goto error;
  3838. }
  3839. for (; reg < hi_reg; reg++)
  3840. mask |= 1 << reg;
  3841. }
  3842. }
  3843. while (skip_past_comma (&input_line_pointer) != FAIL);
  3844. skip_past_char (&input_line_pointer, '}');
  3845. demand_empty_rest_of_line ();
  3846. /* Generate any deferred opcodes because we're going to be looking at
  3847. the list. */
  3848. flush_pending_unwind ();
  3849. for (i = 0; i < 16; i++)
  3850. {
  3851. if (mask & (1 << i))
  3852. unwind.frame_size += 8;
  3853. }
  3854. /* Attempt to combine with a previous opcode. We do this because gcc
  3855. likes to output separate unwind directives for a single block of
  3856. registers. */
  3857. if (unwind.opcode_count > 0)
  3858. {
  3859. i = unwind.opcodes[unwind.opcode_count - 1];
  3860. if ((i & 0xf8) == 0xc0)
  3861. {
  3862. i &= 7;
  3863. /* Only merge if the blocks are contiguous. */
  3864. if (i < 6)
  3865. {
  3866. if ((mask & 0xfe00) == (1 << 9))
  3867. {
  3868. mask |= ((1 << (i + 11)) - 1) & 0xfc00;
  3869. unwind.opcode_count--;
  3870. }
  3871. }
  3872. else if (i == 6 && unwind.opcode_count >= 2)
  3873. {
  3874. i = unwind.opcodes[unwind.opcode_count - 2];
  3875. reg = i >> 4;
  3876. i &= 0xf;
  3877. op = 0xffff << (reg - 1);
  3878. if (reg > 0
  3879. && ((mask & op) == (1u << (reg - 1))))
  3880. {
  3881. op = (1 << (reg + i + 1)) - 1;
  3882. op &= ~((1 << reg) - 1);
  3883. mask |= op;
  3884. unwind.opcode_count -= 2;
  3885. }
  3886. }
  3887. }
  3888. }
  3889. hi_reg = 15;
  3890. /* We want to generate opcodes in the order the registers have been
  3891. saved, ie. descending order. */
  3892. for (reg = 15; reg >= -1; reg--)
  3893. {
  3894. /* Save registers in blocks. */
  3895. if (reg < 0
  3896. || !(mask & (1 << reg)))
  3897. {
  3898. /* We found an unsaved reg. Generate opcodes to save the
  3899. preceding block. */
  3900. if (reg != hi_reg)
  3901. {
  3902. if (reg == 9)
  3903. {
  3904. /* Short form. */
  3905. op = 0xc0 | (hi_reg - 10);
  3906. add_unwind_opcode (op, 1);
  3907. }
  3908. else
  3909. {
  3910. /* Long form. */
  3911. op = 0xc600 | ((reg + 1) << 4) | ((hi_reg - reg) - 1);
  3912. add_unwind_opcode (op, 2);
  3913. }
  3914. }
  3915. hi_reg = reg - 1;
  3916. }
  3917. }
  3918. return;
  3919. error:
  3920. ignore_rest_of_line ();
  3921. }
  3922. static void
  3923. s_arm_unwind_save_mmxwcg (void)
  3924. {
  3925. int reg;
  3926. int hi_reg;
  3927. unsigned mask = 0;
  3928. valueT op;
  3929. if (*input_line_pointer == '{')
  3930. input_line_pointer++;
  3931. skip_whitespace (input_line_pointer);
  3932. do
  3933. {
  3934. reg = arm_reg_parse (&input_line_pointer, REG_TYPE_MMXWCG);
  3935. if (reg == FAIL)
  3936. {
  3937. as_bad ("%s", _(reg_expected_msgs[REG_TYPE_MMXWCG]));
  3938. goto error;
  3939. }
  3940. reg -= 8;
  3941. if (mask >> reg)
  3942. as_tsktsk (_("register list not in ascending order"));
  3943. mask |= 1 << reg;
  3944. if (*input_line_pointer == '-')
  3945. {
  3946. input_line_pointer++;
  3947. hi_reg = arm_reg_parse (&input_line_pointer, REG_TYPE_MMXWCG);
  3948. if (hi_reg == FAIL)
  3949. {
  3950. as_bad ("%s", _(reg_expected_msgs[REG_TYPE_MMXWCG]));
  3951. goto error;
  3952. }
  3953. else if (reg >= hi_reg)
  3954. {
  3955. as_bad (_("bad register range"));
  3956. goto error;
  3957. }
  3958. for (; reg < hi_reg; reg++)
  3959. mask |= 1 << reg;
  3960. }
  3961. }
  3962. while (skip_past_comma (&input_line_pointer) != FAIL);
  3963. skip_past_char (&input_line_pointer, '}');
  3964. demand_empty_rest_of_line ();
  3965. /* Generate any deferred opcodes because we're going to be looking at
  3966. the list. */
  3967. flush_pending_unwind ();
  3968. for (reg = 0; reg < 16; reg++)
  3969. {
  3970. if (mask & (1 << reg))
  3971. unwind.frame_size += 4;
  3972. }
  3973. op = 0xc700 | mask;
  3974. add_unwind_opcode (op, 2);
  3975. return;
  3976. error:
  3977. ignore_rest_of_line ();
  3978. }
  3979. /* Parse an unwind_save directive.
  3980. If the argument is non-zero, this is a .vsave directive. */
  3981. static void
  3982. s_arm_unwind_save (int arch_v6)
  3983. {
  3984. char *peek;
  3985. struct reg_entry *reg;
  3986. bool had_brace = false;
  3987. if (!unwind.proc_start)
  3988. as_bad (MISSING_FNSTART);
  3989. /* Figure out what sort of save we have. */
  3990. peek = input_line_pointer;
  3991. if (*peek == '{')
  3992. {
  3993. had_brace = true;
  3994. peek++;
  3995. }
  3996. reg = arm_reg_parse_multi (&peek);
  3997. if (!reg)
  3998. {
  3999. as_bad (_("register expected"));
  4000. ignore_rest_of_line ();
  4001. return;
  4002. }
  4003. switch (reg->type)
  4004. {
  4005. case REG_TYPE_FN:
  4006. if (had_brace)
  4007. {
  4008. as_bad (_("FPA .unwind_save does not take a register list"));
  4009. ignore_rest_of_line ();
  4010. return;
  4011. }
  4012. input_line_pointer = peek;
  4013. s_arm_unwind_save_fpa (reg->number);
  4014. return;
  4015. case REG_TYPE_RN:
  4016. s_arm_unwind_save_core ();
  4017. return;
  4018. case REG_TYPE_PSEUDO:
  4019. s_arm_unwind_save_pseudo ();
  4020. return;
  4021. case REG_TYPE_VFD:
  4022. if (arch_v6)
  4023. s_arm_unwind_save_vfp_armv6 ();
  4024. else
  4025. s_arm_unwind_save_vfp ();
  4026. return;
  4027. case REG_TYPE_MMXWR:
  4028. s_arm_unwind_save_mmxwr ();
  4029. return;
  4030. case REG_TYPE_MMXWCG:
  4031. s_arm_unwind_save_mmxwcg ();
  4032. return;
  4033. default:
  4034. as_bad (_(".unwind_save does not support this kind of register"));
  4035. ignore_rest_of_line ();
  4036. }
  4037. }
  4038. /* Parse an unwind_movsp directive. */
  4039. static void
  4040. s_arm_unwind_movsp (int ignored ATTRIBUTE_UNUSED)
  4041. {
  4042. int reg;
  4043. valueT op;
  4044. int offset;
  4045. if (!unwind.proc_start)
  4046. as_bad (MISSING_FNSTART);
  4047. reg = arm_reg_parse (&input_line_pointer, REG_TYPE_RN);
  4048. if (reg == FAIL)
  4049. {
  4050. as_bad ("%s", _(reg_expected_msgs[REG_TYPE_RN]));
  4051. ignore_rest_of_line ();
  4052. return;
  4053. }
  4054. /* Optional constant. */
  4055. if (skip_past_comma (&input_line_pointer) != FAIL)
  4056. {
  4057. if (immediate_for_directive (&offset) == FAIL)
  4058. return;
  4059. }
  4060. else
  4061. offset = 0;
  4062. demand_empty_rest_of_line ();
  4063. if (reg == REG_SP || reg == REG_PC)
  4064. {
  4065. as_bad (_("SP and PC not permitted in .unwind_movsp directive"));
  4066. return;
  4067. }
  4068. if (unwind.fp_reg != REG_SP)
  4069. as_bad (_("unexpected .unwind_movsp directive"));
  4070. /* Generate opcode to restore the value. */
  4071. op = 0x90 | reg;
  4072. add_unwind_opcode (op, 1);
  4073. /* Record the information for later. */
  4074. unwind.fp_reg = reg;
  4075. unwind.fp_offset = unwind.frame_size - offset;
  4076. unwind.sp_restored = 1;
  4077. }
  4078. /* Parse an unwind_pad directive. */
  4079. static void
  4080. s_arm_unwind_pad (int ignored ATTRIBUTE_UNUSED)
  4081. {
  4082. int offset;
  4083. if (!unwind.proc_start)
  4084. as_bad (MISSING_FNSTART);
  4085. if (immediate_for_directive (&offset) == FAIL)
  4086. return;
  4087. if (offset & 3)
  4088. {
  4089. as_bad (_("stack increment must be multiple of 4"));
  4090. ignore_rest_of_line ();
  4091. return;
  4092. }
  4093. /* Don't generate any opcodes, just record the details for later. */
  4094. unwind.frame_size += offset;
  4095. unwind.pending_offset += offset;
  4096. demand_empty_rest_of_line ();
  4097. }
  4098. /* Parse an unwind_setfp directive. */
  4099. static void
  4100. s_arm_unwind_setfp (int ignored ATTRIBUTE_UNUSED)
  4101. {
  4102. int sp_reg;
  4103. int fp_reg;
  4104. int offset;
  4105. if (!unwind.proc_start)
  4106. as_bad (MISSING_FNSTART);
  4107. fp_reg = arm_reg_parse (&input_line_pointer, REG_TYPE_RN);
  4108. if (skip_past_comma (&input_line_pointer) == FAIL)
  4109. sp_reg = FAIL;
  4110. else
  4111. sp_reg = arm_reg_parse (&input_line_pointer, REG_TYPE_RN);
  4112. if (fp_reg == FAIL || sp_reg == FAIL)
  4113. {
  4114. as_bad (_("expected <reg>, <reg>"));
  4115. ignore_rest_of_line ();
  4116. return;
  4117. }
  4118. /* Optional constant. */
  4119. if (skip_past_comma (&input_line_pointer) != FAIL)
  4120. {
  4121. if (immediate_for_directive (&offset) == FAIL)
  4122. return;
  4123. }
  4124. else
  4125. offset = 0;
  4126. demand_empty_rest_of_line ();
  4127. if (sp_reg != REG_SP && sp_reg != unwind.fp_reg)
  4128. {
  4129. as_bad (_("register must be either sp or set by a previous"
  4130. "unwind_movsp directive"));
  4131. return;
  4132. }
  4133. /* Don't generate any opcodes, just record the information for later. */
  4134. unwind.fp_reg = fp_reg;
  4135. unwind.fp_used = 1;
  4136. if (sp_reg == REG_SP)
  4137. unwind.fp_offset = unwind.frame_size - offset;
  4138. else
  4139. unwind.fp_offset -= offset;
  4140. }
  4141. /* Parse an unwind_raw directive. */
  4142. static void
  4143. s_arm_unwind_raw (int ignored ATTRIBUTE_UNUSED)
  4144. {
  4145. expressionS exp;
  4146. /* This is an arbitrary limit. */
  4147. unsigned char op[16];
  4148. int count;
  4149. if (!unwind.proc_start)
  4150. as_bad (MISSING_FNSTART);
  4151. expression (&exp);
  4152. if (exp.X_op == O_constant
  4153. && skip_past_comma (&input_line_pointer) != FAIL)
  4154. {
  4155. unwind.frame_size += exp.X_add_number;
  4156. expression (&exp);
  4157. }
  4158. else
  4159. exp.X_op = O_illegal;
  4160. if (exp.X_op != O_constant)
  4161. {
  4162. as_bad (_("expected <offset>, <opcode>"));
  4163. ignore_rest_of_line ();
  4164. return;
  4165. }
  4166. count = 0;
  4167. /* Parse the opcode. */
  4168. for (;;)
  4169. {
  4170. if (count >= 16)
  4171. {
  4172. as_bad (_("unwind opcode too long"));
  4173. ignore_rest_of_line ();
  4174. }
  4175. if (exp.X_op != O_constant || exp.X_add_number & ~0xff)
  4176. {
  4177. as_bad (_("invalid unwind opcode"));
  4178. ignore_rest_of_line ();
  4179. return;
  4180. }
  4181. op[count++] = exp.X_add_number;
  4182. /* Parse the next byte. */
  4183. if (skip_past_comma (&input_line_pointer) == FAIL)
  4184. break;
  4185. expression (&exp);
  4186. }
  4187. /* Add the opcode bytes in reverse order. */
  4188. while (count--)
  4189. add_unwind_opcode (op[count], 1);
  4190. demand_empty_rest_of_line ();
  4191. }
  4192. /* Parse a .eabi_attribute directive. */
  4193. static void
  4194. s_arm_eabi_attribute (int ignored ATTRIBUTE_UNUSED)
  4195. {
  4196. int tag = obj_elf_vendor_attribute (OBJ_ATTR_PROC);
  4197. if (tag >= 0 && tag < NUM_KNOWN_OBJ_ATTRIBUTES)
  4198. attributes_set_explicitly[tag] = 1;
  4199. }
/* Emit a tls fix for the symbol.  Parses one expression and attaches a
   TLS descriptor-sequence relocation (ARM or Thumb flavour, depending
   on the current instruction mode) at the current output position.  */

static void
s_arm_tls_descseq (int ignored ATTRIBUTE_UNUSED)
{
  char *p;
  expressionS exp;

#ifdef md_flush_pending_output
  md_flush_pending_output ();
#endif

#ifdef md_cons_align
  md_cons_align (4);
#endif

  /* Since we're just labelling the code, there's no need to define a
     mapping symbol.  */
  expression (&exp);
  /* P is the current end of the frag's obstack, i.e. where the fix
     applies within frag_now.  */
  p = obstack_next_free (&frchain_now->frch_obstack);
  fix_new_arm (frag_now, p - frag_now->fr_literal, 4, &exp, 0,
	       thumb_mode ? BFD_RELOC_ARM_THM_TLS_DESCSEQ
	       : BFD_RELOC_ARM_TLS_DESCSEQ);
}
  4220. #endif /* OBJ_ELF */
  4221. static void s_arm_arch (int);
  4222. static void s_arm_object_arch (int);
  4223. static void s_arm_cpu (int);
  4224. static void s_arm_fpu (int);
  4225. static void s_arm_arch_extension (int);
  4226. #ifdef TE_PE
  4227. static void
  4228. pe_directive_secrel (int dummy ATTRIBUTE_UNUSED)
  4229. {
  4230. expressionS exp;
  4231. do
  4232. {
  4233. expression (&exp);
  4234. if (exp.X_op == O_symbol)
  4235. exp.X_op = O_secrel;
  4236. emit_expr (&exp, 4);
  4237. }
  4238. while (*input_line_pointer++ == ',');
  4239. input_line_pointer--;
  4240. demand_empty_rest_of_line ();
  4241. }
  4242. #endif /* TE_PE */
  4243. int
  4244. arm_is_largest_exponent_ok (int precision)
  4245. {
  4246. /* precision == 1 ensures that this will only return
  4247. true for 16 bit floats. */
  4248. return (precision == 1) && (fp16_format == ARM_FP16_FORMAT_ALTERNATIVE);
  4249. }
  4250. static void
  4251. set_fp16_format (int dummy ATTRIBUTE_UNUSED)
  4252. {
  4253. char saved_char;
  4254. char* name;
  4255. enum fp_16bit_format new_format;
  4256. new_format = ARM_FP16_FORMAT_DEFAULT;
  4257. name = input_line_pointer;
  4258. while (*input_line_pointer && !ISSPACE (*input_line_pointer))
  4259. input_line_pointer++;
  4260. saved_char = *input_line_pointer;
  4261. *input_line_pointer = 0;
  4262. if (strcasecmp (name, "ieee") == 0)
  4263. new_format = ARM_FP16_FORMAT_IEEE;
  4264. else if (strcasecmp (name, "alternative") == 0)
  4265. new_format = ARM_FP16_FORMAT_ALTERNATIVE;
  4266. else
  4267. {
  4268. as_bad (_("unrecognised float16 format \"%s\""), name);
  4269. goto cleanup;
  4270. }
  4271. /* Only set fp16_format if it is still the default (aka not already
  4272. been set yet). */
  4273. if (fp16_format == ARM_FP16_FORMAT_DEFAULT)
  4274. fp16_format = new_format;
  4275. else
  4276. {
  4277. if (new_format != fp16_format)
  4278. as_warn (_("float16 format cannot be set more than once, ignoring."));
  4279. }
  4280. cleanup:
  4281. *input_line_pointer = saved_char;
  4282. ignore_rest_of_line ();
  4283. }
/* This table describes all the machine specific pseudo-ops the assembler
   has to support.  The fields are:
     pseudo-op name without dot
     function to call to execute this pseudo-op
     Integer arg to pass to the function.  */

const pseudo_typeS md_pseudo_table[] =
{
  /* Never called because '.req' does not start a line.  */
  { "req",	   s_req,	  0 },
  /* Following two are likewise never called.  */
  { "dn",	   s_dn,	  0 },
  { "qn",	   s_qn,	  0 },
  { "unreq",	   s_unreq,	  0 },
  { "bss",	   s_bss,	  0 },
  { "align",	   s_align_ptwo,  2 },
  { "arm",	   s_arm,	  0 },
  { "thumb",	   s_thumb,	  0 },
  { "code",	   s_code,	  0 },
  { "force_thumb", s_force_thumb, 0 },
  { "thumb_func",  s_thumb_func,  0 },
  { "thumb_set",   s_thumb_set,	  0 },
  { "even",	   s_even,	  0 },
  { "ltorg",	   s_ltorg,	  0 },
  { "pool",	   s_ltorg,	  0 },
  { "syntax",	   s_syntax,	  0 },
  { "cpu",	   s_arm_cpu,	  0 },
  { "arch",	   s_arm_arch,	  0 },
  { "object_arch", s_arm_object_arch, 0 },
  { "fpu",	   s_arm_fpu,	  0 },
  { "arch_extension", s_arm_arch_extension, 0 },
#ifdef OBJ_ELF
  /* ELF-specific data directives and EHABI unwinding directives.  */
  { "word",	   s_arm_elf_cons, 4 },
  { "long",	   s_arm_elf_cons, 4 },
  { "inst.n",	   s_arm_elf_inst, 2 },
  { "inst.w",	   s_arm_elf_inst, 4 },
  { "inst",	   s_arm_elf_inst, 0 },
  { "rel31",	   s_arm_rel31,	  0 },
  { "fnstart",		s_arm_unwind_fnstart,	0 },
  { "fnend",		s_arm_unwind_fnend,	0 },
  { "cantunwind",	s_arm_unwind_cantunwind, 0 },
  { "personality",	s_arm_unwind_personality, 0 },
  { "personalityindex",	s_arm_unwind_personalityindex, 0 },
  { "handlerdata",	s_arm_unwind_handlerdata, 0 },
  { "save",		s_arm_unwind_save,	0 },
  { "vsave",		s_arm_unwind_save,	1 },
  { "movsp",		s_arm_unwind_movsp,	0 },
  { "pad",		s_arm_unwind_pad,	0 },
  { "setfp",		s_arm_unwind_setfp,	0 },
  { "unwind_raw",	s_arm_unwind_raw,	0 },
  { "eabi_attribute",	s_arm_eabi_attribute,	0 },
  { "tlsdescseq",	s_arm_tls_descseq,	0 },
#else
  { "word",	   cons, 4},

  /* These are used for dwarf.  */
  {"2byte", cons, 2},
  {"4byte", cons, 4},
  {"8byte", cons, 8},
  /* These are used for dwarf2.  */
  { "file", dwarf2_directive_file, 0 },
  { "loc",  dwarf2_directive_loc,  0 },
  { "loc_mark_labels", dwarf2_directive_loc_mark_labels, 0 },
#endif
  { "extend",	   float_cons, 'x' },
  { "ldouble",	   float_cons, 'x' },
  { "packed",	   float_cons, 'p' },
  { "bfloat16",	   float_cons, 'b' },
#ifdef TE_PE
  {"secrel32", pe_directive_secrel, 0},
#endif

  /* These are for compatibility with CodeComposer Studio.  */
  {"ref",	   s_ccs_ref,	     0},
  {"def",	   s_ccs_def,	     0},
  {"asmfunc",	   s_ccs_asmfunc,    0},
  {"endasmfunc",   s_ccs_endasmfunc, 0},

  {"float16", float_cons, 'h' },
  {"float16_format", set_fp16_format, 0 },

  { 0, 0, 0 }
};
  4362. /* Parser functions used exclusively in instruction operands. */
  4363. /* Generic immediate-value read function for use in insn parsing.
  4364. STR points to the beginning of the immediate (the leading #);
  4365. VAL receives the value; if the value is outside [MIN, MAX]
  4366. issue an error. PREFIX_OPT is true if the immediate prefix is
  4367. optional. */
  4368. static int
  4369. parse_immediate (char **str, int *val, int min, int max,
  4370. bool prefix_opt)
  4371. {
  4372. expressionS exp;
  4373. my_get_expression (&exp, str, prefix_opt ? GE_OPT_PREFIX : GE_IMM_PREFIX);
  4374. if (exp.X_op != O_constant)
  4375. {
  4376. inst.error = _("constant expression required");
  4377. return FAIL;
  4378. }
  4379. if (exp.X_add_number < min || exp.X_add_number > max)
  4380. {
  4381. inst.error = _("immediate value out of range");
  4382. return FAIL;
  4383. }
  4384. *val = exp.X_add_number;
  4385. return SUCCESS;
  4386. }
/* Less-generic immediate-value read function with the possibility of loading a
   big (64-bit) immediate, as required by Neon VMOV, VMVN and logic immediate
   instructions.  Puts the result directly in inst.operands[i]: the low 32
   bits in .imm, the high 32 bits (if any) in .reg with .regisimm set.
   When IN_EXP is non-NULL the parsed expression is stored through it.
   Returns SUCCESS or FAIL.  */

static int
parse_big_immediate (char **str, int i, expressionS *in_exp,
		     bool allow_symbol_p)
{
  expressionS exp;
  expressionS *exp_p = in_exp ? in_exp : &exp;
  char *ptr = *str;

  my_get_expression (exp_p, &ptr, GE_OPT_PREFIX_BIG);

  if (exp_p->X_op == O_constant)
    {
      inst.operands[i].imm = exp_p->X_add_number & 0xffffffff;
      /* If we're on a 64-bit host, then a 64-bit number can be returned using
	 O_constant.  We have to be careful not to break compilation for
	 32-bit X_add_number, though.  */
      if ((exp_p->X_add_number & ~(offsetT)(0xffffffffU)) != 0)
	{
	  /* X >> 32 is illegal if sizeof (exp_p->X_add_number) == 4.  */
	  inst.operands[i].reg = (((exp_p->X_add_number >> 16) >> 16)
				  & 0xffffffff);
	  inst.operands[i].regisimm = 1;
	}
    }
  else if (exp_p->X_op == O_big
	   && LITTLENUM_NUMBER_OF_BITS * exp_p->X_add_number > 32)
    {
      unsigned parts = 32 / LITTLENUM_NUMBER_OF_BITS, j, idx = 0;

      /* Bignums have their least significant bits in
	 generic_bignum[0].  Make sure we put 32 bits in imm and
	 32 bits in reg,  in a (hopefully) portable way.  */
      gas_assert (parts != 0);

      /* Make sure that the number is not too big.
	 PR 11972: Bignums can now be sign-extended to the
	 size of a .octa so check that the out of range bits
	 are all zero or all one.  */
      if (LITTLENUM_NUMBER_OF_BITS * exp_p->X_add_number > 64)
	{
	  LITTLENUM_TYPE m = -1;

	  if (generic_bignum[parts * 2] != 0
	      && generic_bignum[parts * 2] != m)
	    return FAIL;

	  for (j = parts * 2 + 1; j < (unsigned) exp_p->X_add_number; j++)
	    if (generic_bignum[j] != generic_bignum[j-1])
	      return FAIL;
	}

      /* Assemble the low 32 bits into .imm ...  */
      inst.operands[i].imm = 0;
      for (j = 0; j < parts; j++, idx++)
	inst.operands[i].imm |= ((unsigned) generic_bignum[idx]
				 << (LITTLENUM_NUMBER_OF_BITS * j));
      /* ... and the next 32 bits into .reg.  */
      inst.operands[i].reg = 0;
      for (j = 0; j < parts; j++, idx++)
	inst.operands[i].reg |= ((unsigned) generic_bignum[idx]
				 << (LITTLENUM_NUMBER_OF_BITS * j));
      inst.operands[i].regisimm = 1;
    }
  else if (!(exp_p->X_op == O_symbol && allow_symbol_p))
    return FAIL;

  *str = ptr;

  return SUCCESS;
}
/* Returns the pseudo-register number of an FPA immediate constant
   (constants are numbered from 8 upward, as the "i + 8" returns show),
   or FAIL if there isn't a valid constant here.  */

static int
parse_fpa_immediate (char ** str)
{
  LITTLENUM_TYPE words[MAX_LITTLENUMS];
  char *	 save_in;
  expressionS	 exp;
  int		 i;
  int		 j;

  /* First try and match exact strings, this is to guarantee
     that some formats will work even for cross assembly.  */
  for (i = 0; fp_const[i]; i++)
    {
      if (strncmp (*str, fp_const[i], strlen (fp_const[i])) == 0)
	{
	  char *start = *str;

	  *str += strlen (fp_const[i]);
	  if (is_end_of_line[(unsigned char) **str])
	    return i + 8;
	  /* Trailing characters: not a clean match, undo the advance.  */
	  *str = start;
	}
    }

  /* Just because we didn't get a match doesn't mean that the constant
     isn't valid, just that it is in a format that we don't
     automatically recognize.  Try parsing it with the standard
     expression routines.  */
  memset (words, 0, MAX_LITTLENUMS * sizeof (LITTLENUM_TYPE));

  /* Look for a raw floating point number.  */
  if ((save_in = atof_ieee (*str, 'x', words)) != NULL
      && is_end_of_line[(unsigned char) *save_in])
    {
      /* Compare the parsed value against each known FPA constant.  */
      for (i = 0; i < NUM_FLOAT_VALS; i++)
	{
	  for (j = 0; j < MAX_LITTLENUMS; j++)
	    {
	      if (words[j] != fp_values[i][j])
		break;
	    }

	  if (j == MAX_LITTLENUMS)
	    {
	      *str = save_in;
	      return i + 8;
	    }
	}
    }

  /* Try and parse a more complex expression, this will probably fail
     unless the code uses a floating point prefix (eg "0f").  */
  save_in = input_line_pointer;
  input_line_pointer = *str;
  if (expression (&exp) == absolute_section
      && exp.X_op == O_big
      && exp.X_add_number < 0)
    {
      /* FIXME: 5 = X_PRECISION, should be #define'd where we can use it.
	 Ditto for 15.	*/
#define X_PRECISION 5
#define E_PRECISION 15L
      if (gen_to_words (words, X_PRECISION, E_PRECISION) == 0)
	{
	  for (i = 0; i < NUM_FLOAT_VALS; i++)
	    {
	      for (j = 0; j < MAX_LITTLENUMS; j++)
		{
		  if (words[j] != fp_values[i][j])
		    break;
		}

	      if (j == MAX_LITTLENUMS)
		{
		  *str = input_line_pointer;
		  input_line_pointer = save_in;
		  return i + 8;
		}
	    }
	}
    }

  /* No match: restore input_line_pointer and report failure.  */
  *str = input_line_pointer;
  input_line_pointer = save_in;
  inst.error = _("invalid FPA immediate expression");
  return FAIL;
}
  4530. /* Returns 1 if a number has "quarter-precision" float format
  4531. 0baBbbbbbc defgh000 00000000 00000000. */
  4532. static int
  4533. is_quarter_float (unsigned imm)
  4534. {
  4535. int bs = (imm & 0x20000000) ? 0x3e000000 : 0x40000000;
  4536. return (imm & 0x7ffff) == 0 && ((imm & 0x7e000000) ^ bs) == 0;
  4537. }
/* Detect the presence of a floating point or integer zero constant,
   i.e. #0.0 or #0.  Advances *IN past the constant and returns true
   when a zero is found; otherwise returns false.  */

static bool
parse_ifimm_zero (char **in)
{
  int error_code;

  if (!is_immediate_prefix (**in))
    {
      /* In unified syntax, all prefixes are optional.  */
      if (!unified_syntax)
	return false;
    }
  else
    ++*in;

  /* Accept #0x0 as a synonym for #0.  */
  if (startswith (*in, "0x"))
    {
      int val;
      /* [0, 0] range: only the value zero is accepted.  */
      if (parse_immediate (in, &val, 0, 0, true) == FAIL)
	return false;
      return true;
    }

  error_code = atof_generic (in, ".", EXP_CHARS,
			     &generic_floating_point_number);

  /* NOTE(review): low > leader appears to be how atof_generic signals
     that no nonzero digits were seen, i.e. the parsed value is +0.0 —
     confirm against atof_generic's contract.  */
  if (!error_code
      && generic_floating_point_number.sign == '+'
      && (generic_floating_point_number.low
	  > generic_floating_point_number.leader))
    return true;

  return false;
}
  4569. /* Parse an 8-bit "quarter-precision" floating point number of the form:
  4570. 0baBbbbbbc defgh000 00000000 00000000.
  4571. The zero and minus-zero cases need special handling, since they can't be
  4572. encoded in the "quarter-precision" float format, but can nonetheless be
  4573. loaded as integer constants. */
  4574. static unsigned
  4575. parse_qfloat_immediate (char **ccp, int *immed)
  4576. {
  4577. char *str = *ccp;
  4578. char *fpnum;
  4579. LITTLENUM_TYPE words[MAX_LITTLENUMS];
  4580. int found_fpchar = 0;
  4581. skip_past_char (&str, '#');
  4582. /* We must not accidentally parse an integer as a floating-point number. Make
  4583. sure that the value we parse is not an integer by checking for special
  4584. characters '.' or 'e'.
  4585. FIXME: This is a horrible hack, but doing better is tricky because type
  4586. information isn't in a very usable state at parse time. */
  4587. fpnum = str;
  4588. skip_whitespace (fpnum);
  4589. if (startswith (fpnum, "0x"))
  4590. return FAIL;
  4591. else
  4592. {
  4593. for (; *fpnum != '\0' && *fpnum != ' ' && *fpnum != '\n'; fpnum++)
  4594. if (*fpnum == '.' || *fpnum == 'e' || *fpnum == 'E')
  4595. {
  4596. found_fpchar = 1;
  4597. break;
  4598. }
  4599. if (!found_fpchar)
  4600. return FAIL;
  4601. }
  4602. if ((str = atof_ieee (str, 's', words)) != NULL)
  4603. {
  4604. unsigned fpword = 0;
  4605. int i;
  4606. /* Our FP word must be 32 bits (single-precision FP). */
  4607. for (i = 0; i < 32 / LITTLENUM_NUMBER_OF_BITS; i++)
  4608. {
  4609. fpword <<= LITTLENUM_NUMBER_OF_BITS;
  4610. fpword |= words[i];
  4611. }
  4612. if (is_quarter_float (fpword) || (fpword & 0x7fffffff) == 0)
  4613. *immed = fpword;
  4614. else
  4615. return FAIL;
  4616. *ccp = str;
  4617. return SUCCESS;
  4618. }
  4619. return FAIL;
  4620. }
/* Shift operands.  */

/* The distinct kinds of shift that may be applied to a register operand.
   Note ASL is folded into SHIFT_LSL elsewhere, and RRX is encoded as
   ROR #0.  */
enum shift_kind
{
  SHIFT_LSL, SHIFT_LSR, SHIFT_ASR, SHIFT_ROR, SHIFT_RRX, SHIFT_UXTW
};

/* Maps a textual shift name to its kind; entries are looked up via the
   arm_shift_hsh hash table (see parse_shift).  */
struct asm_shift_name
{
  const char *name;
  enum shift_kind kind;
};

/* Third argument to parse_shift.  */
enum parse_shift_mode
{
  NO_SHIFT_RESTRICT,		/* Any kind of shift is accepted.  */
  SHIFT_IMMEDIATE,		/* Shift operand must be an immediate.  */
  SHIFT_LSL_OR_ASR_IMMEDIATE,	/* Shift must be LSL or ASR immediate.  */
  SHIFT_ASR_IMMEDIATE,		/* Shift must be ASR immediate.  */
  SHIFT_LSL_IMMEDIATE,		/* Shift must be LSL immediate.  */
  SHIFT_UXTW_IMMEDIATE		/* Shift must be UXTW immediate.  */
};
  4641. /* Parse a <shift> specifier on an ARM data processing instruction.
  4642. This has three forms:
  4643. (LSL|LSR|ASL|ASR|ROR) Rs
  4644. (LSL|LSR|ASL|ASR|ROR) #imm
  4645. RRX
  4646. Note that ASL is assimilated to LSL in the instruction encoding, and
  4647. RRX to ROR #0 (which cannot be written as such). */
  4648. static int
  4649. parse_shift (char **str, int i, enum parse_shift_mode mode)
  4650. {
  4651. const struct asm_shift_name *shift_name;
  4652. enum shift_kind shift;
  4653. char *s = *str;
  4654. char *p = s;
  4655. int reg;
  4656. for (p = *str; ISALPHA (*p); p++)
  4657. ;
  4658. if (p == *str)
  4659. {
  4660. inst.error = _("shift expression expected");
  4661. return FAIL;
  4662. }
  4663. shift_name
  4664. = (const struct asm_shift_name *) str_hash_find_n (arm_shift_hsh, *str,
  4665. p - *str);
  4666. if (shift_name == NULL)
  4667. {
  4668. inst.error = _("shift expression expected");
  4669. return FAIL;
  4670. }
  4671. shift = shift_name->kind;
  4672. switch (mode)
  4673. {
  4674. case NO_SHIFT_RESTRICT:
  4675. case SHIFT_IMMEDIATE:
  4676. if (shift == SHIFT_UXTW)
  4677. {
  4678. inst.error = _("'UXTW' not allowed here");
  4679. return FAIL;
  4680. }
  4681. break;
  4682. case SHIFT_LSL_OR_ASR_IMMEDIATE:
  4683. if (shift != SHIFT_LSL && shift != SHIFT_ASR)
  4684. {
  4685. inst.error = _("'LSL' or 'ASR' required");
  4686. return FAIL;
  4687. }
  4688. break;
  4689. case SHIFT_LSL_IMMEDIATE:
  4690. if (shift != SHIFT_LSL)
  4691. {
  4692. inst.error = _("'LSL' required");
  4693. return FAIL;
  4694. }
  4695. break;
  4696. case SHIFT_ASR_IMMEDIATE:
  4697. if (shift != SHIFT_ASR)
  4698. {
  4699. inst.error = _("'ASR' required");
  4700. return FAIL;
  4701. }
  4702. break;
  4703. case SHIFT_UXTW_IMMEDIATE:
  4704. if (shift != SHIFT_UXTW)
  4705. {
  4706. inst.error = _("'UXTW' required");
  4707. return FAIL;
  4708. }
  4709. break;
  4710. default: abort ();
  4711. }
  4712. if (shift != SHIFT_RRX)
  4713. {
  4714. /* Whitespace can appear here if the next thing is a bare digit. */
  4715. skip_whitespace (p);
  4716. if (mode == NO_SHIFT_RESTRICT
  4717. && (reg = arm_reg_parse (&p, REG_TYPE_RN)) != FAIL)
  4718. {
  4719. inst.operands[i].imm = reg;
  4720. inst.operands[i].immisreg = 1;
  4721. }
  4722. else if (my_get_expression (&inst.relocs[0].exp, &p, GE_IMM_PREFIX))
  4723. return FAIL;
  4724. }
  4725. inst.operands[i].shift_kind = shift;
  4726. inst.operands[i].shifted = 1;
  4727. *str = p;
  4728. return SUCCESS;
  4729. }
  4730. /* Parse a <shifter_operand> for an ARM data processing instruction:
  4731. #<immediate>
  4732. #<immediate>, <rotate>
  4733. <Rm>
  4734. <Rm>, <shift>
  4735. where <shift> is defined by parse_shift above, and <rotate> is a
  4736. multiple of 2 between 0 and 30. Validation of immediate operands
  4737. is deferred to md_apply_fix. */
  4738. static int
  4739. parse_shifter_operand (char **str, int i)
  4740. {
  4741. int value;
  4742. expressionS exp;
  4743. if ((value = arm_reg_parse (str, REG_TYPE_RN)) != FAIL)
  4744. {
  4745. inst.operands[i].reg = value;
  4746. inst.operands[i].isreg = 1;
  4747. /* parse_shift will override this if appropriate */
  4748. inst.relocs[0].exp.X_op = O_constant;
  4749. inst.relocs[0].exp.X_add_number = 0;
  4750. if (skip_past_comma (str) == FAIL)
  4751. return SUCCESS;
  4752. /* Shift operation on register. */
  4753. return parse_shift (str, i, NO_SHIFT_RESTRICT);
  4754. }
  4755. if (my_get_expression (&inst.relocs[0].exp, str, GE_IMM_PREFIX))
  4756. return FAIL;
  4757. if (skip_past_comma (str) == SUCCESS)
  4758. {
  4759. /* #x, y -- ie explicit rotation by Y. */
  4760. if (my_get_expression (&exp, str, GE_NO_PREFIX))
  4761. return FAIL;
  4762. if (exp.X_op != O_constant || inst.relocs[0].exp.X_op != O_constant)
  4763. {
  4764. inst.error = _("constant expression expected");
  4765. return FAIL;
  4766. }
  4767. value = exp.X_add_number;
  4768. if (value < 0 || value > 30 || value % 2 != 0)
  4769. {
  4770. inst.error = _("invalid rotation");
  4771. return FAIL;
  4772. }
  4773. if (inst.relocs[0].exp.X_add_number < 0
  4774. || inst.relocs[0].exp.X_add_number > 255)
  4775. {
  4776. inst.error = _("invalid constant");
  4777. return FAIL;
  4778. }
  4779. /* Encode as specified. */
  4780. inst.operands[i].imm = inst.relocs[0].exp.X_add_number | value << 7;
  4781. return SUCCESS;
  4782. }
  4783. inst.relocs[0].type = BFD_RELOC_ARM_IMMEDIATE;
  4784. inst.relocs[0].pc_rel = 0;
  4785. return SUCCESS;
  4786. }
/* Group relocation information.  Each entry in the table contains the
   textual name of the relocation as may appear in assembler source
   and must end with a colon.
   Along with this textual name are the relocation codes to be used if
   the corresponding instruction is an ALU instruction (ADD or SUB only),
   an LDR, an LDRS, or an LDC.  */

struct group_reloc_table_entry
{
  const char *name;
  int alu_code;
  int ldr_code;
  int ldrs_code;
  int ldc_code;
};

typedef enum
{
  /* Varieties of non-ALU group relocation.  */

  GROUP_LDR,
  GROUP_LDRS,
  GROUP_LDC,
  GROUP_MVE
} group_reloc_type;

/* A code of 0 below means the relocation is not available for that
   instruction class (see the rejection check in parse_address_main).  */
static struct group_reloc_table_entry group_reloc_table[] =
  { /* Program counter relative: */
    { "pc_g0_nc",
      BFD_RELOC_ARM_ALU_PC_G0_NC,	/* ALU */
      0,				/* LDR */
      0,				/* LDRS */
      0 },				/* LDC */
    { "pc_g0",
      BFD_RELOC_ARM_ALU_PC_G0,		/* ALU */
      BFD_RELOC_ARM_LDR_PC_G0,		/* LDR */
      BFD_RELOC_ARM_LDRS_PC_G0,		/* LDRS */
      BFD_RELOC_ARM_LDC_PC_G0 },	/* LDC */
    { "pc_g1_nc",
      BFD_RELOC_ARM_ALU_PC_G1_NC,	/* ALU */
      0,				/* LDR */
      0,				/* LDRS */
      0 },				/* LDC */
    { "pc_g1",
      BFD_RELOC_ARM_ALU_PC_G1,		/* ALU */
      BFD_RELOC_ARM_LDR_PC_G1,		/* LDR */
      BFD_RELOC_ARM_LDRS_PC_G1,		/* LDRS */
      BFD_RELOC_ARM_LDC_PC_G1 },	/* LDC */
    { "pc_g2",
      BFD_RELOC_ARM_ALU_PC_G2,		/* ALU */
      BFD_RELOC_ARM_LDR_PC_G2,		/* LDR */
      BFD_RELOC_ARM_LDRS_PC_G2,		/* LDRS */
      BFD_RELOC_ARM_LDC_PC_G2 },	/* LDC */
    /* Section base relative */
    { "sb_g0_nc",
      BFD_RELOC_ARM_ALU_SB_G0_NC,	/* ALU */
      0,				/* LDR */
      0,				/* LDRS */
      0 },				/* LDC */
    { "sb_g0",
      BFD_RELOC_ARM_ALU_SB_G0,		/* ALU */
      BFD_RELOC_ARM_LDR_SB_G0,		/* LDR */
      BFD_RELOC_ARM_LDRS_SB_G0,		/* LDRS */
      BFD_RELOC_ARM_LDC_SB_G0 },	/* LDC */
    { "sb_g1_nc",
      BFD_RELOC_ARM_ALU_SB_G1_NC,	/* ALU */
      0,				/* LDR */
      0,				/* LDRS */
      0 },				/* LDC */
    { "sb_g1",
      BFD_RELOC_ARM_ALU_SB_G1,		/* ALU */
      BFD_RELOC_ARM_LDR_SB_G1,		/* LDR */
      BFD_RELOC_ARM_LDRS_SB_G1,		/* LDRS */
      BFD_RELOC_ARM_LDC_SB_G1 },	/* LDC */
    { "sb_g2",
      BFD_RELOC_ARM_ALU_SB_G2,		/* ALU */
      BFD_RELOC_ARM_LDR_SB_G2,		/* LDR */
      BFD_RELOC_ARM_LDRS_SB_G2,		/* LDRS */
      BFD_RELOC_ARM_LDC_SB_G2 },	/* LDC */
    /* Absolute thumb alu relocations.  */
    { "lower0_7",
      BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC,/* ALU.  */
      0,				/* LDR.  */
      0,				/* LDRS.  */
      0 },				/* LDC.  */
    { "lower8_15",
      BFD_RELOC_ARM_THUMB_ALU_ABS_G1_NC,/* ALU.  */
      0,				/* LDR.  */
      0,				/* LDRS.  */
      0 },				/* LDC.  */
    { "upper0_7",
      BFD_RELOC_ARM_THUMB_ALU_ABS_G2_NC,/* ALU.  */
      0,				/* LDR.  */
      0,				/* LDRS.  */
      0 },				/* LDC.  */
    { "upper8_15",
      BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC,/* ALU.  */
      0,				/* LDR.  */
      0,				/* LDRS.  */
      0 } };				/* LDC.  */
  4883. /* Given the address of a pointer pointing to the textual name of a group
  4884. relocation as may appear in assembler source, attempt to find its details
  4885. in group_reloc_table. The pointer will be updated to the character after
  4886. the trailing colon. On failure, FAIL will be returned; SUCCESS
  4887. otherwise. On success, *entry will be updated to point at the relevant
  4888. group_reloc_table entry. */
  4889. static int
  4890. find_group_reloc_table_entry (char **str, struct group_reloc_table_entry **out)
  4891. {
  4892. unsigned int i;
  4893. for (i = 0; i < ARRAY_SIZE (group_reloc_table); i++)
  4894. {
  4895. int length = strlen (group_reloc_table[i].name);
  4896. if (strncasecmp (group_reloc_table[i].name, *str, length) == 0
  4897. && (*str)[length] == ':')
  4898. {
  4899. *out = &group_reloc_table[i];
  4900. *str += (length + 1);
  4901. return SUCCESS;
  4902. }
  4903. }
  4904. return FAIL;
  4905. }
  4906. /* Parse a <shifter_operand> for an ARM data processing instruction
  4907. (as for parse_shifter_operand) where group relocations are allowed:
  4908. #<immediate>
  4909. #<immediate>, <rotate>
  4910. #:<group_reloc>:<expression>
  4911. <Rm>
  4912. <Rm>, <shift>
  4913. where <group_reloc> is one of the strings defined in group_reloc_table.
  4914. The hashes are optional.
  4915. Everything else is as for parse_shifter_operand. */
  4916. static parse_operand_result
  4917. parse_shifter_operand_group_reloc (char **str, int i)
  4918. {
  4919. /* Determine if we have the sequence of characters #: or just :
  4920. coming next. If we do, then we check for a group relocation.
  4921. If we don't, punt the whole lot to parse_shifter_operand. */
  4922. if (((*str)[0] == '#' && (*str)[1] == ':')
  4923. || (*str)[0] == ':')
  4924. {
  4925. struct group_reloc_table_entry *entry;
  4926. if ((*str)[0] == '#')
  4927. (*str) += 2;
  4928. else
  4929. (*str)++;
  4930. /* Try to parse a group relocation. Anything else is an error. */
  4931. if (find_group_reloc_table_entry (str, &entry) == FAIL)
  4932. {
  4933. inst.error = _("unknown group relocation");
  4934. return PARSE_OPERAND_FAIL_NO_BACKTRACK;
  4935. }
  4936. /* We now have the group relocation table entry corresponding to
  4937. the name in the assembler source. Next, we parse the expression. */
  4938. if (my_get_expression (&inst.relocs[0].exp, str, GE_NO_PREFIX))
  4939. return PARSE_OPERAND_FAIL_NO_BACKTRACK;
  4940. /* Record the relocation type (always the ALU variant here). */
  4941. inst.relocs[0].type = (bfd_reloc_code_real_type) entry->alu_code;
  4942. gas_assert (inst.relocs[0].type != 0);
  4943. return PARSE_OPERAND_SUCCESS;
  4944. }
  4945. else
  4946. return parse_shifter_operand (str, i) == SUCCESS
  4947. ? PARSE_OPERAND_SUCCESS : PARSE_OPERAND_FAIL;
  4948. /* Never reached. */
  4949. }
  4950. /* Parse a Neon alignment expression. Information is written to
  4951. inst.operands[i]. We assume the initial ':' has been skipped.
  4952. align .imm = align << 8, .immisalign=1, .preind=0 */
  4953. static parse_operand_result
  4954. parse_neon_alignment (char **str, int i)
  4955. {
  4956. char *p = *str;
  4957. expressionS exp;
  4958. my_get_expression (&exp, &p, GE_NO_PREFIX);
  4959. if (exp.X_op != O_constant)
  4960. {
  4961. inst.error = _("alignment must be constant");
  4962. return PARSE_OPERAND_FAIL;
  4963. }
  4964. inst.operands[i].imm = exp.X_add_number << 8;
  4965. inst.operands[i].immisalign = 1;
  4966. /* Alignments are not pre-indexes. */
  4967. inst.operands[i].preind = 0;
  4968. *str = p;
  4969. return PARSE_OPERAND_SUCCESS;
  4970. }
/* Parse all forms of an ARM address expression.  Information is written
   to inst.operands[i] and/or inst.relocs[0].

   Preindexed addressing (.preind=1):

   [Rn, #offset]       .reg=Rn .relocs[0].exp=offset
   [Rn, +/-Rm]	       .reg=Rn .imm=Rm .immisreg=1 .negative=0/1
   [Rn, +/-Rm, shift]  .reg=Rn .imm=Rm .immisreg=1 .negative=0/1
		       .shift_kind=shift .relocs[0].exp=shift_imm

   These three may have a trailing ! which causes .writeback to be set also.

   Postindexed addressing (.postind=1, .writeback=1):

   [Rn], #offset       .reg=Rn .relocs[0].exp=offset
   [Rn], +/-Rm	       .reg=Rn .imm=Rm .immisreg=1 .negative=0/1
   [Rn], +/-Rm, shift  .reg=Rn .imm=Rm .immisreg=1 .negative=0/1
		       .shift_kind=shift .relocs[0].exp=shift_imm

   Unindexed addressing (.preind=0, .postind=0):

   [Rn], {option}      .reg=Rn .imm=option .immisreg=0

   Other:

   [Rn]{!}	       shorthand for [Rn,#0]{!}
   =immediate	       .isreg=0 .relocs[0].exp=immediate
   label	       .reg=PC .relocs[0].pc_rel=1 .relocs[0].exp=label

  It is the caller's responsibility to check for addressing modes not
  supported by the instruction, and to set inst.relocs[0].type.  */

static parse_operand_result
parse_address_main (char **str, int i, int group_relocations,
		    group_reloc_type group_type)
{
  char *p = *str;
  int reg;

  /* No '[': either "=immediate" or a bare label/address.  */
  if (skip_past_char (&p, '[') == FAIL)
    {
      if (group_type == GROUP_MVE
	  && (reg = arm_reg_parse (&p, REG_TYPE_RN)) != FAIL)
	{
	  /* [r0-r15] expected as argument but receiving r0-r15 without
	     [] brackets.  */
	  inst.error = BAD_SYNTAX;
	  return PARSE_OPERAND_FAIL;
	}
      else if (skip_past_char (&p, '=') == FAIL)
	{
	  /* Bare address - translate to PC-relative offset.  */
	  inst.relocs[0].pc_rel = 1;
	  inst.operands[i].reg = REG_PC;
	  inst.operands[i].isreg = 1;
	  inst.operands[i].preind = 1;

	  if (my_get_expression (&inst.relocs[0].exp, &p, GE_OPT_PREFIX_BIG))
	    return PARSE_OPERAND_FAIL;
	}
      else if (parse_big_immediate (&p, i, &inst.relocs[0].exp,
				    /*allow_symbol_p=*/true))
	return PARSE_OPERAND_FAIL;

      *str = p;
      return PARSE_OPERAND_SUCCESS;
    }

  /* PR gas/14887: Allow for whitespace after the opening bracket.  */
  skip_whitespace (p);

  /* Parse the base register.  For MVE, a Q register is also accepted.  */
  if (group_type == GROUP_MVE)
    {
      enum arm_reg_type rtype = REG_TYPE_MQ;
      struct neon_type_el et;
      if ((reg = arm_typed_reg_parse (&p, rtype, &rtype, &et)) != FAIL)
	{
	  inst.operands[i].isquad = 1;
	}
      else if ((reg = arm_reg_parse (&p, REG_TYPE_RN)) == FAIL)
	{
	  inst.error = BAD_ADDR_MODE;
	  return PARSE_OPERAND_FAIL;
	}
    }
  else if ((reg = arm_reg_parse (&p, REG_TYPE_RN)) == FAIL)
    {
      if (group_type == GROUP_MVE)
	inst.error = BAD_ADDR_MODE;
      else
	inst.error = _(reg_expected_msgs[REG_TYPE_RN]);
      return PARSE_OPERAND_FAIL;
    }
  inst.operands[i].reg = reg;
  inst.operands[i].isreg = 1;

  /* A comma inside the brackets introduces a pre-indexed offset.  */
  if (skip_past_comma (&p) == SUCCESS)
    {
      inst.operands[i].preind = 1;

      if (*p == '+') p++;
      else if (*p == '-') p++, inst.operands[i].negative = 1;

      enum arm_reg_type rtype = REG_TYPE_MQ;
      struct neon_type_el et;
      /* NOTE(review): immisreg == 2 appears to mark an MVE Q-register
	 offset (vs 1 for a core register) — confirm against consumers.  */
      if (group_type == GROUP_MVE
	  && (reg = arm_typed_reg_parse (&p, rtype, &rtype, &et)) != FAIL)
	{
	  inst.operands[i].immisreg = 2;
	  inst.operands[i].imm = reg;

	  if (skip_past_comma (&p) == SUCCESS)
	    {
	      /* Only a UXTW shift is legal here; fold its amount into
		 .imm (bits 5+) and clear the expression.  */
	      if (parse_shift (&p, i, SHIFT_UXTW_IMMEDIATE) == SUCCESS)
		{
		  inst.operands[i].imm |= inst.relocs[0].exp.X_add_number << 5;
		  inst.relocs[0].exp.X_add_number = 0;
		}
	      else
		return PARSE_OPERAND_FAIL;
	    }
	}
      else if ((reg = arm_reg_parse (&p, REG_TYPE_RN)) != FAIL)
	{
	  inst.operands[i].imm = reg;
	  inst.operands[i].immisreg = 1;

	  if (skip_past_comma (&p) == SUCCESS)
	    if (parse_shift (&p, i, SHIFT_IMMEDIATE) == FAIL)
	      return PARSE_OPERAND_FAIL;
	}
      else if (skip_past_char (&p, ':') == SUCCESS)
	{
	  /* FIXME: '@' should be used here, but it's filtered out by generic
	     code before we get to see it here.  This may be subject to
	     change.  */
	  parse_operand_result result = parse_neon_alignment (&p, i);

	  if (result != PARSE_OPERAND_SUCCESS)
	    return result;
	}
      else
	{
	  /* Not a register offset: back up over a consumed '-' so the
	     expression parser sees the sign itself.  */
	  if (inst.operands[i].negative)
	    {
	      inst.operands[i].negative = 0;
	      p--;
	    }

	  if (group_relocations
	      && ((*p == '#' && *(p + 1) == ':') || *p == ':'))
	    {
	      struct group_reloc_table_entry *entry;

	      /* Skip over the #: or : sequence.  */
	      if (*p == '#')
		p += 2;
	      else
		p++;

	      /* Try to parse a group relocation.  Anything else is an
		 error.  */
	      if (find_group_reloc_table_entry (&p, &entry) == FAIL)
		{
		  inst.error = _("unknown group relocation");
		  return PARSE_OPERAND_FAIL_NO_BACKTRACK;
		}

	      /* We now have the group relocation table entry corresponding to
		 the name in the assembler source.  Next, we parse the
		 expression.  */
	      if (my_get_expression (&inst.relocs[0].exp, &p, GE_NO_PREFIX))
		return PARSE_OPERAND_FAIL_NO_BACKTRACK;

	      /* Record the relocation type.  */
	      switch (group_type)
		{
		case GROUP_LDR:
		  inst.relocs[0].type
		    = (bfd_reloc_code_real_type) entry->ldr_code;
		  break;

		case GROUP_LDRS:
		  inst.relocs[0].type
		    = (bfd_reloc_code_real_type) entry->ldrs_code;
		  break;

		case GROUP_LDC:
		  inst.relocs[0].type
		    = (bfd_reloc_code_real_type) entry->ldc_code;
		  break;

		default:
		  gas_assert (0);
		}

	      /* A zero code in the table means "not available for this
		 instruction class".  */
	      if (inst.relocs[0].type == 0)
		{
		  inst.error = _("this group relocation is not allowed on this instruction");
		  return PARSE_OPERAND_FAIL_NO_BACKTRACK;
		}
	    }
	  else
	    {
	      char *q = p;

	      if (my_get_expression (&inst.relocs[0].exp, &p, GE_IMM_PREFIX))
		return PARSE_OPERAND_FAIL;
	      /* If the offset is 0, find out if it's a +0 or -0.  */
	      if (inst.relocs[0].exp.X_op == O_constant
		  && inst.relocs[0].exp.X_add_number == 0)
		{
		  skip_whitespace (q);
		  if (*q == '#')
		    {
		      q++;
		      skip_whitespace (q);
		    }
		  if (*q == '-')
		    inst.operands[i].negative = 1;
		}
	    }
	}
    }
  else if (skip_past_char (&p, ':') == SUCCESS)
    {
      /* FIXME: '@' should be used here, but it's filtered out by generic code
	 before we get to see it here.  This may be subject to change.  */
      parse_operand_result result = parse_neon_alignment (&p, i);

      if (result != PARSE_OPERAND_SUCCESS)
	return result;
    }

  if (skip_past_char (&p, ']') == FAIL)
    {
      inst.error = _("']' expected");
      return PARSE_OPERAND_FAIL;
    }

  /* After the ']': '!' (writeback), or a post-index / option field.  */
  if (skip_past_char (&p, '!') == SUCCESS)
    inst.operands[i].writeback = 1;

  else if (skip_past_comma (&p) == SUCCESS)
    {
      if (skip_past_char (&p, '{') == SUCCESS)
	{
	  /* [Rn], {expr} - unindexed, with option */
	  if (parse_immediate (&p, &inst.operands[i].imm,
			       0, 255, true) == FAIL)
	    return PARSE_OPERAND_FAIL;

	  if (skip_past_char (&p, '}') == FAIL)
	    {
	      inst.error = _("'}' expected at end of 'option' field");
	      return PARSE_OPERAND_FAIL;
	    }
	  if (inst.operands[i].preind)
	    {
	      inst.error = _("cannot combine index with option");
	      return PARSE_OPERAND_FAIL;
	    }
	  *str = p;
	  return PARSE_OPERAND_SUCCESS;
	}
      else
	{
	  inst.operands[i].postind = 1;
	  inst.operands[i].writeback = 1;

	  if (inst.operands[i].preind)
	    {
	      inst.error = _("cannot combine pre- and post-indexing");
	      return PARSE_OPERAND_FAIL;
	    }

	  if (*p == '+') p++;
	  else if (*p == '-') p++, inst.operands[i].negative = 1;

	  enum arm_reg_type rtype = REG_TYPE_MQ;
	  struct neon_type_el et;
	  if (group_type == GROUP_MVE
	      && (reg = arm_typed_reg_parse (&p, rtype, &rtype, &et)) != FAIL)
	    {
	      inst.operands[i].immisreg = 2;
	      inst.operands[i].imm = reg;
	    }
	  else if ((reg = arm_reg_parse (&p, REG_TYPE_RN)) != FAIL)
	    {
	      /* We might be using the immediate for alignment already.  If we
		 are, OR the register number into the low-order bits.  */
	      if (inst.operands[i].immisalign)
		inst.operands[i].imm |= reg;
	      else
		inst.operands[i].imm = reg;
	      inst.operands[i].immisreg = 1;

	      if (skip_past_comma (&p) == SUCCESS)
		if (parse_shift (&p, i, SHIFT_IMMEDIATE) == FAIL)
		  return PARSE_OPERAND_FAIL;
	    }
	  else
	    {
	      char *q = p;

	      /* As above: let the expression parser see the '-' itself.  */
	      if (inst.operands[i].negative)
		{
		  inst.operands[i].negative = 0;
		  p--;
		}
	      if (my_get_expression (&inst.relocs[0].exp, &p, GE_IMM_PREFIX))
		return PARSE_OPERAND_FAIL;
	      /* If the offset is 0, find out if it's a +0 or -0.  */
	      if (inst.relocs[0].exp.X_op == O_constant
		  && inst.relocs[0].exp.X_add_number == 0)
		{
		  skip_whitespace (q);
		  if (*q == '#')
		    {
		      q++;
		      skip_whitespace (q);
		    }
		  if (*q == '-')
		    inst.operands[i].negative = 1;
		}
	    }
	}
    }

  /* If at this point neither .preind nor .postind is set, we have a
     bare [Rn]{!}, which is shorthand for [Rn,#0]{!}.  */
  if (inst.operands[i].preind == 0 && inst.operands[i].postind == 0)
    {
      inst.operands[i].preind = 1;
      inst.relocs[0].exp.X_op = O_constant;
      inst.relocs[0].exp.X_add_number = 0;
    }
  *str = p;
  return PARSE_OPERAND_SUCCESS;
}
  5268. static int
  5269. parse_address (char **str, int i)
  5270. {
  5271. return parse_address_main (str, i, 0, GROUP_LDR) == PARSE_OPERAND_SUCCESS
  5272. ? SUCCESS : FAIL;
  5273. }
/* As parse_address, but group relocations are enabled and the caller
   selects which relocation class (LDR/LDRS/LDC/MVE) applies.  */
static parse_operand_result
parse_address_group_reloc (char **str, int i, group_reloc_type type)
{
  return parse_address_main (str, i, 1, type);
}
  5279. /* Parse an operand for a MOVW or MOVT instruction. */
  5280. static int
  5281. parse_half (char **str)
  5282. {
  5283. char * p;
  5284. p = *str;
  5285. skip_past_char (&p, '#');
  5286. if (strncasecmp (p, ":lower16:", 9) == 0)
  5287. inst.relocs[0].type = BFD_RELOC_ARM_MOVW;
  5288. else if (strncasecmp (p, ":upper16:", 9) == 0)
  5289. inst.relocs[0].type = BFD_RELOC_ARM_MOVT;
  5290. if (inst.relocs[0].type != BFD_RELOC_UNUSED)
  5291. {
  5292. p += 9;
  5293. skip_whitespace (p);
  5294. }
  5295. if (my_get_expression (&inst.relocs[0].exp, &p, GE_NO_PREFIX))
  5296. return FAIL;
  5297. if (inst.relocs[0].type == BFD_RELOC_UNUSED)
  5298. {
  5299. if (inst.relocs[0].exp.X_op != O_constant)
  5300. {
  5301. inst.error = _("constant expression expected");
  5302. return FAIL;
  5303. }
  5304. if (inst.relocs[0].exp.X_add_number < 0
  5305. || inst.relocs[0].exp.X_add_number > 0xffff)
  5306. {
  5307. inst.error = _("immediate value out of range");
  5308. return FAIL;
  5309. }
  5310. }
  5311. *str = p;
  5312. return SUCCESS;
  5313. }
/* Miscellaneous. */

/* Parse a PSR flag operand.  The value returned is FAIL on syntax error,
   or a bitmask suitable to be or-ed into the ARM msr instruction.  */
static int
parse_psr (char **str, bool lhs)
{
  char *p;
  unsigned long psr_field;
  const struct asm_psr *psr;
  char *start;
  bool is_apsr = false;
  bool m_profile = ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_m);

  /* PR gas/12698:  If the user has specified -march=all then m_profile will
     be TRUE, but we want to ignore it in this case as we are building for any
     CPU type, including non-m variants.  */
  if (ARM_FEATURE_CORE_EQUAL (selected_cpu, arm_arch_any))
    m_profile = false;

  /* CPSR's and SPSR's can now be lowercase.  This is just a convenience
     feature for ease of use and backwards compatibility.  */
  p = *str;
  if (strncasecmp (p, "SPSR", 4) == 0)
    {
      if (m_profile)
	goto unsupported_psr;

      psr_field = SPSR_BIT;
    }
  else if (strncasecmp (p, "CPSR", 4) == 0)
    {
      if (m_profile)
	goto unsupported_psr;

      psr_field = 0;
    }
  else if (strncasecmp (p, "APSR", 4) == 0)
    {
      /* APSR[_<bits>] can be used as a synonym for CPSR[_<flags>] on ARMv7-A
	 and ARMv7-R architecture CPUs.  */
      is_apsr = true;
      psr_field = 0;
    }
  else if (m_profile)
    {
      /* M-profile special registers are looked up whole in a hash table.  */
      start = p;
      do
	p++;
      while (ISALNUM (*p) || *p == '_');

      /* For names ending in "psr", trim at the character after the final
	 'r'/'R' so a following suffix is not swallowed.  */
      if (strncasecmp (start, "iapsr", 5) == 0
	  || strncasecmp (start, "eapsr", 5) == 0
	  || strncasecmp (start, "xpsr", 4) == 0
	  || strncasecmp (start, "psr", 3) == 0)
	p = start + strcspn (start, "rR") + 1;

      psr = (const struct asm_psr *) str_hash_find_n (arm_v7m_psr_hsh, start,
						      p - start);

      if (!psr)
	return FAIL;

      /* If APSR is being written, a bitfield may be specified.  Note that
	 APSR itself is handled above.  */
      if (psr->field <= 3)
	{
	  psr_field = psr->field;
	  is_apsr = true;
	  goto check_suffix;
	}

      *str = p;
      /* M-profile MSR instructions have the mask field set to "10", except
	 *PSR variants which modify APSR, which may use a different mask (and
	 have been handled already).  Do that by setting the PSR_f field
	 here.  */
      return psr->field | (lhs ? PSR_f : 0);
    }
  else
    goto unsupported_psr;

  /* Step over the 4-character register name matched above.  */
  p += 4;
 check_suffix:
  if (*p == '_')
    {
      /* A suffix follows.  */
      p++;
      start = p;

      do
	p++;
      while (ISALNUM (*p) || *p == '_');

      if (is_apsr)
	{
	  /* APSR uses a notation for bits, rather than fields.  */
	  unsigned int nzcvq_bits = 0;
	  unsigned int g_bit = 0;
	  char *bit;

	  /* Each letter sets its own bit; a repeated letter sets the 0x20
	     sentinel instead, which is rejected as a bad bitmask below.  */
	  for (bit = start; bit != p; bit++)
	    {
	      switch (TOLOWER (*bit))
		{
		case 'n':
		  nzcvq_bits |= (nzcvq_bits & 0x01) ? 0x20 : 0x01;
		  break;

		case 'z':
		  nzcvq_bits |= (nzcvq_bits & 0x02) ? 0x20 : 0x02;
		  break;

		case 'c':
		  nzcvq_bits |= (nzcvq_bits & 0x04) ? 0x20 : 0x04;
		  break;

		case 'v':
		  nzcvq_bits |= (nzcvq_bits & 0x08) ? 0x20 : 0x08;
		  break;

		case 'q':
		  nzcvq_bits |= (nzcvq_bits & 0x10) ? 0x20 : 0x10;
		  break;

		case 'g':
		  g_bit |= (g_bit & 0x1) ? 0x2 : 0x1;
		  break;

		default:
		  inst.error = _("unexpected bit specified after APSR");
		  return FAIL;
		}
	    }

	  /* Only the complete "nzcvq" set maps to PSR_f.  */
	  if (nzcvq_bits == 0x1f)
	    psr_field |= PSR_f;

	  if (g_bit == 0x1)
	    {
	      if (!ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v6_dsp))
		{
		  inst.error = _("selected processor does not "
				 "support DSP extension");
		  return FAIL;
		}

	      psr_field |= PSR_s;
	    }

	  /* Reject duplicates (0x20/0x2 sentinels) and partial nzcvq sets.  */
	  if ((nzcvq_bits & 0x20) != 0
	      || (nzcvq_bits != 0x1f && nzcvq_bits != 0)
	      || (g_bit & 0x2) != 0)
	    {
	      inst.error = _("bad bitmask specified after APSR");
	      return FAIL;
	    }
	}
      else
	{
	  psr = (const struct asm_psr *) str_hash_find_n (arm_psr_hsh, start,
							  p - start);
	  if (!psr)
	    goto error;

	  psr_field |= psr->field;
	}
    }
  else
    {
      if (ISALNUM (*p))
	goto error;    /* Garbage after "[CS]PSR".  */

      /* Unadorned APSR is equivalent to APSR_nzcvq/CPSR_f (for writes).  This
	 is deprecated, but allow it anyway.  */
      if (is_apsr && lhs)
	{
	  psr_field |= PSR_f;
	  as_tsktsk (_("writing to APSR without specifying a bitmask is "
		       "deprecated"));
	}
      else if (!m_profile)
	/* These bits are never right for M-profile devices: don't set them
	   (only code paths which read/write APSR reach here).  */
	psr_field |= (PSR_c | PSR_f);
    }
  *str = p;
  return psr_field;

 unsupported_psr:
  inst.error = _("selected processor does not support requested special "
		 "purpose register");
  return FAIL;

 error:
  inst.error = _("flag for {c}psr instruction expected");
  return FAIL;
}
  5484. static int
  5485. parse_sys_vldr_vstr (char **str)
  5486. {
  5487. unsigned i;
  5488. int val = FAIL;
  5489. struct {
  5490. const char *name;
  5491. int regl;
  5492. int regh;
  5493. } sysregs[] = {
  5494. {"FPSCR", 0x1, 0x0},
  5495. {"FPSCR_nzcvqc", 0x2, 0x0},
  5496. {"VPR", 0x4, 0x1},
  5497. {"P0", 0x5, 0x1},
  5498. {"FPCXTNS", 0x6, 0x1},
  5499. {"FPCXTS", 0x7, 0x1}
  5500. };
  5501. char *op_end = strchr (*str, ',');
  5502. size_t op_strlen = op_end - *str;
  5503. for (i = 0; i < sizeof (sysregs) / sizeof (sysregs[0]); i++)
  5504. {
  5505. if (!strncmp (*str, sysregs[i].name, op_strlen))
  5506. {
  5507. val = sysregs[i].regl | (sysregs[i].regh << 3);
  5508. *str = op_end;
  5509. break;
  5510. }
  5511. }
  5512. return val;
  5513. }
  5514. /* Parse the flags argument to CPSI[ED]. Returns FAIL on error, or a
  5515. value suitable for splatting into the AIF field of the instruction. */
  5516. static int
  5517. parse_cps_flags (char **str)
  5518. {
  5519. int val = 0;
  5520. int saw_a_flag = 0;
  5521. char *s = *str;
  5522. for (;;)
  5523. switch (*s++)
  5524. {
  5525. case '\0': case ',':
  5526. goto done;
  5527. case 'a': case 'A': saw_a_flag = 1; val |= 0x4; break;
  5528. case 'i': case 'I': saw_a_flag = 1; val |= 0x2; break;
  5529. case 'f': case 'F': saw_a_flag = 1; val |= 0x1; break;
  5530. default:
  5531. inst.error = _("unrecognized CPS flag");
  5532. return FAIL;
  5533. }
  5534. done:
  5535. if (saw_a_flag == 0)
  5536. {
  5537. inst.error = _("missing CPS flags");
  5538. return FAIL;
  5539. }
  5540. *str = s - 1;
  5541. return val;
  5542. }
  5543. /* Parse an endian specifier ("BE" or "LE", case insensitive);
  5544. returns 0 for big-endian, 1 for little-endian, FAIL for an error. */
  5545. static int
  5546. parse_endian_specifier (char **str)
  5547. {
  5548. int little_endian;
  5549. char *s = *str;
  5550. if (strncasecmp (s, "BE", 2))
  5551. little_endian = 0;
  5552. else if (strncasecmp (s, "LE", 2))
  5553. little_endian = 1;
  5554. else
  5555. {
  5556. inst.error = _("valid endian specifiers are be or le");
  5557. return FAIL;
  5558. }
  5559. if (ISALNUM (s[2]) || s[2] == '_')
  5560. {
  5561. inst.error = _("valid endian specifiers are be or le");
  5562. return FAIL;
  5563. }
  5564. *str = s + 2;
  5565. return little_endian;
  5566. }
  5567. /* Parse a rotation specifier: ROR #0, #8, #16, #24. *val receives a
  5568. value suitable for poking into the rotate field of an sxt or sxta
  5569. instruction, or FAIL on error. */
  5570. static int
  5571. parse_ror (char **str)
  5572. {
  5573. int rot;
  5574. char *s = *str;
  5575. if (strncasecmp (s, "ROR", 3) == 0)
  5576. s += 3;
  5577. else
  5578. {
  5579. inst.error = _("missing rotation field after comma");
  5580. return FAIL;
  5581. }
  5582. if (parse_immediate (&s, &rot, 0, 24, false) == FAIL)
  5583. return FAIL;
  5584. switch (rot)
  5585. {
  5586. case 0: *str = s; return 0x0;
  5587. case 8: *str = s; return 0x1;
  5588. case 16: *str = s; return 0x2;
  5589. case 24: *str = s; return 0x3;
  5590. default:
  5591. inst.error = _("rotation can only be 0, 8, 16, or 24");
  5592. return FAIL;
  5593. }
  5594. }
  5595. /* Parse a conditional code (from conds[] below). The value returned is in the
  5596. range 0 .. 14, or FAIL. */
  5597. static int
  5598. parse_cond (char **str)
  5599. {
  5600. char *q;
  5601. const struct asm_cond *c;
  5602. int n;
  5603. /* Condition codes are always 2 characters, so matching up to
  5604. 3 characters is sufficient. */
  5605. char cond[3];
  5606. q = *str;
  5607. n = 0;
  5608. while (ISALPHA (*q) && n < 3)
  5609. {
  5610. cond[n] = TOLOWER (*q);
  5611. q++;
  5612. n++;
  5613. }
  5614. c = (const struct asm_cond *) str_hash_find_n (arm_cond_hsh, cond, n);
  5615. if (!c)
  5616. {
  5617. inst.error = _("condition required");
  5618. return FAIL;
  5619. }
  5620. *str = q;
  5621. return c->value;
  5622. }
  5623. /* Parse an option for a barrier instruction. Returns the encoding for the
  5624. option, or FAIL. */
  5625. static int
  5626. parse_barrier (char **str)
  5627. {
  5628. char *p, *q;
  5629. const struct asm_barrier_opt *o;
  5630. p = q = *str;
  5631. while (ISALPHA (*q))
  5632. q++;
  5633. o = (const struct asm_barrier_opt *) str_hash_find_n (arm_barrier_opt_hsh, p,
  5634. q - p);
  5635. if (!o)
  5636. return FAIL;
  5637. if (!mark_feature_used (&o->arch))
  5638. return FAIL;
  5639. *str = q;
  5640. return o->value;
  5641. }
  5642. /* Parse the operands of a table branch instruction. Similar to a memory
  5643. operand. */
  5644. static int
  5645. parse_tb (char **str)
  5646. {
  5647. char * p = *str;
  5648. int reg;
  5649. if (skip_past_char (&p, '[') == FAIL)
  5650. {
  5651. inst.error = _("'[' expected");
  5652. return FAIL;
  5653. }
  5654. if ((reg = arm_reg_parse (&p, REG_TYPE_RN)) == FAIL)
  5655. {
  5656. inst.error = _(reg_expected_msgs[REG_TYPE_RN]);
  5657. return FAIL;
  5658. }
  5659. inst.operands[0].reg = reg;
  5660. if (skip_past_comma (&p) == FAIL)
  5661. {
  5662. inst.error = _("',' expected");
  5663. return FAIL;
  5664. }
  5665. if ((reg = arm_reg_parse (&p, REG_TYPE_RN)) == FAIL)
  5666. {
  5667. inst.error = _(reg_expected_msgs[REG_TYPE_RN]);
  5668. return FAIL;
  5669. }
  5670. inst.operands[0].imm = reg;
  5671. if (skip_past_comma (&p) == SUCCESS)
  5672. {
  5673. if (parse_shift (&p, 0, SHIFT_LSL_IMMEDIATE) == FAIL)
  5674. return FAIL;
  5675. if (inst.relocs[0].exp.X_add_number != 1)
  5676. {
  5677. inst.error = _("invalid shift");
  5678. return FAIL;
  5679. }
  5680. inst.operands[0].shifted = 1;
  5681. }
  5682. if (skip_past_char (&p, ']') == FAIL)
  5683. {
  5684. inst.error = _("']' expected");
  5685. return FAIL;
  5686. }
  5687. *str = p;
  5688. return SUCCESS;
  5689. }
/* Parse the operands of a Neon VMOV instruction.  See do_neon_mov for more
   information on the types the operands can take and how they are encoded.
   Up to four operands may be read; this function handles setting the
   ".present" field for each read operand itself.
   Updates STR and WHICH_OPERAND if parsing is successful and returns SUCCESS,
   else returns FAIL.

   The numbered "cases" in the comments below refer to the VMOV operand
   forms enumerated in do_neon_mov.  The parse attempts are strictly
   ordered: each else-if re-tries from the same PTR position with a
   different register class, so the order of the branches is load-bearing
   and must not be rearranged.  */
static int
parse_neon_mov (char **str, int *which_operand)
{
  int i = *which_operand, val;
  enum arm_reg_type rtype;
  char *ptr = *str;
  struct neon_type_el optype;

  /* First operand is an MVE vector scalar <Qd[idx]>.  */
  if ((val = parse_scalar (&ptr, 8, &optype, REG_TYPE_MQ)) != FAIL)
    {
      /* Cases 17 or 19.  */
      inst.operands[i].reg = val;
      inst.operands[i].isvec = 1;
      inst.operands[i].isscalar = 2;	/* 2 marks an MVE (Q-reg) scalar.  */
      inst.operands[i].vectype = optype;
      inst.operands[i++].present = 1;

      if (skip_past_comma (&ptr) == FAIL)
	goto wanted_comma;

      if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) != FAIL)
	{
	  /* Case 17: VMOV<c>.<dt> <Qd[idx]>, <Rt>  */
	  inst.operands[i].reg = val;
	  inst.operands[i].isreg = 1;
	  inst.operands[i].present = 1;
	}
      else if ((val = parse_scalar (&ptr, 8, &optype, REG_TYPE_MQ)) != FAIL)
	{
	  /* Case 19: VMOV<c> <Qd[idx]>, <Qd[idx2]>, <Rt>, <Rt2>  */
	  inst.operands[i].reg = val;
	  inst.operands[i].isvec = 1;
	  inst.operands[i].isscalar = 2;
	  inst.operands[i].vectype = optype;
	  inst.operands[i++].present = 1;

	  if (skip_past_comma (&ptr) == FAIL)
	    goto wanted_comma;

	  if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) == FAIL)
	    goto wanted_arm;

	  inst.operands[i].reg = val;
	  inst.operands[i].isreg = 1;
	  inst.operands[i++].present = 1;

	  if (skip_past_comma (&ptr) == FAIL)
	    goto wanted_comma;

	  if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) == FAIL)
	    goto wanted_arm;

	  inst.operands[i].reg = val;
	  inst.operands[i].isreg = 1;
	  inst.operands[i].present = 1;
	}
      else
	{
	  first_error (_("expected ARM or MVE vector register"));
	  return FAIL;
	}
    }
  /* First operand is a VFP D-register scalar <Dn[x]>.  */
  else if ((val = parse_scalar (&ptr, 8, &optype, REG_TYPE_VFD)) != FAIL)
    {
      /* Case 4: VMOV<c><q>.<size> <Dn[x]>, <Rd>.  */
      inst.operands[i].reg = val;
      inst.operands[i].isscalar = 1;	/* 1 marks a Neon (D-reg) scalar.  */
      inst.operands[i].vectype = optype;
      inst.operands[i++].present = 1;

      if (skip_past_comma (&ptr) == FAIL)
	goto wanted_comma;

      if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) == FAIL)
	goto wanted_arm;

      inst.operands[i].reg = val;
      inst.operands[i].isreg = 1;
      inst.operands[i].present = 1;
    }
  /* First operand is a whole S/D/Q (or MVE Q) vector register.  */
  else if (((val = arm_typed_reg_parse (&ptr, REG_TYPE_NSDQ, &rtype, &optype))
	    != FAIL)
	   || ((val = arm_typed_reg_parse (&ptr, REG_TYPE_MQ, &rtype, &optype))
	       != FAIL))
    {
      /* Cases 0, 1, 2, 3, 5 (D only).  */
      if (skip_past_comma (&ptr) == FAIL)
	goto wanted_comma;

      inst.operands[i].reg = val;
      inst.operands[i].isreg = 1;
      inst.operands[i].isquad = (rtype == REG_TYPE_NQ);
      inst.operands[i].issingle = (rtype == REG_TYPE_VFS);
      inst.operands[i].isvec = 1;
      inst.operands[i].vectype = optype;
      inst.operands[i++].present = 1;

      if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) != FAIL)
	{
	  /* Case 5: VMOV<c><q> <Dm>, <Rd>, <Rn>.
	     Case 13: VMOV <Sd>, <Rm>  */
	  inst.operands[i].reg = val;
	  inst.operands[i].isreg = 1;
	  inst.operands[i].present = 1;

	  if (rtype == REG_TYPE_NQ)
	    {
	      first_error (_("can't use Neon quad register here"));
	      return FAIL;
	    }
	  else if (rtype != REG_TYPE_VFS)
	    {
	      /* D register: a second core register must follow (case 5).  */
	      i++;
	      if (skip_past_comma (&ptr) == FAIL)
		goto wanted_comma;
	      if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) == FAIL)
		goto wanted_arm;
	      inst.operands[i].reg = val;
	      inst.operands[i].isreg = 1;
	      inst.operands[i].present = 1;
	    }
	}
      else if (((val = arm_typed_reg_parse (&ptr, REG_TYPE_NSDQ, &rtype,
					    &optype)) != FAIL)
	       || ((val = arm_typed_reg_parse (&ptr, REG_TYPE_MQ, &rtype,
					       &optype)) != FAIL))
	{
	  /* Case 0: VMOV<c><q> <Qd>, <Qm>
	     Case 1: VMOV<c><q> <Dd>, <Dm>
	     Case 8: VMOV.F32 <Sd>, <Sm>
	     Case 15: VMOV <Sd>, <Se>, <Rn>, <Rm>  */
	  inst.operands[i].reg = val;
	  inst.operands[i].isreg = 1;
	  inst.operands[i].isquad = (rtype == REG_TYPE_NQ);
	  inst.operands[i].issingle = (rtype == REG_TYPE_VFS);
	  inst.operands[i].isvec = 1;
	  inst.operands[i].vectype = optype;
	  inst.operands[i].present = 1;

	  if (skip_past_comma (&ptr) == SUCCESS)
	    {
	      /* Case 15.  */
	      i++;

	      if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) == FAIL)
		goto wanted_arm;

	      inst.operands[i].reg = val;
	      inst.operands[i].isreg = 1;
	      inst.operands[i++].present = 1;

	      if (skip_past_comma (&ptr) == FAIL)
		goto wanted_comma;

	      if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) == FAIL)
		goto wanted_arm;

	      inst.operands[i].reg = val;
	      inst.operands[i].isreg = 1;
	      inst.operands[i].present = 1;
	    }
	}
      else if (parse_qfloat_immediate (&ptr, &inst.operands[i].imm) == SUCCESS)
	/* Case 2: VMOV<c><q>.<dt> <Qd>, #<float-imm>
	   Case 3: VMOV<c><q>.<dt> <Dd>, #<float-imm>
	   Case 10: VMOV.F32 <Sd>, #<imm>
	   Case 11: VMOV.F64 <Dd>, #<imm>  */
	inst.operands[i].immisfloat = 1;
      else if (parse_big_immediate (&ptr, i, NULL, /*allow_symbol_p=*/false)
	       == SUCCESS)
	/* Case 2: VMOV<c><q>.<dt> <Qd>, #<imm>
	   Case 3: VMOV<c><q>.<dt> <Dd>, #<imm>  */
	;
      else
	{
	  first_error (_("expected <Rm> or <Dm> or <Qm> operand"));
	  return FAIL;
	}
    }
  /* First operand is an ARM core register.  */
  else if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) != FAIL)
    {
      /* Cases 6, 7, 16, 18.  */
      inst.operands[i].reg = val;
      inst.operands[i].isreg = 1;
      inst.operands[i++].present = 1;

      if (skip_past_comma (&ptr) == FAIL)
	goto wanted_comma;

      if ((val = parse_scalar (&ptr, 8, &optype, REG_TYPE_MQ)) != FAIL)
	{
	  /* Case 18: VMOV<c>.<dt> <Rt>, <Qn[idx]>  */
	  inst.operands[i].reg = val;
	  inst.operands[i].isscalar = 2;
	  inst.operands[i].present = 1;
	  inst.operands[i].vectype = optype;
	}
      else if ((val = parse_scalar (&ptr, 8, &optype, REG_TYPE_VFD)) != FAIL)
	{
	  /* Case 6: VMOV<c><q>.<dt> <Rd>, <Dn[x]>  */
	  inst.operands[i].reg = val;
	  inst.operands[i].isscalar = 1;
	  inst.operands[i].present = 1;
	  inst.operands[i].vectype = optype;
	}
      else if ((val = arm_reg_parse (&ptr, REG_TYPE_RN)) != FAIL)
	{
	  /* Two core registers so far: cases 7, 14 or 16.  */
	  inst.operands[i].reg = val;
	  inst.operands[i].isreg = 1;
	  inst.operands[i++].present = 1;

	  if (skip_past_comma (&ptr) == FAIL)
	    goto wanted_comma;

	  if ((val = arm_typed_reg_parse (&ptr, REG_TYPE_VFSD, &rtype, &optype))
	      != FAIL)
	    {
	      /* Case 7: VMOV<c><q> <Rd>, <Rn>, <Dm>  */
	      inst.operands[i].reg = val;
	      inst.operands[i].isreg = 1;
	      inst.operands[i].isvec = 1;
	      inst.operands[i].issingle = (rtype == REG_TYPE_VFS);
	      inst.operands[i].vectype = optype;
	      inst.operands[i].present = 1;

	      if (rtype == REG_TYPE_VFS)
		{
		  /* Case 14: a second consecutive S register follows.  */
		  i++;
		  if (skip_past_comma (&ptr) == FAIL)
		    goto wanted_comma;
		  if ((val = arm_typed_reg_parse (&ptr, REG_TYPE_VFS, NULL,
						  &optype)) == FAIL)
		    {
		      first_error (_(reg_expected_msgs[REG_TYPE_VFS]));
		      return FAIL;
		    }
		  inst.operands[i].reg = val;
		  inst.operands[i].isreg = 1;
		  inst.operands[i].isvec = 1;
		  inst.operands[i].issingle = 1;
		  inst.operands[i].vectype = optype;
		  inst.operands[i].present = 1;
		}
	    }
	  else
	    {
	      if ((val = parse_scalar (&ptr, 8, &optype, REG_TYPE_MQ))
		  != FAIL)
		{
		  /* Case 16: VMOV<c> <Rt>, <Rt2>, <Qd[idx]>, <Qd[idx2]>  */
		  inst.operands[i].reg = val;
		  inst.operands[i].isvec = 1;
		  inst.operands[i].isscalar = 2;
		  inst.operands[i].vectype = optype;
		  inst.operands[i++].present = 1;

		  if (skip_past_comma (&ptr) == FAIL)
		    goto wanted_comma;

		  if ((val = parse_scalar (&ptr, 8, &optype, REG_TYPE_MQ))
		      == FAIL)
		    {
		      first_error (_(reg_expected_msgs[REG_TYPE_MQ]));
		      return FAIL;
		    }
		  inst.operands[i].reg = val;
		  inst.operands[i].isvec = 1;
		  inst.operands[i].isscalar = 2;
		  inst.operands[i].vectype = optype;
		  inst.operands[i].present = 1;
		}
	      else
		{
		  first_error (_("VFP single, double or MVE vector register"
				 " expected"));
		  return FAIL;
		}
	    }
	}
      else if ((val = arm_typed_reg_parse (&ptr, REG_TYPE_VFS, NULL, &optype))
	       != FAIL)
	{
	  /* Case 13.  */
	  inst.operands[i].reg = val;
	  inst.operands[i].isreg = 1;
	  inst.operands[i].isvec = 1;
	  inst.operands[i].issingle = 1;
	  inst.operands[i].vectype = optype;
	  inst.operands[i].present = 1;
	}
    }
  else
    {
      first_error (_("parse error"));
      return FAIL;
    }

  /* Successfully parsed the operands.  Update args.  */
  *which_operand = i;
  *str = ptr;
  return SUCCESS;

 wanted_comma:
  first_error (_("expected comma"));
  return FAIL;

 wanted_arm:
  first_error (_(reg_expected_msgs[REG_TYPE_RN]));
  return FAIL;
}
/* Use this macro when the operand constraints are different
   for ARM and THUMB (e.g. ldrd).  parse_operands selects the low or
   high half of the combined code depending on the instruction set.  */
#define MIX_ARM_THUMB_OPERANDS(arm_operand, thumb_operand) \
	((arm_operand) | ((thumb_operand) << 16))

/* Matcher codes for parse_operands.

   NOTE: the relative order of the enumerators is significant:
   parse_operands treats every code >= OP_FIRST_OPTIONAL as an optional
   operand (and records a backtrack point), so new mandatory codes must
   be added before OP_oI7b and new optional ones after it.  */
enum operand_parse_code
{
  OP_stop,	/* end of line */

  OP_RR,	/* ARM register */
  OP_RRnpc,	/* ARM register, not r15 */
  OP_RRnpcsp,	/* ARM register, neither r15 nor r13 (a.k.a. 'BadReg') */
  OP_RRnpcb,	/* ARM register, not r15, in square brackets */
  OP_RRnpctw,	/* ARM register, not r15 in Thumb-state or with writeback,
		   optional trailing ! */
  OP_RRw,	/* ARM register, not r15, optional trailing ! */
  OP_RCP,	/* Coprocessor number */
  OP_RCN,	/* Coprocessor register */
  OP_RF,	/* FPA register */
  OP_RVS,	/* VFP single precision register */
  OP_RVD,	/* VFP double precision register (0..15) */
  OP_RND,	/* Neon double precision register (0..31) */
  OP_RNDMQ,	/* Neon double precision (0..31) or MVE vector register.  */
  OP_RNDMQR,	/* Neon double precision (0..31), MVE vector or ARM register.
		 */
  OP_RNSDMQR,	/* Neon single or double precision, MVE vector or ARM register.
		 */
  OP_RNQ,	/* Neon quad precision register */
  OP_RNQMQ,	/* Neon quad or MVE vector register.  */
  OP_RVSD,	/* VFP single or double precision register */
  OP_RVSD_COND,	/* VFP single, double precision register or condition code.  */
  OP_RVSDMQ,	/* VFP single, double precision or MVE vector register.  */
  OP_RNSD,	/* Neon single or double precision register */
  OP_RNDQ,	/* Neon double or quad precision register */
  OP_RNDQMQ,	/* Neon double, quad or MVE vector register.  */
  OP_RNDQMQR,	/* Neon double, quad, MVE vector or ARM register.  */
  OP_RNSDQ,	/* Neon single, double or quad precision register */
  OP_RNSC,	/* Neon scalar D[X] */
  OP_RVC,	/* VFP control register */
  OP_RMF,	/* Maverick F register */
  OP_RMD,	/* Maverick D register */
  OP_RMFX,	/* Maverick FX register */
  OP_RMDX,	/* Maverick DX register */
  OP_RMAX,	/* Maverick AX register */
  OP_RMDS,	/* Maverick DSPSC register */
  OP_RIWR,	/* iWMMXt wR register */
  OP_RIWC,	/* iWMMXt wC register */
  OP_RIWG,	/* iWMMXt wCG register */
  OP_RXA,	/* XScale accumulator register */

  OP_RNSDMQ,	/* Neon single, double or MVE vector register */
  OP_RNSDQMQ,	/* Neon single, double or quad register or MVE vector register
		 */
  OP_RNSDQMQR,	/* Neon single, double or quad register, MVE vector register or
		   GPR (no SP/PC) */
  OP_RMQ,	/* MVE vector register.  */
  OP_RMQRZ,	/* MVE vector or ARM register including ZR.  */
  OP_RMQRR,     /* MVE vector or ARM register.  */

  /* New operands for Armv8.1-M Mainline.  */
  OP_LR,	/* ARM LR register */
  OP_SP,	/* ARM SP register */
  OP_R12,
  OP_RRe,	/* ARM register, only even numbered.  */
  OP_RRo,	/* ARM register, only odd numbered, not r13 or r15.  */
  OP_RRnpcsp_I32, /* ARM register (no BadReg) or literal 1 .. 32 */
  OP_RR_ZR,	/* ARM register or ZR but no PC */

  OP_REGLST,	/* ARM register list */
  OP_CLRMLST,	/* CLRM register list */
  OP_VRSLST,	/* VFP single-precision register list */
  OP_VRDLST,	/* VFP double-precision register list */
  OP_VRSDLST,   /* VFP single or double-precision register list (& quad) */
  OP_NRDLST,    /* Neon double-precision register list (d0-d31, qN aliases) */
  OP_NSTRLST,   /* Neon element/structure list */
  OP_VRSDVLST,  /* VFP single or double-precision register list and VPR */
  OP_MSTRLST2,	/* MVE vector list with two elements.  */
  OP_MSTRLST4,	/* MVE vector list with four elements.  */

  OP_RNDQ_I0,   /* Neon D or Q reg, or immediate zero.  */
  OP_RVSD_I0,	/* VFP S or D reg, or immediate zero.  */
  OP_RSVD_FI0,	/* VFP S or D reg, or floating point immediate zero.  */
  OP_RSVDMQ_FI0, /* VFP S, D, MVE vector register or floating point immediate
		    zero.  */
  OP_RR_RNSC,   /* ARM reg or Neon scalar.  */
  OP_RNSD_RNSC, /* Neon S or D reg, or Neon scalar.  */
  OP_RNSDQ_RNSC, /* Vector S, D or Q reg, or Neon scalar.  */
  OP_RNSDQ_RNSC_MQ, /* Vector S, D or Q reg, Neon scalar or MVE vector register.
		     */
  OP_RNSDQ_RNSC_MQ_RR, /* Vector S, D or Q reg, or MVE vector reg , or Neon
			  scalar, or ARM register.  */
  OP_RNDQ_RNSC, /* Neon D or Q reg, or Neon scalar.  */
  OP_RNDQ_RNSC_RR, /* Neon D or Q reg, Neon scalar, or ARM register.  */
  OP_RNDQMQ_RNSC_RR, /* Neon D or Q reg, Neon scalar, MVE vector or ARM
			register.  */
  OP_RNDQMQ_RNSC, /* Neon D, Q or MVE vector reg, or Neon scalar.  */
  OP_RND_RNSC,  /* Neon D reg, or Neon scalar.  */
  OP_VMOV,      /* Neon VMOV operands.  */
  OP_RNDQ_Ibig,	/* Neon D or Q reg, or big immediate for logic and VMVN.  */
  /* Neon D, Q or MVE vector register, or big immediate for logic and VMVN.  */
  OP_RNDQMQ_Ibig,
  OP_RNDQ_I63b, /* Neon D or Q reg, or immediate for shift.  */
  OP_RNDQMQ_I63b_RR, /* Neon D or Q reg, immediate for shift, MVE vector or
			ARM register.  */
  OP_RIWR_I32z, /* iWMMXt wR register, or immediate 0 .. 32 for iWMMXt2.  */
  OP_VLDR,	/* VLDR operand.  */

  OP_I0,        /* immediate zero */
  OP_I7,	/* immediate value 0 .. 7 */
  OP_I15,	/*		   0 .. 15 */
  OP_I16,	/*		   1 .. 16 */
  OP_I16z,      /*                 0 .. 16 */
  OP_I31,	/*		   0 .. 31 */
  OP_I31w,	/*		   0 .. 31, optional trailing ! */
  OP_I32,	/*		   1 .. 32 */
  OP_I32z,	/*		   0 .. 32 */
  OP_I48_I64,	/*		   48 or 64 */
  OP_I63,	/*		   0 .. 63 */
  OP_I63s,	/*		 -64 .. 63 */
  OP_I64,	/*		   1 .. 64 */
  OP_I64z,	/*		   0 .. 64 */
  OP_I127,	/*		   0 .. 127 */
  OP_I255,	/*		   0 .. 255 */
  OP_I511,	/*		   0 .. 511 */
  OP_I4095,	/*		   0 .. 4095 */
  OP_I8191,	/*		   0 .. 8191 */
  OP_I4b,	/* immediate, prefix optional, 1 .. 4 */
  OP_I7b,	/*			       0 .. 7 */
  OP_I15b,	/*			       0 .. 15 */
  OP_I31b,	/*			       0 .. 31 */

  OP_SH,	/* shifter operand */
  OP_SHG,	/* shifter operand with possible group relocation */
  OP_ADDR,	/* Memory address expression (any mode) */
  OP_ADDRMVE,	/* Memory address expression for MVE's VSTR/VLDR.  */
  OP_ADDRGLDR,	/* Mem addr expr (any mode) with possible LDR group reloc */
  OP_ADDRGLDRS, /* Mem addr expr (any mode) with possible LDRS group reloc */
  OP_ADDRGLDC,  /* Mem addr expr (any mode) with possible LDC group reloc */
  OP_EXP,	/* arbitrary expression */
  OP_EXPi,	/* same, with optional immediate prefix */
  OP_EXPr,	/* same, with optional relocation suffix */
  OP_EXPs,	/* same, with optional non-first operand relocation suffix */
  OP_HALF,	/* 0 .. 65535 or low/high reloc.  */
  OP_IROT1,	/* VCADD rotate immediate: 90, 270.  */
  OP_IROT2,	/* VCMLA rotate immediate: 0, 90, 180, 270.  */

  OP_CPSF,	/* CPS flags */
  OP_ENDI,	/* Endianness specifier */
  OP_wPSR,	/* CPSR/SPSR/APSR mask for msr (writing).  */
  OP_rPSR,	/* CPSR/SPSR/APSR mask for msr (reading).  */
  OP_COND,	/* conditional code */
  OP_TB,	/* Table branch.  */

  OP_APSR_RR,   /* ARM register or "APSR_nzcv".  */

  OP_RRnpc_I0,	/* ARM register or literal 0 */
  OP_RR_EXr,	/* ARM register or expression with opt. reloc stuff.  */
  OP_RR_EXi,	/* ARM register or expression with imm prefix */
  OP_RF_IF,	/* FPA register or immediate */
  OP_RIWR_RIWC, /* iWMMXt R or C reg */
  OP_RIWC_RIWG, /* iWMMXt wC or wCG reg */

  /* Optional operands.  */
  OP_oI7b,	 /* immediate, prefix optional, 0 .. 7 */
  OP_oI31b,	 /*				0 .. 31 */
  OP_oI32b,      /*                             1 .. 32 */
  OP_oI32z,      /*                             0 .. 32 */
  OP_oIffffb,	 /*				0 .. 65535 */
  OP_oI255c,	 /*	  curly-brace enclosed, 0 .. 255 */

  OP_oRR,	 /* ARM register */
  OP_oLR,	 /* ARM LR register */
  OP_oRRnpc,	 /* ARM register, not the PC */
  OP_oRRnpcsp,	 /* ARM register, neither the PC nor the SP (a.k.a. BadReg) */
  OP_oRRw,	 /* ARM register, not r15, optional trailing ! */
  OP_oRND,       /* Optional Neon double precision register */
  OP_oRNQ,       /* Optional Neon quad precision register */
  OP_oRNDQMQ,     /* Optional Neon double, quad or MVE vector register.  */
  OP_oRNDQ,      /* Optional Neon double or quad precision register */
  OP_oRNSDQ,	 /* Optional single, double or quad precision vector register */
  OP_oRNSDQMQ,	 /* Optional single, double or quad register or MVE vector
		    register.  */
  OP_oRNSDMQ,	 /* Optional single, double register or MVE vector
		    register.  */
  OP_oSHll,	 /* LSL immediate */
  OP_oSHar,	 /* ASR immediate */
  OP_oSHllar,	 /* LSL or ASR immediate */
  OP_oROR,	 /* ROR 0/8/16/24 */
  OP_oBARRIER_I15, /* Option argument for a barrier instruction.  */

  OP_oRMQRZ,	/* optional MVE vector or ARM register including ZR.  */

  /* Some pre-defined mixed (ARM/THUMB) operands.  */
  OP_RR_npcsp		= MIX_ARM_THUMB_OPERANDS (OP_RR, OP_RRnpcsp),
  OP_RRnpc_npcsp	= MIX_ARM_THUMB_OPERANDS (OP_RRnpc, OP_RRnpcsp),
  OP_oRRnpc_npcsp	= MIX_ARM_THUMB_OPERANDS (OP_oRRnpc, OP_oRRnpcsp),

  OP_FIRST_OPTIONAL = OP_oI7b
};
  6160. /* Generic instruction operand parser. This does no encoding and no
  6161. semantic validation; it merely squirrels values away in the inst
  6162. structure. Returns SUCCESS or FAIL depending on whether the
  6163. specified grammar matched. */
  6164. static int
  6165. parse_operands (char *str, const unsigned int *pattern, bool thumb)
  6166. {
  6167. unsigned const int *upat = pattern;
  6168. char *backtrack_pos = 0;
  6169. const char *backtrack_error = 0;
  6170. int i, val = 0, backtrack_index = 0;
  6171. enum arm_reg_type rtype;
  6172. parse_operand_result result;
  6173. unsigned int op_parse_code;
  6174. bool partial_match;
  6175. #define po_char_or_fail(chr) \
  6176. do \
  6177. { \
  6178. if (skip_past_char (&str, chr) == FAIL) \
  6179. goto bad_args; \
  6180. } \
  6181. while (0)
  6182. #define po_reg_or_fail(regtype) \
  6183. do \
  6184. { \
  6185. val = arm_typed_reg_parse (& str, regtype, & rtype, \
  6186. & inst.operands[i].vectype); \
  6187. if (val == FAIL) \
  6188. { \
  6189. first_error (_(reg_expected_msgs[regtype])); \
  6190. goto failure; \
  6191. } \
  6192. inst.operands[i].reg = val; \
  6193. inst.operands[i].isreg = 1; \
  6194. inst.operands[i].isquad = (rtype == REG_TYPE_NQ); \
  6195. inst.operands[i].issingle = (rtype == REG_TYPE_VFS); \
  6196. inst.operands[i].isvec = (rtype == REG_TYPE_VFS \
  6197. || rtype == REG_TYPE_VFD \
  6198. || rtype == REG_TYPE_NQ); \
  6199. inst.operands[i].iszr = (rtype == REG_TYPE_ZR); \
  6200. } \
  6201. while (0)
  6202. #define po_reg_or_goto(regtype, label) \
  6203. do \
  6204. { \
  6205. val = arm_typed_reg_parse (& str, regtype, & rtype, \
  6206. & inst.operands[i].vectype); \
  6207. if (val == FAIL) \
  6208. goto label; \
  6209. \
  6210. inst.operands[i].reg = val; \
  6211. inst.operands[i].isreg = 1; \
  6212. inst.operands[i].isquad = (rtype == REG_TYPE_NQ); \
  6213. inst.operands[i].issingle = (rtype == REG_TYPE_VFS); \
  6214. inst.operands[i].isvec = (rtype == REG_TYPE_VFS \
  6215. || rtype == REG_TYPE_VFD \
  6216. || rtype == REG_TYPE_NQ); \
  6217. inst.operands[i].iszr = (rtype == REG_TYPE_ZR); \
  6218. } \
  6219. while (0)
  6220. #define po_imm_or_fail(min, max, popt) \
  6221. do \
  6222. { \
  6223. if (parse_immediate (&str, &val, min, max, popt) == FAIL) \
  6224. goto failure; \
  6225. inst.operands[i].imm = val; \
  6226. } \
  6227. while (0)
  6228. #define po_imm1_or_imm2_or_fail(imm1, imm2, popt) \
  6229. do \
  6230. { \
  6231. expressionS exp; \
  6232. my_get_expression (&exp, &str, popt); \
  6233. if (exp.X_op != O_constant) \
  6234. { \
  6235. inst.error = _("constant expression required"); \
  6236. goto failure; \
  6237. } \
  6238. if (exp.X_add_number != imm1 && exp.X_add_number != imm2) \
  6239. { \
  6240. inst.error = _("immediate value 48 or 64 expected"); \
  6241. goto failure; \
  6242. } \
  6243. inst.operands[i].imm = exp.X_add_number; \
  6244. } \
  6245. while (0)
  6246. #define po_scalar_or_goto(elsz, label, reg_type) \
  6247. do \
  6248. { \
  6249. val = parse_scalar (& str, elsz, & inst.operands[i].vectype, \
  6250. reg_type); \
  6251. if (val == FAIL) \
  6252. goto label; \
  6253. inst.operands[i].reg = val; \
  6254. inst.operands[i].isscalar = 1; \
  6255. } \
  6256. while (0)
  6257. #define po_misc_or_fail(expr) \
  6258. do \
  6259. { \
  6260. if (expr) \
  6261. goto failure; \
  6262. } \
  6263. while (0)
  6264. #define po_misc_or_fail_no_backtrack(expr) \
  6265. do \
  6266. { \
  6267. result = expr; \
  6268. if (result == PARSE_OPERAND_FAIL_NO_BACKTRACK) \
  6269. backtrack_pos = 0; \
  6270. if (result != PARSE_OPERAND_SUCCESS) \
  6271. goto failure; \
  6272. } \
  6273. while (0)
  6274. #define po_barrier_or_imm(str) \
  6275. do \
  6276. { \
  6277. val = parse_barrier (&str); \
  6278. if (val == FAIL && ! ISALPHA (*str)) \
  6279. goto immediate; \
  6280. if (val == FAIL \
  6281. /* ISB can only take SY as an option. */ \
  6282. || ((inst.instruction & 0xf0) == 0x60 \
  6283. && val != 0xf)) \
  6284. { \
  6285. inst.error = _("invalid barrier type"); \
  6286. backtrack_pos = 0; \
  6287. goto failure; \
  6288. } \
  6289. } \
  6290. while (0)
  6291. skip_whitespace (str);
  6292. for (i = 0; upat[i] != OP_stop; i++)
  6293. {
  6294. op_parse_code = upat[i];
  6295. if (op_parse_code >= 1<<16)
  6296. op_parse_code = thumb ? (op_parse_code >> 16)
  6297. : (op_parse_code & ((1<<16)-1));
  6298. if (op_parse_code >= OP_FIRST_OPTIONAL)
  6299. {
  6300. /* Remember where we are in case we need to backtrack. */
  6301. backtrack_pos = str;
  6302. backtrack_error = inst.error;
  6303. backtrack_index = i;
  6304. }
  6305. if (i > 0 && (i > 1 || inst.operands[0].present))
  6306. po_char_or_fail (',');
  6307. switch (op_parse_code)
  6308. {
  6309. /* Registers */
  6310. case OP_oRRnpc:
  6311. case OP_oRRnpcsp:
  6312. case OP_RRnpc:
  6313. case OP_RRnpcsp:
  6314. case OP_oRR:
  6315. case OP_RRe:
  6316. case OP_RRo:
  6317. case OP_LR:
  6318. case OP_oLR:
  6319. case OP_SP:
  6320. case OP_R12:
  6321. case OP_RR: po_reg_or_fail (REG_TYPE_RN); break;
  6322. case OP_RCP: po_reg_or_fail (REG_TYPE_CP); break;
  6323. case OP_RCN: po_reg_or_fail (REG_TYPE_CN); break;
  6324. case OP_RF: po_reg_or_fail (REG_TYPE_FN); break;
  6325. case OP_RVS: po_reg_or_fail (REG_TYPE_VFS); break;
  6326. case OP_RVD: po_reg_or_fail (REG_TYPE_VFD); break;
  6327. case OP_oRND:
  6328. case OP_RNSDMQR:
  6329. po_reg_or_goto (REG_TYPE_VFS, try_rndmqr);
  6330. break;
  6331. try_rndmqr:
  6332. case OP_RNDMQR:
  6333. po_reg_or_goto (REG_TYPE_RN, try_rndmq);
  6334. break;
  6335. try_rndmq:
  6336. case OP_RNDMQ:
  6337. po_reg_or_goto (REG_TYPE_MQ, try_rnd);
  6338. break;
  6339. try_rnd:
  6340. case OP_RND: po_reg_or_fail (REG_TYPE_VFD); break;
  6341. case OP_RVC:
  6342. po_reg_or_goto (REG_TYPE_VFC, coproc_reg);
  6343. break;
  6344. /* Also accept generic coprocessor regs for unknown registers. */
  6345. coproc_reg:
  6346. po_reg_or_goto (REG_TYPE_CN, vpr_po);
  6347. break;
  6348. /* Also accept P0 or p0 for VPR.P0. Since P0 is already an
  6349. existing register with a value of 0, this seems like the
  6350. best way to parse P0. */
  6351. vpr_po:
  6352. if (strncasecmp (str, "P0", 2) == 0)
  6353. {
  6354. str += 2;
  6355. inst.operands[i].isreg = 1;
  6356. inst.operands[i].reg = 13;
  6357. }
  6358. else
  6359. goto failure;
  6360. break;
  6361. case OP_RMF: po_reg_or_fail (REG_TYPE_MVF); break;
  6362. case OP_RMD: po_reg_or_fail (REG_TYPE_MVD); break;
  6363. case OP_RMFX: po_reg_or_fail (REG_TYPE_MVFX); break;
  6364. case OP_RMDX: po_reg_or_fail (REG_TYPE_MVDX); break;
  6365. case OP_RMAX: po_reg_or_fail (REG_TYPE_MVAX); break;
  6366. case OP_RMDS: po_reg_or_fail (REG_TYPE_DSPSC); break;
  6367. case OP_RIWR: po_reg_or_fail (REG_TYPE_MMXWR); break;
  6368. case OP_RIWC: po_reg_or_fail (REG_TYPE_MMXWC); break;
  6369. case OP_RIWG: po_reg_or_fail (REG_TYPE_MMXWCG); break;
  6370. case OP_RXA: po_reg_or_fail (REG_TYPE_XSCALE); break;
  6371. case OP_oRNQ:
  6372. case OP_RNQMQ:
  6373. po_reg_or_goto (REG_TYPE_MQ, try_nq);
  6374. break;
  6375. try_nq:
  6376. case OP_RNQ: po_reg_or_fail (REG_TYPE_NQ); break;
  6377. case OP_RNSD: po_reg_or_fail (REG_TYPE_NSD); break;
  6378. case OP_RNDQMQR:
  6379. po_reg_or_goto (REG_TYPE_RN, try_rndqmq);
  6380. break;
  6381. try_rndqmq:
  6382. case OP_oRNDQMQ:
  6383. case OP_RNDQMQ:
  6384. po_reg_or_goto (REG_TYPE_MQ, try_rndq);
  6385. break;
  6386. try_rndq:
  6387. case OP_oRNDQ:
  6388. case OP_RNDQ: po_reg_or_fail (REG_TYPE_NDQ); break;
  6389. case OP_RVSDMQ:
  6390. po_reg_or_goto (REG_TYPE_MQ, try_rvsd);
  6391. break;
  6392. try_rvsd:
  6393. case OP_RVSD: po_reg_or_fail (REG_TYPE_VFSD); break;
  6394. case OP_RVSD_COND:
  6395. po_reg_or_goto (REG_TYPE_VFSD, try_cond);
  6396. break;
  6397. case OP_oRNSDMQ:
  6398. case OP_RNSDMQ:
  6399. po_reg_or_goto (REG_TYPE_NSD, try_mq2);
  6400. break;
  6401. try_mq2:
  6402. po_reg_or_fail (REG_TYPE_MQ);
  6403. break;
  6404. case OP_oRNSDQ:
  6405. case OP_RNSDQ: po_reg_or_fail (REG_TYPE_NSDQ); break;
  6406. case OP_RNSDQMQR:
  6407. po_reg_or_goto (REG_TYPE_RN, try_mq);
  6408. break;
  6409. try_mq:
  6410. case OP_oRNSDQMQ:
  6411. case OP_RNSDQMQ:
  6412. po_reg_or_goto (REG_TYPE_MQ, try_nsdq2);
  6413. break;
  6414. try_nsdq2:
  6415. po_reg_or_fail (REG_TYPE_NSDQ);
  6416. inst.error = 0;
  6417. break;
  6418. case OP_RMQRR:
  6419. po_reg_or_goto (REG_TYPE_RN, try_rmq);
  6420. break;
  6421. try_rmq:
  6422. case OP_RMQ:
  6423. po_reg_or_fail (REG_TYPE_MQ);
  6424. break;
  6425. /* Neon scalar. Using an element size of 8 means that some invalid
  6426. scalars are accepted here, so deal with those in later code. */
  6427. case OP_RNSC: po_scalar_or_goto (8, failure, REG_TYPE_VFD); break;
  6428. case OP_RNDQ_I0:
  6429. {
  6430. po_reg_or_goto (REG_TYPE_NDQ, try_imm0);
  6431. break;
  6432. try_imm0:
  6433. po_imm_or_fail (0, 0, true);
  6434. }
  6435. break;
  6436. case OP_RVSD_I0:
  6437. po_reg_or_goto (REG_TYPE_VFSD, try_imm0);
  6438. break;
  6439. case OP_RSVDMQ_FI0:
  6440. po_reg_or_goto (REG_TYPE_MQ, try_rsvd_fi0);
  6441. break;
  6442. try_rsvd_fi0:
  6443. case OP_RSVD_FI0:
  6444. {
  6445. po_reg_or_goto (REG_TYPE_VFSD, try_ifimm0);
  6446. break;
  6447. try_ifimm0:
  6448. if (parse_ifimm_zero (&str))
  6449. inst.operands[i].imm = 0;
  6450. else
  6451. {
  6452. inst.error
  6453. = _("only floating point zero is allowed as immediate value");
  6454. goto failure;
  6455. }
  6456. }
  6457. break;
  6458. case OP_RR_RNSC:
  6459. {
  6460. po_scalar_or_goto (8, try_rr, REG_TYPE_VFD);
  6461. break;
  6462. try_rr:
  6463. po_reg_or_fail (REG_TYPE_RN);
  6464. }
  6465. break;
  6466. case OP_RNSDQ_RNSC_MQ_RR:
  6467. po_reg_or_goto (REG_TYPE_RN, try_rnsdq_rnsc_mq);
  6468. break;
  6469. try_rnsdq_rnsc_mq:
  6470. case OP_RNSDQ_RNSC_MQ:
  6471. po_reg_or_goto (REG_TYPE_MQ, try_rnsdq_rnsc);
  6472. break;
  6473. try_rnsdq_rnsc:
  6474. case OP_RNSDQ_RNSC:
  6475. {
  6476. po_scalar_or_goto (8, try_nsdq, REG_TYPE_VFD);
  6477. inst.error = 0;
  6478. break;
  6479. try_nsdq:
  6480. po_reg_or_fail (REG_TYPE_NSDQ);
  6481. inst.error = 0;
  6482. }
  6483. break;
  6484. case OP_RNSD_RNSC:
  6485. {
  6486. po_scalar_or_goto (8, try_s_scalar, REG_TYPE_VFD);
  6487. break;
  6488. try_s_scalar:
  6489. po_scalar_or_goto (4, try_nsd, REG_TYPE_VFS);
  6490. break;
  6491. try_nsd:
  6492. po_reg_or_fail (REG_TYPE_NSD);
  6493. }
  6494. break;
  6495. case OP_RNDQMQ_RNSC_RR:
  6496. po_reg_or_goto (REG_TYPE_MQ, try_rndq_rnsc_rr);
  6497. break;
  6498. try_rndq_rnsc_rr:
  6499. case OP_RNDQ_RNSC_RR:
  6500. po_reg_or_goto (REG_TYPE_RN, try_rndq_rnsc);
  6501. break;
  6502. case OP_RNDQMQ_RNSC:
  6503. po_reg_or_goto (REG_TYPE_MQ, try_rndq_rnsc);
  6504. break;
  6505. try_rndq_rnsc:
  6506. case OP_RNDQ_RNSC:
  6507. {
  6508. po_scalar_or_goto (8, try_ndq, REG_TYPE_VFD);
  6509. break;
  6510. try_ndq:
  6511. po_reg_or_fail (REG_TYPE_NDQ);
  6512. }
  6513. break;
  6514. case OP_RND_RNSC:
  6515. {
  6516. po_scalar_or_goto (8, try_vfd, REG_TYPE_VFD);
  6517. break;
  6518. try_vfd:
  6519. po_reg_or_fail (REG_TYPE_VFD);
  6520. }
  6521. break;
  6522. case OP_VMOV:
  6523. /* WARNING: parse_neon_mov can move the operand counter, i. If we're
  6524. not careful then bad things might happen. */
  6525. po_misc_or_fail (parse_neon_mov (&str, &i) == FAIL);
  6526. break;
  6527. case OP_RNDQMQ_Ibig:
  6528. po_reg_or_goto (REG_TYPE_MQ, try_rndq_ibig);
  6529. break;
  6530. try_rndq_ibig:
  6531. case OP_RNDQ_Ibig:
  6532. {
  6533. po_reg_or_goto (REG_TYPE_NDQ, try_immbig);
  6534. break;
  6535. try_immbig:
  6536. /* There's a possibility of getting a 64-bit immediate here, so
  6537. we need special handling. */
  6538. if (parse_big_immediate (&str, i, NULL, /*allow_symbol_p=*/false)
  6539. == FAIL)
  6540. {
  6541. inst.error = _("immediate value is out of range");
  6542. goto failure;
  6543. }
  6544. }
  6545. break;
  6546. case OP_RNDQMQ_I63b_RR:
  6547. po_reg_or_goto (REG_TYPE_MQ, try_rndq_i63b_rr);
  6548. break;
  6549. try_rndq_i63b_rr:
  6550. po_reg_or_goto (REG_TYPE_RN, try_rndq_i63b);
  6551. break;
  6552. try_rndq_i63b:
  6553. case OP_RNDQ_I63b:
  6554. {
  6555. po_reg_or_goto (REG_TYPE_NDQ, try_shimm);
  6556. break;
  6557. try_shimm:
  6558. po_imm_or_fail (0, 63, true);
  6559. }
  6560. break;
  6561. case OP_RRnpcb:
  6562. po_char_or_fail ('[');
  6563. po_reg_or_fail (REG_TYPE_RN);
  6564. po_char_or_fail (']');
  6565. break;
  6566. case OP_RRnpctw:
  6567. case OP_RRw:
  6568. case OP_oRRw:
  6569. po_reg_or_fail (REG_TYPE_RN);
  6570. if (skip_past_char (&str, '!') == SUCCESS)
  6571. inst.operands[i].writeback = 1;
  6572. break;
  6573. /* Immediates */
  6574. case OP_I7: po_imm_or_fail ( 0, 7, false); break;
  6575. case OP_I15: po_imm_or_fail ( 0, 15, false); break;
  6576. case OP_I16: po_imm_or_fail ( 1, 16, false); break;
  6577. case OP_I16z: po_imm_or_fail ( 0, 16, false); break;
  6578. case OP_I31: po_imm_or_fail ( 0, 31, false); break;
  6579. case OP_I32: po_imm_or_fail ( 1, 32, false); break;
  6580. case OP_I32z: po_imm_or_fail ( 0, 32, false); break;
  6581. case OP_I48_I64: po_imm1_or_imm2_or_fail (48, 64, false); break;
  6582. case OP_I63s: po_imm_or_fail (-64, 63, false); break;
  6583. case OP_I63: po_imm_or_fail ( 0, 63, false); break;
  6584. case OP_I64: po_imm_or_fail ( 1, 64, false); break;
  6585. case OP_I64z: po_imm_or_fail ( 0, 64, false); break;
  6586. case OP_I127: po_imm_or_fail ( 0, 127, false); break;
  6587. case OP_I255: po_imm_or_fail ( 0, 255, false); break;
  6588. case OP_I511: po_imm_or_fail ( 0, 511, false); break;
  6589. case OP_I4095: po_imm_or_fail ( 0, 4095, false); break;
  6590. case OP_I8191: po_imm_or_fail ( 0, 8191, false); break;
  6591. case OP_I4b: po_imm_or_fail ( 1, 4, true); break;
  6592. case OP_oI7b:
  6593. case OP_I7b: po_imm_or_fail ( 0, 7, true); break;
  6594. case OP_I15b: po_imm_or_fail ( 0, 15, true); break;
  6595. case OP_oI31b:
  6596. case OP_I31b: po_imm_or_fail ( 0, 31, true); break;
  6597. case OP_oI32b: po_imm_or_fail ( 1, 32, true); break;
  6598. case OP_oI32z: po_imm_or_fail ( 0, 32, true); break;
  6599. case OP_oIffffb: po_imm_or_fail ( 0, 0xffff, true); break;
  6600. /* Immediate variants */
  6601. case OP_oI255c:
  6602. po_char_or_fail ('{');
  6603. po_imm_or_fail (0, 255, true);
  6604. po_char_or_fail ('}');
  6605. break;
  6606. case OP_I31w:
  6607. /* The expression parser chokes on a trailing !, so we have
  6608. to find it first and zap it. */
  6609. {
  6610. char *s = str;
  6611. while (*s && *s != ',')
  6612. s++;
  6613. if (s[-1] == '!')
  6614. {
  6615. s[-1] = '\0';
  6616. inst.operands[i].writeback = 1;
  6617. }
  6618. po_imm_or_fail (0, 31, true);
  6619. if (str == s - 1)
  6620. str = s;
  6621. }
  6622. break;
  6623. /* Expressions */
  6624. case OP_EXPi: EXPi:
  6625. po_misc_or_fail (my_get_expression (&inst.relocs[0].exp, &str,
  6626. GE_OPT_PREFIX));
  6627. break;
  6628. case OP_EXP:
  6629. po_misc_or_fail (my_get_expression (&inst.relocs[0].exp, &str,
  6630. GE_NO_PREFIX));
  6631. break;
  6632. case OP_EXPr: EXPr:
  6633. po_misc_or_fail (my_get_expression (&inst.relocs[0].exp, &str,
  6634. GE_NO_PREFIX));
  6635. if (inst.relocs[0].exp.X_op == O_symbol)
  6636. {
  6637. val = parse_reloc (&str);
  6638. if (val == -1)
  6639. {
  6640. inst.error = _("unrecognized relocation suffix");
  6641. goto failure;
  6642. }
  6643. else if (val != BFD_RELOC_UNUSED)
  6644. {
  6645. inst.operands[i].imm = val;
  6646. inst.operands[i].hasreloc = 1;
  6647. }
  6648. }
  6649. break;
  6650. case OP_EXPs:
  6651. po_misc_or_fail (my_get_expression (&inst.relocs[i].exp, &str,
  6652. GE_NO_PREFIX));
  6653. if (inst.relocs[i].exp.X_op == O_symbol)
  6654. {
  6655. inst.operands[i].hasreloc = 1;
  6656. }
  6657. else if (inst.relocs[i].exp.X_op == O_constant)
  6658. {
  6659. inst.operands[i].imm = inst.relocs[i].exp.X_add_number;
  6660. inst.operands[i].hasreloc = 0;
  6661. }
  6662. break;
  6663. /* Operand for MOVW or MOVT. */
  6664. case OP_HALF:
  6665. po_misc_or_fail (parse_half (&str));
  6666. break;
  6667. /* Register or expression. */
  6668. case OP_RR_EXr: po_reg_or_goto (REG_TYPE_RN, EXPr); break;
  6669. case OP_RR_EXi: po_reg_or_goto (REG_TYPE_RN, EXPi); break;
  6670. /* Register or immediate. */
  6671. case OP_RRnpc_I0: po_reg_or_goto (REG_TYPE_RN, I0); break;
  6672. I0: po_imm_or_fail (0, 0, false); break;
  6673. case OP_RRnpcsp_I32: po_reg_or_goto (REG_TYPE_RN, I32); break;
  6674. I32: po_imm_or_fail (1, 32, false); break;
  6675. case OP_RF_IF: po_reg_or_goto (REG_TYPE_FN, IF); break;
  6676. IF:
  6677. if (!is_immediate_prefix (*str))
  6678. goto bad_args;
  6679. str++;
  6680. val = parse_fpa_immediate (&str);
  6681. if (val == FAIL)
  6682. goto failure;
  6683. /* FPA immediates are encoded as registers 8-15.
  6684. parse_fpa_immediate has already applied the offset. */
  6685. inst.operands[i].reg = val;
  6686. inst.operands[i].isreg = 1;
  6687. break;
  6688. case OP_RIWR_I32z: po_reg_or_goto (REG_TYPE_MMXWR, I32z); break;
  6689. I32z: po_imm_or_fail (0, 32, false); break;
  6690. /* Two kinds of register. */
  6691. case OP_RIWR_RIWC:
  6692. {
  6693. struct reg_entry *rege = arm_reg_parse_multi (&str);
  6694. if (!rege
  6695. || (rege->type != REG_TYPE_MMXWR
  6696. && rege->type != REG_TYPE_MMXWC
  6697. && rege->type != REG_TYPE_MMXWCG))
  6698. {
  6699. inst.error = _("iWMMXt data or control register expected");
  6700. goto failure;
  6701. }
  6702. inst.operands[i].reg = rege->number;
  6703. inst.operands[i].isreg = (rege->type == REG_TYPE_MMXWR);
  6704. }
  6705. break;
  6706. case OP_RIWC_RIWG:
  6707. {
  6708. struct reg_entry *rege = arm_reg_parse_multi (&str);
  6709. if (!rege
  6710. || (rege->type != REG_TYPE_MMXWC
  6711. && rege->type != REG_TYPE_MMXWCG))
  6712. {
  6713. inst.error = _("iWMMXt control register expected");
  6714. goto failure;
  6715. }
  6716. inst.operands[i].reg = rege->number;
  6717. inst.operands[i].isreg = 1;
  6718. }
  6719. break;
  6720. /* Misc */
  6721. case OP_CPSF: val = parse_cps_flags (&str); break;
  6722. case OP_ENDI: val = parse_endian_specifier (&str); break;
  6723. case OP_oROR: val = parse_ror (&str); break;
  6724. try_cond:
  6725. case OP_COND: val = parse_cond (&str); break;
  6726. case OP_oBARRIER_I15:
  6727. po_barrier_or_imm (str); break;
  6728. immediate:
  6729. if (parse_immediate (&str, &val, 0, 15, true) == FAIL)
  6730. goto failure;
  6731. break;
  6732. case OP_wPSR:
  6733. case OP_rPSR:
  6734. po_reg_or_goto (REG_TYPE_RNB, try_psr);
  6735. if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_virt))
  6736. {
  6737. inst.error = _("Banked registers are not available with this "
  6738. "architecture.");
  6739. goto failure;
  6740. }
  6741. break;
  6742. try_psr:
  6743. val = parse_psr (&str, op_parse_code == OP_wPSR);
  6744. break;
  6745. case OP_VLDR:
  6746. po_reg_or_goto (REG_TYPE_VFSD, try_sysreg);
  6747. break;
  6748. try_sysreg:
  6749. val = parse_sys_vldr_vstr (&str);
  6750. break;
  6751. case OP_APSR_RR:
  6752. po_reg_or_goto (REG_TYPE_RN, try_apsr);
  6753. break;
  6754. try_apsr:
  6755. /* Parse "APSR_nvzc" operand (for FMSTAT-equivalent MRS
  6756. instruction). */
  6757. if (strncasecmp (str, "APSR_", 5) == 0)
  6758. {
  6759. unsigned found = 0;
  6760. str += 5;
  6761. while (found < 15)
  6762. switch (*str++)
  6763. {
  6764. case 'c': found = (found & 1) ? 16 : found | 1; break;
  6765. case 'n': found = (found & 2) ? 16 : found | 2; break;
  6766. case 'z': found = (found & 4) ? 16 : found | 4; break;
  6767. case 'v': found = (found & 8) ? 16 : found | 8; break;
  6768. default: found = 16;
  6769. }
  6770. if (found != 15)
  6771. goto failure;
  6772. inst.operands[i].isvec = 1;
  6773. /* APSR_nzcv is encoded in instructions as if it were the REG_PC. */
  6774. inst.operands[i].reg = REG_PC;
  6775. }
  6776. else
  6777. goto failure;
  6778. break;
  6779. case OP_TB:
  6780. po_misc_or_fail (parse_tb (&str));
  6781. break;
  6782. /* Register lists. */
  6783. case OP_REGLST:
  6784. val = parse_reg_list (&str, REGLIST_RN);
  6785. if (*str == '^')
  6786. {
  6787. inst.operands[i].writeback = 1;
  6788. str++;
  6789. }
  6790. break;
  6791. case OP_CLRMLST:
  6792. val = parse_reg_list (&str, REGLIST_CLRM);
  6793. break;
  6794. case OP_VRSLST:
  6795. val = parse_vfp_reg_list (&str, &inst.operands[i].reg, REGLIST_VFP_S,
  6796. &partial_match);
  6797. break;
  6798. case OP_VRDLST:
  6799. val = parse_vfp_reg_list (&str, &inst.operands[i].reg, REGLIST_VFP_D,
  6800. &partial_match);
  6801. break;
  6802. case OP_VRSDLST:
  6803. /* Allow Q registers too. */
  6804. val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
  6805. REGLIST_NEON_D, &partial_match);
  6806. if (val == FAIL)
  6807. {
  6808. inst.error = NULL;
  6809. val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
  6810. REGLIST_VFP_S, &partial_match);
  6811. inst.operands[i].issingle = 1;
  6812. }
  6813. break;
  6814. case OP_VRSDVLST:
  6815. val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
  6816. REGLIST_VFP_D_VPR, &partial_match);
  6817. if (val == FAIL && !partial_match)
  6818. {
  6819. inst.error = NULL;
  6820. val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
  6821. REGLIST_VFP_S_VPR, &partial_match);
  6822. inst.operands[i].issingle = 1;
  6823. }
  6824. break;
  6825. case OP_NRDLST:
  6826. val = parse_vfp_reg_list (&str, &inst.operands[i].reg,
  6827. REGLIST_NEON_D, &partial_match);
  6828. break;
  6829. case OP_MSTRLST4:
  6830. case OP_MSTRLST2:
  6831. val = parse_neon_el_struct_list (&str, &inst.operands[i].reg,
  6832. 1, &inst.operands[i].vectype);
  6833. if (val != (((op_parse_code == OP_MSTRLST2) ? 3 : 7) << 5 | 0xe))
  6834. goto failure;
  6835. break;
  6836. case OP_NSTRLST:
  6837. val = parse_neon_el_struct_list (&str, &inst.operands[i].reg,
  6838. 0, &inst.operands[i].vectype);
  6839. break;
  6840. /* Addressing modes */
  6841. case OP_ADDRMVE:
  6842. po_misc_or_fail (parse_address_group_reloc (&str, i, GROUP_MVE));
  6843. break;
  6844. case OP_ADDR:
  6845. po_misc_or_fail (parse_address (&str, i));
  6846. break;
  6847. case OP_ADDRGLDR:
  6848. po_misc_or_fail_no_backtrack (
  6849. parse_address_group_reloc (&str, i, GROUP_LDR));
  6850. break;
  6851. case OP_ADDRGLDRS:
  6852. po_misc_or_fail_no_backtrack (
  6853. parse_address_group_reloc (&str, i, GROUP_LDRS));
  6854. break;
  6855. case OP_ADDRGLDC:
  6856. po_misc_or_fail_no_backtrack (
  6857. parse_address_group_reloc (&str, i, GROUP_LDC));
  6858. break;
  6859. case OP_SH:
  6860. po_misc_or_fail (parse_shifter_operand (&str, i));
  6861. break;
  6862. case OP_SHG:
  6863. po_misc_or_fail_no_backtrack (
  6864. parse_shifter_operand_group_reloc (&str, i));
  6865. break;
  6866. case OP_oSHll:
  6867. po_misc_or_fail (parse_shift (&str, i, SHIFT_LSL_IMMEDIATE));
  6868. break;
  6869. case OP_oSHar:
  6870. po_misc_or_fail (parse_shift (&str, i, SHIFT_ASR_IMMEDIATE));
  6871. break;
  6872. case OP_oSHllar:
  6873. po_misc_or_fail (parse_shift (&str, i, SHIFT_LSL_OR_ASR_IMMEDIATE));
  6874. break;
  6875. case OP_RMQRZ:
  6876. case OP_oRMQRZ:
  6877. po_reg_or_goto (REG_TYPE_MQ, try_rr_zr);
  6878. break;
  6879. case OP_RR_ZR:
  6880. try_rr_zr:
  6881. po_reg_or_goto (REG_TYPE_RN, ZR);
  6882. break;
  6883. ZR:
  6884. po_reg_or_fail (REG_TYPE_ZR);
  6885. break;
  6886. default:
  6887. as_fatal (_("unhandled operand code %d"), op_parse_code);
  6888. }
  6889. /* Various value-based sanity checks and shared operations. We
  6890. do not signal immediate failures for the register constraints;
  6891. this allows a syntax error to take precedence. */
  6892. switch (op_parse_code)
  6893. {
  6894. case OP_oRRnpc:
  6895. case OP_RRnpc:
  6896. case OP_RRnpcb:
  6897. case OP_RRw:
  6898. case OP_oRRw:
  6899. case OP_RRnpc_I0:
  6900. if (inst.operands[i].isreg && inst.operands[i].reg == REG_PC)
  6901. inst.error = BAD_PC;
  6902. break;
  6903. case OP_oRRnpcsp:
  6904. case OP_RRnpcsp:
  6905. case OP_RRnpcsp_I32:
  6906. if (inst.operands[i].isreg)
  6907. {
  6908. if (inst.operands[i].reg == REG_PC)
  6909. inst.error = BAD_PC;
  6910. else if (inst.operands[i].reg == REG_SP
  6911. /* The restriction on Rd/Rt/Rt2 on Thumb mode has been
  6912. relaxed since ARMv8-A. */
  6913. && !ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8))
  6914. {
  6915. gas_assert (thumb);
  6916. inst.error = BAD_SP;
  6917. }
  6918. }
  6919. break;
  6920. case OP_RRnpctw:
  6921. if (inst.operands[i].isreg
  6922. && inst.operands[i].reg == REG_PC
  6923. && (inst.operands[i].writeback || thumb))
  6924. inst.error = BAD_PC;
  6925. break;
  6926. case OP_RVSD_COND:
  6927. case OP_VLDR:
  6928. if (inst.operands[i].isreg)
  6929. break;
  6930. /* fall through. */
  6931. case OP_CPSF:
  6932. case OP_ENDI:
  6933. case OP_oROR:
  6934. case OP_wPSR:
  6935. case OP_rPSR:
  6936. case OP_COND:
  6937. case OP_oBARRIER_I15:
  6938. case OP_REGLST:
  6939. case OP_CLRMLST:
  6940. case OP_VRSLST:
  6941. case OP_VRDLST:
  6942. case OP_VRSDLST:
  6943. case OP_VRSDVLST:
  6944. case OP_NRDLST:
  6945. case OP_NSTRLST:
  6946. case OP_MSTRLST2:
  6947. case OP_MSTRLST4:
  6948. if (val == FAIL)
  6949. goto failure;
  6950. inst.operands[i].imm = val;
  6951. break;
  6952. case OP_LR:
  6953. case OP_oLR:
  6954. if (inst.operands[i].reg != REG_LR)
  6955. inst.error = _("operand must be LR register");
  6956. break;
  6957. case OP_SP:
  6958. if (inst.operands[i].reg != REG_SP)
  6959. inst.error = _("operand must be SP register");
  6960. break;
  6961. case OP_R12:
  6962. if (inst.operands[i].reg != REG_R12)
  6963. inst.error = _("operand must be r12");
  6964. break;
  6965. case OP_RMQRZ:
  6966. case OP_oRMQRZ:
  6967. case OP_RR_ZR:
  6968. if (!inst.operands[i].iszr && inst.operands[i].reg == REG_PC)
  6969. inst.error = BAD_PC;
  6970. break;
  6971. case OP_RRe:
  6972. if (inst.operands[i].isreg
  6973. && (inst.operands[i].reg & 0x00000001) != 0)
  6974. inst.error = BAD_ODD;
  6975. break;
  6976. case OP_RRo:
  6977. if (inst.operands[i].isreg)
  6978. {
  6979. if ((inst.operands[i].reg & 0x00000001) != 1)
  6980. inst.error = BAD_EVEN;
  6981. else if (inst.operands[i].reg == REG_SP)
  6982. as_tsktsk (MVE_BAD_SP);
  6983. else if (inst.operands[i].reg == REG_PC)
  6984. inst.error = BAD_PC;
  6985. }
  6986. break;
  6987. default:
  6988. break;
  6989. }
  6990. /* If we get here, this operand was successfully parsed. */
  6991. inst.operands[i].present = 1;
  6992. continue;
  6993. bad_args:
  6994. inst.error = BAD_ARGS;
  6995. failure:
  6996. if (!backtrack_pos)
  6997. {
  6998. /* The parse routine should already have set inst.error, but set a
  6999. default here just in case. */
  7000. if (!inst.error)
  7001. inst.error = BAD_SYNTAX;
  7002. return FAIL;
  7003. }
  7004. /* Do not backtrack over a trailing optional argument that
  7005. absorbed some text. We will only fail again, with the
  7006. 'garbage following instruction' error message, which is
  7007. probably less helpful than the current one. */
  7008. if (backtrack_index == i && backtrack_pos != str
  7009. && upat[i+1] == OP_stop)
  7010. {
  7011. if (!inst.error)
  7012. inst.error = BAD_SYNTAX;
  7013. return FAIL;
  7014. }
  7015. /* Try again, skipping the optional argument at backtrack_pos. */
  7016. str = backtrack_pos;
  7017. inst.error = backtrack_error;
  7018. inst.operands[backtrack_index].present = 0;
  7019. i = backtrack_index;
  7020. backtrack_pos = 0;
  7021. }
  7022. /* Check that we have parsed all the arguments. */
  7023. if (*str != '\0' && !inst.error)
  7024. inst.error = _("garbage following instruction");
  7025. return inst.error ? FAIL : SUCCESS;
  7026. }
  7027. #undef po_char_or_fail
  7028. #undef po_reg_or_fail
  7029. #undef po_reg_or_goto
  7030. #undef po_imm_or_fail
  7031. #undef po_scalar_or_fail
  7032. #undef po_barrier_or_imm
/* Shorthand macro for instruction encoding functions issuing errors:
   when EXPR is true, record ERR in inst.error and return from the
   calling (void) encoding function.  The do { } while (0) wrapper
   makes the macro act as a single statement, so it is safe inside
   unbraced if/else bodies.  */
#define constraint(expr, err)			\
  do						\
    {						\
      if (expr)					\
	{					\
	  inst.error = err;			\
	  return;				\
	}					\
    }						\
  while (0)
/* Reject "bad registers" for Thumb-2 instructions.  Many Thumb-2
   instructions are unpredictable if these registers are used.  This
   is the BadReg predicate in ARM's Thumb-2 documentation.

   Before ARMv8-A, REG_PC and REG_SP were not allowed in quite a few
   places, while the restriction on REG_SP was relaxed since ARMv8-A.
   Hence PC is always rejected, but SP only when the selected
   architecture lacks the v8 extension.  Returns from the calling
   (void) encoding function on rejection.  */
#define reject_bad_reg(reg)					\
  do								\
    if (reg == REG_PC)						\
      {								\
	inst.error = BAD_PC;					\
	return;							\
      }								\
    else if (reg == REG_SP					\
	     && !ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8))	\
      {								\
	inst.error = BAD_SP;					\
	return;							\
      }								\
  while (0)
/* If REG is R13 (the stack pointer), warn that its use is
   deprecated.  Only fires when the user asked for deprecation
   warnings (warn_on_deprecated); uses as_tsktsk so assembly
   still succeeds.  */
#define warn_deprecated_sp(reg)			\
  do						\
    if (warn_on_deprecated && reg == REG_SP)	\
      as_tsktsk (_("use of r13 is deprecated"));	\
  while (0)
/* Functions for operand encoding.  ARM, then Thumb.  */

/* Rotate V left by N bits (modulo 32).  Both shift counts are masked
   with 31 so that N == 0 never produces a shift by 32, which would be
   undefined behaviour.  NOTE: V and N are each evaluated twice, so
   arguments must be free of side effects.  */
#define rotate_left(v, n) (v << (n & 31) | v >> ((32 - n) & 31))
  7072. /* If the current inst is scalar ARMv8.2 fp16 instruction, do special encoding.
  7073. The only binary encoding difference is the Coprocessor number. Coprocessor
  7074. 9 is used for half-precision calculations or conversions. The format of the
  7075. instruction is the same as the equivalent Coprocessor 10 instruction that
  7076. exists for Single-Precision operation. */
  7077. static void
  7078. do_scalar_fp16_v82_encode (void)
  7079. {
  7080. if (inst.cond < COND_ALWAYS)
  7081. as_warn (_("ARMv8.2 scalar fp16 instruction cannot be conditional,"
  7082. " the behaviour is UNPREDICTABLE"));
  7083. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_fp16),
  7084. _(BAD_FP16));
  7085. inst.instruction = (inst.instruction & 0xfffff0ff) | 0x900;
  7086. mark_feature_used (&arm_ext_fp16);
  7087. }
  7088. /* If VAL can be encoded in the immediate field of an ARM instruction,
  7089. return the encoded form. Otherwise, return FAIL. */
  7090. static unsigned int
  7091. encode_arm_immediate (unsigned int val)
  7092. {
  7093. unsigned int a, i;
  7094. if (val <= 0xff)
  7095. return val;
  7096. for (i = 2; i < 32; i += 2)
  7097. if ((a = rotate_left (val, i)) <= 0xff)
  7098. return a | (i << 7); /* 12-bit pack: [shift-cnt,const]. */
  7099. return FAIL;
  7100. }
  7101. /* If VAL can be encoded in the immediate field of a Thumb32 instruction,
  7102. return the encoded form. Otherwise, return FAIL. */
  7103. static unsigned int
  7104. encode_thumb32_immediate (unsigned int val)
  7105. {
  7106. unsigned int a, i;
  7107. if (val <= 0xff)
  7108. return val;
  7109. for (i = 1; i <= 24; i++)
  7110. {
  7111. a = val >> i;
  7112. if ((val & ~(0xffU << i)) == 0)
  7113. return ((val >> i) & 0x7f) | ((32 - i) << 7);
  7114. }
  7115. a = val & 0xff;
  7116. if (val == ((a << 16) | a))
  7117. return 0x100 | a;
  7118. if (val == ((a << 24) | (a << 16) | (a << 8) | a))
  7119. return 0x300 | a;
  7120. a = val & 0xff00;
  7121. if (val == ((a << 16) | a))
  7122. return 0x200 | (a >> 8);
  7123. return FAIL;
  7124. }
/* Encode a VFP SP or DP register number into inst.instruction.

   REG is the architectural register number; POS selects which
   instruction field (Sd/Sn/Sm or Dd/Dn/Dm) receives it.  D registers
   above 15 require the D32 extension: if the selected FPU has it, the
   use is recorded in the appropriate (ARM or Thumb) feature set,
   otherwise an error is reported and nothing is encoded.  */
static void
encode_arm_vfp_reg (int reg, enum vfp_reg_pos pos)
{
  if ((pos == VFP_REG_Dd || pos == VFP_REG_Dn || pos == VFP_REG_Dm)
      && reg > 15)
    {
      if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_d32))
	{
	  if (thumb_mode)
	    ARM_MERGE_FEATURE_SETS (thumb_arch_used, thumb_arch_used,
				    fpu_vfp_ext_d32);
	  else
	    ARM_MERGE_FEATURE_SETS (arm_arch_used, arm_arch_used,
				    fpu_vfp_ext_d32);
	}
      else
	{
	  first_error (_("D register out of range for selected VFP version"));
	  return;
	}
    }

  switch (pos)
    {
    case VFP_REG_Sd:
      /* S register in destination: reg[4:1] -> bits 15-12, reg[0] -> bit 22.  */
      inst.instruction |= ((reg >> 1) << 12) | ((reg & 1) << 22);
      break;

    case VFP_REG_Sn:
      /* S register as first operand: reg[4:1] -> bits 19-16, reg[0] -> bit 7.  */
      inst.instruction |= ((reg >> 1) << 16) | ((reg & 1) << 7);
      break;

    case VFP_REG_Sm:
      /* S register as second operand: reg[4:1] -> bits 3-0, reg[0] -> bit 5.  */
      inst.instruction |= ((reg >> 1) << 0) | ((reg & 1) << 5);
      break;

    case VFP_REG_Dd:
      /* D register in destination: reg[3:0] -> bits 15-12, reg[4] -> bit 22.  */
      inst.instruction |= ((reg & 15) << 12) | ((reg >> 4) << 22);
      break;

    case VFP_REG_Dn:
      /* D register as first operand: reg[3:0] -> bits 19-16, reg[4] -> bit 7.  */
      inst.instruction |= ((reg & 15) << 16) | ((reg >> 4) << 7);
      break;

    case VFP_REG_Dm:
      /* D register as second operand: reg[3:0] -> bits 3-0, reg[4] -> bit 5.  */
      inst.instruction |= (reg & 15) | ((reg >> 4) << 5);
      break;

    default:
      abort ();
    }
}
  7171. /* Encode a <shift> in an ARM-format instruction. The immediate,
  7172. if any, is handled by md_apply_fix. */
  7173. static void
  7174. encode_arm_shift (int i)
  7175. {
  7176. /* register-shifted register. */
  7177. if (inst.operands[i].immisreg)
  7178. {
  7179. int op_index;
  7180. for (op_index = 0; op_index <= i; ++op_index)
  7181. {
  7182. /* Check the operand only when it's presented. In pre-UAL syntax,
  7183. if the destination register is the same as the first operand, two
  7184. register form of the instruction can be used. */
  7185. if (inst.operands[op_index].present && inst.operands[op_index].isreg
  7186. && inst.operands[op_index].reg == REG_PC)
  7187. as_warn (UNPRED_REG ("r15"));
  7188. }
  7189. if (inst.operands[i].imm == REG_PC)
  7190. as_warn (UNPRED_REG ("r15"));
  7191. }
  7192. if (inst.operands[i].shift_kind == SHIFT_RRX)
  7193. inst.instruction |= SHIFT_ROR << 5;
  7194. else
  7195. {
  7196. inst.instruction |= inst.operands[i].shift_kind << 5;
  7197. if (inst.operands[i].immisreg)
  7198. {
  7199. inst.instruction |= SHIFT_BY_REG;
  7200. inst.instruction |= inst.operands[i].imm << 8;
  7201. }
  7202. else
  7203. inst.relocs[0].type = BFD_RELOC_ARM_SHIFT_IMM;
  7204. }
  7205. }
  7206. static void
  7207. encode_arm_shifter_operand (int i)
  7208. {
  7209. if (inst.operands[i].isreg)
  7210. {
  7211. inst.instruction |= inst.operands[i].reg;
  7212. encode_arm_shift (i);
  7213. }
  7214. else
  7215. {
  7216. inst.instruction |= INST_IMMEDIATE;
  7217. if (inst.relocs[0].type != BFD_RELOC_ARM_IMMEDIATE)
  7218. inst.instruction |= inst.operands[i].imm;
  7219. }
  7220. }
/* Subroutine of encode_arm_addr_mode_2 and encode_arm_addr_mode_3.

   Encodes the base register and the pre/post-index and write-back
   bits common to both addressing modes, rejecting forms neither mode
   accepts (unindexed addressing, or preindexing when IS_T requests a
   T-variant such as ldrt/strt, which is post-indexed only).  Also
   warns when the base register will be updated and coincides with the
   transfer register.  */
static void
encode_arm_addr_mode_common (int i, bool is_t)
{
  /* PR 14260:
     Generate an error if the operand is not a register.  */
  constraint (!inst.operands[i].isreg,
	      _("Instruction does not support =N addresses"));

  /* Base register goes in bits 19-16.  */
  inst.instruction |= inst.operands[i].reg << 16;

  if (inst.operands[i].preind)
    {
      if (is_t)
	{
	  inst.error = _("instruction does not accept preindexed addressing");
	  return;
	}
      inst.instruction |= PRE_INDEX;
      if (inst.operands[i].writeback)
	inst.instruction |= WRITE_BACK;
    }
  else if (inst.operands[i].postind)
    {
      gas_assert (inst.operands[i].writeback);
      /* Post-indexed T instructions are distinguished by W = 1.  */
      if (is_t)
	inst.instruction |= WRITE_BACK;
    }
  else /* unindexed - only for coprocessor */
    {
      inst.error = _("instruction does not accept unindexed addressing");
      return;
    }

  /* Base will be written back (explicit W, or post-indexed) and is the
     same register as the transfer register in bits 15-12: warn.  */
  if (((inst.instruction & WRITE_BACK) || !(inst.instruction & PRE_INDEX))
      && (((inst.instruction & 0x000f0000) >> 16)
	  == ((inst.instruction & 0x0000f000) >> 12)))
    as_warn ((inst.instruction & LOAD_BIT)
	     ? _("destination register same as write-back base")
	     : _("source register same as write-back base"));
}
/* inst.operands[i] was set up by parse_address.  Encode it into an
   ARM-format mode 2 load or store instruction.  If is_t is true,
   reject forms that cannot be used with a T instruction (i.e. not
   post-indexed).  */
static void
encode_arm_addr_mode_2 (int i, bool is_t)
{
  /* True when the base register is the PC.  */
  const bool is_pc = (inst.operands[i].reg == REG_PC);

  encode_arm_addr_mode_common (i, is_t);

  if (inst.operands[i].immisreg)
    {
      /* Register offset form, with optional shift.  */
      constraint ((inst.operands[i].imm == REG_PC
		   || (is_pc && inst.operands[i].writeback)),
		  BAD_PC_ADDRESSING);
      inst.instruction |= INST_IMMEDIATE;	/* yes, this is backwards */
      inst.instruction |= inst.operands[i].imm;
      if (!inst.operands[i].negative)
	inst.instruction |= INDEX_UP;

      if (inst.operands[i].shifted)
	{
	  /* RRX is encoded as ROR with a zero shift amount.  */
	  if (inst.operands[i].shift_kind == SHIFT_RRX)
	    inst.instruction |= SHIFT_ROR << 5;
	  else
	    {
	      inst.instruction |= inst.operands[i].shift_kind << 5;
	      inst.relocs[0].type = BFD_RELOC_ARM_SHIFT_IMM;
	    }
	}
    }
  else /* immediate offset in inst.relocs[0] */
    {
      if (is_pc && !inst.relocs[0].pc_rel)
	{
	  const bool is_load = ((inst.instruction & LOAD_BIT) != 0);

	  /* If is_t is TRUE, it's called from do_ldstt.  ldrt/strt
	     cannot use PC in addressing.
	     PC cannot be used in writeback addressing, either.  */
	  constraint ((is_t || inst.operands[i].writeback),
		      BAD_PC_ADDRESSING);

	  /* Use of PC in str is deprecated for ARMv7.  */
	  if (warn_on_deprecated
	      && !is_load
	      && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v7))
	    as_tsktsk (_("use of PC in this instruction is deprecated"));
	}

      if (inst.relocs[0].type == BFD_RELOC_UNUSED)
	{
	  /* Prefer + for zero encoded value.  */
	  if (!inst.operands[i].negative)
	    inst.instruction |= INDEX_UP;
	  inst.relocs[0].type = BFD_RELOC_ARM_OFFSET_IMM;
	}
    }
}
/* inst.operands[i] was set up by parse_address.  Encode it into an
   ARM-format mode 3 load or store instruction.  Reject forms that
   cannot be used with such instructions.  If is_t is true, reject
   forms that cannot be used with a T instruction (i.e. not
   post-indexed).  */
static void
encode_arm_addr_mode_3 (int i, bool is_t)
{
  /* Mode 3 has no shifted-register offset form at all.  */
  if (inst.operands[i].immisreg && inst.operands[i].shifted)
    {
      inst.error = _("instruction does not accept scaled register index");
      return;
    }

  encode_arm_addr_mode_common (i, is_t);

  if (inst.operands[i].immisreg)
    {
      /* Register offset form.  */
      constraint ((inst.operands[i].imm == REG_PC
		   || (is_t && inst.operands[i].reg == REG_PC)),
		  BAD_PC_ADDRESSING);
      constraint (inst.operands[i].reg == REG_PC && inst.operands[i].writeback,
		  BAD_PC_WRITEBACK);
      inst.instruction |= inst.operands[i].imm;
      if (!inst.operands[i].negative)
	inst.instruction |= INDEX_UP;
    }
  else /* immediate offset in inst.relocs[0] */
    {
      constraint ((inst.operands[i].reg == REG_PC && !inst.relocs[0].pc_rel
		   && inst.operands[i].writeback),
		  BAD_PC_WRITEBACK);
      inst.instruction |= HWOFFSET_IMM;
      if (inst.relocs[0].type == BFD_RELOC_UNUSED)
	{
	  /* Prefer + for zero encoded value.  */
	  if (!inst.operands[i].negative)
	    inst.instruction |= INDEX_UP;
	  inst.relocs[0].type = BFD_RELOC_ARM_OFFSET_IMM8;
	}
    }
}
/* Write immediate bits [7:0] to the following locations:

  |28/24|23     19|18 16|15                    4|3     0|
  |  a  |x x x x x|b c d|x x x x x x x x x x x x|e f g h|

  This function is used by VMOV/VMVN/VORR/VBIC.  */
static void
neon_write_immbits (unsigned immbits)
{
  /* e f g h -> bits [3:0].  */
  inst.instruction |= immbits & 0xf;
  /* b c d -> bits [18:16].  */
  inst.instruction |= ((immbits >> 4) & 0x7) << 16;
  /* a -> bit 28 in the Thumb encoding, bit 24 in the ARM encoding.  */
  inst.instruction |= ((immbits >> 7) & 0x1) << (thumb_mode ? 28 : 24);
}
  7364. /* Invert low-order SIZE bits of XHI:XLO. */
  7365. static void
  7366. neon_invert_size (unsigned *xlo, unsigned *xhi, int size)
  7367. {
  7368. unsigned immlo = xlo ? *xlo : 0;
  7369. unsigned immhi = xhi ? *xhi : 0;
  7370. switch (size)
  7371. {
  7372. case 8:
  7373. immlo = (~immlo) & 0xff;
  7374. break;
  7375. case 16:
  7376. immlo = (~immlo) & 0xffff;
  7377. break;
  7378. case 64:
  7379. immhi = (~immhi) & 0xffffffff;
  7380. /* fall through. */
  7381. case 32:
  7382. immlo = (~immlo) & 0xffffffff;
  7383. break;
  7384. default:
  7385. abort ();
  7386. }
  7387. if (xlo)
  7388. *xlo = immlo;
  7389. if (xhi)
  7390. *xhi = immhi;
  7391. }
  7392. /* True if IMM has form 0bAAAAAAAABBBBBBBBCCCCCCCCDDDDDDDD for bits
  7393. A, B, C, D. */
  7394. static int
  7395. neon_bits_same_in_bytes (unsigned imm)
  7396. {
  7397. return ((imm & 0x000000ff) == 0 || (imm & 0x000000ff) == 0x000000ff)
  7398. && ((imm & 0x0000ff00) == 0 || (imm & 0x0000ff00) == 0x0000ff00)
  7399. && ((imm & 0x00ff0000) == 0 || (imm & 0x00ff0000) == 0x00ff0000)
  7400. && ((imm & 0xff000000) == 0 || (imm & 0xff000000) == 0xff000000);
  7401. }
  7402. /* For immediate of above form, return 0bABCD. */
  7403. static unsigned
  7404. neon_squash_bits (unsigned imm)
  7405. {
  7406. return (imm & 0x01) | ((imm & 0x0100) >> 7) | ((imm & 0x010000) >> 14)
  7407. | ((imm & 0x01000000) >> 21);
  7408. }
  7409. /* Compress quarter-float representation to 0b...000 abcdefgh. */
  7410. static unsigned
  7411. neon_qfloat_bits (unsigned imm)
  7412. {
  7413. return ((imm >> 19) & 0x7f) | ((imm >> 24) & 0x80);
  7414. }
/* Returns CMODE.  IMMBITS [7:0] is set to bits suitable for inserting into
   the instruction.  *OP is passed as the initial value of the op field, and
   may be set to a different value depending on the constant (i.e.
   "MOV I64, 0bAAAAAAAABBBB..." which uses OP = 1 despite being MOV not
   MVN).  If the immediate looks like a repeated pattern then also
   try smaller element sizes.  Returns FAIL if the constant cannot be
   encoded at all.  */
static int
neon_cmode_for_move_imm (unsigned immlo, unsigned immhi, int float_p,
			 unsigned *immbits, int *op, int size,
			 enum neon_el_type type)
{
  /* Only permit float immediates (including 0.0/-0.0) if the operand type is
     float.  */
  if (type == NT_float && !float_p)
    return FAIL;

  /* Quarter-precision float immediate: cmode 0xF, 32-bit elements only,
     and there is no MVN form.  */
  if (type == NT_float && is_quarter_float (immlo) && immhi == 0)
    {
      if (size != 32 || *op == 1)
	return FAIL;
      *immbits = neon_qfloat_bits (immlo);
      return 0xf;
    }

  if (size == 64)
    {
      /* 64-bit pattern where each byte is all-zeros or all-ones:
	 cmode 0xE with op forced to 1 (still a MOV).  */
      if (neon_bits_same_in_bytes (immhi)
	  && neon_bits_same_in_bytes (immlo))
	{
	  if (*op == 1)
	    return FAIL;
	  *immbits = (neon_squash_bits (immhi) << 4)
		     | neon_squash_bits (immlo);
	  *op = 1;
	  return 0xe;
	}

      /* Otherwise only encodable as a 32-bit (or narrower) pattern
	 replicated across both halves.  */
      if (immhi != immlo)
	return FAIL;
    }

  if (size >= 32)
    {
      /* 32-bit element forms: single byte at one of four positions
	 (cmode 0,2,4,6), or a byte with trailing ones (cmode 0xC/0xD).  */
      if (immlo == (immlo & 0x000000ff))
	{
	  *immbits = immlo;
	  return 0x0;
	}
      else if (immlo == (immlo & 0x0000ff00))
	{
	  *immbits = immlo >> 8;
	  return 0x2;
	}
      else if (immlo == (immlo & 0x00ff0000))
	{
	  *immbits = immlo >> 16;
	  return 0x4;
	}
      else if (immlo == (immlo & 0xff000000))
	{
	  *immbits = immlo >> 24;
	  return 0x6;
	}
      else if (immlo == ((immlo & 0x0000ff00) | 0x000000ff))
	{
	  *immbits = (immlo >> 8) & 0xff;
	  return 0xc;
	}
      else if (immlo == ((immlo & 0x00ff0000) | 0x0000ffff))
	{
	  *immbits = (immlo >> 16) & 0xff;
	  return 0xd;
	}

      /* Fall through to 16-bit elements only if the 32-bit value is the
	 same 16-bit pattern repeated.  */
      if ((immlo & 0xffff) != (immlo >> 16))
	return FAIL;
      immlo &= 0xffff;
    }

  if (size >= 16)
    {
      /* 16-bit element forms: byte in either half (cmode 0x8/0xA).  */
      if (immlo == (immlo & 0x000000ff))
	{
	  *immbits = immlo;
	  return 0x8;
	}
      else if (immlo == (immlo & 0x0000ff00))
	{
	  *immbits = immlo >> 8;
	  return 0xa;
	}

      /* Try 8-bit elements only if both bytes are the same.  */
      if ((immlo & 0xff) != (immlo >> 8))
	return FAIL;
      immlo &= 0xff;
    }

  if (immlo == (immlo & 0x000000ff))
    {
      /* Don't allow MVN with 8-bit immediate.  */
      if (*op == 1)
	return FAIL;
      *immbits = immlo;
      return 0xe;
    }

  return FAIL;
}
  7514. #if defined BFD_HOST_64_BIT
  7515. /* Returns TRUE if double precision value V may be cast
  7516. to single precision without loss of accuracy. */
  7517. static bool
  7518. is_double_a_single (bfd_uint64_t v)
  7519. {
  7520. int exp = (v >> 52) & 0x7FF;
  7521. bfd_uint64_t mantissa = v & 0xFFFFFFFFFFFFFULL;
  7522. return ((exp == 0 || exp == 0x7FF
  7523. || (exp >= 1023 - 126 && exp <= 1023 + 127))
  7524. && (mantissa & 0x1FFFFFFFL) == 0);
  7525. }
  7526. /* Returns a double precision value casted to single precision
  7527. (ignoring the least significant bits in exponent and mantissa). */
  7528. static int
  7529. double_to_single (bfd_uint64_t v)
  7530. {
  7531. unsigned int sign = (v >> 63) & 1;
  7532. int exp = (v >> 52) & 0x7FF;
  7533. bfd_uint64_t mantissa = v & 0xFFFFFFFFFFFFFULL;
  7534. if (exp == 0x7FF)
  7535. exp = 0xFF;
  7536. else
  7537. {
  7538. exp = exp - 1023 + 127;
  7539. if (exp >= 0xFF)
  7540. {
  7541. /* Infinity. */
  7542. exp = 0x7F;
  7543. mantissa = 0;
  7544. }
  7545. else if (exp < 0)
  7546. {
  7547. /* No denormalized numbers. */
  7548. exp = 0;
  7549. mantissa = 0;
  7550. }
  7551. }
  7552. mantissa >>= 29;
  7553. return (sign << 31) | (exp << 23) | mantissa;
  7554. }
  7555. #endif /* BFD_HOST_64_BIT */
/* Kind of destination for an "=expr" literal-load pseudo-operation;
   selects which move encodings move_or_literal_pool may use.  */
enum lit_type
{
  CONST_THUMB,	/* Destination is a core register, Thumb encoding.  */
  CONST_ARM,	/* Destination is a core register, ARM encoding.  */
  CONST_VEC	/* Destination is a VFP/Neon register.  */
};

static void do_vfp_nsyn_opcode (const char *);
/* inst.relocs[0].exp describes an "=expr" load pseudo-operation.
   Determine whether it can be performed with a move instruction; if
   it can, convert inst.instruction to that move instruction and
   return true; if it can't, convert inst.instruction to a literal-pool
   load and return false.  If this is not a valid thing to do in the
   current context, set inst.error and return true.

   inst.operands[i] describes the destination register.  T selects the
   encoding family (see enum lit_type); MODE_3 selects the halfword
   literal relocation when falling back to a pool load.  */
static bool
move_or_literal_pool (int i, enum lit_type t, bool mode_3)
{
  unsigned long tbit;
  bool thumb_p = (t == CONST_THUMB);
  bool arm_p = (t == CONST_ARM);

  if (thumb_p)
    tbit = (inst.instruction > 0xffff) ? THUMB2_LOAD_BIT : THUMB_LOAD_BIT;
  else
    tbit = LOAD_BIT;

  /* Only load instructions may take an "=expr" operand.  */
  if ((inst.instruction & tbit) == 0)
    {
      inst.error = _("invalid pseudo operation");
      return true;
    }

  if (inst.relocs[0].exp.X_op != O_constant
      && inst.relocs[0].exp.X_op != O_symbol
      && inst.relocs[0].exp.X_op != O_big)
    {
      inst.error = _("constant expression expected");
      return true;
    }

  if (inst.relocs[0].exp.X_op == O_constant
      || inst.relocs[0].exp.X_op == O_big)
    {
#if defined BFD_HOST_64_BIT
      bfd_uint64_t v;
#else
      valueT v;
#endif
      if (inst.relocs[0].exp.X_op == O_big)
	{
	  /* Bignum: collect up to 64 bits from the littlenum array.  */
	  LITTLENUM_TYPE w[X_PRECISION];
	  LITTLENUM_TYPE * l;

	  if (inst.relocs[0].exp.X_add_number == -1)
	    {
	      /* X_add_number == -1 marks a floating-point bignum;
		 convert it to littlenum words first.  */
	      gen_to_words (w, X_PRECISION, E_PRECISION);
	      l = w;
	      /* FIXME: Should we check words w[2..5] ?  */
	    }
	  else
	    l = generic_bignum;

#if defined BFD_HOST_64_BIT
	  v = l[3] & LITTLENUM_MASK;
	  v <<= LITTLENUM_NUMBER_OF_BITS;
	  v |= l[2] & LITTLENUM_MASK;
	  v <<= LITTLENUM_NUMBER_OF_BITS;
	  v |= l[1] & LITTLENUM_MASK;
	  v <<= LITTLENUM_NUMBER_OF_BITS;
	  v |= l[0] & LITTLENUM_MASK;
#else
	  v = l[1] & LITTLENUM_MASK;
	  v <<= LITTLENUM_NUMBER_OF_BITS;
	  v |= l[0] & LITTLENUM_MASK;
#endif
	}
      else
	v = inst.relocs[0].exp.X_add_number;

      if (!inst.operands[i].issingle)
	{
	  if (thumb_p)
	    {
	      /* LDR should not use lead in a flag-setting instruction being
		 chosen so we do not check whether movs can be used.  */
	      if ((ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2)
		   || ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2_v8m))
		  && inst.operands[i].reg != 13
		  && inst.operands[i].reg != 15)
		{
		  /* Check if on thumb2 it can be done with a mov.w, mvn or
		     movw instruction.  */
		  unsigned int newimm;
		  bool isNegated = false;

		  newimm = encode_thumb32_immediate (v);
		  if (newimm == (unsigned int) FAIL)
		    {
		      /* Not encodable directly; try the complement so a
			 MVN can be used instead.  */
		      newimm = encode_thumb32_immediate (~v);
		      isNegated = true;
		    }

		  /* The number can be loaded with a mov.w or mvn
		     instruction.  */
		  if (newimm != (unsigned int) FAIL
		      && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2))
		    {
		      inst.instruction = (0xf04f0000 /* MOV.W.  */
					  | (inst.operands[i].reg << 8));
		      /* Change to MOVN.  */
		      inst.instruction |= (isNegated ? 0x200000 : 0);
		      /* Scatter the modified immediate into the i:imm3:imm8
			 fields of the T32 encoding.  */
		      inst.instruction |= (newimm & 0x800) << 15;
		      inst.instruction |= (newimm & 0x700) << 4;
		      inst.instruction |= (newimm & 0x0ff);
		      return true;
		    }
		  /* The number can be loaded with a movw instruction.  */
		  else if ((v & ~0xFFFF) == 0
			   && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2_v8m))
		    {
		      int imm = v & 0xFFFF;

		      inst.instruction = 0xf2400000;  /* MOVW.  */
		      inst.instruction |= (inst.operands[i].reg << 8);
		      /* Scatter the 16-bit value into imm4:i:imm3:imm8.  */
		      inst.instruction |= (imm & 0xf000) << 4;
		      inst.instruction |= (imm & 0x0800) << 15;
		      inst.instruction |= (imm & 0x0700) << 4;
		      inst.instruction |= (imm & 0x00ff);
		      /* In case this replacement is being done on Armv8-M
			 Baseline we need to make sure to disable the
			 instruction size check, as otherwise GAS will reject
			 the use of this T32 instruction.  */
		      inst.size_req = 0;
		      return true;
		    }
		}
	    }
	  else if (arm_p)
	    {
	      int value = encode_arm_immediate (v);

	      if (value != FAIL)
		{
		  /* This can be done with a mov instruction.  */
		  inst.instruction &= LITERAL_MASK;
		  inst.instruction |= INST_IMMEDIATE | (OPCODE_MOV << DATA_OP_SHIFT);
		  inst.instruction |= value & 0xfff;
		  return true;
		}

	      value = encode_arm_immediate (~ v);
	      if (value != FAIL)
		{
		  /* This can be done with a mvn instruction.  */
		  inst.instruction &= LITERAL_MASK;
		  inst.instruction |= INST_IMMEDIATE | (OPCODE_MVN << DATA_OP_SHIFT);
		  inst.instruction |= value & 0xfff;
		  return true;
		}
	    }
	  else if (t == CONST_VEC && ARM_CPU_HAS_FEATURE (cpu_variant, fpu_neon_ext_v1))
	    {
	      /* Try a Neon VMOV/VMVN modified-immediate encoding.  */
	      int op = 0;
	      unsigned immbits = 0;
	      unsigned immlo = inst.operands[1].imm;
	      /* High half: an explicit register-pair immediate, zero for
		 unsigned expressions, or the sign-extension of the low
		 half otherwise.  */
	      unsigned immhi = inst.operands[1].regisimm
		? inst.operands[1].reg
		: inst.relocs[0].exp.X_unsigned
		? 0
		: ((bfd_int64_t)((int) immlo)) >> 32;
	      int cmode = neon_cmode_for_move_imm (immlo, immhi, false, &immbits,
						   &op, 64, NT_invtype);

	      if (cmode == FAIL)
		{
		  /* Retry with the bitwise complement (VMVN form).  */
		  neon_invert_size (&immlo, &immhi, 64);
		  op = !op;
		  cmode = neon_cmode_for_move_imm (immlo, immhi, false, &immbits,
						   &op, 64, NT_invtype);
		}

	      if (cmode != FAIL)
		{
		  inst.instruction = (inst.instruction & VLDR_VMOV_SAME)
		    | (1 << 23)
		    | (cmode << 8)
		    | (op << 5)
		    | (1 << 4);

		  /* Fill other bits in vmov encoding for both thumb and arm.  */
		  if (thumb_mode)
		    inst.instruction |= (0x7U << 29) | (0xF << 24);
		  else
		    inst.instruction |= (0xFU << 28) | (0x1 << 25);
		  neon_write_immbits (immbits);
		  return true;
		}
	    }
	}

      if (t == CONST_VEC)
	{
	  /* Check if vldr Rx, =constant could be optimized to vmov Rx, #constant.  */
	  if (inst.operands[i].issingle
	      && is_quarter_float (inst.operands[1].imm)
	      && ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v3xd))
	    {
	      inst.operands[1].imm =
		neon_qfloat_bits (v);
	      do_vfp_nsyn_opcode ("fconsts");
	      return true;
	    }

	  /* If our host does not support a 64-bit type then we cannot perform
	     the following optimization.  This mean that there will be a
	     discrepancy between the output produced by an assembler built for
	     a 32-bit-only host and the output produced from a 64-bit host, but
	     this cannot be helped.  */
#if defined BFD_HOST_64_BIT
	  else if (!inst.operands[1].issingle
		   && ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v3))
	    {
	      if (is_double_a_single (v)
		  && is_quarter_float (double_to_single (v)))
		{
		  inst.operands[1].imm =
		    neon_qfloat_bits (double_to_single (v));
		  do_vfp_nsyn_opcode ("fconstd");
		  return true;
		}
	    }
#endif
	}
    }

  /* No move encoding fits; fall back to a literal-pool load.  */
  if (add_to_lit_pool ((!inst.operands[i].isvec
			|| inst.operands[i].issingle) ? 4 : 8) == FAIL)
    return true;

  /* Rewrite operand 1 as a PC-relative reference to the pool entry.  */
  inst.operands[1].reg = REG_PC;
  inst.operands[1].isreg = 1;
  inst.operands[1].preind = 1;
  inst.relocs[0].pc_rel = 1;
  inst.relocs[0].type = (thumb_p
			 ? BFD_RELOC_ARM_THUMB_OFFSET
			 : (mode_3
			    ? BFD_RELOC_ARM_HWLITERAL
			    : BFD_RELOC_ARM_LITERAL));
  return false;
}
/* inst.operands[i] was set up by parse_address.  Encode it into an
   ARM-format instruction.  Reject all forms which cannot be encoded
   into a coprocessor load/store instruction.  If wb_ok is false,
   reject use of writeback; if unind_ok is false, reject use of
   unindexed addressing.  If reloc_override is not 0, use it instead
   of BFD_ARM_CP_OFF_IMM, unless the initial relocation is a group one
   (in which case it is preserved).  Returns SUCCESS or FAIL (with
   inst.error set).  */
static int
encode_arm_cp_address (int i, int wb_ok, int unind_ok, int reloc_override)
{
  if (!inst.operands[i].isreg)
    {
      /* PR 18256: a bare constant is only acceptable for a vector
	 destination, via the "=const" literal machinery.  */
      if (! inst.operands[0].isvec)
	{
	  inst.error = _("invalid co-processor operand");
	  return FAIL;
	}
      if (move_or_literal_pool (0, CONST_VEC, /*mode_3=*/false))
	return SUCCESS;
    }

  inst.instruction |= inst.operands[i].reg << 16;

  gas_assert (!(inst.operands[i].preind && inst.operands[i].postind));

  if (!inst.operands[i].preind && !inst.operands[i].postind) /* unindexed */
    {
      gas_assert (!inst.operands[i].writeback);
      if (!unind_ok)
	{
	  inst.error = _("instruction does not support unindexed addressing");
	  return FAIL;
	}
      /* Unindexed form: 8-bit option in the immediate field, U bit set.  */
      inst.instruction |= inst.operands[i].imm;
      inst.instruction |= INDEX_UP;
      return SUCCESS;
    }

  if (inst.operands[i].preind)
    inst.instruction |= PRE_INDEX;

  if (inst.operands[i].writeback)
    {
      if (inst.operands[i].reg == REG_PC)
	{
	  inst.error = _("pc may not be used with write-back");
	  return FAIL;
	}
      if (!wb_ok)
	{
	  inst.error = _("instruction does not support writeback");
	  return FAIL;
	}
      inst.instruction |= WRITE_BACK;
    }

  if (reloc_override)
    inst.relocs[0].type = (bfd_reloc_code_real_type) reloc_override;
  /* Preserve group relocations and LDR_PC_G0; otherwise use the
     default coprocessor-offset relocation for the current mode.  */
  else if ((inst.relocs[0].type < BFD_RELOC_ARM_ALU_PC_G0_NC
	    || inst.relocs[0].type > BFD_RELOC_ARM_LDC_SB_G2)
	   && inst.relocs[0].type != BFD_RELOC_ARM_LDR_PC_G0)
    {
      if (thumb_mode)
	inst.relocs[0].type = BFD_RELOC_ARM_T32_CP_OFF_IMM;
      else
	inst.relocs[0].type = BFD_RELOC_ARM_CP_OFF_IMM;
    }

  /* Prefer + for zero encoded value.  */
  if (!inst.operands[i].negative)
    inst.instruction |= INDEX_UP;

  return SUCCESS;
}
/* Functions for instruction encoding, sorted by sub-architecture.
   First some generics; their names are taken from the conventional
   bit positions for register arguments in ARM format instructions.  */

/* Encoder for instructions that take no operands.  */
static void
do_noargs (void)
{
}
/* Encode operand 0 into the Rd field (bits 15:12).  */
static void
do_rd (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
}
/* Encode operand 0 into the Rn field (bits 19:16).  */
static void
do_rn (void)
{
  inst.instruction |= inst.operands[0].reg << 16;
}
/* Encode operand 0 into Rd (bits 15:12) and operand 1 into Rm (bits 3:0).  */
static void
do_rd_rm (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg;
}
/* Encode operand 0 into Rm (bits 3:0) and operand 1 into Rn (bits 19:16).  */
static void
do_rm_rn (void)
{
  inst.instruction |= inst.operands[0].reg;
  inst.instruction |= inst.operands[1].reg << 16;
}
/* Encode operand 0 into Rd (bits 15:12) and operand 1 into Rn (bits 19:16).  */
static void
do_rd_rn (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg << 16;
}
/* Encode operand 0 into Rn (bits 19:16) and operand 1 into Rd (bits 15:12).  */
static void
do_rn_rd (void)
{
  inst.instruction |= inst.operands[0].reg << 16;
  inst.instruction |= inst.operands[1].reg << 12;
}
/* Encode operand 0 into bits 11:8 and operand 1 into bits 19:16.  */
static void
do_tt (void)
{
  inst.instruction |= inst.operands[0].reg << 8;
  inst.instruction |= inst.operands[1].reg << 16;
}
/* Diagnose use of an instruction obsoleted by FEATURE: a warning when
   assembling for "any" CPU, a hard error when the selected CPU has
   FEATURE.  Returns true if a diagnostic was issued, false if the
   instruction is still valid for the selected CPU.  */
static bool
check_obsolete (const arm_feature_set *feature, const char *msg)
{
  if (ARM_CPU_IS_ANY (cpu_variant))
    {
      /* With -mcpu=all we cannot be sure, so only warn.  */
      as_tsktsk ("%s", msg);
      return true;
    }
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, *feature))
    {
      as_bad ("%s", msg);
      return true;
    }

  return false;
}
/* Encode Rd (bits 15:12), Rm (bits 3:0) and Rn (bits 19:16), with the
   extra operand restrictions that apply to SWP/SWPB.  */
static void
do_rd_rm_rn (void)
{
  unsigned Rn = inst.operands[2].reg;
  /* Enforce restrictions on SWP instruction.  */
  if ((inst.instruction & 0x0fbfffff) == 0x01000090)
    {
      constraint (Rn == inst.operands[0].reg || Rn == inst.operands[1].reg,
		  _("Rn must not overlap other operands"));

      /* SWP{b} is obsolete for ARMv8-A, and deprecated for ARMv6* and ARMv7.
       */
      if (!check_obsolete (&arm_ext_v8,
			   _("swp{b} use is obsoleted for ARMv8 and later"))
	  && warn_on_deprecated
	  && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6))
	as_tsktsk (_("swp{b} use is deprecated for ARMv6 and ARMv7"));
    }

  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg;
  inst.instruction |= Rn << 16;
}
/* Encode Rd (bits 15:12), Rn (bits 19:16) and Rm (bits 3:0).  */
static void
do_rd_rn_rm (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= inst.operands[2].reg;
}
/* Encode Rm (bits 3:0), Rd (bits 15:12) and Rn (bits 19:16); the Rn
   operand is an address which must be a plain register (no PC, no
   offset expression).  */
static void
do_rm_rd_rn (void)
{
  constraint ((inst.operands[2].reg == REG_PC), BAD_PC);
  /* Reject [Rn, #imm] with a non-zero immediate; only [Rn] is valid.  */
  constraint (((inst.relocs[0].exp.X_op != O_constant
		&& inst.relocs[0].exp.X_op != O_illegal)
	       || inst.relocs[0].exp.X_add_number != 0),
	      BAD_ADDR_MODE);
  inst.instruction |= inst.operands[0].reg;
  inst.instruction |= inst.operands[1].reg << 12;
  inst.instruction |= inst.operands[2].reg << 16;
}
/* Encode operand 0 as an immediate in the low bits of the opcode.  */
static void
do_imm0 (void)
{
  inst.instruction |= inst.operands[0].imm;
}
/* Encode Rd (bits 15:12) plus a coprocessor address in operand 1;
   writeback and unindexed forms are both permitted.  */
static void
do_rd_cpaddr (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  encode_arm_cp_address (1, true, true, 0);
}
/* ARM instructions, in alphabetical order by function name (except
   that wrapper functions appear immediately after the function they
   wrap).  */

/* This is a pseudo-op of the form "adr rd, label" to be converted
   into a relative address of the form "add rd, pc, #label-.-8".  */
static void
do_adr (void)
{
  inst.instruction |= (inst.operands[0].reg << 12);  /* Rd */

  /* Frag hacking will turn this into a sub instruction if the offset turns
     out to be negative.  */
  inst.relocs[0].type = BFD_RELOC_ARM_IMMEDIATE;
  inst.relocs[0].pc_rel = 1;
  /* Compensate for the PC reading 8 bytes ahead in ARM state.  */
  inst.relocs[0].exp.X_add_number -= 8;

  if (support_interwork
      && inst.relocs[0].exp.X_op == O_symbol
      && inst.relocs[0].exp.X_add_symbol != NULL
      && S_IS_DEFINED (inst.relocs[0].exp.X_add_symbol)
      && THUMB_IS_FUNC (inst.relocs[0].exp.X_add_symbol))
    /* Set the Thumb bit in the address of a Thumb function so that a
       later BX will interwork correctly.  */
    inst.relocs[0].exp.X_add_number |= 1;
}
/* This is a pseudo-op of the form "adrl rd, label" to be converted
   into a relative address of the form:
   add rd, pc, #low(label-.-8)"
   add rd, rd, #high(label-.-8)"  */
static void
do_adrl (void)
{
  inst.instruction |= (inst.operands[0].reg << 12);  /* Rd */

  /* Frag hacking will turn this into a sub instruction if the offset turns
     out to be negative.  */
  inst.relocs[0].type = BFD_RELOC_ARM_ADRL_IMMEDIATE;
  inst.relocs[0].pc_rel = 1;
  /* ADRL always expands to a two-instruction sequence.  */
  inst.size = INSN_SIZE * 2;
  /* Compensate for the PC reading 8 bytes ahead in ARM state.  */
  inst.relocs[0].exp.X_add_number -= 8;

  if (support_interwork
      && inst.relocs[0].exp.X_op == O_symbol
      && inst.relocs[0].exp.X_add_symbol != NULL
      && S_IS_DEFINED (inst.relocs[0].exp.X_add_symbol)
      && THUMB_IS_FUNC (inst.relocs[0].exp.X_add_symbol))
    /* Set the Thumb bit in the address of a Thumb function so that a
       later BX will interwork correctly.  */
    inst.relocs[0].exp.X_add_number |= 1;
}
/* Encode a data-processing (arithmetic) instruction: Rd, optional Rn
   (defaulting to Rd for the two-operand form), and a shifter operand.  */
static void
do_arit (void)
{
  /* Thumb-only relocations cannot appear on an ARM instruction.  */
  constraint (inst.relocs[0].type >= BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC
	      && inst.relocs[0].type <= BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC ,
	      THUMB1_RELOC_ONLY);
  if (!inst.operands[1].present)
    inst.operands[1].reg = inst.operands[0].reg;
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg << 16;
  encode_arm_shifter_operand (2);
}
/* Encode a barrier (DMB/DSB/ISB-style) option; an omitted operand
   defaults to 0xf (SY, full-system).  */
static void
do_barrier (void)
{
  if (inst.operands[0].present)
    inst.instruction |= inst.operands[0].imm;
  else
    inst.instruction |= 0xf;
}
/* Encode BFC Rd, #lsb, #width.  */
static void
do_bfc (void)
{
  unsigned int msb = inst.operands[1].imm + inst.operands[2].imm;
  constraint (msb > 32, _("bit-field extends past end of register"));
  /* The instruction encoding stores the LSB and MSB,
     not the LSB and width.  */
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].imm << 7;
  inst.instruction |= (msb - 1) << 16;
}
/* Encode BFI Rd, Rm, #lsb, #width.  */
static void
do_bfi (void)
{
  unsigned int msb;

  /* #0 in second position is alternative syntax for bfc, which is
     the same instruction but with REG_PC in the Rm field.  */
  if (!inst.operands[1].isreg)
    inst.operands[1].reg = REG_PC;

  msb = inst.operands[2].imm + inst.operands[3].imm;
  constraint (msb > 32, _("bit-field extends past end of register"));
  /* The instruction encoding stores the LSB and MSB,
     not the LSB and width.  */
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg;
  inst.instruction |= inst.operands[2].imm << 7;
  inst.instruction |= (msb - 1) << 16;
}
/* Encode SBFX/UBFX Rd, Rm, #lsb, #width; the width is stored as
   width-1 in bits 20:16.  */
static void
do_bfx (void)
{
  constraint (inst.operands[2].imm + inst.operands[3].imm > 32,
	      _("bit-field extends past end of register"));
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg;
  inst.instruction |= inst.operands[2].imm << 7;
  inst.instruction |= (inst.operands[3].imm - 1) << 16;
}
/* ARM V5 breakpoint instruction (argument parse)
     BKPT <16 bit unsigned immediate>
   Instruction is not conditional.
	The bit pattern given in insns[] has the COND_ALWAYS condition,
	and it is an error if the caller tried to override that.  */
static void
do_bkpt (void)
{
  /* Top 12 of 16 bits to bits 19:8.  */
  inst.instruction |= (inst.operands[0].imm & 0xfff0) << 4;

  /* Bottom 4 of 16 bits to bits 3:0.  */
  inst.instruction |= inst.operands[0].imm & 0xf;
}
/* Set up the relocation for a branch instruction.  DEFAULT_RELOC is
   used unless the operand carried an explicit (plt) or (tlscall)
   suffix, which selects the corresponding special relocation.  */
static void
encode_branch (int default_reloc)
{
  if (inst.operands[0].hasreloc)
    {
      constraint (inst.operands[0].imm != BFD_RELOC_ARM_PLT32
		  && inst.operands[0].imm != BFD_RELOC_ARM_TLS_CALL,
		  _("the only valid suffixes here are '(plt)' and '(tlscall)'"));
      inst.relocs[0].type = inst.operands[0].imm == BFD_RELOC_ARM_PLT32
	? BFD_RELOC_ARM_PLT32
	: thumb_mode ? BFD_RELOC_ARM_THM_TLS_CALL : BFD_RELOC_ARM_TLS_CALL;
    }
  else
    inst.relocs[0].type = (bfd_reloc_code_real_type) default_reloc;
  inst.relocs[0].pc_rel = 1;
}
/* Encode B{cond}; EABI v4+ objects use the JUMP relocation so the
   linker may rewrite the branch for interworking.  */
static void
do_branch (void)
{
#ifdef OBJ_ELF
  if (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4)
    encode_branch (BFD_RELOC_ARM_PCREL_JUMP);
  else
#endif
    encode_branch (BFD_RELOC_ARM_PCREL_BRANCH);
}
/* Encode BL{cond}; on EABI v4+ an unconditional BL uses the CALL
   relocation (linker may turn it into BLX), a conditional one the
   JUMP relocation.  */
static void
do_bl (void)
{
#ifdef OBJ_ELF
  if (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4)
    {
      if (inst.cond == COND_ALWAYS)
	encode_branch (BFD_RELOC_ARM_PCREL_CALL);
      else
	encode_branch (BFD_RELOC_ARM_PCREL_JUMP);
    }
  else
#endif
    encode_branch (BFD_RELOC_ARM_PCREL_BRANCH);
}
/* ARM V5 branch-link-exchange instruction (argument parse)
     BLX <target_addr>		ie BLX(1)
     BLX{<condition>} <Rm>	ie BLX(2)
   Unfortunately, there are two different opcodes for this mnemonic.
   So, the insns[].value is not used, and the code here zaps values
	into inst.instruction.
   Also, the <target_addr> can be 25 bits, hence has its own reloc.  */
static void
do_blx (void)
{
  if (inst.operands[0].isreg)
    {
      /* Arg is a register; the opcode provided by insns[] is correct.
	 It is not illegal to do "blx pc", just useless.  */
      if (inst.operands[0].reg == REG_PC)
	as_tsktsk (_("use of r15 in blx in ARM mode is not really useful"));

      inst.instruction |= inst.operands[0].reg;
    }
  else
    {
      /* Arg is an address; this instruction cannot be executed
	 conditionally, and the opcode must be adjusted.
	 We retain the BFD_RELOC_ARM_PCREL_BLX till the very end
	 where we generate out a BFD_RELOC_ARM_PCREL_CALL instead.  */
      constraint (inst.cond != COND_ALWAYS, BAD_COND);
      inst.instruction = 0xfa000000;
      encode_branch (BFD_RELOC_ARM_PCREL_BLX);
    }
}
/* Encode BX{cond} Rm, emitting an R_ARM_V4BX relocation when required
   so the linker can rewrite it for pre-v5 targets.  */
static void
do_bx (void)
{
  bool want_reloc;

  if (inst.operands[0].reg == REG_PC)
    as_tsktsk (_("use of r15 in bx in ARM mode is not really useful"));

  inst.instruction |= inst.operands[0].reg;
  /* Output R_ARM_V4BX relocations if is an EABI object that looks like
     it is for ARMv4t or earlier.  */
  want_reloc = !ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5);
  if (!ARM_FEATURE_ZERO (selected_object_arch)
      && !ARM_CPU_HAS_FEATURE (selected_object_arch, arm_ext_v5))
      want_reloc = true;

  /* Note: for non-ELF targets the #ifdef leaves the assignment
     unconditional, so the relocation is never emitted.  */
#ifdef OBJ_ELF
  if (EF_ARM_EABI_VERSION (meabi_flags) < EF_ARM_EABI_VER4)
#endif
    want_reloc = false;

  if (want_reloc)
    inst.relocs[0].type = BFD_RELOC_ARM_V4BX;
}
/* ARM v5TEJ.  Jump to Jazelle code.  */
static void
do_bxj (void)
{
  if (inst.operands[0].reg == REG_PC)
    as_tsktsk (_("use of r15 in bxj is not really useful"));

  inst.instruction |= inst.operands[0].reg;
}
/* Co-processor data operation:
     CDP{cond} <coproc>, <opcode_1>, <CRd>, <CRn>, <CRm>{, <opcode_2>}
     CDP2	<coproc>, <opcode_1>, <CRd>, <CRn>, <CRm>{, <opcode_2>}	*/
static void
do_cdp (void)
{
  inst.instruction |= inst.operands[0].reg << 8;	/* coproc */
  inst.instruction |= inst.operands[1].imm << 20;	/* opcode_1 */
  inst.instruction |= inst.operands[2].reg << 12;	/* CRd */
  inst.instruction |= inst.operands[3].reg << 16;	/* CRn */
  inst.instruction |= inst.operands[4].reg;		/* CRm */
  inst.instruction |= inst.operands[5].imm << 5;	/* opcode_2 */
}
/* Encode a comparison instruction: Rn (bits 19:16) plus a shifter
   operand; there is no destination register.  */
static void
do_cmp (void)
{
  inst.instruction |= inst.operands[0].reg << 16;
  encode_arm_shifter_operand (1);
}
/* Transfer between coprocessor and ARM registers.
   MRC{cond} <coproc>, <opcode_1>, <Rd>, <CRn>, <CRm>{, <opcode_2>}
   MRC2
   MCR{cond}
   MCR2

   No special properties.  */

/* One deprecated/obsoleted coprocessor register access pattern: the
   coprocessor number and opcode/CRn/CRm fields that identify it, the
   feature sets for which it is deprecated or obsoleted, and the
   diagnostics to issue.  */
struct deprecated_coproc_regs_s
{
  unsigned cp;
  int opc1;
  unsigned crn;
  unsigned crm;
  int opc2;
  arm_feature_set deprecated;
  arm_feature_set obsoleted;
  const char *dep_msg;
  const char *obs_msg;
};

#define DEPR_ACCESS_V8 \
  N_("This coprocessor register access is deprecated in ARMv8")

/* Table of all deprecated coprocessor registers.  */
static struct deprecated_coproc_regs_s deprecated_coproc_regs[] =
{
    {15, 0, 7, 10, 5,					/* CP15DMB.  */
     ARM_FEATURE_CORE_LOW (ARM_EXT_V8), ARM_ARCH_NONE,
     DEPR_ACCESS_V8, NULL},
    {15, 0, 7, 10, 4,					/* CP15DSB.  */
     ARM_FEATURE_CORE_LOW (ARM_EXT_V8), ARM_ARCH_NONE,
     DEPR_ACCESS_V8, NULL},
    {15, 0, 7,  5, 4,					/* CP15ISB.  */
     ARM_FEATURE_CORE_LOW (ARM_EXT_V8), ARM_ARCH_NONE,
     DEPR_ACCESS_V8, NULL},
    {14, 6, 1,  0, 0,					/* TEEHBR.  */
     ARM_FEATURE_CORE_LOW (ARM_EXT_V8), ARM_ARCH_NONE,
     DEPR_ACCESS_V8, NULL},
    {14, 6, 0,  0, 0,					/* TEECR.  */
     ARM_FEATURE_CORE_LOW (ARM_EXT_V8), ARM_ARCH_NONE,
     DEPR_ACCESS_V8, NULL},
};
#undef DEPR_ACCESS_V8

static const size_t deprecated_coproc_reg_count =
  sizeof (deprecated_coproc_regs) / sizeof (deprecated_coproc_regs[0]);
/* Encode MRC/MRC2/MCR/MCR2, checking register restrictions and
   warning about deprecated coprocessor register accesses.  */
static void
do_co_reg (void)
{
  unsigned Rd;
  size_t i;

  Rd = inst.operands[2].reg;
  if (thumb_mode)
    {
      if (inst.instruction == 0xee000010
	  || inst.instruction == 0xfe000010)
	/* MCR, MCR2  */
	reject_bad_reg (Rd);
      else if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8))
	/* MRC, MRC2  */
	constraint (Rd == REG_SP, BAD_SP);
    }
  else
    {
      /* MCR */
      if (inst.instruction == 0xe000010)
	constraint (Rd == REG_PC, BAD_PC);
    }

  /* Warn if the register access matches a known-deprecated pattern.  */
  for (i = 0; i < deprecated_coproc_reg_count; ++i)
    {
      const struct deprecated_coproc_regs_s *r =
	deprecated_coproc_regs + i;

      if (inst.operands[0].reg == r->cp
	  && inst.operands[1].imm == r->opc1
	  && inst.operands[3].reg == r->crn
	  && inst.operands[4].reg == r->crm
	  && inst.operands[5].imm == r->opc2)
	{
	  if (! ARM_CPU_IS_ANY (cpu_variant)
	      && warn_on_deprecated
	      && ARM_CPU_HAS_FEATURE (cpu_variant, r->deprecated))
	    as_tsktsk ("%s", r->dep_msg);
	}
    }

  inst.instruction |= inst.operands[0].reg << 8;	/* coproc */
  inst.instruction |= inst.operands[1].imm << 21;	/* opc1 */
  inst.instruction |= Rd << 12;
  inst.instruction |= inst.operands[3].reg << 16;	/* CRn */
  inst.instruction |= inst.operands[4].reg;		/* CRm */
  inst.instruction |= inst.operands[5].imm << 5;	/* opc2 */
}
  8284. /* Transfer between coprocessor register and pair of ARM registers.
  8285. MCRR{cond} <coproc>, <opcode>, <Rd>, <Rn>, <CRm>.
  8286. MCRR2
  8287. MRRC{cond}
  8288. MRRC2
  8289. Two XScale instructions are special cases of these:
  8290. MAR{cond} acc0, <RdLo>, <RdHi> == MCRR{cond} p0, #0, <RdLo>, <RdHi>, c0
  8291. MRA{cond} acc0, <RdLo>, <RdHi> == MRRC{cond} p0, #0, <RdLo>, <RdHi>, c0
  8292. Result unpredictable if Rd or Rn is R15. */
/* Encode MCRR/MCRR2/MRRC/MRRC2: transfer between a coprocessor
   register and a pair of ARM core registers.  Operands: 0 = coproc,
   1 = opcode, 2 = Rt, 3 = Rt2, 4 = CRm.  */
static void
do_co_reg2c (void)
{
  unsigned Rd, Rn;

  Rd = inst.operands[2].reg;
  Rn = inst.operands[3].reg;

  if (thumb_mode)
    {
      reject_bad_reg (Rd);
      reject_bad_reg (Rn);
    }
  else
    {
      constraint (Rd == REG_PC, BAD_PC);
      constraint (Rn == REG_PC, BAD_PC);
    }

  /* Only check the MRRC{2} variants.  */
  if ((inst.instruction & 0x0FF00000) == 0x0C500000)
    {
       /* If Rd == Rn, error that the operation is
	  unpredictable (example MRRC p3,#1,r1,r1,c4).  */
       constraint (Rd == Rn, BAD_OVERLAP);
    }

  inst.instruction |= inst.operands[0].reg << 8;
  inst.instruction |= inst.operands[1].imm << 4;
  inst.instruction |= Rd << 12;
  inst.instruction |= Rn << 16;
  inst.instruction |= inst.operands[4].reg;
}
/* Encode CPS{IE,ID}: interrupt-mask bits go in at bit 6; an optional
   second operand supplies a mode number and sets the M-mod bit.  */
static void
do_cpsi (void)
{
  inst.instruction |= inst.operands[0].imm << 6;
  if (inst.operands[1].present)
    {
      inst.instruction |= CPSI_MMOD;
      inst.instruction |= inst.operands[1].imm;
    }
}
/* Encode DBG #<option>: the 4-bit option occupies the low bits.  */
static void
do_dbg (void)
{
  inst.instruction |= inst.operands[0].imm;
}
  8337. static void
  8338. do_div (void)
  8339. {
  8340. unsigned Rd, Rn, Rm;
  8341. Rd = inst.operands[0].reg;
  8342. Rn = (inst.operands[1].present
  8343. ? inst.operands[1].reg : Rd);
  8344. Rm = inst.operands[2].reg;
  8345. constraint ((Rd == REG_PC), BAD_PC);
  8346. constraint ((Rn == REG_PC), BAD_PC);
  8347. constraint ((Rm == REG_PC), BAD_PC);
  8348. inst.instruction |= Rd << 16;
  8349. inst.instruction |= Rn << 0;
  8350. inst.instruction |= Rm << 8;
  8351. }
/* Handle IT in ARM mode.  There is no IT instruction in ARM mode.  We
   process it to do the validation as if in
   thumb mode, just in case the code gets
   assembled for thumb using the unified syntax.  Emits no bytes
   (inst.size = 0); only the predication-tracking state is updated.  */
static void
do_it (void)
{
  inst.size = 0;
  if (unified_syntax)
    {
      set_pred_insn_type (IT_INSN);
      /* Low nibble of the opcode is the IT mask; bit 4 marks the
	 base condition position.  */
      now_pred.mask = (inst.instruction & 0xf) | 0x10;
      now_pred.cc = inst.operands[0].imm;
    }
}
  8367. /* If there is only one register in the register list,
  8368. then return its register number. Otherwise return -1. */
  8369. static int
  8370. only_one_reg_in_list (int range)
  8371. {
  8372. int i = ffs (range) - 1;
  8373. return (i > 15 || range != (1 << i)) ? -1 : i;
  8374. }
/* Encode an ARM LDM/STM (and PUSH/POP when FROM_PUSH_POP_MNEM):
   base register in bits 16-19, register list in bits 0-15, plus
   writeback/user-bank bits.  Warns about the UNPREDICTABLE writeback
   combinations, and rewrites single-register PUSH/POP to the A2
   (STR/LDR) encoding.  */
static void
encode_ldmstm(int from_push_pop_mnem)
{
  int base_reg = inst.operands[0].reg;
  int range = inst.operands[1].imm;
  int one_reg;

  inst.instruction |= base_reg << 16;
  inst.instruction |= range;

  /* A '^' suffix on the register list selects the LDM(2)/LDM(3)/STM(2)
     forms (user-bank transfer / exception return).  */
  if (inst.operands[1].writeback)
    inst.instruction |= LDM_TYPE_2_OR_3;

  if (inst.operands[0].writeback)
    {
      inst.instruction |= WRITE_BACK;
      /* Check for unpredictable uses of writeback.  */
      if (inst.instruction & LOAD_BIT)
	{
	  /* Not allowed in LDM type 2.  */
	  if ((inst.instruction & LDM_TYPE_2_OR_3)
	      && ((range & (1 << REG_PC)) == 0))
	    as_warn (_("writeback of base register is UNPREDICTABLE"));
	  /* Only allowed if base reg not in list for other types.  */
	  else if (range & (1 << base_reg))
	    as_warn (_("writeback of base register when in register list is UNPREDICTABLE"));
	}
      else /* STM.  */
	{
	  /* Not allowed for type 2.  */
	  if (inst.instruction & LDM_TYPE_2_OR_3)
	    as_warn (_("writeback of base register is UNPREDICTABLE"));
	  /* Only allowed if base reg not in list, or first in list.  */
	  else if ((range & (1 << base_reg))
		   && (range & ((1 << base_reg) - 1)))
	    as_warn (_("if writeback register is in list, it must be the lowest reg in the list"));
	}
    }

  /* If PUSH/POP has only one register, then use the A2 encoding.  */
  one_reg = only_one_reg_in_list (range);
  if (from_push_pop_mnem && one_reg >= 0)
    {
      int is_push = (inst.instruction & A_PUSH_POP_OP_MASK) == A1_OPCODE_PUSH;

      if (is_push && one_reg == 13 /* SP */)
	/* PR 22483: The A2 encoding cannot be used when
	   pushing the stack pointer as this is UNPREDICTABLE.  */
	return;

      inst.instruction &= A_COND_MASK;
      inst.instruction |= is_push ? A2_OPCODE_PUSH : A2_OPCODE_POP;
      inst.instruction |= one_reg << 12;
    }
}
/* LDM/STM proper: shares the encoder with PUSH/POP but without the
   single-register A2 rewriting.  */
static void
do_ldmstm (void)
{
  encode_ldmstm (/*from_push_pop_mnem=*/false);
}
  8429. /* ARMv5TE load-consecutive (argument parse)
  8430. Mode is like LDRH.
  8431. LDRccD R, mode
  8432. STRccD R, mode. */
/* Encode LDRD/STRD.  Operand 0 is the (even) first transfer register,
   optional operand 1 the second (defaults to Rt+1), operand 2 the
   address.  Diagnoses the UNPREDICTABLE register overlaps.  */
static void
do_ldrd (void)
{
  constraint (inst.operands[0].reg % 2 != 0,
	      _("first transfer register must be even"));
  constraint (inst.operands[1].present
	      && inst.operands[1].reg != inst.operands[0].reg + 1,
	      _("can only transfer two consecutive registers"));
  /* Rt == r14 would make the pair r14:r15, i.e. include PC.  */
  constraint (inst.operands[0].reg == REG_LR, _("r14 not allowed here"));
  constraint (!inst.operands[2].isreg, _("'[' expected"));

  if (!inst.operands[1].present)
    inst.operands[1].reg = inst.operands[0].reg + 1;

  /* encode_arm_addr_mode_3 will diagnose overlap between the base
     register and the first register written; we have to diagnose
     overlap between the base and the second register written here.  */
  if (inst.operands[2].reg == inst.operands[1].reg
      && (inst.operands[2].writeback || inst.operands[2].postind))
    as_warn (_("base register written back, and overlaps "
	       "second transfer register"));

  if (!(inst.instruction & V4_STR_BIT))
    {
      /* For an index-register load, the index register must not overlap the
	 destination (even if not write-back).  */
      if (inst.operands[2].immisreg
	  && ((unsigned) inst.operands[2].imm == inst.operands[0].reg
	      || (unsigned) inst.operands[2].imm == inst.operands[1].reg))
	as_warn (_("index register overlaps transfer register"));
    }
  inst.instruction |= inst.operands[0].reg << 12;
  encode_arm_addr_mode_3 (2, /*is_t=*/false);
}
/* Encode LDREX: Rt in bits 12-15, Rn in bits 16-19.  Only the bare
   [Rn] addressing form (zero offset, no writeback) is legal.  */
static void
do_ldrex (void)
{
  constraint (!inst.operands[1].isreg || !inst.operands[1].preind
	      || inst.operands[1].postind || inst.operands[1].writeback
	      || inst.operands[1].immisreg || inst.operands[1].shifted
	      || inst.operands[1].negative
	      /* This can arise if the programmer has written
		   strex rN, rM, foo
		 or if they have mistakenly used a register name as the last
		 operand,  eg:
		   strex rN, rM, rX
		 It is very difficult to distinguish between these two cases
		 because "rX" might actually be a label. ie the register
		 name has been occluded by a symbol of the same name. So we
		 just generate a general 'bad addressing mode' type error
		 message and leave it up to the programmer to discover the
		 true cause and fix their mistake.  */
	      || (inst.operands[1].reg == REG_PC),
	      BAD_ADDR_MODE);

  constraint (inst.relocs[0].exp.X_op != O_constant
	      || inst.relocs[0].exp.X_add_number != 0,
	      _("offset must be zero in ARM encoding"));

  /* NOTE(review): PC was already rejected by the BAD_ADDR_MODE
     constraint above, so this check can never fire; kept for safety.  */
  constraint ((inst.operands[1].reg == REG_PC), BAD_PC);

  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.relocs[0].type = BFD_RELOC_UNUSED;
}
/* Encode LDREXD: loads an even/odd register pair.  Operand 0 is the
   even Rt, optional operand 1 must be Rt+1, operand 2 is the base.  */
static void
do_ldrexd (void)
{
  constraint (inst.operands[0].reg % 2 != 0,
	      _("even register required"));
  constraint (inst.operands[1].present
	      && inst.operands[1].reg != inst.operands[0].reg + 1,
	      _("can only load two consecutive registers"));
  /* If op 1 were present and equal to PC, this function wouldn't
     have been called in the first place.  */
  constraint (inst.operands[0].reg == REG_LR, _("r14 not allowed here"));

  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[2].reg << 16;
}
  8506. /* In both ARM and thumb state 'ldr pc, #imm' with an immediate
  8507. which is not a multiple of four is UNPREDICTABLE. */
/* In both ARM and thumb state 'ldr pc, #imm' with an immediate
   which is not a multiple of four is UNPREDICTABLE.  Reject the
   literal form (PC base, constant offset) when misaligned.  */
static void
check_ldr_r15_aligned (void)
{
  constraint (!(inst.operands[1].immisreg)
	      && (inst.operands[0].reg == REG_PC
		  && inst.operands[1].reg == REG_PC
		  && (inst.relocs[0].exp.X_add_number & 0x3)),
	      _("ldr to register 15 must be 4-byte aligned"));
}
/* Encode LDR/STR (word/byte).  A non-register second operand is a
   literal: try to turn it into MOV or a literal-pool load first.  */
static void
do_ldst (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  if (!inst.operands[1].isreg)
    if (move_or_literal_pool (0, CONST_ARM, /*mode_3=*/false))
      return;
  encode_arm_addr_mode_2 (1, /*is_t=*/false);
  check_ldr_r15_aligned ();
}
/* Encode LDRT/STRT (user-mode translation forms).  */
static void
do_ldstt (void)
{
  /* ldrt/strt always use post-indexed addressing.  Turn [Rn] into [Rn]! and
     reject [Rn,...].  */
  if (inst.operands[1].preind)
    {
      constraint (inst.relocs[0].exp.X_op != O_constant
		  || inst.relocs[0].exp.X_add_number != 0,
		  _("this instruction requires a post-indexed address"));

      inst.operands[1].preind = 0;
      inst.operands[1].postind = 1;
      inst.operands[1].writeback = 1;
    }
  inst.instruction |= inst.operands[0].reg << 12;
  encode_arm_addr_mode_2 (1, /*is_t=*/true);
}
  8544. /* Halfword and signed-byte load/store operations. */
/* Encode the v4 halfword / signed-byte loads and stores (addressing
   mode 3).  Rt must not be PC.  */
static void
do_ldstv4 (void)
{
  constraint (inst.operands[0].reg == REG_PC, BAD_PC);
  inst.instruction |= inst.operands[0].reg << 12;
  if (!inst.operands[1].isreg)
    if (move_or_literal_pool (0, CONST_ARM, /*mode_3=*/true))
      return;
  encode_arm_addr_mode_3 (1, /*is_t=*/false);
}
/* Encode LDRHT/LDRSBT/etc. — the mode-3 analogue of do_ldstt.  */
static void
do_ldsttv4 (void)
{
  /* ldrt/strt always use post-indexed addressing.  Turn [Rn] into [Rn]! and
     reject [Rn,...].  */
  if (inst.operands[1].preind)
    {
      constraint (inst.relocs[0].exp.X_op != O_constant
		  || inst.relocs[0].exp.X_add_number != 0,
		  _("this instruction requires a post-indexed address"));

      inst.operands[1].preind = 0;
      inst.operands[1].postind = 1;
      inst.operands[1].writeback = 1;
    }
  inst.instruction |= inst.operands[0].reg << 12;
  encode_arm_addr_mode_3 (1, /*is_t=*/true);
}
  8572. /* Co-processor register load/store.
  8573. Format: <LDC|STC>{cond}[L] CP#,CRd,<address> */
/* Encode LDC/STC: coprocessor number in bits 8-11, CRd in bits 12-15,
   address encoded by the coprocessor address helper.  */
static void
do_lstc (void)
{
  inst.instruction |= inst.operands[0].reg << 8;
  inst.instruction |= inst.operands[1].reg << 12;
  encode_arm_cp_address (2, true, true, 0);
}
/* Encode MLA/MLAS/MLS: Rd in bits 16-19, Rm in 0-3, Rs in 8-11,
   Rn (accumulator) in 12-15.  */
static void
do_mlas (void)
{
  /* This restriction does not apply to mls (nor to mla in v6 or later).  */
  if (inst.operands[0].reg == inst.operands[1].reg
      && !ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v6)
      && !(inst.instruction & 0x00400000))
    as_tsktsk (_("Rd and Rm should be different in mla"));

  inst.instruction |= inst.operands[0].reg << 16;
  inst.instruction |= inst.operands[1].reg;
  inst.instruction |= inst.operands[2].reg << 8;
  inst.instruction |= inst.operands[3].reg << 12;
}
/* Encode MOV/MVN: Rd in bits 12-15 plus a shifter operand.  The
   Thumb-1 group-relocation operators are not valid here.  */
static void
do_mov (void)
{
  constraint (inst.relocs[0].type >= BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC
	      && inst.relocs[0].type <= BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC ,
	      THUMB1_RELOC_ONLY);
  inst.instruction |= inst.operands[0].reg << 12;
  encode_arm_shifter_operand (1);
}
  8603. /* ARM V6T2 16-bit immediate register load: MOV[WT]{cond} Rd, #<imm16>. */
  8604. static void
  8605. do_mov16 (void)
  8606. {
  8607. bfd_vma imm;
  8608. bool top;
  8609. top = (inst.instruction & 0x00400000) != 0;
  8610. constraint (top && inst.relocs[0].type == BFD_RELOC_ARM_MOVW,
  8611. _(":lower16: not allowed in this instruction"));
  8612. constraint (!top && inst.relocs[0].type == BFD_RELOC_ARM_MOVT,
  8613. _(":upper16: not allowed in this instruction"));
  8614. inst.instruction |= inst.operands[0].reg << 12;
  8615. if (inst.relocs[0].type == BFD_RELOC_UNUSED)
  8616. {
  8617. imm = inst.relocs[0].exp.X_add_number;
  8618. /* The value is in two pieces: 0:11, 16:19. */
  8619. inst.instruction |= (imm & 0x00000fff);
  8620. inst.instruction |= (imm & 0x0000f000) << 4;
  8621. }
  8622. }
/* Try to handle MRS written in VFP (non-unified) syntax.  Returns
   SUCCESS if the instruction was handled as fmstat/fmrx, FAIL if it
   is a plain core-register MRS that the caller should encode.  */
static int
do_vfp_nsyn_mrs (void)
{
  if (inst.operands[0].isvec)
    {
      if (inst.operands[1].reg != 1)
	first_error (_("operand 1 must be FPSCR"));
      /* Clear both operands: fmstat takes none.  */
      memset (&inst.operands[0], '\0', sizeof (inst.operands[0]));
      memset (&inst.operands[1], '\0', sizeof (inst.operands[1]));
      do_vfp_nsyn_opcode ("fmstat");
    }
  else if (inst.operands[1].isvec)
    do_vfp_nsyn_opcode ("fmrx");
  else
    return FAIL;

  return SUCCESS;
}
  8640. static int
  8641. do_vfp_nsyn_msr (void)
  8642. {
  8643. if (inst.operands[0].isvec)
  8644. do_vfp_nsyn_opcode ("fmxr");
  8645. else
  8646. return FAIL;
  8647. return SUCCESS;
  8648. }
  8649. static void
  8650. do_vmrs (void)
  8651. {
  8652. unsigned Rt = inst.operands[0].reg;
  8653. if (thumb_mode && Rt == REG_SP)
  8654. {
  8655. inst.error = BAD_SP;
  8656. return;
  8657. }
  8658. switch (inst.operands[1].reg)
  8659. {
  8660. /* MVFR2 is only valid for Armv8-A. */
  8661. case 5:
  8662. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_armv8),
  8663. _(BAD_FPU));
  8664. break;
  8665. /* Check for new Armv8.1-M Mainline changes to <spec_reg>. */
  8666. case 1: /* fpscr. */
  8667. constraint (!(ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
  8668. || ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd)),
  8669. _(BAD_FPU));
  8670. break;
  8671. case 14: /* fpcxt_ns. */
  8672. case 15: /* fpcxt_s. */
  8673. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8_1m_main),
  8674. _("selected processor does not support instruction"));
  8675. break;
  8676. case 2: /* fpscr_nzcvqc. */
  8677. case 12: /* vpr. */
  8678. case 13: /* p0. */
  8679. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8_1m_main)
  8680. || (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
  8681. && !ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd)),
  8682. _("selected processor does not support instruction"));
  8683. if (inst.operands[0].reg != 2
  8684. && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  8685. as_warn (_("accessing MVE system register without MVE is UNPREDICTABLE"));
  8686. break;
  8687. default:
  8688. break;
  8689. }
  8690. /* APSR_ sets isvec. All other refs to PC are illegal. */
  8691. if (!inst.operands[0].isvec && Rt == REG_PC)
  8692. {
  8693. inst.error = BAD_PC;
  8694. return;
  8695. }
  8696. /* If we get through parsing the register name, we just insert the number
  8697. generated into the instruction without further validation. */
  8698. inst.instruction |= (inst.operands[1].reg << 16);
  8699. inst.instruction |= (Rt << 12);
  8700. }
/* Encode VMSR <spec_reg>, <Rt>.  Operand 0 is the VFP/MVE
   system-register number (so, unlike do_vmrs, operand 0 is the value
   validated below); operand 1 is the core register Rt.  */
static void
do_vmsr (void)
{
  unsigned Rt = inst.operands[1].reg;

  if (thumb_mode)
    reject_bad_reg (Rt);
  else if (Rt == REG_PC)
    {
      inst.error = BAD_PC;
      return;
    }

  switch (inst.operands[0].reg)
    {
    /* MVFR2 is only valid for Armv8-A.  */
    case 5:
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_armv8),
		  _(BAD_FPU));
      break;

    /* Check for new Armv8.1-M Mainline changes to <spec_reg>.  */
    case 1: /* fpscr.  */
      constraint (!(ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
		    || ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd)),
		  _(BAD_FPU));
      break;

    case 14: /* fpcxt_ns.  */
    case 15: /* fpcxt_s.  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8_1m_main),
		  _("selected processor does not support instruction"));
      break;

    case  2: /* fpscr_nzcvqc.  */
    case 12: /* vpr.  */
    case 13: /* p0.  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8_1m_main)
		  || (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
		      && !ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd)),
		  _("selected processor does not support instruction"));
      /* Only fpscr_nzcvqc (2) is architected without MVE; vpr and p0
	 require it.  */
      if (inst.operands[0].reg != 2
	  && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	as_warn (_("accessing MVE system register without MVE is UNPREDICTABLE"));
      break;

    default:
      break;
    }

  /* If we get through parsing the register name, we just insert the number
     generated into the instruction without further validation.  */
  inst.instruction |= (inst.operands[0].reg << 16);
  inst.instruction |= (Rt << 12);
}
/* Encode MRS.  Tries the VFP non-unified form first; otherwise Rd
   goes in bits 12-15 and the PSR/banked-register selector is OR'd in.  */
static void
do_mrs (void)
{
  unsigned br;

  if (do_vfp_nsyn_mrs () == SUCCESS)
    return;

  constraint (inst.operands[0].reg == REG_PC, BAD_PC);
  inst.instruction |= inst.operands[0].reg << 12;

  if (inst.operands[1].isreg)
    {
      /* Banked register: the parsed value carries the SYSm/R encoding.  */
      br = inst.operands[1].reg;
      if (((br & 0x200) == 0) && ((br & 0xf0000) != 0xf0000))
	as_bad (_("bad register for mrs"));
    }
  else
    {
      /* mrs only accepts CPSR/SPSR/CPSR_all/SPSR_all.  */
      constraint ((inst.operands[1].imm & (PSR_c|PSR_x|PSR_s|PSR_f))
		  != (PSR_c|PSR_f),
		  _("'APSR', 'CPSR' or 'SPSR' expected"));
      br = (15<<16) | (inst.operands[1].imm & SPSR_BIT);
    }

  inst.instruction |= br;
}
  8773. /* Two possible forms:
  8774. "{C|S}PSR_<field>, Rm",
  8775. "{C|S}PSR_f, #expression". */
/* Encode MSR.  Tries the VFP non-unified form first; otherwise the
   field mask (parsed into operand 0's imm) is OR'd in and the source
   is either a register or an immediate needing a reloc.  */
static void
do_msr (void)
{
  if (do_vfp_nsyn_msr () == SUCCESS)
    return;

  inst.instruction |= inst.operands[0].imm;
  if (inst.operands[1].isreg)
    inst.instruction |= inst.operands[1].reg;
  else
    {
      inst.instruction |= INST_IMMEDIATE;
      inst.relocs[0].type = BFD_RELOC_ARM_IMMEDIATE;
      inst.relocs[0].pc_rel = 0;
    }
}
/* Encode MUL: Rd in bits 16-19, Rm in 0-3, Rs in 8-11.  A missing
   third operand defaults to Rd.  */
static void
do_mul (void)
{
  constraint (inst.operands[2].reg == REG_PC, BAD_PC);

  if (!inst.operands[2].present)
    inst.operands[2].reg = inst.operands[0].reg;
  inst.instruction |= inst.operands[0].reg << 16;
  inst.instruction |= inst.operands[1].reg;
  inst.instruction |= inst.operands[2].reg << 8;

  /* Rd == Rm was UNPREDICTABLE before ARMv6.  */
  if (inst.operands[0].reg == inst.operands[1].reg
      && !ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v6))
    as_tsktsk (_("Rd and Rm should be different in mul"));
}
  8804. /* Long Multiply Parser
  8805. UMULL RdLo, RdHi, Rm, Rs
  8806. SMULL RdLo, RdHi, Rm, Rs
  8807. UMLAL RdLo, RdHi, Rm, Rs
  8808. SMLAL RdLo, RdHi, Rm, Rs. */
/* Encode UMULL/SMULL/UMLAL/SMLAL: RdLo in bits 12-15, RdHi in 16-19,
   Rm in 0-3, Rs in 8-11.  */
static void
do_mull (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= inst.operands[2].reg;
  inst.instruction |= inst.operands[3].reg << 8;

  /* rdhi and rdlo must be different.  */
  if (inst.operands[0].reg == inst.operands[1].reg)
    as_tsktsk (_("rdhi and rdlo must be different"));

  /* rdhi, rdlo and rm must all be different before armv6.  */
  if ((inst.operands[0].reg == inst.operands[2].reg
       || inst.operands[1].reg == inst.operands[2].reg)
      && !ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v6))
    as_tsktsk (_("rdhi, rdlo and rm must all be different"));
}
/* Encode NOP{<hint>}.  On v6K+ (or when a hint operand is given) emit
   the architectural hint encoding; otherwise leave the legacy opcode.  */
static void
do_nop (void)
{
  if (inst.operands[0].present
      || ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v6k))
    {
      /* Architectural NOP hints are CPSR sets with no bits selected.  */
      inst.instruction &= 0xf0000000;
      inst.instruction |= 0x0320f000;
      if (inst.operands[0].present)
	inst.instruction |= inst.operands[0].imm;
    }
}
  8838. /* ARM V6 Pack Halfword Bottom Top instruction (argument parse).
  8839. PKHBT {<cond>} <Rd>, <Rn>, <Rm> {, LSL #<shift_imm>}
  8840. Condition defaults to COND_ALWAYS.
  8841. Error if Rd, Rn or Rm are R15. */
/* Encode PKHBT: Rd in bits 12-15, Rn in 16-19, Rm in 0-3, with an
   optional LSL shift on Rm.  */
static void
do_pkhbt (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= inst.operands[2].reg;
  if (inst.operands[3].present)
    encode_arm_shift (3);
}
  8851. /* ARM V6 PKHTB (Argument Parse). */
  8852. static void
  8853. do_pkhtb (void)
  8854. {
  8855. if (!inst.operands[3].present)
  8856. {
  8857. /* If the shift specifier is omitted, turn the instruction
  8858. into pkhbt rd, rm, rn. */
  8859. inst.instruction &= 0xfff00010;
  8860. inst.instruction |= inst.operands[0].reg << 12;
  8861. inst.instruction |= inst.operands[1].reg;
  8862. inst.instruction |= inst.operands[2].reg << 16;
  8863. }
  8864. else
  8865. {
  8866. inst.instruction |= inst.operands[0].reg << 12;
  8867. inst.instruction |= inst.operands[1].reg << 16;
  8868. inst.instruction |= inst.operands[2].reg;
  8869. encode_arm_shift (3);
  8870. }
  8871. }
  8872. /* ARMv5TE: Preload-Cache
  8873. MP Extensions: Preload for write
  8874. PLD(W) <addr_mode>
  8875. Syntactically, like LDR with B=1, W=0, L=1. */
/* Encode PLD/PLDW.  Only a plain pre-indexed address is legal: no
   post-index, no writeback.  */
static void
do_pld (void)
{
  constraint (!inst.operands[0].isreg,
	      _("'[' expected after PLD mnemonic"));
  constraint (inst.operands[0].postind,
	      _("post-indexed expression used in preload instruction"));
  constraint (inst.operands[0].writeback,
	      _("writeback used in preload instruction"));
  constraint (!inst.operands[0].preind,
	      _("unindexed addressing used in preload instruction"));
  encode_arm_addr_mode_2 (0, /*is_t=*/false);
}
  8889. /* ARMv7: PLI <addr_mode> */
/* Encode PLI.  Same addressing restrictions as PLD, but the P bit is
   cleared in the final encoding.  */
static void
do_pli (void)
{
  constraint (!inst.operands[0].isreg,
	      _("'[' expected after PLI mnemonic"));
  constraint (inst.operands[0].postind,
	      _("post-indexed expression used in preload instruction"));
  constraint (inst.operands[0].writeback,
	      _("writeback used in preload instruction"));
  constraint (!inst.operands[0].preind,
	      _("unindexed addressing used in preload instruction"));
  encode_arm_addr_mode_2 (0, /*is_t=*/false);
  /* PLI encodes with P clear, unlike the LDR-shaped syntax it parses.  */
  inst.instruction &= ~PRE_INDEX;
}
/* Encode PUSH/POP by rewriting the operands into the equivalent
   LDM/STM form: SP with writeback as the base, list as operand 1.  */
static void
do_push_pop (void)
{
  constraint (inst.operands[0].writeback,
	      _("push/pop do not support {reglist}^"));
  inst.operands[1] = inst.operands[0];
  memset (&inst.operands[0], 0, sizeof inst.operands[0]);
  inst.operands[0].isreg = 1;
  inst.operands[0].writeback = 1;
  inst.operands[0].reg = REG_SP;
  encode_ldmstm (/*from_push_pop_mnem=*/true);
}
  8916. /* ARM V6 RFE (Return from Exception) loads the PC and CPSR from the
  8917. word at the specified address and the following word
  8918. respectively.
  8919. Unconditionally executed.
  8920. Error if Rn is R15. */
/* Encode RFE: base register in bits 16-19, optional writeback bit.  */
static void
do_rfe (void)
{
  inst.instruction |= inst.operands[0].reg << 16;
  if (inst.operands[0].writeback)
    inst.instruction |= WRITE_BACK;
}
  8928. /* ARM V6 ssat (argument parse). */
/* Encode SSAT: Rd in bits 12-15, saturate position (stored as imm-1)
   in bits 16-20, Rn in 0-3, optional shift.  */
static void
do_ssat (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  /* SSAT encodes the saturate bit position biased by one.  */
  inst.instruction |= (inst.operands[1].imm - 1) << 16;
  inst.instruction |= inst.operands[2].reg;

  if (inst.operands[3].present)
    encode_arm_shift (3);
}
  8938. /* ARM V6 usat (argument parse). */
/* Encode USAT: like SSAT but the bit position is used unbiased.  */
static void
do_usat (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].imm << 16;
  inst.instruction |= inst.operands[2].reg;

  if (inst.operands[3].present)
    encode_arm_shift (3);
}
  8948. /* ARM V6 ssat16 (argument parse). */
/* Encode SSAT16: biased saturate position, no shift operand.  */
static void
do_ssat16 (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= ((inst.operands[1].imm - 1) << 16);
  inst.instruction |= inst.operands[2].reg;
}
/* Encode USAT16: unbiased saturate position, no shift operand.  */
static void
do_usat16 (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].imm << 16;
  inst.instruction |= inst.operands[2].reg;
}
  8963. /* ARM V6 SETEND (argument parse). Sets the E bit in the CPSR while
  8964. preserving the other bits.
  8965. setend <endian_specifier>, where <endian_specifier> is either
  8966. BE or LE. */
/* Encode SETEND: bit 9 selects big-endian.  Deprecated from ARMv8.  */
static void
do_setend (void)
{
  if (warn_on_deprecated
      && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8))
      as_tsktsk (_("setend use is deprecated for ARMv8"));

  if (inst.operands[0].imm)
    inst.instruction |= 0x200;
}
  8976. static void
  8977. do_shift (void)
  8978. {
  8979. unsigned int Rm = (inst.operands[1].present
  8980. ? inst.operands[1].reg
  8981. : inst.operands[0].reg);
  8982. inst.instruction |= inst.operands[0].reg << 12;
  8983. inst.instruction |= Rm;
  8984. if (inst.operands[2].isreg) /* Rd, {Rm,} Rs */
  8985. {
  8986. inst.instruction |= inst.operands[2].reg << 8;
  8987. inst.instruction |= SHIFT_BY_REG;
  8988. /* PR 12854: Error on extraneous shifts. */
  8989. constraint (inst.operands[2].shifted,
  8990. _("extraneous shift as part of operand to shift insn"));
  8991. }
  8992. else
  8993. inst.relocs[0].type = BFD_RELOC_ARM_SHIFT_IMM;
  8994. }
/* Encode SMC #<imm4>: value is range-checked here, emitted via reloc.  */
static void
do_smc (void)
{
  unsigned int value = inst.relocs[0].exp.X_add_number;
  constraint (value > 0xf, _("immediate too large (bigger than 0xF)"));

  inst.relocs[0].type = BFD_RELOC_ARM_SMC;
  inst.relocs[0].pc_rel = 0;
}
/* Encode HVC #<imm16>: the immediate is emitted via its reloc.  */
static void
do_hvc (void)
{
  inst.relocs[0].type = BFD_RELOC_ARM_HVC;
  inst.relocs[0].pc_rel = 0;
}
/* Encode SWI/SVC #<imm24>: the immediate is emitted via its reloc.  */
static void
do_swi (void)
{
  inst.relocs[0].type = BFD_RELOC_ARM_SWI;
  inst.relocs[0].pc_rel = 0;
}
/* Encode ARM-mode SETPAN #<imm1>: the PAN value goes in bit 9.  */
static void
do_setpan (void)
{
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_pan),
	      _("selected processor does not support SETPAN instruction"));

  inst.instruction |= ((inst.operands[0].imm & 1) << 9);
}
/* Encode Thumb-mode SETPAN #<imm1>: the PAN value goes in bit 3.  */
static void
do_t_setpan (void)
{
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_pan),
	      _("selected processor does not support SETPAN instruction"));

  inst.instruction |= (inst.operands[0].imm << 3);
}
  9029. /* ARM V5E (El Segundo) signed-multiply-accumulate (argument parse)
  9030. SMLAxy{cond} Rd,Rm,Rs,Rn
  9031. SMLAWy{cond} Rd,Rm,Rs,Rn
  9032. Error if any register is R15. */
/* Encode SMLAxy/SMLAWy: Rd in bits 16-19, Rm in 0-3, Rs in 8-11,
   Rn (accumulator) in 12-15.  */
static void
do_smla (void)
{
  inst.instruction |= inst.operands[0].reg << 16;
  inst.instruction |= inst.operands[1].reg;
  inst.instruction |= inst.operands[2].reg << 8;
  inst.instruction |= inst.operands[3].reg << 12;
}
  9041. /* ARM V5E (El Segundo) signed-multiply-accumulate-long (argument parse)
  9042. SMLALxy{cond} Rdlo,Rdhi,Rm,Rs
  9043. Error if any register is R15.
  9044. Warning if Rdlo == Rdhi. */
/* Encode SMLALxy: RdLo in bits 12-15, RdHi in 16-19, Rm in 0-3,
   Rs in 8-11; warns when RdLo == RdHi.  */
static void
do_smlal (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= inst.operands[2].reg;
  inst.instruction |= inst.operands[3].reg << 8;

  if (inst.operands[0].reg == inst.operands[1].reg)
    as_tsktsk (_("rdhi and rdlo must be different"));
}
  9055. /* ARM V5E (El Segundo) signed-multiply (argument parse)
  9056. SMULxy{cond} Rd,Rm,Rs
  9057. Error if any register is R15. */
/* Encode SMULxy: Rd in bits 16-19, Rm in 0-3, Rs in 8-11.  */
static void
do_smul (void)
{
  inst.instruction |= inst.operands[0].reg << 16;
  inst.instruction |= inst.operands[1].reg;
  inst.instruction |= inst.operands[2].reg << 8;
}
  9065. /* ARM V6 srs (argument parse). The variable fields in the encoding are
  9066. the same for both ARM and Thumb-2. */
  9067. static void
  9068. do_srs (void)
  9069. {
  9070. int reg;
  9071. if (inst.operands[0].present)
  9072. {
  9073. reg = inst.operands[0].reg;
  9074. constraint (reg != REG_SP, _("SRS base register must be r13"));
  9075. }
  9076. else
  9077. reg = REG_SP;
  9078. inst.instruction |= reg << 16;
  9079. inst.instruction |= inst.operands[1].imm;
  9080. if (inst.operands[0].writeback || inst.operands[1].writeback)
  9081. inst.instruction |= WRITE_BACK;
  9082. }
  9083. /* ARM V6 strex (argument parse). */
/* Encode STREX: Rd in bits 12-15, Rt in 0-3, Rn in 16-19.  Only a
   bare [Rn] address is legal, and Rd must not overlap Rt or Rn.  */
static void
do_strex (void)
{
  constraint (!inst.operands[2].isreg || !inst.operands[2].preind
	      || inst.operands[2].postind || inst.operands[2].writeback
	      || inst.operands[2].immisreg || inst.operands[2].shifted
	      || inst.operands[2].negative
	      /* See comment in do_ldrex().  */
	      || (inst.operands[2].reg == REG_PC),
	      BAD_ADDR_MODE);

  constraint (inst.operands[0].reg == inst.operands[1].reg
	      || inst.operands[0].reg == inst.operands[2].reg, BAD_OVERLAP);

  constraint (inst.relocs[0].exp.X_op != O_constant
	      || inst.relocs[0].exp.X_add_number != 0,
	      _("offset must be zero in ARM encoding"));

  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg;
  inst.instruction |= inst.operands[2].reg << 16;
  inst.relocs[0].type = BFD_RELOC_UNUSED;
}
/* Thumb variant of strexb/strexh (argument parse).  */
static void
do_t_strexbh (void)
{
  /* Operand 2 must be a plain [Rn] address: register, pre-indexed,
     no post-index, no writeback, no register offset, no shift and
     not negative.  */
  constraint (!inst.operands[2].isreg || !inst.operands[2].preind
	      || inst.operands[2].postind || inst.operands[2].writeback
	      || inst.operands[2].immisreg || inst.operands[2].shifted
	      || inst.operands[2].negative,
	      BAD_ADDR_MODE);

  /* The status register must not overlap the value or the base.  */
  constraint (inst.operands[0].reg == inst.operands[1].reg
	      || inst.operands[0].reg == inst.operands[2].reg, BAD_OVERLAP);

  do_rm_rd_rn ();
}
static void
do_strexd (void)
{
  /* The stored pair is Rt, Rt+1: Rt must be even and the optional
     second register operand, if given, must be Rt+1.  */
  constraint (inst.operands[1].reg % 2 != 0,
	      _("even register required"));
  constraint (inst.operands[2].present
	      && inst.operands[2].reg != inst.operands[1].reg + 1,
	      _("can only store two consecutive registers"));
  /* If op 2 were present and equal to PC, this function wouldn't
     have been called in the first place.  */
  constraint (inst.operands[1].reg == REG_LR, _("r14 not allowed here"));

  /* The status register must not overlap either half of the pair,
     nor the base register.  */
  constraint (inst.operands[0].reg == inst.operands[1].reg
	      || inst.operands[0].reg == inst.operands[1].reg + 1
	      || inst.operands[0].reg == inst.operands[3].reg,
	      BAD_OVERLAP);

  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg;
  inst.instruction |= inst.operands[3].reg << 16;
}
  9135. /* ARM V8 STRL. */
  9136. static void
  9137. do_stlex (void)
  9138. {
  9139. constraint (inst.operands[0].reg == inst.operands[1].reg
  9140. || inst.operands[0].reg == inst.operands[2].reg, BAD_OVERLAP);
  9141. do_rd_rm_rn ();
  9142. }
  9143. static void
  9144. do_t_stlex (void)
  9145. {
  9146. constraint (inst.operands[0].reg == inst.operands[1].reg
  9147. || inst.operands[0].reg == inst.operands[2].reg, BAD_OVERLAP);
  9148. do_rm_rd_rn ();
  9149. }
  9150. /* ARM V6 SXTAH extracts a 16-bit value from a register, sign
  9151. extends it to 32-bits, and adds the result to a value in another
  9152. register. You can specify a rotation by 0, 8, 16, or 24 bits
  9153. before extracting the 16-bit value.
  9154. SXTAH{<cond>} <Rd>, <Rn>, <Rm>{, <rotation>}
  9155. Condition defaults to COND_ALWAYS.
  9156. Error if any register uses R15. */
  9157. static void
  9158. do_sxtah (void)
  9159. {
  9160. inst.instruction |= inst.operands[0].reg << 12;
  9161. inst.instruction |= inst.operands[1].reg << 16;
  9162. inst.instruction |= inst.operands[2].reg;
  9163. inst.instruction |= inst.operands[3].imm << 10;
  9164. }
  9165. /* ARM V6 SXTH.
  9166. SXTH {<cond>} <Rd>, <Rm>{, <rotation>}
  9167. Condition defaults to COND_ALWAYS.
  9168. Error if any register uses R15. */
  9169. static void
  9170. do_sxth (void)
  9171. {
  9172. inst.instruction |= inst.operands[0].reg << 12;
  9173. inst.instruction |= inst.operands[1].reg;
  9174. inst.instruction |= inst.operands[2].imm << 10;
  9175. }
  9176. /* VFP instructions. In a logical order: SP variant first, monad
  9177. before dyad, arithmetic then move then load/store. */
  9178. static void
  9179. do_vfp_sp_monadic (void)
  9180. {
  9181. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd)
  9182. && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
  9183. _(BAD_FPU));
  9184. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sd);
  9185. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Sm);
  9186. }
  9187. static void
  9188. do_vfp_sp_dyadic (void)
  9189. {
  9190. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sd);
  9191. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Sn);
  9192. encode_arm_vfp_reg (inst.operands[2].reg, VFP_REG_Sm);
  9193. }
/* Single-precision operand encoding where only Sd is present
   (compare-with-zero form: no Sm operand to encode).  */
static void
do_vfp_sp_compare_z (void)
{
  encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sd);
}
  9199. static void
  9200. do_vfp_dp_sp_cvt (void)
  9201. {
  9202. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dd);
  9203. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Sm);
  9204. }
  9205. static void
  9206. do_vfp_sp_dp_cvt (void)
  9207. {
  9208. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sd);
  9209. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Dm);
  9210. }
  9211. static void
  9212. do_vfp_reg_from_sp (void)
  9213. {
  9214. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd)
  9215. && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
  9216. _(BAD_FPU));
  9217. inst.instruction |= inst.operands[0].reg << 12;
  9218. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Sn);
  9219. }
  9220. static void
  9221. do_vfp_reg2_from_sp2 (void)
  9222. {
  9223. constraint (inst.operands[2].imm != 2,
  9224. _("only two consecutive VFP SP registers allowed here"));
  9225. inst.instruction |= inst.operands[0].reg << 12;
  9226. inst.instruction |= inst.operands[1].reg << 16;
  9227. encode_arm_vfp_reg (inst.operands[2].reg, VFP_REG_Sm);
  9228. }
  9229. static void
  9230. do_vfp_sp_from_reg (void)
  9231. {
  9232. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd)
  9233. && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
  9234. _(BAD_FPU));
  9235. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sn);
  9236. inst.instruction |= inst.operands[1].reg << 12;
  9237. }
  9238. static void
  9239. do_vfp_sp2_from_reg2 (void)
  9240. {
  9241. constraint (inst.operands[0].imm != 2,
  9242. _("only two consecutive VFP SP registers allowed here"));
  9243. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sm);
  9244. inst.instruction |= inst.operands[1].reg << 12;
  9245. inst.instruction |= inst.operands[2].reg << 16;
  9246. }
/* Single-precision load/store: Sd plus a coprocessor address
   (operand 1).  */
static void
do_vfp_sp_ldst (void)
{
  encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sd);
  encode_arm_cp_address (1, false, true, 0);
}
/* Double-precision load/store: Dd plus a coprocessor address
   (operand 1).  */
static void
do_vfp_dp_ldst (void)
{
  encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dd);
  encode_arm_cp_address (1, false, true, 0);
}
  9259. static void
  9260. vfp_sp_ldstm (enum vfp_ldstm_type ldstm_type)
  9261. {
  9262. if (inst.operands[0].writeback)
  9263. inst.instruction |= WRITE_BACK;
  9264. else
  9265. constraint (ldstm_type != VFP_LDSTMIA,
  9266. _("this addressing mode requires base-register writeback"));
  9267. inst.instruction |= inst.operands[0].reg << 16;
  9268. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Sd);
  9269. inst.instruction |= inst.operands[1].imm;
  9270. }
  9271. static void
  9272. vfp_dp_ldstm (enum vfp_ldstm_type ldstm_type)
  9273. {
  9274. int count;
  9275. if (inst.operands[0].writeback)
  9276. inst.instruction |= WRITE_BACK;
  9277. else
  9278. constraint (ldstm_type != VFP_LDSTMIA && ldstm_type != VFP_LDSTMIAX,
  9279. _("this addressing mode requires base-register writeback"));
  9280. inst.instruction |= inst.operands[0].reg << 16;
  9281. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Dd);
  9282. count = inst.operands[1].imm << 1;
  9283. if (ldstm_type == VFP_LDSTMIAX || ldstm_type == VFP_LDSTMDBX)
  9284. count += 1;
  9285. inst.instruction |= count;
  9286. }
/* Single-precision load/store multiple, increment-after.  */
static void
do_vfp_sp_ldstmia (void)
{
  vfp_sp_ldstm (VFP_LDSTMIA);
}
/* Single-precision load/store multiple, decrement-before.  */
static void
do_vfp_sp_ldstmdb (void)
{
  vfp_sp_ldstm (VFP_LDSTMDB);
}
/* Double-precision load/store multiple, increment-after.  */
static void
do_vfp_dp_ldstmia (void)
{
  vfp_dp_ldstm (VFP_LDSTMIA);
}
/* Double-precision load/store multiple, decrement-before.  */
static void
do_vfp_dp_ldstmdb (void)
{
  vfp_dp_ldstm (VFP_LDSTMDB);
}
/* Extension-word (X) variant, increment-after.  */
static void
do_vfp_xp_ldstmia (void)
{
  vfp_dp_ldstm (VFP_LDSTMIAX);
}
/* Extension-word (X) variant, decrement-before.  */
static void
do_vfp_xp_ldstmdb (void)
{
  vfp_dp_ldstm (VFP_LDSTMDBX);
}
  9317. static void
  9318. do_vfp_dp_rd_rm (void)
  9319. {
  9320. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1)
  9321. && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
  9322. _(BAD_FPU));
  9323. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dd);
  9324. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Dm);
  9325. }
  9326. static void
  9327. do_vfp_dp_rn_rd (void)
  9328. {
  9329. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dn);
  9330. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Dd);
  9331. }
  9332. static void
  9333. do_vfp_dp_rd_rn (void)
  9334. {
  9335. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dd);
  9336. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Dn);
  9337. }
  9338. static void
  9339. do_vfp_dp_rd_rn_rm (void)
  9340. {
  9341. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v2)
  9342. && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
  9343. _(BAD_FPU));
  9344. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dd);
  9345. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Dn);
  9346. encode_arm_vfp_reg (inst.operands[2].reg, VFP_REG_Dm);
  9347. }
/* Double-precision form where only the Dd field is encoded.  */
static void
do_vfp_dp_rd (void)
{
  encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dd);
}
  9353. static void
  9354. do_vfp_dp_rm_rd_rn (void)
  9355. {
  9356. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v2)
  9357. && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
  9358. _(BAD_FPU));
  9359. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dm);
  9360. encode_arm_vfp_reg (inst.operands[1].reg, VFP_REG_Dd);
  9361. encode_arm_vfp_reg (inst.operands[2].reg, VFP_REG_Dn);
  9362. }
  9363. /* VFPv3 instructions. */
  9364. static void
  9365. do_vfp_sp_const (void)
  9366. {
  9367. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sd);
  9368. inst.instruction |= (inst.operands[1].imm & 0xf0) << 12;
  9369. inst.instruction |= (inst.operands[1].imm & 0x0f);
  9370. }
  9371. static void
  9372. do_vfp_dp_const (void)
  9373. {
  9374. encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dd);
  9375. inst.instruction |= (inst.operands[1].imm & 0xf0) << 12;
  9376. inst.instruction |= (inst.operands[1].imm & 0x0f);
  9377. }
/* Encode the fraction-bits immediate of a fixed-point conversion.
   SRCSIZE is the operand width (16 or 32); the encoded value is
   SRCSIZE minus the requested fraction bits, split into bit 5 (low
   bit) and bits 0-3 (remaining bits).  */
static void
vfp_conv (int srcsize)
{
  int immbits = srcsize - inst.operands[1].imm;

  if (srcsize == 16 && !(immbits >= 0 && immbits <= srcsize))
    {
      /* If srcsize is 16, inst.operands[1].imm must be in the range 0-16.
	 i.e. immbits must be in range 0 - 16.  */
      inst.error = _("immediate value out of range, expected range [0, 16]");
      return;
    }
  else if (srcsize == 32 && !(immbits >= 0 && immbits < srcsize))
    {
      /* If srcsize is 32, inst.operands[1].imm must be in the range 1-32.
	 i.e. immbits must be in range 0 - 31.  */
      inst.error = _("immediate value out of range, expected range [1, 32]");
      return;
    }

  inst.instruction |= (immbits & 1) << 5;
  inst.instruction |= (immbits >> 1);
}
/* Single-precision fixed-point conversion, 16-bit operand size.  */
static void
do_vfp_sp_conv_16 (void)
{
  encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sd);
  vfp_conv (16);
}
/* Double-precision fixed-point conversion, 16-bit operand size.  */
static void
do_vfp_dp_conv_16 (void)
{
  encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dd);
  vfp_conv (16);
}
/* Single-precision fixed-point conversion, 32-bit operand size.  */
static void
do_vfp_sp_conv_32 (void)
{
  encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sd);
  vfp_conv (32);
}
/* Double-precision fixed-point conversion, 32-bit operand size.  */
static void
do_vfp_dp_conv_32 (void)
{
  encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Dd);
  vfp_conv (32);
}
  9423. /* FPA instructions. Also in a logical order. */
  9424. static void
  9425. do_fpa_cmp (void)
  9426. {
  9427. inst.instruction |= inst.operands[0].reg << 16;
  9428. inst.instruction |= inst.operands[1].reg;
  9429. }
/* FPA load/store multiple (argument parse).  Operand 0 is the first
   register, operand 1 the register count (1-4) and operand 2 the
   address.  */
static void
do_fpa_ldmstm (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  /* The register count is encoded in the CP_T_X/CP_T_Y bits.  */
  switch (inst.operands[1].imm)
    {
    case 1: inst.instruction |= CP_T_X; break;
    case 2: inst.instruction |= CP_T_Y; break;
    case 3: inst.instruction |= CP_T_Y | CP_T_X; break;
    case 4: break;
    default: abort ();
    }

  if (inst.instruction & (PRE_INDEX | INDEX_UP))
    {
      /* The instruction specified "ea" or "fd", so we can only accept
	 [Rn]{!}.  The instruction does not really support stacking or
	 unstacking, so we have to emulate these by setting appropriate
	 bits and offsets.  */
      constraint (inst.relocs[0].exp.X_op != O_constant
		  || inst.relocs[0].exp.X_add_number != 0,
		  _("this instruction does not support indexing"));

      /* Synthesize an offset of 12 bytes per transferred register.  */
      if ((inst.instruction & PRE_INDEX) || inst.operands[2].writeback)
	inst.relocs[0].exp.X_add_number = 12 * inst.operands[1].imm;

      if (!(inst.instruction & INDEX_UP))
	inst.relocs[0].exp.X_add_number = -inst.relocs[0].exp.X_add_number;

      /* Downward post-increment becomes post-indexed addressing.  */
      if (!(inst.instruction & PRE_INDEX) && inst.operands[2].writeback)
	{
	  inst.operands[2].preind = 0;
	  inst.operands[2].postind = 1;
	}
    }

  encode_arm_cp_address (2, true, true, 0);
}
/* iWMMXt instructions: strictly in alphabetical order.  */
static void
do_iwmmxt_tandorc (void)
{
  /* TANDC/TORC only accept r15 as their destination register.  */
  constraint (inst.operands[0].reg != REG_PC, _("only r15 allowed here"));
}
  9469. static void
  9470. do_iwmmxt_textrc (void)
  9471. {
  9472. inst.instruction |= inst.operands[0].reg << 12;
  9473. inst.instruction |= inst.operands[1].imm;
  9474. }
  9475. static void
  9476. do_iwmmxt_textrm (void)
  9477. {
  9478. inst.instruction |= inst.operands[0].reg << 12;
  9479. inst.instruction |= inst.operands[1].reg << 16;
  9480. inst.instruction |= inst.operands[2].imm;
  9481. }
  9482. static void
  9483. do_iwmmxt_tinsr (void)
  9484. {
  9485. inst.instruction |= inst.operands[0].reg << 16;
  9486. inst.instruction |= inst.operands[1].reg << 12;
  9487. inst.instruction |= inst.operands[2].imm;
  9488. }
  9489. static void
  9490. do_iwmmxt_tmia (void)
  9491. {
  9492. inst.instruction |= inst.operands[0].reg << 5;
  9493. inst.instruction |= inst.operands[1].reg;
  9494. inst.instruction |= inst.operands[2].reg << 12;
  9495. }
  9496. static void
  9497. do_iwmmxt_waligni (void)
  9498. {
  9499. inst.instruction |= inst.operands[0].reg << 12;
  9500. inst.instruction |= inst.operands[1].reg << 16;
  9501. inst.instruction |= inst.operands[2].reg;
  9502. inst.instruction |= inst.operands[3].imm << 20;
  9503. }
  9504. static void
  9505. do_iwmmxt_wmerge (void)
  9506. {
  9507. inst.instruction |= inst.operands[0].reg << 12;
  9508. inst.instruction |= inst.operands[1].reg << 16;
  9509. inst.instruction |= inst.operands[2].reg;
  9510. inst.instruction |= inst.operands[3].imm << 21;
  9511. }
  9512. static void
  9513. do_iwmmxt_wmov (void)
  9514. {
  9515. /* WMOV rD, rN is an alias for WOR rD, rN, rN. */
  9516. inst.instruction |= inst.operands[0].reg << 12;
  9517. inst.instruction |= inst.operands[1].reg << 16;
  9518. inst.instruction |= inst.operands[1].reg;
  9519. }
  9520. static void
  9521. do_iwmmxt_wldstbh (void)
  9522. {
  9523. int reloc;
  9524. inst.instruction |= inst.operands[0].reg << 12;
  9525. if (thumb_mode)
  9526. reloc = BFD_RELOC_ARM_T32_CP_OFF_IMM_S2;
  9527. else
  9528. reloc = BFD_RELOC_ARM_CP_OFF_IMM_S2;
  9529. encode_arm_cp_address (1, true, false, reloc);
  9530. }
static void
do_iwmmxt_wldstw (void)
{
  /* RIWR_RIWC clears .isreg for a control register.  */
  if (!inst.operands[0].isreg)
    {
      /* The control-register form is unconditional (0xf condition).  */
      constraint (inst.cond != COND_ALWAYS, BAD_COND);
      inst.instruction |= 0xf0000000;
    }

  inst.instruction |= inst.operands[0].reg << 12;
  encode_arm_cp_address (1, true, true, 0);
}
static void
do_iwmmxt_wldstd (void)
{
  inst.instruction |= inst.operands[0].reg << 12;
  /* iWMMXt2 adds a register-offset addressing form, encoded here as
     an unconditional (0xf) instruction built by hand rather than via
     encode_arm_cp_address.  */
  if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_cext_iwmmxt2)
      && inst.operands[1].immisreg)
    {
      /* Clear the fields about to be re-encoded.  */
      inst.instruction &= ~0x1a000ff;
      inst.instruction |= (0xfU << 28);
      if (inst.operands[1].preind)
	inst.instruction |= PRE_INDEX;
      if (!inst.operands[1].negative)
	inst.instruction |= INDEX_UP;
      if (inst.operands[1].writeback)
	inst.instruction |= WRITE_BACK;
      inst.instruction |= inst.operands[1].reg << 16;
      inst.instruction |= inst.relocs[0].exp.X_add_number << 4;
      inst.instruction |= inst.operands[1].imm;
    }
  else
    encode_arm_cp_address (1, true, false, 0);
}
  9565. static void
  9566. do_iwmmxt_wshufh (void)
  9567. {
  9568. inst.instruction |= inst.operands[0].reg << 12;
  9569. inst.instruction |= inst.operands[1].reg << 16;
  9570. inst.instruction |= ((inst.operands[2].imm & 0xf0) << 16);
  9571. inst.instruction |= (inst.operands[2].imm & 0x0f);
  9572. }
  9573. static void
  9574. do_iwmmxt_wzero (void)
  9575. {
  9576. /* WZERO reg is an alias for WANDN reg, reg, reg. */
  9577. inst.instruction |= inst.operands[0].reg;
  9578. inst.instruction |= inst.operands[0].reg << 12;
  9579. inst.instruction |= inst.operands[0].reg << 16;
  9580. }
/* iWMMXt instruction whose third operand is either a register or
   (iWMMXt2 only) a 5-bit immediate.  */
static void
do_iwmmxt_wrwrwr_or_imm5 (void)
{
  if (inst.operands[2].isreg)
    do_rd_rn_rm ();
  else
    {
      /* The immediate form only exists in iWMMXt2.  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_cext_iwmmxt2),
		  _("immediate operand requires iWMMXt2"));
      do_rd_rn ();
      if (inst.operands[2].imm == 0)
	{
	  /* A zero shift cannot be encoded directly; rewrite it to an
	     equivalent instruction, keyed on the field in bits 20-23.  */
	  switch ((inst.instruction >> 20) & 0xf)
	    {
	    case 4:
	    case 5:
	    case 6:
	    case 7:
	      /* w...h wrd, wrn, #0 -> wrorh wrd, wrn, #16.  */
	      inst.operands[2].imm = 16;
	      inst.instruction = (inst.instruction & 0xff0fffff) | (0x7 << 20);
	      break;
	    case 8:
	    case 9:
	    case 10:
	    case 11:
	      /* w...w wrd, wrn, #0 -> wrorw wrd, wrn, #32.  */
	      inst.operands[2].imm = 32;
	      inst.instruction = (inst.instruction & 0xff0fffff) | (0xb << 20);
	      break;
	    case 12:
	    case 13:
	    case 14:
	    case 15:
	      {
		/* w...d wrd, wrn, #0 -> wor wrd, wrn, wrn.  */
		unsigned long wrn;
		wrn = (inst.instruction >> 16) & 0xf;
		inst.instruction &= 0xff0fff0f;
		inst.instruction |= wrn;
		/* Bail out here; the instruction is now assembled.  */
		return;
	      }
	    }
	}
      /* Map 32 -> 0, etc.  */
      inst.operands[2].imm &= 0x1f;
      /* Immediate bit 4 goes to insn bit 8, bits 0-3 to insn bits 0-3;
	 the condition field becomes 0xf (unconditional).  */
      inst.instruction |= (0xfU << 28) | ((inst.operands[2].imm & 0x10) << 4) | (inst.operands[2].imm & 0xf);
    }
}
  9630. /* Cirrus Maverick instructions. Simple 2-, 3-, and 4-register
  9631. operations first, then control, shift, and load/store. */
  9632. /* Insns like "foo X,Y,Z". */
  9633. static void
  9634. do_mav_triple (void)
  9635. {
  9636. inst.instruction |= inst.operands[0].reg << 16;
  9637. inst.instruction |= inst.operands[1].reg;
  9638. inst.instruction |= inst.operands[2].reg << 12;
  9639. }
  9640. /* Insns like "foo W,X,Y,Z".
  9641. where W=MVAX[0:3] and X,Y,Z=MVFX[0:15]. */
  9642. static void
  9643. do_mav_quad (void)
  9644. {
  9645. inst.instruction |= inst.operands[0].reg << 5;
  9646. inst.instruction |= inst.operands[1].reg << 12;
  9647. inst.instruction |= inst.operands[2].reg << 16;
  9648. inst.instruction |= inst.operands[3].reg;
  9649. }
/* cfmvsc32<cond> DSPSC,MVDX[15:0].  */
static void
do_mav_dspsc (void)
{
  /* Only the source register is encoded; DSPSC is implicit in the
     opcode.  */
  inst.instruction |= inst.operands[1].reg << 12;
}
  9656. /* Maverick shift immediate instructions.
  9657. cfsh32<cond> MVFX[15:0],MVFX[15:0],Shift[6:0].
  9658. cfsh64<cond> MVDX[15:0],MVDX[15:0],Shift[6:0]. */
  9659. static void
  9660. do_mav_shift (void)
  9661. {
  9662. int imm = inst.operands[2].imm;
  9663. inst.instruction |= inst.operands[0].reg << 12;
  9664. inst.instruction |= inst.operands[1].reg << 16;
  9665. /* Bits 0-3 of the insn should have bits 0-3 of the immediate.
  9666. Bits 5-7 of the insn should have bits 4-6 of the immediate.
  9667. Bit 4 should be 0. */
  9668. imm = (imm & 0xf) | ((imm & 0x70) << 1);
  9669. inst.instruction |= imm;
  9670. }
  9671. /* XScale instructions. Also sorted arithmetic before move. */
  9672. /* Xscale multiply-accumulate (argument parse)
  9673. MIAcc acc0,Rm,Rs
  9674. MIAPHcc acc0,Rm,Rs
  9675. MIAxycc acc0,Rm,Rs. */
  9676. static void
  9677. do_xsc_mia (void)
  9678. {
  9679. inst.instruction |= inst.operands[1].reg;
  9680. inst.instruction |= inst.operands[2].reg << 12;
  9681. }
  9682. /* Xscale move-accumulator-register (argument parse)
  9683. MARcc acc0,RdLo,RdHi. */
  9684. static void
  9685. do_xsc_mar (void)
  9686. {
  9687. inst.instruction |= inst.operands[1].reg << 12;
  9688. inst.instruction |= inst.operands[2].reg << 16;
  9689. }
  9690. /* Xscale move-register-accumulator (argument parse)
  9691. MRAcc RdLo,RdHi,acc0. */
  9692. static void
  9693. do_xsc_mra (void)
  9694. {
  9695. constraint (inst.operands[0].reg == inst.operands[1].reg, BAD_OVERLAP);
  9696. inst.instruction |= inst.operands[0].reg << 12;
  9697. inst.instruction |= inst.operands[1].reg << 16;
  9698. }
/* Encoding functions relevant only to Thumb.  */
/* inst.operands[i] is a shifted-register operand; encode
   it into inst.instruction in the format used by Thumb32.  */
static void
encode_thumb32_shifted_operand (int i)
{
  unsigned int value = inst.relocs[0].exp.X_add_number;
  unsigned int shift = inst.operands[i].shift_kind;

  /* Thumb32 data-processing operands only take an immediate shift.  */
  constraint (inst.operands[i].immisreg,
	      _("shift by register not allowed in thumb mode"));
  inst.instruction |= inst.operands[i].reg;
  if (shift == SHIFT_RRX)
    /* RRX is encoded as ROR with a zero amount.  */
    inst.instruction |= SHIFT_ROR << 4;
  else
    {
      constraint (inst.relocs[0].exp.X_op != O_constant,
		  _("expression too complex"));

      constraint (value > 32
		  || (value == 32 && (shift == SHIFT_LSL
				      || shift == SHIFT_ROR)),
		  _("shift expression is too large"));

      /* A zero amount is encoded as LSL #0; an amount of 32 (valid
	 only for LSR/ASR after the check above) is encoded as 0.  */
      if (value == 0)
	shift = SHIFT_LSL;
      else if (value == 32)
	value = 0;
      inst.instruction |= shift << 4;
      /* The shift amount is split across bits 12-14 and bits 6-7.  */
      inst.instruction |= (value & 0x1c) << 10;
      inst.instruction |= (value & 0x03) << 6;
    }
}
/* inst.operands[i] was set up by parse_address.  Encode it into a
   Thumb32 format load or store instruction.  Reject forms that cannot
   be used with such instructions.  If is_t is true, reject forms that
   cannot be used with a T instruction; if is_d is true, reject forms
   that cannot be used with a D instruction.  If it is a store insn,
   reject PC in Rn.  */
static void
encode_thumb32_addr_mode (int i, bool is_t, bool is_d)
{
  const bool is_pc = (inst.operands[i].reg == REG_PC);

  constraint (!inst.operands[i].isreg,
	      _("Instruction does not support =N addresses"));

  /* The base register always goes in bits 16-19.  */
  inst.instruction |= inst.operands[i].reg << 16;
  if (inst.operands[i].immisreg)
    {
      /* Register-offset form: [Rn, Rm{, LSL #shift}].  */
      constraint (is_pc, BAD_PC_ADDRESSING);
      constraint (is_t || is_d, _("cannot use register index with this instruction"));
      constraint (inst.operands[i].negative,
		  _("Thumb does not support negative register indexing"));
      constraint (inst.operands[i].postind,
		  _("Thumb does not support register post-indexing"));
      constraint (inst.operands[i].writeback,
		  _("Thumb does not support register indexing with writeback"));
      constraint (inst.operands[i].shifted && inst.operands[i].shift_kind != SHIFT_LSL,
		  _("Thumb supports only LSL in shifted register indexing"));

      inst.instruction |= inst.operands[i].imm;
      if (inst.operands[i].shifted)
	{
	  constraint (inst.relocs[0].exp.X_op != O_constant,
		      _("expression too complex"));
	  /* Only shift amounts 0-3 fit in the encoding.  */
	  constraint (inst.relocs[0].exp.X_add_number < 0
		      || inst.relocs[0].exp.X_add_number > 3,
		      _("shift out of range"));
	  inst.instruction |= inst.relocs[0].exp.X_add_number << 4;
	}
      inst.relocs[0].type = BFD_RELOC_UNUSED;
    }
  else if (inst.operands[i].preind)
    {
      /* Immediate pre-indexed form: [Rn, #imm]{!}.  */
      constraint (is_pc && inst.operands[i].writeback, BAD_PC_WRITEBACK);
      constraint (is_t && inst.operands[i].writeback,
		  _("cannot use writeback with this instruction"));
      /* PC-relative addressing only for loads (THUMB2_LOAD_BIT set).  */
      constraint (is_pc && ((inst.instruction & THUMB2_LOAD_BIT) == 0),
		  BAD_PC_ADDRESSING);

      if (is_d)
	{
	  inst.instruction |= 0x01000000;
	  if (inst.operands[i].writeback)
	    inst.instruction |= 0x00200000;
	}
      else
	{
	  inst.instruction |= 0x00000c00;
	  if (inst.operands[i].writeback)
	    inst.instruction |= 0x00000100;
	}
      inst.relocs[0].type = BFD_RELOC_ARM_T32_OFFSET_IMM;
    }
  else if (inst.operands[i].postind)
    {
      /* Post-indexed form: [Rn], #imm — writeback is implied.  */
      gas_assert (inst.operands[i].writeback);
      constraint (is_pc, _("cannot use post-indexing with PC-relative addressing"));
      constraint (is_t, _("cannot use post-indexing with this instruction"));

      if (is_d)
	inst.instruction |= 0x00200000;
      else
	inst.instruction |= 0x00000900;
      inst.relocs[0].type = BFD_RELOC_ARM_T32_OFFSET_IMM;
    }
  else /* unindexed - only for coprocessor */
    inst.error = _("instruction does not accept unindexed addressing");
}
/* Table of Thumb instructions which exist in 16- and/or 32-bit
   encodings (the latter only in post-V6T2 cores).  The index is the
   value used in the insns table below.  When there is more than one
   possible 16-bit encoding for the instruction, this table always
   holds variant (1).
   Also contains several pseudo-instructions used during relaxation.  */
/* Each entry is X(mnemonic-suffix, 16-bit opcode, 32-bit opcode);
   the X macro is redefined below to expand the table three ways.  */
#define T16_32_TAB \
  X(_adc,   4140, eb400000), \
  X(_adcs,  4140, eb500000), \
  X(_add,   1c00, eb000000), \
  X(_adds,  1c00, eb100000), \
  X(_addi,  0000, f1000000), \
  X(_addis, 0000, f1100000), \
  X(_add_pc,000f, f20f0000), \
  X(_add_sp,000d, f10d0000), \
  X(_adr,   000f, f20f0000), \
  X(_and,   4000, ea000000), \
  X(_ands,  4000, ea100000), \
  X(_asr,   1000, fa40f000), \
  X(_asrs,  1000, fa50f000), \
  X(_aut,   0000, f3af802d), \
  X(_autg,  0000, fb500f00), \
  X(_b,     e000, f000b000), \
  X(_bcond, d000, f0008000), \
  X(_bf,    0000, f040e001), \
  X(_bfcsel,0000, f000e001), \
  X(_bfx,   0000, f060e001), \
  X(_bfl,   0000, f000c001), \
  X(_bflx,  0000, f070e001), \
  X(_bic,   4380, ea200000), \
  X(_bics,  4380, ea300000), \
  X(_bxaut, 0000, fb500f10), \
  X(_cinc,  0000, ea509000), \
  X(_cinv,  0000, ea50a000), \
  X(_cmn,   42c0, eb100f00), \
  X(_cmp,   2800, ebb00f00), \
  X(_cneg,  0000, ea50b000), \
  X(_cpsie, b660, f3af8400), \
  X(_cpsid, b670, f3af8600), \
  X(_cpy,   4600, ea4f0000), \
  X(_csel,  0000, ea508000), \
  X(_cset,  0000, ea5f900f), \
  X(_csetm, 0000, ea5fa00f), \
  X(_csinc, 0000, ea509000), \
  X(_csinv, 0000, ea50a000), \
  X(_csneg, 0000, ea50b000), \
  X(_dec_sp,80dd, f1ad0d00), \
  X(_dls,   0000, f040e001), \
  X(_dlstp, 0000, f000e001), \
  X(_eor,   4040, ea800000), \
  X(_eors,  4040, ea900000), \
  X(_inc_sp,00dd, f10d0d00), \
  X(_lctp,  0000, f00fe001), \
  X(_ldmia, c800, e8900000), \
  X(_ldr,   6800, f8500000), \
  X(_ldrb,  7800, f8100000), \
  X(_ldrh,  8800, f8300000), \
  X(_ldrsb, 5600, f9100000), \
  X(_ldrsh, 5e00, f9300000), \
  X(_ldr_pc,4800, f85f0000), \
  X(_ldr_pc2,4800, f85f0000), \
  X(_ldr_sp,9800, f85d0000), \
  X(_le,    0000, f00fc001), \
  X(_letp,  0000, f01fc001), \
  X(_lsl,   0000, fa00f000), \
  X(_lsls,  0000, fa10f000), \
  X(_lsr,   0800, fa20f000), \
  X(_lsrs,  0800, fa30f000), \
  X(_mov,   2000, ea4f0000), \
  X(_movs,  2000, ea5f0000), \
  X(_mul,   4340, fb00f000), \
  X(_muls,  4340, ffffffff), /* no 32b muls */ \
  X(_mvn,   43c0, ea6f0000), \
  X(_mvns,  43c0, ea7f0000), \
  X(_neg,   4240, f1c00000), /* rsb #0 */ \
  X(_negs,  4240, f1d00000), /* rsbs #0 */ \
  X(_orr,   4300, ea400000), \
  X(_orrs,  4300, ea500000), \
  X(_pac,   0000, f3af801d), \
  X(_pacbti, 0000, f3af800d), \
  X(_pacg,  0000, fb60f000), \
  X(_pop,   bc00, e8bd0000), /* ldmia sp!,... */ \
  X(_push,  b400, e92d0000), /* stmdb sp!,... */ \
  X(_rev,   ba00, fa90f080), \
  X(_rev16, ba40, fa90f090), \
  X(_revsh, bac0, fa90f0b0), \
  X(_ror,   41c0, fa60f000), \
  X(_rors,  41c0, fa70f000), \
  X(_sbc,   4180, eb600000), \
  X(_sbcs,  4180, eb700000), \
  X(_stmia, c000, e8800000), \
  X(_str,   6000, f8400000), \
  X(_strb,  7000, f8000000), \
  X(_strh,  8000, f8200000), \
  X(_str_sp,9000, f84d0000), \
  X(_sub,   1e00, eba00000), \
  X(_subs,  1e00, ebb00000), \
  X(_subi,  8000, f1a00000), \
  X(_subis, 8000, f1b00000), \
  X(_sxtb,  b240, fa4ff080), \
  X(_sxth,  b200, fa0ff080), \
  X(_tst,   4200, ea100f00), \
  X(_uxtb,  b2c0, fa5ff080), \
  X(_uxth,  b280, fa1ff080), \
  X(_nop,   bf00, f3af8000), \
  X(_yield, bf10, f3af8001), \
  X(_wfe,   bf20, f3af8002), \
  X(_wfi,   bf30, f3af8003), \
  X(_wls,   0000, f040c001), \
  X(_wlstp, 0000, f000c001), \
  X(_sev,   bf40, f3af8004), \
  X(_sevl,  bf50, f3af8005), \
  X(_udf,   de00, f7f0a000)

/* To catch errors in encoding functions, the codes are all offset by
   0xF800, putting them in one of the 32-bit prefix ranges, ergo undefined
   as 16-bit instructions.  */
/* First expansion: the T_MNEM_* enum of table indices.  */
#define X(a,b,c) T_MNEM##a
enum t16_32_codes { T16_32_OFFSET = 0xF7FF, T16_32_TAB };
#undef X

/* Second expansion: T_MNEM_* code -> 16-bit opcode.  */
#define X(a,b,c) 0x##b
static const unsigned short thumb_op16[] = { T16_32_TAB };
#define THUMB_OP16(n) (thumb_op16[(n) - (T16_32_OFFSET + 1)])
#undef X

/* Third expansion: T_MNEM_* code -> 32-bit opcode.  */
#define X(a,b,c) 0x##c
static const unsigned int thumb_op32[] = { T16_32_TAB };
#define THUMB_OP32(n) (thumb_op32[(n) - (T16_32_OFFSET + 1)])
/* Bit 20 of the 32-bit opcode distinguishes the flag-setting
   variants (e.g. eb400000 adc vs eb500000 adcs above).  */
#define THUMB_SETS_FLAGS(n) (THUMB_OP32 (n) & 0x00100000)
#undef X
#undef T16_32_TAB
  9930. /* Thumb instruction encoders, in alphabetical order. */
  9931. /* ADDW or SUBW. */
  9932. static void
  9933. do_t_add_sub_w (void)
  9934. {
  9935. int Rd, Rn;
  9936. Rd = inst.operands[0].reg;
  9937. Rn = inst.operands[1].reg;
  9938. /* If Rn is REG_PC, this is ADR; if Rn is REG_SP, then this
  9939. is the SP-{plus,minus}-immediate form of the instruction. */
  9940. if (Rn == REG_SP)
  9941. constraint (Rd == REG_PC, BAD_PC);
  9942. else
  9943. reject_bad_reg (Rd);
  9944. inst.instruction |= (Rn << 16) | (Rd << 8);
  9945. inst.relocs[0].type = BFD_RELOC_ARM_T32_IMM12;
  9946. }
/* Parse an add or subtract instruction.  We get here with inst.instruction
   equaling any of THUMB_OPCODE_add, adds, sub, or subs.  */

static void
do_t_add_sub (void)
{
  int Rd, Rs, Rn;

  Rd = inst.operands[0].reg;
  Rs = (inst.operands[1].present
	? inst.operands[1].reg	/* Rd, Rs, foo */
	: inst.operands[0].reg);	/* Rd, foo -> Rd, Rd, foo */

  if (Rd == REG_PC)
    set_pred_insn_type_last ();

  if (unified_syntax)
    {
      bool flags;
      bool narrow;
      int opcode;

      /* Inside an IT block the 16-bit encodings do not set flags, so a
	 flag-setting form is only narrow outside one, and vice versa.  */
      flags = (inst.instruction == T_MNEM_adds
	       || inst.instruction == T_MNEM_subs);
      if (flags)
	narrow = !in_pred_block ();
      else
	narrow = in_pred_block ();
      if (!inst.operands[2].isreg)
	{
	  /* Immediate third operand.  */
	  int add;

	  if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8))
	    constraint (Rd == REG_SP && Rs != REG_SP, BAD_SP);

	  add = (inst.instruction == T_MNEM_add
		 || inst.instruction == T_MNEM_adds);
	  opcode = 0;
	  if (inst.size_req != 4)
	    {
	      /* Attempt to use a narrow opcode, with relaxation if
		 appropriate.  */
	      if (Rd == REG_SP && Rs == REG_SP && !flags)
		opcode = add ? T_MNEM_inc_sp : T_MNEM_dec_sp;
	      else if (Rd <= 7 && Rs == REG_SP && add && !flags)
		opcode = T_MNEM_add_sp;
	      else if (Rd <= 7 && Rs == REG_PC && add && !flags)
		opcode = T_MNEM_add_pc;
	      else if (Rd <= 7 && Rs <= 7 && narrow)
		{
		  if (flags)
		    opcode = add ? T_MNEM_addis : T_MNEM_subis;
		  else
		    opcode = add ? T_MNEM_addi : T_MNEM_subi;
		}
	      if (opcode)
		{
		  inst.instruction = THUMB_OP16(opcode);
		  inst.instruction |= (Rd << 4) | Rs;
		  /* The ALU_ABS group relocs must stay as-is; anything
		     else either gets the Thumb-1 ADD reloc (forced
		     16-bit) or is left for relaxation.  */
		  if (inst.relocs[0].type < BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC
		      || (inst.relocs[0].type
			  > BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC))
		    {
		      if (inst.size_req == 2)
			inst.relocs[0].type = BFD_RELOC_ARM_THUMB_ADD;
		      else
			inst.relax = opcode;
		    }
		}
	      else
		constraint (inst.size_req == 2, _("cannot honor width suffix"));
	    }
	  /* Fall through to the 32-bit form when it was requested
	     explicitly, or when no narrow opcode was found and a 16-bit
	     width was not forced.  */
	  if (inst.size_req == 4
	      || (inst.size_req != 2 && !opcode))
	    {
	      constraint ((inst.relocs[0].type
			   >= BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC)
			  && (inst.relocs[0].type
			      <= BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC) ,
			  THUMB1_RELOC_ONLY);
	      if (Rd == REG_PC)
		{
		  /* Only the exception-return idiom SUBS PC, LR, #const
		     writes the PC here.  */
		  constraint (add, BAD_PC);
		  constraint (Rs != REG_LR || inst.instruction != T_MNEM_subs,
			      _("only SUBS PC, LR, #const allowed"));
		  constraint (inst.relocs[0].exp.X_op != O_constant,
			      _("expression too complex"));
		  constraint (inst.relocs[0].exp.X_add_number < 0
			      || inst.relocs[0].exp.X_add_number > 0xff,
			      _("immediate value out of range"));
		  inst.instruction = T2_SUBS_PC_LR
				     | inst.relocs[0].exp.X_add_number;
		  inst.relocs[0].type = BFD_RELOC_UNUSED;
		  return;
		}
	      else if (Rs == REG_PC)
		{
		  /* Always use addw/subw.  */
		  inst.instruction = add ? 0xf20f0000 : 0xf2af0000;
		  inst.relocs[0].type = BFD_RELOC_ARM_T32_IMM12;
		}
	      else
		{
		  inst.instruction = THUMB_OP32 (inst.instruction);
		  inst.instruction = (inst.instruction & 0xe1ffffff)
				     | 0x10000000;
		  if (flags)
		    inst.relocs[0].type = BFD_RELOC_ARM_T32_IMMEDIATE;
		  else
		    inst.relocs[0].type = BFD_RELOC_ARM_T32_ADD_IMM;
		}
	      inst.instruction |= Rd << 8;
	      inst.instruction |= Rs << 16;
	    }
	}
      else
	{
	  /* Register third operand (possibly shifted).  */
	  unsigned int value = inst.relocs[0].exp.X_add_number;
	  unsigned int shift = inst.operands[2].shift_kind;

	  Rn = inst.operands[2].reg;
	  /* See if we can do this with a 16-bit instruction.  */
	  if (!inst.operands[2].shifted && inst.size_req != 4)
	    {
	      if (Rd > 7 || Rs > 7 || Rn > 7)
		narrow = false;

	      if (narrow)
		{
		  inst.instruction = ((inst.instruction == T_MNEM_adds
				       || inst.instruction == T_MNEM_add)
				      ? T_OPCODE_ADD_R3
				      : T_OPCODE_SUB_R3);
		  inst.instruction |= Rd | (Rs << 3) | (Rn << 6);
		  return;
		}

	      if (inst.instruction == T_MNEM_add && (Rd == Rs || Rd == Rn))
		{
		  /* Thumb-1 cores (except v6-M) require at least one high
		     register in a narrow non flag setting add.  */
		  if (Rd > 7 || Rn > 7
		      || ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v6t2)
		      || ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_msr))
		    {
		      /* Canonicalize so the non-destination source ends
			 up in the Rm field.  */
		      if (Rd == Rn)
			{
			  Rn = Rs;
			  Rs = Rd;
			}
		      inst.instruction = T_OPCODE_ADD_HI;
		      inst.instruction |= (Rd & 8) << 4;
		      inst.instruction |= (Rd & 7);
		      inst.instruction |= Rn << 3;
		      return;
		    }
		}
	    }

	  constraint (Rd == REG_PC, BAD_PC);
	  if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8))
	    constraint (Rd == REG_SP && Rs != REG_SP, BAD_SP);
	  constraint (Rs == REG_PC, BAD_PC);
	  reject_bad_reg (Rn);

	  /* If we get here, it can't be done in 16 bits.  */
	  constraint (inst.operands[2].shifted && inst.operands[2].immisreg,
		      _("shift must be constant"));
	  inst.instruction = THUMB_OP32 (inst.instruction);
	  inst.instruction |= Rd << 8;
	  inst.instruction |= Rs << 16;
	  constraint (Rd == REG_SP && Rs == REG_SP && value > 3,
		      _("shift value over 3 not allowed in thumb mode"));
	  constraint (Rd == REG_SP && Rs == REG_SP && shift != SHIFT_LSL,
		      _("only LSL shift allowed in thumb mode"));
	  encode_thumb32_shifted_operand (2);
	}
    }
  else
    {
      /* Divided (pre-UAL) syntax: only the classic Thumb-1 encodings.  */
      constraint (inst.instruction == T_MNEM_adds
		  || inst.instruction == T_MNEM_subs,
		  BAD_THUMB32);

      if (!inst.operands[2].isreg) /* Rd, Rs, #imm */
	{
	  constraint ((Rd > 7 && (Rd != REG_SP || Rs != REG_SP))
		      || (Rs > 7 && Rs != REG_SP && Rs != REG_PC),
		      BAD_HIREG);

	  inst.instruction = (inst.instruction == T_MNEM_add
			      ? 0x0000 : 0x8000);
	  inst.instruction |= (Rd << 4) | Rs;
	  inst.relocs[0].type = BFD_RELOC_ARM_THUMB_ADD;
	  return;
	}

      Rn = inst.operands[2].reg;
      constraint (inst.operands[2].shifted, _("unshifted register required"));

      /* We now have Rd, Rs, and Rn set to registers.  */
      if (Rd > 7 || Rs > 7 || Rn > 7)
	{
	  /* Can't do this for SUB.  */
	  constraint (inst.instruction == T_MNEM_sub, BAD_HIREG);
	  inst.instruction = T_OPCODE_ADD_HI;
	  inst.instruction |= (Rd & 8) << 4;
	  inst.instruction |= (Rd & 7);
	  if (Rs == Rd)
	    inst.instruction |= Rn << 3;
	  else if (Rn == Rd)
	    inst.instruction |= Rs << 3;
	  else
	    constraint (1, _("dest must overlap one source register"));
	}
      else
	{
	  inst.instruction = (inst.instruction == T_MNEM_add
			      ? T_OPCODE_ADD_R3 : T_OPCODE_SUB_R3);
	  inst.instruction |= Rd | (Rs << 3) | (Rn << 6);
	}
    }
}
/* ADR: load a PC-relative address into Rd.  Chooses between deferring
   to relaxation, the 32-bit encoding, and the 16-bit encoding.  */
static void
do_t_adr (void)
{
  unsigned Rd;

  Rd = inst.operands[0].reg;
  reject_bad_reg (Rd);

  if (unified_syntax && inst.size_req == 0 && Rd <= 7)
    {
      /* Defer to section relaxation.  */
      inst.relax = inst.instruction;
      inst.instruction = THUMB_OP16 (inst.instruction);
      inst.instruction |= Rd << 4;
    }
  else if (unified_syntax && inst.size_req != 2)
    {
      /* Generate a 32-bit opcode.  */
      inst.instruction = THUMB_OP32 (inst.instruction);
      inst.instruction |= Rd << 8;
      inst.relocs[0].type = BFD_RELOC_ARM_T32_ADD_PC12;
      inst.relocs[0].pc_rel = 1;
    }
  else
    {
      /* Generate a 16-bit opcode.  */
      inst.instruction = THUMB_OP16 (inst.instruction);
      inst.relocs[0].type = BFD_RELOC_ARM_THUMB_ADD;
      inst.relocs[0].exp.X_add_number -= 4; /* PC relative adjust.  */
      inst.relocs[0].pc_rel = 1;
      inst.instruction |= Rd << 4;
    }

  /* For a defined Thumb function, add one so the computed address has
     the Thumb state bit set.  */
  if (inst.relocs[0].exp.X_op == O_symbol
      && inst.relocs[0].exp.X_add_symbol != NULL
      && S_IS_DEFINED (inst.relocs[0].exp.X_add_symbol)
      && THUMB_IS_FUNC (inst.relocs[0].exp.X_add_symbol))
    inst.relocs[0].exp.X_add_number += 1;
}
/* Arithmetic instructions for which there is just one 16-bit
   instruction encoding, and it allows only two low registers.
   For maximal compatibility with ARM syntax, we allow three register
   operands even when Thumb-32 instructions are not available, as long
   as the first two are identical.  For instance, both "sbc r0,r1" and
   "sbc r0,r0,r1" are allowed.  */
static void
do_t_arit3 (void)
{
  int Rd, Rs, Rn;

  Rd = inst.operands[0].reg;
  Rs = (inst.operands[1].present
	? inst.operands[1].reg	/* Rd, Rs, foo */
	: inst.operands[0].reg);	/* Rd, foo -> Rd, Rd, foo */
  Rn = inst.operands[2].reg;

  reject_bad_reg (Rd);
  reject_bad_reg (Rs);
  if (inst.operands[2].isreg)
    reject_bad_reg (Rn);

  if (unified_syntax)
    {
      if (!inst.operands[2].isreg)
	{
	  /* For an immediate, we always generate a 32-bit opcode;
	     section relaxation will shrink it later if possible.  */
	  inst.instruction = THUMB_OP32 (inst.instruction);
	  inst.instruction = (inst.instruction & 0xe1ffffff) | 0x10000000;
	  inst.instruction |= Rd << 8;
	  inst.instruction |= Rs << 16;
	  inst.relocs[0].type = BFD_RELOC_ARM_T32_IMMEDIATE;
	}
      else
	{
	  bool narrow;

	  /* See if we can do this with a 16-bit instruction.  */
	  if (THUMB_SETS_FLAGS (inst.instruction))
	    narrow = !in_pred_block ();
	  else
	    narrow = in_pred_block ();

	  /* The narrow form needs three low registers, no shift, and no
	     explicit .w request.  */
	  if (Rd > 7 || Rn > 7 || Rs > 7)
	    narrow = false;
	  if (inst.operands[2].shifted)
	    narrow = false;
	  if (inst.size_req == 4)
	    narrow = false;

	  /* The 16-bit encoding is two-operand, so Rd must equal Rs.  */
	  if (narrow
	      && Rd == Rs)
	    {
	      inst.instruction = THUMB_OP16 (inst.instruction);
	      inst.instruction |= Rd;
	      inst.instruction |= Rn << 3;
	      return;
	    }

	  /* If we get here, it can't be done in 16 bits.  */
	  constraint (inst.operands[2].shifted
		      && inst.operands[2].immisreg,
		      _("shift must be constant"));
	  inst.instruction = THUMB_OP32 (inst.instruction);
	  inst.instruction |= Rd << 8;
	  inst.instruction |= Rs << 16;
	  encode_thumb32_shifted_operand (2);
	}
    }
  else
    {
      /* On its face this is a lie - the instruction does set the
	 flags.  However, the only supported mnemonic in this mode
	 says it doesn't.  */
      constraint (THUMB_SETS_FLAGS (inst.instruction), BAD_THUMB32);
      constraint (!inst.operands[2].isreg || inst.operands[2].shifted,
		  _("unshifted register required"));
      constraint (Rd > 7 || Rs > 7 || Rn > 7, BAD_HIREG);
      constraint (Rd != Rs,
		  _("dest and source1 must be the same register"));

      inst.instruction = THUMB_OP16 (inst.instruction);
      inst.instruction |= Rd;
      inst.instruction |= Rn << 3;
    }
}
/* Similarly, but for instructions where the arithmetic operation is
   commutative, so we can allow either of them to be different from
   the destination operand in a 16-bit instruction.  For instance, all
   three of "adc r0,r1", "adc r0,r0,r1", and "adc r0,r1,r0" are
   accepted.  */
static void
do_t_arit3c (void)
{
  int Rd, Rs, Rn;

  Rd = inst.operands[0].reg;
  Rs = (inst.operands[1].present
	? inst.operands[1].reg	/* Rd, Rs, foo */
	: inst.operands[0].reg);	/* Rd, foo -> Rd, Rd, foo */
  Rn = inst.operands[2].reg;

  reject_bad_reg (Rd);
  reject_bad_reg (Rs);
  if (inst.operands[2].isreg)
    reject_bad_reg (Rn);

  if (unified_syntax)
    {
      if (!inst.operands[2].isreg)
	{
	  /* For an immediate, we always generate a 32-bit opcode;
	     section relaxation will shrink it later if possible.  */
	  inst.instruction = THUMB_OP32 (inst.instruction);
	  inst.instruction = (inst.instruction & 0xe1ffffff) | 0x10000000;
	  inst.instruction |= Rd << 8;
	  inst.instruction |= Rs << 16;
	  inst.relocs[0].type = BFD_RELOC_ARM_T32_IMMEDIATE;
	}
      else
	{
	  bool narrow;

	  /* See if we can do this with a 16-bit instruction.  */
	  if (THUMB_SETS_FLAGS (inst.instruction))
	    narrow = !in_pred_block ();
	  else
	    narrow = in_pred_block ();

	  if (Rd > 7 || Rn > 7 || Rs > 7)
	    narrow = false;
	  if (inst.operands[2].shifted)
	    narrow = false;
	  if (inst.size_req == 4)
	    narrow = false;

	  if (narrow)
	    {
	      /* Commutative: the 16-bit two-operand form works whenever
		 the destination matches either source.  */
	      if (Rd == Rs)
		{
		  inst.instruction = THUMB_OP16 (inst.instruction);
		  inst.instruction |= Rd;
		  inst.instruction |= Rn << 3;
		  return;
		}
	      if (Rd == Rn)
		{
		  inst.instruction = THUMB_OP16 (inst.instruction);
		  inst.instruction |= Rd;
		  inst.instruction |= Rs << 3;
		  return;
		}
	    }

	  /* If we get here, it can't be done in 16 bits.  */
	  constraint (inst.operands[2].shifted
		      && inst.operands[2].immisreg,
		      _("shift must be constant"));
	  inst.instruction = THUMB_OP32 (inst.instruction);
	  inst.instruction |= Rd << 8;
	  inst.instruction |= Rs << 16;
	  encode_thumb32_shifted_operand (2);
	}
    }
  else
    {
      /* On its face this is a lie - the instruction does set the
	 flags.  However, the only supported mnemonic in this mode
	 says it doesn't.  */
      constraint (THUMB_SETS_FLAGS (inst.instruction), BAD_THUMB32);
      constraint (!inst.operands[2].isreg || inst.operands[2].shifted,
		  _("unshifted register required"));
      constraint (Rd > 7 || Rs > 7 || Rn > 7, BAD_HIREG);

      inst.instruction = THUMB_OP16 (inst.instruction);
      inst.instruction |= Rd;

      if (Rd == Rs)
	inst.instruction |= Rn << 3;
      else if (Rd == Rn)
	inst.instruction |= Rs << 3;
      else
	constraint (1, _("dest must overlap one source register"));
    }
}
  10359. static void
  10360. do_t_bfc (void)
  10361. {
  10362. unsigned Rd;
  10363. unsigned int msb = inst.operands[1].imm + inst.operands[2].imm;
  10364. constraint (msb > 32, _("bit-field extends past end of register"));
  10365. /* The instruction encoding stores the LSB and MSB,
  10366. not the LSB and width. */
  10367. Rd = inst.operands[0].reg;
  10368. reject_bad_reg (Rd);
  10369. inst.instruction |= Rd << 8;
  10370. inst.instruction |= (inst.operands[1].imm & 0x1c) << 10;
  10371. inst.instruction |= (inst.operands[1].imm & 0x03) << 6;
  10372. inst.instruction |= msb - 1;
  10373. }
  10374. static void
  10375. do_t_bfi (void)
  10376. {
  10377. int Rd, Rn;
  10378. unsigned int msb;
  10379. Rd = inst.operands[0].reg;
  10380. reject_bad_reg (Rd);
  10381. /* #0 in second position is alternative syntax for bfc, which is
  10382. the same instruction but with REG_PC in the Rm field. */
  10383. if (!inst.operands[1].isreg)
  10384. Rn = REG_PC;
  10385. else
  10386. {
  10387. Rn = inst.operands[1].reg;
  10388. reject_bad_reg (Rn);
  10389. }
  10390. msb = inst.operands[2].imm + inst.operands[3].imm;
  10391. constraint (msb > 32, _("bit-field extends past end of register"));
  10392. /* The instruction encoding stores the LSB and MSB,
  10393. not the LSB and width. */
  10394. inst.instruction |= Rd << 8;
  10395. inst.instruction |= Rn << 16;
  10396. inst.instruction |= (inst.operands[2].imm & 0x1c) << 10;
  10397. inst.instruction |= (inst.operands[2].imm & 0x03) << 6;
  10398. inst.instruction |= msb - 1;
  10399. }
  10400. static void
  10401. do_t_bfx (void)
  10402. {
  10403. unsigned Rd, Rn;
  10404. Rd = inst.operands[0].reg;
  10405. Rn = inst.operands[1].reg;
  10406. reject_bad_reg (Rd);
  10407. reject_bad_reg (Rn);
  10408. constraint (inst.operands[2].imm + inst.operands[3].imm > 32,
  10409. _("bit-field extends past end of register"));
  10410. inst.instruction |= Rd << 8;
  10411. inst.instruction |= Rn << 16;
  10412. inst.instruction |= (inst.operands[2].imm & 0x1c) << 10;
  10413. inst.instruction |= (inst.operands[2].imm & 0x03) << 6;
  10414. inst.instruction |= inst.operands[3].imm - 1;
  10415. }
  10416. /* ARM V5 Thumb BLX (argument parse)
  10417. BLX <target_addr> which is BLX(1)
  10418. BLX <Rm> which is BLX(2)
  10419. Unfortunately, there are two different opcodes for this mnemonic.
  10420. So, the insns[].value is not used, and the code here zaps values
  10421. into inst.instruction.
  10422. ??? How to take advantage of the additional two bits of displacement
  10423. available in Thumb32 mode? Need new relocation? */
  10424. static void
  10425. do_t_blx (void)
  10426. {
  10427. set_pred_insn_type_last ();
  10428. if (inst.operands[0].isreg)
  10429. {
  10430. constraint (inst.operands[0].reg == REG_PC, BAD_PC);
  10431. /* We have a register, so this is BLX(2). */
  10432. inst.instruction |= inst.operands[0].reg << 3;
  10433. }
  10434. else
  10435. {
  10436. /* No register. This must be BLX(1). */
  10437. inst.instruction = 0xf000e800;
  10438. encode_branch (BFD_RELOC_THUMB_PCREL_BLX);
  10439. }
  10440. }
/* B and Bcc: pick the 16- or 32-bit branch encoding and set up the
   matching PC-relative relocation.  */
static void
do_t_branch (void)
{
  int opcode;
  int cond;
  bfd_reloc_code_real_type reloc;

  cond = inst.cond;
  set_pred_insn_type (IF_INSIDE_IT_LAST_INSN);

  if (in_pred_block ())
    {
      /* Conditional branches inside IT blocks are encoded as unconditional
	 branches.  */
      cond = COND_ALWAYS;
    }
  else
    cond = inst.cond;

  if (cond != COND_ALWAYS)
    opcode = T_MNEM_bcond;
  else
    opcode = inst.instruction;

  /* Use the 32-bit form when explicitly requested, or when the target
     is already resolvable (reloc or constant) and 16 bits were not
     forced.  */
  if (unified_syntax
      && (inst.size_req == 4
	  || (inst.size_req != 2
	      && (inst.operands[0].hasreloc
		  || inst.relocs[0].exp.X_op == O_constant))))
    {
      inst.instruction = THUMB_OP32(opcode);
      if (cond == COND_ALWAYS)
	reloc = BFD_RELOC_THUMB_PCREL_BRANCH25;
      else
	{
	  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2),
		      _("selected architecture does not support "
			"wide conditional branch instruction"));

	  gas_assert (cond != 0xF);
	  inst.instruction |= cond << 22;
	  reloc = BFD_RELOC_THUMB_PCREL_BRANCH20;
	}
    }
  else
    {
      inst.instruction = THUMB_OP16(opcode);
      if (cond == COND_ALWAYS)
	reloc = BFD_RELOC_THUMB_PCREL_BRANCH12;
      else
	{
	  inst.instruction |= cond << 8;
	  reloc = BFD_RELOC_THUMB_PCREL_BRANCH9;
	}
      /* Allow section relaxation.  */
      if (unified_syntax && inst.size_req != 2)
	inst.relax = opcode;
    }
  inst.relocs[0].type = reloc;
  inst.relocs[0].pc_rel = 1;
}
/* Actually do the work for Thumb state bkpt and hlt.  The only difference
   between the two is the maximum immediate allowed - which is passed in
   RANGE.  */
static void
do_t_bkpt_hlt1 (int range)
{
  /* These instructions execute regardless of any surrounding IT
     condition, so a condition suffix is rejected.  */
  constraint (inst.cond != COND_ALWAYS,
	      _("instruction is always unconditional"));
  if (inst.operands[0].present)
    {
      /* The optional immediate goes straight into the low opcode bits.  */
      constraint (inst.operands[0].imm > range,
		  _("immediate value out of range"));
      inst.instruction |= inst.operands[0].imm;
    }
  set_pred_insn_type (NEUTRAL_IT_INSN);
}
/* Thumb HLT: immediate limited to 0..63.  */
static void
do_t_hlt (void)
{
  do_t_bkpt_hlt1 (63);
}
/* Thumb BKPT: immediate limited to 0..255.  */
static void
do_t_bkpt (void)
{
  do_t_bkpt_hlt1 (255);
}
/* Thumb branch-with-link family (BFD_RELOC_THUMB_PCREL_BRANCH23).  */
static void
do_t_branch23 (void)
{
  set_pred_insn_type_last ();
  encode_branch (BFD_RELOC_THUMB_PCREL_BRANCH23);

  /* md_apply_fix blows up with 'bl foo(PLT)' where foo is defined in
     this file.  We used to simply ignore the PLT reloc type here --
     the branch encoding is now needed to deal with TLSCALL relocs.
     So if we see a PLT reloc now, put it back to how it used to be to
     keep the preexisting behaviour.  */
  if (inst.relocs[0].type == BFD_RELOC_ARM_PLT32)
    inst.relocs[0].type = BFD_RELOC_THUMB_PCREL_BRANCH23;

#if defined(OBJ_COFF)
  /* If the destination of the branch is a defined symbol which does not have
     the THUMB_FUNC attribute, then we must be calling a function which has
     the (interfacearm) attribute.  We look for the Thumb entry point to that
     function and change the branch to refer to that function instead.	*/
  if (	 inst.relocs[0].exp.X_op == O_symbol
      && inst.relocs[0].exp.X_add_symbol != NULL
      && S_IS_DEFINED (inst.relocs[0].exp.X_add_symbol)
      && ! THUMB_IS_FUNC (inst.relocs[0].exp.X_add_symbol))
    inst.relocs[0].exp.X_add_symbol
      = find_real_start (inst.relocs[0].exp.X_add_symbol);
#endif
}
/* BX: the register operand goes in bits 6:3 of the 16-bit opcode.  */
static void
do_t_bx (void)
{
  set_pred_insn_type_last ();
  inst.instruction |= inst.operands[0].reg << 3;
  /* ??? FIXME: Should add a hacky reloc here if reg is REG_PC.  The reloc
     should cause the alignment to be checked once it is known.	 This is
     because BX PC only works if the instruction is word aligned.  */
}
  10557. static void
  10558. do_t_bxj (void)
  10559. {
  10560. int Rm;
  10561. set_pred_insn_type_last ();
  10562. Rm = inst.operands[0].reg;
  10563. reject_bad_reg (Rm);
  10564. inst.instruction |= Rm << 16;
  10565. }
  10566. static void
  10567. do_t_clz (void)
  10568. {
  10569. unsigned Rd;
  10570. unsigned Rm;
  10571. Rd = inst.operands[0].reg;
  10572. Rm = inst.operands[1].reg;
  10573. reject_bad_reg (Rd);
  10574. reject_bad_reg (Rm);
  10575. inst.instruction |= Rd << 8;
  10576. inst.instruction |= Rm << 16;
  10577. inst.instruction |= Rm;
  10578. }
/* For the Armv8.1-M conditional instructions.  */
static void
do_t_cond (void)
{
  unsigned Rd, Rn, Rm;
  signed int cond;

  constraint (inst.cond != COND_ALWAYS, BAD_COND);

  Rd = inst.operands[0].reg;
  switch (inst.instruction)
    {
      /* Full four-operand forms: Rd, Rn, Rm, cond.  */
      case T_MNEM_csinc:
      case T_MNEM_csinv:
      case T_MNEM_csneg:
      case T_MNEM_csel:
	Rn = inst.operands[1].reg;
	Rm = inst.operands[2].reg;
	cond = inst.operands[3].imm;
	constraint (Rn == REG_SP, BAD_SP);
	constraint (Rm == REG_SP, BAD_SP);
	break;

      /* Single-source aliases: Rm duplicates Rn and the condition is
	 inverted.  */
      case T_MNEM_cinc:
      case T_MNEM_cinv:
      case T_MNEM_cneg:
	Rn = inst.operands[1].reg;
	cond = inst.operands[2].imm;
	/* Invert the last bit to invert the cond.  */
	cond = TOGGLE_BIT (cond, 0);
	constraint (Rn == REG_SP, BAD_SP);
	Rm = Rn;
	break;

      /* Sourceless aliases: both register fields encode PC and the
	 condition is inverted.  */
      case T_MNEM_csetm:
      case T_MNEM_cset:
	cond = inst.operands[1].imm;
	/* Invert the last bit to invert the cond.  */
	cond = TOGGLE_BIT (cond, 0);
	Rn = REG_PC;
	Rm = REG_PC;
	break;

      default: abort ();
    }

  set_pred_insn_type (OUTSIDE_PRED_INSN);
  inst.instruction = THUMB_OP32 (inst.instruction);
  inst.instruction |= Rd << 8;
  inst.instruction |= Rn << 16;
  inst.instruction |= Rm;
  inst.instruction |= cond << 4;
}
/* CSDB: no operand fields; only predication bookkeeping needed.  */
static void
do_t_csdb (void)
{
  set_pred_insn_type (OUTSIDE_PRED_INSN);
}
/* CPS: OR the immediate operand straight into the opcode.  */
static void
do_t_cps (void)
{
  set_pred_insn_type (OUTSIDE_PRED_INSN);
  inst.instruction |= inst.operands[0].imm;
}
/* CPSIE/CPSID, optionally with a mode-change operand.  */
static void
do_t_cpsi (void)
{
  set_pred_insn_type (OUTSIDE_PRED_INSN);
  if (unified_syntax
      && (inst.operands[1].present || inst.size_req == 4)
      && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6_notm))
    {
      /* Re-encode as the 32-bit form, carrying the imod bits over from
	 the 16-bit template.  */
      unsigned int imod = (inst.instruction & 0x0030) >> 4;
      inst.instruction = 0xf3af8000;
      inst.instruction |= imod << 9;
      inst.instruction |= inst.operands[0].imm << 5;
      if (inst.operands[1].present)
	inst.instruction |= 0x100 | inst.operands[1].imm;
    }
  else
    {
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v1)
		  && (inst.operands[0].imm & 4),
		  _("selected processor does not support 'A' form "
		    "of this instruction"));
      constraint (inst.operands[1].present || inst.size_req == 4,
		  _("Thumb does not support the 2-argument "
		    "form of this instruction"));
      inst.instruction |= inst.operands[0].imm;
    }
}
  10664. /* THUMB CPY instruction (argument parse). */
  10665. static void
  10666. do_t_cpy (void)
  10667. {
  10668. if (inst.size_req == 4)
  10669. {
  10670. inst.instruction = THUMB_OP32 (T_MNEM_mov);
  10671. inst.instruction |= inst.operands[0].reg << 8;
  10672. inst.instruction |= inst.operands[1].reg;
  10673. }
  10674. else
  10675. {
  10676. inst.instruction |= (inst.operands[0].reg & 0x8) << 4;
  10677. inst.instruction |= (inst.operands[0].reg & 0x7);
  10678. inst.instruction |= inst.operands[1].reg << 3;
  10679. }
  10680. }
  10681. static void
  10682. do_t_cbz (void)
  10683. {
  10684. set_pred_insn_type (OUTSIDE_PRED_INSN);
  10685. constraint (inst.operands[0].reg > 7, BAD_HIREG);
  10686. inst.instruction |= inst.operands[0].reg;
  10687. inst.relocs[0].pc_rel = 1;
  10688. inst.relocs[0].type = BFD_RELOC_THUMB_PCREL_BRANCH7;
  10689. }
/* DBG hint: OR the option immediate into the opcode.  */
static void
do_t_dbg (void)
{
  inst.instruction |= inst.operands[0].imm;
}
  10695. static void
  10696. do_t_div (void)
  10697. {
  10698. unsigned Rd, Rn, Rm;
  10699. Rd = inst.operands[0].reg;
  10700. Rn = (inst.operands[1].present
  10701. ? inst.operands[1].reg : Rd);
  10702. Rm = inst.operands[2].reg;
  10703. reject_bad_reg (Rd);
  10704. reject_bad_reg (Rn);
  10705. reject_bad_reg (Rm);
  10706. inst.instruction |= Rd << 8;
  10707. inst.instruction |= Rn << 16;
  10708. inst.instruction |= Rm;
  10709. }
  10710. static void
  10711. do_t_hint (void)
  10712. {
  10713. if (unified_syntax && inst.size_req == 4)
  10714. inst.instruction = THUMB_OP32 (inst.instruction);
  10715. else
  10716. inst.instruction = THUMB_OP16 (inst.instruction);
  10717. }
/* IT: record predication state for the following block.  */
static void
do_t_it (void)
{
  unsigned int cond = inst.operands[0].imm;

  set_pred_insn_type (IT_INSN);
  now_pred.mask = (inst.instruction & 0xf) | 0x10;
  now_pred.cc = cond;
  now_pred.warn_deprecated = false;
  now_pred.type = SCALAR_PRED;

  /* If the condition is a negative condition, invert the mask.  */
  if ((cond & 0x1) == 0x0)
    {
      unsigned int mask = inst.instruction & 0x000f;

      /* The position of the lowest set mask bit determines the block
	 length; the then/else bits above it must be flipped when the
	 base condition is inverted.  */
      if ((mask & 0x7) == 0)
	{
	  /* No conversion needed.  */
	  now_pred.block_length = 1;
	}
      else if ((mask & 0x3) == 0)
	{
	  mask ^= 0x8;
	  now_pred.block_length = 2;
	}
      else if ((mask & 0x1) == 0)
	{
	  mask ^= 0xC;
	  now_pred.block_length = 3;
	}
      else
	{
	  mask ^= 0xE;
	  now_pred.block_length = 4;
	}
      inst.instruction &= 0xfff0;
      inst.instruction |= mask;
    }

  inst.instruction |= cond << 4;
}
/* Helper function used for both push/pop and ldm/stm.  */
static void
encode_thumb2_multi (bool do_io, int base, unsigned mask,
		     bool writeback)
{
  bool load, store;

  gas_assert (base != -1 || !do_io);
  /* Bit 20 of the opcode distinguishes load from store.  */
  load = do_io && ((inst.instruction & (1 << 20)) != 0);
  store = do_io && !load;
  if (mask & (1 << 13))
    inst.error =  _("SP not allowed in register list");

  if (do_io && (mask & (1 << base)) != 0
      && writeback)
    inst.error = _("having the base register in the register list when "
		   "using write back is UNPREDICTABLE");

  if (load)
    {
      if (mask & (1 << 15))
	{
	  if (mask & (1 << 14))
	    inst.error = _("LR and PC should not both be in register list");
	  else
	    /* Loading the PC ends any IT block.  */
	    set_pred_insn_type_last ();
	}
    }
  else if (store)
    {
      if (mask & (1 << 15))
	inst.error = _("PC not allowed in register list");
    }

  /* (mask & (mask - 1)) == 0 means at most one register in the list.  */
  if (do_io && ((mask & (mask - 1)) == 0))
    {
      /* Single register transfers implemented as str/ldr.  */
      if (writeback)
	{
	  if (inst.instruction & (1 << 23))
	    inst.instruction = 0x00000b04; /* ia! -> [base], #4 */
	  else
	    inst.instruction = 0x00000d04; /* db! -> [base, #-4]! */
	}
      else
	{
	  if (inst.instruction & (1 << 23))
	    inst.instruction = 0x00800000; /* ia -> [base] */
	  else
	    inst.instruction = 0x00000c04; /* db -> [base, #-4] */
	}

      inst.instruction |= 0xf8400000;
      if (load)
	inst.instruction |= 0x00100000;

      /* Convert the singleton mask into a register number and place it
	 in the Rt field (bits 15:12) of the str/ldr encoding.  */
      mask = ffs (mask) - 1;
      mask <<= 12;
    }
  else if (writeback)
    inst.instruction |= WRITE_BACK;

  inst.instruction |= mask;
  if (do_io)
    inst.instruction |= base << 16;
}
static void
do_t_ldmstm (void)
{
  /* Encode Thumb LDM/STM (load/store multiple).
     Operand 0: base register, with its writeback flag.
     Operand 1: register list as a bitmask in .imm.
     In unified syntax this tries the 16-bit encodings first -- plain
     LDMIA/STMIA, PUSH/POP for an SP base, or a single-register LDR/STR
     when only one bit is set -- and otherwise emits the 32-bit Thumb-2
     form via encode_thumb2_multi.  */

  /* This really doesn't seem worth it.  */
  constraint (inst.relocs[0].type != BFD_RELOC_UNUSED,
	      _("expression too complex"));
  constraint (inst.operands[1].writeback,
	      _("Thumb load/store multiple does not support {reglist}^"));

  if (unified_syntax)
    {
      bool narrow;
      unsigned mask;

      narrow = false;
      /* See if we can use a 16-bit instruction.  */
      if (inst.instruction < 0xffff /* not ldmdb/stmdb */
	  && inst.size_req != 4
	  && !(inst.operands[1].imm & ~0xff))
	{
	  mask = 1 << inst.operands[0].reg;

	  if (inst.operands[0].reg <= 7)
	    {
	      /* 16-bit STMIA requires writeback; 16-bit LDMIA has implied
		 writeback exactly when the base is NOT in the list.  */
	      if (inst.instruction == T_MNEM_stmia
		  ? inst.operands[0].writeback
		  : (inst.operands[0].writeback
		     == !(inst.operands[1].imm & mask)))
		{
		  /* Storing the base when it is in the list but not the
		     lowest register stores an UNKNOWN value.  */
		  if (inst.instruction == T_MNEM_stmia
		      && (inst.operands[1].imm & mask)
		      && (inst.operands[1].imm & (mask - 1)))
		    as_warn (_("value stored for r%d is UNKNOWN"),
			     inst.operands[0].reg);

		  inst.instruction = THUMB_OP16 (inst.instruction);
		  inst.instruction |= inst.operands[0].reg << 8;
		  inst.instruction |= inst.operands[1].imm;
		  narrow = true;
		}
	      else if ((inst.operands[1].imm & (inst.operands[1].imm-1)) == 0)
		{
		  /* This means 1 register in reg list one of 3 situations:
		     1. Instruction is stmia, but without writeback.
		     2. lmdia without writeback, but with Rn not in
			reglist.
		     3. ldmia with writeback, but with Rn in reglist.
		     Case 3 is UNPREDICTABLE behaviour, so we handle
		     case 1 and 2 which can be converted into a 16-bit
		     str or ldr.  The SP cases are handled below.  */
		  unsigned long opcode;

		  /* First, record an error for Case 3.  */
		  if (inst.operands[1].imm & mask
		      && inst.operands[0].writeback)
		    inst.error =
			_("having the base register in the register list when "
			  "using write back is UNPREDICTABLE");

		  opcode = (inst.instruction == T_MNEM_stmia ? T_MNEM_str
			    : T_MNEM_ldr);
		  inst.instruction = THUMB_OP16 (opcode);
		  inst.instruction |= inst.operands[0].reg << 3;
		  /* ffs gives the (1-based) position of the single list bit,
		     i.e. the transfer register number.  */
		  inst.instruction |= (ffs (inst.operands[1].imm)-1);
		  narrow = true;
		}
	    }
	  else if (inst.operands[0] .reg == REG_SP)
	    {
	      if (inst.operands[0].writeback)
		{
		  /* SP base with writeback: use PUSH/POP.  */
		  inst.instruction =
			THUMB_OP16 (inst.instruction == T_MNEM_stmia
				    ? T_MNEM_push : T_MNEM_pop);
		  inst.instruction |= inst.operands[1].imm;
		  narrow = true;
		}
	      else if ((inst.operands[1].imm & (inst.operands[1].imm-1)) == 0)
		{
		  /* Single register, SP base, no writeback: SP-relative
		     LDR/STR with a zero offset.  */
		  inst.instruction =
			THUMB_OP16 (inst.instruction == T_MNEM_stmia
				    ? T_MNEM_str_sp : T_MNEM_ldr_sp);
		  inst.instruction |= ((ffs (inst.operands[1].imm)-1) << 8);
		  narrow = true;
		}
	    }
	}

      if (!narrow)
	{
	  /* Fall back to the 32-bit Thumb-2 encoding.  */
	  if (inst.instruction < 0xffff)
	    inst.instruction = THUMB_OP32 (inst.instruction);

	  encode_thumb2_multi (true /* do_io */, inst.operands[0].reg,
			       inst.operands[1].imm,
			       inst.operands[0].writeback);
	}
    }
  else
    {
      /* Pre-unified syntax: only 16-bit LDMIA/STMIA with low registers.  */
      constraint (inst.operands[0].reg > 7
		  || (inst.operands[1].imm & ~0xff), BAD_HIREG);
      constraint (inst.instruction != T_MNEM_ldmia
		  && inst.instruction != T_MNEM_stmia,
		  _("Thumb-2 instruction only valid in unified syntax"));

      if (inst.instruction == T_MNEM_stmia)
	{
	  if (!inst.operands[0].writeback)
	    as_warn (_("this instruction will write back the base register"));
	  if ((inst.operands[1].imm & (1 << inst.operands[0].reg))
	      && (inst.operands[1].imm & ((1 << inst.operands[0].reg) - 1)))
	    as_warn (_("value stored for r%d is UNKNOWN"),
		     inst.operands[0].reg);
	}
      else
	{
	  if (!inst.operands[0].writeback
	      && !(inst.operands[1].imm & (1 << inst.operands[0].reg)))
	    as_warn (_("this instruction will write back the base register"));
	  else if (inst.operands[0].writeback
		   && (inst.operands[1].imm & (1 << inst.operands[0].reg)))
	    as_warn (_("this instruction will not write back the base register"));
	}

      inst.instruction = THUMB_OP16 (inst.instruction);
      inst.instruction |= inst.operands[0].reg << 8;
      inst.instruction |= inst.operands[1].imm;
    }
}
  10935. static void
  10936. do_t_ldrex (void)
  10937. {
  10938. constraint (!inst.operands[1].isreg || !inst.operands[1].preind
  10939. || inst.operands[1].postind || inst.operands[1].writeback
  10940. || inst.operands[1].immisreg || inst.operands[1].shifted
  10941. || inst.operands[1].negative,
  10942. BAD_ADDR_MODE);
  10943. constraint ((inst.operands[1].reg == REG_PC), BAD_PC);
  10944. inst.instruction |= inst.operands[0].reg << 12;
  10945. inst.instruction |= inst.operands[1].reg << 16;
  10946. inst.relocs[0].type = BFD_RELOC_ARM_T32_OFFSET_U8;
  10947. }
  10948. static void
  10949. do_t_ldrexd (void)
  10950. {
  10951. if (!inst.operands[1].present)
  10952. {
  10953. constraint (inst.operands[0].reg == REG_LR,
  10954. _("r14 not allowed as first register "
  10955. "when second register is omitted"));
  10956. inst.operands[1].reg = inst.operands[0].reg + 1;
  10957. }
  10958. constraint (inst.operands[0].reg == inst.operands[1].reg,
  10959. BAD_OVERLAP);
  10960. inst.instruction |= inst.operands[0].reg << 12;
  10961. inst.instruction |= inst.operands[1].reg << 8;
  10962. inst.instruction |= inst.operands[2].reg << 16;
  10963. }
static void
do_t_ldst (void)
{
  /* Encode Thumb single-register load/store (LDR/STR and the byte,
     halfword and signed variants).
     Operand 0: transfer register Rt.
     Operand 1: addressing mode (or a bare expression -> literal pool).
     Prefers a 16-bit encoding when one exists, recording a relaxable
     opcode when the size was not forced, else emits 32-bit Thumb-2.  */
  unsigned long opcode;
  int Rn;

  /* A load writing PC ends an IT block.  */
  if (inst.operands[0].isreg
      && !inst.operands[0].preind
      && inst.operands[0].reg == REG_PC)
    set_pred_insn_type_last ();

  opcode = inst.instruction;
  if (unified_syntax)
    {
      if (!inst.operands[1].isreg)
	{
	  /* ldr Rt, =expr -- try to turn it into a move or a
	     literal-pool load.  */
	  if (opcode <= 0xffff)
	    inst.instruction = THUMB_OP32 (opcode);
	  if (move_or_literal_pool (0, CONST_THUMB, /*mode_3=*/false))
	    return;
	}
      if (inst.operands[1].isreg
	  && !inst.operands[1].writeback
	  && !inst.operands[1].shifted && !inst.operands[1].postind
	  && !inst.operands[1].negative && inst.operands[0].reg <= 7
	  && opcode <= 0xffff
	  && inst.size_req != 4)
	{
	  /* Insn may have a 16-bit form.  */
	  Rn = inst.operands[1].reg;
	  if (inst.operands[1].immisreg)
	    {
	      inst.instruction = THUMB_OP16 (opcode);
	      /* [Rn, Rik] */
	      if (Rn <= 7 && inst.operands[1].imm <= 7)
		goto op16;
	      else if (opcode != T_MNEM_ldr && opcode != T_MNEM_str)
		reject_bad_reg (inst.operands[1].imm);
	    }
	  else if ((Rn <= 7 && opcode != T_MNEM_ldrsh
		    && opcode != T_MNEM_ldrsb)
		   || ((Rn == REG_PC || Rn == REG_SP) && opcode == T_MNEM_ldr)
		   || (Rn == REG_SP && opcode == T_MNEM_str))
	    {
	      /* [Rn, #const] */
	      if (Rn > 7)
		{
		  /* PC- or SP-relative forms use dedicated opcodes.  */
		  if (Rn == REG_PC)
		    {
		      if (inst.relocs[0].pc_rel)
			opcode = T_MNEM_ldr_pc2;
		      else
			opcode = T_MNEM_ldr_pc;
		    }
		  else
		    {
		      if (opcode == T_MNEM_ldr)
			opcode = T_MNEM_ldr_sp;
		      else
			opcode = T_MNEM_str_sp;
		    }
		  inst.instruction = inst.operands[0].reg << 8;
		}
	      else
		{
		  inst.instruction = inst.operands[0].reg;
		  inst.instruction |= inst.operands[1].reg << 3;
		}
	      inst.instruction |= THUMB_OP16 (opcode);
	      if (inst.size_req == 2)
		inst.relocs[0].type = BFD_RELOC_ARM_THUMB_OFFSET;
	      else
		/* No forced size: let relaxation pick 16 vs 32 bit.  */
		inst.relax = opcode;
	      return;
	    }
	}
      /* Definitely a 32-bit variant.  */

      /* Warning for Erratum 752419.  */
      if (opcode == T_MNEM_ldr
	  && inst.operands[0].reg == REG_SP
	  && inst.operands[1].writeback == 1
	  && !inst.operands[1].immisreg)
	{
	  if (no_cpu_selected ()
	      || (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v7)
		  && !ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v7a)
		  && !ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v7r)))
	    as_warn (_("This instruction may be unpredictable "
		       "if executed on M-profile cores "
		       "with interrupts enabled."));
	}

      /* Do some validations regarding addressing modes.  */
      if (inst.operands[1].immisreg)
	reject_bad_reg (inst.operands[1].imm);

      constraint (inst.operands[1].writeback == 1
		  && inst.operands[0].reg == inst.operands[1].reg,
		  BAD_OVERLAP);

      inst.instruction = THUMB_OP32 (opcode);
      inst.instruction |= inst.operands[0].reg << 12;
      encode_thumb32_addr_mode (1, /*is_t=*/false, /*is_d=*/false);
      check_ldr_r15_aligned ();
      return;
    }

  /* Pre-unified (Thumb-1 only) syntax from here on.  */
  constraint (inst.operands[0].reg > 7, BAD_HIREG);

  if (inst.instruction == T_MNEM_ldrsh || inst.instruction == T_MNEM_ldrsb)
    {
      /* Only [Rn,Rm] is acceptable.  */
      constraint (inst.operands[1].reg > 7 || inst.operands[1].imm > 7, BAD_HIREG);
      constraint (!inst.operands[1].isreg || !inst.operands[1].immisreg
		  || inst.operands[1].postind || inst.operands[1].shifted
		  || inst.operands[1].negative,
		  _("Thumb does not support this addressing mode"));
      inst.instruction = THUMB_OP16 (inst.instruction);
      goto op16;
    }

  inst.instruction = THUMB_OP16 (inst.instruction);
  if (!inst.operands[1].isreg)
    if (move_or_literal_pool (0, CONST_THUMB, /*mode_3=*/false))
      return;

  constraint (!inst.operands[1].preind
	      || inst.operands[1].shifted
	      || inst.operands[1].writeback,
	      _("Thumb does not support this addressing mode"));
  if (inst.operands[1].reg == REG_PC || inst.operands[1].reg == REG_SP)
    {
      /* PC/SP-relative only exist as word loads/stores.  */
      constraint (inst.instruction & 0x0600,
		  _("byte or halfword not valid for base register"));
      constraint (inst.operands[1].reg == REG_PC
		  && !(inst.instruction & THUMB_LOAD_BIT),
		  _("r15 based store not allowed"));
      constraint (inst.operands[1].immisreg,
		  _("invalid base register for register offset"));

      if (inst.operands[1].reg == REG_PC)
	inst.instruction = T_OPCODE_LDR_PC;
      else if (inst.instruction & THUMB_LOAD_BIT)
	inst.instruction = T_OPCODE_LDR_SP;
      else
	inst.instruction = T_OPCODE_STR_SP;

      inst.instruction |= inst.operands[0].reg << 8;
      inst.relocs[0].type = BFD_RELOC_ARM_THUMB_OFFSET;
      return;
    }

  constraint (inst.operands[1].reg > 7, BAD_HIREG);
  if (!inst.operands[1].immisreg)
    {
      /* Immediate offset.  */
      inst.instruction |= inst.operands[0].reg;
      inst.instruction |= inst.operands[1].reg << 3;
      inst.relocs[0].type = BFD_RELOC_ARM_THUMB_OFFSET;
      return;
    }

  /* Register offset.  */
  constraint (inst.operands[1].imm > 7, BAD_HIREG);
  constraint (inst.operands[1].negative,
	      _("Thumb does not support this addressing mode"));

 op16:
  /* Convert the immediate-offset opcode into its register-offset twin;
     ldrsb/ldrsh are already register-offset forms.  */
  switch (inst.instruction)
    {
    case T_OPCODE_STR_IW: inst.instruction = T_OPCODE_STR_RW; break;
    case T_OPCODE_STR_IH: inst.instruction = T_OPCODE_STR_RH; break;
    case T_OPCODE_STR_IB: inst.instruction = T_OPCODE_STR_RB; break;
    case T_OPCODE_LDR_IW: inst.instruction = T_OPCODE_LDR_RW; break;
    case T_OPCODE_LDR_IH: inst.instruction = T_OPCODE_LDR_RH; break;
    case T_OPCODE_LDR_IB: inst.instruction = T_OPCODE_LDR_RB; break;
    case 0x5600 /* ldrsb */:
    case 0x5e00 /* ldrsh */: break;
    default: abort ();
    }

  inst.instruction |= inst.operands[0].reg;
  inst.instruction |= inst.operands[1].reg << 3;
  inst.instruction |= inst.operands[1].imm << 6;
}
  11134. static void
  11135. do_t_ldstd (void)
  11136. {
  11137. if (!inst.operands[1].present)
  11138. {
  11139. inst.operands[1].reg = inst.operands[0].reg + 1;
  11140. constraint (inst.operands[0].reg == REG_LR,
  11141. _("r14 not allowed here"));
  11142. constraint (inst.operands[0].reg == REG_R12,
  11143. _("r12 not allowed here"));
  11144. }
  11145. if (inst.operands[2].writeback
  11146. && (inst.operands[0].reg == inst.operands[2].reg
  11147. || inst.operands[1].reg == inst.operands[2].reg))
  11148. as_warn (_("base register written back, and overlaps "
  11149. "one of transfer registers"));
  11150. inst.instruction |= inst.operands[0].reg << 12;
  11151. inst.instruction |= inst.operands[1].reg << 8;
  11152. encode_thumb32_addr_mode (2, /*is_t=*/false, /*is_d=*/true);
  11153. }
  11154. static void
  11155. do_t_ldstt (void)
  11156. {
  11157. inst.instruction |= inst.operands[0].reg << 12;
  11158. encode_thumb32_addr_mode (1, /*is_t=*/true, /*is_d=*/false);
  11159. }
  11160. static void
  11161. do_t_mla (void)
  11162. {
  11163. unsigned Rd, Rn, Rm, Ra;
  11164. Rd = inst.operands[0].reg;
  11165. Rn = inst.operands[1].reg;
  11166. Rm = inst.operands[2].reg;
  11167. Ra = inst.operands[3].reg;
  11168. reject_bad_reg (Rd);
  11169. reject_bad_reg (Rn);
  11170. reject_bad_reg (Rm);
  11171. reject_bad_reg (Ra);
  11172. inst.instruction |= Rd << 8;
  11173. inst.instruction |= Rn << 16;
  11174. inst.instruction |= Rm;
  11175. inst.instruction |= Ra << 12;
  11176. }
  11177. static void
  11178. do_t_mlal (void)
  11179. {
  11180. unsigned RdLo, RdHi, Rn, Rm;
  11181. RdLo = inst.operands[0].reg;
  11182. RdHi = inst.operands[1].reg;
  11183. Rn = inst.operands[2].reg;
  11184. Rm = inst.operands[3].reg;
  11185. reject_bad_reg (RdLo);
  11186. reject_bad_reg (RdHi);
  11187. reject_bad_reg (Rn);
  11188. reject_bad_reg (Rm);
  11189. inst.instruction |= RdLo << 12;
  11190. inst.instruction |= RdHi << 8;
  11191. inst.instruction |= Rn << 16;
  11192. inst.instruction |= Rm;
  11193. }
static void
do_t_mov_cmp (void)
{
  /* Encode Thumb MOV/MOVS/CMP (register or immediate second operand).
     Operand 0: Rd (MOV) or Rn (CMP).  Operand 1: Rm, shifted register,
     or immediate.  Picks a 16-bit encoding where possible, recording a
     relaxable opcode when the size was not forced; register-shifted MOVs
     are re-encoded as shift instructions.  */
  unsigned Rn, Rm;

  Rn = inst.operands[0].reg;
  Rm = inst.operands[1].reg;

  /* An instruction writing PC ends an IT block.  */
  if (Rn == REG_PC)
    set_pred_insn_type_last ();

  if (unified_syntax)
    {
      int r0off = (inst.instruction == T_MNEM_mov
		   || inst.instruction == T_MNEM_movs) ? 8 : 16;
      unsigned long opcode;
      bool narrow;
      bool low_regs;

      low_regs = (Rn <= 7 && Rm <= 7);
      opcode = inst.instruction;
      /* Inside an IT block only non-flag-setting MOV can be narrow;
	 outside, narrow MOVS additionally needs low registers.  */
      if (in_pred_block ())
	narrow = opcode != T_MNEM_movs;
      else
	narrow = opcode != T_MNEM_movs || low_regs;
      if (inst.size_req == 4
	  || inst.operands[1].shifted)
	narrow = false;

      /* MOVS PC, LR is encoded as SUBS PC, LR, #0.  */
      if (opcode == T_MNEM_movs && inst.operands[1].isreg
	  && !inst.operands[1].shifted
	  && Rn == REG_PC
	  && Rm == REG_LR)
	{
	  inst.instruction = T2_SUBS_PC_LR;
	  return;
	}

      if (opcode == T_MNEM_cmp)
	{
	  constraint (Rn == REG_PC, BAD_PC);
	  if (narrow)
	    {
	      /* In the Thumb-2 ISA, use of R13 as Rm is deprecated,
		 but valid.  */
	      warn_deprecated_sp (Rm);
	      /* R15 was documented as a valid choice for Rm in ARMv6,
		 but as UNPREDICTABLE in ARMv7.  ARM's proprietary
		 tools reject R15, so we do too.  */
	      constraint (Rm == REG_PC, BAD_PC);
	    }
	  else
	    reject_bad_reg (Rm);
	}
      else if (opcode == T_MNEM_mov
	       || opcode == T_MNEM_movs)
	{
	  if (inst.operands[1].isreg)
	    {
	      if (opcode == T_MNEM_movs)
		{
		  reject_bad_reg (Rn);
		  reject_bad_reg (Rm);
		}
	      else if (narrow)
		{
		  /* This is mov.n.  */
		  if ((Rn == REG_SP || Rn == REG_PC)
		      && (Rm == REG_SP || Rm == REG_PC))
		    {
		      as_tsktsk (_("Use of r%u as a source register is "
				   "deprecated when r%u is the destination "
				   "register."), Rm, Rn);
		    }
		}
	      else
		{
		  /* This is mov.w.  */
		  constraint (Rn == REG_PC, BAD_PC);
		  constraint (Rm == REG_PC, BAD_PC);
		  if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8))
		    constraint (Rn == REG_SP && Rm == REG_SP, BAD_SP);
		}
	    }
	  else
	    reject_bad_reg (Rn);
	}

      if (!inst.operands[1].isreg)
	{
	  /* Immediate operand.  */
	  if (!in_pred_block () && opcode == T_MNEM_mov)
	    narrow = 0;
	  if (low_regs && narrow)
	    {
	      inst.instruction = THUMB_OP16 (opcode);
	      inst.instruction |= Rn << 8;
	      /* The Thumb-1 ALU_ABS relocs already imply the 16-bit
		 encoding; anything else can relax or take the Thumb
		 immediate reloc.  */
	      if (inst.relocs[0].type < BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC
		  || inst.relocs[0].type > BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC)
		{
		  if (inst.size_req == 2)
		    inst.relocs[0].type = BFD_RELOC_ARM_THUMB_IMM;
		  else
		    inst.relax = opcode;
		}
	    }
	  else
	    {
	      constraint ((inst.relocs[0].type
			   >= BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC)
			  && (inst.relocs[0].type
			      <= BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC) ,
			  THUMB1_RELOC_ONLY);
	      /* Switch to the 32-bit modified-immediate form.  */
	      inst.instruction = THUMB_OP32 (inst.instruction);
	      inst.instruction = (inst.instruction & 0xe1ffffff) | 0x10000000;
	      inst.instruction |= Rn << r0off;
	      inst.relocs[0].type = BFD_RELOC_ARM_T32_IMMEDIATE;
	    }
	}
      else if (inst.operands[1].shifted && inst.operands[1].immisreg
	       && (inst.instruction == T_MNEM_mov
		   || inst.instruction == T_MNEM_movs))
	{
	  /* Register shifts are encoded as separate shift instructions.  */
	  bool flags = (inst.instruction == T_MNEM_movs);

	  if (in_pred_block ())
	    narrow = !flags;
	  else
	    narrow = flags;

	  if (inst.size_req == 4)
	    narrow = false;

	  if (!low_regs || inst.operands[1].imm > 7)
	    narrow = false;

	  if (Rn != Rm)
	    narrow = false;

	  switch (inst.operands[1].shift_kind)
	    {
	    case SHIFT_LSL:
	      opcode = narrow ? T_OPCODE_LSL_R : THUMB_OP32 (T_MNEM_lsl);
	      break;
	    case SHIFT_ASR:
	      opcode = narrow ? T_OPCODE_ASR_R : THUMB_OP32 (T_MNEM_asr);
	      break;
	    case SHIFT_LSR:
	      opcode = narrow ? T_OPCODE_LSR_R : THUMB_OP32 (T_MNEM_lsr);
	      break;
	    case SHIFT_ROR:
	      opcode = narrow ? T_OPCODE_ROR_R : THUMB_OP32 (T_MNEM_ror);
	      break;
	    default:
	      abort ();
	    }

	  inst.instruction = opcode;
	  if (narrow)
	    {
	      inst.instruction |= Rn;
	      inst.instruction |= inst.operands[1].imm << 3;
	    }
	  else
	    {
	      if (flags)
		inst.instruction |= CONDS_BIT;

	      inst.instruction |= Rn << 8;
	      inst.instruction |= Rm << 16;
	      inst.instruction |= inst.operands[1].imm;
	    }
	}
      else if (!narrow)
	{
	  /* Some mov with immediate shift have narrow variants.
	     Register shifts are handled above.  */
	  if (low_regs && inst.operands[1].shifted
	      && (inst.instruction == T_MNEM_mov
		  || inst.instruction == T_MNEM_movs))
	    {
	      if (in_pred_block ())
		narrow = (inst.instruction == T_MNEM_mov);
	      else
		narrow = (inst.instruction == T_MNEM_movs);
	    }

	  if (narrow)
	    {
	      switch (inst.operands[1].shift_kind)
		{
		case SHIFT_LSL: inst.instruction = T_OPCODE_LSL_I; break;
		case SHIFT_LSR: inst.instruction = T_OPCODE_LSR_I; break;
		case SHIFT_ASR: inst.instruction = T_OPCODE_ASR_I; break;
		default: narrow = false; break;
		}
	    }

	  if (narrow)
	    {
	      inst.instruction |= Rn;
	      inst.instruction |= Rm << 3;
	      inst.relocs[0].type = BFD_RELOC_ARM_THUMB_SHIFT;
	    }
	  else
	    {
	      inst.instruction = THUMB_OP32 (inst.instruction);
	      inst.instruction |= Rn << r0off;
	      encode_thumb32_shifted_operand (1);
	    }
	}
      else
	/* Narrow register-register forms.  */
	switch (inst.instruction)
	  {
	  case T_MNEM_mov:
	    /* In v4t or v5t a move of two lowregs produces unpredictable
	       results.  Don't allow this.  */
	    if (low_regs)
	      {
		constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6),
			    "MOV Rd, Rs with two low registers is not "
			    "permitted on this architecture");
		ARM_MERGE_FEATURE_SETS (thumb_arch_used, thumb_arch_used,
					arm_ext_v6);
	      }

	    inst.instruction = T_OPCODE_MOV_HR;
	    inst.instruction |= (Rn & 0x8) << 4;
	    inst.instruction |= (Rn & 0x7);
	    inst.instruction |= Rm << 3;
	    break;

	  case T_MNEM_movs:
	    /* We know we have low registers at this point.
	       Generate LSLS Rd, Rs, #0.  */
	    inst.instruction = T_OPCODE_LSL_I;
	    inst.instruction |= Rn;
	    inst.instruction |= Rm << 3;
	    break;

	  case T_MNEM_cmp:
	    if (low_regs)
	      {
		inst.instruction = T_OPCODE_CMP_LR;
		inst.instruction |= Rn;
		inst.instruction |= Rm << 3;
	      }
	    else
	      {
		inst.instruction = T_OPCODE_CMP_HR;
		inst.instruction |= (Rn & 0x8) << 4;
		inst.instruction |= (Rn & 0x7);
		inst.instruction |= Rm << 3;
	      }
	    break;
	  }
      return;
    }

  /* Pre-unified syntax: 16-bit encodings only.  */
  inst.instruction = THUMB_OP16 (inst.instruction);

  /* PR 10443: Do not silently ignore shifted operands.  */
  constraint (inst.operands[1].shifted,
	      _("shifts in CMP/MOV instructions are only supported in unified syntax"));

  if (inst.operands[1].isreg)
    {
      if (Rn < 8 && Rm < 8)
	{
	  /* A move of two lowregs is encoded as ADD Rd, Rs, #0
	     since a MOV instruction produces unpredictable results.  */
	  if (inst.instruction == T_OPCODE_MOV_I8)
	    inst.instruction = T_OPCODE_ADD_I3;
	  else
	    inst.instruction = T_OPCODE_CMP_LR;

	  inst.instruction |= Rn;
	  inst.instruction |= Rm << 3;
	}
      else
	{
	  if (inst.instruction == T_OPCODE_MOV_I8)
	    inst.instruction = T_OPCODE_MOV_HR;
	  else
	    inst.instruction = T_OPCODE_CMP_HR;
	  do_t_cpy ();
	}
    }
  else
    {
      constraint (Rn > 7,
		  _("only lo regs allowed with immediate"));
      inst.instruction |= Rn << 8;
      inst.relocs[0].type = BFD_RELOC_ARM_THUMB_IMM;
    }
}
  11469. static void
  11470. do_t_mov16 (void)
  11471. {
  11472. unsigned Rd;
  11473. bfd_vma imm;
  11474. bool top;
  11475. top = (inst.instruction & 0x00800000) != 0;
  11476. if (inst.relocs[0].type == BFD_RELOC_ARM_MOVW)
  11477. {
  11478. constraint (top, _(":lower16: not allowed in this instruction"));
  11479. inst.relocs[0].type = BFD_RELOC_ARM_THUMB_MOVW;
  11480. }
  11481. else if (inst.relocs[0].type == BFD_RELOC_ARM_MOVT)
  11482. {
  11483. constraint (!top, _(":upper16: not allowed in this instruction"));
  11484. inst.relocs[0].type = BFD_RELOC_ARM_THUMB_MOVT;
  11485. }
  11486. Rd = inst.operands[0].reg;
  11487. reject_bad_reg (Rd);
  11488. inst.instruction |= Rd << 8;
  11489. if (inst.relocs[0].type == BFD_RELOC_UNUSED)
  11490. {
  11491. imm = inst.relocs[0].exp.X_add_number;
  11492. inst.instruction |= (imm & 0xf000) << 4;
  11493. inst.instruction |= (imm & 0x0800) << 15;
  11494. inst.instruction |= (imm & 0x0700) << 4;
  11495. inst.instruction |= (imm & 0x00ff);
  11496. }
  11497. }
static void
do_t_mvn_tst (void)
{
  /* Encode Thumb MVN/MVNS/TST/CMN (two-operand compare/negate family).
     Operand 0: Rn (or Rd for MVN); operand 1: Rm, shifted register, or
     immediate.  Picks the 16-bit encoding when registers and predication
     permit, else the 32-bit form.  */
  unsigned Rn, Rm;

  Rn = inst.operands[0].reg;
  Rm = inst.operands[1].reg;

  /* CMP/CMN tolerate SP as Rn; the others reject SP and PC.  */
  if (inst.instruction == T_MNEM_cmp
      || inst.instruction == T_MNEM_cmn)
    constraint (Rn == REG_PC, BAD_PC);
  else
    reject_bad_reg (Rn);
  reject_bad_reg (Rm);

  if (unified_syntax)
    {
      int r0off = (inst.instruction == T_MNEM_mvn
		   || inst.instruction == T_MNEM_mvns) ? 8 : 16;
      bool narrow;

      if (inst.size_req == 4
	  || inst.instruction > 0xffff
	  || inst.operands[1].shifted
	  || Rn > 7 || Rm > 7)
	narrow = false;
      else if (inst.instruction == T_MNEM_cmn
	       || inst.instruction == T_MNEM_tst)
	narrow = true;
      else if (THUMB_SETS_FLAGS (inst.instruction))
	narrow = !in_pred_block ();
      else
	narrow = in_pred_block ();

      if (!inst.operands[1].isreg)
	{
	  /* For an immediate, we always generate a 32-bit opcode;
	     section relaxation will shrink it later if possible.  */
	  if (inst.instruction < 0xffff)
	    inst.instruction = THUMB_OP32 (inst.instruction);
	  /* Flip to the modified-immediate encoding.  */
	  inst.instruction = (inst.instruction & 0xe1ffffff) | 0x10000000;
	  inst.instruction |= Rn << r0off;
	  inst.relocs[0].type = BFD_RELOC_ARM_T32_IMMEDIATE;
	}
      else
	{
	  /* See if we can do this with a 16-bit instruction.  */
	  if (narrow)
	    {
	      inst.instruction = THUMB_OP16 (inst.instruction);
	      inst.instruction |= Rn;
	      inst.instruction |= Rm << 3;
	    }
	  else
	    {
	      constraint (inst.operands[1].shifted
			  && inst.operands[1].immisreg,
			  _("shift must be constant"));
	      if (inst.instruction < 0xffff)
		inst.instruction = THUMB_OP32 (inst.instruction);
	      inst.instruction |= Rn << r0off;
	      encode_thumb32_shifted_operand (1);
	    }
	}
    }
  else
    {
      /* Pre-unified syntax: 16-bit, low registers, no shifted operand.  */
      constraint (inst.instruction > 0xffff
		  || inst.instruction == T_MNEM_mvns, BAD_THUMB32);
      constraint (!inst.operands[1].isreg || inst.operands[1].shifted,
		  _("unshifted register required"));
      constraint (Rn > 7 || Rm > 7,
		  BAD_HIREG);

      inst.instruction = THUMB_OP16 (inst.instruction);
      inst.instruction |= Rn;
      inst.instruction |= Rm << 3;
    }
}
static void
do_t_mrs (void)
{
  /* Encode Thumb MRS: Rd <- special/banked register.
     Operand 1 is either a banked register (isreg) or a PSR specifier
     immediate.  VFP system registers are diverted to the nsyn path.  */
  unsigned Rd;

  if (do_vfp_nsyn_mrs () == SUCCESS)
    return;

  Rd = inst.operands[0].reg;
  reject_bad_reg (Rd);
  inst.instruction |= Rd << 8;

  if (inst.operands[1].isreg)
    {
      /* Banked register: validate the encoded register value, then
	 scatter its SYSm/R fields into the instruction.  */
      unsigned br = inst.operands[1].reg;
      if (((br & 0x200) == 0) && ((br & 0xf000) != 0xf000))
	as_bad (_("bad register for mrs"));

      inst.instruction |= br & (0xf << 16);
      inst.instruction |= (br & 0x300) >> 4;
      inst.instruction |= (br & SPSR_BIT) >> 2;
    }
  else
    {
      int flags = inst.operands[1].imm & (PSR_c|PSR_x|PSR_s|PSR_f|SPSR_BIT);

      if (ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_m))
	{
	  /* PR gas/12698: The constraint is only applied for m_profile.
	     If the user has specified -march=all, we want to ignore it as
	     we are building for any CPU type, including non-m variants.  */
	  bool m_profile =
	    !ARM_FEATURE_CORE_EQUAL (selected_cpu, arm_arch_any);

	  constraint ((flags != 0) && m_profile, _("selected processor does "
						   "not support requested special purpose register"));
	}
      else
	/* mrs only accepts APSR/CPSR/SPSR/CPSR_all/SPSR_all (for non-M profile
	   devices).  */
	constraint ((flags & ~SPSR_BIT) != (PSR_c|PSR_f),
		    _("'APSR', 'CPSR' or 'SPSR' expected"));

      inst.instruction |= (flags & SPSR_BIT) >> 2;
      inst.instruction |= inst.operands[1].imm & 0xff;
      inst.instruction |= 0xf0000;
    }
}
static void
do_t_msr (void)
{
  /* Encode Thumb MSR: special/banked register <- Rn.
     Operand 0 is the PSR specifier (or banked register); operand 1 must
     be a register -- Thumb has no immediate MSR.  VFP system registers
     are diverted to the nsyn path.  */
  int flags;
  unsigned Rn;

  if (do_vfp_nsyn_msr () == SUCCESS)
    return;

  constraint (!inst.operands[1].isreg,
	      _("Thumb encoding does not support an immediate here"));

  if (inst.operands[0].isreg)
    flags = (int)(inst.operands[0].reg);
  else
    flags = inst.operands[0].imm;

  if (ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_m))
    {
      int bits = inst.operands[0].imm & (PSR_c|PSR_x|PSR_s|PSR_f|SPSR_BIT);

      /* PR gas/12698: The constraint is only applied for m_profile.
	 If the user has specified -march=all, we want to ignore it as
	 we are building for any CPU type, including non-m variants.  */
      bool m_profile =
	!ARM_FEATURE_CORE_EQUAL (selected_cpu, arm_arch_any);

      /* Without the DSP extension only the _f fields are writable.  */
      constraint (((ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v6_dsp)
		    && (bits & ~(PSR_s | PSR_f)) != 0)
		   || (!ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v6_dsp)
		       && bits != PSR_f)) && m_profile,
		  _("selected processor does not support requested special "
		    "purpose register"));
    }
  else
    constraint ((flags & 0xff) != 0, _("selected processor does not support "
				       "requested special purpose register"));

  Rn = inst.operands[1].reg;
  reject_bad_reg (Rn);

  /* Scatter the SPSR bit, mask and SYSm fields into the encoding.  */
  inst.instruction |= (flags & SPSR_BIT) >> 2;
  inst.instruction |= (flags & 0xf0000) >> 8;
  inst.instruction |= (flags & 0x300) >> 4;
  inst.instruction |= (flags & 0xff);
  inst.instruction |= Rn << 16;
}
  11651. static void
  11652. do_t_mul (void)
  11653. {
  11654. bool narrow;
  11655. unsigned Rd, Rn, Rm;
  11656. if (!inst.operands[2].present)
  11657. inst.operands[2].reg = inst.operands[0].reg;
  11658. Rd = inst.operands[0].reg;
  11659. Rn = inst.operands[1].reg;
  11660. Rm = inst.operands[2].reg;
  11661. if (unified_syntax)
  11662. {
  11663. if (inst.size_req == 4
  11664. || (Rd != Rn
  11665. && Rd != Rm)
  11666. || Rn > 7
  11667. || Rm > 7)
  11668. narrow = false;
  11669. else if (inst.instruction == T_MNEM_muls)
  11670. narrow = !in_pred_block ();
  11671. else
  11672. narrow = in_pred_block ();
  11673. }
  11674. else
  11675. {
  11676. constraint (inst.instruction == T_MNEM_muls, BAD_THUMB32);
  11677. constraint (Rn > 7 || Rm > 7,
  11678. BAD_HIREG);
  11679. narrow = true;
  11680. }
  11681. if (narrow)
  11682. {
  11683. /* 16-bit MULS/Conditional MUL. */
  11684. inst.instruction = THUMB_OP16 (inst.instruction);
  11685. inst.instruction |= Rd;
  11686. if (Rd == Rn)
  11687. inst.instruction |= Rm << 3;
  11688. else if (Rd == Rm)
  11689. inst.instruction |= Rn << 3;
  11690. else
  11691. constraint (1, _("dest must overlap one source register"));
  11692. }
  11693. else
  11694. {
  11695. constraint (inst.instruction != T_MNEM_mul,
  11696. _("Thumb-2 MUL must not set flags"));
  11697. /* 32-bit MUL. */
  11698. inst.instruction = THUMB_OP32 (inst.instruction);
  11699. inst.instruction |= Rd << 8;
  11700. inst.instruction |= Rn << 16;
  11701. inst.instruction |= Rm << 0;
  11702. reject_bad_reg (Rd);
  11703. reject_bad_reg (Rn);
  11704. reject_bad_reg (Rm);
  11705. }
  11706. }
  11707. static void
  11708. do_t_mull (void)
  11709. {
  11710. unsigned RdLo, RdHi, Rn, Rm;
  11711. RdLo = inst.operands[0].reg;
  11712. RdHi = inst.operands[1].reg;
  11713. Rn = inst.operands[2].reg;
  11714. Rm = inst.operands[3].reg;
  11715. reject_bad_reg (RdLo);
  11716. reject_bad_reg (RdHi);
  11717. reject_bad_reg (Rn);
  11718. reject_bad_reg (Rm);
  11719. inst.instruction |= RdLo << 12;
  11720. inst.instruction |= RdHi << 8;
  11721. inst.instruction |= Rn << 16;
  11722. inst.instruction |= Rm;
  11723. if (RdLo == RdHi)
  11724. as_tsktsk (_("rdhi and rdlo must be different"));
  11725. }
  11726. static void
  11727. do_t_nop (void)
  11728. {
  11729. set_pred_insn_type (NEUTRAL_IT_INSN);
  11730. if (unified_syntax)
  11731. {
  11732. if (inst.size_req == 4 || inst.operands[0].imm > 15)
  11733. {
  11734. inst.instruction = THUMB_OP32 (inst.instruction);
  11735. inst.instruction |= inst.operands[0].imm;
  11736. }
  11737. else
  11738. {
  11739. /* PR9722: Check for Thumb2 availability before
  11740. generating a thumb2 nop instruction. */
  11741. if (ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v6t2))
  11742. {
  11743. inst.instruction = THUMB_OP16 (inst.instruction);
  11744. inst.instruction |= inst.operands[0].imm << 4;
  11745. }
  11746. else
  11747. inst.instruction = 0x46c0;
  11748. }
  11749. }
  11750. else
  11751. {
  11752. constraint (inst.operands[0].present,
  11753. _("Thumb does not support NOP with hints"));
  11754. inst.instruction = 0x46c0;
  11755. }
  11756. }
/* Encode a Thumb NEG/NEGS instruction, choosing between the 16-bit
   and 32-bit encodings.  */
static void
do_t_neg (void)
{
  if (unified_syntax)
    {
      bool narrow;

      /* The 16-bit encoding always sets flags, so it is usable for the
	 flag-setting form outside an IT block and for the non-setting
	 form inside one.  */
      if (THUMB_SETS_FLAGS (inst.instruction))
	narrow = !in_pred_block ();
      else
	narrow = in_pred_block ();
      /* High registers or an explicit .w qualifier force the wide
	 encoding.  */
      if (inst.operands[0].reg > 7 || inst.operands[1].reg > 7)
	narrow = false;
      if (inst.size_req == 4)
	narrow = false;

      if (!narrow)
	{
	  inst.instruction = THUMB_OP32 (inst.instruction);
	  inst.instruction |= inst.operands[0].reg << 8;
	  inst.instruction |= inst.operands[1].reg << 16;
	}
      else
	{
	  inst.instruction = THUMB_OP16 (inst.instruction);
	  inst.instruction |= inst.operands[0].reg;
	  inst.instruction |= inst.operands[1].reg << 3;
	}
    }
  else
    {
      /* Non-unified syntax: only the 16-bit low-register form exists.  */
      constraint (inst.operands[0].reg > 7 || inst.operands[1].reg > 7,
		  BAD_HIREG);
      constraint (THUMB_SETS_FLAGS (inst.instruction), BAD_THUMB32);

      inst.instruction = THUMB_OP16 (inst.instruction);
      inst.instruction |= inst.operands[0].reg;
      inst.instruction |= inst.operands[1].reg << 3;
    }
}
  11794. static void
  11795. do_t_orn (void)
  11796. {
  11797. unsigned Rd, Rn;
  11798. Rd = inst.operands[0].reg;
  11799. Rn = inst.operands[1].present ? inst.operands[1].reg : Rd;
  11800. reject_bad_reg (Rd);
  11801. /* Rn == REG_SP is unpredictable; Rn == REG_PC is MVN. */
  11802. reject_bad_reg (Rn);
  11803. inst.instruction |= Rd << 8;
  11804. inst.instruction |= Rn << 16;
  11805. if (!inst.operands[2].isreg)
  11806. {
  11807. inst.instruction = (inst.instruction & 0xe1ffffff) | 0x10000000;
  11808. inst.relocs[0].type = BFD_RELOC_ARM_T32_IMMEDIATE;
  11809. }
  11810. else
  11811. {
  11812. unsigned Rm;
  11813. Rm = inst.operands[2].reg;
  11814. reject_bad_reg (Rm);
  11815. constraint (inst.operands[2].shifted
  11816. && inst.operands[2].immisreg,
  11817. _("shift must be constant"));
  11818. encode_thumb32_shifted_operand (2);
  11819. }
  11820. }
  11821. static void
  11822. do_t_pkhbt (void)
  11823. {
  11824. unsigned Rd, Rn, Rm;
  11825. Rd = inst.operands[0].reg;
  11826. Rn = inst.operands[1].reg;
  11827. Rm = inst.operands[2].reg;
  11828. reject_bad_reg (Rd);
  11829. reject_bad_reg (Rn);
  11830. reject_bad_reg (Rm);
  11831. inst.instruction |= Rd << 8;
  11832. inst.instruction |= Rn << 16;
  11833. inst.instruction |= Rm;
  11834. if (inst.operands[3].present)
  11835. {
  11836. unsigned int val = inst.relocs[0].exp.X_add_number;
  11837. constraint (inst.relocs[0].exp.X_op != O_constant,
  11838. _("expression too complex"));
  11839. inst.instruction |= (val & 0x1c) << 10;
  11840. inst.instruction |= (val & 0x03) << 6;
  11841. }
  11842. }
  11843. static void
  11844. do_t_pkhtb (void)
  11845. {
  11846. if (!inst.operands[3].present)
  11847. {
  11848. unsigned Rtmp;
  11849. inst.instruction &= ~0x00000020;
  11850. /* PR 10168. Swap the Rm and Rn registers. */
  11851. Rtmp = inst.operands[1].reg;
  11852. inst.operands[1].reg = inst.operands[2].reg;
  11853. inst.operands[2].reg = Rtmp;
  11854. }
  11855. do_t_pkhbt ();
  11856. }
  11857. static void
  11858. do_t_pld (void)
  11859. {
  11860. if (inst.operands[0].immisreg)
  11861. reject_bad_reg (inst.operands[0].imm);
  11862. encode_thumb32_addr_mode (0, /*is_t=*/false, /*is_d=*/false);
  11863. }
/* Encode Thumb PUSH and POP, selecting among the 16-bit encodings
   (plain low-register list, or low registers plus LR/PC) and the
   32-bit multi-register encoding.  */
static void
do_t_push_pop (void)
{
  unsigned mask;

  constraint (inst.operands[0].writeback,
	      _("push/pop do not support {reglist}^"));
  constraint (inst.relocs[0].type != BFD_RELOC_UNUSED,
	      _("expression too complex"));

  mask = inst.operands[0].imm;
  if (inst.size_req != 4 && (mask & ~0xff) == 0)
    /* Only low registers: plain 16-bit encoding.  */
    inst.instruction = THUMB_OP16 (inst.instruction) | mask;
  else if (inst.size_req != 4
	   && (mask & ~0xff) == (1U << (inst.instruction == T_MNEM_push
					? REG_LR : REG_PC)))
    {
      /* Low registers plus exactly LR (push) or PC (pop): 16-bit
	 encoding with the extra-register bit set.  */
      inst.instruction = THUMB_OP16 (inst.instruction);
      inst.instruction |= THUMB_PP_PC_LR;
      inst.instruction |= mask & 0xff;
    }
  else if (unified_syntax)
    {
      /* Anything else needs the 32-bit LDM/STM-style encoding.  */
      inst.instruction = THUMB_OP32 (inst.instruction);
      encode_thumb2_multi (true /* do_io */, 13, mask, true);
    }
  else
    {
      inst.error = _("invalid register list to push/pop instruction");
      return;
    }
}
  11894. static void
  11895. do_t_clrm (void)
  11896. {
  11897. if (unified_syntax)
  11898. encode_thumb2_multi (false /* do_io */, -1, inst.operands[0].imm, false);
  11899. else
  11900. {
  11901. inst.error = _("invalid register list to push/pop instruction");
  11902. return;
  11903. }
  11904. }
  11905. static void
  11906. do_t_vscclrm (void)
  11907. {
  11908. if (inst.operands[0].issingle)
  11909. {
  11910. inst.instruction |= (inst.operands[0].reg & 0x1) << 22;
  11911. inst.instruction |= (inst.operands[0].reg & 0x1e) << 11;
  11912. inst.instruction |= inst.operands[0].imm;
  11913. }
  11914. else
  11915. {
  11916. inst.instruction |= (inst.operands[0].reg & 0x10) << 18;
  11917. inst.instruction |= (inst.operands[0].reg & 0xf) << 12;
  11918. inst.instruction |= 1 << 8;
  11919. inst.instruction |= inst.operands[0].imm << 1;
  11920. }
  11921. }
  11922. static void
  11923. do_t_rbit (void)
  11924. {
  11925. unsigned Rd, Rm;
  11926. Rd = inst.operands[0].reg;
  11927. Rm = inst.operands[1].reg;
  11928. reject_bad_reg (Rd);
  11929. reject_bad_reg (Rm);
  11930. inst.instruction |= Rd << 8;
  11931. inst.instruction |= Rm << 16;
  11932. inst.instruction |= Rm;
  11933. }
  11934. static void
  11935. do_t_rev (void)
  11936. {
  11937. unsigned Rd, Rm;
  11938. Rd = inst.operands[0].reg;
  11939. Rm = inst.operands[1].reg;
  11940. reject_bad_reg (Rd);
  11941. reject_bad_reg (Rm);
  11942. if (Rd <= 7 && Rm <= 7
  11943. && inst.size_req != 4)
  11944. {
  11945. inst.instruction = THUMB_OP16 (inst.instruction);
  11946. inst.instruction |= Rd;
  11947. inst.instruction |= Rm << 3;
  11948. }
  11949. else if (unified_syntax)
  11950. {
  11951. inst.instruction = THUMB_OP32 (inst.instruction);
  11952. inst.instruction |= Rd << 8;
  11953. inst.instruction |= Rm << 16;
  11954. inst.instruction |= Rm;
  11955. }
  11956. else
  11957. inst.error = BAD_HIREG;
  11958. }
  11959. static void
  11960. do_t_rrx (void)
  11961. {
  11962. unsigned Rd, Rm;
  11963. Rd = inst.operands[0].reg;
  11964. Rm = inst.operands[1].reg;
  11965. reject_bad_reg (Rd);
  11966. reject_bad_reg (Rm);
  11967. inst.instruction |= Rd << 8;
  11968. inst.instruction |= Rm;
  11969. }
/* Encode a Thumb RSB instruction.  An "rsb Rd, Rs, #0" with low
   registers may shrink to a 16-bit NEG.  */
static void
do_t_rsb (void)
{
  unsigned Rd, Rs;

  Rd = inst.operands[0].reg;
  Rs = (inst.operands[1].present
	? inst.operands[1].reg    /* Rd, Rs, foo */
	: inst.operands[0].reg);  /* Rd, foo -> Rd, Rd, foo */

  reject_bad_reg (Rd);
  reject_bad_reg (Rs);
  if (inst.operands[2].isreg)
    reject_bad_reg (inst.operands[2].reg);

  inst.instruction |= Rd << 8;
  inst.instruction |= Rs << 16;
  if (!inst.operands[2].isreg)
    {
      bool narrow;

      /* Bit 20 is the S (flag-setting) bit; the 16-bit NEG always sets
	 flags, so it only matches outside an IT block.  */
      if ((inst.instruction & 0x00100000) != 0)
	narrow = !in_pred_block ();
      else
	narrow = in_pred_block ();

      if (Rd > 7 || Rs > 7)
	narrow = false;

      if (inst.size_req == 4 || !unified_syntax)
	narrow = false;

      /* Only an immediate of exactly zero can become NEG.  */
      if (inst.relocs[0].exp.X_op != O_constant
	  || inst.relocs[0].exp.X_add_number != 0)
	narrow = false;

      /* Turn rsb #0 into 16-bit neg.  We should probably do this via
	 relaxation, but it doesn't seem worth the hassle.  */
      if (narrow)
	{
	  inst.relocs[0].type = BFD_RELOC_UNUSED;
	  inst.instruction = THUMB_OP16 (T_MNEM_negs);
	  inst.instruction |= Rs << 3;
	  inst.instruction |= Rd;
	}
      else
	{
	  /* Wide immediate form: select the modified-immediate
	     encoding.  */
	  inst.instruction = (inst.instruction & 0xe1ffffff) | 0x10000000;
	  inst.relocs[0].type = BFD_RELOC_ARM_T32_IMMEDIATE;
	}
    }
  else
    encode_thumb32_shifted_operand (2);
}
  12016. static void
  12017. do_t_setend (void)
  12018. {
  12019. if (warn_on_deprecated
  12020. && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8))
  12021. as_tsktsk (_("setend use is deprecated for ARMv8"));
  12022. set_pred_insn_type (OUTSIDE_PRED_INSN);
  12023. if (inst.operands[0].imm)
  12024. inst.instruction |= 0x8;
  12025. }
/* Encode Thumb shift instructions (ASR, LSL, LSR, ROR; by immediate or
   by register), choosing between the 16-bit and 32-bit encodings.  */
static void
do_t_shift (void)
{
  /* With two operands, the destination doubles as the first source.  */
  if (!inst.operands[1].present)
    inst.operands[1].reg = inst.operands[0].reg;

  if (unified_syntax)
    {
      bool narrow;
      int shift_kind;

      switch (inst.instruction)
	{
	case T_MNEM_asr:
	case T_MNEM_asrs: shift_kind = SHIFT_ASR; break;
	case T_MNEM_lsl:
	case T_MNEM_lsls: shift_kind = SHIFT_LSL; break;
	case T_MNEM_lsr:
	case T_MNEM_lsrs: shift_kind = SHIFT_LSR; break;
	case T_MNEM_ror:
	case T_MNEM_rors: shift_kind = SHIFT_ROR; break;
	default: abort ();
	}

      /* The 16-bit encodings set flags exactly when outside an IT
	 block.  */
      if (THUMB_SETS_FLAGS (inst.instruction))
	narrow = !in_pred_block ();
      else
	narrow = in_pred_block ();
      if (inst.operands[0].reg > 7 || inst.operands[1].reg > 7)
	narrow = false;
      /* There is no 16-bit ROR-by-immediate encoding.  */
      if (!inst.operands[2].isreg && shift_kind == SHIFT_ROR)
	narrow = false;
      /* The 16-bit register-shift form is only Rd = Rd <shift> Rs with
	 a low Rs.  */
      if (inst.operands[2].isreg
	  && (inst.operands[1].reg != inst.operands[0].reg
	      || inst.operands[2].reg > 7))
	narrow = false;
      if (inst.size_req == 4)
	narrow = false;

      reject_bad_reg (inst.operands[0].reg);
      reject_bad_reg (inst.operands[1].reg);

      if (!narrow)
	{
	  if (inst.operands[2].isreg)
	    {
	      reject_bad_reg (inst.operands[2].reg);
	      inst.instruction = THUMB_OP32 (inst.instruction);
	      inst.instruction |= inst.operands[0].reg << 8;
	      inst.instruction |= inst.operands[1].reg << 16;
	      inst.instruction |= inst.operands[2].reg;

	      /* PR 12854: Error on extraneous shifts.  */
	      constraint (inst.operands[2].shifted,
			  _("extraneous shift as part of operand to shift insn"));
	    }
	  else
	    {
	      /* Shift-by-immediate is encoded as a MOV(S) with a
		 shifted register operand.  */
	      inst.operands[1].shifted = 1;
	      inst.operands[1].shift_kind = shift_kind;
	      inst.instruction = THUMB_OP32 (THUMB_SETS_FLAGS (inst.instruction)
					     ? T_MNEM_movs : T_MNEM_mov);
	      inst.instruction |= inst.operands[0].reg << 8;
	      encode_thumb32_shifted_operand (1);
	      /* Prevent the incorrect generation of an ARM_IMMEDIATE fixup.  */
	      inst.relocs[0].type = BFD_RELOC_UNUSED;
	    }
	}
      else
	{
	  if (inst.operands[2].isreg)
	    {
	      switch (shift_kind)
		{
		case SHIFT_ASR: inst.instruction = T_OPCODE_ASR_R; break;
		case SHIFT_LSL: inst.instruction = T_OPCODE_LSL_R; break;
		case SHIFT_LSR: inst.instruction = T_OPCODE_LSR_R; break;
		case SHIFT_ROR: inst.instruction = T_OPCODE_ROR_R; break;
		default: abort ();
		}

	      inst.instruction |= inst.operands[0].reg;
	      inst.instruction |= inst.operands[2].reg << 3;

	      /* PR 12854: Error on extraneous shifts.  */
	      constraint (inst.operands[2].shifted,
			  _("extraneous shift as part of operand to shift insn"));
	    }
	  else
	    {
	      switch (shift_kind)
		{
		case SHIFT_ASR: inst.instruction = T_OPCODE_ASR_I; break;
		case SHIFT_LSL: inst.instruction = T_OPCODE_LSL_I; break;
		case SHIFT_LSR: inst.instruction = T_OPCODE_LSR_I; break;
		default: abort ();
		}
	      inst.relocs[0].type = BFD_RELOC_ARM_THUMB_SHIFT;
	      inst.instruction |= inst.operands[0].reg;
	      inst.instruction |= inst.operands[1].reg << 3;
	    }
	}
    }
  else
    {
      /* Non-unified syntax: only the 16-bit low-register encodings
	 exist.  */
      constraint (inst.operands[0].reg > 7
		  || inst.operands[1].reg > 7, BAD_HIREG);
      constraint (THUMB_SETS_FLAGS (inst.instruction), BAD_THUMB32);

      if (inst.operands[2].isreg)  /* Rd, {Rs,} Rn */
	{
	  constraint (inst.operands[2].reg > 7, BAD_HIREG);
	  constraint (inst.operands[0].reg != inst.operands[1].reg,
		      _("source1 and dest must be same register"));

	  switch (inst.instruction)
	    {
	    case T_MNEM_asr: inst.instruction = T_OPCODE_ASR_R; break;
	    case T_MNEM_lsl: inst.instruction = T_OPCODE_LSL_R; break;
	    case T_MNEM_lsr: inst.instruction = T_OPCODE_LSR_R; break;
	    case T_MNEM_ror: inst.instruction = T_OPCODE_ROR_R; break;
	    default: abort ();
	    }

	  inst.instruction |= inst.operands[0].reg;
	  inst.instruction |= inst.operands[2].reg << 3;

	  /* PR 12854: Error on extraneous shifts.  */
	  constraint (inst.operands[2].shifted,
		      _("extraneous shift as part of operand to shift insn"));
	}
      else
	{
	  switch (inst.instruction)
	    {
	    case T_MNEM_asr: inst.instruction = T_OPCODE_ASR_I; break;
	    case T_MNEM_lsl: inst.instruction = T_OPCODE_LSL_I; break;
	    case T_MNEM_lsr: inst.instruction = T_OPCODE_LSR_I; break;
	    case T_MNEM_ror: inst.error = _("ror #imm not supported"); return;
	    default: abort ();
	    }
	  inst.relocs[0].type = BFD_RELOC_ARM_THUMB_SHIFT;
	  inst.instruction |= inst.operands[0].reg;
	  inst.instruction |= inst.operands[1].reg << 3;
	}
    }
}
  12161. static void
  12162. do_t_simd (void)
  12163. {
  12164. unsigned Rd, Rn, Rm;
  12165. Rd = inst.operands[0].reg;
  12166. Rn = inst.operands[1].reg;
  12167. Rm = inst.operands[2].reg;
  12168. reject_bad_reg (Rd);
  12169. reject_bad_reg (Rn);
  12170. reject_bad_reg (Rm);
  12171. inst.instruction |= Rd << 8;
  12172. inst.instruction |= Rn << 16;
  12173. inst.instruction |= Rm;
  12174. }
  12175. static void
  12176. do_t_simd2 (void)
  12177. {
  12178. unsigned Rd, Rn, Rm;
  12179. Rd = inst.operands[0].reg;
  12180. Rm = inst.operands[1].reg;
  12181. Rn = inst.operands[2].reg;
  12182. reject_bad_reg (Rd);
  12183. reject_bad_reg (Rn);
  12184. reject_bad_reg (Rm);
  12185. inst.instruction |= Rd << 8;
  12186. inst.instruction |= Rn << 16;
  12187. inst.instruction |= Rm;
  12188. }
  12189. static void
  12190. do_t_smc (void)
  12191. {
  12192. unsigned int value = inst.relocs[0].exp.X_add_number;
  12193. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v7a),
  12194. _("SMC is not permitted on this architecture"));
  12195. constraint (inst.relocs[0].exp.X_op != O_constant,
  12196. _("expression too complex"));
  12197. constraint (value > 0xf, _("immediate too large (bigger than 0xF)"));
  12198. inst.relocs[0].type = BFD_RELOC_UNUSED;
  12199. inst.instruction |= (value & 0x000f) << 16;
  12200. /* PR gas/15623: SMC instructions must be last in an IT block. */
  12201. set_pred_insn_type_last ();
  12202. }
  12203. static void
  12204. do_t_hvc (void)
  12205. {
  12206. unsigned int value = inst.relocs[0].exp.X_add_number;
  12207. inst.relocs[0].type = BFD_RELOC_UNUSED;
  12208. inst.instruction |= (value & 0x0fff);
  12209. inst.instruction |= (value & 0xf000) << 4;
  12210. }
/* Common encoder for Thumb-2 SSAT and USAT.  BIAS is subtracted from
   the saturation-bound operand before encoding (1 for SSAT, 0 for
   USAT).  */
static void
do_t_ssat_usat (int bias)
{
  unsigned Rd, Rn;

  Rd = inst.operands[0].reg;
  Rn = inst.operands[2].reg;

  reject_bad_reg (Rd);
  reject_bad_reg (Rn);

  inst.instruction |= Rd << 8;
  inst.instruction |= inst.operands[1].imm - bias;
  inst.instruction |= Rn << 16;

  if (inst.operands[3].present)
    {
      offsetT shift_amount = inst.relocs[0].exp.X_add_number;

      /* The shift amount is consumed here; no fixup remains.  */
      inst.relocs[0].type = BFD_RELOC_UNUSED;

      constraint (inst.relocs[0].exp.X_op != O_constant,
		  _("expression too complex"));

      if (shift_amount != 0)
	{
	  constraint (shift_amount > 31,
		      _("shift expression is too large"));

	  if (inst.operands[3].shift_kind == SHIFT_ASR)
	    inst.instruction |= 0x00200000;  /* sh bit.  */

	  /* Split the shift amount across the imm3 and imm2 fields.  */
	  inst.instruction |= (shift_amount & 0x1c) << 10;
	  inst.instruction |= (shift_amount & 0x03) << 6;
	}
    }
}
  12239. static void
  12240. do_t_ssat (void)
  12241. {
  12242. do_t_ssat_usat (1);
  12243. }
  12244. static void
  12245. do_t_ssat16 (void)
  12246. {
  12247. unsigned Rd, Rn;
  12248. Rd = inst.operands[0].reg;
  12249. Rn = inst.operands[2].reg;
  12250. reject_bad_reg (Rd);
  12251. reject_bad_reg (Rn);
  12252. inst.instruction |= Rd << 8;
  12253. inst.instruction |= inst.operands[1].imm - 1;
  12254. inst.instruction |= Rn << 16;
  12255. }
  12256. static void
  12257. do_t_strex (void)
  12258. {
  12259. constraint (!inst.operands[2].isreg || !inst.operands[2].preind
  12260. || inst.operands[2].postind || inst.operands[2].writeback
  12261. || inst.operands[2].immisreg || inst.operands[2].shifted
  12262. || inst.operands[2].negative,
  12263. BAD_ADDR_MODE);
  12264. constraint (inst.operands[2].reg == REG_PC, BAD_PC);
  12265. inst.instruction |= inst.operands[0].reg << 8;
  12266. inst.instruction |= inst.operands[1].reg << 12;
  12267. inst.instruction |= inst.operands[2].reg << 16;
  12268. inst.relocs[0].type = BFD_RELOC_ARM_T32_OFFSET_U8;
  12269. }
  12270. static void
  12271. do_t_strexd (void)
  12272. {
  12273. if (!inst.operands[2].present)
  12274. inst.operands[2].reg = inst.operands[1].reg + 1;
  12275. constraint (inst.operands[0].reg == inst.operands[1].reg
  12276. || inst.operands[0].reg == inst.operands[2].reg
  12277. || inst.operands[0].reg == inst.operands[3].reg,
  12278. BAD_OVERLAP);
  12279. inst.instruction |= inst.operands[0].reg;
  12280. inst.instruction |= inst.operands[1].reg << 12;
  12281. inst.instruction |= inst.operands[2].reg << 8;
  12282. inst.instruction |= inst.operands[3].reg << 16;
  12283. }
  12284. static void
  12285. do_t_sxtah (void)
  12286. {
  12287. unsigned Rd, Rn, Rm;
  12288. Rd = inst.operands[0].reg;
  12289. Rn = inst.operands[1].reg;
  12290. Rm = inst.operands[2].reg;
  12291. reject_bad_reg (Rd);
  12292. reject_bad_reg (Rn);
  12293. reject_bad_reg (Rm);
  12294. inst.instruction |= Rd << 8;
  12295. inst.instruction |= Rn << 16;
  12296. inst.instruction |= Rm;
  12297. inst.instruction |= inst.operands[3].imm << 4;
  12298. }
/* Encode a Thumb extend instruction (SXTH-family), using the 16-bit
   encoding when possible: low registers, no rotation, no .w.  */
static void
do_t_sxth (void)
{
  unsigned Rd, Rm;

  Rd = inst.operands[0].reg;
  Rm = inst.operands[1].reg;

  reject_bad_reg (Rd);
  reject_bad_reg (Rm);

  /* inst.instruction <= 0xffff means it still holds the 16-bit opcode
     template.  */
  if (inst.instruction <= 0xffff
      && inst.size_req != 4
      && Rd <= 7 && Rm <= 7
      && (!inst.operands[2].present || inst.operands[2].imm == 0))
    {
      inst.instruction = THUMB_OP16 (inst.instruction);
      inst.instruction |= Rd;
      inst.instruction |= Rm << 3;
    }
  else if (unified_syntax)
    {
      if (inst.instruction <= 0xffff)
	inst.instruction = THUMB_OP32 (inst.instruction);
      inst.instruction |= Rd << 8;
      inst.instruction |= Rm;
      /* Rotation field.  */
      inst.instruction |= inst.operands[2].imm << 4;
    }
  else
    {
      /* Non-unified 16-bit syntax: no rotation and no high registers.  */
      constraint (inst.operands[2].present && inst.operands[2].imm != 0,
		  _("Thumb encoding does not support rotation"));
      constraint (1, BAD_HIREG);
    }
}
  12331. static void
  12332. do_t_swi (void)
  12333. {
  12334. inst.relocs[0].type = BFD_RELOC_ARM_SWI;
  12335. }
  12336. static void
  12337. do_t_tb (void)
  12338. {
  12339. unsigned Rn, Rm;
  12340. int half;
  12341. half = (inst.instruction & 0x10) != 0;
  12342. set_pred_insn_type_last ();
  12343. constraint (inst.operands[0].immisreg,
  12344. _("instruction requires register index"));
  12345. Rn = inst.operands[0].reg;
  12346. Rm = inst.operands[0].imm;
  12347. if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8))
  12348. constraint (Rn == REG_SP, BAD_SP);
  12349. reject_bad_reg (Rm);
  12350. constraint (!half && inst.operands[0].shifted,
  12351. _("instruction does not allow shifted index"));
  12352. inst.instruction |= (Rn << 16) | Rm;
  12353. }
  12354. static void
  12355. do_t_udf (void)
  12356. {
  12357. if (!inst.operands[0].present)
  12358. inst.operands[0].imm = 0;
  12359. if ((unsigned int) inst.operands[0].imm > 255 || inst.size_req == 4)
  12360. {
  12361. constraint (inst.size_req == 2,
  12362. _("immediate value out of range"));
  12363. inst.instruction = THUMB_OP32 (inst.instruction);
  12364. inst.instruction |= (inst.operands[0].imm & 0xf000u) << 4;
  12365. inst.instruction |= (inst.operands[0].imm & 0x0fffu) << 0;
  12366. }
  12367. else
  12368. {
  12369. inst.instruction = THUMB_OP16 (inst.instruction);
  12370. inst.instruction |= inst.operands[0].imm;
  12371. }
  12372. set_pred_insn_type (NEUTRAL_IT_INSN);
  12373. }
  12374. static void
  12375. do_t_usat (void)
  12376. {
  12377. do_t_ssat_usat (0);
  12378. }
  12379. static void
  12380. do_t_usat16 (void)
  12381. {
  12382. unsigned Rd, Rn;
  12383. Rd = inst.operands[0].reg;
  12384. Rn = inst.operands[2].reg;
  12385. reject_bad_reg (Rd);
  12386. reject_bad_reg (Rn);
  12387. inst.instruction |= Rd << 8;
  12388. inst.instruction |= inst.operands[1].imm;
  12389. inst.instruction |= Rn << 16;
  12390. }
  12391. /* Checking the range of the branch offset (VAL) with NBITS bits
  12392. and IS_SIGNED signedness. Also checks the LSB to be 0. */
  12393. static int
  12394. v8_1_branch_value_check (int val, int nbits, int is_signed)
  12395. {
  12396. gas_assert (nbits > 0 && nbits <= 32);
  12397. if (is_signed)
  12398. {
  12399. int cmp = (1 << (nbits - 1));
  12400. if ((val < -cmp) || (val >= cmp) || (val & 0x01))
  12401. return FAIL;
  12402. }
  12403. else
  12404. {
  12405. if ((val <= 0) || (val >= (1 << nbits)) || (val & 0x1))
  12406. return FAIL;
  12407. }
  12408. return SUCCESS;
  12409. }
/* For branches in Armv8.1-M Mainline (BF, BFL, BFCSEL, BFX, BFLX).  */
static void
do_t_branch_future (void)
{
  unsigned long insn = inst.instruction;

  inst.instruction = THUMB_OP32 (inst.instruction);

  /* Operand 0: the branch-future point, either resolved now or left to
     a 5-bit PC-relative relocation.  */
  if (inst.operands[0].hasreloc == 0)
    {
      if (v8_1_branch_value_check (inst.operands[0].imm, 5, false) == FAIL)
	as_bad (BAD_BRANCH_OFF);

      inst.instruction |= ((inst.operands[0].imm & 0x1f) >> 1) << 23;
    }
  else
    {
      inst.relocs[0].type = BFD_RELOC_THUMB_PCREL_BRANCH5;
      inst.relocs[0].pc_rel = 1;
    }

  switch (insn)
    {
    case T_MNEM_bf:
      if (inst.operands[1].hasreloc == 0)
	{
	  int val = inst.operands[1].imm;

	  if (v8_1_branch_value_check (inst.operands[1].imm, 17, true) == FAIL)
	    as_bad (BAD_BRANCH_OFF);

	  /* Split the 17-bit offset into the immA:immB:immC fields.  */
	  int immA = (val & 0x0001f000) >> 12;
	  int immB = (val & 0x00000ffc) >> 2;
	  int immC = (val & 0x00000002) >> 1;
	  inst.instruction |= (immA << 16) | (immB << 1) | (immC << 11);
	}
      else
	{
	  inst.relocs[1].type = BFD_RELOC_ARM_THUMB_BF17;
	  inst.relocs[1].pc_rel = 1;
	}
      break;

    case T_MNEM_bfl:
      if (inst.operands[1].hasreloc == 0)
	{
	  int val = inst.operands[1].imm;

	  if (v8_1_branch_value_check (inst.operands[1].imm, 19, true) == FAIL)
	    as_bad (BAD_BRANCH_OFF);

	  /* Split the 19-bit offset into the immA:immB:immC fields.  */
	  int immA = (val & 0x0007f000) >> 12;
	  int immB = (val & 0x00000ffc) >> 2;
	  int immC = (val & 0x00000002) >> 1;
	  inst.instruction |= (immA << 16) | (immB << 1) | (immC << 11);
	}
      else
	{
	  inst.relocs[1].type = BFD_RELOC_ARM_THUMB_BF19;
	  inst.relocs[1].pc_rel = 1;
	}
      break;

    case T_MNEM_bfcsel:
      /* Operand 1.  */
      if (inst.operands[1].hasreloc == 0)
	{
	  int val = inst.operands[1].imm;
	  int immA = (val & 0x00001000) >> 12;
	  int immB = (val & 0x00000ffc) >> 2;
	  int immC = (val & 0x00000002) >> 1;
	  inst.instruction |= (immA << 16) | (immB << 1) | (immC << 11);
	}
      else
	{
	  inst.relocs[1].type = BFD_RELOC_ARM_THUMB_BF13;
	  inst.relocs[1].pc_rel = 1;
	}

      /* Operand 2: must lie 2 or 4 bytes past operand 0; a distance of
	 4 sets the T bit.  */
      if (inst.operands[2].hasreloc == 0)
	{
	  constraint ((inst.operands[0].hasreloc != 0), BAD_ARGS);
	  int val2 = inst.operands[2].imm;
	  int val0 = inst.operands[0].imm & 0x1f;
	  int diff = val2 - val0;
	  if (diff == 4)
	    inst.instruction |= 1 << 17;  /* T bit.  */
	  else if (diff != 2)
	    as_bad (_("out of range label-relative fixup value"));
	}
      else
	{
	  /* Operands 0 and 2 must both be symbolic, or both constant.  */
	  constraint ((inst.operands[0].hasreloc == 0), BAD_ARGS);
	  inst.relocs[2].type = BFD_RELOC_THUMB_PCREL_BFCSEL;
	  inst.relocs[2].pc_rel = 1;
	}

      /* Operand 3.  */
      constraint (inst.cond != COND_ALWAYS, BAD_COND);
      inst.instruction |= (inst.operands[3].imm & 0xf) << 18;
      break;

    case T_MNEM_bfx:
    case T_MNEM_bflx:
      inst.instruction |= inst.operands[1].reg << 16;
      break;

    default: abort ();
    }
}
  12507. /* Helper function for do_t_loloop to handle relocations. */
  12508. static void
  12509. v8_1_loop_reloc (int is_le)
  12510. {
  12511. if (inst.relocs[0].exp.X_op == O_constant)
  12512. {
  12513. int value = inst.relocs[0].exp.X_add_number;
  12514. value = (is_le) ? -value : value;
  12515. if (v8_1_branch_value_check (value, 12, false) == FAIL)
  12516. as_bad (BAD_BRANCH_OFF);
  12517. int imml, immh;
  12518. immh = (value & 0x00000ffc) >> 2;
  12519. imml = (value & 0x00000002) >> 1;
  12520. inst.instruction |= (imml << 11) | (immh << 1);
  12521. }
  12522. else
  12523. {
  12524. inst.relocs[0].type = BFD_RELOC_ARM_THUMB_LOOP12;
  12525. inst.relocs[0].pc_rel = 1;
  12526. }
  12527. }
  12528. /* For shifts with four operands in MVE. */
  12529. static void
  12530. do_mve_scalar_shift1 (void)
  12531. {
  12532. unsigned int value = inst.operands[2].imm;
  12533. inst.instruction |= inst.operands[0].reg << 16;
  12534. inst.instruction |= inst.operands[1].reg << 8;
  12535. /* Setting the bit for saturation. */
  12536. inst.instruction |= ((value == 64) ? 0: 1) << 7;
  12537. /* Assuming Rm is already checked not to be 11x1. */
  12538. constraint (inst.operands[3].reg == inst.operands[0].reg, BAD_OVERLAP);
  12539. constraint (inst.operands[3].reg == inst.operands[1].reg, BAD_OVERLAP);
  12540. inst.instruction |= inst.operands[3].reg << 12;
  12541. }
  12542. /* For shifts in MVE. */
  12543. static void
  12544. do_mve_scalar_shift (void)
  12545. {
  12546. if (!inst.operands[2].present)
  12547. {
  12548. inst.operands[2] = inst.operands[1];
  12549. inst.operands[1].reg = 0xf;
  12550. }
  12551. inst.instruction |= inst.operands[0].reg << 16;
  12552. inst.instruction |= inst.operands[1].reg << 8;
  12553. if (inst.operands[2].isreg)
  12554. {
  12555. /* Assuming Rm is already checked not to be 11x1. */
  12556. constraint (inst.operands[2].reg == inst.operands[0].reg, BAD_OVERLAP);
  12557. constraint (inst.operands[2].reg == inst.operands[1].reg, BAD_OVERLAP);
  12558. inst.instruction |= inst.operands[2].reg << 12;
  12559. }
  12560. else
  12561. {
  12562. /* Assuming imm is already checked as [1,32]. */
  12563. unsigned int value = inst.operands[2].imm;
  12564. inst.instruction |= (value & 0x1c) << 10;
  12565. inst.instruction |= (value & 0x03) << 6;
  12566. /* Change last 4 bits from 0xd to 0xf. */
  12567. inst.instruction |= 0x2;
  12568. }
  12569. }
  12570. /* MVE instruction encoder helpers. */
  12571. #define M_MNEM_vabav 0xee800f01
  12572. #define M_MNEM_vmladav 0xeef00e00
  12573. #define M_MNEM_vmladava 0xeef00e20
  12574. #define M_MNEM_vmladavx 0xeef01e00
  12575. #define M_MNEM_vmladavax 0xeef01e20
  12576. #define M_MNEM_vmlsdav 0xeef00e01
  12577. #define M_MNEM_vmlsdava 0xeef00e21
  12578. #define M_MNEM_vmlsdavx 0xeef01e01
  12579. #define M_MNEM_vmlsdavax 0xeef01e21
  12580. #define M_MNEM_vmullt 0xee011e00
  12581. #define M_MNEM_vmullb 0xee010e00
  12582. #define M_MNEM_vctp 0xf000e801
  12583. #define M_MNEM_vst20 0xfc801e00
  12584. #define M_MNEM_vst21 0xfc801e20
  12585. #define M_MNEM_vst40 0xfc801e01
  12586. #define M_MNEM_vst41 0xfc801e21
  12587. #define M_MNEM_vst42 0xfc801e41
  12588. #define M_MNEM_vst43 0xfc801e61
  12589. #define M_MNEM_vld20 0xfc901e00
  12590. #define M_MNEM_vld21 0xfc901e20
  12591. #define M_MNEM_vld40 0xfc901e01
  12592. #define M_MNEM_vld41 0xfc901e21
  12593. #define M_MNEM_vld42 0xfc901e41
  12594. #define M_MNEM_vld43 0xfc901e61
  12595. #define M_MNEM_vstrb 0xec000e00
  12596. #define M_MNEM_vstrh 0xec000e10
  12597. #define M_MNEM_vstrw 0xec000e40
  12598. #define M_MNEM_vstrd 0xec000e50
  12599. #define M_MNEM_vldrb 0xec100e00
  12600. #define M_MNEM_vldrh 0xec100e10
  12601. #define M_MNEM_vldrw 0xec100e40
  12602. #define M_MNEM_vldrd 0xec100e50
  12603. #define M_MNEM_vmovlt 0xeea01f40
  12604. #define M_MNEM_vmovlb 0xeea00f40
  12605. #define M_MNEM_vmovnt 0xfe311e81
  12606. #define M_MNEM_vmovnb 0xfe310e81
  12607. #define M_MNEM_vadc 0xee300f00
  12608. #define M_MNEM_vadci 0xee301f00
  12609. #define M_MNEM_vbrsr 0xfe011e60
  12610. #define M_MNEM_vaddlv 0xee890f00
  12611. #define M_MNEM_vaddlva 0xee890f20
  12612. #define M_MNEM_vaddv 0xeef10f00
  12613. #define M_MNEM_vaddva 0xeef10f20
  12614. #define M_MNEM_vddup 0xee011f6e
  12615. #define M_MNEM_vdwdup 0xee011f60
  12616. #define M_MNEM_vidup 0xee010f6e
  12617. #define M_MNEM_viwdup 0xee010f60
  12618. #define M_MNEM_vmaxv 0xeee20f00
  12619. #define M_MNEM_vmaxav 0xeee00f00
  12620. #define M_MNEM_vminv 0xeee20f80
  12621. #define M_MNEM_vminav 0xeee00f80
  12622. #define M_MNEM_vmlaldav 0xee800e00
  12623. #define M_MNEM_vmlaldava 0xee800e20
  12624. #define M_MNEM_vmlaldavx 0xee801e00
  12625. #define M_MNEM_vmlaldavax 0xee801e20
  12626. #define M_MNEM_vmlsldav 0xee800e01
  12627. #define M_MNEM_vmlsldava 0xee800e21
  12628. #define M_MNEM_vmlsldavx 0xee801e01
  12629. #define M_MNEM_vmlsldavax 0xee801e21
  12630. #define M_MNEM_vrmlaldavhx 0xee801f00
  12631. #define M_MNEM_vrmlaldavhax 0xee801f20
  12632. #define M_MNEM_vrmlsldavh 0xfe800e01
  12633. #define M_MNEM_vrmlsldavha 0xfe800e21
  12634. #define M_MNEM_vrmlsldavhx 0xfe801e01
  12635. #define M_MNEM_vrmlsldavhax 0xfe801e21
  12636. #define M_MNEM_vqmovnt 0xee331e01
  12637. #define M_MNEM_vqmovnb 0xee330e01
  12638. #define M_MNEM_vqmovunt 0xee311e81
  12639. #define M_MNEM_vqmovunb 0xee310e81
  12640. #define M_MNEM_vshrnt 0xee801fc1
  12641. #define M_MNEM_vshrnb 0xee800fc1
  12642. #define M_MNEM_vrshrnt 0xfe801fc1
  12643. #define M_MNEM_vqshrnt 0xee801f40
  12644. #define M_MNEM_vqshrnb 0xee800f40
  12645. #define M_MNEM_vqshrunt 0xee801fc0
  12646. #define M_MNEM_vqshrunb 0xee800fc0
  12647. #define M_MNEM_vrshrnb 0xfe800fc1
  12648. #define M_MNEM_vqrshrnt 0xee801f41
  12649. #define M_MNEM_vqrshrnb 0xee800f41
  12650. #define M_MNEM_vqrshrunt 0xfe801fc0
  12651. #define M_MNEM_vqrshrunb 0xfe800fc0
  12652. /* Bfloat16 instruction encoder helpers. */
  12653. #define B_MNEM_vfmat 0xfc300850
  12654. #define B_MNEM_vfmab 0xfc300810
  12655. /* Neon instruction encoder helpers. */
  12656. /* Encodings for the different types for various Neon opcodes. */
  12657. /* An "invalid" code for the following tables. */
  12658. #define N_INV -1u
/* One row of the Neon opcode-overloading table: the encodings used for
   the different variants of an overloaded mnemonic (N_INV marks a
   variant that does not exist).  */
struct neon_tab_entry
{
  unsigned integer;		/* Integer-type encoding.  */
  unsigned float_or_poly;	/* Float or polynomial encoding.  */
  unsigned scalar_or_imm;	/* Scalar or immediate encoding.  */
};
/* Map overloaded Neon opcodes to their respective encodings.  Each X()
   row gives, in order: the mnemonic, the integer-variant encoding, the
   float-or-polynomial variant encoding and the scalar-or-immediate
   variant encoding (N_INV where no such variant exists).  The table is
   expanded twice below: once into enum neon_opc and once into
   neon_enc_tab[].  */
#define NEON_ENC_TAB \
  X(vabd, 0x0000700, 0x1200d00, N_INV), \
  X(vabdl, 0x0800700, N_INV, N_INV), \
  X(vmax, 0x0000600, 0x0000f00, N_INV), \
  X(vmin, 0x0000610, 0x0200f00, N_INV), \
  X(vpadd, 0x0000b10, 0x1000d00, N_INV), \
  X(vpmax, 0x0000a00, 0x1000f00, N_INV), \
  X(vpmin, 0x0000a10, 0x1200f00, N_INV), \
  X(vadd, 0x0000800, 0x0000d00, N_INV), \
  X(vaddl, 0x0800000, N_INV, N_INV), \
  X(vsub, 0x1000800, 0x0200d00, N_INV), \
  X(vsubl, 0x0800200, N_INV, N_INV), \
  X(vceq, 0x1000810, 0x0000e00, 0x1b10100), \
  X(vcge, 0x0000310, 0x1000e00, 0x1b10080), \
  X(vcgt, 0x0000300, 0x1200e00, 0x1b10000), \
  /* Register variants of the following two instructions are encoded as
     vcge / vcgt with the operands reversed.  */ \
  X(vclt, 0x0000300, 0x1200e00, 0x1b10200), \
  X(vcle, 0x0000310, 0x1000e00, 0x1b10180), \
  X(vfma, N_INV, 0x0000c10, N_INV), \
  X(vfms, N_INV, 0x0200c10, N_INV), \
  X(vmla, 0x0000900, 0x0000d10, 0x0800040), \
  X(vmls, 0x1000900, 0x0200d10, 0x0800440), \
  X(vmul, 0x0000910, 0x1000d10, 0x0800840), \
  X(vmull, 0x0800c00, 0x0800e00, 0x0800a40), /* polynomial not float.  */ \
  X(vmlal, 0x0800800, N_INV, 0x0800240), \
  X(vmlsl, 0x0800a00, N_INV, 0x0800640), \
  X(vqdmlal, 0x0800900, N_INV, 0x0800340), \
  X(vqdmlsl, 0x0800b00, N_INV, 0x0800740), \
  X(vqdmull, 0x0800d00, N_INV, 0x0800b40), \
  X(vqdmulh, 0x0000b00, N_INV, 0x0800c40), \
  X(vqrdmulh, 0x1000b00, N_INV, 0x0800d40), \
  X(vqrdmlah, 0x3000b10, N_INV, 0x0800e40), \
  X(vqrdmlsh, 0x3000c10, N_INV, 0x0800f40), \
  X(vshl, 0x0000400, N_INV, 0x0800510), \
  X(vqshl, 0x0000410, N_INV, 0x0800710), \
  X(vand, 0x0000110, N_INV, 0x0800030), \
  X(vbic, 0x0100110, N_INV, 0x0800030), \
  X(veor, 0x1000110, N_INV, N_INV), \
  X(vorn, 0x0300110, N_INV, 0x0800010), \
  X(vorr, 0x0200110, N_INV, 0x0800010), \
  X(vmvn, 0x1b00580, N_INV, 0x0800030), \
  X(vshll, 0x1b20300, N_INV, 0x0800a10), /* max shift, immediate.  */ \
  X(vcvt, 0x1b30600, N_INV, 0x0800e10), /* integer, fixed-point.  */ \
  X(vdup, 0xe800b10, N_INV, 0x1b00c00), /* arm, scalar.  */ \
  X(vld1, 0x0200000, 0x0a00000, 0x0a00c00), /* interlv, lane, dup.  */ \
  X(vst1, 0x0000000, 0x0800000, N_INV), \
  X(vld2, 0x0200100, 0x0a00100, 0x0a00d00), \
  X(vst2, 0x0000100, 0x0800100, N_INV), \
  X(vld3, 0x0200200, 0x0a00200, 0x0a00e00), \
  X(vst3, 0x0000200, 0x0800200, N_INV), \
  X(vld4, 0x0200300, 0x0a00300, 0x0a00f00), \
  X(vst4, 0x0000300, 0x0800300, N_INV), \
  X(vmovn, 0x1b20200, N_INV, N_INV), \
  X(vtrn, 0x1b20080, N_INV, N_INV), \
  X(vqmovn, 0x1b20200, N_INV, N_INV), \
  X(vqmovun, 0x1b20240, N_INV, N_INV), \
  X(vnmul, 0xe200a40, 0xe200b40, N_INV), \
  X(vnmla, 0xe100a40, 0xe100b40, N_INV), \
  X(vnmls, 0xe100a00, 0xe100b00, N_INV), \
  X(vfnma, 0xe900a40, 0xe900b40, N_INV), \
  X(vfnms, 0xe900a00, 0xe900b00, N_INV), \
  X(vcmp, 0xeb40a40, 0xeb40b40, N_INV), \
  X(vcmpz, 0xeb50a40, 0xeb50b40, N_INV), \
  X(vcmpe, 0xeb40ac0, 0xeb40bc0, N_INV), \
  X(vcmpez, 0xeb50ac0, 0xeb50bc0, N_INV), \
  X(vseleq, 0xe000a00, N_INV, N_INV), \
  X(vselvs, 0xe100a00, N_INV, N_INV), \
  X(vselge, 0xe200a00, N_INV, N_INV), \
  X(vselgt, 0xe300a00, N_INV, N_INV), \
  X(vmaxnm, 0xe800a00, 0x3000f10, N_INV), \
  X(vminnm, 0xe800a40, 0x3200f10, N_INV), \
  X(vcvta, 0xebc0a40, 0x3bb0000, N_INV), \
  X(vrintr, 0xeb60a40, 0x3ba0400, N_INV), \
  X(vrinta, 0xeb80a40, 0x3ba0400, N_INV), \
  X(aes, 0x3b00300, N_INV, N_INV), \
  X(sha3op, 0x2000c00, N_INV, N_INV), \
  X(sha1h, 0x3b902c0, N_INV, N_INV), \
  X(sha2op, 0x3ba0380, N_INV, N_INV)
/* Expand NEON_ENC_TAB into the N_MNEM_* enumeration; each enumerator is
   the index of its mnemonic's row in neon_enc_tab[] below.  */
enum neon_opc
{
#define X(OPC,I,F,S) N_MNEM_##OPC
NEON_ENC_TAB
#undef X
};

/* Second expansion of NEON_ENC_TAB: the encoding table itself, indexed
   by the N_MNEM_* values above.  */
static const struct neon_tab_entry neon_enc_tab[] =
{
#define X(OPC,I,F,S) { (I), (F), (S) }
NEON_ENC_TAB
#undef X
};

/* Do not use these macros; instead, use NEON_ENCODE defined below.
   The low 28 bits of X carry an N_MNEM_* row index; the macro name
   selects which column of the row to use.  */
#define NEON_ENC_INTEGER_(X) (neon_enc_tab[(X) & 0x0fffffff].integer)
#define NEON_ENC_ARMREG_(X) (neon_enc_tab[(X) & 0x0fffffff].integer)
#define NEON_ENC_POLY_(X) (neon_enc_tab[(X) & 0x0fffffff].float_or_poly)
#define NEON_ENC_FLOAT_(X) (neon_enc_tab[(X) & 0x0fffffff].float_or_poly)
#define NEON_ENC_SCALAR_(X) (neon_enc_tab[(X) & 0x0fffffff].scalar_or_imm)
#define NEON_ENC_IMMED_(X) (neon_enc_tab[(X) & 0x0fffffff].scalar_or_imm)
#define NEON_ENC_INTERLV_(X) (neon_enc_tab[(X) & 0x0fffffff].integer)
#define NEON_ENC_LANE_(X) (neon_enc_tab[(X) & 0x0fffffff].float_or_poly)
#define NEON_ENC_DUP_(X) (neon_enc_tab[(X) & 0x0fffffff].scalar_or_imm)
/* SINGLE/DOUBLE additionally preserve the top four bits of X
   (mask 0xf0000000); FPV8 preserves bits 24-27 (mask 0xf000000).  */
#define NEON_ENC_SINGLE_(X) \
  ((neon_enc_tab[(X) & 0x0fffffff].integer) | ((X) & 0xf0000000))
#define NEON_ENC_DOUBLE_(X) \
  ((neon_enc_tab[(X) & 0x0fffffff].float_or_poly) | ((X) & 0xf0000000))
#define NEON_ENC_FPV8_(X) \
  ((neon_enc_tab[(X) & 0x0fffffff].integer) | ((X) & 0xf000000))

/* Rewrite inst.instruction with the TYPE column looked up from
   neon_enc_tab and flag the instruction as Neon.  */
#define NEON_ENCODE(type, inst) \
  do \
    { \
      inst.instruction = NEON_ENC_##type##_ (inst.instruction); \
      inst.is_neon = 1; \
    } \
  while (0)

/* Diagnose a type suffix (inst.vectype filled in) on an instruction
   that never went through a Neon encoder (inst.is_neon unset).  */
#define check_neon_suffixes \
  do \
    { \
      if (!inst.error && inst.vectype.elems > 0 && !inst.is_neon) \
	{ \
	  as_bad (_("invalid neon suffix for non neon instruction")); \
	  return; \
	} \
    } \
  while (0)
/* Define shapes for instruction operands. The following mnemonic characters
   are used in this table:

     F - VFP S<n> register
     D - Neon D<n> register
     Q - Neon Q<n> register
     H - VFP S<n> register holding a half-precision (16-bit) value
     I - Immediate
     S - Scalar
     R - ARM register
     L - D<n> register list
     P - operand not checked by neon_select_shape
	 (NOTE(review): presumably a predicate/specifier operand -- confirm)

   This table is used to generate various data:
     - enumerations of the form NS_DDR to be used as arguments to
       neon_select_shape.
     - a table classifying shapes into single, double, quad, mixed.
     - a table used to drive neon_select_shape.  */

#define NEON_SHAPE_DEF \
  X(4, (R, R, Q, Q), QUAD), \
  X(4, (Q, R, R, I), QUAD), \
  X(4, (R, R, S, S), QUAD), \
  X(4, (S, S, R, R), QUAD), \
  X(3, (Q, R, I), QUAD), \
  X(3, (I, Q, Q), QUAD), \
  X(3, (I, Q, R), QUAD), \
  X(3, (R, Q, Q), QUAD), \
  X(3, (D, D, D), DOUBLE), \
  X(3, (Q, Q, Q), QUAD), \
  X(3, (D, D, I), DOUBLE), \
  X(3, (Q, Q, I), QUAD), \
  X(3, (D, D, S), DOUBLE), \
  X(3, (Q, Q, S), QUAD), \
  X(3, (Q, Q, R), QUAD), \
  X(3, (R, R, Q), QUAD), \
  X(2, (R, Q), QUAD), \
  X(2, (D, D), DOUBLE), \
  X(2, (Q, Q), QUAD), \
  X(2, (D, S), DOUBLE), \
  X(2, (Q, S), QUAD), \
  X(2, (D, R), DOUBLE), \
  X(2, (Q, R), QUAD), \
  X(2, (D, I), DOUBLE), \
  X(2, (Q, I), QUAD), \
  X(3, (P, F, I), SINGLE), \
  X(3, (P, D, I), DOUBLE), \
  X(3, (P, Q, I), QUAD), \
  X(4, (P, F, F, I), SINGLE), \
  X(4, (P, D, D, I), DOUBLE), \
  X(4, (P, Q, Q, I), QUAD), \
  X(5, (P, F, F, F, I), SINGLE), \
  X(5, (P, D, D, D, I), DOUBLE), \
  X(5, (P, Q, Q, Q, I), QUAD), \
  X(3, (D, L, D), DOUBLE), \
  X(2, (D, Q), MIXED), \
  X(2, (Q, D), MIXED), \
  X(3, (D, Q, I), MIXED), \
  X(3, (Q, D, I), MIXED), \
  X(3, (Q, D, D), MIXED), \
  X(3, (D, Q, Q), MIXED), \
  X(3, (Q, Q, D), MIXED), \
  X(3, (Q, D, S), MIXED), \
  X(3, (D, Q, S), MIXED), \
  X(4, (D, D, D, I), DOUBLE), \
  X(4, (Q, Q, Q, I), QUAD), \
  X(4, (D, D, S, I), DOUBLE), \
  X(4, (Q, Q, S, I), QUAD), \
  X(2, (F, F), SINGLE), \
  X(3, (F, F, F), SINGLE), \
  X(2, (F, I), SINGLE), \
  X(2, (F, D), MIXED), \
  X(2, (D, F), MIXED), \
  X(3, (F, F, I), MIXED), \
  X(4, (R, R, F, F), SINGLE), \
  X(4, (F, F, R, R), SINGLE), \
  X(3, (D, R, R), DOUBLE), \
  X(3, (R, R, D), DOUBLE), \
  X(2, (S, R), SINGLE), \
  X(2, (R, S), SINGLE), \
  X(2, (F, R), SINGLE), \
  X(2, (R, F), SINGLE), \
/* Used for MVE tail predicated loop instructions.  */\
  X(2, (R, R), QUAD), \
/* Half float shape supported so far.  */\
  X (2, (H, D), MIXED), \
  X (2, (D, H), MIXED), \
  X (2, (H, F), MIXED), \
  X (2, (F, H), MIXED), \
  X (2, (H, H), HALF), \
  X (2, (H, R), HALF), \
  X (2, (R, H), HALF), \
  X (2, (H, I), HALF), \
  X (3, (H, H, H), HALF), \
  X (3, (H, F, I), MIXED), \
  X (3, (F, H, I), MIXED), \
  X (3, (D, H, H), MIXED), \
  X (3, (D, H, S), MIXED)

/* First expansion: build NS_* enumerators (e.g. NS_DDD) by pasting the
   operand letters together.  */
#define S2(A,B) NS_##A##B
#define S3(A,B,C) NS_##A##B##C
#define S4(A,B,C,D) NS_##A##B##C##D
#define S5(A,B,C,D,E) NS_##A##B##C##D##E
#define X(N, L, C) S##N L

enum neon_shape
{
  NEON_SHAPE_DEF,
  NS_NULL
};

#undef X
#undef S2
#undef S3
#undef S4
#undef S5

enum neon_shape_class
{
  SC_HALF,
  SC_SINGLE,
  SC_DOUBLE,
  SC_QUAD,
  SC_MIXED
};

/* Second expansion: classification of each shape, indexed by the
   enum neon_shape value.  */
#define X(N, L, C) SC_##C

static enum neon_shape_class neon_shape_class[] =
{
  NEON_SHAPE_DEF
};

#undef X

/* Per-operand kinds, matching the mnemonic letters in the legend.  */
enum neon_shape_el
{
  SE_H,
  SE_F,
  SE_D,
  SE_Q,
  SE_I,
  SE_S,
  SE_R,
  SE_L,
  SE_P
};

/* Register widths of above.  */
static unsigned neon_shape_el_size[] =
{
  16,	/* SE_H.  */
  32,	/* SE_F.  */
  64,	/* SE_D.  */
  128,	/* SE_Q.  */
  0,	/* SE_I.  */
  32,	/* SE_S.  */
  32,	/* SE_R.  */
  0,	/* SE_L.  */
  0	/* SE_P.  */
};

/* Expanded form of one shape: operand count plus per-operand kinds.  */
struct neon_shape_info
{
  unsigned els;
  enum neon_shape_el el[NEON_MAX_TYPE_ELS];
};

/* Third expansion: the table driving neon_select_shape.  */
#define S2(A,B) { SE_##A, SE_##B }
#define S3(A,B,C) { SE_##A, SE_##B, SE_##C }
#define S4(A,B,C,D) { SE_##A, SE_##B, SE_##C, SE_##D }
#define S5(A,B,C,D,E) { SE_##A, SE_##B, SE_##C, SE_##D, SE_##E }
#define X(N, L, C) { N, S##N L }

static struct neon_shape_info neon_shape_tab[] =
{
  NEON_SHAPE_DEF
};

#undef X
#undef S2
#undef S3
#undef S4
#undef S5
/* Bit masks used in type checking given instructions.
  'N_EQK' means the type must be the same as (or based on in some way) the key
   type, which itself is marked with the 'N_KEY' bit. If the 'N_EQK' bit is
   set, various other bits can be set as well in order to modify the meaning of
   the type constraint.  */

enum neon_type_mask
{
  N_S8 = 0x0000001,
  N_S16 = 0x0000002,
  N_S32 = 0x0000004,
  N_S64 = 0x0000008,
  N_U8 = 0x0000010,
  N_U16 = 0x0000020,
  N_U32 = 0x0000040,
  N_U64 = 0x0000080,
  N_I8 = 0x0000100,
  N_I16 = 0x0000200,
  N_I32 = 0x0000400,
  N_I64 = 0x0000800,
  N_8 = 0x0001000,
  N_16 = 0x0002000,
  N_32 = 0x0004000,
  N_64 = 0x0008000,
  N_P8 = 0x0010000,
  N_P16 = 0x0020000,
  N_F16 = 0x0040000,
  N_F32 = 0x0080000,
  N_F64 = 0x0100000,
  N_P64 = 0x0200000,
  N_BF16 = 0x0400000,
  N_KEY = 0x1000000, /* Key element (main type specifier).  */
  N_EQK = 0x2000000, /* Given operand has the same type & size as the key.  */
  N_VFP = 0x4000000, /* VFP mode: operand size must match register width.  */
  N_UNT = 0x8000000, /* Must be explicitly untyped.  */
  /* The modifier bits below reuse the low type-bit values; they are only
     interpreted when N_EQK is also set.  */
  N_DBL = 0x0000001, /* If N_EQK, this operand is twice the size.  */
  N_HLF = 0x0000002, /* If N_EQK, this operand is half the size.  */
  N_SGN = 0x0000004, /* If N_EQK, this operand is forced to be signed.  */
  N_UNS = 0x0000008, /* If N_EQK, this operand is forced to be unsigned.  */
  N_INT = 0x0000010, /* If N_EQK, this operand is forced to be integer.  */
  N_FLT = 0x0000020, /* If N_EQK, this operand is forced to be float.  */
  N_SIZ = 0x0000040, /* If N_EQK, this operand is forced to be size-only.  */
  N_UTYP = 0,
  N_MAX_NONSPECIAL = N_P64
};

/* Commonly-used combinations of the type bits above.  */
#define N_ALLMODS (N_DBL | N_HLF | N_SGN | N_UNS | N_INT | N_FLT | N_SIZ)

#define N_SU_ALL (N_S8 | N_S16 | N_S32 | N_S64 | N_U8 | N_U16 | N_U32 | N_U64)
#define N_SU_32 (N_S8 | N_S16 | N_S32 | N_U8 | N_U16 | N_U32)
#define N_SU_16_64 (N_S16 | N_S32 | N_S64 | N_U16 | N_U32 | N_U64)
#define N_S_32 (N_S8 | N_S16 | N_S32)
#define N_F_16_32 (N_F16 | N_F32)
#define N_SUF_32 (N_SU_32 | N_F_16_32)
#define N_I_ALL (N_I8 | N_I16 | N_I32 | N_I64)
#define N_IF_32 (N_I8 | N_I16 | N_I32 | N_F16 | N_F32)
#define N_F_ALL (N_F16 | N_F32 | N_F64)
#define N_I_MVE (N_I8 | N_I16 | N_I32)
#define N_F_MVE (N_F16 | N_F32)
#define N_SU_MVE (N_S8 | N_S16 | N_S32 | N_U8 | N_U16 | N_U32)

/* Pass this as the first type argument to neon_check_type to ignore types
   altogether.  */
#define N_IGNORE_TYPE (N_KEY | N_EQK)
/* Select a "shape" for the current instruction (describing register types or
   sizes) from a list of alternatives. Return NS_NULL if the current instruction
   doesn't fit. For non-polymorphic shapes, checking is usually done as a
   function of operand parsing, so this function doesn't need to be called.
   Shapes should be listed in order of decreasing length.  */

static enum neon_shape
neon_select_shape (enum neon_shape shape, ...)
{
  va_list ap;
  enum neon_shape first_shape = shape;

  /* Fix missing optional operands. FIXME: we don't know at this point how
     many arguments we should have, so this makes the assumption that we have
     > 1. This is true of all current Neon opcodes, I think, but may not be
     true in the future.  */
  if (!inst.operands[1].present)
    inst.operands[1] = inst.operands[0];

  va_start (ap, shape);

  /* Try each candidate shape in turn; the first one whose every element
     matches the corresponding parsed operand wins.  */
  for (; shape != NS_NULL; shape = (enum neon_shape) va_arg (ap, int))
    {
      unsigned j;
      int matches = 1;

      for (j = 0; j < neon_shape_tab[shape].els; j++)
	{
	  if (!inst.operands[j].present)
	    {
	      matches = 0;
	      break;
	    }

	  switch (neon_shape_tab[shape].el[j])
	    {
	      /* If a .f16, .16, .u16 or .s16 type specifier is given over
		 a VFP single precision register operand, it essentially
		 means that only half of the register is used.

		 If the type specifier is given after the mnemonic, the
		 information is stored in inst.vectype.  If the type specifier
		 is given after a register operand, the information is stored
		 in inst.operands[].vectype.

		 When there is only one type specifier, and all the register
		 operands are the same type of hardware register, the type
		 specifier applies to all register operands.

		 If no type specifier is given, the shape is inferred from
		 operand information.

		 for example:
		 vadd.f16 s0, s1, s2:		NS_HHH
		 vabs.f16 s0, s1:		NS_HH
		 vmov.f16 s0, r1:		NS_HR
		 vmov.f16 r0, s1:		NS_RH
		 vcvt.f16 r0, s1:		NS_RH
		 vcvt.f16.s32	s2, s2, #29:	NS_HFI
		 vcvt.f16.s32	s2, s2:		NS_HF
	      */
	    case SE_H:
	      /* Half precision: a single-precision VFP register carrying a
		 16-bit type from either the mnemonic or the operand.  */
	      if (!(inst.operands[j].isreg
		    && inst.operands[j].isvec
		    && inst.operands[j].issingle
		    && !inst.operands[j].isquad
		    && ((inst.vectype.elems == 1
			 && inst.vectype.el[0].size == 16)
			|| (inst.vectype.elems > 1
			    && inst.vectype.el[j].size == 16)
			|| (inst.vectype.elems == 0
			    && inst.operands[j].vectype.type != NT_invtype
			    && inst.operands[j].vectype.size == 16))))
		matches = 0;
	      break;

	    case SE_F:
	      /* Single precision: either a 32-bit type was given, or no
		 type at all (the default for an S<n> register).  */
	      if (!(inst.operands[j].isreg
		    && inst.operands[j].isvec
		    && inst.operands[j].issingle
		    && !inst.operands[j].isquad
		    && ((inst.vectype.elems == 1 && inst.vectype.el[0].size == 32)
			|| (inst.vectype.elems > 1 && inst.vectype.el[j].size == 32)
			|| (inst.vectype.elems == 0
			    && (inst.operands[j].vectype.size == 32
				|| inst.operands[j].vectype.type == NT_invtype)))))
		matches = 0;
	      break;

	    case SE_D:
	      if (!(inst.operands[j].isreg
		    && inst.operands[j].isvec
		    && !inst.operands[j].isquad
		    && !inst.operands[j].issingle))
		matches = 0;
	      break;

	    case SE_R:
	      if (!(inst.operands[j].isreg
		    && !inst.operands[j].isvec))
		matches = 0;
	      break;

	    case SE_Q:
	      if (!(inst.operands[j].isreg
		    && inst.operands[j].isvec
		    && inst.operands[j].isquad
		    && !inst.operands[j].issingle))
		matches = 0;
	      break;

	    case SE_I:
	      if (!(!inst.operands[j].isreg
		    && !inst.operands[j].isscalar))
		matches = 0;
	      break;

	    case SE_S:
	      if (!(!inst.operands[j].isreg
		    && inst.operands[j].isscalar))
		matches = 0;
	      break;

	    case SE_P:
	    case SE_L:
	      /* No constraints are checked here for these elements.  */
	      break;
	    }

	  if (!matches)
	    break;
	}

      /* A shape matches only if every element matched AND there is no
	 extra parsed operand left over.  */
      if (matches && (j >= ARM_IT_MAX_OPERANDS || !inst.operands[j].present))
	/* We've matched all the entries in the shape table, and we don't
	   have any left over operands which have not been matched.  */
	break;
    }

  va_end (ap);

  if (shape == NS_NULL && first_shape != NS_NULL)
    first_error (_("invalid instruction shape"));

  return shape;
}
  13139. /* True if SHAPE is predominantly a quadword operation (most of the time, this
  13140. means the Q bit should be set). */
  13141. static int
  13142. neon_quad (enum neon_shape shape)
  13143. {
  13144. return neon_shape_class[shape] == SC_QUAD;
  13145. }
  13146. static void
  13147. neon_modify_type_size (unsigned typebits, enum neon_el_type *g_type,
  13148. unsigned *g_size)
  13149. {
  13150. /* Allow modification to be made to types which are constrained to be
  13151. based on the key element, based on bits set alongside N_EQK. */
  13152. if ((typebits & N_EQK) != 0)
  13153. {
  13154. if ((typebits & N_HLF) != 0)
  13155. *g_size /= 2;
  13156. else if ((typebits & N_DBL) != 0)
  13157. *g_size *= 2;
  13158. if ((typebits & N_SGN) != 0)
  13159. *g_type = NT_signed;
  13160. else if ((typebits & N_UNS) != 0)
  13161. *g_type = NT_unsigned;
  13162. else if ((typebits & N_INT) != 0)
  13163. *g_type = NT_integer;
  13164. else if ((typebits & N_FLT) != 0)
  13165. *g_type = NT_float;
  13166. else if ((typebits & N_SIZ) != 0)
  13167. *g_type = NT_untyped;
  13168. }
  13169. }
  13170. /* Return operand OPNO promoted by bits set in THISARG. KEY should be the "key"
  13171. operand type, i.e. the single type specified in a Neon instruction when it
  13172. is the only one given. */
  13173. static struct neon_type_el
  13174. neon_type_promote (struct neon_type_el *key, unsigned thisarg)
  13175. {
  13176. struct neon_type_el dest = *key;
  13177. gas_assert ((thisarg & N_EQK) != 0);
  13178. neon_modify_type_size (thisarg, &dest.type, &dest.size);
  13179. return dest;
  13180. }
  13181. /* Convert Neon type and size into compact bitmask representation. */
  13182. static enum neon_type_mask
  13183. type_chk_of_el_type (enum neon_el_type type, unsigned size)
  13184. {
  13185. switch (type)
  13186. {
  13187. case NT_untyped:
  13188. switch (size)
  13189. {
  13190. case 8: return N_8;
  13191. case 16: return N_16;
  13192. case 32: return N_32;
  13193. case 64: return N_64;
  13194. default: ;
  13195. }
  13196. break;
  13197. case NT_integer:
  13198. switch (size)
  13199. {
  13200. case 8: return N_I8;
  13201. case 16: return N_I16;
  13202. case 32: return N_I32;
  13203. case 64: return N_I64;
  13204. default: ;
  13205. }
  13206. break;
  13207. case NT_float:
  13208. switch (size)
  13209. {
  13210. case 16: return N_F16;
  13211. case 32: return N_F32;
  13212. case 64: return N_F64;
  13213. default: ;
  13214. }
  13215. break;
  13216. case NT_poly:
  13217. switch (size)
  13218. {
  13219. case 8: return N_P8;
  13220. case 16: return N_P16;
  13221. case 64: return N_P64;
  13222. default: ;
  13223. }
  13224. break;
  13225. case NT_signed:
  13226. switch (size)
  13227. {
  13228. case 8: return N_S8;
  13229. case 16: return N_S16;
  13230. case 32: return N_S32;
  13231. case 64: return N_S64;
  13232. default: ;
  13233. }
  13234. break;
  13235. case NT_unsigned:
  13236. switch (size)
  13237. {
  13238. case 8: return N_U8;
  13239. case 16: return N_U16;
  13240. case 32: return N_U32;
  13241. case 64: return N_U64;
  13242. default: ;
  13243. }
  13244. break;
  13245. case NT_bfloat:
  13246. if (size == 16) return N_BF16;
  13247. break;
  13248. default: ;
  13249. }
  13250. return N_UTYP;
  13251. }
  13252. /* Convert compact Neon bitmask type representation to a type and size. Only
  13253. handles the case where a single bit is set in the mask. */
  13254. static int
  13255. el_type_of_type_chk (enum neon_el_type *type, unsigned *size,
  13256. enum neon_type_mask mask)
  13257. {
  13258. if ((mask & N_EQK) != 0)
  13259. return FAIL;
  13260. if ((mask & (N_S8 | N_U8 | N_I8 | N_8 | N_P8)) != 0)
  13261. *size = 8;
  13262. else if ((mask & (N_S16 | N_U16 | N_I16 | N_16 | N_F16 | N_P16 | N_BF16))
  13263. != 0)
  13264. *size = 16;
  13265. else if ((mask & (N_S32 | N_U32 | N_I32 | N_32 | N_F32)) != 0)
  13266. *size = 32;
  13267. else if ((mask & (N_S64 | N_U64 | N_I64 | N_64 | N_F64 | N_P64)) != 0)
  13268. *size = 64;
  13269. else
  13270. return FAIL;
  13271. if ((mask & (N_S8 | N_S16 | N_S32 | N_S64)) != 0)
  13272. *type = NT_signed;
  13273. else if ((mask & (N_U8 | N_U16 | N_U32 | N_U64)) != 0)
  13274. *type = NT_unsigned;
  13275. else if ((mask & (N_I8 | N_I16 | N_I32 | N_I64)) != 0)
  13276. *type = NT_integer;
  13277. else if ((mask & (N_8 | N_16 | N_32 | N_64)) != 0)
  13278. *type = NT_untyped;
  13279. else if ((mask & (N_P8 | N_P16 | N_P64)) != 0)
  13280. *type = NT_poly;
  13281. else if ((mask & (N_F_ALL)) != 0)
  13282. *type = NT_float;
  13283. else if ((mask & (N_BF16)) != 0)
  13284. *type = NT_bfloat;
  13285. else
  13286. return FAIL;
  13287. return SUCCESS;
  13288. }
  13289. /* Modify a bitmask of allowed types. This is only needed for type
  13290. relaxation. */
  13291. static unsigned
  13292. modify_types_allowed (unsigned allowed, unsigned mods)
  13293. {
  13294. unsigned size;
  13295. enum neon_el_type type;
  13296. unsigned destmask;
  13297. int i;
  13298. destmask = 0;
  13299. for (i = 1; i <= N_MAX_NONSPECIAL; i <<= 1)
  13300. {
  13301. if (el_type_of_type_chk (&type, &size,
  13302. (enum neon_type_mask) (allowed & i)) == SUCCESS)
  13303. {
  13304. neon_modify_type_size (mods, &type, &size);
  13305. destmask |= type_chk_of_el_type (type, size);
  13306. }
  13307. }
  13308. return destmask;
  13309. }
/* Check type and return type classification.
   The manual states (paraphrase): If one datatype is given, it indicates the
   type given in:
    - the second operand, if there is one
    - the operand, if there is no second operand
    - the result, if there are no operands.
   This isn't quite good enough though, so we use a concept of a "key" datatype
   which is set on a per-instruction basis, which is the one which matters when
   only one data type is written.
   Note: this function has side-effects (e.g. filling in missing operands). All
   Neon instructions should call it before performing bit encoding.  */

static struct neon_type_el
neon_check_type (unsigned els, enum neon_shape ns, ...)
{
  va_list ap;
  unsigned i, pass, key_el = 0;
  unsigned types[NEON_MAX_TYPE_ELS];
  enum neon_el_type k_type = NT_invtype;
  unsigned k_size = -1u;
  /* Returned on any failure; callers test .type against NT_invtype.  */
  struct neon_type_el badtype = {NT_invtype, -1};
  unsigned key_allowed = 0;

  /* Optional registers in Neon instructions are always (not) in operand 1.
     Fill in the missing operand here, if it was omitted.  */
  if (els > 1 && !inst.operands[1].present)
    inst.operands[1] = inst.operands[0];

  /* Suck up all the varargs.  */
  va_start (ap, ns);
  for (i = 0; i < els; i++)
    {
      unsigned thisarg = va_arg (ap, unsigned);
      if (thisarg == N_IGNORE_TYPE)
	{
	  va_end (ap);
	  return badtype;
	}
      types[i] = thisarg;
      /* Remember which operand carries the key type.  */
      if ((thisarg & N_KEY) != 0)
	key_el = i;
    }
  va_end (ap);

  /* A type suffix on the mnemonic and a type on an operand are mutually
     exclusive.  */
  if (inst.vectype.elems > 0)
    for (i = 0; i < els; i++)
      if (inst.operands[i].vectype.type != NT_invtype)
	{
	  first_error (_("types specified in both the mnemonic and operands"));
	  return badtype;
	}

  /* Duplicate inst.vectype elements here as necessary.
     FIXME: No idea if this is exactly the same as the ARM assembler,
     particularly when an insn takes one register and one non-register
     operand. */
  if (inst.vectype.elems == 1 && els > 1)
    {
      unsigned j;
      /* One type given after the mnemonic: it is the key type; promote it
	 to fill in the other elements.  */
      inst.vectype.elems = els;
      inst.vectype.el[key_el] = inst.vectype.el[0];
      for (j = 0; j < els; j++)
	if (j != key_el)
	  inst.vectype.el[j] = neon_type_promote (&inst.vectype.el[key_el],
						  types[j]);
    }
  else if (inst.vectype.elems == 0 && els > 0)
    {
      unsigned j;
      /* No types were given after the mnemonic, so look for types specified
	 after each operand. We allow some flexibility here; as long as the
	 "key" operand has a type, we can infer the others.  */
      for (j = 0; j < els; j++)
	if (inst.operands[j].vectype.type != NT_invtype)
	  inst.vectype.el[j] = inst.operands[j].vectype;

      if (inst.operands[key_el].vectype.type != NT_invtype)
	{
	  for (j = 0; j < els; j++)
	    if (inst.operands[j].vectype.type == NT_invtype)
	      inst.vectype.el[j] = neon_type_promote (&inst.vectype.el[key_el],
						      types[j]);
	}
      else
	{
	  first_error (_("operand types can't be inferred"));
	  return badtype;
	}
    }
  else if (inst.vectype.elems != els)
    {
      first_error (_("type specifier has the wrong number of parts"));
      return badtype;
    }

  /* Pass 0 establishes the key type/size; pass 1 checks each operand
     against the (possibly modified) key and the allowed-type masks.  */
  for (pass = 0; pass < 2; pass++)
    {
      for (i = 0; i < els; i++)
	{
	  unsigned thisarg = types[i];
	  unsigned types_allowed = ((thisarg & N_EQK) != 0 && pass != 0)
	    ? modify_types_allowed (key_allowed, thisarg) : thisarg;
	  enum neon_el_type g_type = inst.vectype.el[i].type;
	  unsigned g_size = inst.vectype.el[i].size;

	  /* Decay more-specific signed & unsigned types to sign-insensitive
	     integer types if sign-specific variants are unavailable.  */
	  if ((g_type == NT_signed || g_type == NT_unsigned)
	      && (types_allowed & N_SU_ALL) == 0)
	    g_type = NT_integer;

	  /* If only untyped args are allowed, decay any more specific types to
	     them. Some instructions only care about signs for some element
	     sizes, so handle that properly.  */
	  if (((types_allowed & N_UNT) == 0)
	      && ((g_size == 8 && (types_allowed & N_8) != 0)
		  || (g_size == 16 && (types_allowed & N_16) != 0)
		  || (g_size == 32 && (types_allowed & N_32) != 0)
		  || (g_size == 64 && (types_allowed & N_64) != 0)))
	    g_type = NT_untyped;

	  if (pass == 0)
	    {
	      if ((thisarg & N_KEY) != 0)
		{
		  k_type = g_type;
		  k_size = g_size;
		  key_allowed = thisarg & ~N_KEY;

		  /* Check architecture constraint on FP16 extension.  */
		  if (k_size == 16
		      && k_type == NT_float
		      && ! ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_fp16))
		    {
		      inst.error = _(BAD_FP16);
		      return badtype;
		    }
		}
	    }
	  else
	    {
	      if ((thisarg & N_VFP) != 0)
		{
		  enum neon_shape_el regshape;
		  unsigned regwidth, match;

		  /* PR 11136: Catch the case where we are passed a shape of NS_NULL.  */
		  if (ns == NS_NULL)
		    {
		      first_error (_("invalid instruction shape"));
		      return badtype;
		    }
		  regshape = neon_shape_tab[ns].el[i];
		  regwidth = neon_shape_el_size[regshape];

		  /* In VFP mode, operands must match register widths. If we
		     have a key operand, use its width, else use the width of
		     the current operand.  */
		  if (k_size != -1u)
		    match = k_size;
		  else
		    match = g_size;

		  /* FP16 will use a single precision register.  */
		  if (regwidth == 32 && match == 16)
		    {
		      if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_fp16))
			match = regwidth;
		      else
			{
			  inst.error = _(BAD_FP16);
			  return badtype;
			}
		    }

		  if (regwidth != match)
		    {
		      first_error (_("operand size must match register width"));
		      return badtype;
		    }
		}

	      if ((thisarg & N_EQK) == 0)
		{
		  /* Independent operand: its concrete type must be one of
		     the allowed type bits.  */
		  unsigned given_type = type_chk_of_el_type (g_type, g_size);

		  if ((given_type & types_allowed) == 0)
		    {
		      first_error (BAD_SIMD_TYPE);
		      return badtype;
		    }
		}
	      else
		{
		  /* Key-derived operand: it must equal the key after the
		     modifier bits have been applied to the key.  */
		  enum neon_el_type mod_k_type = k_type;
		  unsigned mod_k_size = k_size;
		  neon_modify_type_size (thisarg, &mod_k_type, &mod_k_size);
		  if (g_type != mod_k_type || g_size != mod_k_size)
		    {
		      first_error (_("inconsistent types in Neon instruction"));
		      return badtype;
		    }
		}
	    }
	}
    }

  return inst.vectype.el[key_el];
}
  13501. /* Neon-style VFP instruction forwarding. */
  13502. /* Thumb VFP instructions have 0xE in the condition field. */
  13503. static void
  13504. do_vfp_cond_or_thumb (void)
  13505. {
  13506. inst.is_neon = 1;
  13507. if (thumb_mode)
  13508. inst.instruction |= 0xe0000000;
  13509. else
  13510. inst.instruction |= inst.cond << 28;
  13511. }
/* Look up and encode a simple mnemonic, for use as a helper function for the
   Neon-style VFP syntax.  This avoids duplication of bits of the insns table,
   etc.  It is assumed that operand parsing has already been done, and that the
   operands are in the form expected by the given opcode (this isn't necessarily
   the same as the form in which they were parsed, hence some massaging must
   take place before this function is called).
   Checks current arch version against that in the looked-up opcode.  */
static void
do_vfp_nsyn_opcode (const char *opname)
{
  const struct asm_opcode *opcode;

  /* OPNAME must be present in the opcode hash table; a miss is an internal
     assembler error, not a user error.  */
  opcode = (const struct asm_opcode *) str_hash_find (arm_ops_hsh, opname);

  if (!opcode)
    abort ();

  /* Reject if the selected CPU lacks the feature bits the looked-up opcode
     requires in the current (Thumb or ARM) mode.  */
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant,
		thumb_mode ? *opcode->tvariant : *opcode->avariant),
	      _(BAD_FPU));

  inst.is_neon = 1;

  if (thumb_mode)
    {
      inst.instruction = opcode->tvalue;
      opcode->tencode ();
    }
  else
    {
      /* ARM encodings carry the parsed condition in the top nibble.  */
      inst.instruction = (inst.cond << 28) | opcode->avalue;
      opcode->aencode ();
    }
}
  13541. static void
  13542. do_vfp_nsyn_add_sub (enum neon_shape rs)
  13543. {
  13544. int is_add = (inst.instruction & 0x0fffffff) == N_MNEM_vadd;
  13545. if (rs == NS_FFF || rs == NS_HHH)
  13546. {
  13547. if (is_add)
  13548. do_vfp_nsyn_opcode ("fadds");
  13549. else
  13550. do_vfp_nsyn_opcode ("fsubs");
  13551. /* ARMv8.2 fp16 instruction. */
  13552. if (rs == NS_HHH)
  13553. do_scalar_fp16_v82_encode ();
  13554. }
  13555. else
  13556. {
  13557. if (is_add)
  13558. do_vfp_nsyn_opcode ("faddd");
  13559. else
  13560. do_vfp_nsyn_opcode ("fsubd");
  13561. }
  13562. }
/* Check operand types to see if this is a VFP instruction, and if so call
   PFN ().  */
static int
try_vfp_nsyn (int args, void (*pfn) (enum neon_shape))
{
  enum neon_shape rs;
  struct neon_type_el et;

  /* ARGS selects how many operands to shape-check: 2 for monadic forms,
     3 for dyadic forms.  Anything else is an internal error.  */
  switch (args)
    {
    case 2:
      rs = neon_select_shape (NS_HH, NS_FF, NS_DD, NS_NULL);
      et = neon_check_type (2, rs, N_EQK | N_VFP, N_F_ALL | N_KEY | N_VFP);
      break;

    case 3:
      rs = neon_select_shape (NS_HHH, NS_FFF, NS_DDD, NS_NULL);
      et = neon_check_type (3, rs, N_EQK | N_VFP, N_EQK | N_VFP,
			    N_F_ALL | N_KEY | N_VFP);
      break;

    default:
      abort ();
    }

  /* Types matched a VFP shape: hand off to the VFP encoder.  */
  if (et.type != NT_invtype)
    {
      pfn (rs);
      return SUCCESS;
    }

  /* Not a VFP form: discard the type-check diagnostic so the caller can
     fall back to the Neon encoding path.  */
  inst.error = NULL;
  return FAIL;
}
  13592. static void
  13593. do_vfp_nsyn_mla_mls (enum neon_shape rs)
  13594. {
  13595. int is_mla = (inst.instruction & 0x0fffffff) == N_MNEM_vmla;
  13596. if (rs == NS_FFF || rs == NS_HHH)
  13597. {
  13598. if (is_mla)
  13599. do_vfp_nsyn_opcode ("fmacs");
  13600. else
  13601. do_vfp_nsyn_opcode ("fnmacs");
  13602. /* ARMv8.2 fp16 instruction. */
  13603. if (rs == NS_HHH)
  13604. do_scalar_fp16_v82_encode ();
  13605. }
  13606. else
  13607. {
  13608. if (is_mla)
  13609. do_vfp_nsyn_opcode ("fmacd");
  13610. else
  13611. do_vfp_nsyn_opcode ("fnmacd");
  13612. }
  13613. }
  13614. static void
  13615. do_vfp_nsyn_fma_fms (enum neon_shape rs)
  13616. {
  13617. int is_fma = (inst.instruction & 0x0fffffff) == N_MNEM_vfma;
  13618. if (rs == NS_FFF || rs == NS_HHH)
  13619. {
  13620. if (is_fma)
  13621. do_vfp_nsyn_opcode ("ffmas");
  13622. else
  13623. do_vfp_nsyn_opcode ("ffnmas");
  13624. /* ARMv8.2 fp16 instruction. */
  13625. if (rs == NS_HHH)
  13626. do_scalar_fp16_v82_encode ();
  13627. }
  13628. else
  13629. {
  13630. if (is_fma)
  13631. do_vfp_nsyn_opcode ("ffmad");
  13632. else
  13633. do_vfp_nsyn_opcode ("ffnmad");
  13634. }
  13635. }
  13636. static void
  13637. do_vfp_nsyn_mul (enum neon_shape rs)
  13638. {
  13639. if (rs == NS_FFF || rs == NS_HHH)
  13640. {
  13641. do_vfp_nsyn_opcode ("fmuls");
  13642. /* ARMv8.2 fp16 instruction. */
  13643. if (rs == NS_HHH)
  13644. do_scalar_fp16_v82_encode ();
  13645. }
  13646. else
  13647. do_vfp_nsyn_opcode ("fmuld");
  13648. }
  13649. static void
  13650. do_vfp_nsyn_abs_neg (enum neon_shape rs)
  13651. {
  13652. int is_neg = (inst.instruction & 0x80) != 0;
  13653. neon_check_type (2, rs, N_EQK | N_VFP, N_F_ALL | N_VFP | N_KEY);
  13654. if (rs == NS_FF || rs == NS_HH)
  13655. {
  13656. if (is_neg)
  13657. do_vfp_nsyn_opcode ("fnegs");
  13658. else
  13659. do_vfp_nsyn_opcode ("fabss");
  13660. /* ARMv8.2 fp16 instruction. */
  13661. if (rs == NS_HH)
  13662. do_scalar_fp16_v82_encode ();
  13663. }
  13664. else
  13665. {
  13666. if (is_neg)
  13667. do_vfp_nsyn_opcode ("fnegd");
  13668. else
  13669. do_vfp_nsyn_opcode ("fabsd");
  13670. }
  13671. }
  13672. /* Encode single-precision (only!) VFP fldm/fstm instructions. Double precision
  13673. insns belong to Neon, and are handled elsewhere. */
  13674. static void
  13675. do_vfp_nsyn_ldm_stm (int is_dbmode)
  13676. {
  13677. int is_ldm = (inst.instruction & (1 << 20)) != 0;
  13678. if (is_ldm)
  13679. {
  13680. if (is_dbmode)
  13681. do_vfp_nsyn_opcode ("fldmdbs");
  13682. else
  13683. do_vfp_nsyn_opcode ("fldmias");
  13684. }
  13685. else
  13686. {
  13687. if (is_dbmode)
  13688. do_vfp_nsyn_opcode ("fstmdbs");
  13689. else
  13690. do_vfp_nsyn_opcode ("fstmias");
  13691. }
  13692. }
  13693. static void
  13694. do_vfp_nsyn_sqrt (void)
  13695. {
  13696. enum neon_shape rs = neon_select_shape (NS_HH, NS_FF, NS_DD, NS_NULL);
  13697. neon_check_type (2, rs, N_EQK | N_VFP, N_F_ALL | N_KEY | N_VFP);
  13698. if (rs == NS_FF || rs == NS_HH)
  13699. {
  13700. do_vfp_nsyn_opcode ("fsqrts");
  13701. /* ARMv8.2 fp16 instruction. */
  13702. if (rs == NS_HH)
  13703. do_scalar_fp16_v82_encode ();
  13704. }
  13705. else
  13706. do_vfp_nsyn_opcode ("fsqrtd");
  13707. }
  13708. static void
  13709. do_vfp_nsyn_div (void)
  13710. {
  13711. enum neon_shape rs = neon_select_shape (NS_HHH, NS_FFF, NS_DDD, NS_NULL);
  13712. neon_check_type (3, rs, N_EQK | N_VFP, N_EQK | N_VFP,
  13713. N_F_ALL | N_KEY | N_VFP);
  13714. if (rs == NS_FFF || rs == NS_HHH)
  13715. {
  13716. do_vfp_nsyn_opcode ("fdivs");
  13717. /* ARMv8.2 fp16 instruction. */
  13718. if (rs == NS_HHH)
  13719. do_scalar_fp16_v82_encode ();
  13720. }
  13721. else
  13722. do_vfp_nsyn_opcode ("fdivd");
  13723. }
/* Encode the negated-multiply form (nsyn "nmul") via the VFP dyadic
   encoders rather than a legacy mnemonic lookup.  */
static void
do_vfp_nsyn_nmul (void)
{
  enum neon_shape rs = neon_select_shape (NS_HHH, NS_FFF, NS_DDD, NS_NULL);
  neon_check_type (3, rs, N_EQK | N_VFP, N_EQK | N_VFP,
		   N_F_ALL | N_KEY | N_VFP);

  if (rs == NS_FFF || rs == NS_HHH)
    {
      NEON_ENCODE (SINGLE, inst);
      do_vfp_sp_dyadic ();

      /* ARMv8.2 fp16 instruction.  */
      if (rs == NS_HHH)
	do_scalar_fp16_v82_encode ();
    }
  else
    {
      NEON_ENCODE (DOUBLE, inst);
      do_vfp_dp_rd_rn_rm ();
    }

  /* Fill in the condition field (Thumb uses fixed 0xE).  */
  do_vfp_cond_or_thumb ();
}
  13745. /* Turn a size (8, 16, 32, 64) into the respective bit number minus 3
  13746. (0, 1, 2, 3). */
  13747. static unsigned
  13748. neon_logbits (unsigned x)
  13749. {
  13750. return ffs (x) - 4;
  13751. }
/* Low four bits of a 5-bit vector register number (goes in Vd/Vn/Vm).  */
#define LOW4(R) ((R) & 0xf)
/* Top (fifth) bit of a vector register number (goes in D/N/M).  */
#define HI1(R) (((R) >> 4) & 1)
/* Lowest bit of a register number.  */
#define LOW1(R) ((R) & 0x1)
/* Bits [4:1] of a register number.  */
#define HI4(R) (((R) >> 1) & 0xf)
/* Translate the parsed VCMP/VPT condition (held in operands[0].imm) into
   the fcond encoding used by MVE VCMP/VPT, validating that the condition
   is legal for the element type ET.  On error, reports via first_error
   and returns 0.  */
static unsigned
mve_get_vcmp_vpt_cond (struct neon_type_el et)
{
  switch (et.type)
    {
    default:
      first_error (BAD_EL_TYPE);
      return 0;

    case NT_float:
      /* Floating-point comparisons accept eq/ne/ge/lt/gt/le.  */
      switch (inst.operands[0].imm)
	{
	default:
	  first_error (_("invalid condition"));
	  return 0;

	case 0x0:
	  /* eq.  */
	  return 0;

	case 0x1:
	  /* ne.  */
	  return 1;

	case 0xa:
	  /* ge.  */
	  return 4;

	case 0xb:
	  /* lt.  */
	  return 5;

	case 0xc:
	  /* gt.  */
	  return 6;

	case 0xd:
	  /* le.  */
	  return 7;
	}

    case NT_integer:
      /* only accept eq and ne.  */
      if (inst.operands[0].imm > 1)
	{
	  first_error (_("invalid condition"));
	  return 0;
	}
      return inst.operands[0].imm;

    case NT_unsigned:
      /* Unsigned comparisons: cs (0x2) and hi (0x8) only.  */
      if (inst.operands[0].imm == 0x2)
	return 2;
      else if (inst.operands[0].imm == 0x8)
	return 3;
      else
	{
	  first_error (_("invalid condition"));
	  return 0;
	}

    case NT_signed:
      /* Signed comparisons: ge/lt/gt/le.  */
      switch (inst.operands[0].imm)
	{
	default:
	  first_error (_("invalid condition"));
	  return 0;

	case 0xa:
	  /* ge.  */
	  return 4;

	case 0xb:
	  /* lt.  */
	  return 5;

	case 0xc:
	  /* gt.  */
	  return 6;

	case 0xd:
	  /* le.  */
	  return 7;
	}
    }
  /* Should be unreachable.  */
  abort ();
}
/* For VCTP (create vector tail predicate) in MVE.  */
static void
do_mve_vctp (void)
{
  int dt = 0;
  unsigned size = 0x0;

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* This is a typical MVE instruction which has no type but have size 8, 16,
     32 and 64.  For instructions with no type, inst.vectype.el[j].type is set
     to NT_untyped and size is updated in inst.vectype.el[j].size.  */
  if ((inst.operands[0].present) && (inst.vectype.el[0].type == NT_untyped))
    dt = inst.vectype.el[0].size;

  /* Setting this does not indicate an actual NEON instruction, but only
     indicates that the mnemonic accepts neon-style type suffixes.  */
  inst.is_neon = 1;

  /* Map element size to the two-bit size field; size 8 keeps the
     default 0x0.  */
  switch (dt)
    {
    case 8:
      break;
    case 16:
      size = 0x1; break;
    case 32:
      size = 0x2; break;
    case 64:
      size = 0x3; break;
    default:
      first_error (_("Type is not allowed for this instruction"));
    }

  inst.instruction |= size << 20;
  /* Rn goes in bits [19:16].  */
  inst.instruction |= inst.operands[0].reg << 16;
}
/* Encode an MVE VPT instruction (vector predicate block), with or without
   an inline comparison, and open the predication block state.  */
static void
do_mve_vpt (void)
{
  /* We are dealing with a vector predicated block.  */
  if (inst.operands[0].present)
    {
      enum neon_shape rs = neon_select_shape (NS_IQQ, NS_IQR, NS_NULL);
      struct neon_type_el et
	= neon_check_type (3, rs, N_EQK, N_KEY | N_F_MVE | N_I_MVE | N_SU_32,
			   N_EQK);

      /* FCOND is the 3-bit condition encoding, scattered into the
	 instruction word below.  */
      unsigned fcond = mve_get_vcmp_vpt_cond (et);

      constraint (inst.operands[1].reg > 14, MVE_BAD_QREG);

      if (et.type == NT_invtype)
	return;

      if (et.type == NT_float)
	{
	  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext),
		      BAD_FPU);
	  constraint (et.size != 16 && et.size != 32, BAD_EL_TYPE);
	  /* Bit 28 selects f16 (1) vs f32 (0); float forms use size 0b11.  */
	  inst.instruction |= (et.size == 16) << 28;
	  inst.instruction |= 0x3 << 20;
	}
      else
	{
	  constraint (et.size != 8 && et.size != 16 && et.size != 32,
		      BAD_EL_TYPE);
	  inst.instruction |= 1 << 28;
	  inst.instruction |= neon_logbits (et.size) << 20;
	}

      if (inst.operands[2].isquad)
	{
	  /* Q-register second source operand.  */
	  inst.instruction |= HI1 (inst.operands[2].reg) << 5;
	  inst.instruction |= LOW4 (inst.operands[2].reg);
	  inst.instruction |= (fcond & 0x2) >> 1;
	}
      else
	{
	  /* General-purpose register second source operand.  */
	  if (inst.operands[2].reg == REG_SP)
	    as_tsktsk (MVE_BAD_SP);
	  inst.instruction |= 1 << 6;
	  inst.instruction |= (fcond & 0x2) << 4;
	  inst.instruction |= inst.operands[2].reg;
	}

      inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
      inst.instruction |= (fcond & 0x4) << 10;
      inst.instruction |= (fcond & 0x1) << 7;
    }

  /* Open the VPT block: derive the initial predication mask from the
     encoded instruction bits (bit 22 and bits [15:13]).  */
  set_pred_insn_type (VPT_INSN);
  now_pred.cc = 0;
  now_pred.mask = ((inst.instruction & 0x00400000) >> 19)
		  | ((inst.instruction & 0xe000) >> 13);
  now_pred.warn_deprecated = false;
  now_pred.type = VECTOR_PRED;
  inst.is_neon = 1;
}
/* Encode an MVE VCMP instruction (vector compare against a Q register or
   a general-purpose register, writing VPR.P0).  */
static void
do_mve_vcmp (void)
{
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext), BAD_FPU);
  if (!inst.operands[1].isreg || !inst.operands[1].isquad)
    first_error (_(reg_expected_msgs[REG_TYPE_MQ]));
  if (!inst.operands[2].present)
    first_error (_("MVE vector or ARM register expected"));
  constraint (inst.operands[1].reg > 14, MVE_BAD_QREG);

  /* Deal with 'else' conditional MVE's vcmp, it will be parsed as vcmpe.  */
  if ((inst.instruction & 0xffffffff) == N_MNEM_vcmpe
      && inst.operands[1].isquad)
    {
      inst.instruction = N_MNEM_vcmp;
      inst.cond = 0x10;
    }

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  enum neon_shape rs = neon_select_shape (NS_IQQ, NS_IQR, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_KEY | N_F_MVE | N_I_MVE | N_SU_32,
		       N_EQK);

  /* PC is only allowed as the scalar operand in its ZR (zero) alias.  */
  constraint (rs == NS_IQR && inst.operands[2].reg == REG_PC
	      && !inst.operands[2].iszr, BAD_PC);

  unsigned fcond = mve_get_vcmp_vpt_cond (et);

  /* Base opcode; FCOND's three bits are scattered over the word below.  */
  inst.instruction = 0xee010f00;
  inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  inst.instruction |= (fcond & 0x4) << 10;
  inst.instruction |= (fcond & 0x1) << 7;

  if (et.type == NT_float)
    {
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext),
		  BAD_FPU);
      /* Bit 28 selects f16 vs f32; float forms use size 0b11.  */
      inst.instruction |= (et.size == 16) << 28;
      inst.instruction |= 0x3 << 20;
    }
  else
    {
      inst.instruction |= 1 << 28;
      inst.instruction |= neon_logbits (et.size) << 20;
    }

  if (inst.operands[2].isquad)
    {
      /* Q-register comparand.  */
      inst.instruction |= HI1 (inst.operands[2].reg) << 5;
      inst.instruction |= (fcond & 0x2) >> 1;
      inst.instruction |= LOW4 (inst.operands[2].reg);
    }
  else
    {
      /* General-purpose register comparand.  */
      if (inst.operands[2].reg == REG_SP)
	as_tsktsk (MVE_BAD_SP);
      inst.instruction |= 1 << 6;
      inst.instruction |= (fcond & 0x2) << 4;
      inst.instruction |= inst.operands[2].reg;
    }

  inst.is_neon = 1;
  return;
}
  13979. static void
  13980. do_mve_vmaxa_vmina (void)
  13981. {
  13982. if (inst.cond > COND_ALWAYS)
  13983. inst.pred_insn_type = INSIDE_VPT_INSN;
  13984. else
  13985. inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;
  13986. enum neon_shape rs = neon_select_shape (NS_QQ, NS_NULL);
  13987. struct neon_type_el et
  13988. = neon_check_type (2, rs, N_EQK, N_KEY | N_S8 | N_S16 | N_S32);
  13989. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  13990. inst.instruction |= neon_logbits (et.size) << 18;
  13991. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  13992. inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  13993. inst.instruction |= LOW4 (inst.operands[1].reg);
  13994. inst.is_neon = 1;
  13995. }
/* Encode MVE VFMAS (vector fused multiply-accumulate with a scalar from a
   general-purpose register).  */
static void
do_mve_vfmas (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQR, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_F_MVE | N_KEY, N_EQK, N_EQK);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* SP/PC as the scalar operand are unpredictable: warn, don't error.  */
  if (inst.operands[2].reg == REG_SP)
    as_tsktsk (MVE_BAD_SP);
  else if (inst.operands[2].reg == REG_PC)
    as_tsktsk (MVE_BAD_PC);

  /* Bit 28 selects f16 (1) vs f32 (0).  */
  inst.instruction |= (et.size == 16) << 28;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  inst.instruction |= inst.operands[2].reg;
  inst.is_neon = 1;
}
/* Encode MVE VIDUP/VDDUP and (with a wrap register) VIWDUP/VDWDUP:
   vector increment/decrement and duplicate.  */
static void
do_mve_viddup (void)
{
  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* The step immediate is restricted to powers of two 1/2/4/8.  */
  unsigned imm = inst.relocs[0].exp.X_add_number;
  constraint (imm != 1 && imm != 2 && imm != 4 && imm != 8,
	      _("immediate must be either 1, 2, 4 or 8"));

  enum neon_shape rs;
  struct neon_type_el et;
  unsigned Rm;

  if (inst.instruction == M_MNEM_vddup || inst.instruction == M_MNEM_vidup)
    {
      /* Non-wrapping form: the Rm field holds the fixed pattern 0b111.  */
      rs = neon_select_shape (NS_QRI, NS_NULL);
      et = neon_check_type (2, rs, N_KEY | N_U8 | N_U16 | N_U32, N_EQK);
      Rm = 7;
    }
  else
    {
      /* Wrapping form (viwdup/vdwdup): the wrap register must be odd
	 and not SP/PC; only its upper bits are encoded.  */
      constraint ((inst.operands[2].reg % 2) != 1, BAD_EVEN);
      if (inst.operands[2].reg == REG_SP)
	as_tsktsk (MVE_BAD_SP);
      else if (inst.operands[2].reg == REG_PC)
	first_error (BAD_PC);

      rs = neon_select_shape (NS_QRRI, NS_NULL);
      et = neon_check_type (3, rs, N_KEY | N_U8 | N_U16 | N_U32, N_EQK, N_EQK);
      Rm = inst.operands[2].reg >> 1;
    }

  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= neon_logbits (et.size) << 20;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  /* The step is encoded in two bits: bit 7 set for 4/8, bit 0 set
     for 2/8.  */
  inst.instruction |= (imm > 2) << 7;
  inst.instruction |= Rm << 1;
  inst.instruction |= (imm == 2 || imm == 8);
  inst.is_neon = 1;
}
/* Encode MVE VMLAS (vector multiply-accumulate with a scalar from a
   general-purpose register).  */
static void
do_mve_vmlas (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQR, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_EQK, N_SU_MVE | N_KEY);

  /* SP/PC as the scalar operand are unpredictable: warn, don't error.  */
  if (inst.operands[2].reg == REG_PC)
    as_tsktsk (MVE_BAD_PC);
  else if (inst.operands[2].reg == REG_SP)
    as_tsktsk (MVE_BAD_SP);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* Bit 28 is the U (unsigned) bit.  */
  inst.instruction |= (et.type == NT_unsigned) << 28;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= neon_logbits (et.size) << 20;
  inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  inst.instruction |= inst.operands[2].reg;
  inst.is_neon = 1;
}
/* Encode MVE VSHLL (vector shift left long).  A shift equal to the element
   size uses a distinct encoding from shifts of 1..size-1.  */
static void
do_mve_vshll (void)
{
  struct neon_type_el et
    = neon_check_type (2, NS_QQI, N_EQK, N_S8 | N_U8 | N_S16 | N_U16 | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  int imm = inst.operands[2].imm;
  constraint (imm < 1 || (unsigned)imm > et.size,
	      _("immediate value out of range"));

  if ((unsigned)imm == et.size)
    {
      /* Shift by the full element size: dedicated encoding with the size
	 in bits [19:18].  */
      inst.instruction |= neon_logbits (et.size) << 18;
      inst.instruction |= 0x110001;
    }
  else
    {
      /* Shift by 1..size-1: immediate encoded as size+imm in bits
	 [21:16].  */
      inst.instruction |= (et.size + imm) << 16;
      inst.instruction |= 0x800140;
    }

  /* Bit 28 is the U (unsigned) bit.  */
  inst.instruction |= (et.type == NT_unsigned) << 28;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  inst.instruction |= LOW4 (inst.operands[1].reg);
  inst.is_neon = 1;
}
/* Encode MVE VSHLC (whole-vector left shift with carry through a
   general-purpose register).  */
static void
do_mve_vshlc (void)
{
  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* SP/PC as the carry register are unpredictable: warn, don't error.  */
  if (inst.operands[1].reg == REG_PC)
    as_tsktsk (MVE_BAD_PC);
  else if (inst.operands[1].reg == REG_SP)
    as_tsktsk (MVE_BAD_SP);

  int imm = inst.operands[2].imm;
  constraint (imm < 1 || imm > 32, _("immediate value out of range"));

  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  /* The shift amount occupies five bits; 32 encodes as 0.  */
  inst.instruction |= (imm & 0x1f) << 16;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= inst.operands[1].reg;
  inst.is_neon = 1;
}
/* Encode the MVE narrowing right-shift family (VSHRN/VRSHRN/VQSHRN/
   VQRSHRN/VQSHRUN/VQRSHRUN, top and bottom variants).  The mnemonic
   selects which element types are permitted.  */
static void
do_mve_vshrn (void)
{
  unsigned types;

  switch (inst.instruction)
    {
    case M_MNEM_vshrnt:
    case M_MNEM_vshrnb:
    case M_MNEM_vrshrnt:
    case M_MNEM_vrshrnb:
      /* Plain (optionally rounding) narrows take untyped integers.  */
      types = N_I16 | N_I32;
      break;
    case M_MNEM_vqshrnt:
    case M_MNEM_vqshrnb:
    case M_MNEM_vqrshrnt:
    case M_MNEM_vqrshrnb:
      /* Saturating narrows need a signedness.  */
      types = N_U16 | N_U32 | N_S16 | N_S32;
      break;
    case M_MNEM_vqshrunt:
    case M_MNEM_vqshrunb:
    case M_MNEM_vqrshrunt:
    case M_MNEM_vqrshrunb:
      /* Signed-to-unsigned saturating narrows take signed sources.  */
      types = N_S16 | N_S32;
      break;
    default:
      abort ();
    }

  struct neon_type_el et = neon_check_type (2, NS_QQI, N_EQK, types | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  unsigned Qd = inst.operands[0].reg;
  unsigned Qm = inst.operands[1].reg;
  unsigned imm = inst.operands[2].imm;

  /* The shift must be 1..half the source element size.  */
  constraint (imm < 1 || ((unsigned) imm) > (et.size / 2),
	      et.size == 16
	      ? _("immediate operand expected in the range [1,8]")
	      : _("immediate operand expected in the range [1,16]"));

  /* Bit 28 is the U (unsigned) bit; the shift is encoded as
     size - imm in bits [21:16].  */
  inst.instruction |= (et.type == NT_unsigned) << 28;
  inst.instruction |= HI1 (Qd) << 22;
  inst.instruction |= (et.size - imm) << 16;
  inst.instruction |= LOW4 (Qd) << 12;
  inst.instruction |= HI1 (Qm) << 5;
  inst.instruction |= LOW4 (Qm);
  inst.is_neon = 1;
}
/* Encode MVE VQMOVN/VQMOVUN (saturating narrow, top/bottom variants).
   VQMOVUN only accepts signed sources.  */
static void
do_mve_vqmovn (void)
{
  struct neon_type_el et;

  if (inst.instruction == M_MNEM_vqmovnt
      || inst.instruction == M_MNEM_vqmovnb)
    et = neon_check_type (2, NS_QQ, N_EQK,
			  N_U16 | N_U32 | N_S16 | N_S32 | N_KEY);
  else
    et = neon_check_type (2, NS_QQ, N_EQK, N_S16 | N_S32 | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* Bit 28 is the U (unsigned) bit; bit 18 selects 32-bit sources.  */
  inst.instruction |= (et.type == NT_unsigned) << 28;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= (et.size == 32) << 18;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  inst.instruction |= LOW4 (inst.operands[1].reg);
  inst.is_neon = 1;
}
  14197. static void
  14198. do_mve_vpsel (void)
  14199. {
  14200. neon_select_shape (NS_QQQ, NS_NULL);
  14201. if (inst.cond > COND_ALWAYS)
  14202. inst.pred_insn_type = INSIDE_VPT_INSN;
  14203. else
  14204. inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;
  14205. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  14206. inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  14207. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  14208. inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  14209. inst.instruction |= HI1 (inst.operands[2].reg) << 5;
  14210. inst.instruction |= LOW4 (inst.operands[2].reg);
  14211. inst.is_neon = 1;
  14212. }
  14213. static void
  14214. do_mve_vpnot (void)
  14215. {
  14216. if (inst.cond > COND_ALWAYS)
  14217. inst.pred_insn_type = INSIDE_VPT_INSN;
  14218. else
  14219. inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;
  14220. }
  14221. static void
  14222. do_mve_vmaxnma_vminnma (void)
  14223. {
  14224. enum neon_shape rs = neon_select_shape (NS_QQ, NS_NULL);
  14225. struct neon_type_el et
  14226. = neon_check_type (2, rs, N_EQK, N_F_MVE | N_KEY);
  14227. if (inst.cond > COND_ALWAYS)
  14228. inst.pred_insn_type = INSIDE_VPT_INSN;
  14229. else
  14230. inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;
  14231. inst.instruction |= (et.size == 16) << 28;
  14232. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  14233. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  14234. inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  14235. inst.instruction |= LOW4 (inst.operands[1].reg);
  14236. inst.is_neon = 1;
  14237. }
/* Encode MVE VCMUL (vector complex multiply with rotation 0/90/180/270).  */
static void
do_mve_vcmul (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQQI, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_EQK, N_F_MVE | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  unsigned rot = inst.relocs[0].exp.X_add_number;
  constraint (rot != 0 && rot != 90 && rot != 180 && rot != 270,
	      _("immediate out of range"));

  /* For f32, destination overlapping a source is unpredictable: warn.  */
  if (et.size == 32 && (inst.operands[0].reg == inst.operands[1].reg
			|| inst.operands[0].reg == inst.operands[2].reg))
    as_tsktsk (BAD_MVE_SRCDEST);

  /* Bit 28 selects f32 (1) vs f16 (0) here.  */
  inst.instruction |= (et.size == 32) << 28;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  /* The rotation is encoded in two bits: bit 12 set for 180/270,
     bit 0 set for 90/270.  */
  inst.instruction |= (rot > 90) << 12;
  inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  inst.instruction |= HI1 (inst.operands[2].reg) << 5;
  inst.instruction |= LOW4 (inst.operands[2].reg);
  inst.instruction |= (rot == 90 || rot == 270);
  inst.is_neon = 1;
}
/* To handle the Low Overhead Loop instructions
   in Armv8.1-M Mainline and MVE.  */
static void
do_t_loloop (void)
{
  unsigned long insn = inst.instruction;

  inst.instruction = THUMB_OP32 (inst.instruction);

  /* LCTP takes no operands and needs no relocation.  */
  if (insn == T_MNEM_lctp)
    return;

  set_pred_insn_type (MVE_OUTSIDE_PRED_INSN);

  /* The tail-predicated variants carry an element size suffix.  */
  if (insn == T_MNEM_wlstp || insn == T_MNEM_dlstp)
    {
      struct neon_type_el et
	= neon_check_type (2, NS_RR, N_EQK, N_8 | N_16 | N_32 | N_64 | N_KEY);
      inst.instruction |= neon_logbits (et.size) << 20;
      inst.is_neon = 1;
    }

  switch (insn)
    {
    case T_MNEM_letp:
      /* LETP requires the explicit LR operand.  */
      constraint (!inst.operands[0].present,
		  _("expected LR"));
      /* fall through.  */
    case T_MNEM_le:
      /* le <label>.  */
      if (!inst.operands[0].present)
	inst.instruction |= 1 << 21;

      v8_1_loop_reloc (true);
      break;

    case T_MNEM_wls:
    case T_MNEM_wlstp:
      v8_1_loop_reloc (false);
      /* fall through.  */
    case T_MNEM_dlstp:
    case T_MNEM_dls:
      constraint (inst.operands[1].isreg != 1, BAD_ARGS);

      /* PC is an error for the tail-predicated forms but only
	 unpredictable for the others; SP is always unpredictable.  */
      if (insn == T_MNEM_wlstp || insn == T_MNEM_dlstp)
	constraint (inst.operands[1].reg == REG_PC, BAD_PC);
      else if (inst.operands[1].reg == REG_PC)
	as_tsktsk (MVE_BAD_PC);
      if (inst.operands[1].reg == REG_SP)
	as_tsktsk (MVE_BAD_SP);

      inst.instruction |= (inst.operands[1].reg << 16);
      break;

    default:
      abort ();
    }
}
/* Encode VCMP/VCMPE in the VFP form, or forward to the MVE encoder when
   the first operand is not a register (MVE-style compare).  Handles both
   register-register compares and compares against the #0 immediate.  */
static void
do_vfp_nsyn_cmp (void)
{
  enum neon_shape rs;

  if (!inst.operands[0].isreg)
    {
      /* Not a VFP register compare: let the MVE encoder handle it.  */
      do_mve_vcmp ();
      return;
    }
  else
    {
      constraint (inst.operands[2].present, BAD_SYNTAX);
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd),
		  BAD_FPU);
    }

  if (inst.operands[1].isreg)
    {
      /* Register-register compare.  */
      rs = neon_select_shape (NS_HH, NS_FF, NS_DD, NS_NULL);
      neon_check_type (2, rs, N_EQK | N_VFP, N_F_ALL | N_KEY | N_VFP);

      if (rs == NS_FF || rs == NS_HH)
	{
	  NEON_ENCODE (SINGLE, inst);
	  do_vfp_sp_monadic ();
	}
      else
	{
	  NEON_ENCODE (DOUBLE, inst);
	  do_vfp_dp_rd_rm ();
	}
    }
  else
    {
      /* Compare against zero: switch to the "z" pseudo-mnemonic.  */
      rs = neon_select_shape (NS_HI, NS_FI, NS_DI, NS_NULL);
      neon_check_type (2, rs, N_F_ALL | N_KEY | N_VFP, N_EQK);

      switch (inst.instruction & 0x0fffffff)
	{
	case N_MNEM_vcmp:
	  inst.instruction += N_MNEM_vcmpz - N_MNEM_vcmp;
	  break;
	case N_MNEM_vcmpe:
	  inst.instruction += N_MNEM_vcmpez - N_MNEM_vcmpe;
	  break;
	default:
	  abort ();
	}

      if (rs == NS_FI || rs == NS_HI)
	{
	  NEON_ENCODE (SINGLE, inst);
	  do_vfp_sp_compare_z ();
	}
      else
	{
	  NEON_ENCODE (DOUBLE, inst);
	  do_vfp_dp_rd ();
	}
    }
  do_vfp_cond_or_thumb ();

  /* ARMv8.2 fp16 instruction.  */
  if (rs == NS_HI || rs == NS_HH)
    do_scalar_fp16_v82_encode ();
}
  14374. static void
  14375. nsyn_insert_sp (void)
  14376. {
  14377. inst.operands[1] = inst.operands[0];
  14378. memset (&inst.operands[0], '\0', sizeof (inst.operands[0]));
  14379. inst.operands[0].reg = REG_SP;
  14380. inst.operands[0].isreg = 1;
  14381. inst.operands[0].writeback = 1;
  14382. inst.operands[0].present = 1;
  14383. }
  14384. /* Fix up Neon data-processing instructions, ORing in the correct bits for
  14385. ARM mode or Thumb mode and moving the encoded bit 24 to bit 28. */
  14386. static void
  14387. neon_dp_fixup (struct arm_it* insn)
  14388. {
  14389. unsigned int i = insn->instruction;
  14390. insn->is_neon = 1;
  14391. if (thumb_mode)
  14392. {
  14393. /* The U bit is at bit 24 by default. Move to bit 28 in Thumb mode. */
  14394. if (i & (1 << 24))
  14395. i |= 1 << 28;
  14396. i &= ~(1 << 24);
  14397. i |= 0xef000000;
  14398. }
  14399. else
  14400. i |= 0xf2000000;
  14401. insn->instruction = i;
  14402. }
/* Encode an MVE Qd, Qn, Rm instruction.  The parsed instruction word still
   holds the Neon opcode pattern for the mnemonic; match it to substitute
   the corresponding MVE scalar-form base opcode, then pack size/U-bit and
   the three operands.  SIZE is the element size in bits, U the unsigned
   bit, FP non-zero for the floating-point forms.  */
static void
mve_encode_qqr (int size, int U, int fp)
{
  /* SP/PC as the scalar operand are unpredictable: warn, don't error.  */
  if (inst.operands[2].reg == REG_SP)
    as_tsktsk (MVE_BAD_SP);
  else if (inst.operands[2].reg == REG_PC)
    as_tsktsk (MVE_BAD_PC);

  if (fp)
    {
      /* vadd.  */
      if (((unsigned)inst.instruction) == 0xd00)
	inst.instruction = 0xee300f40;
      /* vsub.  */
      else if (((unsigned)inst.instruction) == 0x200d00)
	inst.instruction = 0xee301f40;
      /* vmul.  */
      else if (((unsigned)inst.instruction) == 0x1000d10)
	inst.instruction = 0xee310e60;

      /* Setting size which is 1 for F16 and 0 for F32.  */
      inst.instruction |= (size == 16) << 28;
    }
  else
    {
      /* vadd.  */
      if (((unsigned)inst.instruction) == 0x800)
	inst.instruction = 0xee010f40;
      /* vsub.  */
      else if (((unsigned)inst.instruction) == 0x1000800)
	inst.instruction = 0xee011f40;
      /* vhadd.  */
      else if (((unsigned)inst.instruction) == 0)
	inst.instruction = 0xee000f40;
      /* vhsub.  */
      else if (((unsigned)inst.instruction) == 0x200)
	inst.instruction = 0xee001f40;
      /* vmla.  */
      else if (((unsigned)inst.instruction) == 0x900)
	inst.instruction = 0xee010e40;
      /* vmul.  */
      else if (((unsigned)inst.instruction) == 0x910)
	inst.instruction = 0xee011e60;
      /* vqadd.  */
      else if (((unsigned)inst.instruction) == 0x10)
	inst.instruction = 0xee000f60;
      /* vqsub.  */
      else if (((unsigned)inst.instruction) == 0x210)
	inst.instruction = 0xee001f60;
      /* vqrdmlah.  */
      else if (((unsigned)inst.instruction) == 0x3000b10)
	inst.instruction = 0xee000e40;
      /* vqdmulh.  */
      else if (((unsigned)inst.instruction) == 0x0000b00)
	inst.instruction = 0xee010e60;
      /* vqrdmulh.  */
      else if (((unsigned)inst.instruction) == 0x1000b00)
	inst.instruction = 0xfe010e60;

      /* Set U-bit.  */
      inst.instruction |= U << 28;

      /* Setting bits for size.  */
      inst.instruction |= neon_logbits (size) << 20;
    }

  /* Pack Qd (bit 22, bits [15:12]), Qn (bit 7, bits [19:16]) and Rm.  */
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  inst.instruction |= inst.operands[2].reg;
  inst.is_neon = 1;
}
  14471. static void
  14472. mve_encode_rqq (unsigned bit28, unsigned size)
  14473. {
  14474. inst.instruction |= bit28 << 28;
  14475. inst.instruction |= neon_logbits (size) << 20;
  14476. inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  14477. inst.instruction |= inst.operands[0].reg << 12;
  14478. inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  14479. inst.instruction |= HI1 (inst.operands[2].reg) << 5;
  14480. inst.instruction |= LOW4 (inst.operands[2].reg);
  14481. inst.is_neon = 1;
  14482. }
  14483. static void
  14484. mve_encode_qqq (int ubit, int size)
  14485. {
  14486. inst.instruction |= (ubit != 0) << 28;
  14487. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  14488. inst.instruction |= neon_logbits (size) << 20;
  14489. inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  14490. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  14491. inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  14492. inst.instruction |= HI1 (inst.operands[2].reg) << 5;
  14493. inst.instruction |= LOW4 (inst.operands[2].reg);
  14494. inst.is_neon = 1;
  14495. }
  14496. static void
  14497. mve_encode_rq (unsigned bit28, unsigned size)
  14498. {
  14499. inst.instruction |= bit28 << 28;
  14500. inst.instruction |= neon_logbits (size) << 18;
  14501. inst.instruction |= inst.operands[0].reg << 12;
  14502. inst.instruction |= LOW4 (inst.operands[1].reg);
  14503. inst.is_neon = 1;
  14504. }
  14505. static void
  14506. mve_encode_rrqq (unsigned U, unsigned size)
  14507. {
  14508. constraint (inst.operands[3].reg > 14, MVE_BAD_QREG);
  14509. inst.instruction |= U << 28;
  14510. inst.instruction |= (inst.operands[1].reg >> 1) << 20;
  14511. inst.instruction |= LOW4 (inst.operands[2].reg) << 16;
  14512. inst.instruction |= (size == 32) << 16;
  14513. inst.instruction |= inst.operands[0].reg << 12;
  14514. inst.instruction |= HI1 (inst.operands[2].reg) << 7;
  14515. inst.instruction |= inst.operands[3].reg;
  14516. inst.is_neon = 1;
  14517. }
  14518. /* Helper function for neon_three_same handling the operands. */
  14519. static void
  14520. neon_three_args (int isquad)
  14521. {
  14522. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  14523. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  14524. inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  14525. inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  14526. inst.instruction |= LOW4 (inst.operands[2].reg);
  14527. inst.instruction |= HI1 (inst.operands[2].reg) << 5;
  14528. inst.instruction |= (isquad != 0) << 6;
  14529. inst.is_neon = 1;
  14530. }
  14531. /* Encode insns with bit pattern:
  14532. |28/24|23|22 |21 20|19 16|15 12|11 8|7|6|5|4|3 0|
  14533. | U |x |D |size | Rn | Rd |x x x x|N|Q|M|x| Rm |
  14534. SIZE is passed in bits. -1 means size field isn't changed, in case it has a
  14535. different meaning for some instruction. */
  14536. static void
  14537. neon_three_same (int isquad, int ubit, int size)
  14538. {
  14539. neon_three_args (isquad);
  14540. inst.instruction |= (ubit != 0) << 24;
  14541. if (size != -1)
  14542. inst.instruction |= neon_logbits (size) << 20;
  14543. neon_dp_fixup (&inst);
  14544. }
  14545. /* Encode instructions of the form:
  14546. |28/24|23|22|21 20|19 18|17 16|15 12|11 7|6|5|4|3 0|
  14547. | U |x |D |x x |size |x x | Rd |x x x x x|Q|M|x| Rm |
  14548. Don't write size if SIZE == -1. */
  14549. static void
  14550. neon_two_same (int qbit, int ubit, int size)
  14551. {
  14552. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  14553. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  14554. inst.instruction |= LOW4 (inst.operands[1].reg);
  14555. inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  14556. inst.instruction |= (qbit != 0) << 6;
  14557. inst.instruction |= (ubit != 0) << 24;
  14558. if (size != -1)
  14559. inst.instruction |= neon_logbits (size) << 18;
  14560. neon_dp_fixup (&inst);
  14561. }
/* Bitmask of checks for vfp_or_neon_is_neon to perform; combine with
   bitwise OR.  */
enum vfp_or_neon_is_neon_bits
{
  NEON_CHECK_CC = 1,	/* Reject conditional execution (ARM mode) and
			   patch in the unconditional encoding.  */
  NEON_CHECK_ARCH = 2,	/* Require the Neon v1 FPU extension.  */
  NEON_CHECK_ARCH8 = 4	/* Require the ARMv8 Neon FPU extension.  */
};
/* Call this function if an instruction which may have belonged to the VFP or
   Neon instruction sets, but turned out to be a Neon instruction (due to the
   operand types involved, etc.).  We have to check and/or fix-up a couple of
   things:

     - Make sure the user hasn't attempted to make a Neon instruction
       conditional.
     - Alter the value in the condition code field if necessary.
     - Make sure that the arch supports Neon instructions.

   Which of these operations take place depends on bits from enum
   vfp_or_neon_is_neon_bits.

   WARNING: This function has side effects!  If NEON_CHECK_CC is used and the
   current instruction's condition is COND_ALWAYS, the condition field is
   changed to inst.uncond_value.  This is necessary because instructions shared
   between VFP and Neon may be conditional for the VFP variants only, and the
   unconditional Neon version must have, e.g., 0xF in the condition field.

   Returns SUCCESS, or FAIL with an error recorded via first_error.  */
static int
vfp_or_neon_is_neon (unsigned check)
{
  /* Conditions are always legal in Thumb mode (IT blocks).  */
  if (!thumb_mode && (check & NEON_CHECK_CC))
    {
      if (inst.cond != COND_ALWAYS)
	{
	  first_error (_(BAD_COND));
	  return FAIL;
	}
      /* Patch the proper unconditional condition-field value (e.g. 0xF)
	 into bits 31:28.  */
      if (inst.uncond_value != -1u)
	inst.instruction |= inst.uncond_value << 28;
    }

  /* Verify the required FPU/Neon features are present, marking them as
     used for build-attribute purposes.  */
  if (((check & NEON_CHECK_ARCH) && !mark_feature_used (&fpu_neon_ext_v1))
      || ((check & NEON_CHECK_ARCH8)
	  && !mark_feature_used (&fpu_neon_ext_armv8)))
    {
      first_error (_(BAD_FPU));
      return FAIL;
    }

  return SUCCESS;
}
/* Return TRUE if the SIMD instruction is available for the current
   cpu_variant.  FP is set to TRUE if this is a SIMD floating-point
   instruction.  CHECK contains the set of bits to pass to
   vfp_or_neon_is_neon for the NEON specific checks.

   As a side effect, sets inst.pred_insn_type according to whether the
   instruction appears inside a VPT block or carries a scalar condition.  */
static bool
check_simd_pred_availability (int fp, unsigned check)
{
  if (inst.cond > COND_ALWAYS)
    {
      /* A VPT predication suffix was given: only valid for MVE.  */
      if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	{
	  inst.error = BAD_FPU;
	  return false;
	}
      inst.pred_insn_type = INSIDE_VPT_INSN;
    }
  else if (inst.cond < COND_ALWAYS)
    {
      /* A scalar condition code: for MVE, note the instruction is outside
	 any predication; otherwise defer to the Neon checks.  */
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;
      else if (vfp_or_neon_is_neon (check) == FAIL)
	return false;
    }
  else
    {
      /* Unconditional: the instruction must be supported either by MVE
	 (integer or FP flavour, per FP) or by Neon.  */
      if (!ARM_CPU_HAS_FEATURE (cpu_variant, fp ? mve_fp_ext : mve_ext)
	  && vfp_or_neon_is_neon (check) == FAIL)
	return false;

      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;
    }

  return true;
}
  14639. /* Neon instruction encoders, in approximate order of appearance. */
  14640. static void
  14641. do_neon_dyadic_i_su (void)
  14642. {
  14643. if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
  14644. return;
  14645. enum neon_shape rs;
  14646. struct neon_type_el et;
  14647. if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  14648. rs = neon_select_shape (NS_QQQ, NS_QQR, NS_NULL);
  14649. else
  14650. rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
  14651. et = neon_check_type (3, rs, N_EQK, N_EQK, N_SU_32 | N_KEY);
  14652. if (rs != NS_QQR)
  14653. neon_three_same (neon_quad (rs), et.type == NT_unsigned, et.size);
  14654. else
  14655. mve_encode_qqr (et.size, et.type == NT_unsigned, 0);
  14656. }
  14657. static void
  14658. do_neon_dyadic_i64_su (void)
  14659. {
  14660. if (!check_simd_pred_availability (false, NEON_CHECK_CC | NEON_CHECK_ARCH))
  14661. return;
  14662. enum neon_shape rs;
  14663. struct neon_type_el et;
  14664. if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  14665. {
  14666. rs = neon_select_shape (NS_QQR, NS_QQQ, NS_NULL);
  14667. et = neon_check_type (3, rs, N_EQK, N_EQK, N_SU_MVE | N_KEY);
  14668. }
  14669. else
  14670. {
  14671. rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
  14672. et = neon_check_type (3, rs, N_EQK, N_EQK, N_SU_ALL | N_KEY);
  14673. }
  14674. if (rs == NS_QQR)
  14675. mve_encode_qqr (et.size, et.type == NT_unsigned, 0);
  14676. else
  14677. neon_three_same (neon_quad (rs), et.type == NT_unsigned, et.size);
  14678. }
  14679. static void
  14680. neon_imm_shift (int write_ubit, int uval, int isquad, struct neon_type_el et,
  14681. unsigned immbits)
  14682. {
  14683. unsigned size = et.size >> 3;
  14684. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  14685. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  14686. inst.instruction |= LOW4 (inst.operands[1].reg);
  14687. inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  14688. inst.instruction |= (isquad != 0) << 6;
  14689. inst.instruction |= immbits << 16;
  14690. inst.instruction |= (size >> 3) << 7;
  14691. inst.instruction |= (size & 0x7) << 19;
  14692. if (write_ubit)
  14693. inst.instruction |= (uval != 0) << 24;
  14694. neon_dp_fixup (&inst);
  14695. }
/* Encode VSHL: vector shift left, either by immediate or by a vector
   (or, for MVE, a general-purpose scalar) shift amount.  Handles both
   the Neon and the MVE encodings.  */
static void
do_neon_shl (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  if (!inst.operands[2].isreg)
    {
      /* Immediate-shift form: Dd/Qd, Dm/Qm, #imm.  */
      enum neon_shape rs;
      struct neon_type_el et;
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	{
	  rs = neon_select_shape (NS_QQI, NS_NULL);
	  et = neon_check_type (2, rs, N_EQK, N_KEY | N_I_MVE);
	}
      else
	{
	  rs = neon_select_shape (NS_DDI, NS_QQI, NS_NULL);
	  et = neon_check_type (2, rs, N_EQK, N_KEY | N_I_ALL);
	}
      int imm = inst.operands[2].imm;

      /* Valid left-shift amounts are 0 .. element size - 1.  */
      constraint (imm < 0 || (unsigned)imm >= et.size,
		  _("immediate out of range for shift"));
      NEON_ENCODE (IMMED, inst);
      neon_imm_shift (false, 0, neon_quad (rs), et, imm);
    }
  else
    {
      /* Register-shift form.  */
      enum neon_shape rs;
      struct neon_type_el et;
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	{
	  rs = neon_select_shape (NS_QQQ, NS_QQR, NS_NULL);
	  et = neon_check_type (3, rs, N_EQK, N_SU_MVE | N_KEY, N_EQK | N_EQK);
	}
      else
	{
	  rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
	  et = neon_check_type (3, rs, N_EQK, N_SU_ALL | N_KEY, N_EQK | N_SGN);
	}
      if (rs == NS_QQR)
	{
	  /* MVE scalar-shift form: Qda, Qda, Rm.  Destination and first
	     source must be the same register.  */
	  constraint (inst.operands[0].reg != inst.operands[1].reg,
		      _("invalid instruction shape"));
	  /* SP/PC as the shift source are deprecated, not illegal.  */
	  if (inst.operands[2].reg == REG_SP)
	    as_tsktsk (MVE_BAD_SP);
	  else if (inst.operands[2].reg == REG_PC)
	    as_tsktsk (MVE_BAD_PC);

	  inst.instruction = 0xee311e60;
	  inst.instruction |= (et.type == NT_unsigned) << 28;
	  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
	  inst.instruction |= neon_logbits (et.size) << 18;
	  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
	  inst.instruction |= inst.operands[2].reg;
	  inst.is_neon = 1;
	}
      else
	{
	  unsigned int tmp;

	  /* VSHL/VQSHL 3-register variants have syntax such as:
	       vshl.xx Dd, Dm, Dn
	     whereas other 3-register operations encoded by neon_three_same
	     have syntax like:
	       vadd.xx Dd, Dn, Dm
	     (i.e. with Dn & Dm reversed).  Swap operands[1].reg and
	     operands[2].reg here.  */
	  tmp = inst.operands[2].reg;
	  inst.operands[2].reg = inst.operands[1].reg;
	  inst.operands[1].reg = tmp;
	  NEON_ENCODE (INTEGER, inst);
	  neon_three_same (neon_quad (rs), et.type == NT_unsigned, et.size);
	}
    }
}
/* Encode VQSHL: saturating vector shift left, by immediate or by
   register/scalar.  Structure mirrors do_neon_shl; only the accepted
   types and the base opcodes differ.  */
static void
do_neon_qshl (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  if (!inst.operands[2].isreg)
    {
      /* Immediate-shift form.  */
      enum neon_shape rs;
      struct neon_type_el et;
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	{
	  rs = neon_select_shape (NS_QQI, NS_NULL);
	  et = neon_check_type (2, rs, N_EQK, N_KEY | N_SU_MVE);
	}
      else
	{
	  rs = neon_select_shape (NS_DDI, NS_QQI, NS_NULL);
	  et = neon_check_type (2, rs, N_EQK, N_SU_ALL | N_KEY);
	}
      int imm = inst.operands[2].imm;

      /* Valid left-shift amounts are 0 .. element size - 1.  */
      constraint (imm < 0 || (unsigned)imm >= et.size,
		  _("immediate out of range for shift"));
      NEON_ENCODE (IMMED, inst);
      neon_imm_shift (true, et.type == NT_unsigned, neon_quad (rs), et, imm);
    }
  else
    {
      /* Register-shift form.  */
      enum neon_shape rs;
      struct neon_type_el et;
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	{
	  rs = neon_select_shape (NS_QQQ, NS_QQR, NS_NULL);
	  et = neon_check_type (3, rs, N_EQK, N_SU_MVE | N_KEY, N_EQK | N_EQK);
	}
      else
	{
	  rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
	  et = neon_check_type (3, rs, N_EQK, N_SU_ALL | N_KEY, N_EQK | N_SGN);
	}
      if (rs == NS_QQR)
	{
	  /* MVE scalar-shift form: Qda, Qda, Rm.  */
	  constraint (inst.operands[0].reg != inst.operands[1].reg,
		      _("invalid instruction shape"));
	  /* SP/PC as the shift source are deprecated, not illegal.  */
	  if (inst.operands[2].reg == REG_SP)
	    as_tsktsk (MVE_BAD_SP);
	  else if (inst.operands[2].reg == REG_PC)
	    as_tsktsk (MVE_BAD_PC);

	  inst.instruction = 0xee311ee0;
	  inst.instruction |= (et.type == NT_unsigned) << 28;
	  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
	  inst.instruction |= neon_logbits (et.size) << 18;
	  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
	  inst.instruction |= inst.operands[2].reg;
	  inst.is_neon = 1;
	}
      else
	{
	  unsigned int tmp;

	  /* See note in do_neon_shl.  */
	  tmp = inst.operands[2].reg;
	  inst.operands[2].reg = inst.operands[1].reg;
	  inst.operands[1].reg = tmp;
	  NEON_ENCODE (INTEGER, inst);
	  neon_three_same (neon_quad (rs), et.type == NT_unsigned, et.size);
	}
    }
}
/* Encode VRSHL/VQRSHL: rounding (saturating) vector shift left.  The
   two mnemonics share this encoder; for the MVE scalar (QQR) form they
   are told apart by the pre-set opcode bits in inst.instruction.  */
static void
do_neon_rshl (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  enum neon_shape rs;
  struct neon_type_el et;
  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      rs = neon_select_shape (NS_QQR, NS_QQQ, NS_NULL);
      et = neon_check_type (3, rs, N_EQK, N_EQK, N_SU_MVE | N_KEY);
    }
  else
    {
      rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
      et = neon_check_type (3, rs, N_EQK, N_EQK, N_SU_ALL | N_KEY);
    }

  unsigned int tmp;
  if (rs == NS_QQR)
    {
      /* MVE scalar-shift form: Qda, Qda, Rm.  SP/PC shift sources are
	 deprecated, not illegal.  */
      if (inst.operands[2].reg == REG_PC)
	as_tsktsk (MVE_BAD_PC);
      else if (inst.operands[2].reg == REG_SP)
	as_tsktsk (MVE_BAD_SP);

      constraint (inst.operands[0].reg != inst.operands[1].reg,
		  _("invalid instruction shape"));

      if (inst.instruction == 0x0000510)
	/* We are dealing with vqrshl.  */
	inst.instruction = 0xee331ee0;
      else
	/* We are dealing with vrshl.  */
	inst.instruction = 0xee331e60;

      inst.instruction |= (et.type == NT_unsigned) << 28;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= neon_logbits (et.size) << 18;
      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= inst.operands[2].reg;
      inst.is_neon = 1;
    }
  else
    {
      /* Three-register form uses reversed Dn/Dm operand order, like
	 VSHL (see note in do_neon_shl).  */
      tmp = inst.operands[2].reg;
      inst.operands[2].reg = inst.operands[1].reg;
      inst.operands[1].reg = tmp;
      neon_three_same (neon_quad (rs), et.type == NT_unsigned, et.size);
    }
}
/* Compute the "cmode" field for a Neon bitwise-logic immediate
   (VBIC/VORR-style) from IMMEDIATE and the element SIZE in bits.  On
   success the (possibly shifted-down) 8-bit immediate is stored in
   *IMMBITS and the cmode value is returned; otherwise FAIL is returned
   with an error recorded via first_error.  */
static int
neon_cmode_for_logic_imm (unsigned immediate, unsigned *immbits, int size)
{
  /* Handle .I8 pseudo-instructions.  */
  if (size == 8)
    {
      /* Unfortunately, this will make everything apart from zero
	 out-of-range.  FIXME is this the intended semantics?  There
	 doesn't seem much point in accepting .I8 if so.  */
      immediate |= immediate << 8;
      size = 16;
    }

  if (size >= 32)
    {
      /* 32-bit cmodes: a single non-zero byte in any of the four byte
	 positions.  */
      if (immediate == (immediate & 0x000000ff))
	{
	  *immbits = immediate;
	  return 0x1;
	}
      else if (immediate == (immediate & 0x0000ff00))
	{
	  *immbits = immediate >> 8;
	  return 0x3;
	}
      else if (immediate == (immediate & 0x00ff0000))
	{
	  *immbits = immediate >> 16;
	  return 0x5;
	}
      else if (immediate == (immediate & 0xff000000))
	{
	  *immbits = immediate >> 24;
	  return 0x7;
	}
      /* Fall back to the 16-bit cmodes if the value is a repeating
	 halfword pattern.  */
      if ((immediate & 0xffff) != (immediate >> 16))
	goto bad_immediate;
      immediate &= 0xffff;
    }

  /* 16-bit cmodes: a single non-zero byte in either halfword position.  */
  if (immediate == (immediate & 0x000000ff))
    {
      *immbits = immediate;
      return 0x9;
    }
  else if (immediate == (immediate & 0x0000ff00))
    {
      *immbits = immediate >> 8;
      return 0xb;
    }

 bad_immediate:
  first_error (_("immediate value out of range"));
  return FAIL;
}
/* Encode the Neon/MVE bitwise-logic instructions (VAND, VBIC, VORR,
   VORN, VEOR, ...), in both the three-register form and the
   register-plus-immediate form.  VAND/VORN with an immediate are
   pseudo-instructions for VBIC/VORR with the inverted immediate.  */
static void
do_neon_logic (void)
{
  if (inst.operands[2].present && inst.operands[2].isreg)
    {
      /* Three-register form.  */
      enum neon_shape rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
      if (rs == NS_QQQ
	  && !check_simd_pred_availability (false,
					    NEON_CHECK_ARCH | NEON_CHECK_CC))
	return;
      else if (rs != NS_QQQ
	       && !ARM_CPU_HAS_FEATURE (cpu_variant, fpu_neon_ext_v1))
	first_error (BAD_FPU);

      neon_check_type (3, rs, N_IGNORE_TYPE);
      /* U bit and size field were set as part of the bitmask.  */
      NEON_ENCODE (INTEGER, inst);
      neon_three_same (neon_quad (rs), 0, -1);
    }
  else
    {
      /* Immediate form, possibly with an implicit first operand.  */
      const int three_ops_form = (inst.operands[2].present
				  && !inst.operands[2].isreg);
      const int immoperand = (three_ops_form ? 2 : 1);
      enum neon_shape rs = (three_ops_form
			    ? neon_select_shape (NS_DDI, NS_QQI, NS_NULL)
			    : neon_select_shape (NS_DI, NS_QI, NS_NULL));
      /* Because neon_select_shape makes the second operand a copy of the first
	 if the second operand is not present.  */
      if (rs == NS_QQI
	  && !check_simd_pred_availability (false,
					    NEON_CHECK_ARCH | NEON_CHECK_CC))
	return;
      else if (rs != NS_QQI
	       && !ARM_CPU_HAS_FEATURE (cpu_variant, fpu_neon_ext_v1))
	first_error (BAD_FPU);

      struct neon_type_el et;
      /* MVE restricts the immediate forms to I16/I32 element types.  */
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	et = neon_check_type (2, rs, N_I32 | N_I16 | N_KEY, N_EQK);
      else
	et = neon_check_type (2, rs, N_I8 | N_I16 | N_I32 | N_I64 | N_F32
			      | N_KEY, N_EQK);

      if (et.type == NT_invtype)
	return;

      /* The mnemonic opcode is stashed in the low bits of
	 inst.instruction at this point.  */
      enum neon_opc opcode = (enum neon_opc) inst.instruction & 0x0fffffff;
      unsigned immbits;
      int cmode;

      if (three_ops_form)
	constraint (inst.operands[0].reg != inst.operands[1].reg,
		    _("first and second operands shall be the same register"));

      NEON_ENCODE (IMMED, inst);

      immbits = inst.operands[immoperand].imm;
      if (et.size == 64)
	{
	  /* .i64 is a pseudo-op, so the immediate must be a repeating
	     pattern.  */
	  if (immbits != (inst.operands[immoperand].regisimm ?
			  inst.operands[immoperand].reg : 0))
	    {
	      /* Set immbits to an invalid constant.  */
	      immbits = 0xdeadbeef;
	    }
	}

      switch (opcode)
	{
	case N_MNEM_vbic:
	  cmode = neon_cmode_for_logic_imm (immbits, &immbits, et.size);
	  break;

	case N_MNEM_vorr:
	  cmode = neon_cmode_for_logic_imm (immbits, &immbits, et.size);
	  break;

	case N_MNEM_vand:
	  /* Pseudo-instruction for VBIC.  */
	  neon_invert_size (&immbits, 0, et.size);
	  cmode = neon_cmode_for_logic_imm (immbits, &immbits, et.size);
	  break;

	case N_MNEM_vorn:
	  /* Pseudo-instruction for VORR.  */
	  neon_invert_size (&immbits, 0, et.size);
	  cmode = neon_cmode_for_logic_imm (immbits, &immbits, et.size);
	  break;

	default:
	  abort ();
	}

      if (cmode == FAIL)
	return;

      inst.instruction |= neon_quad (rs) << 6;
      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= cmode << 8;
      neon_write_immbits (immbits);

      neon_dp_fixup (&inst);
    }
}
  15028. static void
  15029. do_neon_bitfield (void)
  15030. {
  15031. enum neon_shape rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
  15032. neon_check_type (3, rs, N_IGNORE_TYPE);
  15033. neon_three_same (neon_quad (rs), 0, -1);
  15034. }
  15035. static void
  15036. neon_dyadic_misc (enum neon_el_type ubit_meaning, unsigned types,
  15037. unsigned destbits)
  15038. {
  15039. enum neon_shape rs = neon_select_shape (NS_DDD, NS_QQQ, NS_QQR, NS_NULL);
  15040. struct neon_type_el et = neon_check_type (3, rs, N_EQK | destbits, N_EQK,
  15041. types | N_KEY);
  15042. if (et.type == NT_float)
  15043. {
  15044. NEON_ENCODE (FLOAT, inst);
  15045. if (rs == NS_QQR)
  15046. mve_encode_qqr (et.size, 0, 1);
  15047. else
  15048. neon_three_same (neon_quad (rs), 0, et.size == 16 ? (int) et.size : -1);
  15049. }
  15050. else
  15051. {
  15052. NEON_ENCODE (INTEGER, inst);
  15053. if (rs == NS_QQR)
  15054. mve_encode_qqr (et.size, et.type == ubit_meaning, 0);
  15055. else
  15056. neon_three_same (neon_quad (rs), et.type == ubit_meaning, et.size);
  15057. }
  15058. }
/* Encode a dyadic instruction accepting signed/unsigned/float 32-bit
   element types (N_SUF_32), D registers only.  */
static void
do_neon_dyadic_if_su_d (void)
{
  /* This version only allow D registers, but that constraint is enforced during
     operand parsing so we don't need to do anything extra here.  */
  neon_dyadic_misc (NT_unsigned, N_SUF_32, 0);
}
/* Encode a dyadic instruction accepting integer/float 32-bit element
   types (N_IF_32), D registers only.  */
static void
do_neon_dyadic_if_i_d (void)
{
  /* The "untyped" case can't happen.  Do this to stop the "U" bit being
     affected if we specify unsigned args.  */
  neon_dyadic_misc (NT_untyped, N_IF_32, 0);
}
/* Encode the MVE VSTR/VLDR [Qn, #imm]{!} (vector-base plus immediate)
   addressing form.  SIZE is the access size in bits, ELSIZE the element
   size from the type suffix, and LOAD non-zero for VLDR.  */
static void
do_mve_vstr_vldr_QI (int size, int elsize, int load)
{
  /* Only word and doubleword accesses support a Q-register base.  */
  constraint (size < 32, BAD_ADDR_MODE);
  constraint (size != elsize, BAD_EL_TYPE);
  constraint (inst.operands[1].immisreg, BAD_ADDR_MODE);
  constraint (!inst.operands[1].preind, BAD_ADDR_MODE);
  constraint (load && inst.operands[0].reg == inst.operands[1].reg,
	      _("destination register and offset register may not be the"
		" same"));

  /* Split the signed offset into an add/subtract flag and magnitude.  */
  int imm = inst.relocs[0].exp.X_add_number;
  int add = 1;
  if (imm < 0)
    {
      add = 0;
      imm = -imm;
    }
  /* Offset must be a multiple of the access size, within 7 bits after
     scaling.  */
  constraint ((imm % (size / 8) != 0)
	      || imm > (0x7f << neon_logbits (size)),
	      (size == 32) ? _("immediate must be a multiple of 4 in the"
			       " range of +/-[0,508]")
			   : _("immediate must be a multiple of 8 in the"
			       " range of +/-[0,1016]"));

  inst.instruction |= 0x11 << 24;
  inst.instruction |= add << 23;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= inst.operands[1].writeback << 21;
  inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= 1 << 12;
  inst.instruction |= (size == 64) << 8;
  /* Clear the low byte before inserting the scaled offset.  */
  inst.instruction &= 0xffffff00;
  inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  inst.instruction |= imm >> neon_logbits (size);
}
/* Encode the MVE VSTR/VLDR [Rn, Qm{, UXTW #os}] (scatter/gather with
   register offsets) addressing form.  SIZE is the access size in bits,
   ELSIZE the element size from the type suffix, LOAD non-zero for
   VLDR.  */
static void
do_mve_vstr_vldr_RQ (int size, int elsize, int load)
{
  /* The optional shift is stored in the top bits of operands[1].imm;
     the low 5 bits hold the offset Q register.  */
  unsigned os = inst.operands[1].imm >> 5;
  unsigned type = inst.vectype.el[0].type;

  constraint (os != 0 && size == 8,
	      _("can not shift offsets when accessing less than half-word"));
  constraint (os && os != neon_logbits (size),
	      _("shift immediate must be 1, 2 or 3 for half-word, word"
		" or double-word accesses respectively"));

  /* PC as base is deprecated, not illegal.  */
  if (inst.operands[1].reg == REG_PC)
    as_tsktsk (MVE_BAD_PC);

  /* Element type must be compatible with the access size.  */
  switch (size)
    {
    case 8:
      constraint (elsize >= 64, BAD_EL_TYPE);
      break;
    case 16:
      constraint (elsize < 16 || elsize >= 64, BAD_EL_TYPE);
      break;
    case 32:
    case 64:
      constraint (elsize != size, BAD_EL_TYPE);
      break;
    default:
      break;
    }
  constraint (inst.operands[1].writeback || !inst.operands[1].preind,
	      BAD_ADDR_MODE);

  if (load)
    {
      constraint (inst.operands[0].reg == (inst.operands[1].imm & 0x1f),
		  _("destination register and offset register may not be"
		    " the same"));
      /* A widening load must be signed or unsigned; a same-size load
	 must not be signed.  */
      constraint (size == elsize && type == NT_signed, BAD_EL_TYPE);
      constraint (size != elsize && type != NT_unsigned && type != NT_signed,
		  BAD_EL_TYPE);
      /* Bit 28 selects zero-extension (or same-size access).  */
      inst.instruction |= ((size == elsize) || (type == NT_unsigned)) << 28;
    }
  else
    {
      constraint (type != NT_untyped, BAD_EL_TYPE);
    }

  inst.instruction |= 1 << 23;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= neon_logbits (elsize) << 7;
  inst.instruction |= HI1 (inst.operands[1].imm) << 5;
  inst.instruction |= LOW4 (inst.operands[1].imm);
  /* Bit 0 records whether an offset shift was used.  */
  inst.instruction |= !!os;
}
/* Encode the MVE VSTR/VLDR [Rn, #imm]{!} / [Rn], #imm (scalar base plus
   immediate) addressing form.  SIZE is the access size in bits, ELSIZE
   the element size from the type suffix, LOAD non-zero for VLDR.
   Handles both the same-size form and the narrowing/widening form
   (SIZE != ELSIZE).  */
static void
do_mve_vstr_vldr_RI (int size, int elsize, int load)
{
  enum neon_el_type type = inst.vectype.el[0].type;

  constraint (size >= 64, BAD_ADDR_MODE);
  /* Element type must be compatible with the access size.  */
  switch (size)
    {
    case 16:
      constraint (elsize < 16 || elsize >= 64, BAD_EL_TYPE);
      break;
    case 32:
      constraint (elsize != size, BAD_EL_TYPE);
      break;
    default:
      break;
    }

  if (load)
    {
      /* Widening loads must specify signedness.  */
      constraint (elsize != size && type != NT_unsigned
		  && type != NT_signed, BAD_EL_TYPE);
    }
  else
    {
      /* Narrowing stores must be untyped.  */
      constraint (elsize != size && type != NT_untyped, BAD_EL_TYPE);
    }

  /* Split the signed offset into an add/subtract flag and magnitude.  */
  int imm = inst.relocs[0].exp.X_add_number;
  int add = 1;
  if (imm < 0)
    {
      add = 0;
      imm = -imm;
    }

  /* Offset must be a multiple of the access size, within 7 bits after
     scaling.  */
  if ((imm % (size / 8) != 0) || imm > (0x7f << neon_logbits (size)))
    {
      switch (size)
	{
	case 8:
	  constraint (1, _("immediate must be in the range of +/-[0,127]"));
	  break;
	case 16:
	  constraint (1, _("immediate must be a multiple of 2 in the"
			   " range of +/-[0,254]"));
	  break;
	case 32:
	  constraint (1, _("immediate must be a multiple of 4 in the"
			   " range of +/-[0,508]"));
	  break;
	}
    }

  if (size != elsize)
    {
      /* Narrowing/widening form: restricted register ranges.  */
      constraint (inst.operands[1].reg > 7, BAD_HIREG);
      constraint (inst.operands[0].reg > 14,
		  _("MVE vector register in the range [Q0..Q7] expected"));
      inst.instruction |= (load && type == NT_unsigned) << 28;
      inst.instruction |= (size == 16) << 19;
      inst.instruction |= neon_logbits (elsize) << 7;
    }
  else
    {
      /* PC base, and SP base with writeback, are deprecated.  */
      if (inst.operands[1].reg == REG_PC)
	as_tsktsk (MVE_BAD_PC);
      else if (inst.operands[1].reg == REG_SP && inst.operands[1].writeback)
	as_tsktsk (MVE_BAD_SP);
      inst.instruction |= 1 << 12;
      inst.instruction |= neon_logbits (size) << 7;
    }

  inst.instruction |= inst.operands[1].preind << 24;
  inst.instruction |= add << 23;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= inst.operands[1].writeback << 21;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  /* Clear the low 7 bits before inserting the scaled offset.  */
  inst.instruction &= 0xffffff80;
  inst.instruction |= imm >> neon_logbits (size);
}
/* Top-level encoder for the MVE VSTRB/H/W/D and VLDRB/H/W/D mnemonics.
   Determines the access size and load/store direction from the mnemonic
   and dispatches to the encoder for the addressing form used.  */
static void
do_mve_vstr_vldr (void)
{
  unsigned size;
  int load = 0;

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* Access size and direction from the mnemonic; the vldr* cases fall
     through to the matching vstr* case to pick up the size.  */
  switch (inst.instruction)
    {
    default:
      gas_assert (0);
      break;
    case M_MNEM_vldrb:
      load = 1;
      /* fall through.  */
    case M_MNEM_vstrb:
      size = 8;
      break;
    case M_MNEM_vldrh:
      load = 1;
      /* fall through.  */
    case M_MNEM_vstrh:
      size = 16;
      break;
    case M_MNEM_vldrw:
      load = 1;
      /* fall through.  */
    case M_MNEM_vstrw:
      size = 32;
      break;
    case M_MNEM_vldrd:
      load = 1;
      /* fall through.  */
    case M_MNEM_vstrd:
      size = 64;
      break;
    }

  unsigned elsize = inst.vectype.el[0].size;

  if (inst.operands[1].isquad)
    {
      /* We are dealing with [Q, imm]{!} cases.  */
      do_mve_vstr_vldr_QI (size, elsize, load);
    }
  else
    {
      if (inst.operands[1].immisreg == 2)
	{
	  /* We are dealing with [R, Q, {UXTW #os}] cases.  */
	  do_mve_vstr_vldr_RQ (size, elsize, load);
	}
      else if (!inst.operands[1].immisreg)
	{
	  /* We are dealing with [R, Imm]{!}/[R], Imm cases.  */
	  do_mve_vstr_vldr_RI (size, elsize, load);
	}
      else
	constraint (1, BAD_ADDR_MODE);
    }

  inst.is_neon = 1;
}
/* Encode the MVE interleaving/deinterleaving VST2/VST4/VLD2/VLD4
   instructions.  Only the [Rn]{!} addressing form (no offset) is
   valid.  */
static void
do_mve_vst_vld (void)
{
  if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    return;

  /* No immediate or register offset is permitted.  */
  constraint (!inst.operands[1].preind || inst.relocs[0].exp.X_add_symbol != 0
	      || inst.relocs[0].exp.X_add_number != 0
	      || inst.operands[1].immisreg != 0,
	      BAD_ADDR_MODE);
  constraint (inst.vectype.el[0].size > 32, BAD_EL_TYPE);
  /* PC base, and SP base with writeback, are deprecated.  */
  if (inst.operands[1].reg == REG_PC)
    as_tsktsk (MVE_BAD_PC);
  else if (inst.operands[1].reg == REG_SP && inst.operands[1].writeback)
    as_tsktsk (MVE_BAD_SP);

  /* These instructions are one of the "exceptions" mentioned in
     handle_pred_state.  They are MVE instructions that are not VPT compatible
     and do not accept a VPT code, thus appending such a code is a syntax
     error.  */
  if (inst.cond > COND_ALWAYS)
    first_error (BAD_SYNTAX);
  /* If we append a scalar condition code we can set this to
     MVE_OUTSIDE_PRED_INSN as it will also lead to a syntax error.  */
  else if (inst.cond < COND_ALWAYS)
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;
  else
    inst.pred_insn_type = MVE_UNPREDICABLE_INSN;

  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= inst.operands[1].writeback << 21;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= neon_logbits (inst.vectype.el[0].size) << 7;
  inst.is_neon = 1;
}
/* Encode the MVE VADDLV instruction (long add across vector, per the
   mnemonic), shape: two GPR results and one Q-register source.  */
static void
do_mve_vaddlv (void)
{
  enum neon_shape rs = neon_select_shape (NS_RRQ, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_EQK, N_S32 | N_U32 | N_KEY);

  if (et.type == NT_invtype)
    first_error (BAD_EL_TYPE);

  /* A VPT suffix means the instruction must appear inside a VPT block;
     otherwise it is an MVE instruction outside any predication.  */
  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  constraint (inst.operands[1].reg > 14, MVE_BAD_QREG);

  inst.instruction |= (et.type == NT_unsigned) << 28;	/* U bit.  */
  inst.instruction |= inst.operands[1].reg << 19;
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[2].reg;
  inst.is_neon = 1;
}
/* Encode a three-operand Neon/MVE instruction taking signed, unsigned or
   float 32-bit-or-narrower element types (N_SUF_32).  vmax/vmin with float
   types additionally require the Neon v1 FPU extension.  */
static void
do_neon_dyadic_if_su (void)
{
  enum neon_shape rs = neon_select_shape (NS_DDD, NS_QQQ, NS_QQR, NS_NULL);
  struct neon_type_el et = neon_check_type (3, rs, N_EQK , N_EQK,
					    N_SUF_32 | N_KEY);

  /* Floating-point vmax/vmin need fpu_neon_ext_v1.  */
  constraint ((inst.instruction == ((unsigned) N_MNEM_vmax)
	       || inst.instruction == ((unsigned) N_MNEM_vmin))
	      && et.type == NT_float
	      && !ARM_CPU_HAS_FEATURE (cpu_variant,fpu_neon_ext_v1), BAD_FPU);

  if (!check_simd_pred_availability (et.type == NT_float,
				    NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  neon_dyadic_misc (NT_unsigned, N_SUF_32, 0);
}
/* Encode vector add/subtract with integer or float element types, trying
   the VFP form first and falling back to the Neon/MVE encoding.  */
static void
do_neon_addsub_if_i (void)
{
  /* Prefer the scalar VFP encoding when available.  */
  if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd)
      && try_vfp_nsyn (3, do_vfp_nsyn_add_sub) == SUCCESS)
    return;

  enum neon_shape rs = neon_select_shape (NS_DDD, NS_QQQ, NS_QQR, NS_NULL);
  struct neon_type_el et = neon_check_type (3, rs, N_EQK,
					    N_EQK, N_IF_32 | N_I64 | N_KEY);

  /* The Q,Q,R form has no 64-bit elements.  */
  constraint (rs == NS_QQR && et.size == 64, BAD_FPU);

  /* If we are parsing Q registers and the element types match MVE, which NEON
     also supports, then we must check whether this is an instruction that can
     be used by both MVE/NEON.  This distinction can be made based on whether
     they are predicated or not.  */
  if ((rs == NS_QQQ || rs == NS_QQR) && et.size != 64)
    {
      if (!check_simd_pred_availability (et.type == NT_float,
					 NEON_CHECK_ARCH | NEON_CHECK_CC))
	return;
    }
  else
    {
      /* If they are either in a D register or are using an unsupported.  */
      if (rs != NS_QQR
	  && vfp_or_neon_is_neon (NEON_CHECK_CC | NEON_CHECK_ARCH) == FAIL)
	return;
    }

  /* The "untyped" case can't happen.  Do this to stop the "U" bit being
     affected if we specify unsigned args.  */
  neon_dyadic_misc (NT_untyped, N_IF_32 | N_I64, 0);
}
  15396. /* Swaps operands 1 and 2. If operand 1 (optional arg) was omitted, we want the
  15397. result to be:
  15398. V<op> A,B (A is operand 0, B is operand 2)
  15399. to mean:
  15400. V<op> A,B,A
  15401. not:
  15402. V<op> A,B,B
  15403. so handle that case specially. */
  15404. static void
  15405. neon_exchange_operands (void)
  15406. {
  15407. if (inst.operands[1].present)
  15408. {
  15409. void *scratch = xmalloc (sizeof (inst.operands[0]));
  15410. /* Swap operands[1] and operands[2]. */
  15411. memcpy (scratch, &inst.operands[1], sizeof (inst.operands[0]));
  15412. inst.operands[1] = inst.operands[2];
  15413. memcpy (&inst.operands[2], scratch, sizeof (inst.operands[0]));
  15414. free (scratch);
  15415. }
  15416. else
  15417. {
  15418. inst.operands[1] = inst.operands[2];
  15419. inst.operands[2] = inst.operands[0];
  15420. }
  15421. }
/* Encode a Neon compare instruction.  REGTYPES are the types allowed for the
   register-register form, IMMTYPES those for the compare-against-immediate
   (typically #0) form.  INVERT swaps the two source operands so inverted
   conditions (e.g. "less than" via "greater than") can reuse one encoding.  */
static void
neon_compare (unsigned regtypes, unsigned immtypes, int invert)
{
  if (inst.operands[2].isreg)
    {
      if (invert)
	neon_exchange_operands ();

      neon_dyadic_misc (NT_unsigned, regtypes, N_SIZ);
    }
  else
    {
      /* Compare against immediate: D/Q destination and source plus an
	 immediate operand.  */
      enum neon_shape rs = neon_select_shape (NS_DDI, NS_QQI, NS_NULL);
      struct neon_type_el et = neon_check_type (2, rs,
	N_EQK | N_SIZ, immtypes | N_KEY);

      NEON_ENCODE (IMMED, inst);
      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= LOW4 (inst.operands[1].reg);
      inst.instruction |= HI1 (inst.operands[1].reg) << 5;
      inst.instruction |= neon_quad (rs) << 6;
      inst.instruction |= (et.type == NT_float) << 10;	/* F bit.  */
      inst.instruction |= neon_logbits (et.size) << 18;	/* Size field.  */

      neon_dp_fixup (&inst);
    }
}
/* Encode a non-inverted Neon compare (e.g. VCGT/VCGE forms).  */
static void
do_neon_cmp (void)
{
  neon_compare (N_SUF_32, N_S_32 | N_F_16_32, false);
}
/* Encode an inverted Neon compare: operands are exchanged so e.g. VCLT is
   emitted as VCGT with swapped sources.  */
static void
do_neon_cmp_inv (void)
{
  neon_compare (N_SUF_32, N_S_32 | N_F_16_32, true);
}
/* Encode Neon compare-equal (VCEQ); equality accepts integer-or-float types
   in both register and immediate forms.  */
static void
do_neon_ceq (void)
{
  neon_compare (N_IF_32, N_IF_32, false);
}
  15462. /* For multiply instructions, we have the possibility of 16-bit or 32-bit
  15463. scalars, which are encoded in 5 bits, M : Rm.
  15464. For 16-bit scalars, the register is encoded in Rm[2:0] and the index in
  15465. M:Rm[3], and for 32-bit scalars, the register is encoded in Rm[3:0] and the
  15466. index in M.
  15467. Dot Product instructions are similar to multiply instructions except elsize
  15468. should always be 32.
  15469. This function translates SCALAR, which is GAS's internal encoding of indexed
  15470. scalar register, to raw encoding. There is also register and index range
  15471. check based on ELSIZE. */
  15472. static unsigned
  15473. neon_scalar_for_mul (unsigned scalar, unsigned elsize)
  15474. {
  15475. unsigned regno = NEON_SCALAR_REG (scalar);
  15476. unsigned elno = NEON_SCALAR_INDEX (scalar);
  15477. switch (elsize)
  15478. {
  15479. case 16:
  15480. if (regno > 7 || elno > 3)
  15481. goto bad_scalar;
  15482. return regno | (elno << 3);
  15483. case 32:
  15484. if (regno > 15 || elno > 1)
  15485. goto bad_scalar;
  15486. return regno | (elno << 4);
  15487. default:
  15488. bad_scalar:
  15489. first_error (_("scalar out of range for multiply instruction"));
  15490. }
  15491. return 0;
  15492. }
/* Encode multiply / multiply-accumulate scalar instructions.  ET is the
   checked element type; UBIT (non-zero) sets the U/Q bit at position 24.  */
static void
neon_mul_mac (struct neon_type_el et, int ubit)
{
  unsigned scalar;

  /* Give a more helpful error message if we have an invalid type.  */
  if (et.type == NT_invtype)
    return;

  /* Translate GAS's scalar operand into the raw M:Rm encoding, with range
     checking against the element size.  */
  scalar = neon_scalar_for_mul (inst.operands[2].reg, et.size);
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  inst.instruction |= LOW4 (scalar);
  inst.instruction |= HI1 (scalar) << 5;
  inst.instruction |= (et.type == NT_float) << 8;	/* F bit.  */
  inst.instruction |= neon_logbits (et.size) << 20;	/* Size field.  */
  inst.instruction |= (ubit != 0) << 24;

  neon_dp_fixup (&inst);
}
/* Encode multiply-accumulate, which may take a scalar third operand (Neon
   only), a plain GPR third operand (MVE only), or a vector third operand.  */
static void
do_neon_mac_maybe_scalar (void)
{
  /* Try the VFP mla/mls forms first.  */
  if (try_vfp_nsyn (3, do_vfp_nsyn_mla_mls) == SUCCESS)
    return;

  if (!check_simd_pred_availability (false, NEON_CHECK_CC | NEON_CHECK_ARCH))
    return;

  if (inst.operands[2].isscalar)
    {
      /* Indexed-scalar form: Neon only, not MVE.  */
      constraint (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext), BAD_FPU);
      enum neon_shape rs = neon_select_shape (NS_DDS, NS_QQS, NS_NULL);
      struct neon_type_el et = neon_check_type (3, rs,
	N_EQK, N_EQK, N_I16 | N_I32 | N_F_16_32 | N_KEY);
      NEON_ENCODE (SCALAR, inst);
      neon_mul_mac (et, neon_quad (rs));
    }
  else if (!inst.operands[2].isvec)
    {
      /* Q,Q,R form: MVE only.  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext), BAD_FPU);

      enum neon_shape rs = neon_select_shape (NS_QQR, NS_NULL);
      neon_check_type (3, rs, N_EQK, N_EQK, N_SU_MVE | N_KEY);

      neon_dyadic_misc (NT_unsigned, N_SU_MVE, 0);
    }
  else
    {
      /* Three-vector form: Neon only.  */
      constraint (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext), BAD_FPU);
      /* The "untyped" case can't happen.  Do this to stop the "U" bit being
	 affected if we specify unsigned args.  */
      neon_dyadic_misc (NT_untyped, N_IF_32, 0);
    }
}
/* Encode the BFloat16 VFMAB/VFMAT instructions, in both the indexed-scalar
   and the three-vector forms.  Requires Armv8 Neon plus the BF16 extension.  */
static void
do_bfloat_vfma (void)
{
  constraint (!mark_feature_used (&fpu_neon_ext_armv8), _(BAD_FPU));
  constraint (!mark_feature_used (&arm_ext_bf16), _(BAD_BF16));
  enum neon_shape rs;
  int t_bit = 0;

  /* Anything that is not VFMAB is encoded as VFMAT with the T bit set.  */
  if (inst.instruction != B_MNEM_vfmab)
    {
      t_bit = 1;
      inst.instruction = B_MNEM_vfmat;
    }

  if (inst.operands[2].isscalar)
    {
      rs = neon_select_shape (NS_QQS, NS_NULL);
      neon_check_type (3, rs, N_EQK, N_EQK, N_BF16 | N_KEY);

      inst.instruction |= (1 << 25);
      /* The scalar operand packs the index in its low nibble and the
	 register number above it.  */
      int idx = inst.operands[2].reg & 0xf;
      constraint (!(idx < 4), _("index must be in the range 0 to 3"));

      /* Index should be within our range, remaining bits.  */
      inst.operands[2].reg >>= 4;
      constraint (!(inst.operands[2].reg < 8),
		  _("indexed register must be less than 8"));
      neon_three_args (t_bit);
      /* Scatter the index bits into the encoding.  */
      inst.instruction |= ((idx & 1) << 3);
      inst.instruction |= ((idx & 2) << 4);
    }
  else
    {
      rs = neon_select_shape (NS_QQQ, NS_NULL);
      neon_check_type (3, rs, N_EQK, N_EQK, N_BF16 | N_KEY);
      neon_three_args (t_bit);
    }
}
/* Encode fused multiply-accumulate: VFP form if possible, otherwise the MVE
   Q,Q,R or Q,Q,Q form, otherwise the Neon three-vector form.  */
static void
do_neon_fmac (void)
{
  /* Prefer the scalar VFP fma/fms encoding when the FMA extension exists.  */
  if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_fma)
      && try_vfp_nsyn (3, do_vfp_nsyn_fma_fms) == SUCCESS)
    return;

  if (!check_simd_pred_availability (true, NEON_CHECK_CC | NEON_CHECK_ARCH))
    return;

  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext))
    {
      enum neon_shape rs = neon_select_shape (NS_QQQ, NS_QQR, NS_NULL);
      struct neon_type_el et = neon_check_type (3, rs, N_F_MVE | N_KEY, N_EQK,
						N_EQK);

      if (rs == NS_QQR)
	{
	  /* Vector-by-GPR form; SP/PC are unpredictable, warn only.  */
	  if (inst.operands[2].reg == REG_SP)
	    as_tsktsk (MVE_BAD_SP);
	  else if (inst.operands[2].reg == REG_PC)
	    as_tsktsk (MVE_BAD_PC);

	  inst.instruction = 0xee310e40;
	  inst.instruction |= (et.size == 16) << 28;	/* Half-precision.  */
	  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
	  inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
	  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
	  inst.instruction |= HI1 (inst.operands[1].reg) << 6;
	  inst.instruction |= inst.operands[2].reg;
	  inst.is_neon = 1;
	  return;
	}
    }
  else
    {
      constraint (!inst.operands[2].isvec, BAD_FPU);
    }

  neon_dyadic_misc (NT_untyped, N_IF_32, 0);
}
/* Dispatch VFMA: without the BF16 extension and without a condition suffix
   it is the MVE/Neon VFMA, otherwise it is the BFloat16 variant.  */
static void
do_mve_vfma (void)
{
  if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_bf16) &&
      inst.cond == COND_ALWAYS)
    {
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext), BAD_FPU);
      inst.instruction = N_MNEM_vfma;
      inst.pred_insn_type = INSIDE_VPT_INSN;
      inst.cond = 0xf;
      return do_neon_fmac();
    }
  else
    {
      do_bfloat_vfma();
    }
}
/* Encode Neon VTST (vector test bits), accepting 8/16/32-bit elements.  */
static void
do_neon_tst (void)
{
  enum neon_shape rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
  struct neon_type_el et = neon_check_type (3, rs,
    N_EQK, N_EQK, N_8 | N_16 | N_32 | N_KEY);
  neon_three_same (neon_quad (rs), 0, et.size);
}
/* VMUL with 3 registers allows the P8 type.  The scalar version supports the
   same types as the MAC equivalents.  The polynomial type for this instruction
   is encoded the same as the integer type.  */

static void
do_neon_mul (void)
{
  /* Prefer the VFP multiply if possible.  */
  if (try_vfp_nsyn (3, do_vfp_nsyn_mul) == SUCCESS)
    return;

  if (!check_simd_pred_availability (false, NEON_CHECK_CC | NEON_CHECK_ARCH))
    return;

  if (inst.operands[2].isscalar)
    {
      /* Indexed-scalar form is Neon only.  */
      constraint (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext), BAD_FPU);
      do_neon_mac_maybe_scalar ();
    }
  else
    {
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	{
	  enum neon_shape rs = neon_select_shape (NS_QQR, NS_QQQ, NS_NULL);
	  struct neon_type_el et
	    = neon_check_type (3, rs, N_EQK, N_EQK, N_I_MVE | N_F_MVE | N_KEY);
	  /* Float element types need the MVE FP extension.  */
	  if (et.type == NT_float)
	    constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext),
			BAD_FPU);

	  neon_dyadic_misc (NT_float, N_I_MVE | N_F_MVE, 0);
	}
      else
	{
	  constraint (!inst.operands[2].isvec, BAD_FPU);
	  /* Polynomial P8 is allowed here; it encodes like the integer
	     types.  */
	  neon_dyadic_misc (NT_poly,
			    N_I8 | N_I16 | N_I32 | N_F16 | N_F32 | N_P8, 0);
	}
    }
}
/* Encode saturating doubling multiply-high (VQDMULH family) in scalar,
   MVE (Q,Q,R / Q,Q,Q) and Neon three-register forms.  */
static void
do_neon_qdmulh (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  if (inst.operands[2].isscalar)
    {
      /* Indexed-scalar form is Neon only.  */
      constraint (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext), BAD_FPU);
      enum neon_shape rs = neon_select_shape (NS_DDS, NS_QQS, NS_NULL);
      struct neon_type_el et = neon_check_type (3, rs,
	N_EQK, N_EQK, N_S16 | N_S32 | N_KEY);
      NEON_ENCODE (SCALAR, inst);
      neon_mul_mac (et, neon_quad (rs));
    }
  else
    {
      enum neon_shape rs;
      struct neon_type_el et;
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	{
	  /* MVE additionally allows 8-bit elements and the Q,Q,R shape.  */
	  rs = neon_select_shape (NS_QQR, NS_QQQ, NS_NULL);
	  et = neon_check_type (3, rs,
	    N_EQK, N_EQK, N_S8 | N_S16 | N_S32 | N_KEY);
	}
      else
	{
	  rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
	  et = neon_check_type (3, rs,
	    N_EQK, N_EQK, N_S16 | N_S32 | N_KEY);
	}
      NEON_ENCODE (INTEGER, inst);
      if (rs == NS_QQR)
	mve_encode_qqr (et.size, 0, 0);
      else
	/* The U bit (rounding) comes from bit mask.  */
	neon_three_same (neon_quad (rs), 0, et.size);
    }
}
/* Encode the MVE VADDV instruction (add across vector into a GPR, per the
   mnemonic).  */
static void
do_mve_vaddv (void)
{
  enum neon_shape rs = neon_select_shape (NS_RQ, NS_NULL);
  struct neon_type_el et
    = neon_check_type (2, rs, N_EQK, N_SU_32 | N_KEY);

  if (et.type == NT_invtype)
    first_error (BAD_EL_TYPE);

  /* VPT suffix -> inside a VPT block; otherwise an unpredicated MVE insn.  */
  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  constraint (inst.operands[1].reg > 14, MVE_BAD_QREG);
  mve_encode_rq (et.type == NT_unsigned, et.size);
}
/* Encode the MVE VHCADD instruction (halving complex add with rotation
   of 90 or 270 degrees).  */
static void
do_mve_vhcadd (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQQI, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_EQK, N_S8 | N_S16 | N_S32 | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* The immediate operand is the rotation and may only be 90 or 270.  */
  unsigned rot = inst.relocs[0].exp.X_add_number;
  constraint (rot != 90 && rot != 270, _("immediate out of range"));

  if (et.size == 32 && inst.operands[0].reg == inst.operands[2].reg)
    as_tsktsk (_("Warning: 32-bit element size and same first and third "
		 "operand makes instruction UNPREDICTABLE"));

  mve_encode_qqq (0, et.size);
  inst.instruction |= (rot == 270) << 12;	/* Rotation bit.  */
  inst.is_neon = 1;
}
/* Encode the MVE VQDMULL instruction (saturating doubling multiply long)
   in both the Q,Q,Q and Q,Q,R forms.  */
static void
do_mve_vqdmull (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQQ, NS_QQR, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_EQK, N_S16 | N_S32 | N_KEY);

  /* With 32-bit elements the destination overlapping a source is
     unpredictable.  */
  if (et.size == 32
      && (inst.operands[0].reg == inst.operands[1].reg
	  || (rs == NS_QQQ && inst.operands[0].reg == inst.operands[2].reg)))
    as_tsktsk (BAD_MVE_SRCDEST);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  if (rs == NS_QQQ)
    {
      mve_encode_qqq (et.size == 32, 64);
      inst.instruction |= 1;
    }
  else
    {
      mve_encode_qqr (64, et.size == 32, 0);
      inst.instruction |= 0x3 << 5;
    }
}
/* Encode the MVE VADC instruction (add with carry across I32 vectors).  */
static void
do_mve_vadc (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQQ, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_KEY | N_I32, N_EQK, N_EQK);

  if (et.type == NT_invtype)
    first_error (BAD_EL_TYPE);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  mve_encode_qqq (0, 64);
}
/* Encode the MVE VBRSR instruction (Q,Q,R shape, 8/16/32-bit elements).  */
static void
do_mve_vbrsr (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQR, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_EQK, N_8 | N_16 | N_32 | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  mve_encode_qqr (et.size, 0, 0);
}
/* Encode the MVE VSBC instruction (subtract with carry across I32
   vectors).  */
static void
do_mve_vsbc (void)
{
  neon_check_type (3, NS_QQQ, N_EQK, N_EQK, N_I32 | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  mve_encode_qqq (1, 64);
}
/* Encode the MVE VMULH instruction (multiply returning high half), with
   signed or unsigned MVE element types.  */
static void
do_mve_vmulh (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQQ, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_EQK, N_SU_MVE | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  mve_encode_qqq (et.type == NT_unsigned, et.size);
}
/* Encode the MVE VQDMLAH instruction (Q,Q,R shape, signed 8/16/32-bit
   elements).  */
static void
do_mve_vqdmlah (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQR, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_EQK, N_S_32 | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  mve_encode_qqr (et.size, et.type == NT_unsigned, 0);
}
/* Encode the MVE VQDMLADH instruction (Q,Q,Q shape, signed elements).  */
static void
do_mve_vqdmladh (void)
{
  enum neon_shape rs = neon_select_shape (NS_QQQ, NS_NULL);
  struct neon_type_el et
    = neon_check_type (3, rs, N_EQK, N_EQK, N_S8 | N_S16 | N_S32 | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  mve_encode_qqq (0, et.size);
}
/* Encode VMULL.  The MVE vmullt only exists in the unconditional Q,Q,Q form
   when MVE is available; all other combinations are re-dispatched to the
   Neon/VFP VMUL handler.  */
static void
do_mve_vmull (void)
{

  enum neon_shape rs = neon_select_shape (NS_HHH, NS_FFF, NS_DDD, NS_DDS,
					  NS_QQS, NS_QQQ, NS_QQR, NS_NULL);
  if (inst.cond == COND_ALWAYS
      && ((unsigned)inst.instruction) == M_MNEM_vmullt)
    {

      if (rs == NS_QQQ)
	{
	  /* Q,Q,Q without MVE is handled as a plain Neon VMUL.  */
	  if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	    goto neon_vmul;
	}
      else
	goto neon_vmul;
    }

  constraint (rs != NS_QQQ, BAD_FPU);
  struct neon_type_el et = neon_check_type (3, rs, N_EQK , N_EQK,
					    N_SU_32 | N_P8 | N_P16 | N_KEY);

  /* We are dealing with MVE's vmullt.  */
  if (et.size == 32
      && (inst.operands[0].reg == inst.operands[1].reg
	  || inst.operands[0].reg == inst.operands[2].reg))
    as_tsktsk (BAD_MVE_SRCDEST);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  if (et.type == NT_poly)
    mve_encode_qqq (neon_logbits (et.size), 64);
  else
    mve_encode_qqq (et.type == NT_unsigned, et.size);

  return;

 neon_vmul:
  /* Fall back: rewrite the instruction as Neon VMUL and re-encode.  */
  inst.instruction = N_MNEM_vmul;
  inst.cond = 0xb;
  if (thumb_mode)
    inst.pred_insn_type = INSIDE_IT_INSN;
  do_neon_mul ();
}
/* Encode the MVE VABAV instruction (R,Q,Q shape, signed or unsigned
   8/16/32-bit elements).  */
static void
do_mve_vabav (void)
{
  enum neon_shape rs = neon_select_shape (NS_RQQ, NS_NULL);

  if (rs == NS_NULL)
    return;

  if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    return;

  struct neon_type_el et = neon_check_type (2, NS_NULL, N_EQK, N_KEY | N_S8
					    | N_S16 | N_S32 | N_U8 | N_U16
					    | N_U32);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  mve_encode_rqq (et.type == NT_unsigned, et.size);
}
/* Encode the MVE VMLADAV/VMLSDAV family (multiply-accumulate across
   vector into a GPR).  The vmlsdav/exchange variants only accept signed
   types.  */
static void
do_mve_vmladav (void)
{
  enum neon_shape rs = neon_select_shape (NS_RQQ, NS_NULL);
  struct neon_type_el et = neon_check_type (3, rs,
					    N_EQK, N_EQK, N_SU_MVE | N_KEY);

  /* Unsigned types are invalid for the exchange and subtracting forms.  */
  if (et.type == NT_unsigned
      && (inst.instruction == M_MNEM_vmladavx
	  || inst.instruction == M_MNEM_vmladavax
	  || inst.instruction == M_MNEM_vmlsdav
	  || inst.instruction == M_MNEM_vmlsdava
	  || inst.instruction == M_MNEM_vmlsdavx
	  || inst.instruction == M_MNEM_vmlsdavax))
    first_error (BAD_SIMD_TYPE);

  constraint (inst.operands[2].reg > 14,
	      _("MVE vector register in the range [Q0..Q7] expected"));

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* The 8-bit size indicator sits in a different bit position for the
     vmlsdav variants than for vmladav.  */
  if (inst.instruction == M_MNEM_vmlsdav
      || inst.instruction == M_MNEM_vmlsdava
      || inst.instruction == M_MNEM_vmlsdavx
      || inst.instruction == M_MNEM_vmlsdavax)
    inst.instruction |= (et.size == 8) << 28;
  else
    inst.instruction |= (et.size == 8) << 8;

  mve_encode_rqq (et.type == NT_unsigned, 64);
  inst.instruction |= (et.size == 32) << 16;
}
/* Encode the MVE VMLALDAV/VMLSLDAV family (long multiply-accumulate across
   vector into a GPR pair).  The vmlsldav variants only accept signed
   types.  */
static void
do_mve_vmlaldav (void)
{
  enum neon_shape rs = neon_select_shape (NS_RRQQ, NS_NULL);
  struct neon_type_el et
    = neon_check_type (4, rs, N_EQK, N_EQK, N_EQK,
		       N_S16 | N_S32 | N_U16 | N_U32 | N_KEY);

  if (et.type == NT_unsigned
      && (inst.instruction == M_MNEM_vmlsldav
	  || inst.instruction == M_MNEM_vmlsldava
	  || inst.instruction == M_MNEM_vmlsldavx
	  || inst.instruction == M_MNEM_vmlsldavax))
    first_error (BAD_SIMD_TYPE);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  mve_encode_rrqq (et.type == NT_unsigned, et.size);
}
/* Encode the MVE VRMLALDAVH/VRMLSLDAVH family.  The odd GPR operand gets
   special SP handling: a warning for vrmlsldavh, a hard error for
   vrmlaldavh (see below).  */
static void
do_mve_vrmlaldavh (void)
{
  struct neon_type_el et;
  if (inst.instruction == M_MNEM_vrmlsldavh
     || inst.instruction == M_MNEM_vrmlsldavha
     || inst.instruction == M_MNEM_vrmlsldavhx
     || inst.instruction == M_MNEM_vrmlsldavhax)
    {
      et = neon_check_type (4, NS_RRQQ, N_EQK, N_EQK, N_EQK, N_S32 | N_KEY);
      if (inst.operands[1].reg == REG_SP)
	as_tsktsk (MVE_BAD_SP);
    }
  else
    {
      if (inst.instruction == M_MNEM_vrmlaldavhx
	  || inst.instruction == M_MNEM_vrmlaldavhax)
	et = neon_check_type (4, NS_RRQQ, N_EQK, N_EQK, N_EQK, N_S32 | N_KEY);
      else
	et = neon_check_type (4, NS_RRQQ, N_EQK, N_EQK, N_EQK,
			      N_U32 | N_S32 | N_KEY);
      /* vrmlaldavh's encoding with SP as the second, odd, GPR operand may alias
	 with vmax/min instructions, making the use of SP in assembly really
	 nonsensical, so instead of issuing a warning like we do for other uses
	 of SP for the odd register operand we error out.  */
      constraint (inst.operands[1].reg == REG_SP, BAD_SP);
    }

  /* Make sure we still check the second operand is an odd one and that PC is
     disallowed.  This because we are parsing for any GPR operand, to be able
     to distinguish between giving a warning or an error for SP as described
     above.  */
  constraint ((inst.operands[1].reg % 2) != 1, BAD_EVEN);
  constraint (inst.operands[1].reg == REG_PC, BAD_PC);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  mve_encode_rrqq (et.type == NT_unsigned, 0);
}
/* Encode the MVE VMAXNMV/VMINNMV instructions (float max/min across vector
   into a GPR).  SP/PC destinations are unpredictable; warn only.  */
static void
do_mve_vmaxnmv (void)
{
  enum neon_shape rs = neon_select_shape (NS_RQ, NS_NULL);
  struct neon_type_el et
    = neon_check_type (2, rs, N_EQK, N_F_MVE | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  if (inst.operands[0].reg == REG_SP)
    as_tsktsk (MVE_BAD_SP);
  else if (inst.operands[0].reg == REG_PC)
    as_tsktsk (MVE_BAD_PC);

  mve_encode_rq (et.size == 16, 64);
}
/* Encode the MVE VMAXV/VMINV/VMAXAV/VMINAV instructions (integer max/min
   across vector into a GPR).  The absolute variants only take signed
   types.  */
static void
do_mve_vmaxv (void)
{
  enum neon_shape rs = neon_select_shape (NS_RQ, NS_NULL);
  struct neon_type_el et;

  if (inst.instruction == M_MNEM_vmaxv || inst.instruction == M_MNEM_vminv)
    et = neon_check_type (2, rs, N_EQK, N_SU_MVE | N_KEY);
  else
    et = neon_check_type (2, rs, N_EQK, N_S8 | N_S16 | N_S32 | N_KEY);

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;
  else
    inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;

  /* SP/PC destinations are unpredictable; warn only.  */
  if (inst.operands[0].reg == REG_SP)
    as_tsktsk (MVE_BAD_SP);
  else if (inst.operands[0].reg == REG_PC)
    as_tsktsk (MVE_BAD_PC);

  mve_encode_rq (et.type == NT_unsigned, et.size);
}
/* Encode VQRDMLAH/VQRDMLSH: the Armv8.1 AdvSIMD forms (scalar or
   three-register) when MVE is absent, else the MVE Q,Q,R form.  */
static void
do_neon_qrdmlah (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;
  if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      /* Check we're on the correct architecture.  */
      if (!mark_feature_used (&fpu_neon_ext_armv8))
	inst.error
	  = _("instruction form not available on this architecture.");
      else if (!mark_feature_used (&fpu_neon_ext_v8_1))
	{
	  as_warn (_("this instruction implies use of ARMv8.1 AdvSIMD."));
	  record_feature_use (&fpu_neon_ext_v8_1);
	}
      if (inst.operands[2].isscalar)
	{
	  /* Indexed-scalar form.  */
	  enum neon_shape rs = neon_select_shape (NS_DDS, NS_QQS, NS_NULL);
	  struct neon_type_el et = neon_check_type (3, rs,
	    N_EQK, N_EQK, N_S16 | N_S32 | N_KEY);
	  NEON_ENCODE (SCALAR, inst);
	  neon_mul_mac (et, neon_quad (rs));
	}
      else
	{
	  enum neon_shape rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
	  struct neon_type_el et = neon_check_type (3, rs,
	    N_EQK, N_EQK, N_S16 | N_S32 | N_KEY);
	  NEON_ENCODE (INTEGER, inst);
	  /* The U bit (rounding) comes from bit mask.  */
	  neon_three_same (neon_quad (rs), 0, et.size);
	}
    }
  else
    {
      /* MVE Q,Q,R form.  */
      enum neon_shape rs = neon_select_shape (NS_QQR, NS_NULL);
      struct neon_type_el et
	= neon_check_type (3, rs, N_EQK, N_EQK, N_S_32 | N_KEY);
      NEON_ENCODE (INTEGER, inst);
      mve_encode_qqr (et.size, et.type == NT_unsigned, 0);
    }
}
/* Encode Neon absolute float compare (VACGE/VACGT family), 16/32-bit
   float elements.  */
static void
do_neon_fcmp_absolute (void)
{
  enum neon_shape rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
  struct neon_type_el et = neon_check_type (3, rs, N_EQK, N_EQK,
					    N_F_16_32 | N_KEY);
  /* Size field comes from bit mask.  */
  neon_three_same (neon_quad (rs), 1, et.size == 16 ? (int) et.size : -1);
}
/* Encode the inverted absolute float compare: swap the sources, then emit
   the non-inverted form.  */
static void
do_neon_fcmp_absolute_inv (void)
{
  neon_exchange_operands ();
  do_neon_fcmp_absolute ();
}
/* Encode Neon reciprocal/reciprocal-sqrt step (VRECPS/VRSQRTS style),
   16/32-bit float elements.  */
static void
do_neon_step (void)
{
  enum neon_shape rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
  struct neon_type_el et = neon_check_type (3, rs, N_EQK, N_EQK,
					    N_F_16_32 | N_KEY);
  neon_three_same (neon_quad (rs), 0, et.size == 16 ? (int) et.size : -1);
}
/* Encode vector absolute value / negate: VFP form if possible, otherwise
   the two-register Neon/MVE form.  */
static void
do_neon_abs_neg (void)
{
  enum neon_shape rs;
  struct neon_type_el et;

  if (try_vfp_nsyn (2, do_vfp_nsyn_abs_neg) == SUCCESS)
    return;

  rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  et = neon_check_type (2, rs, N_EQK, N_S_32 | N_F_16_32 | N_KEY);

  if (!check_simd_pred_availability (et.type == NT_float,
				    NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= LOW4 (inst.operands[1].reg);
  inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  inst.instruction |= neon_quad (rs) << 6;
  inst.instruction |= (et.type == NT_float) << 10;	/* F bit.  */
  inst.instruction |= neon_logbits (et.size) << 18;	/* Size field.  */

  neon_dp_fixup (&inst);
}
/* Encode shift-left-and-insert (VSLI).  The shift amount must be in
   [0, elsize).  MVE has no 64-bit elements or D-register form.  */
static void
do_neon_sli (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  enum neon_shape rs;
  struct neon_type_el et;
  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      rs = neon_select_shape (NS_QQI, NS_NULL);
      et = neon_check_type (2, rs, N_EQK, N_8 | N_16 | N_32 | N_KEY);
    }
  else
    {
      rs = neon_select_shape (NS_DDI, NS_QQI, NS_NULL);
      et = neon_check_type (2, rs, N_EQK, N_8 | N_16 | N_32 | N_64 | N_KEY);
    }


  int imm = inst.operands[2].imm;
  constraint (imm < 0 || (unsigned)imm >= et.size,
	      _("immediate out of range for insert"));
  neon_imm_shift (false, 0, neon_quad (rs), et, imm);
}
/* Encode shift-right-and-insert (VSRI).  The shift amount must be in
   [1, elsize] and is encoded as elsize - imm.  */
static void
do_neon_sri (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  enum neon_shape rs;
  struct neon_type_el et;
  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      rs = neon_select_shape (NS_QQI, NS_NULL);
      et = neon_check_type (2, rs, N_EQK, N_8 | N_16 | N_32 | N_KEY);
    }
  else
    {
      rs = neon_select_shape (NS_DDI, NS_QQI, NS_NULL);
      et = neon_check_type (2, rs, N_EQK, N_8 | N_16 | N_32 | N_64 | N_KEY);
    }

  int imm = inst.operands[2].imm;
  constraint (imm < 1 || (unsigned)imm > et.size,
	      _("immediate out of range for insert"));
  neon_imm_shift (false, 0, neon_quad (rs), et, et.size - imm);
}
/* Assemble VQSHLU (signed-input, unsigned-saturating shift left) with an
   immediate shift amount.  */
static void
do_neon_qshlu_imm (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  enum neon_shape rs;
  struct neon_type_el et;

  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      /* MVE: Q registers only, 8/16/32-bit signed elements.  */
      rs = neon_select_shape (NS_QQI, NS_NULL);
      et = neon_check_type (2, rs, N_EQK, N_S8 | N_S16 | N_S32 | N_KEY);
    }
  else
    {
      /* Neon: destination type is the unsigned counterpart of the signed
         source (N_EQK | N_UNS).  */
      rs = neon_select_shape (NS_DDI, NS_QQI, NS_NULL);
      et = neon_check_type (2, rs, N_EQK | N_UNS,
                            N_S8 | N_S16 | N_S32 | N_S64 | N_KEY);
    }

  /* Left shift must be 0 .. element-size-1 bits.  */
  int imm = inst.operands[2].imm;
  constraint (imm < 0 || (unsigned)imm >= et.size,
              _("immediate out of range for shift"));

  /* Only encodes the 'U present' variant of the instruction.
     In this case, signed types have OP (bit 8) set to 0.
     Unsigned types have OP set to 1.  */
  inst.instruction |= (et.type == NT_unsigned) << 8;
  /* The rest of the bits are the same as other immediate shifts.  */
  neon_imm_shift (false, 0, neon_quad (rs), et, imm);
}
  16181. static void
  16182. do_neon_qmovn (void)
  16183. {
  16184. struct neon_type_el et = neon_check_type (2, NS_DQ,
  16185. N_EQK | N_HLF, N_SU_16_64 | N_KEY);
  16186. /* Saturating move where operands can be signed or unsigned, and the
  16187. destination has the same signedness. */
  16188. NEON_ENCODE (INTEGER, inst);
  16189. if (et.type == NT_unsigned)
  16190. inst.instruction |= 0xc0;
  16191. else
  16192. inst.instruction |= 0x80;
  16193. neon_two_same (0, 1, et.size / 2);
  16194. }
  16195. static void
  16196. do_neon_qmovun (void)
  16197. {
  16198. struct neon_type_el et = neon_check_type (2, NS_DQ,
  16199. N_EQK | N_HLF | N_UNS, N_S16 | N_S32 | N_S64 | N_KEY);
  16200. /* Saturating move with unsigned results. Operands must be signed. */
  16201. NEON_ENCODE (INTEGER, inst);
  16202. neon_two_same (0, 1, et.size / 2);
  16203. }
/* Assemble VQSHRN/VQRSHRN (saturating shift right and narrow) with an
   immediate.  A zero immediate is re-dispatched as VQMOVN.  */
static void
do_neon_rshift_sat_narrow (void)
{
  /* FIXME: Types for narrowing. If operands are signed, results can be signed
     or unsigned. If operands are unsigned, results must also be unsigned.  */
  struct neon_type_el et = neon_check_type (2, NS_DQI,
    N_EQK | N_HLF, N_SU_16_64 | N_KEY);
  int imm = inst.operands[2].imm;
  /* This gets the bounds check, size encoding and immediate bits calculation
     right.  */
  et.size /= 2;

  /* VQ{R}SHRN.I<size> <Dd>, <Qm>, #0 is a synonym for
     VQMOVN.I<size> <Dd>, <Qm>.  */
  if (imm == 0)
    {
      /* Drop the now-unused immediate operand and hand off to the
         VQMOVN encoder.  */
      inst.operands[2].present = 0;
      inst.instruction = N_MNEM_vqmovn;
      do_neon_qmovn ();
      return;
    }

  /* Shift must be 1 .. narrowed element size.  */
  constraint (imm < 1 || (unsigned)imm > et.size,
              _("immediate out of range"));
  /* Right shift encoded as (size - imm); U bit follows operand sign.  */
  neon_imm_shift (true, et.type == NT_unsigned, 0, et, et.size - imm);
}
/* Assemble VQSHRUN/VQRSHRUN (saturating shift right, narrow to unsigned)
   with an immediate.  A zero immediate is re-dispatched as VQMOVUN.  */
static void
do_neon_rshift_sat_narrow_u (void)
{
  /* FIXME: Types for narrowing. If operands are signed, results can be signed
     or unsigned. If operands are unsigned, results must also be unsigned.  */
  struct neon_type_el et = neon_check_type (2, NS_DQI,
    N_EQK | N_HLF | N_UNS, N_S16 | N_S32 | N_S64 | N_KEY);
  int imm = inst.operands[2].imm;
  /* This gets the bounds check, size encoding and immediate bits calculation
     right.  */
  et.size /= 2;

  /* VQSHRUN.I<size> <Dd>, <Qm>, #0 is a synonym for
     VQMOVUN.I<size> <Dd>, <Qm>.  */
  if (imm == 0)
    {
      /* Drop the now-unused immediate operand and hand off to the
         VQMOVUN encoder.  */
      inst.operands[2].present = 0;
      inst.instruction = N_MNEM_vqmovun;
      do_neon_qmovun ();
      return;
    }

  /* Shift must be 1 .. narrowed element size.  */
  constraint (imm < 1 || (unsigned)imm > et.size,
              _("immediate out of range"));
  /* FIXME: The manual is kind of unclear about what value U should have in
     VQ{R}SHRUN instructions, but U=0, op=0 definitely encodes VRSHR, so it
     must be 1.  */
  neon_imm_shift (true, 1, 0, et, et.size - imm);
}
  16255. static void
  16256. do_neon_movn (void)
  16257. {
  16258. struct neon_type_el et = neon_check_type (2, NS_DQ,
  16259. N_EQK | N_HLF, N_I16 | N_I32 | N_I64 | N_KEY);
  16260. NEON_ENCODE (INTEGER, inst);
  16261. neon_two_same (0, 1, et.size / 2);
  16262. }
/* Assemble VSHRN/VRSHRN (shift right and narrow) with an immediate.
   A zero immediate is re-dispatched as VMOVN.  */
static void
do_neon_rshift_narrow (void)
{
  struct neon_type_el et = neon_check_type (2, NS_DQI,
    N_EQK | N_HLF, N_I16 | N_I32 | N_I64 | N_KEY);
  int imm = inst.operands[2].imm;
  /* This gets the bounds check, size encoding and immediate bits calculation
     right.  */
  et.size /= 2;

  /* If immediate is zero then we are a pseudo-instruction for
     VMOVN.I<size> <Dd>, <Qm>  */
  if (imm == 0)
    {
      /* Drop the now-unused immediate operand and hand off to the
         VMOVN encoder.  */
      inst.operands[2].present = 0;
      inst.instruction = N_MNEM_vmovn;
      do_neon_movn ();
      return;
    }

  /* Shift must be 1 .. narrowed element size.  */
  constraint (imm < 1 || (unsigned)imm > et.size,
              _("immediate out of range for narrowing operation"));
  /* Right shift encoded as (size - imm).  */
  neon_imm_shift (false, 0, 0, et, et.size - imm);
}
/* Assemble VSHLL (widening shift left by immediate), Qd = Dm << #imm.
   A shift equal to the element size uses a distinct "maximum shift"
   encoding; smaller shifts use the regular immediate-shift encoding.  */
static void
do_neon_shll (void)
{
  /* FIXME: Type checking when lengthening.  */
  struct neon_type_el et = neon_check_type (2, NS_QDI,
    N_EQK | N_DBL, N_I8 | N_I16 | N_I32 | N_KEY);
  unsigned imm = inst.operands[2].imm;

  if (imm == et.size)
    {
      /* Maximum shift variant.  */
      NEON_ENCODE (INTEGER, inst);
      /* Vd -> bits 15:12 / bit 22; Vm -> bits 3:0 / bit 5.  */
      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= LOW4 (inst.operands[1].reg);
      inst.instruction |= HI1 (inst.operands[1].reg) << 5;
      /* Element size, log2 encoded, into bits 19:18.  */
      inst.instruction |= neon_logbits (et.size) << 18;
      neon_dp_fixup (&inst);
    }
  else
    {
      /* A more-specific type check for non-max versions.  */
      et = neon_check_type (2, NS_QDI,
        N_EQK | N_DBL, N_SU_32 | N_KEY);
      NEON_ENCODE (IMMED, inst);
      neon_imm_shift (true, et.type == NT_unsigned, 0, et, imm);
    }
}
/* Check the various types for the VCVT instruction, and return which version
   the current instruction is.  Each CVT_VAR line describes one conversion
   "flavour": (enumerator suffix, destination type, source type, register
   class, bitshift opname, plain opname, round-to-zero opname).  The macro
   is expanded several times below with different CVT_VAR definitions to
   build the flavour enum and the per-flavour opcode-name tables.  */
#define CVT_FLAVOUR_VAR \
  CVT_VAR (s32_f32, N_S32, N_F32, whole_reg, "ftosls", "ftosis", "ftosizs") \
  CVT_VAR (u32_f32, N_U32, N_F32, whole_reg, "ftouls", "ftouis", "ftouizs") \
  CVT_VAR (f32_s32, N_F32, N_S32, whole_reg, "fsltos", "fsitos", NULL) \
  CVT_VAR (f32_u32, N_F32, N_U32, whole_reg, "fultos", "fuitos", NULL) \
  /* Half-precision conversions.  */ \
  CVT_VAR (s16_f16, N_S16, N_F16 | N_KEY, whole_reg, NULL, NULL, NULL) \
  CVT_VAR (u16_f16, N_U16, N_F16 | N_KEY, whole_reg, NULL, NULL, NULL) \
  CVT_VAR (f16_s16, N_F16 | N_KEY, N_S16, whole_reg, NULL, NULL, NULL) \
  CVT_VAR (f16_u16, N_F16 | N_KEY, N_U16, whole_reg, NULL, NULL, NULL) \
  CVT_VAR (f32_f16, N_F32, N_F16, whole_reg, NULL, NULL, NULL) \
  CVT_VAR (f16_f32, N_F16, N_F32, whole_reg, NULL, NULL, NULL) \
  /* New VCVT instructions introduced by ARMv8.2 fp16 extension. \
     Compared with single/double precision variants, only the co-processor \
     field is different, so the encoding flow is reused here.  */ \
  CVT_VAR (f16_s32, N_F16 | N_KEY, N_S32, N_VFP, "fsltos", "fsitos", NULL) \
  CVT_VAR (f16_u32, N_F16 | N_KEY, N_U32, N_VFP, "fultos", "fuitos", NULL) \
  CVT_VAR (u32_f16, N_U32, N_F16 | N_KEY, N_VFP, "ftouls", "ftouis", "ftouizs")\
  CVT_VAR (s32_f16, N_S32, N_F16 | N_KEY, N_VFP, "ftosls", "ftosis", "ftosizs")\
  CVT_VAR (bf16_f32, N_BF16, N_F32, whole_reg, NULL, NULL, NULL) \
  /* VFP instructions.  */ \
  CVT_VAR (f32_f64, N_F32, N_F64, N_VFP, NULL, "fcvtsd", NULL) \
  CVT_VAR (f64_f32, N_F64, N_F32, N_VFP, NULL, "fcvtds", NULL) \
  CVT_VAR (s32_f64, N_S32, N_F64 | key, N_VFP, "ftosld", "ftosid", "ftosizd") \
  CVT_VAR (u32_f64, N_U32, N_F64 | key, N_VFP, "ftould", "ftouid", "ftouizd") \
  CVT_VAR (f64_s32, N_F64 | key, N_S32, N_VFP, "fsltod", "fsitod", NULL) \
  CVT_VAR (f64_u32, N_F64 | key, N_U32, N_VFP, "fultod", "fuitod", NULL) \
  /* VFP instructions with bitshift.  */ \
  CVT_VAR (f32_s16, N_F32 | key, N_S16, N_VFP, "fshtos", NULL, NULL) \
  CVT_VAR (f32_u16, N_F32 | key, N_U16, N_VFP, "fuhtos", NULL, NULL) \
  CVT_VAR (f64_s16, N_F64 | key, N_S16, N_VFP, "fshtod", NULL, NULL) \
  CVT_VAR (f64_u16, N_F64 | key, N_U16, N_VFP, "fuhtod", NULL, NULL) \
  CVT_VAR (s16_f32, N_S16, N_F32 | key, N_VFP, "ftoshs", NULL, NULL) \
  CVT_VAR (u16_f32, N_U16, N_F32 | key, N_VFP, "ftouhs", NULL, NULL) \
  CVT_VAR (s16_f64, N_S16, N_F64 | key, N_VFP, "ftoshd", NULL, NULL) \
  CVT_VAR (u16_f64, N_U16, N_F64 | key, N_VFP, "ftouhd", NULL, NULL)

/* Expand each CVT_VAR line into an enumerator named neon_cvt_flavour_<C>.  */
#define CVT_VAR(C, X, Y, R, BSN, CN, ZN) \
  neon_cvt_flavour_##C,

/* The different types of conversions we can do.  */
enum neon_cvt_flavour
{
  CVT_FLAVOUR_VAR
  neon_cvt_flavour_invalid,
  /* Flavours from f32_f64 onwards (the "VFP instructions" group above) are
     handled by the VFP rather than the Neon encoding path.  */
  neon_cvt_flavour_first_fp = neon_cvt_flavour_f32_f64
};

#undef CVT_VAR
/* Probe each conversion flavour in CVT_FLAVOUR_VAR order against shape RS
   and return the first whose operand types match the current instruction,
   or neon_cvt_flavour_invalid if none do.  Failed probes set inst.error
   via neon_check_type, so it is cleared again on success.  */
static enum neon_cvt_flavour
get_neon_cvt_flavour (enum neon_shape rs)
{
#define CVT_VAR(C,X,Y,R,BSN,CN,ZN)			\
  et = neon_check_type (2, rs, (R) | (X), (R) | (Y));	\
  if (et.type != NT_invtype)				\
    {							\
      inst.error = NULL;				\
      return (neon_cvt_flavour_##C);			\
    }

  struct neon_type_el et;
  /* whole_reg and key are referenced by the CVT_VAR argument lists inside
     the CVT_FLAVOUR_VAR expansion below.  */
  unsigned whole_reg = (rs == NS_FFI || rs == NS_FD || rs == NS_DF
			|| rs == NS_FF) ? N_VFP : 0;
  /* The instruction versions which take an immediate take one register
     argument, which is extended to the width of the full register. Thus the
     "source" and "destination" registers must have the same width.  Hack that
     here by making the size equal to the key (wider, in this case) operand.  */
  unsigned key = (rs == NS_QQI || rs == NS_DDI || rs == NS_FFI) ? N_KEY : 0;

  CVT_FLAVOUR_VAR;

  return neon_cvt_flavour_invalid;
#undef CVT_VAR
}
/* Rounding/conversion mode for the VCVT family, selected by the mnemonic
   suffix: a/n/p/m correspond to VCVTA/VCVTN/VCVTP/VCVTM, z to plain VCVT
   (round toward zero), and x is used by the VCVTR handler.  The r mode is
   not referenced in this part of the file.  */
enum neon_cvt_mode
{
  neon_cvt_mode_a,
  neon_cvt_mode_n,
  neon_cvt_mode_p,
  neon_cvt_mode_m,
  neon_cvt_mode_z,
  neon_cvt_mode_x,
  neon_cvt_mode_r
};
/* Neon-syntax VFP conversions.  Emits the legacy VFP conversion opcode for
   FLAVOUR by name, choosing between the bitshift (fixed-point) and plain
   opcode-name tables generated from CVT_FLAVOUR_VAR.  */
static void
do_vfp_nsyn_cvt (enum neon_shape rs, enum neon_cvt_flavour flavour)
{
  const char *opname = 0;

  if (rs == NS_DDI || rs == NS_QQI || rs == NS_FFI
      || rs == NS_FHI || rs == NS_HFI)
    {
      /* Conversions with immediate bitshift.  */
      const char *enc[] =
	{
#define CVT_VAR(C,A,B,R,BSN,CN,ZN) BSN,
	  CVT_FLAVOUR_VAR
	  NULL
#undef CVT_VAR
	};

      if (flavour < (int) ARRAY_SIZE (enc))
	{
	  opname = enc[flavour];
	  /* Fixed-point forms use Dd, Dd, #imm syntax: fold operand 2 down
	     into operand 1 after checking operands 0 and 1 match.  */
	  constraint (inst.operands[0].reg != inst.operands[1].reg,
		      _("operands 0 and 1 must be the same register"));
	  inst.operands[1] = inst.operands[2];
	  memset (&inst.operands[2], '\0', sizeof (inst.operands[2]));
	}
    }
  else
    {
      /* Conversions without bitshift.  */
      const char *enc[] =
	{
#define CVT_VAR(C,A,B,R,BSN,CN,ZN) CN,
	  CVT_FLAVOUR_VAR
	  NULL
#undef CVT_VAR
	};

      if (flavour < (int) ARRAY_SIZE (enc))
	opname = enc[flavour];
    }

  /* Flavours with no name in the selected table (NULL entry, or
     neon_cvt_flavour_invalid indexing the NULL terminator) emit nothing.  */
  if (opname)
    do_vfp_nsyn_opcode (opname);

  /* ARMv8.2 fp16 VCVT instruction.  */
  if (flavour == neon_cvt_flavour_s32_f16
      || flavour == neon_cvt_flavour_u32_f16
      || flavour == neon_cvt_flavour_f16_u32
      || flavour == neon_cvt_flavour_f16_s32)
    do_scalar_fp16_v82_encode ();
}
/* Emit the round-toward-zero VFP conversion (the "ZN" opcode name from
   CVT_FLAVOUR_VAR) for the current instruction, if the flavour has one.  */
static void
do_vfp_nsyn_cvtz (void)
{
  enum neon_shape rs = neon_select_shape (NS_FH, NS_FF, NS_FD, NS_NULL);
  enum neon_cvt_flavour flavour = get_neon_cvt_flavour (rs);
  const char *enc[] =
    {
#define CVT_VAR(C,A,B,R,BSN,CN,ZN) ZN,
      CVT_FLAVOUR_VAR
      NULL
#undef CVT_VAR
    };

  /* Silently do nothing when the flavour has no round-to-zero form.  */
  if (flavour < (int) ARRAY_SIZE (enc) && enc[flavour])
    do_vfp_nsyn_opcode (enc[flavour]);
}
/* Encode an FP v8 VCVT{A,N,P,M} float-to-integer conversion.  SZ selects
   single vs double source, OP signed vs unsigned result, RM the rounding
   mode bits.  Only float -> s32/u32 flavours are valid here.  */
static void
do_vfp_nsyn_cvt_fpv8 (enum neon_cvt_flavour flavour,
		      enum neon_cvt_mode mode)
{
  int sz, op;
  int rm;

  /* Targets like FPv5-SP-D16 don't support FP v8 instructions with
     D register operands.  */
  if (flavour == neon_cvt_flavour_s32_f64
      || flavour == neon_cvt_flavour_u32_f64)
    constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_armv8),
		_(BAD_FPU));

  if (flavour == neon_cvt_flavour_s32_f16
      || flavour == neon_cvt_flavour_u32_f16)
    constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_fp16),
		_(BAD_FP16));

  set_pred_insn_type (OUTSIDE_PRED_INSN);

  /* sz: 1 for f64 sources, 0 for f32/f16.  op: 1 for signed results.  */
  switch (flavour)
    {
    case neon_cvt_flavour_s32_f64:
      sz = 1;
      op = 1;
      break;
    case neon_cvt_flavour_s32_f32:
      sz = 0;
      op = 1;
      break;
    case neon_cvt_flavour_s32_f16:
      sz = 0;
      op = 1;
      break;
    case neon_cvt_flavour_u32_f64:
      sz = 1;
      op = 0;
      break;
    case neon_cvt_flavour_u32_f32:
      sz = 0;
      op = 0;
      break;
    case neon_cvt_flavour_u32_f16:
      sz = 0;
      op = 0;
      break;
    default:
      first_error (_("invalid instruction shape"));
      return;
    }

  /* Rounding-mode field: a=0, n=1, p=2, m=3; other modes are invalid for
     these instructions.  */
  switch (mode)
    {
    case neon_cvt_mode_a: rm = 0; break;
    case neon_cvt_mode_n: rm = 1; break;
    case neon_cvt_mode_p: rm = 2; break;
    case neon_cvt_mode_m: rm = 3; break;
    default: first_error (_("invalid rounding mode")); return;
    }

  NEON_ENCODE (FPV8, inst);
  encode_arm_vfp_reg (inst.operands[0].reg, VFP_REG_Sd);
  encode_arm_vfp_reg (inst.operands[1].reg, sz == 1 ? VFP_REG_Dm : VFP_REG_Sm);
  inst.instruction |= sz << 8;

  /* ARMv8.2 fp16 VCVT instruction.  */
  if (flavour == neon_cvt_flavour_s32_f16
      ||flavour == neon_cvt_flavour_u32_f16)
    do_scalar_fp16_v82_encode ();
  inst.instruction |= op << 7;
  inst.instruction |= rm << 16;
  /* Unconditional encoding.  */
  inst.instruction |= 0xf0000000;
  inst.is_neon = true;
}
/* Common worker for the whole VCVT family (Neon, MVE and VFP forms).
   MODE carries the rounding/conversion variant; the operand shape and
   type flavour decide which encoding path is taken.  */
static void
do_neon_cvt_1 (enum neon_cvt_mode mode)
{
  enum neon_shape rs = neon_select_shape (NS_DDI, NS_QQI, NS_FFI, NS_DD, NS_QQ,
					  NS_FD, NS_DF, NS_FF, NS_QD, NS_DQ,
					  NS_FH, NS_HF, NS_FHI, NS_HFI,
					  NS_NULL);
  enum neon_cvt_flavour flavour = get_neon_cvt_flavour (rs);

  if (flavour == neon_cvt_flavour_invalid)
    return;

  /* PR11109: Handle round-to-zero for VCVT conversions.  */
  if (mode == neon_cvt_mode_z
      && ARM_CPU_HAS_FEATURE (cpu_variant, fpu_arch_vfp_v2)
      && (flavour == neon_cvt_flavour_s16_f16
	  || flavour == neon_cvt_flavour_u16_f16
	  || flavour == neon_cvt_flavour_s32_f32
	  || flavour == neon_cvt_flavour_u32_f32
	  || flavour == neon_cvt_flavour_s32_f64
	  || flavour == neon_cvt_flavour_u32_f64)
      && (rs == NS_FD || rs == NS_FF))
    {
      do_vfp_nsyn_cvtz ();
      return;
    }

  /* ARMv8.2 fp16 VCVT conversions.  */
  if (mode == neon_cvt_mode_z
      && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_fp16)
      && (flavour == neon_cvt_flavour_s32_f16
	  || flavour == neon_cvt_flavour_u32_f16)
      && (rs == NS_FH))
    {
      do_vfp_nsyn_cvtz ();
      do_scalar_fp16_v82_encode ();
      return;
    }

  if ((rs == NS_FD || rs == NS_QQI) && mode == neon_cvt_mode_n
      && ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      /* We are dealing with vcvt with the 'ne' condition.  */
      inst.cond = 0x1;
      inst.instruction = N_MNEM_vcvt;
      do_neon_cvt_1 (neon_cvt_mode_z);
      return;
    }

  /* VFP rather than Neon conversions.  */
  if (flavour >= neon_cvt_flavour_first_fp)
    {
      if (mode == neon_cvt_mode_x || mode == neon_cvt_mode_z)
	do_vfp_nsyn_cvt (rs, flavour);
      else
	do_vfp_nsyn_cvt_fpv8 (flavour, mode);

      return;
    }

  switch (rs)
    {
    case NS_QQI:
      if (mode == neon_cvt_mode_z
	  && (flavour == neon_cvt_flavour_f16_s16
	      || flavour == neon_cvt_flavour_f16_u16
	      || flavour == neon_cvt_flavour_s16_f16
	      || flavour == neon_cvt_flavour_u16_f16
	      || flavour == neon_cvt_flavour_f32_u32
	      || flavour == neon_cvt_flavour_f32_s32
	      || flavour == neon_cvt_flavour_s32_f32
	      || flavour == neon_cvt_flavour_u32_f32))
	{
	  if (!check_simd_pred_availability (true,
					     NEON_CHECK_CC | NEON_CHECK_ARCH))
	    return;
	}
      /* fall through.  */
    case NS_DDI:
      {
	/* Fixed-point conversion, indexed by flavour.  */
	unsigned immbits;
	unsigned enctab[] = {0x0000100, 0x1000100, 0x0, 0x1000000,
			     0x0000100, 0x1000100, 0x0, 0x1000000};

	if ((rs != NS_QQI || !ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext))
	    && vfp_or_neon_is_neon (NEON_CHECK_CC | NEON_CHECK_ARCH) == FAIL)
	  return;

	if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext))
	  {
	    /* MVE restricts the fixed-point fraction to 1..16 (f16) or
	       1..32 (f32).  */
	    constraint (inst.operands[2].present && inst.operands[2].imm == 0,
			_("immediate value out of range"));
	    switch (flavour)
	      {
	      case neon_cvt_flavour_f16_s16:
	      case neon_cvt_flavour_f16_u16:
	      case neon_cvt_flavour_s16_f16:
	      case neon_cvt_flavour_u16_f16:
		constraint (inst.operands[2].imm > 16,
			    _("immediate value out of range"));
		break;
	      case neon_cvt_flavour_f32_u32:
	      case neon_cvt_flavour_f32_s32:
	      case neon_cvt_flavour_s32_f32:
	      case neon_cvt_flavour_u32_f32:
		constraint (inst.operands[2].imm > 32,
			    _("immediate value out of range"));
		break;
	      default:
		inst.error = BAD_FPU;
		return;
	      }
	  }

	/* Fixed-point conversion with #0 immediate is encoded as an
	   integer conversion.  */
	if (inst.operands[2].present && inst.operands[2].imm == 0)
	  goto int_encode;
	NEON_ENCODE (IMMED, inst);
	if (flavour != neon_cvt_flavour_invalid)
	  inst.instruction |= enctab[flavour];
	inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
	inst.instruction |= HI1 (inst.operands[0].reg) << 22;
	inst.instruction |= LOW4 (inst.operands[1].reg);
	inst.instruction |= HI1 (inst.operands[1].reg) << 5;
	inst.instruction |= neon_quad (rs) << 6;
	inst.instruction |= 1 << 21;
	if (flavour < neon_cvt_flavour_s16_f16)
	  {
	    /* 32-bit elements: immediate field holds 32 - fraction bits.
	       (Bit 21 was already set above; this OR is redundant but
	       harmless.)  */
	    inst.instruction |= 1 << 21;
	    immbits = 32 - inst.operands[2].imm;
	    inst.instruction |= immbits << 16;
	  }
	else
	  {
	    /* 16-bit (half-precision) elements: 16 - fraction bits, and
	       clear bit 9.  */
	    inst.instruction |= 3 << 20;
	    immbits = 16 - inst.operands[2].imm;
	    inst.instruction |= immbits << 16;
	    inst.instruction &= ~(1 << 9);
	  }

	neon_dp_fixup (&inst);
      }
      break;

    case NS_QQ:
      if ((mode == neon_cvt_mode_a || mode == neon_cvt_mode_n
	   || mode == neon_cvt_mode_m || mode == neon_cvt_mode_p)
	  && (flavour == neon_cvt_flavour_s16_f16
	      || flavour == neon_cvt_flavour_u16_f16
	      || flavour == neon_cvt_flavour_s32_f32
	      || flavour == neon_cvt_flavour_u32_f32))
	{
	  /* VCVT{A,N,P,M} need ARMv8 Neon (or MVE).  */
	  if (!check_simd_pred_availability (true,
					     NEON_CHECK_CC | NEON_CHECK_ARCH8))
	    return;
	}
      else if (mode == neon_cvt_mode_z
	       && (flavour == neon_cvt_flavour_f16_s16
		   || flavour == neon_cvt_flavour_f16_u16
		   || flavour == neon_cvt_flavour_s16_f16
		   || flavour == neon_cvt_flavour_u16_f16
		   || flavour == neon_cvt_flavour_f32_u32
		   || flavour == neon_cvt_flavour_f32_s32
		   || flavour == neon_cvt_flavour_s32_f32
		   || flavour == neon_cvt_flavour_u32_f32))
	{
	  if (!check_simd_pred_availability (true,
					     NEON_CHECK_CC | NEON_CHECK_ARCH))
	    return;
	}
      /* fall through.  */
    case NS_DD:
      if (mode != neon_cvt_mode_x && mode != neon_cvt_mode_z)
	{
	  /* VCVT{A,N,P,M} rounding-mode variants.  */
	  NEON_ENCODE (FLOAT, inst);
	  if (!check_simd_pred_availability (true,
					     NEON_CHECK_CC | NEON_CHECK_ARCH8))
	    return;

	  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
	  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
	  inst.instruction |= LOW4 (inst.operands[1].reg);
	  inst.instruction |= HI1 (inst.operands[1].reg) << 5;
	  inst.instruction |= neon_quad (rs) << 6;
	  /* Bit 7: unsigned result.  */
	  inst.instruction |= (flavour == neon_cvt_flavour_u16_f16
			       || flavour == neon_cvt_flavour_u32_f32) << 7;
	  inst.instruction |= mode << 8;
	  if (flavour == neon_cvt_flavour_u16_f16
	      || flavour == neon_cvt_flavour_s16_f16)
	    /* Mask off the original size bits and reencode them.  */
	    inst.instruction = ((inst.instruction & 0xfff3ffff) | (1 << 18));

	  if (thumb_mode)
	    inst.instruction |= 0xfc000000;
	  else
	    inst.instruction |= 0xf0000000;
	}
      else
	{
    int_encode:
	  {
	    /* Plain integer conversion, indexed by flavour.  Also reached
	       from the fixed-point path when the immediate is #0.  */
	    unsigned enctab[] = { 0x100, 0x180, 0x0, 0x080,
				  0x100, 0x180, 0x0, 0x080};

	    NEON_ENCODE (INTEGER, inst);

	    if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext))
	      {
		if (vfp_or_neon_is_neon (NEON_CHECK_CC | NEON_CHECK_ARCH) == FAIL)
		  return;
	      }

	    if (flavour != neon_cvt_flavour_invalid)
	      inst.instruction |= enctab[flavour];

	    inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
	    inst.instruction |= HI1 (inst.operands[0].reg) << 22;
	    inst.instruction |= LOW4 (inst.operands[1].reg);
	    inst.instruction |= HI1 (inst.operands[1].reg) << 5;
	    inst.instruction |= neon_quad (rs) << 6;
	    if (flavour >= neon_cvt_flavour_s16_f16
		&& flavour <= neon_cvt_flavour_f16_u16)
	      /* Half precision.  */
	      inst.instruction |= 1 << 18;
	    else
	      inst.instruction |= 2 << 18;

	    neon_dp_fixup (&inst);
	  }
	}
      break;

    /* Half-precision conversions for Advanced SIMD -- neon.  */
    case NS_QD:
    case NS_DQ:
      if (vfp_or_neon_is_neon (NEON_CHECK_CC | NEON_CHECK_ARCH) == FAIL)
	return;

      if ((rs == NS_DQ)
	  && (inst.vectype.el[0].size != 16 || inst.vectype.el[1].size != 32))
	{
	  as_bad (_("operand size must match register width"));
	  break;
	}

      if ((rs == NS_QD)
	  && ((inst.vectype.el[0].size != 32 || inst.vectype.el[1].size != 16)))
	{
	  as_bad (_("operand size must match register width"));
	  break;
	}

      if (rs == NS_DQ)
	{
	  if (flavour == neon_cvt_flavour_bf16_f32)
	    {
	      if (vfp_or_neon_is_neon (NEON_CHECK_ARCH8) == FAIL)
		return;
	      constraint (!mark_feature_used (&arm_ext_bf16), _(BAD_BF16));
	      /* VCVT.bf16.f32.  */
	      inst.instruction = 0x11b60640;
	    }
	  else
	    /* VCVT.f16.f32.  */
	    inst.instruction = 0x3b60600;
	}
      else
	/* VCVT.f32.f16.  */
	inst.instruction = 0x3b60700;

      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= LOW4 (inst.operands[1].reg);
      inst.instruction |= HI1 (inst.operands[1].reg) << 5;
      neon_dp_fixup (&inst);
      break;

    default:
      /* Some VFP conversions go here (s32 <-> f32, u32 <-> f32).  */
      if (mode == neon_cvt_mode_x || mode == neon_cvt_mode_z)
	do_vfp_nsyn_cvt (rs, flavour);
      else
	do_vfp_nsyn_cvt_fpv8 (flavour, mode);
    }
}
  16783. static void
  16784. do_neon_cvtr (void)
  16785. {
  16786. do_neon_cvt_1 (neon_cvt_mode_x);
  16787. }
  16788. static void
  16789. do_neon_cvt (void)
  16790. {
  16791. do_neon_cvt_1 (neon_cvt_mode_z);
  16792. }
  16793. static void
  16794. do_neon_cvta (void)
  16795. {
  16796. do_neon_cvt_1 (neon_cvt_mode_a);
  16797. }
  16798. static void
  16799. do_neon_cvtn (void)
  16800. {
  16801. do_neon_cvt_1 (neon_cvt_mode_n);
  16802. }
  16803. static void
  16804. do_neon_cvtp (void)
  16805. {
  16806. do_neon_cvt_1 (neon_cvt_mode_p);
  16807. }
  16808. static void
  16809. do_neon_cvtm (void)
  16810. {
  16811. do_neon_cvt_1 (neon_cvt_mode_m);
  16812. }
  16813. static void
  16814. do_neon_cvttb_2 (bool t, bool to, bool is_double)
  16815. {
  16816. if (is_double)
  16817. mark_feature_used (&fpu_vfp_ext_armv8);
  16818. encode_arm_vfp_reg (inst.operands[0].reg,
  16819. (is_double && !to) ? VFP_REG_Dd : VFP_REG_Sd);
  16820. encode_arm_vfp_reg (inst.operands[1].reg,
  16821. (is_double && to) ? VFP_REG_Dm : VFP_REG_Sm);
  16822. inst.instruction |= to ? 0x10000 : 0;
  16823. inst.instruction |= t ? 0x80 : 0;
  16824. inst.instruction |= is_double ? 0x100 : 0;
  16825. do_vfp_cond_or_thumb ();
  16826. }
/* Worker for VCVTB (T == false) and VCVTT (T == true).  Q-register shapes
   take the MVE path; the remaining shapes are matched against the VFP
   half-precision and bf16 conversions by trial type checks.  */
static void
do_neon_cvttb_1 (bool t)
{
  enum neon_shape rs = neon_select_shape (NS_HF, NS_HD, NS_FH, NS_FF, NS_FD,
					  NS_DF, NS_DH, NS_QQ, NS_QQI, NS_NULL);

  if (rs == NS_NULL)
    return;
  else if (rs == NS_QQ || rs == NS_QQI)
    {
      int single_to_half = 0;
      if (!check_simd_pred_availability (true, NEON_CHECK_ARCH))
	return;

      enum neon_cvt_flavour flavour = get_neon_cvt_flavour (rs);

      /* MVE integer<->float flavours: re-dispatch as an unconditional
	 VCVT inside a VPT block.  */
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
	  && (flavour == neon_cvt_flavour_u16_f16
	      || flavour == neon_cvt_flavour_s16_f16
	      || flavour == neon_cvt_flavour_f16_s16
	      || flavour == neon_cvt_flavour_f16_u16
	      || flavour == neon_cvt_flavour_u32_f32
	      || flavour == neon_cvt_flavour_s32_f32
	      || flavour == neon_cvt_flavour_f32_s32
	      || flavour == neon_cvt_flavour_f32_u32))
	{
	  inst.cond = 0xf;
	  inst.instruction = N_MNEM_vcvt;
	  set_pred_insn_type (INSIDE_VPT_INSN);
	  do_neon_cvt_1 (neon_cvt_mode_z);
	  return;
	}
      else if (rs == NS_QQ && flavour == neon_cvt_flavour_f32_f16)
	single_to_half = 1;
      else if (rs == NS_QQ && flavour != neon_cvt_flavour_f16_f32)
	{
	  first_error (BAD_FPU);
	  return;
	}

      /* MVE VCVTB/VCVTT between f16 and f32 Q registers.  */
      inst.instruction = 0xee3f0e01;
      inst.instruction |= single_to_half << 28;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= LOW4 (inst.operands[0].reg) << 13;
      inst.instruction |= t << 12;
      inst.instruction |= HI1 (inst.operands[1].reg) << 5;
      inst.instruction |= LOW4 (inst.operands[1].reg) << 1;
      inst.is_neon = 1;
    }
  /* Each failed neon_check_type below sets inst.error; it is cleared once
     a match is found.  */
  else if (neon_check_type (2, rs, N_F16, N_F32 | N_VFP).type != NT_invtype)
    {
      inst.error = NULL;
      do_neon_cvttb_2 (t, /*to=*/true, /*is_double=*/false);
    }
  else if (neon_check_type (2, rs, N_F32 | N_VFP, N_F16).type != NT_invtype)
    {
      inst.error = NULL;
      do_neon_cvttb_2 (t, /*to=*/false, /*is_double=*/false);
    }
  else if (neon_check_type (2, rs, N_F16, N_F64 | N_VFP).type != NT_invtype)
    {
      /* The VCVTB and VCVTT instructions with D-register operands
         don't work for SP only targets.  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_armv8),
		  _(BAD_FPU));

      inst.error = NULL;
      do_neon_cvttb_2 (t, /*to=*/true, /*is_double=*/true);
    }
  else if (neon_check_type (2, rs, N_F64 | N_VFP, N_F16).type != NT_invtype)
    {
      /* The VCVTB and VCVTT instructions with D-register operands
         don't work for SP only targets.  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_armv8),
		  _(BAD_FPU));

      inst.error = NULL;
      do_neon_cvttb_2 (t, /*to=*/false, /*is_double=*/true);
    }
  else if (neon_check_type (2, rs, N_BF16 | N_VFP, N_F32).type != NT_invtype)
    {
      constraint (!mark_feature_used (&arm_ext_bf16), _(BAD_BF16));
      inst.error = NULL;
      /* Adjust the opcode bits for the bf16 variant.  */
      inst.instruction |= (1 << 8);
      inst.instruction &= ~(1 << 9);
      do_neon_cvttb_2 (t, /*to=*/true, /*is_double=*/false);
    }
  else
    return;
}
  16911. static void
  16912. do_neon_cvtb (void)
  16913. {
  16914. do_neon_cvttb_1 (false);
  16915. }
  16916. static void
  16917. do_neon_cvtt (void)
  16918. {
  16919. do_neon_cvttb_1 (true);
  16920. }
/* Encode the immediate form of VMOV/VMVN.  If the immediate cannot be
   encoded directly, try the bitwise inverse with the opposite mnemonic
   before giving up.  */
static void
neon_move_immediate (void)
{
  enum neon_shape rs = neon_select_shape (NS_DI, NS_QI, NS_NULL);
  struct neon_type_el et = neon_check_type (2, rs,
    N_I8 | N_I16 | N_I32 | N_I64 | N_F32 | N_KEY, N_EQK);
  unsigned immlo, immhi = 0, immbits;
  int op, cmode, float_p;

  constraint (et.type == NT_invtype,
	      _("operand size must be specified for immediate VMOV"));

  /* We start out as an MVN instruction if OP = 1, MOV otherwise.  */
  op = (inst.instruction & (1 << 5)) != 0;

  /* 64-bit immediates arrive split across imm (low) and reg (high).  */
  immlo = inst.operands[1].imm;
  if (inst.operands[1].regisimm)
    immhi = inst.operands[1].reg;

  constraint (et.size < 32 && (immlo & ~((1 << et.size) - 1)) != 0,
	      _("immediate has bits set outside the operand size"));

  float_p = inst.operands[1].immisfloat;

  if ((cmode = neon_cmode_for_move_imm (immlo, immhi, float_p, &immbits, &op,
					et.size, et.type)) == FAIL)
    {
      /* Invert relevant bits only.  */
      neon_invert_size (&immlo, &immhi, et.size);
      /* Flip from VMOV/VMVN to VMVN/VMOV. Some immediate types are unavailable
	 with one or the other; those cases are caught by
	 neon_cmode_for_move_imm.  */
      op = !op;

      if ((cmode = neon_cmode_for_move_imm (immlo, immhi, float_p, &immbits,
					    &op, et.size, et.type)) == FAIL)
	{
	  first_error (_("immediate out of range"));
	  return;
	}
    }

  /* Rewrite the OP bit with the (possibly flipped) value.  */
  inst.instruction &= ~(1 << 5);
  inst.instruction |= op << 5;

  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= neon_quad (rs) << 6;
  inst.instruction |= cmode << 8;

  neon_write_immbits (immbits);
}
/* VMVN: bitwise NOT of a register, or move of an inverted immediate
   (dispatched to neon_move_immediate).  */
static void
do_neon_mvn (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_CC | NEON_CHECK_ARCH))
    return;

  if (inst.operands[1].isreg)
    {
      enum neon_shape rs;
      /* MVE only has the Q-register form.  */
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	rs = neon_select_shape (NS_QQ, NS_NULL);
      else
	rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
      if (rs == NS_NULL)
	return;

      NEON_ENCODE (INTEGER, inst);
      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= LOW4 (inst.operands[1].reg);
      inst.instruction |= HI1 (inst.operands[1].reg) << 5;
      inst.instruction |= neon_quad (rs) << 6;
    }
  else
    {
      NEON_ENCODE (IMMED, inst);
      neon_move_immediate ();
    }

  neon_dp_fixup (&inst);

  /* On MVE the immediate form must target a Q register.  */
  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      constraint (!inst.operands[1].isreg && !inst.operands[0].isquad, BAD_FPU);
    }
}
  16995. /* Encode instructions of form:
  16996. |28/24|23|22|21 20|19 16|15 12|11 8|7|6|5|4|3 0|
  16997. | U |x |D |size | Rn | Rd |x x x x|N|x|M|x| Rm | */
  16998. static void
  16999. neon_mixed_length (struct neon_type_el et, unsigned size)
  17000. {
  17001. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  17002. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  17003. inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  17004. inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  17005. inst.instruction |= LOW4 (inst.operands[2].reg);
  17006. inst.instruction |= HI1 (inst.operands[2].reg) << 5;
  17007. inst.instruction |= (et.type == NT_unsigned) << 24;
  17008. inst.instruction |= neon_logbits (size) << 20;
  17009. neon_dp_fixup (&inst);
  17010. }
/* VADDL/VSUBL/VABDL.  On Neon these are genuine lengthening (Q = D op D)
   operations; on MVE they only exist as aliases for VADD/VSUB/VABD inside
   an IT block whose LE/LT condition was absorbed into the mnemonic.  */
static void
do_neon_dyadic_long (void)
{
  enum neon_shape rs = neon_select_shape (NS_QDD, NS_HHH, NS_FFF, NS_DDD, NS_NULL);
  if (rs == NS_QDD)
    {
      if (vfp_or_neon_is_neon (NEON_CHECK_ARCH | NEON_CHECK_CC) == FAIL)
	return;

      NEON_ENCODE (INTEGER, inst);
      /* FIXME: Type checking for lengthening op.  */
      struct neon_type_el et = neon_check_type (3, NS_QDD,
	N_EQK | N_DBL, N_EQK, N_SU_32 | N_KEY);
      neon_mixed_length (et, et.size);
    }
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
	   && (inst.cond == 0xf || inst.cond == 0x10))
    {
      /* If parsing for MVE, vaddl/vsubl/vabdl{e,t} can only be vadd/vsub/vabd
	 in an IT block with le/lt conditions.  */

      /* Map the trailing 'l'/'t' back onto the real condition code
	 (0xb == LT, 0xd == LE).  */
      if (inst.cond == 0xf)
	inst.cond = 0xb;
      else if (inst.cond == 0x10)
	inst.cond = 0xd;

      inst.pred_insn_type = INSIDE_IT_INSN;

      /* Re-dispatch through the non-long encoder for the base mnemonic.  */
      if (inst.instruction == N_MNEM_vaddl)
	{
	  inst.instruction = N_MNEM_vadd;
	  do_neon_addsub_if_i ();
	}
      else if (inst.instruction == N_MNEM_vsubl)
	{
	  inst.instruction = N_MNEM_vsub;
	  do_neon_addsub_if_i ();
	}
      else if (inst.instruction == N_MNEM_vabdl)
	{
	  inst.instruction = N_MNEM_vabd;
	  do_neon_dyadic_if_su ();
	}
    }
  else
    first_error (BAD_FPU);
}
  17054. static void
  17055. do_neon_abal (void)
  17056. {
  17057. struct neon_type_el et = neon_check_type (3, NS_QDD,
  17058. N_EQK | N_INT | N_DBL, N_EQK, N_SU_32 | N_KEY);
  17059. neon_mixed_length (et, et.size);
  17060. }
  17061. static void
  17062. neon_mac_reg_scalar_long (unsigned regtypes, unsigned scalartypes)
  17063. {
  17064. if (inst.operands[2].isscalar)
  17065. {
  17066. struct neon_type_el et = neon_check_type (3, NS_QDS,
  17067. N_EQK | N_DBL, N_EQK, regtypes | N_KEY);
  17068. NEON_ENCODE (SCALAR, inst);
  17069. neon_mul_mac (et, et.type == NT_unsigned);
  17070. }
  17071. else
  17072. {
  17073. struct neon_type_el et = neon_check_type (3, NS_QDD,
  17074. N_EQK | N_DBL, N_EQK, scalartypes | N_KEY);
  17075. NEON_ENCODE (INTEGER, inst);
  17076. neon_mixed_length (et, et.size);
  17077. }
  17078. }
/* VMLAL/VMLSL/VMULL-style long MAC, third operand either vector or
   scalar; 16/32-bit signed or unsigned element types.  */
static void
do_neon_mac_maybe_scalar_long (void)
{
  neon_mac_reg_scalar_long (N_S16 | N_S32 | N_U16 | N_U32, N_SU_32);
}
  17084. /* Like neon_scalar_for_mul, this function generate Rm encoding from GAS's
  17085. internal SCALAR. QUAD_P is 1 if it's for Q format, otherwise it's 0. */
  17086. static unsigned
  17087. neon_scalar_for_fmac_fp16_long (unsigned scalar, unsigned quad_p)
  17088. {
  17089. unsigned regno = NEON_SCALAR_REG (scalar);
  17090. unsigned elno = NEON_SCALAR_INDEX (scalar);
  17091. if (quad_p)
  17092. {
  17093. if (regno > 7 || elno > 3)
  17094. goto bad_scalar;
  17095. return ((regno & 0x7)
  17096. | ((elno & 0x1) << 3)
  17097. | (((elno >> 1) & 0x1) << 5));
  17098. }
  17099. else
  17100. {
  17101. if (regno > 15 || elno > 1)
  17102. goto bad_scalar;
  17103. return (((regno & 0x1) << 5)
  17104. | ((regno >> 1) & 0x7)
  17105. | ((elno & 0x1) << 3));
  17106. }
  17107. bad_scalar:
  17108. first_error (_("scalar out of range for multiply instruction"));
  17109. return 0;
  17110. }
/* VFMAL/VFMSL (FP16 multiply-accumulate long).  SUBTYPE is 0 for the add
   (vfmal) variant and 1 for the subtract (vfmsl) variant.  Encodes via the
   generic three-same path, then patches the fields that differ from the
   standard Neon three-same layout.  */
static void
do_neon_fmac_maybe_scalar_long (int subtype)
{
  enum neon_shape rs;
  int high8;
  /* NOTE: vfmal/vfmsl use slightly different NEON three-same encoding. 'size"
     field (bits[21:20]) has different meaning. For scalar index variant, it's
     used to differentiate add and subtract, otherwise it's with fixed value
     0x2.  */
  int size = -1;

  /* vfmal/vfmsl are in three-same D/Q register format or the third operand can
     be a scalar index register.  */
  if (inst.operands[2].isscalar)
    {
      high8 = 0xfe000000;
      if (subtype)
	size = 16;
      rs = neon_select_shape (NS_DHS, NS_QDS, NS_NULL);
    }
  else
    {
      high8 = 0xfc000000;
      size = 32;
      if (subtype)
	inst.instruction |= (0x1 << 23);
      rs = neon_select_shape (NS_DHH, NS_QDD, NS_NULL);
    }

  if (inst.cond != COND_ALWAYS)
    as_warn (_("vfmal/vfmsl with FP16 type cannot be conditional, the "
	       "behaviour is UNPREDICTABLE"));

  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_fp16_fml),
	      _(BAD_FP16));

  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_neon_ext_armv8),
	      _(BAD_FPU));

  /* "opcode" from template has included "ubit", so simply pass 0 here. Also,
     the "S" bit in size field has been reused to differentiate vfmal and vfmsl,
     so we simply pass -1 as size.  */
  unsigned quad_p = (rs == NS_QDD || rs == NS_QDS);
  neon_three_same (quad_p, 0, size);

  /* Undo neon_dp_fixup. Redo the high eight bits.  */
  inst.instruction &= 0x00ffffff;
  inst.instruction |= high8;

  /* Unlike usually NEON three-same, encoding for Vn and Vm will depend on
     whether the instruction is in Q form and whether Vm is a scalar indexed
     operand.  */
  if (inst.operands[2].isscalar)
    {
      /* Replace the Rm field with the special scalar encoding.  */
      unsigned rm
	= neon_scalar_for_fmac_fp16_long (inst.operands[2].reg, quad_p);
      inst.instruction &= 0xffffffd0;
      inst.instruction |= rm;

      if (!quad_p)
	{
	  /* Redo Rn as well.  */
	  inst.instruction &= 0xfff0ff7f;
	  inst.instruction |= HI4 (inst.operands[1].reg) << 16;
	  inst.instruction |= LOW1 (inst.operands[1].reg) << 7;
	}
    }
  else if (!quad_p)
    {
      /* Redo Rn and Rm.  */
      inst.instruction &= 0xfff0ff50;
      inst.instruction |= HI4 (inst.operands[1].reg) << 16;
      inst.instruction |= LOW1 (inst.operands[1].reg) << 7;
      inst.instruction |= HI4 (inst.operands[2].reg);
      inst.instruction |= LOW1 (inst.operands[2].reg) << 5;
    }
}
  17180. static void
  17181. do_neon_vfmal (void)
  17182. {
  17183. return do_neon_fmac_maybe_scalar_long (0);
  17184. }
  17185. static void
  17186. do_neon_vfmsl (void)
  17187. {
  17188. return do_neon_fmac_maybe_scalar_long (1);
  17189. }
  17190. static void
  17191. do_neon_dyadic_wide (void)
  17192. {
  17193. struct neon_type_el et = neon_check_type (3, NS_QQD,
  17194. N_EQK | N_DBL, N_EQK | N_DBL, N_SU_32 | N_KEY);
  17195. neon_mixed_length (et, et.size);
  17196. }
  17197. static void
  17198. do_neon_dyadic_narrow (void)
  17199. {
  17200. struct neon_type_el et = neon_check_type (3, NS_QDD,
  17201. N_EQK | N_DBL, N_EQK, N_I16 | N_I32 | N_I64 | N_KEY);
  17202. /* Operand sign is unimportant, and the U bit is part of the opcode,
  17203. so force the operand type to integer. */
  17204. et.type = NT_integer;
  17205. neon_mixed_length (et, et.size / 2);
  17206. }
/* VQDMULL/VQDMLAL/VQDMLSL: saturating doubling long multiply; signed
   16/32-bit element types only, vector or scalar third operand.  */
static void
do_neon_mul_sat_scalar_long (void)
{
  neon_mac_reg_scalar_long (N_S16 | N_S32, N_S16 | N_S32);
}
/* VMULL: long multiply.  Integer/scalar forms go through the generic long
   MAC path; polynomial forms (P8, and P64 with the crypto extension) are
   handled here.  */
static void
do_neon_vmull (void)
{
  if (inst.operands[2].isscalar)
    do_neon_mac_maybe_scalar_long ();
  else
    {
      struct neon_type_el et = neon_check_type (3, NS_QDD,
	N_EQK | N_DBL, N_EQK, N_SU_32 | N_P8 | N_P64 | N_KEY);

      if (et.type == NT_poly)
	NEON_ENCODE (POLY, inst);
      else
	NEON_ENCODE (INTEGER, inst);

      /* For polynomial encoding the U bit must be zero, and the size must
	 be 8 (encoded as 0b00) or, on ARMv8 or later 64 (encoded, non
	 obviously, as 0b10).  */
      if (et.size == 64)
	{
	  /* Check we're on the correct architecture.  */
	  if (!mark_feature_used (&fpu_crypto_ext_armv8))
	    inst.error =
	      _("Instruction form not available on this architecture.");

	  /* Pretend the size is 32 so neon_mixed_length emits 0b10.  */
	  et.size = 32;
	}

      neon_mixed_length (et, et.size);
    }
}
/* VEXT: extract a vector from a pair of concatenated source vectors.
   The assembly-level index is in elements; the encoding wants bytes.  */
static void
do_neon_ext (void)
{
  enum neon_shape rs = neon_select_shape (NS_DDDI, NS_QQQI, NS_NULL);
  struct neon_type_el et = neon_check_type (3, rs,
    N_EQK, N_EQK, N_8 | N_16 | N_32 | N_64 | N_KEY);
  /* Convert the element index into a byte offset.  */
  unsigned imm = (inst.operands[3].imm * et.size) / 8;

  /* Byte offset must stay within one source register (8 or 16 bytes).  */
  constraint (imm >= (unsigned) (neon_quad (rs) ? 16 : 8),
	      _("shift out of range"));

  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  inst.instruction |= LOW4 (inst.operands[2].reg);
  inst.instruction |= HI1 (inst.operands[2].reg) << 5;
  inst.instruction |= neon_quad (rs) << 6;
  inst.instruction |= imm << 8;

  neon_dp_fixup (&inst);
}
/* VREV16/VREV32/VREV64: reverse elements within regions of a vector.  */
static void
do_neon_rev (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  enum neon_shape rs;
  /* MVE only has the Q-register form.  */
  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    rs = neon_select_shape (NS_QQ, NS_NULL);
  else
    rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);

  struct neon_type_el et = neon_check_type (2, rs,
    N_EQK, N_8 | N_16 | N_32 | N_KEY);

  unsigned op = (inst.instruction >> 7) & 3;
  /* N (width of reversed regions) is encoded as part of the bitmask. We
     extract it here to check the elements to be reversed are smaller.
     Otherwise we'd get a reserved instruction.  */
  unsigned elsize = (op == 2) ? 16 : (op == 1) ? 32 : (op == 0) ? 64 : 0;

  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext) && elsize == 64
      && inst.operands[0].reg == inst.operands[1].reg)
    as_tsktsk (_("Warning: 64-bit element size and same destination and source"
		 " operands makes instruction UNPREDICTABLE"));

  gas_assert (elsize != 0);
  constraint (et.size >= elsize,
	      _("elements must be smaller than reversal region"));

  neon_two_same (neon_quad (rs), 1, et.size);
}
/* VDUP: duplicate either a vector scalar or an ARM core register into all
   lanes of a vector.  The two source kinds use entirely different
   encodings.  */
static void
do_neon_dup (void)
{
  if (inst.operands[1].isscalar)
    {
      /* Scalar source: VDUP <Dd|Qd>, <Dm[x]>.  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_neon_ext_v1),
		  BAD_FPU);
      enum neon_shape rs = neon_select_shape (NS_DS, NS_QS, NS_NULL);
      struct neon_type_el et = neon_check_type (2, rs,
	N_EQK, N_8 | N_16 | N_32 | N_KEY);
      unsigned sizebits = et.size >> 3;
      unsigned dm = NEON_SCALAR_REG (inst.operands[1].reg);
      int logsize = neon_logbits (et.size);
      /* Lane index shifted into position above the size bits.  */
      unsigned x = NEON_SCALAR_INDEX (inst.operands[1].reg) << logsize;

      if (vfp_or_neon_is_neon (NEON_CHECK_CC) == FAIL)
	return;

      NEON_ENCODE (SCALAR, inst);
      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= LOW4 (dm);
      inst.instruction |= HI1 (dm) << 5;
      inst.instruction |= neon_quad (rs) << 6;
      inst.instruction |= x << 17;
      inst.instruction |= sizebits << 16;

      neon_dp_fixup (&inst);
    }
  else
    {
      /* Core-register source: VDUP <Dd|Qd>, <Rm>.  */
      enum neon_shape rs = neon_select_shape (NS_DR, NS_QR, NS_NULL);
      struct neon_type_el et = neon_check_type (2, rs,
	N_8 | N_16 | N_32 | N_KEY, N_EQK);
      if (rs == NS_QR)
	{
	  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH))
	    return;
	}
      else
	constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_neon_ext_v1),
		    BAD_FPU);

      /* SP/PC sources are unpredictable on MVE; warn rather than error.  */
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	{
	  if (inst.operands[1].reg == REG_SP)
	    as_tsktsk (MVE_BAD_SP);
	  else if (inst.operands[1].reg == REG_PC)
	    as_tsktsk (MVE_BAD_PC);
	}

      /* Duplicate ARM register to lanes of vector.  */
      NEON_ENCODE (ARMREG, inst);
      switch (et.size)
	{
	case 8:  inst.instruction |= 0x400000; break;
	case 16: inst.instruction |= 0x000020; break;
	case 32: inst.instruction |= 0x000000; break;
	default: break;
	}
      inst.instruction |= LOW4 (inst.operands[1].reg) << 12;
      inst.instruction |= LOW4 (inst.operands[0].reg) << 16;
      inst.instruction |= HI1 (inst.operands[0].reg) << 7;
      inst.instruction |= neon_quad (rs) << 21;
      /* The encoding for this instruction is identical for the ARM and Thumb
	 variants, except for the condition field.  */
      do_vfp_cond_or_thumb ();
    }
}
/* MVE VMOV between two GPRs and two vector lanes.  TOQ is nonzero for the
   GPR->vector direction.  Operand slots are re-indexed so the constraint
   and encoding code below is direction-agnostic.  */
static void
do_mve_mov (int toQ)
{
  if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    return;

  if (inst.cond > COND_ALWAYS)
    inst.pred_insn_type = MVE_UNPREDICABLE_INSN;

  /* Default (vector->GPR) operand order: Rt, Rt2, Qd[idx], Qd[idx2].  */
  unsigned Rt = 0, Rt2 = 1, Q0 = 2, Q1 = 3;
  if (toQ)
    {
      /* GPR->vector order: Qd[idx], Qd[idx2], Rt, Rt2.  */
      Q0 = 0;
      Q1 = 1;
      Rt = 2;
      Rt2 = 3;
    }

  /* Lane operands encode reg*32 + index; the two indices must be exactly
     two apart.  */
  constraint (inst.operands[Q0].reg != inst.operands[Q1].reg + 2,
	      _("Index one must be [2,3] and index two must be two less than"
		" index one."));
  constraint (!toQ && inst.operands[Rt].reg == inst.operands[Rt2].reg,
	      _("Destination registers may not be the same"));
  constraint (inst.operands[Rt].reg == REG_SP
	      || inst.operands[Rt2].reg == REG_SP,
	      BAD_SP);
  constraint (inst.operands[Rt].reg == REG_PC
	      || inst.operands[Rt2].reg == REG_PC,
	      BAD_PC);

  inst.instruction = 0xec000f00;
  /* Q1 holds reg*32 + lane-index: /32 recovers the register, %4 the index.  */
  inst.instruction |= HI1 (inst.operands[Q1].reg / 32) << 23;
  inst.instruction |= !!toQ << 20;
  inst.instruction |= inst.operands[Rt2].reg << 16;
  inst.instruction |= LOW4 (inst.operands[Q1].reg / 32) << 13;
  inst.instruction |= (inst.operands[Q1].reg % 4) << 4;
  inst.instruction |= inst.operands[Rt].reg;
}
  17382. static void
  17383. do_mve_movn (void)
  17384. {
  17385. if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  17386. return;
  17387. if (inst.cond > COND_ALWAYS)
  17388. inst.pred_insn_type = INSIDE_VPT_INSN;
  17389. else
  17390. inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;
  17391. struct neon_type_el et = neon_check_type (2, NS_QQ, N_EQK, N_I16 | N_I32
  17392. | N_KEY);
  17393. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  17394. inst.instruction |= (neon_logbits (et.size) - 1) << 18;
  17395. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  17396. inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  17397. inst.instruction |= LOW4 (inst.operands[1].reg);
  17398. inst.is_neon = 1;
  17399. }
  17400. /* VMOV has particularly many variations. It can be one of:
  17401. 0. VMOV<c><q> <Qd>, <Qm>
  17402. 1. VMOV<c><q> <Dd>, <Dm>
  17403. (Register operations, which are VORR with Rm = Rn.)
  17404. 2. VMOV<c><q>.<dt> <Qd>, #<imm>
  17405. 3. VMOV<c><q>.<dt> <Dd>, #<imm>
  17406. (Immediate loads.)
  17407. 4. VMOV<c><q>.<size> <Dn[x]>, <Rd>
  17408. (ARM register to scalar.)
  17409. 5. VMOV<c><q> <Dm>, <Rd>, <Rn>
  17410. (Two ARM registers to vector.)
  17411. 6. VMOV<c><q>.<dt> <Rd>, <Dn[x]>
  17412. (Scalar to ARM register.)
  17413. 7. VMOV<c><q> <Rd>, <Rn>, <Dm>
  17414. (Vector to two ARM registers.)
  17415. 8. VMOV.F32 <Sd>, <Sm>
  17416. 9. VMOV.F64 <Dd>, <Dm>
  17417. (VFP register moves.)
  17418. 10. VMOV.F32 <Sd>, #imm
  17419. 11. VMOV.F64 <Dd>, #imm
  17420. (VFP float immediate load.)
  17421. 12. VMOV <Rd>, <Sm>
  17422. (VFP single to ARM reg.)
  17423. 13. VMOV <Sd>, <Rm>
  17424. (ARM reg to VFP single.)
  17425. 14. VMOV <Rd>, <Re>, <Sn>, <Sm>
  17426. (Two ARM regs to two VFP singles.)
  17427. 15. VMOV <Sd>, <Se>, <Rn>, <Rm>
  17428. (Two VFP singles to two ARM regs.)
  17429. 16. VMOV<c> <Rt>, <Rt2>, <Qd[idx]>, <Qd[idx2]>
  17430. 17. VMOV<c> <Qd[idx]>, <Qd[idx2]>, <Rt>, <Rt2>
  17431. 18. VMOV<c>.<dt> <Rt>, <Qn[idx]>
  17432. 19. VMOV<c>.<dt> <Qd[idx]>, <Rt>
  17433. These cases can be disambiguated using neon_select_shape, except cases 1/9
  17434. and 3/11 which depend on the operand type too.
  17435. All the encoded bits are hardcoded by this function.
  17436. Cases 4, 6 may be used with VFPv1 and above (only 32-bit transfers!).
  17437. Cases 5, 7 may be used with VFPv2 and above.
  17438. FIXME: Some of the checking may be a bit sloppy (in a couple of cases you
  17439. can specify a type where it doesn't make sense to, and is ignored). */
/* Central VMOV encoder.  Dispatches on the operand shape to one of the
   many VMOV variants listed in the comment above; the "case N" comments
   refer to that list.  */
static void
do_neon_mov (void)
{
  enum neon_shape rs = neon_select_shape (NS_RRSS, NS_SSRR, NS_RRFF, NS_FFRR,
					  NS_DRR, NS_RRD, NS_QQ, NS_DD, NS_QI,
					  NS_DI, NS_SR, NS_RS, NS_FF, NS_FI,
					  NS_RF, NS_FR, NS_HR, NS_RH, NS_HI,
					  NS_NULL);
  struct neon_type_el et;
  const char *ldconst = 0;

  switch (rs)
    {
    case NS_DD:  /* case 1/9.  */
      et = neon_check_type (2, rs, N_EQK, N_F64 | N_KEY);
      /* It is not an error here if no type is given.  */
      inst.error = NULL;

      /* In MVE we interpret the following instructions as same, so ignoring
	 the following type (float) and size (64) checks.
	 a: VMOV<c><q> <Dd>, <Dm>
	 b: VMOV<c><q>.F64 <Dd>, <Dm>.  */
      if ((et.type == NT_float && et.size == 64)
	  || (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)))
	{
	  /* VFP double copy.  */
	  do_vfp_nsyn_opcode ("fcpyd");
	  break;
	}
      /* fall through.  */

    case NS_QQ:  /* case 0/1.  */
      {
	if (!check_simd_pred_availability (false,
					   NEON_CHECK_CC | NEON_CHECK_ARCH))
	  return;
	/* The architecture manual I have doesn't explicitly state which
	   value the U bit should have for register->register moves, but
	   the equivalent VORR instruction has U = 0, so do that.  */
	inst.instruction = 0x0200110;
	inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
	inst.instruction |= HI1 (inst.operands[0].reg) << 22;
	inst.instruction |= LOW4 (inst.operands[1].reg);
	inst.instruction |= HI1 (inst.operands[1].reg) << 5;
	/* VORR encodes Rm == Rn, so the source appears in both fields.  */
	inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
	inst.instruction |= HI1 (inst.operands[1].reg) << 7;
	inst.instruction |= neon_quad (rs) << 6;

	neon_dp_fixup (&inst);
      }
      break;

    case NS_DI:  /* case 3/11.  */
      et = neon_check_type (2, rs, N_EQK, N_F64 | N_KEY);
      inst.error = NULL;
      if (et.type == NT_float && et.size == 64)
	{
	  /* case 11 (fconstd).  */
	  ldconst = "fconstd";
	  goto encode_fconstd;
	}
      /* fall through.  */

    case NS_QI:  /* case 2/3.  */
      if (!check_simd_pred_availability (false,
					 NEON_CHECK_CC | NEON_CHECK_ARCH))
	return;
      inst.instruction = 0x0800010;
      neon_move_immediate ();
      neon_dp_fixup (&inst);
      break;

    case NS_SR:  /* case 4.  */
      {
	unsigned bcdebits = 0;
	int logsize;
	unsigned dn = NEON_SCALAR_REG (inst.operands[0].reg);
	unsigned x = NEON_SCALAR_INDEX (inst.operands[0].reg);

	/* .<size> is optional here, defaulting to .32.  */
	if (inst.vectype.elems == 0
	    && inst.operands[0].vectype.type == NT_invtype
	    && inst.operands[1].vectype.type == NT_invtype)
	  {
	    inst.vectype.el[0].type = NT_untyped;
	    inst.vectype.el[0].size = 32;
	    inst.vectype.elems = 1;
	  }

	et = neon_check_type (2, NS_NULL, N_8 | N_16 | N_32 | N_KEY, N_EQK);
	logsize = neon_logbits (et.size);

	if (et.size != 32)
	  {
	    if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
		&& vfp_or_neon_is_neon (NEON_CHECK_ARCH) == FAIL)
	      return;
	  }
	else
	  {
	    constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1)
			&& !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
			_(BAD_FPU));
	  }

	if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	  {
	    if (inst.operands[1].reg == REG_SP)
	      as_tsktsk (MVE_BAD_SP);
	    else if (inst.operands[1].reg == REG_PC)
	      as_tsktsk (MVE_BAD_PC);
	  }
	/* D-register scalars index 64 bits, Q-register scalars 128.  */
	unsigned size = inst.operands[0].isscalar == 1 ? 64 : 128;

	constraint (et.type == NT_invtype, _("bad type for scalar"));
	constraint (x >= size / et.size, _("scalar index out of range"));

	/* Element size selects the opc1/opc2 bit pattern.  */
	switch (et.size)
	  {
	  case 8:  bcdebits = 0x8; break;
	  case 16: bcdebits = 0x1; break;
	  case 32: bcdebits = 0x0; break;
	  default: ;
	  }

	bcdebits |= (x & ((1 << (3-logsize)) - 1)) << logsize;

	inst.instruction = 0xe000b10;
	do_vfp_cond_or_thumb ();
	inst.instruction |= LOW4 (dn) << 16;
	inst.instruction |= HI1 (dn) << 7;
	inst.instruction |= inst.operands[1].reg << 12;
	inst.instruction |= (bcdebits & 3) << 5;
	inst.instruction |= ((bcdebits >> 2) & 3) << 21;
	inst.instruction |= (x >> (3-logsize)) << 16;
      }
      break;

    case NS_DRR:  /* case 5 (fmdrr).  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v2)
		  && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
		  _(BAD_FPU));

      inst.instruction = 0xc400b10;
      do_vfp_cond_or_thumb ();
      inst.instruction |= LOW4 (inst.operands[0].reg);
      inst.instruction |= HI1 (inst.operands[0].reg) << 5;
      inst.instruction |= inst.operands[1].reg << 12;
      inst.instruction |= inst.operands[2].reg << 16;
      break;

    case NS_RS:  /* case 6.  */
      {
	unsigned logsize;
	unsigned dn = NEON_SCALAR_REG (inst.operands[1].reg);
	unsigned x = NEON_SCALAR_INDEX (inst.operands[1].reg);
	unsigned abcdebits = 0;

	/* .<dt> is optional here, defaulting to .32.  */
	if (inst.vectype.elems == 0
	    && inst.operands[0].vectype.type == NT_invtype
	    && inst.operands[1].vectype.type == NT_invtype)
	  {
	    inst.vectype.el[0].type = NT_untyped;
	    inst.vectype.el[0].size = 32;
	    inst.vectype.elems = 1;
	  }

	et = neon_check_type (2, NS_NULL,
			      N_EQK, N_S8 | N_S16 | N_U8 | N_U16 | N_32 | N_KEY);
	logsize = neon_logbits (et.size);

	if (et.size != 32)
	  {
	    if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
		&& vfp_or_neon_is_neon (NEON_CHECK_CC
					| NEON_CHECK_ARCH) == FAIL)
	      return;
	  }
	else
	  {
	    constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1)
			&& !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
			_(BAD_FPU));
	  }

	if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
	  {
	    if (inst.operands[0].reg == REG_SP)
	      as_tsktsk (MVE_BAD_SP);
	    else if (inst.operands[0].reg == REG_PC)
	      as_tsktsk (MVE_BAD_PC);
	  }

	/* D-register scalars index 64 bits, Q-register scalars 128.  */
	unsigned size = inst.operands[1].isscalar == 1 ? 64 : 128;

	constraint (et.type == NT_invtype, _("bad type for scalar"));
	constraint (x >= size / et.size, _("scalar index out of range"));

	/* Element size and signedness select the opc1/opc2/U pattern.  */
	switch (et.size)
	  {
	  case 8:  abcdebits = (et.type == NT_signed) ? 0x08 : 0x18; break;
	  case 16: abcdebits = (et.type == NT_signed) ? 0x01 : 0x11; break;
	  case 32: abcdebits = 0x00; break;
	  default: ;
	  }

	abcdebits |= (x & ((1 << (3-logsize)) - 1)) << logsize;
	inst.instruction = 0xe100b10;
	do_vfp_cond_or_thumb ();
	inst.instruction |= LOW4 (dn) << 16;
	inst.instruction |= HI1 (dn) << 7;
	inst.instruction |= inst.operands[0].reg << 12;
	inst.instruction |= (abcdebits & 3) << 5;
	inst.instruction |= (abcdebits >> 2) << 21;
	inst.instruction |= (x >> (3-logsize)) << 16;
      }
      break;

    case NS_RRD:  /* case 7 (fmrrd).  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v2)
		  && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
		  _(BAD_FPU));

      inst.instruction = 0xc500b10;
      do_vfp_cond_or_thumb ();
      inst.instruction |= inst.operands[0].reg << 12;
      inst.instruction |= inst.operands[1].reg << 16;
      inst.instruction |= LOW4 (inst.operands[2].reg);
      inst.instruction |= HI1 (inst.operands[2].reg) << 5;
      break;

    case NS_FF:  /* case 8 (fcpys).  */
      do_vfp_nsyn_opcode ("fcpys");
      break;

    case NS_HI:
    case NS_FI:  /* case 10 (fconsts).  */
      ldconst = "fconsts";
    encode_fconstd:
      if (!inst.operands[1].immisfloat)
	{
	  unsigned new_imm;
	  /* Immediate has to fit in 8 bits so float is enough.  */
	  float imm = (float) inst.operands[1].imm;
	  memcpy (&new_imm, &imm, sizeof (float));
	  /* But the assembly may have been written to provide an integer
	     bit pattern that equates to a float, so check that the
	     conversion has worked.  */
	  if (is_quarter_float (new_imm))
	    {
	      if (is_quarter_float (inst.operands[1].imm))
		as_warn (_("immediate constant is valid both as a bit-pattern and a floating point value (using the fp value)"));

	      inst.operands[1].imm = new_imm;
	      inst.operands[1].immisfloat = 1;
	    }
	}

      if (is_quarter_float (inst.operands[1].imm))
	{
	  inst.operands[1].imm = neon_qfloat_bits (inst.operands[1].imm);
	  do_vfp_nsyn_opcode (ldconst);

	  /* ARMv8.2 fp16 vmov.f16 instruction.  */
	  if (rs == NS_HI)
	    do_scalar_fp16_v82_encode ();
	}
      else
	first_error (_("immediate out of range"));
      break;

    case NS_RH:
    case NS_RF:  /* case 12 (fmrs).  */
      do_vfp_nsyn_opcode ("fmrs");
      /* ARMv8.2 fp16 vmov.f16 instruction.  */
      if (rs == NS_RH)
	do_scalar_fp16_v82_encode ();
      break;

    case NS_HR:
    case NS_FR:  /* case 13 (fmsr).  */
      do_vfp_nsyn_opcode ("fmsr");
      /* ARMv8.2 fp16 vmov.f16 instruction.  */
      if (rs == NS_HR)
	do_scalar_fp16_v82_encode ();
      break;

    case NS_RRSS:
      do_mve_mov (0);
      break;
    case NS_SSRR:
      do_mve_mov (1);
      break;

    /* The encoders for the fmrrs and fmsrr instructions expect three operands
       (one of which is a list), but we have parsed four.  Do some fiddling to
       make the operands what do_vfp_reg2_from_sp2 and do_vfp_sp2_from_reg2
       expect.  */
    case NS_RRFF:  /* case 14 (fmrrs).  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v2)
		  && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
		  _(BAD_FPU));
      constraint (inst.operands[3].reg != inst.operands[2].reg + 1,
		  _("VFP registers must be adjacent"));
      inst.operands[2].imm = 2;
      memset (&inst.operands[3], '\0', sizeof (inst.operands[3]));
      do_vfp_nsyn_opcode ("fmrrs");
      break;

    case NS_FFRR:  /* case 15 (fmsrr).  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v2)
		  && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
		  _(BAD_FPU));
      constraint (inst.operands[1].reg != inst.operands[0].reg + 1,
		  _("VFP registers must be adjacent"));
      inst.operands[1] = inst.operands[2];
      inst.operands[2] = inst.operands[3];
      inst.operands[0].imm = 2;
      memset (&inst.operands[3], '\0', sizeof (inst.operands[3]));
      do_vfp_nsyn_opcode ("fmsrr");
      break;

    case NS_NULL:
      /* neon_select_shape has determined that the instruction
	 shape is wrong and has already set the error message.  */
      break;

    default:
      abort ();
    }
}
/* MVE VMOVL{T,E}: lengthening move (Q <- Q).  If the operands do not match
   the Q,Q lengthening form, fall back to treating it as a conditional VMOV
   (the trailing 'l' was really the LT condition).  */
static void
do_mve_movl (void)
{
  if (!(inst.operands[0].present && inst.operands[0].isquad
	&& inst.operands[1].present && inst.operands[1].isquad
	&& !inst.operands[2].present))
    {
      /* Not the Q,Q form: reinterpret as VMOV with condition LT (0xb).  */
      inst.instruction = 0;
      inst.cond = 0xb;
      if (thumb_mode)
	set_pred_insn_type (INSIDE_IT_INSN);
      do_neon_mov ();
      return;
    }

  if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    return;

  if (inst.cond != COND_ALWAYS)
    inst.pred_insn_type = INSIDE_VPT_INSN;

  struct neon_type_el et = neon_check_type (2, NS_QQ, N_EQK, N_S8 | N_U8
					    | N_S16 | N_U16 | N_KEY);

  inst.instruction |= (et.type == NT_unsigned) << 28;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= (neon_logbits (et.size) + 1) << 19;
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  inst.instruction |= LOW4 (inst.operands[1].reg);
  inst.is_neon = 1;
}
/* V{R}SHR-style rounding right shift by immediate.  A shift count of zero
   is assembled as a plain VMOV.  */
static void
do_neon_rshift_round_imm (void)
{
  if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
    return;

  enum neon_shape rs;
  struct neon_type_el et;

  /* MVE only has the Q-register form and a narrower set of types.  */
  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      rs = neon_select_shape (NS_QQI, NS_NULL);
      et = neon_check_type (2, rs, N_EQK, N_SU_MVE | N_KEY);
    }
  else
    {
      rs = neon_select_shape (NS_DDI, NS_QQI, NS_NULL);
      et = neon_check_type (2, rs, N_EQK, N_SU_ALL | N_KEY);
    }

  int imm = inst.operands[2].imm;

  /* imm == 0 case is encoded as VMOV for V{R}SHR.  */
  if (imm == 0)
    {
      inst.operands[2].present = 0;
      do_neon_mov ();
      return;
    }

  constraint (imm < 1 || (unsigned)imm > et.size,
	      _("immediate out of range for shift"));
  /* The encoding stores the shift as (element size - count).  */
  neon_imm_shift (true, et.type == NT_unsigned, neon_quad (rs), et,
		  et.size - imm);
}
/* Encode the ARMv8.2 scalar fp16 VMOV.F16 <Sd>, <Sm>.  A condition on
   this instruction is only warned about in Thumb mode (behaviour is
   UNPREDICTABLE) and is a hard error in ARM state.  */
static void
do_neon_movhf (void)
{
  enum neon_shape rs = neon_select_shape (NS_HH, NS_NULL);
  constraint (rs != NS_HH, _("invalid suffix"));

  if (inst.cond != COND_ALWAYS)
    {
      if (thumb_mode)
        {
          as_warn (_("ARMv8.2 scalar fp16 instruction cannot be conditional,"
                     " the behaviour is UNPREDICTABLE"));
        }
      else
        {
          inst.error = BAD_COND;
          return;
        }
    }

  do_vfp_sp_monadic ();

  inst.is_neon = 1;
  inst.instruction |= 0xf0000000;  /* Force condition field to 0xF.  */
}
/* Encode VMOVL (widening move): <Qd> gets the sign/zero-extended
   elements of <Dm>.  */
static void
do_neon_movl (void)
{
  struct neon_type_el et = neon_check_type (2, NS_QD,
    N_EQK | N_DBL, N_SU_32 | N_KEY);
  /* Source element size in bytes (1, 2 or 4) selects one of bits
     19-21.  */
  unsigned sizebits = et.size >> 3;
  inst.instruction |= sizebits << 19;
  neon_two_same (0, et.type == NT_unsigned, -1);
}
  17820. static void
  17821. do_neon_trn (void)
  17822. {
  17823. enum neon_shape rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  17824. struct neon_type_el et = neon_check_type (2, rs,
  17825. N_EQK, N_8 | N_16 | N_32 | N_KEY);
  17826. NEON_ENCODE (INTEGER, inst);
  17827. neon_two_same (neon_quad (rs), 1, et.size);
  17828. }
  17829. static void
  17830. do_neon_zip_uzp (void)
  17831. {
  17832. enum neon_shape rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  17833. struct neon_type_el et = neon_check_type (2, rs,
  17834. N_EQK, N_8 | N_16 | N_32 | N_KEY);
  17835. if (rs == NS_DD && et.size == 32)
  17836. {
  17837. /* Special case: encode as VTRN.32 <Dd>, <Dm>. */
  17838. inst.instruction = N_MNEM_vtrn;
  17839. do_neon_trn ();
  17840. return;
  17841. }
  17842. neon_two_same (neon_quad (rs), 1, et.size);
  17843. }
  17844. static void
  17845. do_neon_sat_abs_neg (void)
  17846. {
  17847. if (!check_simd_pred_availability (false, NEON_CHECK_CC | NEON_CHECK_ARCH))
  17848. return;
  17849. enum neon_shape rs;
  17850. if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  17851. rs = neon_select_shape (NS_QQ, NS_NULL);
  17852. else
  17853. rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  17854. struct neon_type_el et = neon_check_type (2, rs,
  17855. N_EQK, N_S8 | N_S16 | N_S32 | N_KEY);
  17856. neon_two_same (neon_quad (rs), 1, et.size);
  17857. }
  17858. static void
  17859. do_neon_pair_long (void)
  17860. {
  17861. enum neon_shape rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  17862. struct neon_type_el et = neon_check_type (2, rs, N_EQK, N_SU_32 | N_KEY);
  17863. /* Unsigned is encoded in OP field (bit 7) for these instruction. */
  17864. inst.instruction |= (et.type == NT_unsigned) << 7;
  17865. neon_two_same (neon_quad (rs), 1, et.size);
  17866. }
  17867. static void
  17868. do_neon_recip_est (void)
  17869. {
  17870. enum neon_shape rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  17871. struct neon_type_el et = neon_check_type (2, rs,
  17872. N_EQK | N_FLT, N_F_16_32 | N_U32 | N_KEY);
  17873. inst.instruction |= (et.type == NT_float) << 8;
  17874. neon_two_same (neon_quad (rs), 1, et.size);
  17875. }
  17876. static void
  17877. do_neon_cls (void)
  17878. {
  17879. if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
  17880. return;
  17881. enum neon_shape rs;
  17882. if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  17883. rs = neon_select_shape (NS_QQ, NS_NULL);
  17884. else
  17885. rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  17886. struct neon_type_el et = neon_check_type (2, rs,
  17887. N_EQK, N_S8 | N_S16 | N_S32 | N_KEY);
  17888. neon_two_same (neon_quad (rs), 1, et.size);
  17889. }
  17890. static void
  17891. do_neon_clz (void)
  17892. {
  17893. if (!check_simd_pred_availability (false, NEON_CHECK_ARCH | NEON_CHECK_CC))
  17894. return;
  17895. enum neon_shape rs;
  17896. if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  17897. rs = neon_select_shape (NS_QQ, NS_NULL);
  17898. else
  17899. rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  17900. struct neon_type_el et = neon_check_type (2, rs,
  17901. N_EQK, N_I8 | N_I16 | N_I32 | N_KEY);
  17902. neon_two_same (neon_quad (rs), 1, et.size);
  17903. }
  17904. static void
  17905. do_neon_cnt (void)
  17906. {
  17907. enum neon_shape rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  17908. struct neon_type_el et = neon_check_type (2, rs,
  17909. N_EQK | N_INT, N_8 | N_KEY);
  17910. neon_two_same (neon_quad (rs), 1, et.size);
  17911. }
  17912. static void
  17913. do_neon_swp (void)
  17914. {
  17915. enum neon_shape rs = neon_select_shape (NS_DD, NS_QQ, NS_NULL);
  17916. if (rs == NS_NULL)
  17917. return;
  17918. neon_two_same (neon_quad (rs), 1, -1);
  17919. }
  17920. static void
  17921. do_neon_tbl_tbx (void)
  17922. {
  17923. unsigned listlenbits;
  17924. neon_check_type (3, NS_DLD, N_EQK, N_EQK, N_8 | N_KEY);
  17925. if (inst.operands[1].imm < 1 || inst.operands[1].imm > 4)
  17926. {
  17927. first_error (_("bad list length for table lookup"));
  17928. return;
  17929. }
  17930. listlenbits = inst.operands[1].imm - 1;
  17931. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  17932. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  17933. inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
  17934. inst.instruction |= HI1 (inst.operands[1].reg) << 7;
  17935. inst.instruction |= LOW4 (inst.operands[2].reg);
  17936. inst.instruction |= HI1 (inst.operands[2].reg) << 5;
  17937. inst.instruction |= listlenbits << 8;
  17938. neon_dp_fixup (&inst);
  17939. }
/* Encode VLDM/VSTM (load/store multiple VFP registers).  Single-
   precision register lists are handed off to the VFP non-syntax helper;
   double-precision lists are encoded here.  */
static void
do_neon_ldm_stm (void)
{
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd)
              && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext),
              _(BAD_FPU));
  /* P, U and L bits are part of bitmask.  */
  int is_dbmode = (inst.instruction & (1 << 24)) != 0;
  /* Each D register takes two words in the transfer count field.  */
  unsigned offsetbits = inst.operands[1].imm * 2;

  if (inst.operands[1].issingle)
    {
      do_vfp_nsyn_ldm_stm (is_dbmode);
      return;
    }

  constraint (is_dbmode && !inst.operands[0].writeback,
              _("writeback (!) must be used for VLDMDB and VSTMDB"));

  constraint (inst.operands[1].imm < 1 || inst.operands[1].imm > 16,
              _("register list must contain at least 1 and at most 16 "
                "registers"));

  inst.instruction |= inst.operands[0].reg << 16;  /* Base register Rn.  */
  inst.instruction |= inst.operands[0].writeback << 21;  /* W bit.  */
  inst.instruction |= LOW4 (inst.operands[1].reg) << 12;
  inst.instruction |= HI1 (inst.operands[1].reg) << 22;
  inst.instruction |= offsetbits;

  do_vfp_cond_or_thumb ();
}
  17966. static void
  17967. do_vfp_nsyn_push_pop_check (void)
  17968. {
  17969. constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_v1xd), _(BAD_FPU));
  17970. if (inst.operands[1].issingle)
  17971. {
  17972. constraint (inst.operands[1].imm < 1 || inst.operands[1].imm > 32,
  17973. _("register list must contain at least 1 and at most 32 registers"));
  17974. }
  17975. else
  17976. {
  17977. constraint (inst.operands[1].imm < 1 || inst.operands[1].imm > 16,
  17978. _("register list must contain at least 1 and at most 16 registers"));
  17979. }
  17980. }
  17981. static void
  17982. do_vfp_nsyn_pop (void)
  17983. {
  17984. nsyn_insert_sp ();
  17985. if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  17986. return do_vfp_nsyn_opcode ("vldm");
  17987. do_vfp_nsyn_push_pop_check ();
  17988. if (inst.operands[1].issingle)
  17989. do_vfp_nsyn_opcode ("fldmias");
  17990. else
  17991. do_vfp_nsyn_opcode ("fldmiad");
  17992. }
  17993. static void
  17994. do_vfp_nsyn_push (void)
  17995. {
  17996. nsyn_insert_sp ();
  17997. if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  17998. return do_vfp_nsyn_opcode ("vstmdb");
  17999. do_vfp_nsyn_push_pop_check ();
  18000. if (inst.operands[1].issingle)
  18001. do_vfp_nsyn_opcode ("fstmdbs");
  18002. else
  18003. do_vfp_nsyn_opcode ("fstmdbd");
  18004. }
/* Encode VLDR/VSTR of a single VFP register via the FLDS/FSTS or
   FLDD/FSTD pseudo-opcodes, including the ARMv8.2 half-precision
   vldr.16/vstr.16 forms.  */
static void
do_neon_ldr_str (void)
{
  int is_ldr = (inst.instruction & (1 << 20)) != 0;  /* L bit.  */

  /* Use of PC in vstr in ARM mode is deprecated in ARMv7.
     And is UNPREDICTABLE in thumb mode.  */
  if (!is_ldr
      && inst.operands[1].reg == REG_PC
      && (ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v7) || thumb_mode))
    {
      if (thumb_mode)
        inst.error = _("Use of PC here is UNPREDICTABLE");
      else if (warn_on_deprecated)
        as_tsktsk (_("Use of PC here is deprecated"));
    }

  if (inst.operands[0].issingle)
    {
      if (is_ldr)
        do_vfp_nsyn_opcode ("flds");
      else
        do_vfp_nsyn_opcode ("fsts");

      /* ARMv8.2 vldr.16/vstr.16 instruction.  */
      if (inst.vectype.el[0].size == 16)
        do_scalar_fp16_v82_encode ();
    }
  else
    {
      if (is_ldr)
        do_vfp_nsyn_opcode ("fldd");
      else
        do_vfp_nsyn_opcode ("fstd");
    }
}
/* Encode the Thumb VLDR/VSTR (system register) form, e.g.
   VLDR FPSCR, [Rn, #imm].  Only immediate-offset addressing off a
   general register is accepted, and the offset must fit in 7 bits.  */
static void
do_t_vldr_vstr_sysreg (void)
{
  int fp_vldr_bitno = 20, sysreg_vldr_bitno = 20;
  bool is_vldr = ((inst.instruction & (1 << fp_vldr_bitno)) != 0);

  /* Use of PC is UNPREDICTABLE.  */
  if (inst.operands[1].reg == REG_PC)
    inst.error = _("Use of PC here is UNPREDICTABLE");

  /* Register-index and PC-relative (literal) forms do not exist.  */
  if (inst.operands[1].immisreg)
    inst.error = _("instruction does not accept register index");

  if (!inst.operands[1].isreg)
    inst.error = _("instruction does not accept PC-relative addressing");

  if (abs (inst.operands[1].imm) >= (1 << 7))
    inst.error = _("immediate value out of range");

  inst.instruction = 0xec000f80;  /* Base opcode; L bit added below.  */

  if (is_vldr)
    inst.instruction |= 1 << sysreg_vldr_bitno;

  encode_arm_cp_address (1, true, false, BFD_RELOC_ARM_T32_VLDR_VSTR_OFF_IMM);

  /* The system register number is split: low 3 bits into [15:13],
     bit 3 into bit 22.  */
  inst.instruction |= (inst.operands[0].imm & 0x7) << 13;
  inst.instruction |= (inst.operands[0].imm & 0x8) << 19;
}
  18059. static void
  18060. do_vldr_vstr (void)
  18061. {
  18062. bool sysreg_op = !inst.operands[0].isreg;
  18063. /* VLDR/VSTR (System Register). */
  18064. if (sysreg_op)
  18065. {
  18066. if (!mark_feature_used (&arm_ext_v8_1m_main))
  18067. as_bad (_("Instruction not permitted on this architecture"));
  18068. do_t_vldr_vstr_sysreg ();
  18069. }
  18070. /* VLDR/VSTR. */
  18071. else
  18072. {
  18073. if (!mark_feature_used (&fpu_vfp_ext_v1xd)
  18074. && !ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  18075. as_bad (_("Instruction not permitted on this architecture"));
  18076. do_neon_ldr_str ();
  18077. }
  18078. }
/* "interleave" version also handles non-interleaving register VLD1/VST1
   instructions.  Encodes the alignment and "type" fields of VLD<n>/
   VST<n> (multiple structures), validating alignment against the
   register-list length.  */
static void
do_neon_ld_st_interleave (void)
{
  struct neon_type_el et = neon_check_type (1, NS_NULL,
    N_8 | N_16 | N_32 | N_64);
  unsigned alignbits = 0;
  unsigned idx;
  /* The bits in this table go:
     0: register stride of one (0) or two (1)
     1,2: register list length, minus one (1, 2, 3, 4).
     3,4: <n> in instruction type, minus one (VLD<n> / VST<n>).
     We use -1 for invalid entries.  */
  const int typetable[] =
    {
      0x7, -1, 0xa, -1, 0x6, -1, 0x2, -1, /* VLD1 / VST1.  */
      -1, -1, 0x8, 0x9, -1, -1, 0x3, -1, /* VLD2 / VST2.  */
      -1, -1, -1, -1, 0x4, 0x5, -1, -1, /* VLD3 / VST3.  */
      -1, -1, -1, -1, -1, -1, 0x0, 0x1 /* VLD4 / VST4.  */
    };
  int typebits;

  if (et.type == NT_invtype)
    return;

  /* Validate the @<align> specifier: 64/128/256-bit alignment is only
     allowed with list lengths that transfer at least that much data.  */
  if (inst.operands[1].immisalign)
    switch (inst.operands[1].imm >> 8)
      {
      case 64: alignbits = 1; break;
      case 128:
        if (NEON_REGLIST_LENGTH (inst.operands[0].imm) != 2
            && NEON_REGLIST_LENGTH (inst.operands[0].imm) != 4)
          goto bad_alignment;
        alignbits = 2;
        break;
      case 256:
        if (NEON_REGLIST_LENGTH (inst.operands[0].imm) != 4)
          goto bad_alignment;
        alignbits = 3;
        break;
      default:
      bad_alignment:
        first_error (_("bad alignment"));
        return;
      }

  inst.instruction |= alignbits << 4;
  inst.instruction |= neon_logbits (et.size) << 6;

  /* Bits [4:6] of the immediate in a list specifier encode register stride
     (minus 1) in bit 4, and list length in bits [5:6].  We put the <n> of
     VLD<n>/VST<n> in bits [9:8] of the initial bitmask.  Suck it out here, look
     up the right value for "type" in a table based on this value and the given
     list style, then stick it back.  */
  idx = ((inst.operands[0].imm >> 4) & 7)
        | (((inst.instruction >> 8) & 3) << 3);
  typebits = typetable[idx];

  constraint (typebits == -1, _("bad list type for instruction"));
  constraint (((inst.instruction >> 8) & 3) && et.size == 64,
              BAD_EL_TYPE);

  inst.instruction &= ~0xf00;
  inst.instruction |= typebits << 8;
}
  18139. /* Check alignment is valid for do_neon_ld_st_lane and do_neon_ld_dup.
  18140. *DO_ALIGN is set to 1 if the relevant alignment bit should be set, 0
  18141. otherwise. The variable arguments are a list of pairs of legal (size, align)
  18142. values, terminated with -1. */
  18143. static int
  18144. neon_alignment_bit (int size, int align, int *do_alignment, ...)
  18145. {
  18146. va_list ap;
  18147. int result = FAIL, thissize, thisalign;
  18148. if (!inst.operands[1].immisalign)
  18149. {
  18150. *do_alignment = 0;
  18151. return SUCCESS;
  18152. }
  18153. va_start (ap, do_alignment);
  18154. do
  18155. {
  18156. thissize = va_arg (ap, int);
  18157. if (thissize == -1)
  18158. break;
  18159. thisalign = va_arg (ap, int);
  18160. if (size == thissize && align == thisalign)
  18161. result = SUCCESS;
  18162. }
  18163. while (result != SUCCESS);
  18164. va_end (ap);
  18165. if (result == SUCCESS)
  18166. *do_alignment = 1;
  18167. else
  18168. first_error (_("unsupported alignment for instruction"));
  18169. return result;
  18170. }
/* Encode VLD<n>/VST<n> (single structure to/from one lane): validates
   the list length, lane index and alignment, then fills in the
   per-size alignment/index fields.  */
static void
do_neon_ld_st_lane (void)
{
  struct neon_type_el et = neon_check_type (1, NS_NULL, N_8 | N_16 | N_32);
  int align_good, do_alignment = 0;
  int logsize = neon_logbits (et.size);
  int align = inst.operands[1].imm >> 8;
  int n = (inst.instruction >> 8) & 3;  /* <n> minus one, from bitmask.  */
  int max_el = 64 / et.size;  /* Lanes per D register.  */

  if (et.type == NT_invtype)
    return;

  constraint (NEON_REGLIST_LENGTH (inst.operands[0].imm) != n + 1,
              _("bad list length"));
  constraint (NEON_LANE (inst.operands[0].imm) >= max_el,
              _("scalar index out of range"));
  constraint (n != 0 && NEON_REG_STRIDE (inst.operands[0].imm) == 2
              && et.size == 8,
              _("stride of 2 unavailable when element size is 8"));

  switch (n)
    {
    case 0: /* VLD1 / VST1.  */
      align_good = neon_alignment_bit (et.size, align, &do_alignment, 16, 16,
                                       32, 32, -1);
      if (align_good == FAIL)
        return;
      if (do_alignment)
        {
          unsigned alignbits = 0;
          switch (et.size)
            {
            case 16: alignbits = 0x1; break;
            case 32: alignbits = 0x3; break;
            default: ;
            }
          inst.instruction |= alignbits << 4;
        }
      break;

    case 1: /* VLD2 / VST2.  */
      align_good = neon_alignment_bit (et.size, align, &do_alignment, 8, 16,
                                       16, 32, 32, 64, -1);
      if (align_good == FAIL)
        return;
      if (do_alignment)
        inst.instruction |= 1 << 4;
      break;

    case 2: /* VLD3 / VST3.  */
      constraint (inst.operands[1].immisalign,
                  _("can't use alignment with this instruction"));
      break;

    case 3: /* VLD4 / VST4.  */
      align_good = neon_alignment_bit (et.size, align, &do_alignment, 8, 32,
                                       16, 64, 32, 64, 32, 128, -1);
      if (align_good == FAIL)
        return;
      if (do_alignment)
        {
          unsigned alignbits = 0;
          switch (et.size)
            {
            case 8: alignbits = 0x1; break;
            case 16: alignbits = 0x1; break;
            case 32: alignbits = (align == 64) ? 0x1 : 0x2; break;
            default: ;
            }
          inst.instruction |= alignbits << 4;
        }
      break;

    default: ;
    }

  /* Reg stride of 2 is encoded in bit 5 when size==16, bit 6 when size==32.  */
  if (n != 0 && NEON_REG_STRIDE (inst.operands[0].imm) == 2)
    inst.instruction |= 1 << (4 + logsize);

  inst.instruction |= NEON_LANE (inst.operands[0].imm) << (logsize + 5);
  inst.instruction |= logsize << 10;
}
/* Encode single n-element structure to all lanes VLD<n> instructions.
   Validates list length and alignment per <n>, then fills in the size,
   stride ("T") and alignment ("a") fields.  */
static void
do_neon_ld_dup (void)
{
  struct neon_type_el et = neon_check_type (1, NS_NULL, N_8 | N_16 | N_32);
  int align_good, do_alignment = 0;

  if (et.type == NT_invtype)
    return;

  switch ((inst.instruction >> 8) & 3)  /* <n> minus one.  */
    {
    case 0: /* VLD1.  */
      gas_assert (NEON_REG_STRIDE (inst.operands[0].imm) != 2);
      align_good = neon_alignment_bit (et.size, inst.operands[1].imm >> 8,
                                       &do_alignment, 16, 16, 32, 32, -1);
      if (align_good == FAIL)
        return;
      switch (NEON_REGLIST_LENGTH (inst.operands[0].imm))
        {
        case 1: break;
        case 2: inst.instruction |= 1 << 5; break;
        default: first_error (_("bad list length")); return;
        }
      inst.instruction |= neon_logbits (et.size) << 6;
      break;

    case 1: /* VLD2.  */
      align_good = neon_alignment_bit (et.size, inst.operands[1].imm >> 8,
                                       &do_alignment, 8, 16, 16, 32, 32, 64,
                                       -1);
      if (align_good == FAIL)
        return;
      constraint (NEON_REGLIST_LENGTH (inst.operands[0].imm) != 2,
                  _("bad list length"));
      if (NEON_REG_STRIDE (inst.operands[0].imm) == 2)
        inst.instruction |= 1 << 5;
      inst.instruction |= neon_logbits (et.size) << 6;
      break;

    case 2: /* VLD3.  */
      constraint (inst.operands[1].immisalign,
                  _("can't use alignment with this instruction"));
      constraint (NEON_REGLIST_LENGTH (inst.operands[0].imm) != 3,
                  _("bad list length"));
      if (NEON_REG_STRIDE (inst.operands[0].imm) == 2)
        inst.instruction |= 1 << 5;
      inst.instruction |= neon_logbits (et.size) << 6;
      break;

    case 3: /* VLD4.  */
      {
        int align = inst.operands[1].imm >> 8;
        align_good = neon_alignment_bit (et.size, align, &do_alignment, 8, 32,
                                         16, 64, 32, 64, 32, 128, -1);
        if (align_good == FAIL)
          return;
        constraint (NEON_REGLIST_LENGTH (inst.operands[0].imm) != 4,
                    _("bad list length"));
        if (NEON_REG_STRIDE (inst.operands[0].imm) == 2)
          inst.instruction |= 1 << 5;
        /* 32-bit elements with 128-bit alignment use a special size
           encoding.  */
        if (et.size == 32 && align == 128)
          inst.instruction |= 0x3 << 6;
        else
          inst.instruction |= neon_logbits (et.size) << 6;
      }
      break;

    default: ;
    }

  inst.instruction |= do_alignment << 4;  /* The "a" bit.  */
}
  18312. /* Disambiguate VLD<n> and VST<n> instructions, and fill in common bits (those
  18313. apart from bits [11:4]. */
  18314. static void
  18315. do_neon_ldx_stx (void)
  18316. {
  18317. if (inst.operands[1].isreg)
  18318. constraint (inst.operands[1].reg == REG_PC, BAD_PC);
  18319. switch (NEON_LANE (inst.operands[0].imm))
  18320. {
  18321. case NEON_INTERLEAVE_LANES:
  18322. NEON_ENCODE (INTERLV, inst);
  18323. do_neon_ld_st_interleave ();
  18324. break;
  18325. case NEON_ALL_LANES:
  18326. NEON_ENCODE (DUP, inst);
  18327. if (inst.instruction == N_INV)
  18328. {
  18329. first_error ("only loads support such operands");
  18330. break;
  18331. }
  18332. do_neon_ld_dup ();
  18333. break;
  18334. default:
  18335. NEON_ENCODE (LANE, inst);
  18336. do_neon_ld_st_lane ();
  18337. }
  18338. /* L bit comes from bit mask. */
  18339. inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  18340. inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  18341. inst.instruction |= inst.operands[1].reg << 16;
  18342. if (inst.operands[1].postind)
  18343. {
  18344. int postreg = inst.operands[1].imm & 0xf;
  18345. constraint (!inst.operands[1].immisreg,
  18346. _("post-index must be a register"));
  18347. constraint (postreg == 0xd || postreg == 0xf,
  18348. _("bad register for post-index"));
  18349. inst.instruction |= postreg;
  18350. }
  18351. else
  18352. {
  18353. constraint (inst.operands[1].immisreg, BAD_ADDR_MODE);
  18354. constraint (inst.relocs[0].exp.X_op != O_constant
  18355. || inst.relocs[0].exp.X_add_number != 0,
  18356. BAD_ADDR_MODE);
  18357. if (inst.operands[1].writeback)
  18358. {
  18359. inst.instruction |= 0xd;
  18360. }
  18361. else
  18362. inst.instruction |= 0xf;
  18363. }
  18364. if (thumb_mode)
  18365. inst.instruction |= 0xf9000000;
  18366. else
  18367. inst.instruction |= 0xf4000000;
  18368. }
/* FP v8.  Common encoder for the FP v8 three-operand VFP instructions
   (shared by VSEL, VMAXNM etc.); RS selects between half, single and
   double precision shapes.  */
static void
do_vfp_nsyn_fpv8 (enum neon_shape rs)
{
  /* Targets like FPv5-SP-D16 don't support FP v8 instructions with
     D register operands.  */
  if (neon_shape_class[rs] == SC_DOUBLE)
    constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_armv8),
                _(BAD_FPU));

  NEON_ENCODE (FPV8, inst);

  if (rs == NS_FFF || rs == NS_HHH)
    {
      do_vfp_sp_dyadic ();

      /* ARMv8.2 fp16 instruction.  */
      if (rs == NS_HHH)
        do_scalar_fp16_v82_encode ();
    }
  else
    do_vfp_dp_rd_rn_rm ();

  if (rs == NS_DDD)
    inst.instruction |= 0x100;  /* Double-precision size bit.  */

  inst.instruction |= 0xf0000000;  /* Force condition field to 0xF.  */
}
  18392. static void
  18393. do_vsel (void)
  18394. {
  18395. set_pred_insn_type (OUTSIDE_PRED_INSN);
  18396. if (try_vfp_nsyn (3, do_vfp_nsyn_fpv8) != SUCCESS)
  18397. first_error (_("invalid instruction shape"));
  18398. }
  18399. static void
  18400. do_vmaxnm (void)
  18401. {
  18402. if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
  18403. set_pred_insn_type (OUTSIDE_PRED_INSN);
  18404. if (try_vfp_nsyn (3, do_vfp_nsyn_fpv8) == SUCCESS)
  18405. return;
  18406. if (!check_simd_pred_availability (true, NEON_CHECK_CC | NEON_CHECK_ARCH8))
  18407. return;
  18408. neon_dyadic_misc (NT_untyped, N_F_16_32, 0);
  18409. }
/* Common encoder for the VRINT family.  MODE selects the rounding
   variant.  Scalar (VFP) shapes are tried first; if the type check
   fails, fall back to the vector (Neon) encoding.  */
static void
do_vrint_1 (enum neon_cvt_mode mode)
{
  enum neon_shape rs = neon_select_shape (NS_HH, NS_FF, NS_DD, NS_QQ, NS_NULL);
  struct neon_type_el et;

  if (rs == NS_NULL)
    return;

  /* Targets like FPv5-SP-D16 don't support FP v8 instructions with
     D register operands.  */
  if (neon_shape_class[rs] == SC_DOUBLE)
    constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_armv8),
                _(BAD_FPU));

  et = neon_check_type (2, rs, N_EQK | N_VFP, N_F_ALL | N_KEY
                        | N_VFP);
  if (et.type != NT_invtype)
    {
      /* VFP encodings.  */
      /* The directed-rounding variants are not predicable.  */
      if (mode == neon_cvt_mode_a || mode == neon_cvt_mode_n
          || mode == neon_cvt_mode_p || mode == neon_cvt_mode_m)
        set_pred_insn_type (OUTSIDE_PRED_INSN);

      NEON_ENCODE (FPV8, inst);
      if (rs == NS_FF || rs == NS_HH)
        do_vfp_sp_monadic ();
      else
        do_vfp_dp_rd_rm ();

      switch (mode)
        {
        case neon_cvt_mode_r: inst.instruction |= 0x00000000; break;
        case neon_cvt_mode_z: inst.instruction |= 0x00000080; break;
        case neon_cvt_mode_x: inst.instruction |= 0x00010000; break;
        case neon_cvt_mode_a: inst.instruction |= 0xf0000000; break;
        case neon_cvt_mode_n: inst.instruction |= 0xf0010000; break;
        case neon_cvt_mode_p: inst.instruction |= 0xf0020000; break;
        case neon_cvt_mode_m: inst.instruction |= 0xf0030000; break;
        default: abort ();
        }

      inst.instruction |= (rs == NS_DD) << 8;  /* Double-precision bit.  */
      do_vfp_cond_or_thumb ();

      /* ARMv8.2 fp16 vrint instruction.  */
      if (rs == NS_HH)
        do_scalar_fp16_v82_encode ();
    }
  else
    {
      /* Neon encodings (or something broken...).  */
      inst.error = NULL;
      et = neon_check_type (2, rs, N_EQK, N_F_16_32 | N_KEY);

      if (et.type == NT_invtype)
        return;

      if (!check_simd_pred_availability (true,
                                        NEON_CHECK_CC | NEON_CHECK_ARCH8))
        return;

      NEON_ENCODE (FLOAT, inst);

      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= LOW4 (inst.operands[1].reg);
      inst.instruction |= HI1 (inst.operands[1].reg) << 5;
      inst.instruction |= neon_quad (rs) << 6;
      /* Mask off the original size bits and reencode them.  */
      inst.instruction = ((inst.instruction & 0xfff3ffff)
                          | neon_logbits (et.size) << 18);

      switch (mode)
        {
        case neon_cvt_mode_z: inst.instruction |= 3 << 7; break;
        case neon_cvt_mode_x: inst.instruction |= 1 << 7; break;
        case neon_cvt_mode_a: inst.instruction |= 2 << 7; break;
        case neon_cvt_mode_n: inst.instruction |= 0 << 7; break;
        case neon_cvt_mode_p: inst.instruction |= 7 << 7; break;
        case neon_cvt_mode_m: inst.instruction |= 5 << 7; break;
        case neon_cvt_mode_r: inst.error = _("invalid rounding mode"); break;
        default: abort ();
        }

      if (thumb_mode)
        inst.instruction |= 0xfc000000;
      else
        inst.instruction |= 0xf0000000;
    }
}
/* Per-mnemonic entry points for the VRINT family; each simply selects
   its rounding mode and defers to do_vrint_1.  */

/* VRINTX (mode X).  */
static void
do_vrintx (void)
{
  do_vrint_1 (neon_cvt_mode_x);
}

/* VRINTZ (mode Z).  */
static void
do_vrintz (void)
{
  do_vrint_1 (neon_cvt_mode_z);
}

/* VRINTR (mode R).  */
static void
do_vrintr (void)
{
  do_vrint_1 (neon_cvt_mode_r);
}

/* VRINTA (mode A).  */
static void
do_vrinta (void)
{
  do_vrint_1 (neon_cvt_mode_a);
}

/* VRINTN (mode N).  */
static void
do_vrintn (void)
{
  do_vrint_1 (neon_cvt_mode_n);
}

/* VRINTP (mode P).  */
static void
do_vrintp (void)
{
  do_vrint_1 (neon_cvt_mode_p);
}

/* VRINTM (mode M).  */
static void
do_vrintm (void)
{
  do_vrint_1 (neon_cvt_mode_m);
}
  18523. static unsigned
  18524. neon_scalar_for_vcmla (unsigned opnd, unsigned elsize)
  18525. {
  18526. unsigned regno = NEON_SCALAR_REG (opnd);
  18527. unsigned elno = NEON_SCALAR_INDEX (opnd);
  18528. if (elsize == 16 && elno < 2 && regno < 16)
  18529. return regno | (elno << 4);
  18530. else if (elsize == 32 && elno == 0)
  18531. return regno;
  18532. first_error (_("scalar out of range"));
  18533. return 0;
  18534. }
/* Encode VCMLA (complex multiply-accumulate with rotation), for the
   Armv8.3-A Neon extension or MVE.  The rotation operand must be 0, 90,
   180 or 270.  Handles both the by-scalar and three-register forms.  */
static void
do_vcmla (void)
{
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext)
              && (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_neon_ext_armv8)
                  || !mark_feature_used (&arm_ext_v8_3)), (BAD_FPU));
  constraint (inst.relocs[0].exp.X_op != O_constant,
              _("expression too complex"));
  unsigned rot = inst.relocs[0].exp.X_add_number;
  constraint (rot != 0 && rot != 90 && rot != 180 && rot != 270,
              _("immediate out of range"));
  rot /= 90;  /* Encode the rotation as 0..3.  */

  if (!check_simd_pred_availability (true,
                                    NEON_CHECK_ARCH8 | NEON_CHECK_CC))
    return;

  if (inst.operands[2].isscalar)
    {
      /* By-scalar form; not available in MVE.  */
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext))
        first_error (_("invalid instruction shape"));
      enum neon_shape rs = neon_select_shape (NS_DDSI, NS_QQSI, NS_NULL);
      unsigned size = neon_check_type (3, rs, N_EQK, N_EQK,
                                       N_KEY | N_F16 | N_F32).size;
      unsigned m = neon_scalar_for_vcmla (inst.operands[2].reg, size);
      inst.is_neon = 1;
      inst.instruction = 0xfe000800;
      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
      inst.instruction |= HI1 (inst.operands[1].reg) << 7;
      inst.instruction |= LOW4 (m);
      inst.instruction |= HI1 (m) << 5;
      inst.instruction |= neon_quad (rs) << 6;
      inst.instruction |= rot << 20;
      inst.instruction |= (size == 32) << 23;  /* Bit 23 set for F32.  */
    }
  else
    {
      enum neon_shape rs;
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext))
        rs = neon_select_shape (NS_QQQI, NS_NULL);
      else
        rs = neon_select_shape (NS_DDDI, NS_QQQI, NS_NULL);
      unsigned size = neon_check_type (3, rs, N_EQK, N_EQK,
                                       N_KEY | N_F16 | N_F32).size;
      /* MVE: destination overlapping a source with 32-bit elements gets
         a diagnostic.  */
      if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_fp_ext) && size == 32
          && (inst.operands[0].reg == inst.operands[1].reg
              || inst.operands[0].reg == inst.operands[2].reg))
        as_tsktsk (BAD_MVE_SRCDEST);
      neon_three_same (neon_quad (rs), 0, -1);
      inst.instruction &= 0x00ffffff; /* Undo neon_dp_fixup.  */
      inst.instruction |= 0xfc200800;
      inst.instruction |= rot << 23;
      inst.instruction |= (size == 32) << 20;  /* Bit 20 set for F32.  */
    }
}
/* Encode VCADD (complex add with rotation), for the Armv8.3-A Neon
   extension or MVE.  The rotation must be 90 or 270.  MVE additionally
   allows integer element types, encoded separately below.  */
static void
do_vcadd (void)
{
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext)
              && (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_neon_ext_armv8)
                  || !mark_feature_used (&arm_ext_v8_3)), (BAD_FPU));
  constraint (inst.relocs[0].exp.X_op != O_constant,
              _("expression too complex"));
  unsigned rot = inst.relocs[0].exp.X_add_number;
  constraint (rot != 90 && rot != 270, _("immediate out of range"));

  enum neon_shape rs;
  struct neon_type_el et;
  if (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      rs = neon_select_shape (NS_DDDI, NS_QQQI, NS_NULL);
      et = neon_check_type (3, rs, N_EQK, N_EQK, N_KEY | N_F16 | N_F32);
    }
  else
    {
      /* MVE: Q registers only, but integer element types are allowed
         too.  */
      rs = neon_select_shape (NS_QQQI, NS_NULL);
      et = neon_check_type (3, rs, N_EQK, N_EQK, N_KEY | N_F16 | N_F32 | N_I8
                            | N_I16 | N_I32);
      if (et.size == 32 && inst.operands[0].reg == inst.operands[2].reg)
        as_tsktsk (_("Warning: 32-bit element size and same first and third "
                     "operand makes instruction UNPREDICTABLE"));
    }

  if (et.type == NT_invtype)
    return;

  if (!check_simd_pred_availability (et.type == NT_float,
                                    NEON_CHECK_ARCH8 | NEON_CHECK_CC))
    return;

  if (et.type == NT_float)
    {
      neon_three_same (neon_quad (rs), 0, -1);
      inst.instruction &= 0x00ffffff; /* Undo neon_dp_fixup.  */
      inst.instruction |= 0xfc800800;
      inst.instruction |= (rot == 270) << 24;  /* Rotation bit.  */
      inst.instruction |= (et.size == 32) << 20;  /* Bit 20 set for F32.  */
    }
  else
    {
      /* MVE integer form.  */
      constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext), BAD_FPU);
      inst.instruction = 0xfe000f00;
      inst.instruction |= HI1 (inst.operands[0].reg) << 22;
      inst.instruction |= neon_logbits (et.size) << 20;
      inst.instruction |= LOW4 (inst.operands[1].reg) << 16;
      inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
      inst.instruction |= (rot == 270) << 12;  /* Rotation bit.  */
      inst.instruction |= HI1 (inst.operands[1].reg) << 7;
      inst.instruction |= HI1 (inst.operands[2].reg) << 5;
      inst.instruction |= LOW4 (inst.operands[2].reg);
      inst.is_neon = 1;
    }
}
/* Dot Product instructions encoding support.  */

/* Common encoder for VSDOT/VUDOT.  UNSIGNED_P selects the signedness
   ('U' bit); operand 2 may be either a vector register or an indexed
   scalar, which selects between the two high-byte encodings.  */
static void
do_neon_dotproduct (int unsigned_p)
{
  enum neon_shape rs;
  unsigned scalar_oprd2 = 0;
  int high8;
  if (inst.cond != COND_ALWAYS)
    as_warn (_("Dot Product instructions cannot be conditional, the behaviour "
	       "is UNPREDICTABLE"));
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_neon_ext_armv8),
	      _(BAD_FPU));
  /* Dot Product instructions are in three-same D/Q register format or the third
     operand can be a scalar index register.  */
  if (inst.operands[2].isscalar)
    {
      scalar_oprd2 = neon_scalar_for_mul (inst.operands[2].reg, 32);
      high8 = 0xfe000000;
      rs = neon_select_shape (NS_DDS, NS_QQS, NS_NULL);
    }
  else
    {
      high8 = 0xfc000000;
      rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
    }
  if (unsigned_p)
    neon_check_type (3, rs, N_EQK, N_EQK, N_KEY | N_U8);
  else
    neon_check_type (3, rs, N_EQK, N_EQK, N_KEY | N_S8);
  /* The "U" bit in traditional Three Same encoding is fixed to 0 for Dot
     Product instruction, so we pass 0 as the "ubit" parameter.  And the
     "Size" field are fixed to 0x2, so we pass 32 as the "size" parameter.  */
  neon_three_same (neon_quad (rs), 0, 32);
  /* Undo neon_dp_fixup.  Dot Product instructions are using a slightly
     different NEON three-same encoding.  */
  inst.instruction &= 0x00ffffff;
  inst.instruction |= high8;
  /* Encode 'U' bit which indicates signedness.  */
  inst.instruction |= (unsigned_p ? 1 : 0) << 4;
  /* Re-encode operand2 if it's indexed scalar operand.  What has been encoded
     from inst.operand[2].reg in neon_three_same is GAS's internal encoding, not
     the instruction encoding.  */
  if (inst.operands[2].isscalar)
    {
      inst.instruction &= 0xffffffd0;
      inst.instruction |= LOW4 (scalar_oprd2);
      inst.instruction |= HI1 (scalar_oprd2) << 5;
    }
}
/* Dot Product instructions for signed integer (VSDOT).  */
static void
do_neon_dotproduct_s (void)
{
  return do_neon_dotproduct (0);
}

/* Dot Product instructions for unsigned integer (VUDOT).  */
static void
do_neon_dotproduct_u (void)
{
  return do_neon_dotproduct (1);
}
/* Encode VUSDOT (mixed-sign dot product).  Operand 2 is either an
   indexed scalar (by-element form, index must be 0 or 1) or a plain
   vector register (three-register form).  */
static void
do_vusdot (void)
{
  enum neon_shape rs;
  set_pred_insn_type (OUTSIDE_PRED_INSN);
  if (inst.operands[2].isscalar)
    {
      rs = neon_select_shape (NS_DDS, NS_QQS, NS_NULL);
      neon_check_type (3, rs, N_EQK, N_EQK, N_S8 | N_KEY);
      inst.instruction |= (1 << 25);
      /* GAS packs the element index into the low nibble of the scalar
	 operand's reg field; extract and validate it.  */
      int idx = inst.operands[2].reg & 0xf;
      constraint ((idx != 1 && idx != 0), _("index must be 0 or 1"));
      inst.operands[2].reg >>= 4;
      constraint (!(inst.operands[2].reg < 16),
		  _("indexed register must be less than 16"));
      neon_three_args (rs == NS_QQS);
      inst.instruction |= (idx << 5);
    }
  else
    {
      inst.instruction |= (1 << 21);
      rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
      neon_check_type (3, rs, N_EQK, N_EQK, N_S8 | N_KEY);
      neon_three_args (rs == NS_QQQ);
    }
}
/* Encode VSUDOT.  Only the by-element (indexed scalar) form is handled
   here; there is deliberately no vector-register branch (the plain
   three-register mixed-sign form is covered by VUSDOT).  */
static void
do_vsudot (void)
{
  enum neon_shape rs;
  set_pred_insn_type (OUTSIDE_PRED_INSN);
  if (inst.operands[2].isscalar)
    {
      rs = neon_select_shape (NS_DDS, NS_QQS, NS_NULL);
      neon_check_type (3, rs, N_EQK, N_EQK, N_U8 | N_KEY);
      inst.instruction |= (1 << 25);
      /* GAS packs the element index into the low nibble of the scalar
	 operand's reg field; extract and validate it.  */
      int idx = inst.operands[2].reg & 0xf;
      constraint ((idx != 1 && idx != 0), _("index must be 0 or 1"));
      inst.operands[2].reg >>= 4;
      constraint (!(inst.operands[2].reg < 16),
		  _("indexed register must be less than 16"));
      neon_three_args (rs == NS_QQS);
      inst.instruction |= (idx << 5);
    }
}
  18750. static void
  18751. do_vsmmla (void)
  18752. {
  18753. enum neon_shape rs = neon_select_shape (NS_QQQ, NS_NULL);
  18754. neon_check_type (3, rs, N_EQK, N_EQK, N_S8 | N_KEY);
  18755. set_pred_insn_type (OUTSIDE_PRED_INSN);
  18756. neon_three_args (1);
  18757. }
  18758. static void
  18759. do_vummla (void)
  18760. {
  18761. enum neon_shape rs = neon_select_shape (NS_QQQ, NS_NULL);
  18762. neon_check_type (3, rs, N_EQK, N_EQK, N_U8 | N_KEY);
  18763. set_pred_insn_type (OUTSIDE_PRED_INSN);
  18764. neon_three_args (1);
  18765. }
/* Validate the register operand at index IDX of a CDE (custom datapath
   extension) instruction.  For single-register (non-dual) Thumb forms
   the register may be r0-r12, r14, or APSR_nzcv (parsed as r15 with the
   isvec flag set); dual forms require an even register r0-r10.  */
static void
check_cde_operand (size_t idx, int is_dual)
{
  unsigned Rx = inst.operands[idx].reg;
  bool isvec = inst.operands[idx].isvec;
  if (is_dual == 0 && thumb_mode)
    constraint (
	!((Rx <= 14 && Rx != 13) || (Rx == REG_PC && isvec)),
	_("Register must be r0-r14 except r13, or APSR_nzcv."));
  else
    constraint ( !((Rx <= 10 && Rx % 2 == 0 )),
	_("Register must be an even register between r0-r10."));
}
/* Return TRUE if CDE coprocessor number COPROC (0-7) is enabled for the
   selected CPU, marking the corresponding feature as used.  Out-of-range
   coprocessor numbers are simply not enabled.  */
static bool
cde_coproc_enabled (unsigned coproc)
{
  switch (coproc)
    {
      case 0: return mark_feature_used (&arm_ext_cde0);
      case 1: return mark_feature_used (&arm_ext_cde1);
      case 2: return mark_feature_used (&arm_ext_cde2);
      case 3: return mark_feature_used (&arm_ext_cde3);
      case 4: return mark_feature_used (&arm_ext_cde4);
      case 5: return mark_feature_used (&arm_ext_cde5);
      case 6: return mark_feature_used (&arm_ext_cde6);
      case 7: return mark_feature_used (&arm_ext_cde7);
      default: return false;
    }
}
/* Bit position of the coprocessor number field in CDE encodings.  */
#define cde_coproc_pos 8

/* Validate the coprocessor number (operand 0) of a CDE instruction and
   encode it into the instruction word.  */
static void
cde_handle_coproc (void)
{
  unsigned coproc = inst.operands[0].reg;
  constraint (coproc > 7, _("CDE Coprocessor must be in range 0-7"));
  constraint (!(cde_coproc_enabled (coproc)), BAD_CDE_COPROC);
  inst.instruction |= coproc << cde_coproc_pos;
}
#undef cde_coproc_pos
  18805. static void
  18806. cxn_handle_predication (bool is_accum)
  18807. {
  18808. if (is_accum && conditional_insn ())
  18809. set_pred_insn_type (INSIDE_IT_INSN);
  18810. else if (conditional_insn ())
  18811. /* conditional_insn essentially checks for a suffix, not whether the
  18812. instruction is inside an IT block or not.
  18813. The non-accumulator versions should not have suffixes. */
  18814. inst.error = BAD_SYNTAX;
  18815. else
  18816. set_pred_insn_type (OUTSIDE_PRED_INSN);
  18817. }
/* Encode CX1/CX1A/CX1D/CX1DA.  IS_DUAL selects the dual-register form
   (destination pair Rd, Rd+1); IS_ACCUM selects the accumulator variant,
   which only affects predication handling.  */
static void
do_custom_instruction_1 (int is_dual, bool is_accum)
{
  constraint (!mark_feature_used (&arm_ext_cde), _(BAD_CDE));
  unsigned imm, Rd;
  Rd = inst.operands[1].reg;
  check_cde_operand (1, is_dual);
  if (is_dual == 1)
    {
      constraint (inst.operands[2].reg != Rd + 1,
		  _("cx1d requires consecutive destination registers."));
      imm = inst.operands[3].imm;
    }
  else if (is_dual == 0)
    imm = inst.operands[2].imm;
  else
    abort ();
  inst.instruction |= Rd << 12;
  /* Scatter the 13-bit immediate across its three encoding fields.  */
  inst.instruction |= (imm & 0x1F80) << 9;
  inst.instruction |= (imm & 0x0040) << 1;
  inst.instruction |= (imm & 0x003f);
  cde_handle_coproc ();
  cxn_handle_predication (is_accum);
}
/* Encode CX2/CX2A/CX2D/CX2DA.  Like do_custom_instruction_1 but with an
   additional source register Rn and a smaller immediate.  */
static void
do_custom_instruction_2 (int is_dual, bool is_accum)
{
  constraint (!mark_feature_used (&arm_ext_cde), _(BAD_CDE));
  unsigned imm, Rd, Rn;
  Rd = inst.operands[1].reg;
  if (is_dual == 1)
    {
      constraint (inst.operands[2].reg != Rd + 1,
		  _("cx2d requires consecutive destination registers."));
      imm = inst.operands[4].imm;
      Rn = inst.operands[3].reg;
    }
  else if (is_dual == 0)
    {
      imm = inst.operands[3].imm;
      Rn = inst.operands[2].reg;
    }
  else
    abort ();
  /* Rn is always checked as a single register; only Rd is dual.  */
  check_cde_operand (2 + is_dual, /* is_dual = */0);
  check_cde_operand (1, is_dual);
  inst.instruction |= Rd << 12;
  inst.instruction |= Rn << 16;
  /* Scatter the 10-bit immediate across its three encoding fields.  */
  inst.instruction |= (imm & 0x0380) << 13;
  inst.instruction |= (imm & 0x0040) << 1;
  inst.instruction |= (imm & 0x003f);
  cde_handle_coproc ();
  cxn_handle_predication (is_accum);
}
/* Encode CX3/CX3A/CX3D/CX3DA.  Like do_custom_instruction_2 but with
   two source registers Rn, Rm and a 6-bit immediate.  */
static void
do_custom_instruction_3 (int is_dual, bool is_accum)
{
  constraint (!mark_feature_used (&arm_ext_cde), _(BAD_CDE));
  unsigned imm, Rd, Rn, Rm;
  Rd = inst.operands[1].reg;
  if (is_dual == 1)
    {
      constraint (inst.operands[2].reg != Rd + 1,
		  _("cx3d requires consecutive destination registers."));
      imm = inst.operands[5].imm;
      Rn = inst.operands[3].reg;
      Rm = inst.operands[4].reg;
    }
  else if (is_dual == 0)
    {
      imm = inst.operands[4].imm;
      Rn = inst.operands[2].reg;
      Rm = inst.operands[3].reg;
    }
  else
    abort ();
  check_cde_operand (1, is_dual);
  /* Rn and Rm are always checked as single registers.  */
  check_cde_operand (2 + is_dual, /* is_dual = */0);
  check_cde_operand (3 + is_dual, /* is_dual = */0);
  inst.instruction |= Rd;
  inst.instruction |= Rn << 16;
  inst.instruction |= Rm << 12;
  /* Scatter the 6-bit immediate across its three encoding fields.  */
  inst.instruction |= (imm & 0x0038) << 17;
  inst.instruction |= (imm & 0x0004) << 5;
  inst.instruction |= (imm & 0x0003) << 4;
  cde_handle_coproc ();
  cxn_handle_predication (is_accum);
}
/* Per-mnemonic entry points for the CDE CXn family.  The two arguments
   are (is_dual, is_accum): 'd' variants set is_dual, 'a' variants set
   is_accum.  */

static void
do_cx1 (void)
{
  return do_custom_instruction_1 (0, 0);
}

static void
do_cx1a (void)
{
  return do_custom_instruction_1 (0, 1);
}

static void
do_cx1d (void)
{
  return do_custom_instruction_1 (1, 0);
}

static void
do_cx1da (void)
{
  return do_custom_instruction_1 (1, 1);
}

static void
do_cx2 (void)
{
  return do_custom_instruction_2 (0, 0);
}

static void
do_cx2a (void)
{
  return do_custom_instruction_2 (0, 1);
}

static void
do_cx2d (void)
{
  return do_custom_instruction_2 (1, 0);
}

static void
do_cx2da (void)
{
  return do_custom_instruction_2 (1, 1);
}

static void
do_cx3 (void)
{
  return do_custom_instruction_3 (0, 0);
}

static void
do_cx3a (void)
{
  return do_custom_instruction_3 (0, 1);
}

static void
do_cx3d (void)
{
  return do_custom_instruction_3 (1, 0);
}

static void
do_cx3da (void)
{
  return do_custom_instruction_3 (1, 1);
}
/* Encode vector register REGNUM into the Vd:D field of a VCX
   instruction.  */
static void
vcx_assign_vec_d (unsigned regnum)
{
  inst.instruction |= HI4 (regnum) << 12;
  inst.instruction |= LOW1 (regnum) << 22;
}

/* Encode vector register REGNUM into the Vm:M field.  */
static void
vcx_assign_vec_m (unsigned regnum)
{
  inst.instruction |= HI4 (regnum);
  inst.instruction |= LOW1 (regnum) << 5;
}

/* Encode vector register REGNUM into the Vn:N field.  */
static void
vcx_assign_vec_n (unsigned regnum)
{
  inst.instruction |= HI4 (regnum) << 16;
  inst.instruction |= LOW1 (regnum) << 7;
}
/* Kind of vector register used by a VCX instruction: MVE Q registers,
   or floating-point D/S registers.  */
enum vcx_reg_type {
    q_reg,
    d_reg,
    s_reg
};
/* Map a VCX neon_shape (one register, two or three registers, each in
   Q/D/F flavours) to the register kind it uses.  */
static enum vcx_reg_type
vcx_get_reg_type (enum neon_shape ns)
{
  gas_assert (ns == NS_PQI
	      || ns == NS_PDI
	      || ns == NS_PFI
	      || ns == NS_PQQI
	      || ns == NS_PDDI
	      || ns == NS_PFFI
	      || ns == NS_PQQQI
	      || ns == NS_PDDDI
	      || ns == NS_PFFFI);
  if (ns == NS_PQI || ns == NS_PQQI || ns == NS_PQQQI)
    return q_reg;
  if (ns == NS_PDI || ns == NS_PDDI || ns == NS_PDDDI)
    return d_reg;
  /* Remaining shapes are the F (single-precision) ones.  */
  return s_reg;
}
/* Bit positions of the size and vector flags in VCX encodings.  */
#define vcx_size_pos 24
#define vcx_vec_pos 6

/* Set the size/vector bits appropriate for REG_TYPE and return the
   multiplier to apply to register numbers before encoding them in the
   Vd:D (or Vn:N / Vm:M) fields.  */
static unsigned
vcx_handle_shape (enum vcx_reg_type reg_type)
{
  unsigned mult = 2;
  if (reg_type == q_reg)
    inst.instruction |= 1 << vcx_vec_pos;
  else if (reg_type == d_reg)
    inst.instruction |= 1 << vcx_size_pos;
  else
    mult = 1;
  /* NOTE:
     The documentation says that the Q registers are encoded as 2*N in the D:Vd
     bits (or equivalent for N and M registers).
     Similarly the D registers are encoded as N in D:Vd bits.
     While the S registers are encoded as N in the Vd:D bits.
     Taking into account the maximum values of these registers we can see a
     nicer pattern for calculation:
       Q -> 7, D -> 15, S -> 31
     If we say that everything is encoded in the Vd:D bits, then we can say
     that Q is encoded as 4*N, and D is encoded as 2*N.
     This way the bits will end up the same, and calculation is simpler.
     (calculation is now:
	1. Multiply by a number determined by the register letter.
	2. Encode resulting number in Vd:D bits.)
     This is made a little more complicated by automatic handling of 'Q'
     registers elsewhere, which means the register number is already 2*N where
     N is the number the user wrote after the register letter.  */
  return mult;
}
#undef vcx_vec_pos
#undef vcx_size_pos
  19041. static void
  19042. vcx_ensure_register_in_range (unsigned R, enum vcx_reg_type reg_type)
  19043. {
  19044. if (reg_type == q_reg)
  19045. {
  19046. gas_assert (R % 2 == 0);
  19047. constraint (R >= 16, _("'q' register must be in range 0-7"));
  19048. }
  19049. else if (reg_type == d_reg)
  19050. constraint (R >= 16, _("'d' register must be in range 0-15"));
  19051. else
  19052. constraint (R >= 32, _("'s' register must be in range 0-31"));
  19053. }
/* Field encoders indexed by operand position: [0] = Vd, [1] = Vm,
   [2] = Vn.  */
static void (*vcx_assign_vec[3]) (unsigned) = {
    vcx_assign_vec_d,
    vcx_assign_vec_m,
    vcx_assign_vec_n
};
/* Validate and encode the NUM_REGISTERS vector operands (operands 1..N)
   of a VCX instruction.  */
static void
vcx_handle_register_arguments (unsigned num_registers,
			       enum vcx_reg_type reg_type)
{
  unsigned R, i;
  unsigned reg_mult = vcx_handle_shape (reg_type);
  for (i = 0; i < num_registers; i++)
    {
      R = inst.operands[i+1].reg;
      vcx_ensure_register_in_range (R, reg_type);
      if (num_registers == 3 && i > 0)
	{
	  /* For the three-register form the second operand is Vn and the
	     third is Vm, which is the reverse of the encoder table's
	     order, so swap indices 1 and 2.  */
	  if (i == 2)
	    vcx_assign_vec[1] (R * reg_mult);
	  else
	    vcx_assign_vec[2] (R * reg_mult);
	  continue;
	}
      vcx_assign_vec[i](R * reg_mult);
    }
}
  19080. static void
  19081. vcx_handle_insn_block (enum vcx_reg_type reg_type)
  19082. {
  19083. if (reg_type == q_reg)
  19084. if (inst.cond > COND_ALWAYS)
  19085. inst.pred_insn_type = INSIDE_VPT_INSN;
  19086. else
  19087. inst.pred_insn_type = MVE_OUTSIDE_PRED_INSN;
  19088. else if (inst.cond == COND_ALWAYS)
  19089. inst.pred_insn_type = OUTSIDE_PRED_INSN;
  19090. else
  19091. inst.error = BAD_NOT_IT;
  19092. }
/* Checks and encoding shared by VCX1/VCX2/VCX3: CDE availability, the
   coprocessor operand, the NUM_ARGS vector operands, predication, and
   the feature requirements implied by the register kind.  */
static void
vcx_handle_common_checks (unsigned num_args, enum neon_shape rs)
{
  constraint (!mark_feature_used (&arm_ext_cde), _(BAD_CDE));
  cde_handle_coproc ();
  enum vcx_reg_type reg_type = vcx_get_reg_type (rs);
  vcx_handle_register_arguments (num_args, reg_type);
  vcx_handle_insn_block (reg_type);
  if (reg_type == q_reg)
    constraint (!mark_feature_used (&mve_ext),
		_("vcx instructions with Q registers require MVE"));
  else
    constraint (!(ARM_FSET_CPU_SUBSET (armv8m_fp, cpu_variant)
		  && mark_feature_used (&armv8m_fp))
		&& !mark_feature_used (&mve_ext),
		_("vcx instructions with S or D registers require either MVE"
		  " or Armv8-M floating point extension."));
}
/* Encode VCX1.  The immediate is 12 bits for the Q form but only 11
   bits for the S/D forms, hence the extra range check.  */
static void
do_vcx1 (void)
{
  enum neon_shape rs = neon_select_shape (NS_PQI, NS_PDI, NS_PFI, NS_NULL);
  vcx_handle_common_checks (1, rs);
  unsigned imm = inst.operands[2].imm;
  /* Scatter the immediate across its encoding fields.  */
  inst.instruction |= (imm & 0x03f);
  inst.instruction |= (imm & 0x040) << 1;
  inst.instruction |= (imm & 0x780) << 9;
  if (rs != NS_PQI)
    constraint (imm >= 2048,
		_("vcx1 with S or D registers takes immediate within 0-2047"));
  inst.instruction |= (imm & 0x800) << 13;
}
/* Encode VCX2.  The immediate is 7 bits for the Q form but only 6 bits
   for the S/D forms, hence the extra range check.  */
static void
do_vcx2 (void)
{
  enum neon_shape rs = neon_select_shape (NS_PQQI, NS_PDDI, NS_PFFI, NS_NULL);
  vcx_handle_common_checks (2, rs);
  unsigned imm = inst.operands[3].imm;
  /* Scatter the immediate across its encoding fields.  */
  inst.instruction |= (imm & 0x01) << 4;
  inst.instruction |= (imm & 0x02) << 6;
  inst.instruction |= (imm & 0x3c) << 14;
  if (rs != NS_PQQI)
    constraint (imm >= 64,
		_("vcx2 with S or D registers takes immediate within 0-63"));
  inst.instruction |= (imm & 0x40) << 18;
}
  19139. static void
  19140. do_vcx3 (void)
  19141. {
  19142. enum neon_shape rs = neon_select_shape (NS_PQQQI, NS_PDDDI, NS_PFFFI, NS_NULL);
  19143. vcx_handle_common_checks (3, rs);
  19144. unsigned imm = inst.operands[4].imm;
  19145. inst.instruction |= (imm & 0x1) << 4;
  19146. inst.instruction |= (imm & 0x6) << 19;
  19147. if (rs != NS_PQQQI)
  19148. constraint (imm >= 8,
  19149. _("vcx2 with S or D registers takes immediate within 0-7"));
  19150. inst.instruction |= (imm & 0x8) << 21;
  19151. }
/* Crypto v1 instructions.  */

/* Common encoder for two-operand crypto instructions (AES*, SHA1H,
   SHA1SU1, SHA256SU0).  ELTTYPE is the required element type; OP is the
   opcode field value, or -1 if the instruction has no op field.  */
static void
do_crypto_2op_1 (unsigned elttype, int op)
{
  set_pred_insn_type (OUTSIDE_PRED_INSN);

  if (neon_check_type (2, NS_QQ, N_EQK | N_UNT, elttype | N_UNT | N_KEY).type
      == NT_invtype)
    return;

  inst.error = NULL;

  NEON_ENCODE (INTEGER, inst);
  inst.instruction |= LOW4 (inst.operands[0].reg) << 12;
  inst.instruction |= HI1 (inst.operands[0].reg) << 22;
  inst.instruction |= LOW4 (inst.operands[1].reg);
  inst.instruction |= HI1 (inst.operands[1].reg) << 5;
  if (op != -1)
    inst.instruction |= op << 6;

  /* Thumb and ARM encodings differ only in the top byte.  */
  if (thumb_mode)
    inst.instruction |= 0xfc000000;
  else
    inst.instruction |= 0xf0000000;
}
/* Common encoder for three-operand crypto instructions (SHA1*,
   SHA256*).  U is the U bit; OP selects the size field via 8 << op.  */
static void
do_crypto_3op_1 (int u, int op)
{
  set_pred_insn_type (OUTSIDE_PRED_INSN);

  if (neon_check_type (3, NS_QQQ, N_EQK | N_UNT, N_EQK | N_UNT,
		       N_32 | N_UNT | N_KEY).type == NT_invtype)
    return;

  inst.error = NULL;

  NEON_ENCODE (INTEGER, inst);
  neon_three_same (1, u, 8 << op);
}
/* Per-mnemonic entry points for the crypto instructions; each passes
   the element type / opcode (two-operand) or U bit / size selector
   (three-operand) for its instruction.  */

static void
do_aese (void)
{
  do_crypto_2op_1 (N_8, 0);
}

static void
do_aesd (void)
{
  do_crypto_2op_1 (N_8, 1);
}

static void
do_aesmc (void)
{
  do_crypto_2op_1 (N_8, 2);
}

static void
do_aesimc (void)
{
  do_crypto_2op_1 (N_8, 3);
}

static void
do_sha1c (void)
{
  do_crypto_3op_1 (0, 0);
}

static void
do_sha1p (void)
{
  do_crypto_3op_1 (0, 1);
}

static void
do_sha1m (void)
{
  do_crypto_3op_1 (0, 2);
}

static void
do_sha1su0 (void)
{
  do_crypto_3op_1 (0, 3);
}

static void
do_sha256h (void)
{
  do_crypto_3op_1 (1, 0);
}

static void
do_sha256h2 (void)
{
  do_crypto_3op_1 (1, 1);
}

static void
do_sha256su1 (void)
{
  do_crypto_3op_1 (1, 2);
}

static void
do_sha1h (void)
{
  do_crypto_2op_1 (N_32, -1);
}

static void
do_sha1su1 (void)
{
  do_crypto_2op_1 (N_32, 0);
}

static void
do_sha256su0 (void)
{
  do_crypto_2op_1 (N_32, 1);
}
/* Common encoder for the CRC32 family.  POLY selects the Castagnoli
   polynomial (the 'C' variants); SZ is the operand size code (0 = byte,
   1 = halfword, 2 = word).  Field positions differ between the ARM and
   Thumb encodings.  */
static void
do_crc32_1 (unsigned int poly, unsigned int sz)
{
  unsigned int Rd = inst.operands[0].reg;
  unsigned int Rn = inst.operands[1].reg;
  unsigned int Rm = inst.operands[2].reg;

  set_pred_insn_type (OUTSIDE_PRED_INSN);
  inst.instruction |= LOW4 (Rd) << (thumb_mode ? 8 : 12);
  inst.instruction |= LOW4 (Rn) << 16;
  inst.instruction |= LOW4 (Rm);
  inst.instruction |= sz << (thumb_mode ? 4 : 21);
  inst.instruction |= poly << (thumb_mode ? 20 : 9);

  /* Using r15 is UNPREDICTABLE for any operand.  */
  if (Rd == REG_PC || Rn == REG_PC || Rm == REG_PC)
    as_warn (UNPRED_REG ("r15"));
}
/* Per-mnemonic entry points: (poly, size) for each CRC32 variant.  */

static void
do_crc32b (void)
{
  do_crc32_1 (0, 0);
}

static void
do_crc32h (void)
{
  do_crc32_1 (0, 1);
}

static void
do_crc32w (void)
{
  do_crc32_1 (0, 2);
}

static void
do_crc32cb (void)
{
  do_crc32_1 (1, 0);
}

static void
do_crc32ch (void)
{
  do_crc32_1 (1, 1);
}

static void
do_crc32cw (void)
{
  do_crc32_1 (1, 2);
}
/* Encode VJCVT (Javascript-style double to signed-32 convert); needs
   the Armv8.3 VFP extension.  */
static void
do_vjcvt (void)
{
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_vfp_ext_armv8),
	      _(BAD_FPU));
  neon_check_type (2, NS_FD, N_S32, N_F64);
  do_vfp_sp_dp_cvt ();
  do_vfp_cond_or_thumb ();
}
/* Encode VDOT (BFloat16 dot product).  Operand 2 may be an indexed
   scalar (by-element form, index 0 or 1) or a vector register.  */
static void
do_vdot (void)
{
  enum neon_shape rs;
  constraint (!mark_feature_used (&fpu_neon_ext_armv8), _(BAD_FPU));
  set_pred_insn_type (OUTSIDE_PRED_INSN);
  if (inst.operands[2].isscalar)
    {
      rs = neon_select_shape (NS_DDS, NS_QQS, NS_NULL);
      neon_check_type (3, rs, N_EQK, N_EQK, N_BF16 | N_KEY);
      inst.instruction |= (1 << 25);
      /* GAS packs the element index into the low nibble of the scalar
	 operand's reg field; extract and validate it.  */
      int idx = inst.operands[2].reg & 0xf;
      constraint ((idx != 1 && idx != 0), _("index must be 0 or 1"));
      inst.operands[2].reg >>= 4;
      constraint (!(inst.operands[2].reg < 16),
		  _("indexed register must be less than 16"));
      neon_three_args (rs == NS_QQS);
      inst.instruction |= (idx << 5);
    }
  else
    {
      rs = neon_select_shape (NS_DDD, NS_QQQ, NS_NULL);
      neon_check_type (3, rs, N_EQK, N_EQK, N_BF16 | N_KEY);
      neon_three_args (rs == NS_QQQ);
    }
}
  19334. static void
  19335. do_vmmla (void)
  19336. {
  19337. enum neon_shape rs = neon_select_shape (NS_QQQ, NS_NULL);
  19338. neon_check_type (3, rs, N_EQK, N_EQK, N_BF16 | N_KEY);
  19339. constraint (!mark_feature_used (&fpu_neon_ext_armv8), _(BAD_FPU));
  19340. set_pred_insn_type (OUTSIDE_PRED_INSN);
  19341. neon_three_args (1);
  19342. }
/* Encode a no-operand PACBTI instruction (Thumb, 32-bit form).  */
static void
do_t_pacbti (void)
{
  inst.instruction = THUMB_OP32 (inst.instruction);
}
/* Encode a three-register PACBTI instruction with Rd in bits 12-15.  */
static void
do_t_pacbti_nonop (void)
{
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, pacbti_ext),
	      _(BAD_PACBTI));

  inst.instruction = THUMB_OP32 (inst.instruction);
  inst.instruction |= inst.operands[0].reg << 12;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= inst.operands[2].reg;
}
/* Encode PACG, which places Rd in bits 8-11 rather than 12-15.  */
static void
do_t_pacbti_pacg (void)
{
  constraint (!ARM_CPU_HAS_FEATURE (cpu_variant, pacbti_ext),
	      _(BAD_PACBTI));

  inst.instruction = THUMB_OP32 (inst.instruction);
  inst.instruction |= inst.operands[0].reg << 8;
  inst.instruction |= inst.operands[1].reg << 16;
  inst.instruction |= inst.operands[2].reg;
}
/* Overall per-instruction processing.  */

/* We need to be able to fix up arbitrary expressions in some statements.
   This is so that we can handle symbols that are an arbitrary distance from
   the pc.  The most common cases are of the form ((+/-sym -/+ . - 8) & mask),
   which returns part of an address in a form which will be valid for
   a data instruction.  We do this by pushing the expression into a symbol
   in the expr_section, and creating a fix for that.  */

/* Create a fix of SIZE bytes at offset WHERE in FRAG for expression EXP
   with relocation type RELOC; PC_REL marks pc-relative fixes.  The fix
   is tagged with the current Thumb/ARM mode.  */
static void
fix_new_arm (fragS *	   frag,
	     int	   where,
	     short int	   size,
	     expressionS * exp,
	     int	   pc_rel,
	     int	   reloc)
{
  fixS *	   new_fix;

  switch (exp->X_op)
    {
    case O_constant:
      if (pc_rel)
	{
	  /* Create an absolute valued symbol, so we have something to
	     refer to in the object file.  Unfortunately for us, gas's
	     generic expression parsing will already have folded out
	     any use of .set foo/.type foo %function that may have
	     been used to set type information of the target location,
	     that's being specified symbolically.  We have to presume
	     the user knows what they are doing.  */
	  char name[16 + 8];
	  symbolS *symbol;

	  sprintf (name, "*ABS*0x%lx", (unsigned long)exp->X_add_number);

	  symbol = symbol_find_or_make (name);
	  S_SET_SEGMENT (symbol, absolute_section);
	  symbol_set_frag (symbol, &zero_address_frag);
	  S_SET_VALUE (symbol, exp->X_add_number);
	  exp->X_op = O_symbol;
	  exp->X_add_symbol = symbol;
	  exp->X_add_number = 0;
	}
      /* FALLTHROUGH */
    case O_symbol:
    case O_add:
    case O_subtract:
      new_fix = fix_new_exp (frag, where, size, exp, pc_rel,
			     (enum bfd_reloc_code_real) reloc);
      break;

    default:
      /* Anything more complex gets wrapped in an expression symbol.  */
      new_fix = (fixS *) fix_new (frag, where, size, make_expr_symbol (exp), 0,
				  pc_rel, (enum bfd_reloc_code_real) reloc);
      break;
    }

  /* Mark whether the fix is to a THUMB instruction, or an ARM
     instruction.  */
  new_fix->tc_fix_data = thumb_mode;
}
/* Create a frag for an instruction requiring relaxation.  */
/* Emit the current (Thumb, possibly widenable) instruction as a
   machine-dependent variant frag so the relaxation pass can later grow
   it from 16 to 32 bits.  */
static void
output_relax_insn (void)
{
  char * to;
  symbolS *sym;
  int offset;

  /* The size of the instruction is unknown, so tie the debug info to the
     start of the instruction.  */
  dwarf2_emit_insn (0);

  /* Split the relocation expression into a symbol and offset for
     frag_var.  */
  switch (inst.relocs[0].exp.X_op)
    {
    case O_symbol:
      sym = inst.relocs[0].exp.X_add_symbol;
      offset = inst.relocs[0].exp.X_add_number;
      break;
    case O_constant:
      sym = NULL;
      offset = inst.relocs[0].exp.X_add_number;
      break;
    default:
      sym = make_expr_symbol (&inst.relocs[0].exp);
      offset = 0;
      break;
    }
  to = frag_var (rs_machine_dependent, INSN_SIZE, THUMB_SIZE,
		 inst.relax, sym, offset, NULL/*offset, opcode*/);
  md_number_to_chars (to, inst.instruction, THUMB_SIZE);
}
  19452. /* Write a 32-bit thumb instruction to buf. */
  19453. static void
  19454. put_thumb32_insn (char * buf, unsigned long insn)
  19455. {
  19456. md_number_to_chars (buf, insn >> 16, THUMB_SIZE);
  19457. md_number_to_chars (buf + THUMB_SIZE, insn, THUMB_SIZE);
  19458. }
/* Write the assembled instruction (held in the global `inst') to the
   output frag, emitting any diagnostics, relaxation frags, fixups and
   DWARF line info as needed.  STR is the source line, used only in
   error messages.  */
static void
output_inst (const char * str)
{
  char * to = NULL;

  if (inst.error)
    {
      as_bad ("%s -- `%s'", inst.error, str);
      return;
    }
  if (inst.relax)
    {
      /* Relaxable instructions go out via a variant frag instead.  */
      output_relax_insn ();
      return;
    }
  if (inst.size == 0)
    return;

  to = frag_more (inst.size);
  /* PR 9814: Record the thumb mode into the current frag so that we know
     what type of NOP padding to use, if necessary.  We override any previous
     setting so that if the mode has changed then the NOPS that we use will
     match the encoding of the last instruction in the frag.  */
  frag_now->tc_frag_data.thumb_mode = thumb_mode | MODE_RECORDED;

  if (thumb_mode && (inst.size > THUMB_SIZE))
    {
      gas_assert (inst.size == (2 * THUMB_SIZE));
      put_thumb32_insn (to, inst.instruction);
    }
  else if (inst.size > INSN_SIZE)
    {
      /* A "double-length" ARM instruction is the same word twice.  */
      gas_assert (inst.size == (2 * INSN_SIZE));
      md_number_to_chars (to, inst.instruction, INSN_SIZE);
      md_number_to_chars (to + INSN_SIZE, inst.instruction, INSN_SIZE);
    }
  else
    md_number_to_chars (to, inst.instruction, inst.size);

  /* Emit a fix for every pending relocation on this instruction.  */
  int r;
  for (r = 0; r < ARM_IT_MAX_RELOCS; r++)
    {
      if (inst.relocs[r].type != BFD_RELOC_UNUSED)
	fix_new_arm (frag_now, to - frag_now->fr_literal,
		     inst.size, & inst.relocs[r].exp, inst.relocs[r].pc_rel,
		     inst.relocs[r].type);
    }

  dwarf2_emit_insn (inst.size);
}
  19504. static char *
  19505. output_it_inst (int cond, int mask, char * to)
  19506. {
  19507. unsigned long instruction = 0xbf00;
  19508. mask &= 0xf;
  19509. instruction |= mask;
  19510. instruction |= cond << 4;
  19511. if (to == NULL)
  19512. {
  19513. to = frag_more (2);
  19514. #ifdef OBJ_ELF
  19515. dwarf2_emit_insn (2);
  19516. #endif
  19517. }
  19518. md_number_to_chars (to, instruction, 2);
  19519. return to;
  19520. }
/* Tag values used in struct asm_opcode's tag field.  These describe how
   (and where) a conditional affix may attach to the mnemonic.  */
enum opcode_tag
{
  OT_unconditional,	/* Instruction cannot be conditionalized.
			   The ARM condition field is still 0xE.  */
  OT_unconditionalF,	/* Instruction cannot be conditionalized
			   and carries 0xF in its ARM condition field.  */
  OT_csuffix,		/* Instruction takes a conditional suffix.  */
  OT_csuffixF,		/* Some forms of the instruction take a scalar
			   conditional suffix, others place 0xF where the
			   condition field would be, others take a vector
			   conditional suffix.  */
  OT_cinfix3,		/* Instruction takes a conditional infix,
			   beginning at character index 3.  (In
			   unified mode, it becomes a suffix.)  */
  OT_cinfix3_deprecated, /* The same as OT_cinfix3.  This is used for
			    tsts, cmps, cmns, and teqs.  */
  OT_cinfix3_legacy,	/* Legacy instruction takes a conditional infix at
			   character index 3, even in unified mode.  Used for
			   legacy instructions where suffix and infix forms
			   may be ambiguous.  */
  OT_csuf_or_in3,	/* Instruction takes either a conditional
			   suffix or an infix at character index 3.  */
  OT_odd_infix_unc,	/* This is the unconditional variant of an
			   instruction that takes a conditional infix
			   at an unusual position.  In unified mode,
			   this variant will accept a suffix.  */
  OT_odd_infix_0	/* Values greater than or equal to OT_odd_infix_0
			   are the conditional variants of instructions that
			   take conditional infixes in unusual positions.
			   The infix appears at character index
			   (tag - OT_odd_infix_0).  These are not accepted
			   in unified mode.  */
};
  19555. /* Subroutine of md_assemble, responsible for looking up the primary
  19556. opcode from the mnemonic the user wrote. STR points to the
  19557. beginning of the mnemonic.
  19558. This is not simply a hash table lookup, because of conditional
  19559. variants. Most instructions have conditional variants, which are
  19560. expressed with a _conditional affix_ to the mnemonic. If we were
  19561. to encode each conditional variant as a literal string in the opcode
  19562. table, it would have approximately 20,000 entries.
  19563. Most mnemonics take this affix as a suffix, and in unified syntax,
  19564. 'most' is upgraded to 'all'. However, in the divided syntax, some
  19565. instructions take the affix as an infix, notably the s-variants of
  19566. the arithmetic instructions. Of those instructions, all but six
  19567. have the infix appear after the third character of the mnemonic.
  19568. Accordingly, the algorithm for looking up primary opcodes given
  19569. an identifier is:
  19570. 1. Look up the identifier in the opcode table.
  19571. If we find a match, go to step U.
  19572. 2. Look up the last two characters of the identifier in the
  19573. conditions table. If we find a match, look up the first N-2
  19574. characters of the identifier in the opcode table. If we
  19575. find a match, go to step CE.
  19576. 3. Look up the fourth and fifth characters of the identifier in
  19577. the conditions table. If we find a match, extract those
  19578. characters from the identifier, and look up the remaining
  19579. characters in the opcode table. If we find a match, go
  19580. to step CM.
  19581. 4. Fail.
  19582. U. Examine the tag field of the opcode structure, in case this is
  19583. one of the six instructions with its conditional infix in an
  19584. unusual place. If it is, the tag tells us where to find the
  19585. infix; look it up in the conditions table and set inst.cond
  19586. accordingly. Otherwise, this is an unconditional instruction.
  19587. Again set inst.cond accordingly. Return the opcode structure.
  19588. CE. Examine the tag field to make sure this is an instruction that
  19589. should receive a conditional suffix. If it is not, fail.
  19590. Otherwise, set inst.cond from the suffix we already looked up,
  19591. and return the opcode structure.
  19592. CM. Examine the tag field to make sure this is an instruction that
  19593. should receive a conditional infix after the third character.
  19594. If it is not, fail. Otherwise, undo the edits to the current
  19595. line of input and proceed as for case CE. */
/* Look up the primary opcode for the mnemonic at *STR, handling
   conditional suffixes and infixes as described in the comment above.
   On success, advances *STR past the mnemonic (and any width/Neon type
   suffix), sets inst.cond / inst.size_req / inst.vectype as appropriate
   and returns the opcode entry; returns NULL on no match.  */
static const struct asm_opcode *
opcode_lookup (char **str)
{
  char *end, *base;
  char *affix;
  const struct asm_opcode *opcode;
  const struct asm_cond *cond;
  char save[2];

  /* Scan up to the end of the mnemonic, which must end in white space,
     '.' (in unified mode, or for Neon/VFP instructions), or end of string.  */
  for (base = end = *str; *end != '\0'; end++)
    if (*end == ' ' || *end == '.')
      break;

  if (end == base)
    return NULL;

  /* Handle a possible width suffix and/or Neon type suffix.  */
  if (end[0] == '.')
    {
      int offset = 2;

      /* The .w and .n suffixes are only valid if the unified syntax is in
	 use.  */
      if (unified_syntax && end[1] == 'w')
	inst.size_req = 4;
      else if (unified_syntax && end[1] == 'n')
	inst.size_req = 2;
      else
	offset = 0;

      inst.vectype.elems = 0;

      *str = end + offset;

      if (end[offset] == '.')
	{
	  /* See if we have a Neon type suffix (possible in either unified or
	     non-unified ARM syntax mode).  */
	  if (parse_neon_type (&inst.vectype, str) == FAIL)
	    return NULL;
	}
      else if (end[offset] != '\0' && end[offset] != ' ')
	return NULL;
    }
  else
    *str = end;

  /* Look for unaffixed or special-case affixed mnemonic.  */
  opcode = (const struct asm_opcode *) str_hash_find_n (arm_ops_hsh, base,
							end - base);
  cond = NULL;
  if (opcode)
    {
      /* step U */
      if (opcode->tag < OT_odd_infix_0)
	{
	  inst.cond = COND_ALWAYS;
	  return opcode;
	}

      if (warn_on_deprecated && unified_syntax)
	as_tsktsk (_("conditional infixes are deprecated in unified syntax"));
      /* The tag encodes the character index of the infix for the six
	 odd-position instructions; it must name a valid condition.  */
      affix = base + (opcode->tag - OT_odd_infix_0);
      cond = (const struct asm_cond *) str_hash_find_n (arm_cond_hsh, affix, 2);
      gas_assert (cond);

      inst.cond = cond->value;
      return opcode;
    }
  if (ARM_CPU_HAS_FEATURE (cpu_variant, mve_ext))
    {
      /* Cannot have a vector predication suffix on a mnemonic of fewer
	 than two characters (the suffix is one character, and at least
	 one character of mnemonic must precede it).  */
      if (end - base < 2)
	return NULL;
      affix = end - 1;
      cond = (const struct asm_cond *) str_hash_find_n (arm_vcond_hsh, affix, 1);
      opcode = (const struct asm_opcode *) str_hash_find_n (arm_ops_hsh, base,
							    affix - base);
      /* If this opcode can not be vector predicated then don't accept it with a
	 vector predication code.  */
      if (opcode && !opcode->mayBeVecPred)
	opcode = NULL;
    }
  if (!opcode || !cond)
    {
      /* Cannot have a conditional suffix on a mnemonic of less than two
	 characters.  */
      if (end - base < 3)
	return NULL;

      /* Look for suffixed mnemonic.  */
      affix = end - 2;
      cond = (const struct asm_cond *) str_hash_find_n (arm_cond_hsh, affix, 2);
      opcode = (const struct asm_opcode *) str_hash_find_n (arm_ops_hsh, base,
							    affix - base);
    }

  if (opcode && cond)
    {
      /* step CE */
      switch (opcode->tag)
	{
	case OT_cinfix3_legacy:
	  /* Ignore conditional suffixes matched on infix only mnemonics.  */
	  break;

	case OT_cinfix3:
	case OT_cinfix3_deprecated:
	case OT_odd_infix_unc:
	  if (!unified_syntax)
	    return NULL;
	  /* Fall through.  */

	case OT_csuffix:
	case OT_csuffixF:
	case OT_csuf_or_in3:
	  inst.cond = cond->value;
	  return opcode;

	case OT_unconditional:
	case OT_unconditionalF:
	  if (thumb_mode)
	    inst.cond = cond->value;
	  else
	    {
	      /* Delayed diagnostic.  */
	      inst.error = BAD_COND;
	      inst.cond = COND_ALWAYS;
	    }
	  return opcode;

	default:
	  return NULL;
	}
    }

  /* Cannot have a usual-position infix on a mnemonic of less than
     six characters (five would be a suffix).  */
  if (end - base < 6)
    return NULL;

  /* Look for infixed mnemonic in the usual position.  */
  affix = base + 3;
  cond = (const struct asm_cond *) str_hash_find_n (arm_cond_hsh, affix, 2);
  if (!cond)
    return NULL;

  /* Temporarily remove the two infix characters from the input line so
     the remainder can be looked up as a mnemonic, then restore them.  */
  memcpy (save, affix, 2);
  memmove (affix, affix + 2, (end - affix) - 2);
  opcode = (const struct asm_opcode *) str_hash_find_n (arm_ops_hsh, base,
							(end - base) - 2);
  memmove (affix + 2, affix, (end - affix) - 2);
  memcpy (affix, save, 2);

  if (opcode
      && (opcode->tag == OT_cinfix3
	  || opcode->tag == OT_cinfix3_deprecated
	  || opcode->tag == OT_csuf_or_in3
	  || opcode->tag == OT_cinfix3_legacy))
    {
      /* Step CM.  */
      if (warn_on_deprecated && unified_syntax
	  && (opcode->tag == OT_cinfix3
	      || opcode->tag == OT_cinfix3_deprecated))
	as_tsktsk (_("conditional infixes are deprecated in unified syntax"));

      inst.cond = cond->value;
      return opcode;
    }

  return NULL;
}
  19749. /* This function generates an initial IT instruction, leaving its block
  19750. virtually open for the new instructions. Eventually,
  19751. the mask will be updated by now_pred_add_mask () each time
  19752. a new instruction needs to be included in the IT block.
  19753. Finally, the block is closed with close_automatic_it_block ().
  19754. The block closure can be requested either from md_assemble (),
  19755. a tencode (), or due to a label hook. */
  19756. static void
  19757. new_automatic_it_block (int cond)
  19758. {
  19759. now_pred.state = AUTOMATIC_PRED_BLOCK;
  19760. now_pred.mask = 0x18;
  19761. now_pred.cc = cond;
  19762. now_pred.block_length = 1;
  19763. mapping_state (MAP_THUMB);
  19764. now_pred.insn = output_it_inst (cond, now_pred.mask, NULL);
  19765. now_pred.warn_deprecated = false;
  19766. now_pred.insn_cond = true;
  19767. }
  19768. /* Close an automatic IT block.
  19769. See comments in new_automatic_it_block (). */
  19770. static void
  19771. close_automatic_it_block (void)
  19772. {
  19773. now_pred.mask = 0x10;
  19774. now_pred.block_length = 0;
  19775. }
  19776. /* Update the mask of the current automatically-generated IT
  19777. instruction. See comments in new_automatic_it_block (). */
  19778. static void
  19779. now_pred_add_mask (int cond)
  19780. {
  19781. #define CLEAR_BIT(value, nbit) ((value) & ~(1 << (nbit)))
  19782. #define SET_BIT_VALUE(value, bitvalue, nbit) (CLEAR_BIT (value, nbit) \
  19783. | ((bitvalue) << (nbit)))
  19784. const int resulting_bit = (cond & 1);
  19785. now_pred.mask &= 0xf;
  19786. now_pred.mask = SET_BIT_VALUE (now_pred.mask,
  19787. resulting_bit,
  19788. (5 - now_pred.block_length));
  19789. now_pred.mask = SET_BIT_VALUE (now_pred.mask,
  19790. 1,
  19791. ((5 - now_pred.block_length) - 1));
  19792. output_it_inst (now_pred.cc, now_pred.mask, now_pred.insn);
  19793. #undef CLEAR_BIT
  19794. #undef SET_BIT_VALUE
  19795. }
19796. /* The IT blocks handling machinery is accessed through these functions:
  19797. it_fsm_pre_encode () from md_assemble ()
  19798. set_pred_insn_type () optional, from the tencode functions
  19799. set_pred_insn_type_last () ditto
  19800. in_pred_block () ditto
  19801. it_fsm_post_encode () from md_assemble ()
  19802. force_automatic_it_block_close () from label handling functions
  19803. Rationale:
  19804. 1) md_assemble () calls it_fsm_pre_encode () before calling tencode (),
  19805. initializing the IT insn type with a generic initial value depending
  19806. on the inst.condition.
  19807. 2) During the tencode function, two things may happen:
  19808. a) The tencode function overrides the IT insn type by
  19809. calling either set_pred_insn_type (type) or
  19810. set_pred_insn_type_last ().
  19811. b) The tencode function queries the IT block state by
  19812. calling in_pred_block () (i.e. to determine narrow/not narrow mode).
  19813. Both set_pred_insn_type and in_pred_block run the internal FSM state
  19814. handling function (handle_pred_state), because: a) setting the IT insn
19815. type may result in an invalid state (exiting the function),
  19816. and b) querying the state requires the FSM to be updated.
  19817. Specifically we want to avoid creating an IT block for conditional
  19818. branches, so it_fsm_pre_encode is actually a guess and we can't
  19819. determine whether an IT block is required until the tencode () routine
19820. has decided what type of instruction this actually is.
  19821. Because of this, if set_pred_insn_type and in_pred_block have to be
  19822. used, set_pred_insn_type has to be called first.
  19823. set_pred_insn_type_last () is a wrapper of set_pred_insn_type (type),
  19824. that determines the insn IT type depending on the inst.cond code.
  19825. When a tencode () routine encodes an instruction that can be
  19826. either outside an IT block, or, in the case of being inside, has to be
  19827. the last one, set_pred_insn_type_last () will determine the proper
  19828. IT instruction type based on the inst.cond code. Otherwise,
  19829. set_pred_insn_type can be called for overriding that logic or
  19830. for covering other cases.
  19831. Calling handle_pred_state () may not transition the IT block state to
  19832. OUTSIDE_PRED_BLOCK immediately, since the (current) state could be
  19833. still queried. Instead, if the FSM determines that the state should
  19834. be transitioned to OUTSIDE_PRED_BLOCK, a flag is marked to be closed
  19835. after the tencode () function: that's what it_fsm_post_encode () does.
  19836. Since in_pred_block () calls the state handling function to get an
  19837. updated state, an error may occur (due to invalid insns combination).
  19838. In that case, inst.error is set.
  19839. Therefore, inst.error has to be checked after the execution of
  19840. the tencode () routine.
  19841. 3) Back in md_assemble(), it_fsm_post_encode () is called to commit
  19842. any pending state change (if any) that didn't take place in
  19843. handle_pred_state () as explained above. */
  19844. static void
  19845. it_fsm_pre_encode (void)
  19846. {
  19847. if (inst.cond != COND_ALWAYS)
  19848. inst.pred_insn_type = INSIDE_IT_INSN;
  19849. else
  19850. inst.pred_insn_type = OUTSIDE_PRED_INSN;
  19851. now_pred.state_handled = 0;
  19852. }
  19853. /* IT state FSM handling function. */
  19854. /* MVE instructions and non-MVE instructions are handled differently because of
  19855. the introduction of VPT blocks.
  19856. Specifications say that any non-MVE instruction inside a VPT block is
  19857. UNPREDICTABLE, with the exception of the BKPT instruction. Whereas most MVE
  19858. instructions are deemed to be UNPREDICTABLE if inside an IT block. For the
  19859. few exceptions we have MVE_UNPREDICABLE_INSN.
  19860. The error messages provided depending on the different combinations possible
  19861. are described in the cases below:
  19862. For 'most' MVE instructions:
  19863. 1) In an IT block, with an IT code: syntax error
  19864. 2) In an IT block, with a VPT code: error: must be in a VPT block
  19865. 3) In an IT block, with no code: warning: UNPREDICTABLE
  19866. 4) In a VPT block, with an IT code: syntax error
  19867. 5) In a VPT block, with a VPT code: OK!
  19868. 6) In a VPT block, with no code: error: missing code
  19869. 7) Outside a pred block, with an IT code: error: syntax error
  19870. 8) Outside a pred block, with a VPT code: error: should be in a VPT block
  19871. 9) Outside a pred block, with no code: OK!
  19872. For non-MVE instructions:
  19873. 10) In an IT block, with an IT code: OK!
  19874. 11) In an IT block, with a VPT code: syntax error
  19875. 12) In an IT block, with no code: error: missing code
  19876. 13) In a VPT block, with an IT code: error: should be in an IT block
  19877. 14) In a VPT block, with a VPT code: syntax error
  19878. 15) In a VPT block, with no code: UNPREDICTABLE
  19879. 16) Outside a pred block, with an IT code: error: should be in an IT block
  19880. 17) Outside a pred block, with a VPT code: syntax error
  19881. 18) Outside a pred block, with no code: OK!
  19882. */
/* Run one step of the IT/VPT predication FSM for the instruction just
   parsed (inst.pred_insn_type / inst.cond), updating now_pred.  Returns
   SUCCESS or FAIL (with inst.error set).  The case numbers in the
   comments refer to the table in the block comment above.  */
static int
handle_pred_state (void)
{
  now_pred.state_handled = 1;
  now_pred.insn_cond = false;

  switch (now_pred.state)
    {
    case OUTSIDE_PRED_BLOCK:
      switch (inst.pred_insn_type)
	{
	case MVE_UNPREDICABLE_INSN:
	case MVE_OUTSIDE_PRED_INSN:
	  if (inst.cond < COND_ALWAYS)
	    {
	      /* Case 7: Outside a pred block, with an IT code: error: syntax
		 error.  */
	      inst.error = BAD_SYNTAX;
	      return FAIL;
	    }
	  /* Case 9:  Outside a pred block, with no code: OK!  */
	  break;
	case OUTSIDE_PRED_INSN:
	  if (inst.cond > COND_ALWAYS)
	    {
	      /* Case 17:  Outside a pred block, with a VPT code: syntax error.
	       */
	      inst.error = BAD_SYNTAX;
	      return FAIL;
	    }
	  /* Case 18: Outside a pred block, with no code: OK!  */
	  break;
	case INSIDE_VPT_INSN:
	  /* Case 8: Outside a pred block, with a VPT code: error: should be in
	     a VPT block.  */
	  inst.error = BAD_OUT_VPT;
	  return FAIL;
	case INSIDE_IT_INSN:
	case INSIDE_IT_LAST_INSN:
	  if (inst.cond < COND_ALWAYS)
	    {
	      /* Case 16: Outside a pred block, with an IT code: error: should
		 be in an IT block.  */
	      if (thumb_mode == 0)
		{
		  if (unified_syntax
		      && !(implicit_it_mode & IMPLICIT_IT_MODE_ARM))
		    as_tsktsk (_("Warning: conditional outside an IT block"\
				 " for Thumb."));
		}
	      else
		{
		  if ((implicit_it_mode & IMPLICIT_IT_MODE_THUMB)
		      && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2))
		    {
		      /* Automatically generate the IT instruction.  */
		      new_automatic_it_block (inst.cond);
		      if (inst.pred_insn_type == INSIDE_IT_LAST_INSN)
			close_automatic_it_block ();
		    }
		  else
		    {
		      inst.error = BAD_OUT_IT;
		      return FAIL;
		    }
		}
	      break;
	    }
	  else if (inst.cond > COND_ALWAYS)
	    {
	      /* Case 17: Outside a pred block, with a VPT code: syntax error.
	       */
	      inst.error = BAD_SYNTAX;
	      return FAIL;
	    }
	  else
	    /* INSIDE_IT_* implies a condition code, so COND_ALWAYS is
	       impossible here; gas_assert aborts, no fall-through occurs.  */
	    gas_assert (0);
	case IF_INSIDE_IT_LAST_INSN:
	case NEUTRAL_IT_INSN:
	  break;
	case VPT_INSN:
	  if (inst.cond != COND_ALWAYS)
	    first_error (BAD_SYNTAX);
	  now_pred.state = MANUAL_PRED_BLOCK;
	  now_pred.block_length = 0;
	  now_pred.type = VECTOR_PRED;
	  now_pred.cc = 0;
	  break;
	case IT_INSN:
	  now_pred.state = MANUAL_PRED_BLOCK;
	  now_pred.block_length = 0;
	  now_pred.type = SCALAR_PRED;
	  break;
	}
      break;

    case AUTOMATIC_PRED_BLOCK:
      /* Three things may happen now:
	 a) We should increment current it block size;
	 b) We should close current it block (closing insn or 4 insns);
	 c) We should close current it block and start a new one (due
	 to incompatible conditions or
	 4 insns-length block reached).  */

      switch (inst.pred_insn_type)
	{
	case INSIDE_VPT_INSN:
	case VPT_INSN:
	case MVE_UNPREDICABLE_INSN:
	case MVE_OUTSIDE_PRED_INSN:
	  /* MVE/VPT insn types never reach an automatic (scalar) IT
	     block; gas_assert aborts, so no fall-through occurs.  */
	  gas_assert (0);
	case OUTSIDE_PRED_INSN:
	  /* The closure of the block shall happen immediately,
	     so any in_pred_block () call reports the block as closed.  */
	  force_automatic_it_block_close ();
	  break;

	case INSIDE_IT_INSN:
	case INSIDE_IT_LAST_INSN:
	case IF_INSIDE_IT_LAST_INSN:
	  now_pred.block_length++;

	  if (now_pred.block_length > 4
	      || !now_pred_compatible (inst.cond))
	    {
	      force_automatic_it_block_close ();
	      if (inst.pred_insn_type != IF_INSIDE_IT_LAST_INSN)
		new_automatic_it_block (inst.cond);
	    }
	  else
	    {
	      now_pred.insn_cond = true;
	      now_pred_add_mask (inst.cond);
	    }

	  /* Close the block when this was declared its last insn (unless
	     force_automatic_it_block_close already left the block).  */
	  if (now_pred.state == AUTOMATIC_PRED_BLOCK
	      && (inst.pred_insn_type == INSIDE_IT_LAST_INSN
		  || inst.pred_insn_type == IF_INSIDE_IT_LAST_INSN))
	    close_automatic_it_block ();
	  break;

	case NEUTRAL_IT_INSN:
	  now_pred.block_length++;
	  now_pred.insn_cond = true;

	  if (now_pred.block_length > 4)
	    force_automatic_it_block_close ();
	  else
	    now_pred_add_mask (now_pred.cc & 1);
	  break;

	case IT_INSN:
	  close_automatic_it_block ();
	  now_pred.state = MANUAL_PRED_BLOCK;
	  break;
	}
      break;

    case MANUAL_PRED_BLOCK:
      {
	unsigned int cond;
	int is_last;

	if (now_pred.type == SCALAR_PRED)
	  {
	    /* Check conditional suffixes.  */
	    cond = now_pred.cc ^ ((now_pred.mask >> 4) & 1) ^ 1;
	    now_pred.mask <<= 1;
	    now_pred.mask &= 0x1f;
	    is_last = (now_pred.mask == 0x10);
	  }
	else
	  {
	    now_pred.cc ^= (now_pred.mask >> 4);
	    cond = now_pred.cc + 0xf;
	    now_pred.mask <<= 1;
	    now_pred.mask &= 0x1f;
	    is_last = now_pred.mask == 0x10;
	  }
	now_pred.insn_cond = true;

	switch (inst.pred_insn_type)
	  {
	  case OUTSIDE_PRED_INSN:
	    if (now_pred.type == SCALAR_PRED)
	      {
		if (inst.cond == COND_ALWAYS)
		  {
		    /* Case 12: In an IT block, with no code: error: missing
		       code.  */
		    inst.error = BAD_NOT_IT;
		    return FAIL;
		  }
		else if (inst.cond > COND_ALWAYS)
		  {
		    /* Case 11: In an IT block, with a VPT code: syntax error.
		     */
		    inst.error = BAD_SYNTAX;
		    return FAIL;
		  }
		else if (thumb_mode)
		  {
		    /* This is for some special cases where a non-MVE
		       instruction is not allowed in an IT block, such as cbz,
		       but are put into one with a condition code.
		       You could argue this should be a syntax error, but we
		       gave the 'not allowed in IT block' diagnostic in the
		       past so we will keep doing so.  */
		    inst.error = BAD_NOT_IT;
		    return FAIL;
		  }
		break;
	      }
	    else
	      {
		/* Case 15: In a VPT block, with no code: UNPREDICTABLE.  */
		as_tsktsk (MVE_NOT_VPT);
		return SUCCESS;
	      }
	  case MVE_OUTSIDE_PRED_INSN:
	    if (now_pred.type == SCALAR_PRED)
	      {
		if (inst.cond == COND_ALWAYS)
		  {
		    /* Case 3: In an IT block, with no code: warning:
		       UNPREDICTABLE.  */
		    as_tsktsk (MVE_NOT_IT);
		    return SUCCESS;
		  }
		else if (inst.cond < COND_ALWAYS)
		  {
		    /* Case 1: In an IT block, with an IT code: syntax error.
		     */
		    inst.error = BAD_SYNTAX;
		    return FAIL;
		  }
		else
		  gas_assert (0);
	      }
	    else
	      {
		if (inst.cond < COND_ALWAYS)
		  {
		    /* Case 4: In a VPT block, with an IT code: syntax error.
		     */
		    inst.error = BAD_SYNTAX;
		    return FAIL;
		  }
		else if (inst.cond == COND_ALWAYS)
		  {
		    /* Case 6: In a VPT block, with no code: error: missing
		       code.  */
		    inst.error = BAD_NOT_VPT;
		    return FAIL;
		  }
		else
		  {
		    gas_assert (0);
		  }
	      }
	    /* All paths above return or abort, so no fall-through into
	       MVE_UNPREDICABLE_INSN actually happens.  */
	  case MVE_UNPREDICABLE_INSN:
	    as_tsktsk (now_pred.type == SCALAR_PRED ? MVE_NOT_IT : MVE_NOT_VPT);
	    return SUCCESS;
	  case INSIDE_IT_INSN:
	    if (inst.cond > COND_ALWAYS)
	      {
		/* Case 11: In an IT block, with a VPT code: syntax error.  */
		/* Case 14: In a VPT block, with a VPT code: syntax error.  */
		inst.error = BAD_SYNTAX;
		return FAIL;
	      }
	    else if (now_pred.type == SCALAR_PRED)
	      {
		/* Case 10: In an IT block, with an IT code: OK!  */
		if (cond != inst.cond)
		  {
		    inst.error = now_pred.type == SCALAR_PRED ? BAD_IT_COND :
		      BAD_VPT_COND;
		    return FAIL;
		  }
	      }
	    else
	      {
		/* Case 13: In a VPT block, with an IT code: error: should be
		   in an IT block.  */
		inst.error = BAD_OUT_IT;
		return FAIL;
	      }
	    break;
	  case INSIDE_VPT_INSN:
	    if (now_pred.type == SCALAR_PRED)
	      {
		/* Case 2: In an IT block, with a VPT code: error: must be in a
		   VPT block.  */
		inst.error = BAD_OUT_VPT;
		return FAIL;
	      }
	    /* Case 5:  In a VPT block, with a VPT code: OK!  */
	    else if (cond != inst.cond)
	      {
		inst.error = BAD_VPT_COND;
		return FAIL;
	      }
	    break;
	  case INSIDE_IT_LAST_INSN:
	  case IF_INSIDE_IT_LAST_INSN:
	    if (now_pred.type == VECTOR_PRED || inst.cond > COND_ALWAYS)
	      {
		/* Case 4: In a VPT block, with an IT code: syntax error.  */
		/* Case 11: In an IT block, with a VPT code: syntax error.  */
		inst.error = BAD_SYNTAX;
		return FAIL;
	      }
	    else if (cond != inst.cond)
	      {
		inst.error = BAD_IT_COND;
		return FAIL;
	      }
	    if (!is_last)
	      {
		inst.error = BAD_BRANCH;
		return FAIL;
	      }
	    break;
	  case NEUTRAL_IT_INSN:
	    /* The BKPT instruction is unconditional even in a IT or VPT
	       block.  */
	    break;
	  case IT_INSN:
	    if (now_pred.type == SCALAR_PRED)
	      {
		inst.error = BAD_IT_IT;
		return FAIL;
	      }
	    /* fall through.  */
	  case VPT_INSN:
	    if (inst.cond == COND_ALWAYS)
	      {
		/* Executing a VPT/VPST instruction inside an IT block or a
		   VPT/VPST/IT instruction inside a VPT block is UNPREDICTABLE.
		 */
		if (now_pred.type == SCALAR_PRED)
		  as_tsktsk (MVE_NOT_IT);
		else
		  as_tsktsk (MVE_NOT_VPT);
		return SUCCESS;
	      }
	    else
	      {
		/* VPT/VPST do not accept condition codes.  */
		inst.error = BAD_SYNTAX;
		return FAIL;
	      }
	  }
      }
      break;
    }

  return SUCCESS;
}
/* Describes a class of 16-bit Thumb encodings, matched as
   (insn & mask) == pattern, with a translatable description used in
   the ARMv8 IT-block performance-deprecation diagnostic.  */
struct depr_insn_mask
{
  unsigned long pattern;	/* Expected bits after masking.  */
  unsigned long mask;		/* Which encoding bits are compared.  */
  const char* description;	/* Class name for the diagnostic.  */
};
/* List of 16-bit instruction patterns deprecated in an IT block in
   ARMv8.  Scanned in order by it_fsm_post_encode (); terminated by the
   all-zero sentinel entry.  */
static const struct depr_insn_mask depr_it_insns[] = {
  { 0xc000, 0xc000, N_("Short branches, Undefined, SVC, LDM/STM") },
  { 0xb000, 0xb000, N_("Miscellaneous 16-bit instructions") },
  { 0xa000, 0xb800, N_("ADR") },
  { 0x4800, 0xf800, N_("Literal loads") },
  { 0x4478, 0xf478, N_("Hi-register ADD, MOV, CMP, BX, BLX using pc") },
  { 0x4487, 0xfc87, N_("Hi-register ADD, MOV, CMP using pc") },
  /* NOTE: 0x00dd is not the real encoding, instead, it is the 'tvalue'
     field in asm_opcode.  'tvalue' is used at the stage this check happen.  */
  { 0x00dd, 0x7fff, N_("ADD/SUB sp, sp #imm") },
  { 0, 0, NULL }
};
  20251. static void
  20252. it_fsm_post_encode (void)
  20253. {
  20254. int is_last;
  20255. if (!now_pred.state_handled)
  20256. handle_pred_state ();
  20257. if (now_pred.insn_cond
  20258. && warn_on_restrict_it
  20259. && !now_pred.warn_deprecated
  20260. && warn_on_deprecated
  20261. && (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8)
  20262. || ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v8r))
  20263. && !ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_m))
  20264. {
  20265. if (inst.instruction >= 0x10000)
  20266. {
  20267. as_tsktsk (_("IT blocks containing 32-bit Thumb instructions are "
  20268. "performance deprecated in ARMv8-A and ARMv8-R"));
  20269. now_pred.warn_deprecated = true;
  20270. }
  20271. else
  20272. {
  20273. const struct depr_insn_mask *p = depr_it_insns;
  20274. while (p->mask != 0)
  20275. {
  20276. if ((inst.instruction & p->mask) == p->pattern)
  20277. {
  20278. as_tsktsk (_("IT blocks containing 16-bit Thumb "
  20279. "instructions of the following class are "
  20280. "performance deprecated in ARMv8-A and "
  20281. "ARMv8-R: %s"), p->description);
  20282. now_pred.warn_deprecated = true;
  20283. break;
  20284. }
  20285. ++p;
  20286. }
  20287. }
  20288. if (now_pred.block_length > 1)
  20289. {
  20290. as_tsktsk (_("IT blocks containing more than one conditional "
  20291. "instruction are performance deprecated in ARMv8-A and "
  20292. "ARMv8-R"));
  20293. now_pred.warn_deprecated = true;
  20294. }
  20295. }
  20296. is_last = (now_pred.mask == 0x10);
  20297. if (is_last)
  20298. {
  20299. now_pred.state = OUTSIDE_PRED_BLOCK;
  20300. now_pred.mask = 0;
  20301. }
  20302. }
  20303. static void
  20304. force_automatic_it_block_close (void)
  20305. {
  20306. if (now_pred.state == AUTOMATIC_PRED_BLOCK)
  20307. {
  20308. close_automatic_it_block ();
  20309. now_pred.state = OUTSIDE_PRED_BLOCK;
  20310. now_pred.mask = 0;
  20311. }
  20312. }
  20313. static int
  20314. in_pred_block (void)
  20315. {
  20316. if (!now_pred.state_handled)
  20317. handle_pred_state ();
  20318. return now_pred.state != OUTSIDE_PRED_BLOCK;
  20319. }
  20320. /* Whether OPCODE only has T32 encoding. Since this function is only used by
  20321. t32_insn_ok, OPCODE enabled by v6t2 extension bit do not need to be listed
  20322. here, hence the "known" in the function name. */
  20323. static bool
  20324. known_t32_only_insn (const struct asm_opcode *opcode)
  20325. {
  20326. /* Original Thumb-1 wide instruction. */
  20327. if (opcode->tencode == do_t_blx
  20328. || opcode->tencode == do_t_branch23
  20329. || ARM_CPU_HAS_FEATURE (*opcode->tvariant, arm_ext_msr)
  20330. || ARM_CPU_HAS_FEATURE (*opcode->tvariant, arm_ext_barrier))
  20331. return true;
  20332. /* Wide-only instruction added to ARMv8-M Baseline. */
  20333. if (ARM_CPU_HAS_FEATURE (*opcode->tvariant, arm_ext_v8m_m_only)
  20334. || ARM_CPU_HAS_FEATURE (*opcode->tvariant, arm_ext_atomics)
  20335. || ARM_CPU_HAS_FEATURE (*opcode->tvariant, arm_ext_v6t2_v8m)
  20336. || ARM_CPU_HAS_FEATURE (*opcode->tvariant, arm_ext_div))
  20337. return true;
  20338. return false;
  20339. }
  20340. /* Whether wide instruction variant can be used if available for a valid OPCODE
  20341. in ARCH. */
  20342. static bool
  20343. t32_insn_ok (arm_feature_set arch, const struct asm_opcode *opcode)
  20344. {
  20345. if (known_t32_only_insn (opcode))
  20346. return true;
  20347. /* Instruction with narrow and wide encoding added to ARMv8-M. Availability
  20348. of variant T3 of B.W is checked in do_t_branch. */
  20349. if (ARM_CPU_HAS_FEATURE (arch, arm_ext_v8m)
  20350. && opcode->tencode == do_t_branch)
  20351. return true;
  20352. /* MOV accepts T1/T3 encodings under Baseline, T3 encoding is 32bit. */
  20353. if (ARM_CPU_HAS_FEATURE (arch, arm_ext_v8m)
  20354. && opcode->tencode == do_t_mov_cmp
  20355. /* Make sure CMP instruction is not affected. */
  20356. && opcode->aencode == do_mov)
  20357. return true;
  20358. /* Wide instruction variants of all instructions with narrow *and* wide
  20359. variants become available with ARMv6t2. Other opcodes are either
  20360. narrow-only or wide-only and are thus available if OPCODE is valid. */
  20361. if (ARM_CPU_HAS_FEATURE (arch, arm_ext_v6t2))
  20362. return true;
  20363. /* OPCODE with narrow only instruction variant or wide variant not
  20364. available. */
  20365. return false;
  20366. }
/* Assemble the single instruction (with operands) held in STR.  The
   mnemonic is looked up, then encoded as Thumb or ARM depending on the
   current mode; the result is accumulated in the global `inst' and
   finally emitted via output_inst.  */
void
md_assemble (char *str)
{
  char *p = str;
  const struct asm_opcode * opcode;

  /* Align the previous label if needed.  */
  if (last_label_seen != NULL)
    {
      symbol_set_frag (last_label_seen, frag_now);
      S_SET_VALUE (last_label_seen, (valueT) frag_now_fix ());
      S_SET_SEGMENT (last_label_seen, now_seg);
    }

  /* Start from a clean slate for every instruction.  */
  memset (&inst, '\0', sizeof (inst));
  int r;
  for (r = 0; r < ARM_IT_MAX_RELOCS; r++)
    inst.relocs[r].type = BFD_RELOC_UNUSED;

  opcode = opcode_lookup (&p);
  if (!opcode)
    {
      /* It wasn't an instruction, but it might be a register alias of
	 the form alias .req reg, or a Neon .dn/.qn directive.  */
      if (! create_register_alias (str, p)
	  && ! create_neon_reg_alias (str, p))
	as_bad (_("bad instruction `%s'"), str);
      return;
    }

  if (warn_on_deprecated && opcode->tag == OT_cinfix3_deprecated)
    as_tsktsk (_("s suffix on comparison instruction is deprecated"));

  /* The value which unconditional instructions should have in place of the
     condition field.  */
  inst.uncond_value = (opcode->tag == OT_csuffixF) ? 0xf : -1u;

  if (thumb_mode)
    {
      arm_feature_set variant;

      variant = cpu_variant;
      /* Only allow coprocessor instructions on Thumb-2 capable devices.  */
      if (!ARM_CPU_HAS_FEATURE (variant, arm_arch_t2))
	ARM_CLEAR_FEATURE (variant, variant, fpu_any_hard);
      /* Check that this instruction is supported for this CPU.  */
      if (!opcode->tvariant
	  || (thumb_mode == 1
	      && !ARM_CPU_HAS_FEATURE (variant, *opcode->tvariant)))
	{
	  /* SVC gets its own, clearer diagnostic.  */
	  if (opcode->tencode == do_t_swi)
	    as_bad (_("SVC is not permitted on this architecture"));
	  else
	    as_bad (_("selected processor does not support `%s' in Thumb mode"), str);
	  return;
	}
      if (inst.cond != COND_ALWAYS && !unified_syntax
	  && opcode->tencode != do_t_branch)
	{
	  as_bad (_("Thumb does not support conditional execution"));
	  return;
	}

      /* Two things are addressed here:
	 1) Implicit require narrow instructions on Thumb-1.
	    This avoids relaxation accidentally introducing Thumb-2
	    instructions.
	 2) Reject wide instructions in non Thumb-2 cores.

	 Only instructions with narrow and wide variants need to be handled
	 but selecting all non wide-only instructions is easier.  */
      if (!ARM_CPU_HAS_FEATURE (variant, arm_ext_v6t2)
	  && !t32_insn_ok (variant, opcode))
	{
	  if (inst.size_req == 0)
	    inst.size_req = 2;
	  else if (inst.size_req == 4)
	    {
	      /* The user explicitly asked for .w; explain why it is
		 unavailable on this core.  */
	      if (ARM_CPU_HAS_FEATURE (variant, arm_ext_v8m))
		as_bad (_("selected processor does not support 32bit wide "
			  "variant of instruction `%s'"), str);
	      else
		as_bad (_("selected processor does not support `%s' in "
			  "Thumb-2 mode"), str);
	      return;
	    }
	}

      inst.instruction = opcode->tvalue;

      if (!parse_operands (p, opcode->operands, /*thumb=*/true))
	{
	  /* Prepare the pred_insn_type for those encodings that don't set
	     it.  */
	  it_fsm_pre_encode ();

	  opcode->tencode ();

	  it_fsm_post_encode ();
	}

      if (!(inst.error || inst.relax))
	{
	  /* Thumb-2 opcodes never fall in the 0xe800-0xffff range when
	     viewed as a 16-bit value; anything above 0xffff is 32-bit.  */
	  gas_assert (inst.instruction < 0xe800 || inst.instruction > 0xffff);
	  inst.size = (inst.instruction > 0xffff ? 4 : 2);
	  if (inst.size_req && inst.size_req != inst.size)
	    {
	      as_bad (_("cannot honor width suffix -- `%s'"), str);
	      return;
	    }
	}

      /* Something has gone badly wrong if we try to relax a fixed size
	 instruction.  */
      gas_assert (inst.size_req == 0 || !inst.relax);

      ARM_MERGE_FEATURE_SETS (thumb_arch_used, thumb_arch_used,
			      *opcode->tvariant);
      /* Many Thumb-2 instructions also have Thumb-1 variants, so explicitly
	 set those bits when Thumb-2 32-bit instructions are seen.  The impact
	 of relaxable instructions will be considered later after we finish all
	 relaxation.  */
      if (ARM_FEATURE_CORE_EQUAL (cpu_variant, arm_arch_any))
	variant = arm_arch_none;
      else
	variant = cpu_variant;
      if (inst.size == 4 && !t32_insn_ok (variant, opcode))
	ARM_MERGE_FEATURE_SETS (thumb_arch_used, thumb_arch_used,
				arm_ext_v6t2);

      check_neon_suffixes;

      if (!inst.error)
	{
	  mapping_state (MAP_THUMB);
	}
    }
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v1))
    {
      bool is_bx;

      /* bx is allowed on v5 cores, and sometimes on v4 cores.  */
      is_bx = (opcode->aencode == do_bx);

      /* Check that this instruction is supported for this CPU.  */
      if (!(is_bx && fix_v4bx)
	  && !(opcode->avariant &&
	       ARM_CPU_HAS_FEATURE (cpu_variant, *opcode->avariant)))
	{
	  as_bad (_("selected processor does not support `%s' in ARM mode"), str);
	  return;
	}
      /* Width suffixes (.n/.w) only make sense for Thumb.  */
      if (inst.size_req)
	{
	  as_bad (_("width suffixes are invalid in ARM mode -- `%s'"), str);
	  return;
	}

      inst.instruction = opcode->avalue;
      if (opcode->tag == OT_unconditionalF)
	inst.instruction |= 0xFU << 28;
      else
	inst.instruction |= inst.cond << 28;
      inst.size = INSN_SIZE;
      if (!parse_operands (p, opcode->operands, /*thumb=*/false))
	{
	  it_fsm_pre_encode ();
	  opcode->aencode ();
	  it_fsm_post_encode ();
	}
      /* Arm mode bx is marked as both v4T and v5 because it's still required
	 on a hypothetical non-thumb v5 core.  */
      if (is_bx)
	ARM_MERGE_FEATURE_SETS (arm_arch_used, arm_arch_used, arm_ext_v4t);
      else
	ARM_MERGE_FEATURE_SETS (arm_arch_used, arm_arch_used,
				*opcode->avariant);

      check_neon_suffixes;

      if (!inst.error)
	{
	  mapping_state (MAP_ARM);
	}
    }
  else
    {
      /* Neither Thumb mode nor an ARM-capable core.  */
      as_bad (_("attempt to use an ARM instruction on a Thumb-only processor "
		"-- `%s'"), str);
      return;
    }
  output_inst (str);
}
/* Warn, at end of assembly, about any explicitly-opened IT/VPT
   predication block that was never closed.  For ELF output the
   predication state is tracked per section; otherwise only the single
   global state is checked.  */
static void
check_pred_blocks_finished (void)
{
#ifdef OBJ_ELF
  asection *sect;

  for (sect = stdoutput->sections; sect != NULL; sect = sect->next)
    if (seg_info (sect)->tc_segment_info_data.current_pred.state
	== MANUAL_PRED_BLOCK)
      {
	/* SCALAR_PRED means an IT block; otherwise it is an MVE
	   VPT/VPST block.  */
	if (now_pred.type == SCALAR_PRED)
	  as_warn (_("section '%s' finished with an open IT block."),
		   sect->name);
	else
	  as_warn (_("section '%s' finished with an open VPT/VPST block."),
		   sect->name);
      }
#else
  if (now_pred.state == MANUAL_PRED_BLOCK)
    {
      if (now_pred.type == SCALAR_PRED)
	as_warn (_("file finished with an open IT block."));
      else
	as_warn (_("file finished with an open VPT/VPST block."));
    }
#endif
}
/* Various frobbings of labels and their addresses.  */

/* Called at the start of each source line; forget the label (if any)
   that ended the previous line so md_assemble only re-aligns labels
   that actually precede the current instruction.  */
void
arm_start_line_hook (void)
{
  last_label_seen = NULL;
}
/* Hook run when label SYM is defined: record it for md_assemble's
   re-alignment, tag it with the current instruction set, close any
   automatic IT block, and optionally mark it as a Thumb function.  */
void
arm_frob_label (symbolS * sym)
{
  last_label_seen = sym;

  /* Remember whether the label was defined in ARM or Thumb code.  */
  ARM_SET_THUMB (sym, thumb_mode);

#if defined OBJ_COFF || defined OBJ_ELF
  ARM_SET_INTERWORK (sym, support_interwork);
#endif

  force_automatic_it_block_close ();

  /* Note - do not allow local symbols (.Lxxx) to be labelled
     as Thumb functions.  This is because these labels, whilst
     they exist inside Thumb code, are not the entry points for
     possible ARM->Thumb calls.	 Also, these labels can be used
     as part of a computed goto or switch statement.  eg gcc
     can generate code that looks like this:

		ldr  r2, [pc, .Laaa]
		lsl  r3, r3, #2
		ldr  r2, [r3, r2]
		mov  pc, r2

       .Lbbb:  .word .Lxxx
       .Lccc:  .word .Lyyy
       ..etc...
       .Laaa:	.word Lbbb

     The first instruction loads the address of the jump table.
     The second instruction converts a table index into a byte offset.
     The third instruction gets the jump address out of the table.
     The fourth instruction performs the jump.

     If the address stored at .Laaa is that of a symbol which has the
     Thumb_Func bit set, then the linker will arrange for this address
     to have the bottom bit set, which in turn would mean that the
     address computation performed by the third instruction would end
     up with the bottom bit set.  Since the ARM is capable of unaligned
     word loads, the instruction would then load the incorrect address
     out of the jump table, and chaos would ensue.  */
  if (label_is_thumb_function_name
      && (S_GET_NAME (sym)[0] != '.' || S_GET_NAME (sym)[1] != 'L')
      && (bfd_section_flags (now_seg) & SEC_CODE) != 0)
    {
      /* When the address of a Thumb function is taken the bottom
	 bit of that address should be set.  This will allow
	 interworking between Arm and Thumb functions to work
	 correctly.  */

      THUMB_SET_FUNC (sym, 1);

      label_is_thumb_function_name = false;
    }

  dwarf2_emit_label (sym);
}
  20616. bool
  20617. arm_data_in_code (void)
  20618. {
  20619. if (thumb_mode && startswith (input_line_pointer + 1, "data:"))
  20620. {
  20621. *input_line_pointer = '/';
  20622. input_line_pointer += 5;
  20623. *input_line_pointer = 0;
  20624. return true;
  20625. }
  20626. return false;
  20627. }
  20628. char *
  20629. arm_canonicalize_symbol_name (char * name)
  20630. {
  20631. int len;
  20632. if (thumb_mode && (len = strlen (name)) > 5
  20633. && streq (name + len - 5, "/data"))
  20634. *(name + len - 5) = 0;
  20635. return name;
  20636. }
/* Table of all register names defined by default.  The user can
   define additional names with .req.  Note that all register names
   should appear in both upper and lowercase variants.	Some registers
   also have mixed-case names.	*/

/* REGDEF stringizes the name and records number, type, a "built-in"
   flag and a zero neon field; the other macros expand to runs of
   REGDEF entries.  */
#define REGDEF(s,n,t) { #s, n, REG_TYPE_##t, true, 0 }
#define REGNUM(p,n,t) REGDEF(p##n, n, t)
/* REGNUM2 doubles the register number - used for Q registers, which
   map onto even D register numbers.  */
#define REGNUM2(p,n,t) REGDEF(p##n, 2 * n, t)
#define REGSET(p,t) \
  REGNUM(p, 0,t), REGNUM(p, 1,t), REGNUM(p, 2,t), REGNUM(p, 3,t), \
  REGNUM(p, 4,t), REGNUM(p, 5,t), REGNUM(p, 6,t), REGNUM(p, 7,t), \
  REGNUM(p, 8,t), REGNUM(p, 9,t), REGNUM(p,10,t), REGNUM(p,11,t), \
  REGNUM(p,12,t), REGNUM(p,13,t), REGNUM(p,14,t), REGNUM(p,15,t)
#define REGSETH(p,t) \
  REGNUM(p,16,t), REGNUM(p,17,t), REGNUM(p,18,t), REGNUM(p,19,t), \
  REGNUM(p,20,t), REGNUM(p,21,t), REGNUM(p,22,t), REGNUM(p,23,t), \
  REGNUM(p,24,t), REGNUM(p,25,t), REGNUM(p,26,t), REGNUM(p,27,t), \
  REGNUM(p,28,t), REGNUM(p,29,t), REGNUM(p,30,t), REGNUM(p,31,t)
#define REGSET2(p,t) \
  REGNUM2(p, 0,t), REGNUM2(p, 1,t), REGNUM2(p, 2,t), REGNUM2(p, 3,t), \
  REGNUM2(p, 4,t), REGNUM2(p, 5,t), REGNUM2(p, 6,t), REGNUM2(p, 7,t), \
  REGNUM2(p, 8,t), REGNUM2(p, 9,t), REGNUM2(p,10,t), REGNUM2(p,11,t), \
  REGNUM2(p,12,t), REGNUM2(p,13,t), REGNUM2(p,14,t), REGNUM2(p,15,t)
/* Banked LR/SP/SPSR triples for one processor mode, in both cases.  */
#define SPLRBANK(base,bank,t) \
  REGDEF(lr_##bank, 768|((base+0)<<16), t), \
  REGDEF(sp_##bank, 768|((base+1)<<16), t), \
  REGDEF(spsr_##bank, 768|(base<<16)|SPSR_BIT, t), \
  REGDEF(LR_##bank, 768|((base+0)<<16), t), \
  REGDEF(SP_##bank, 768|((base+1)<<16), t), \
  REGDEF(SPSR_##bank, 768|(base<<16)|SPSR_BIT, t)

static const struct reg_entry reg_names[] =
{
  /* ARM integer registers.  */
  REGSET(r, RN), REGSET(R, RN),

  /* ATPCS synonyms.  */
  REGDEF(a1,0,RN), REGDEF(a2,1,RN), REGDEF(a3, 2,RN), REGDEF(a4, 3,RN),
  REGDEF(v1,4,RN), REGDEF(v2,5,RN), REGDEF(v3, 6,RN), REGDEF(v4, 7,RN),
  REGDEF(v5,8,RN), REGDEF(v6,9,RN), REGDEF(v7,10,RN), REGDEF(v8,11,RN),

  REGDEF(A1,0,RN), REGDEF(A2,1,RN), REGDEF(A3, 2,RN), REGDEF(A4, 3,RN),
  REGDEF(V1,4,RN), REGDEF(V2,5,RN), REGDEF(V3, 6,RN), REGDEF(V4, 7,RN),
  REGDEF(V5,8,RN), REGDEF(V6,9,RN), REGDEF(V7,10,RN), REGDEF(V8,11,RN),

  /* Well-known aliases.  */
  REGDEF(wr, 7,RN), REGDEF(sb, 9,RN), REGDEF(sl,10,RN), REGDEF(fp,11,RN),
  REGDEF(ip,12,RN), REGDEF(sp,13,RN), REGDEF(lr,14,RN), REGDEF(pc,15,RN),

  REGDEF(WR, 7,RN), REGDEF(SB, 9,RN), REGDEF(SL,10,RN), REGDEF(FP,11,RN),
  REGDEF(IP,12,RN), REGDEF(SP,13,RN), REGDEF(LR,14,RN), REGDEF(PC,15,RN),

  /* Defining the new Zero register from ARMv8.1-M.  */
  REGDEF(zr,15,ZR),
  REGDEF(ZR,15,ZR),

  /* Coprocessor numbers.  */
  REGSET(p, CP), REGSET(P, CP),

  /* Coprocessor register numbers.  The "cr" variants are for backward
     compatibility.  */
  REGSET(c,  CN), REGSET(C, CN),
  REGSET(cr, CN), REGSET(CR, CN),

  /* ARM banked registers.  */
  REGDEF(R8_usr,512|(0<<16),RNB), REGDEF(r8_usr,512|(0<<16),RNB),
  REGDEF(R9_usr,512|(1<<16),RNB), REGDEF(r9_usr,512|(1<<16),RNB),
  REGDEF(R10_usr,512|(2<<16),RNB), REGDEF(r10_usr,512|(2<<16),RNB),
  REGDEF(R11_usr,512|(3<<16),RNB), REGDEF(r11_usr,512|(3<<16),RNB),
  REGDEF(R12_usr,512|(4<<16),RNB), REGDEF(r12_usr,512|(4<<16),RNB),
  REGDEF(SP_usr,512|(5<<16),RNB), REGDEF(sp_usr,512|(5<<16),RNB),
  REGDEF(LR_usr,512|(6<<16),RNB), REGDEF(lr_usr,512|(6<<16),RNB),

  REGDEF(R8_fiq,512|(8<<16),RNB), REGDEF(r8_fiq,512|(8<<16),RNB),
  REGDEF(R9_fiq,512|(9<<16),RNB), REGDEF(r9_fiq,512|(9<<16),RNB),
  REGDEF(R10_fiq,512|(10<<16),RNB), REGDEF(r10_fiq,512|(10<<16),RNB),
  REGDEF(R11_fiq,512|(11<<16),RNB), REGDEF(r11_fiq,512|(11<<16),RNB),
  REGDEF(R12_fiq,512|(12<<16),RNB), REGDEF(r12_fiq,512|(12<<16),RNB),
  REGDEF(SP_fiq,512|(13<<16),RNB), REGDEF(sp_fiq,512|(13<<16),RNB),
  REGDEF(LR_fiq,512|(14<<16),RNB), REGDEF(lr_fiq,512|(14<<16),RNB),
  REGDEF(SPSR_fiq,512|(14<<16)|SPSR_BIT,RNB), REGDEF(spsr_fiq,512|(14<<16)|SPSR_BIT,RNB),

  SPLRBANK(0,IRQ,RNB), SPLRBANK(0,irq,RNB),
  SPLRBANK(2,SVC,RNB), SPLRBANK(2,svc,RNB),
  SPLRBANK(4,ABT,RNB), SPLRBANK(4,abt,RNB),
  SPLRBANK(6,UND,RNB), SPLRBANK(6,und,RNB),
  SPLRBANK(12,MON,RNB), SPLRBANK(12,mon,RNB),
  REGDEF(elr_hyp,768|(14<<16),RNB), REGDEF(ELR_hyp,768|(14<<16),RNB),
  REGDEF(sp_hyp,768|(15<<16),RNB), REGDEF(SP_hyp,768|(15<<16),RNB),
  REGDEF(spsr_hyp,768|(14<<16)|SPSR_BIT,RNB),
  REGDEF(SPSR_hyp,768|(14<<16)|SPSR_BIT,RNB),

  /* FPA registers.  */
  REGNUM(f,0,FN), REGNUM(f,1,FN), REGNUM(f,2,FN), REGNUM(f,3,FN),
  REGNUM(f,4,FN), REGNUM(f,5,FN), REGNUM(f,6,FN), REGNUM(f,7, FN),

  REGNUM(F,0,FN), REGNUM(F,1,FN), REGNUM(F,2,FN), REGNUM(F,3,FN),
  REGNUM(F,4,FN), REGNUM(F,5,FN), REGNUM(F,6,FN), REGNUM(F,7, FN),

  /* VFP SP registers.	*/
  REGSET(s,VFS),  REGSET(S,VFS),
  REGSETH(s,VFS), REGSETH(S,VFS),

  /* VFP DP Registers.	*/
  REGSET(d,VFD),  REGSET(D,VFD),
  /* Extra Neon DP registers.  */
  REGSETH(d,VFD), REGSETH(D,VFD),

  /* Neon QP registers.  */
  REGSET2(q,NQ),  REGSET2(Q,NQ),

  /* VFP control registers.  */
  REGDEF(fpsid,0,VFC), REGDEF(fpscr,1,VFC), REGDEF(fpexc,8,VFC),
  REGDEF(FPSID,0,VFC), REGDEF(FPSCR,1,VFC), REGDEF(FPEXC,8,VFC),
  REGDEF(fpinst,9,VFC), REGDEF(fpinst2,10,VFC),
  REGDEF(FPINST,9,VFC), REGDEF(FPINST2,10,VFC),
  REGDEF(mvfr0,7,VFC), REGDEF(mvfr1,6,VFC),
  REGDEF(MVFR0,7,VFC), REGDEF(MVFR1,6,VFC),
  REGDEF(mvfr2,5,VFC), REGDEF(MVFR2,5,VFC),
  REGDEF(fpscr_nzcvqc,2,VFC), REGDEF(FPSCR_nzcvqc,2,VFC),
  REGDEF(vpr,12,VFC), REGDEF(VPR,12,VFC),
  REGDEF(fpcxt_ns,14,VFC), REGDEF(FPCXT_NS,14,VFC),
  REGDEF(fpcxt_s,15,VFC), REGDEF(FPCXT_S,15,VFC),

  /* Maverick DSP coprocessor registers.  */
  REGSET(mvf,MVF),  REGSET(mvd,MVD),  REGSET(mvfx,MVFX),  REGSET(mvdx,MVDX),
  REGSET(MVF,MVF),  REGSET(MVD,MVD),  REGSET(MVFX,MVFX),  REGSET(MVDX,MVDX),

  REGNUM(mvax,0,MVAX), REGNUM(mvax,1,MVAX),
  REGNUM(mvax,2,MVAX), REGNUM(mvax,3,MVAX),
  REGDEF(dspsc,0,DSPSC),

  REGNUM(MVAX,0,MVAX), REGNUM(MVAX,1,MVAX),
  REGNUM(MVAX,2,MVAX), REGNUM(MVAX,3,MVAX),
  REGDEF(DSPSC,0,DSPSC),

  /* iWMMXt data registers - p0, c0-15.	 */
  REGSET(wr,MMXWR), REGSET(wR,MMXWR), REGSET(WR, MMXWR),

  /* iWMMXt control registers - p1, c0-3.  */
  REGDEF(wcid,	0,MMXWC),  REGDEF(wCID,	 0,MMXWC),  REGDEF(WCID,  0,MMXWC),
  REGDEF(wcon,	1,MMXWC),  REGDEF(wCon,	 1,MMXWC),  REGDEF(WCON,  1,MMXWC),
  REGDEF(wcssf, 2,MMXWC),  REGDEF(wCSSF, 2,MMXWC),  REGDEF(WCSSF, 2,MMXWC),
  REGDEF(wcasf, 3,MMXWC),  REGDEF(wCASF, 3,MMXWC),  REGDEF(WCASF, 3,MMXWC),

  /* iWMMXt scalar (constant/offset) registers - p1, c8-11.  */
  REGDEF(wcgr0, 8,MMXWCG),  REGDEF(wCGR0, 8,MMXWCG),  REGDEF(WCGR0, 8,MMXWCG),
  REGDEF(wcgr1, 9,MMXWCG),  REGDEF(wCGR1, 9,MMXWCG),  REGDEF(WCGR1, 9,MMXWCG),
  REGDEF(wcgr2,10,MMXWCG),  REGDEF(wCGR2,10,MMXWCG),  REGDEF(WCGR2,10,MMXWCG),
  REGDEF(wcgr3,11,MMXWCG),  REGDEF(wCGR3,11,MMXWCG),  REGDEF(WCGR3,11,MMXWCG),

  /* XScale accumulator registers.  */
  REGNUM(acc,0,XSCALE), REGNUM(ACC,0,XSCALE),

  /* DWARF ABI defines RA_AUTH_CODE to 143.  It also reserves 134-142 for future
     expansion.  RA_AUTH_CODE here is given the value 143 % 134 to make it easy
     for tc_arm_regname_to_dw2regnum to translate to DWARF reg number using
     134 + reg_number should the range 134 to 142 be used for more pseudo regs
     in the future.  This also helps fit RA_AUTH_CODE into a bitmask.  */
  REGDEF(ra_auth_code,9,PSEUDO),
};
/* NB: REGNUM2, REGSETH, REGSET2 and SPLRBANK remain defined for use
   further down the file.  */
#undef REGDEF
#undef REGNUM
#undef REGSET
/* Table of all PSR suffixes.  Bare "CPSR" and "SPSR" are handled
   within psr_required_here.  */
static const struct asm_psr psrs[] =
{
  /* Backward compatibility notation.  Note that "all" is no longer
     truly all possible PSR bits.  */
  {"all",  PSR_c | PSR_f},
  {"flg",  PSR_f},
  {"ctl",  PSR_c},

  /* Individual flags.	*/
  {"f",	   PSR_f},
  {"c",	   PSR_c},
  {"x",	   PSR_x},
  {"s",	   PSR_s},

  /* Combinations of flags.  Every permutation of two, three and four
     of the f/c/x/s letters is accepted, all mapping to the OR of the
     corresponding PSR_* bits.  */
  {"fs",   PSR_f | PSR_s},
  {"fx",   PSR_f | PSR_x},
  {"fc",   PSR_f | PSR_c},
  {"sf",   PSR_s | PSR_f},
  {"sx",   PSR_s | PSR_x},
  {"sc",   PSR_s | PSR_c},
  {"xf",   PSR_x | PSR_f},
  {"xs",   PSR_x | PSR_s},
  {"xc",   PSR_x | PSR_c},
  {"cf",   PSR_c | PSR_f},
  {"cs",   PSR_c | PSR_s},
  {"cx",   PSR_c | PSR_x},
  {"fsx",  PSR_f | PSR_s | PSR_x},
  {"fsc",  PSR_f | PSR_s | PSR_c},
  {"fxs",  PSR_f | PSR_x | PSR_s},
  {"fxc",  PSR_f | PSR_x | PSR_c},
  {"fcs",  PSR_f | PSR_c | PSR_s},
  {"fcx",  PSR_f | PSR_c | PSR_x},
  {"sfx",  PSR_s | PSR_f | PSR_x},
  {"sfc",  PSR_s | PSR_f | PSR_c},
  {"sxf",  PSR_s | PSR_x | PSR_f},
  {"sxc",  PSR_s | PSR_x | PSR_c},
  {"scf",  PSR_s | PSR_c | PSR_f},
  {"scx",  PSR_s | PSR_c | PSR_x},
  {"xfs",  PSR_x | PSR_f | PSR_s},
  {"xfc",  PSR_x | PSR_f | PSR_c},
  {"xsf",  PSR_x | PSR_s | PSR_f},
  {"xsc",  PSR_x | PSR_s | PSR_c},
  {"xcf",  PSR_x | PSR_c | PSR_f},
  {"xcs",  PSR_x | PSR_c | PSR_s},
  {"cfs",  PSR_c | PSR_f | PSR_s},
  {"cfx",  PSR_c | PSR_f | PSR_x},
  {"csf",  PSR_c | PSR_s | PSR_f},
  {"csx",  PSR_c | PSR_s | PSR_x},
  {"cxf",  PSR_c | PSR_x | PSR_f},
  {"cxs",  PSR_c | PSR_x | PSR_s},
  {"fsxc", PSR_f | PSR_s | PSR_x | PSR_c},
  {"fscx", PSR_f | PSR_s | PSR_c | PSR_x},
  {"fxsc", PSR_f | PSR_x | PSR_s | PSR_c},
  {"fxcs", PSR_f | PSR_x | PSR_c | PSR_s},
  {"fcsx", PSR_f | PSR_c | PSR_s | PSR_x},
  {"fcxs", PSR_f | PSR_c | PSR_x | PSR_s},
  {"sfxc", PSR_s | PSR_f | PSR_x | PSR_c},
  {"sfcx", PSR_s | PSR_f | PSR_c | PSR_x},
  {"sxfc", PSR_s | PSR_x | PSR_f | PSR_c},
  {"sxcf", PSR_s | PSR_x | PSR_c | PSR_f},
  {"scfx", PSR_s | PSR_c | PSR_f | PSR_x},
  {"scxf", PSR_s | PSR_c | PSR_x | PSR_f},
  {"xfsc", PSR_x | PSR_f | PSR_s | PSR_c},
  {"xfcs", PSR_x | PSR_f | PSR_c | PSR_s},
  {"xsfc", PSR_x | PSR_s | PSR_f | PSR_c},
  {"xscf", PSR_x | PSR_s | PSR_c | PSR_f},
  {"xcfs", PSR_x | PSR_c | PSR_f | PSR_s},
  {"xcsf", PSR_x | PSR_c | PSR_s | PSR_f},
  {"cfsx", PSR_c | PSR_f | PSR_s | PSR_x},
  {"cfxs", PSR_c | PSR_f | PSR_x | PSR_s},
  {"csfx", PSR_c | PSR_s | PSR_f | PSR_x},
  {"csxf", PSR_c | PSR_s | PSR_x | PSR_f},
  {"cxfs", PSR_c | PSR_x | PSR_f | PSR_s},
  {"cxsf", PSR_c | PSR_x | PSR_s | PSR_f},
};
/* Table of V7M psr names.  The value is the MSR/MRS system-register
   encoding stored for each name; the *_ns entries are the Non-secure
   banked variants (value with the 0x80 bit set).  */
static const struct asm_psr v7m_psrs[] =
{
  {"apsr",	 0x0 }, {"APSR",	 0x0 },
  {"iapsr",	 0x1 }, {"IAPSR",	 0x1 },
  {"eapsr",	 0x2 }, {"EAPSR",	 0x2 },
  {"psr",	 0x3 }, {"PSR",		 0x3 },
  {"xpsr",	 0x3 }, {"XPSR",	 0x3 }, {"xPSR",	  3 },
  {"ipsr",	 0x5 }, {"IPSR",	 0x5 },
  {"epsr",	 0x6 }, {"EPSR",	 0x6 },
  {"iepsr",	 0x7 }, {"IEPSR",	 0x7 },
  {"msp",	 0x8 }, {"MSP",		 0x8 },
  {"psp",	 0x9 }, {"PSP",		 0x9 },
  {"msplim",	 0xa }, {"MSPLIM",	 0xa },
  {"psplim",	 0xb }, {"PSPLIM",	 0xb },
  {"primask",	 0x10}, {"PRIMASK",	 0x10},
  {"basepri",	 0x11}, {"BASEPRI",	 0x11},
  {"basepri_max", 0x12}, {"BASEPRI_MAX", 0x12},
  {"faultmask",	 0x13}, {"FAULTMASK",	 0x13},
  {"control",	 0x14}, {"CONTROL",	 0x14},
  {"msp_ns",	 0x88}, {"MSP_NS",	 0x88},
  {"psp_ns",	 0x89}, {"PSP_NS",	 0x89},
  {"msplim_ns",	 0x8a}, {"MSPLIM_NS",	 0x8a},
  {"psplim_ns",	 0x8b}, {"PSPLIM_NS",	 0x8b},
  {"primask_ns", 0x90}, {"PRIMASK_NS",	 0x90},
  {"basepri_ns", 0x91}, {"BASEPRI_NS",	 0x91},
  {"faultmask_ns", 0x93}, {"FAULTMASK_NS", 0x93},
  {"control_ns", 0x94}, {"CONTROL_NS",	 0x94},
  {"sp_ns",	 0x98}, {"SP_NS",	 0x98 }
};
/* Table of all shift-in-operand names.  "asl" is accepted as a synonym
   for "lsl"; "uxtw" is the MVE unsigned-extend-word form.  */
static const struct asm_shift_name shift_names [] =
{
  { "asl", SHIFT_LSL },	 { "ASL", SHIFT_LSL },
  { "lsl", SHIFT_LSL },	 { "LSL", SHIFT_LSL },
  { "lsr", SHIFT_LSR },	 { "LSR", SHIFT_LSR },
  { "asr", SHIFT_ASR },	 { "ASR", SHIFT_ASR },
  { "ror", SHIFT_ROR },	 { "ROR", SHIFT_ROR },
  { "rrx", SHIFT_RRX },	 { "RRX", SHIFT_RRX },
  { "uxtw", SHIFT_UXTW}, { "UXTW", SHIFT_UXTW}
};
  20892. /* Table of all explicit relocation names. */
  20893. #ifdef OBJ_ELF
  20894. static struct reloc_entry reloc_names[] =
  20895. {
  20896. { "got", BFD_RELOC_ARM_GOT32 }, { "GOT", BFD_RELOC_ARM_GOT32 },
  20897. { "gotoff", BFD_RELOC_ARM_GOTOFF }, { "GOTOFF", BFD_RELOC_ARM_GOTOFF },
  20898. { "plt", BFD_RELOC_ARM_PLT32 }, { "PLT", BFD_RELOC_ARM_PLT32 },
  20899. { "target1", BFD_RELOC_ARM_TARGET1 }, { "TARGET1", BFD_RELOC_ARM_TARGET1 },
  20900. { "target2", BFD_RELOC_ARM_TARGET2 }, { "TARGET2", BFD_RELOC_ARM_TARGET2 },
  20901. { "sbrel", BFD_RELOC_ARM_SBREL32 }, { "SBREL", BFD_RELOC_ARM_SBREL32 },
  20902. { "tlsgd", BFD_RELOC_ARM_TLS_GD32}, { "TLSGD", BFD_RELOC_ARM_TLS_GD32},
  20903. { "tlsldm", BFD_RELOC_ARM_TLS_LDM32}, { "TLSLDM", BFD_RELOC_ARM_TLS_LDM32},
  20904. { "tlsldo", BFD_RELOC_ARM_TLS_LDO32}, { "TLSLDO", BFD_RELOC_ARM_TLS_LDO32},
  20905. { "gottpoff",BFD_RELOC_ARM_TLS_IE32}, { "GOTTPOFF",BFD_RELOC_ARM_TLS_IE32},
  20906. { "tpoff", BFD_RELOC_ARM_TLS_LE32}, { "TPOFF", BFD_RELOC_ARM_TLS_LE32},
  20907. { "got_prel", BFD_RELOC_ARM_GOT_PREL}, { "GOT_PREL", BFD_RELOC_ARM_GOT_PREL},
  20908. { "tlsdesc", BFD_RELOC_ARM_TLS_GOTDESC},
  20909. { "TLSDESC", BFD_RELOC_ARM_TLS_GOTDESC},
  20910. { "tlscall", BFD_RELOC_ARM_TLS_CALL},
  20911. { "TLSCALL", BFD_RELOC_ARM_TLS_CALL},
  20912. { "tlsdescseq", BFD_RELOC_ARM_TLS_DESCSEQ},
  20913. { "TLSDESCSEQ", BFD_RELOC_ARM_TLS_DESCSEQ},
  20914. { "gotfuncdesc", BFD_RELOC_ARM_GOTFUNCDESC },
  20915. { "GOTFUNCDESC", BFD_RELOC_ARM_GOTFUNCDESC },
  20916. { "gotofffuncdesc", BFD_RELOC_ARM_GOTOFFFUNCDESC },
  20917. { "GOTOFFFUNCDESC", BFD_RELOC_ARM_GOTOFFFUNCDESC },
  20918. { "funcdesc", BFD_RELOC_ARM_FUNCDESC },
  20919. { "FUNCDESC", BFD_RELOC_ARM_FUNCDESC },
  20920. { "tlsgd_fdpic", BFD_RELOC_ARM_TLS_GD32_FDPIC }, { "TLSGD_FDPIC", BFD_RELOC_ARM_TLS_GD32_FDPIC },
  20921. { "tlsldm_fdpic", BFD_RELOC_ARM_TLS_LDM32_FDPIC }, { "TLSLDM_FDPIC", BFD_RELOC_ARM_TLS_LDM32_FDPIC },
  20922. { "gottpoff_fdpic", BFD_RELOC_ARM_TLS_IE32_FDPIC }, { "GOTTPOFF_FDIC", BFD_RELOC_ARM_TLS_IE32_FDPIC },
  20923. };
  20924. #endif
/* Table of all conditional affixes.  The value is the 4-bit ARM
   condition-code field; "hs", "ul" and "lo" are the documented
   synonyms for carry-set/carry-clear.  */
static const struct asm_cond conds[] =
{
  {"eq", 0x0},
  {"ne", 0x1},
  {"cs", 0x2}, {"hs", 0x2},
  {"cc", 0x3}, {"ul", 0x3}, {"lo", 0x3},
  {"mi", 0x4},
  {"pl", 0x5},
  {"vs", 0x6},
  {"vc", 0x7},
  {"hi", 0x8},
  {"ls", 0x9},
  {"ge", 0xa},
  {"lt", 0xb},
  {"gt", 0xc},
  {"le", 0xd},
  {"al", 0xe}
};
/* VPT-predication affixes ("t"/"e"), kept separate from the ARM
   condition codes; their values extend past the 4-bit cond range.  */
static const struct asm_cond vconds[] =
{
  {"t", 0xf},
  {"e", 0x10}
};
/* UL_BARRIER emits one entry each for the lower- and upper-case
   spellings of a barrier option; CODE is the 4-bit option field and
   FEAT the architecture feature that introduced it.  */
#define UL_BARRIER(L,U,CODE,FEAT) \
  { L, CODE, ARM_FEATURE_CORE_LOW (FEAT) }, \
  { U, CODE, ARM_FEATURE_CORE_LOW (FEAT) }

/* Table of DSB/DMB/ISB barrier option names.  The load-only options
   (ld/ishld/nshld/oshld) require ARMv8.  */
static struct asm_barrier_opt barrier_opt_names[] =
{
  UL_BARRIER ("sy",	"SY",	 0xf, ARM_EXT_BARRIER),
  UL_BARRIER ("st",	"ST",	 0xe, ARM_EXT_BARRIER),
  UL_BARRIER ("ld",	"LD",	 0xd, ARM_EXT_V8),
  UL_BARRIER ("ish",	"ISH",	 0xb, ARM_EXT_BARRIER),
  UL_BARRIER ("sh",	"SH",	 0xb, ARM_EXT_BARRIER),
  UL_BARRIER ("ishst",	"ISHST", 0xa, ARM_EXT_BARRIER),
  UL_BARRIER ("shst",	"SHST",	 0xa, ARM_EXT_BARRIER),
  UL_BARRIER ("ishld",	"ISHLD", 0x9, ARM_EXT_V8),
  UL_BARRIER ("un",	"UN",	 0x7, ARM_EXT_BARRIER),
  UL_BARRIER ("nsh",	"NSH",	 0x7, ARM_EXT_BARRIER),
  UL_BARRIER ("unst",	"UNST",	 0x6, ARM_EXT_BARRIER),
  UL_BARRIER ("nshst",	"NSHST", 0x6, ARM_EXT_BARRIER),
  UL_BARRIER ("nshld",	"NSHLD", 0x5, ARM_EXT_V8),
  UL_BARRIER ("osh",	"OSH",	 0x3, ARM_EXT_BARRIER),
  UL_BARRIER ("oshst",	"OSHST", 0x2, ARM_EXT_BARRIER),
  UL_BARRIER ("oshld",	"OSHLD", 0x1, ARM_EXT_V8)
};

#undef UL_BARRIER
  20972. /* Table of ARM-format instructions. */
  20973. /* Macros for gluing together operand strings. N.B. In all cases
  20974. other than OPS0, the trailing OP_stop comes from default
  20975. zero-initialization of the unspecified elements of the array. */
  20976. #define OPS0() { OP_stop, }
  20977. #define OPS1(a) { OP_##a, }
  20978. #define OPS2(a,b) { OP_##a,OP_##b, }
  20979. #define OPS3(a,b,c) { OP_##a,OP_##b,OP_##c, }
  20980. #define OPS4(a,b,c,d) { OP_##a,OP_##b,OP_##c,OP_##d, }
  20981. #define OPS5(a,b,c,d,e) { OP_##a,OP_##b,OP_##c,OP_##d,OP_##e, }
  20982. #define OPS6(a,b,c,d,e,f) { OP_##a,OP_##b,OP_##c,OP_##d,OP_##e,OP_##f, }
  20983. /* These macros are similar to the OPSn, but do not prepend the OP_ prefix.
  20984. This is useful when mixing operands for ARM and THUMB, i.e. using the
  20985. MIX_ARM_THUMB_OPERANDS macro.
  20986. In order to use these macros, prefix the number of operands with _
  20987. e.g. _3. */
  20988. #define OPS_1(a) { a, }
  20989. #define OPS_2(a,b) { a,b, }
  20990. #define OPS_3(a,b,c) { a,b,c, }
  20991. #define OPS_4(a,b,c,d) { a,b,c,d, }
  20992. #define OPS_5(a,b,c,d,e) { a,b,c,d,e, }
  20993. #define OPS_6(a,b,c,d,e,f) { a,b,c,d,e,f, }
  20994. /* These macros abstract out the exact format of the mnemonic table and
  20995. save some repeated characters. */
  20996. /* The normal sort of mnemonic; has a Thumb variant; takes a conditional suffix. */
  20997. #define TxCE(mnem, op, top, nops, ops, ae, te) \
  20998. { mnem, OPS##nops ops, OT_csuffix, 0x##op, top, ARM_VARIANT, \
  20999. THUMB_VARIANT, do_##ae, do_##te, 0 }
  21000. /* Two variants of the above - TCE for a numeric Thumb opcode, tCE for
  21001. a T_MNEM_xyz enumerator. */
  21002. #define TCE(mnem, aop, top, nops, ops, ae, te) \
  21003. TxCE (mnem, aop, 0x##top, nops, ops, ae, te)
  21004. #define tCE(mnem, aop, top, nops, ops, ae, te) \
  21005. TxCE (mnem, aop, T_MNEM##top, nops, ops, ae, te)
  21006. /* Second most common sort of mnemonic: has a Thumb variant, takes a conditional
  21007. infix after the third character. */
  21008. #define TxC3(mnem, op, top, nops, ops, ae, te) \
  21009. { mnem, OPS##nops ops, OT_cinfix3, 0x##op, top, ARM_VARIANT, \
  21010. THUMB_VARIANT, do_##ae, do_##te, 0 }
  21011. #define TxC3w(mnem, op, top, nops, ops, ae, te) \
  21012. { mnem, OPS##nops ops, OT_cinfix3_deprecated, 0x##op, top, ARM_VARIANT, \
  21013. THUMB_VARIANT, do_##ae, do_##te, 0 }
  21014. #define TC3(mnem, aop, top, nops, ops, ae, te) \
  21015. TxC3 (mnem, aop, 0x##top, nops, ops, ae, te)
  21016. #define TC3w(mnem, aop, top, nops, ops, ae, te) \
  21017. TxC3w (mnem, aop, 0x##top, nops, ops, ae, te)
  21018. #define tC3(mnem, aop, top, nops, ops, ae, te) \
  21019. TxC3 (mnem, aop, T_MNEM##top, nops, ops, ae, te)
  21020. #define tC3w(mnem, aop, top, nops, ops, ae, te) \
  21021. TxC3w (mnem, aop, T_MNEM##top, nops, ops, ae, te)
  21022. /* Mnemonic that cannot be conditionalized. The ARM condition-code
  21023. field is still 0xE. Many of the Thumb variants can be executed
  21024. conditionally, so this is checked separately. */
  21025. #define TUE(mnem, op, top, nops, ops, ae, te) \
  21026. { mnem, OPS##nops ops, OT_unconditional, 0x##op, 0x##top, ARM_VARIANT, \
  21027. THUMB_VARIANT, do_##ae, do_##te, 0 }
  21028. /* Same as TUE but the encoding function for ARM and Thumb modes is the same.
  21029. Used by mnemonics that have very minimal differences in the encoding for
  21030. ARM and Thumb variants and can be handled in a common function. */
  21031. #define TUEc(mnem, op, top, nops, ops, en) \
  21032. { mnem, OPS##nops ops, OT_unconditional, 0x##op, 0x##top, ARM_VARIANT, \
  21033. THUMB_VARIANT, do_##en, do_##en, 0 }
  21034. /* Mnemonic that cannot be conditionalized, and bears 0xF in its ARM
  21035. condition code field. */
  21036. #define TUF(mnem, op, top, nops, ops, ae, te) \
  21037. { mnem, OPS##nops ops, OT_unconditionalF, 0x##op, 0x##top, ARM_VARIANT, \
  21038. THUMB_VARIANT, do_##ae, do_##te, 0 }
  21039. /* ARM-only variants of all the above. */
  21040. #define CE(mnem, op, nops, ops, ae) \
  21041. { mnem, OPS##nops ops, OT_csuffix, 0x##op, 0x0, ARM_VARIANT, 0, do_##ae, NULL, 0 }
  /* Unlike CE (whose callers pass a string literal), C3 takes a bare-token
     mnemonic and stringizes it with #mnem -- compare CE("rsc",...) with
     C3(rscs,...) in the table below.  */
  21042. #define C3(mnem, op, nops, ops, ae) \
  21043. { #mnem, OPS##nops ops, OT_cinfix3, 0x##op, 0x0, ARM_VARIANT, 0, do_##ae, NULL, 0 }
  21044. /* Thumb-only variants of TCE and TUE.  No ARM opcode (0x0), no ARM
  21045. variant, and a NULL ARM encoder.  */
  21045. #define ToC(mnem, top, nops, ops, te) \
  21046. { mnem, OPS##nops ops, OT_csuffix, 0x0, 0x##top, 0, THUMB_VARIANT, NULL, \
  21047. do_##te, 0 }
  21048. #define ToU(mnem, top, nops, ops, te) \
  21049. { mnem, OPS##nops ops, OT_unconditional, 0x0, 0x##top, 0, THUMB_VARIANT, \
  21050. NULL, do_##te, 0 }
  21051. /* T_MNEM_xyz enumerator variants of ToC. */
  21052. #define toC(mnem, top, nops, ops, te) \
  21053. { mnem, OPS##nops ops, OT_csuffix, 0x0, T_MNEM##top, 0, THUMB_VARIANT, NULL, \
  21054. do_##te, 0 }
  21055. /* T_MNEM_xyz enumerator variants of ToU. */
  21056. #define toU(mnem, top, nops, ops, te) \
  21057. { mnem, OPS##nops ops, OT_unconditional, 0x0, T_MNEM##top, 0, THUMB_VARIANT, \
  21058. NULL, do_##te, 0 }
  21059. /* Legacy mnemonics that always have conditional infix after the third
  21060. character. */
  21061. #define CL(mnem, op, nops, ops, ae) \
  21062. { mnem, OPS##nops ops, OT_cinfix3_legacy, \
  21063. 0x##op, 0x0, ARM_VARIANT, 0, do_##ae, NULL, 0 }
  21064. /* Coprocessor instructions. Isomorphic between Arm and Thumb-2.
     The Thumb opcode is the ARM opcode with the 0xE condition prepended
     (0xe##op); both modes share one encoder.  */
  21065. #define cCE(mnem, op, nops, ops, ae) \
  21066. { mnem, OPS##nops ops, OT_csuffix, 0x##op, 0xe##op, ARM_VARIANT, ARM_VARIANT, do_##ae, do_##ae, 0 }
  21067. /* mov instructions that are shared between coprocessor and MVE. */
  21068. #define mcCE(mnem, op, nops, ops, ae) \
  21069. { #mnem, OPS##nops ops, OT_csuffix, 0x##op, 0xe##op, ARM_VARIANT, THUMB_VARIANT, do_##ae, do_##ae, 0 }
  21070. /* Legacy coprocessor instructions where conditional infix and conditional
  21071. suffix are ambiguous. For consistency this includes all FPA instructions,
  21072. not just the potentially ambiguous ones. */
  21073. #define cCL(mnem, op, nops, ops, ae) \
  21074. { mnem, OPS##nops ops, OT_cinfix3_legacy, \
  21075. 0x##op, 0xe##op, ARM_VARIANT, ARM_VARIANT, do_##ae, do_##ae, 0 }
  21076. /* Coprocessor, takes either a suffix or a position-3 infix
  21077. (for an FPA corner case). */
  21078. #define C3E(mnem, op, nops, ops, ae) \
  21079. { mnem, OPS##nops ops, OT_csuf_or_in3, \
  21080. 0x##op, 0xe##op, ARM_VARIANT, ARM_VARIANT, do_##ae, do_##ae, 0 }
  /* Mnemonic built as m1<cond>m3: the condition code m2 is spliced in
     immediately after the m1 prefix via string-literal concatenation.
     An empty m2 stringizes to "" (sizeof == 1, just the NUL), selecting
     the unconditional tag; otherwise the tag encodes the infix position
     as OT_odd_infix_0 + strlen (m1).  NOTE(review): position semantics
     inferred from the tag arithmetic -- confirm against the
     OT_odd_infix handling in the mnemonic parser.  */
  21081. #define xCM_(m1, m2, m3, op, nops, ops, ae) \
  21082. { m1 #m2 m3, OPS##nops ops, \
  21083. sizeof (#m2) == 1 ? OT_odd_infix_unc : OT_odd_infix_0 + sizeof (m1) - 1, \
  21084. 0x##op, 0x0, ARM_VARIANT, 0, do_##ae, NULL, 0 }
  /* Expand to one table entry per condition code (plus the bare,
     unconditional spelling).  */
  21085. #define CM(m1, m2, op, nops, ops, ae) \
  21086. xCM_ (m1, , m2, op, nops, ops, ae), \
  21087. xCM_ (m1, eq, m2, op, nops, ops, ae), \
  21088. xCM_ (m1, ne, m2, op, nops, ops, ae), \
  21089. xCM_ (m1, cs, m2, op, nops, ops, ae), \
  21090. xCM_ (m1, hs, m2, op, nops, ops, ae), \
  21091. xCM_ (m1, cc, m2, op, nops, ops, ae), \
  21092. xCM_ (m1, ul, m2, op, nops, ops, ae), \
  21093. xCM_ (m1, lo, m2, op, nops, ops, ae), \
  21094. xCM_ (m1, mi, m2, op, nops, ops, ae), \
  21095. xCM_ (m1, pl, m2, op, nops, ops, ae), \
  21096. xCM_ (m1, vs, m2, op, nops, ops, ae), \
  21097. xCM_ (m1, vc, m2, op, nops, ops, ae), \
  21098. xCM_ (m1, hi, m2, op, nops, ops, ae), \
  21099. xCM_ (m1, ls, m2, op, nops, ops, ae), \
  21100. xCM_ (m1, ge, m2, op, nops, ops, ae), \
  21101. xCM_ (m1, lt, m2, op, nops, ops, ae), \
  21102. xCM_ (m1, gt, m2, op, nops, ops, ae), \
  21103. xCM_ (m1, le, m2, op, nops, ops, ae), \
  21104. xCM_ (m1, al, m2, op, nops, ops, ae)
  /* ARM-only unconditional mnemonics: Thumb opcode 0, Thumb variant 0,
     and a NULL Thumb encoder.  Takes a bare-token mnemonic (#mnem).  */
  21105. #define UE(mnem, op, nops, ops, ae) \
  21106. { #mnem, OPS##nops ops, OT_unconditional, 0x##op, 0, ARM_VARIANT, 0, do_##ae, NULL, 0 }
  21107. #define UF(mnem, op, nops, ops, ae) \
  21108. { #mnem, OPS##nops ops, OT_unconditionalF, 0x##op, 0, ARM_VARIANT, 0, do_##ae, NULL, 0 }
  21109. /* Neon data-processing. ARM versions are unconditional with cond=0xf.
  21110. The Thumb and ARM variants are mostly the same (bits 0-23 and 24/28), so we
  21111. use the same encoding function for each. */
  21112. #define NUF(mnem, op, nops, ops, enc) \
  21113. { #mnem, OPS##nops ops, OT_unconditionalF, 0x##op, 0x##op, \
  21114. ARM_VARIANT, THUMB_VARIANT, do_##enc, do_##enc, 0 }
  21115. /* Neon data processing, version which indirects through neon_enc_tab for
  21116. the various overloaded versions of opcodes. */
  21117. #define nUF(mnem, op, nops, ops, enc) \
  21118. { #mnem, OPS##nops ops, OT_unconditionalF, N_MNEM##op, N_MNEM##op, \
  21119. ARM_VARIANT, THUMB_VARIANT, do_##enc, do_##enc, 0 }
  21120. /* Neon insn with conditional suffix for the ARM version, non-overloaded
  21121. version. */
  21122. #define NCE_tag(mnem, op, nops, ops, enc, tag, mve_p) \
  21123. { #mnem, OPS##nops ops, tag, 0x##op, 0x##op, ARM_VARIANT, \
  21124. THUMB_VARIANT, do_##enc, do_##enc, mve_p }
  21125. #define NCE(mnem, op, nops, ops, enc) \
  21126. NCE_tag (mnem, op, nops, ops, enc, OT_csuffix, 0)
  21127. #define NCEF(mnem, op, nops, ops, enc) \
  21128. NCE_tag (mnem, op, nops, ops, enc, OT_csuffixF, 0)
  21129. /* Neon insn with conditional suffix for the ARM version, overloaded types. */
  21130. #define nCE_tag(mnem, op, nops, ops, enc, tag, mve_p) \
  21131. { #mnem, OPS##nops ops, tag, N_MNEM##op, N_MNEM##op, \
  21132. ARM_VARIANT, THUMB_VARIANT, do_##enc, do_##enc, mve_p }
  21133. #define nCE(mnem, op, nops, ops, enc) \
  21134. nCE_tag (mnem, op, nops, ops, enc, OT_csuffix, 0)
  21135. #define nCEF(mnem, op, nops, ops, enc) \
  21136. nCE_tag (mnem, op, nops, ops, enc, OT_csuffixF, 0)
  21137. /* MVE instruction with conditional suffix (F form): opcodes come from
     the M_MNEM enumerators and the entry is flagged as MVE-predicated
     (trailing 1, the mve_p slot of NCE_tag above). */
  21138. #define mCEF(mnem, op, nops, ops, enc) \
  21139. { #mnem, OPS##nops ops, OT_csuffixF, M_MNEM##op, M_MNEM##op, \
  21140. ARM_VARIANT, THUMB_VARIANT, do_##enc, do_##enc, 1 }
  21141. /* nCEF but for MVE predicated instructions. */
  21142. #define mnCEF(mnem, op, nops, ops, enc) \
  21143. nCE_tag (mnem, op, nops, ops, enc, OT_csuffixF, 1)
  21144. /* nCE but for MVE predicated instructions. */
  21145. #define mnCE(mnem, op, nops, ops, enc) \
  21146. nCE_tag (mnem, op, nops, ops, enc, OT_csuffix, 1)
  21147. /* NUF but for potentially MVE predicated instructions. */
  21148. #define MNUF(mnem, op, nops, ops, enc) \
  21149. { #mnem, OPS##nops ops, OT_unconditionalF, 0x##op, 0x##op, \
  21150. ARM_VARIANT, THUMB_VARIANT, do_##enc, do_##enc, 1 }
  21151. /* nUF but for potentially MVE predicated instructions. */
  21152. #define mnUF(mnem, op, nops, ops, enc) \
  21153. { #mnem, OPS##nops ops, OT_unconditionalF, N_MNEM##op, N_MNEM##op, \
  21154. ARM_VARIANT, THUMB_VARIANT, do_##enc, do_##enc, 1 }
  21155. /* ToC but for potentially MVE predicated instructions. */
  21156. #define mToC(mnem, top, nops, ops, te) \
  21157. { mnem, OPS##nops ops, OT_csuffix, 0x0, 0x##top, 0, THUMB_VARIANT, NULL, \
  21158. do_##te, 1 }
  21159. /* NCE but for MVE predicated instructions. */
  21160. #define MNCE(mnem, op, nops, ops, enc) \
  21161. NCE_tag (mnem, op, nops, ops, enc, OT_csuffix, 1)
  21162. /* NCEF but for MVE predicated instructions. */
  21163. #define MNCEF(mnem, op, nops, ops, enc) \
  21164. NCE_tag (mnem, op, nops, ops, enc, OT_csuffixF, 1)
  /* With an encoder argument of 0, do_##ae pastes to do_0, i.e. 0 --
     giving a null encoding-function pointer for table entries that
     have no encoder for that mode.  */
  21165. #define do_0 0
  21166. static const struct asm_opcode insns[] =
  21167. {
  21168. #define ARM_VARIANT & arm_ext_v1 /* Core ARM Instructions. */
  21169. #define THUMB_VARIANT & arm_ext_v4t
  21170. tCE("and", 0000000, _and, 3, (RR, oRR, SH), arit, t_arit3c),
  21171. tC3("ands", 0100000, _ands, 3, (RR, oRR, SH), arit, t_arit3c),
  21172. tCE("eor", 0200000, _eor, 3, (RR, oRR, SH), arit, t_arit3c),
  21173. tC3("eors", 0300000, _eors, 3, (RR, oRR, SH), arit, t_arit3c),
  21174. tCE("sub", 0400000, _sub, 3, (RR, oRR, SH), arit, t_add_sub),
  21175. tC3("subs", 0500000, _subs, 3, (RR, oRR, SH), arit, t_add_sub),
  21176. tCE("add", 0800000, _add, 3, (RR, oRR, SHG), arit, t_add_sub),
  21177. tC3("adds", 0900000, _adds, 3, (RR, oRR, SHG), arit, t_add_sub),
  21178. tCE("adc", 0a00000, _adc, 3, (RR, oRR, SH), arit, t_arit3c),
  21179. tC3("adcs", 0b00000, _adcs, 3, (RR, oRR, SH), arit, t_arit3c),
  21180. tCE("sbc", 0c00000, _sbc, 3, (RR, oRR, SH), arit, t_arit3),
  21181. tC3("sbcs", 0d00000, _sbcs, 3, (RR, oRR, SH), arit, t_arit3),
  21182. tCE("orr", 1800000, _orr, 3, (RR, oRR, SH), arit, t_arit3c),
  21183. tC3("orrs", 1900000, _orrs, 3, (RR, oRR, SH), arit, t_arit3c),
  21184. tCE("bic", 1c00000, _bic, 3, (RR, oRR, SH), arit, t_arit3),
  21185. tC3("bics", 1d00000, _bics, 3, (RR, oRR, SH), arit, t_arit3),
  21186. /* The p-variants of tst/cmp/cmn/teq (below) are the pre-V6 mechanism
  21187. for setting PSR flag bits. They are obsolete in V6 and do not
  21188. have Thumb equivalents. */
  21189. tCE("tst", 1100000, _tst, 2, (RR, SH), cmp, t_mvn_tst),
  21190. tC3w("tsts", 1100000, _tst, 2, (RR, SH), cmp, t_mvn_tst),
  21191. CL("tstp", 110f000, 2, (RR, SH), cmp),
  21192. tCE("cmp", 1500000, _cmp, 2, (RR, SH), cmp, t_mov_cmp),
  21193. tC3w("cmps", 1500000, _cmp, 2, (RR, SH), cmp, t_mov_cmp),
  21194. CL("cmpp", 150f000, 2, (RR, SH), cmp),
  21195. tCE("cmn", 1700000, _cmn, 2, (RR, SH), cmp, t_mvn_tst),
  21196. tC3w("cmns", 1700000, _cmn, 2, (RR, SH), cmp, t_mvn_tst),
  21197. CL("cmnp", 170f000, 2, (RR, SH), cmp),
  21198. tCE("mov", 1a00000, _mov, 2, (RR, SH), mov, t_mov_cmp),
  21199. tC3("movs", 1b00000, _movs, 2, (RR, SHG), mov, t_mov_cmp),
  21200. tCE("mvn", 1e00000, _mvn, 2, (RR, SH), mov, t_mvn_tst),
  21201. tC3("mvns", 1f00000, _mvns, 2, (RR, SH), mov, t_mvn_tst),
  21202. tCE("ldr", 4100000, _ldr, 2, (RR, ADDRGLDR),ldst, t_ldst),
  21203. tC3("ldrb", 4500000, _ldrb, 2, (RRnpc_npcsp, ADDRGLDR),ldst, t_ldst),
  21204. tCE("str", 4000000, _str, _2, (MIX_ARM_THUMB_OPERANDS (OP_RR,
  21205. OP_RRnpc),
  21206. OP_ADDRGLDR),ldst, t_ldst),
  21207. tC3("strb", 4400000, _strb, 2, (RRnpc_npcsp, ADDRGLDR),ldst, t_ldst),
  21208. tCE("stm", 8800000, _stmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21209. tC3("stmia", 8800000, _stmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21210. tC3("stmea", 8800000, _stmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21211. tCE("ldm", 8900000, _ldmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21212. tC3("ldmia", 8900000, _ldmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21213. tC3("ldmfd", 8900000, _ldmia, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21214. tCE("b", a000000, _b, 1, (EXPr), branch, t_branch),
  21215. TCE("bl", b000000, f000f800, 1, (EXPr), bl, t_branch23),
  21216. /* Pseudo ops. */
  21217. tCE("adr", 28f0000, _adr, 2, (RR, EXP), adr, t_adr),
  21218. C3(adrl, 28f0000, 2, (RR, EXP), adrl),
  21219. tCE("nop", 1a00000, _nop, 1, (oI255c), nop, t_nop),
  21220. tCE("udf", 7f000f0, _udf, 1, (oIffffb), bkpt, t_udf),
  21221. /* Thumb-compatibility pseudo ops. */
  21222. tCE("lsl", 1a00000, _lsl, 3, (RR, oRR, SH), shift, t_shift),
  21223. tC3("lsls", 1b00000, _lsls, 3, (RR, oRR, SH), shift, t_shift),
  21224. tCE("lsr", 1a00020, _lsr, 3, (RR, oRR, SH), shift, t_shift),
  21225. tC3("lsrs", 1b00020, _lsrs, 3, (RR, oRR, SH), shift, t_shift),
  21226. tCE("asr", 1a00040, _asr, 3, (RR, oRR, SH), shift, t_shift),
  21227. tC3("asrs", 1b00040, _asrs, 3, (RR, oRR, SH), shift, t_shift),
  21228. tCE("ror", 1a00060, _ror, 3, (RR, oRR, SH), shift, t_shift),
  21229. tC3("rors", 1b00060, _rors, 3, (RR, oRR, SH), shift, t_shift),
  21230. tCE("neg", 2600000, _neg, 2, (RR, RR), rd_rn, t_neg),
  21231. tC3("negs", 2700000, _negs, 2, (RR, RR), rd_rn, t_neg),
  21232. tCE("push", 92d0000, _push, 1, (REGLST), push_pop, t_push_pop),
  21233. tCE("pop", 8bd0000, _pop, 1, (REGLST), push_pop, t_push_pop),
  21234. /* These may simplify to neg. */
  21235. TCE("rsb", 0600000, ebc00000, 3, (RR, oRR, SH), arit, t_rsb),
  21236. TC3("rsbs", 0700000, ebd00000, 3, (RR, oRR, SH), arit, t_rsb),
  21237. #undef THUMB_VARIANT
  21238. #define THUMB_VARIANT & arm_ext_os
  21239. TCE("swi", f000000, df00, 1, (EXPi), swi, t_swi),
  21240. TCE("svc", f000000, df00, 1, (EXPi), swi, t_swi),
  21241. #undef THUMB_VARIANT
  21242. #define THUMB_VARIANT & arm_ext_v6
  21243. TCE("cpy", 1a00000, 4600, 2, (RR, RR), rd_rm, t_cpy),
  21244. /* V1 instructions with no Thumb analogue prior to V6T2. */
  21245. #undef THUMB_VARIANT
  21246. #define THUMB_VARIANT & arm_ext_v6t2
  21247. TCE("teq", 1300000, ea900f00, 2, (RR, SH), cmp, t_mvn_tst),
  21248. TC3w("teqs", 1300000, ea900f00, 2, (RR, SH), cmp, t_mvn_tst),
  21249. CL("teqp", 130f000, 2, (RR, SH), cmp),
  21250. TC3("ldrt", 4300000, f8500e00, 2, (RRnpc_npcsp, ADDR),ldstt, t_ldstt),
  21251. TC3("ldrbt", 4700000, f8100e00, 2, (RRnpc_npcsp, ADDR),ldstt, t_ldstt),
  21252. TC3("strt", 4200000, f8400e00, 2, (RR_npcsp, ADDR), ldstt, t_ldstt),
  21253. TC3("strbt", 4600000, f8000e00, 2, (RRnpc_npcsp, ADDR),ldstt, t_ldstt),
  21254. TC3("stmdb", 9000000, e9000000, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21255. TC3("stmfd", 9000000, e9000000, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21256. TC3("ldmdb", 9100000, e9100000, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21257. TC3("ldmea", 9100000, e9100000, 2, (RRw, REGLST), ldmstm, t_ldmstm),
  21258. /* V1 instructions with no Thumb analogue at all. */
  21259. CE("rsc", 0e00000, 3, (RR, oRR, SH), arit),
  21260. C3(rscs, 0f00000, 3, (RR, oRR, SH), arit),
  21261. C3(stmib, 9800000, 2, (RRw, REGLST), ldmstm),
  21262. C3(stmfa, 9800000, 2, (RRw, REGLST), ldmstm),
  21263. C3(stmda, 8000000, 2, (RRw, REGLST), ldmstm),
  21264. C3(stmed, 8000000, 2, (RRw, REGLST), ldmstm),
  21265. C3(ldmib, 9900000, 2, (RRw, REGLST), ldmstm),
  21266. C3(ldmed, 9900000, 2, (RRw, REGLST), ldmstm),
  21267. C3(ldmda, 8100000, 2, (RRw, REGLST), ldmstm),
  21268. C3(ldmfa, 8100000, 2, (RRw, REGLST), ldmstm),
  21269. #undef ARM_VARIANT
  21270. #define ARM_VARIANT & arm_ext_v2 /* ARM 2 - multiplies. */
  21271. #undef THUMB_VARIANT
  21272. #define THUMB_VARIANT & arm_ext_v4t
  21273. tCE("mul", 0000090, _mul, 3, (RRnpc, RRnpc, oRR), mul, t_mul),
  21274. tC3("muls", 0100090, _muls, 3, (RRnpc, RRnpc, oRR), mul, t_mul),
  21275. #undef THUMB_VARIANT
  21276. #define THUMB_VARIANT & arm_ext_v6t2
  21277. TCE("mla", 0200090, fb000000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mlas, t_mla),
  21278. C3(mlas, 0300090, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mlas),
  21279. /* Generic coprocessor instructions. */
  21280. TCE("cdp", e000000, ee000000, 6, (RCP, I15b, RCN, RCN, RCN, oI7b), cdp, cdp),
  21281. TCE("ldc", c100000, ec100000, 3, (RCP, RCN, ADDRGLDC), lstc, lstc),
  21282. TC3("ldcl", c500000, ec500000, 3, (RCP, RCN, ADDRGLDC), lstc, lstc),
  21283. TCE("stc", c000000, ec000000, 3, (RCP, RCN, ADDRGLDC), lstc, lstc),
  21284. TC3("stcl", c400000, ec400000, 3, (RCP, RCN, ADDRGLDC), lstc, lstc),
  21285. TCE("mcr", e000010, ee000010, 6, (RCP, I7b, RR, RCN, RCN, oI7b), co_reg, co_reg),
  21286. TCE("mrc", e100010, ee100010, 6, (RCP, I7b, APSR_RR, RCN, RCN, oI7b), co_reg, co_reg),
  21287. #undef ARM_VARIANT
  21288. #define ARM_VARIANT & arm_ext_v2s /* ARM 3 - swp instructions. */
  21289. CE("swp", 1000090, 3, (RRnpc, RRnpc, RRnpcb), rd_rm_rn),
  21290. C3(swpb, 1400090, 3, (RRnpc, RRnpc, RRnpcb), rd_rm_rn),
  21291. #undef ARM_VARIANT
  21292. #define ARM_VARIANT & arm_ext_v3 /* ARM 6 Status register instructions. */
  21293. #undef THUMB_VARIANT
  21294. #define THUMB_VARIANT & arm_ext_msr
  21295. TCE("mrs", 1000000, f3e08000, 2, (RRnpc, rPSR), mrs, t_mrs),
  21296. TCE("msr", 120f000, f3808000, 2, (wPSR, RR_EXi), msr, t_msr),
  21297. #undef ARM_VARIANT
  21298. #define ARM_VARIANT & arm_ext_v3m /* ARM 7M long multiplies. */
  21299. #undef THUMB_VARIANT
  21300. #define THUMB_VARIANT & arm_ext_v6t2
  21301. TCE("smull", 0c00090, fb800000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull, t_mull),
  21302. CM("smull","s", 0d00090, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull),
  21303. TCE("umull", 0800090, fba00000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull, t_mull),
  21304. CM("umull","s", 0900090, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull),
  21305. TCE("smlal", 0e00090, fbc00000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull, t_mull),
  21306. CM("smlal","s", 0f00090, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull),
  21307. TCE("umlal", 0a00090, fbe00000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull, t_mull),
  21308. CM("umlal","s", 0b00090, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mull),
  21309. #undef ARM_VARIANT
  21310. #define ARM_VARIANT & arm_ext_v4 /* ARM Architecture 4. */
  21311. #undef THUMB_VARIANT
  21312. #define THUMB_VARIANT & arm_ext_v4t
  21313. tC3("ldrh", 01000b0, _ldrh, 2, (RRnpc_npcsp, ADDRGLDRS), ldstv4, t_ldst),
  21314. tC3("strh", 00000b0, _strh, 2, (RRnpc_npcsp, ADDRGLDRS), ldstv4, t_ldst),
  21315. tC3("ldrsh", 01000f0, _ldrsh, 2, (RRnpc_npcsp, ADDRGLDRS), ldstv4, t_ldst),
  21316. tC3("ldrsb", 01000d0, _ldrsb, 2, (RRnpc_npcsp, ADDRGLDRS), ldstv4, t_ldst),
  21317. tC3("ldsh", 01000f0, _ldrsh, 2, (RRnpc_npcsp, ADDRGLDRS), ldstv4, t_ldst),
  21318. tC3("ldsb", 01000d0, _ldrsb, 2, (RRnpc_npcsp, ADDRGLDRS), ldstv4, t_ldst),
  21319. #undef ARM_VARIANT
  21320. #define ARM_VARIANT & arm_ext_v4t_5
  21321. /* ARM Architecture 4T. */
  21322. /* Note: bx (and blx) are required on V5, even if the processor does
  21323. not support Thumb. */
  21324. TCE("bx", 12fff10, 4700, 1, (RR), bx, t_bx),
  21325. #undef ARM_VARIANT
  21326. #define ARM_VARIANT & arm_ext_v5 /* ARM Architecture 5T. */
  21327. #undef THUMB_VARIANT
  21328. #define THUMB_VARIANT & arm_ext_v5t
  21329. /* Note: blx has 2 variants; the .value coded here is for
  21330. BLX(2). Only this variant has conditional execution. */
  21331. TCE("blx", 12fff30, 4780, 1, (RR_EXr), blx, t_blx),
  21332. TUE("bkpt", 1200070, be00, 1, (oIffffb), bkpt, t_bkpt),
  21333. #undef THUMB_VARIANT
  21334. #define THUMB_VARIANT & arm_ext_v6t2
  21335. TCE("clz", 16f0f10, fab0f080, 2, (RRnpc, RRnpc), rd_rm, t_clz),
  21336. TUF("ldc2", c100000, fc100000, 3, (RCP, RCN, ADDRGLDC), lstc, lstc),
  21337. TUF("ldc2l", c500000, fc500000, 3, (RCP, RCN, ADDRGLDC), lstc, lstc),
  21338. TUF("stc2", c000000, fc000000, 3, (RCP, RCN, ADDRGLDC), lstc, lstc),
  21339. TUF("stc2l", c400000, fc400000, 3, (RCP, RCN, ADDRGLDC), lstc, lstc),
  21340. TUF("cdp2", e000000, fe000000, 6, (RCP, I15b, RCN, RCN, RCN, oI7b), cdp, cdp),
  21341. TUF("mcr2", e000010, fe000010, 6, (RCP, I7b, RR, RCN, RCN, oI7b), co_reg, co_reg),
  21342. TUF("mrc2", e100010, fe100010, 6, (RCP, I7b, RR, RCN, RCN, oI7b), co_reg, co_reg),
  21343. #undef ARM_VARIANT
  21344. #define ARM_VARIANT & arm_ext_v5exp /* ARM Architecture 5TExP. */
  21345. #undef THUMB_VARIANT
  21346. #define THUMB_VARIANT & arm_ext_v5exp
  21347. TCE("smlabb", 1000080, fb100000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smla, t_mla),
  21348. TCE("smlatb", 10000a0, fb100020, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smla, t_mla),
  21349. TCE("smlabt", 10000c0, fb100010, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smla, t_mla),
  21350. TCE("smlatt", 10000e0, fb100030, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smla, t_mla),
  21351. TCE("smlawb", 1200080, fb300000, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smla, t_mla),
  21352. TCE("smlawt", 12000c0, fb300010, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smla, t_mla),
  21353. TCE("smlalbb", 1400080, fbc00080, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smlal, t_mlal),
  21354. TCE("smlaltb", 14000a0, fbc000a0, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smlal, t_mlal),
  21355. TCE("smlalbt", 14000c0, fbc00090, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smlal, t_mlal),
  21356. TCE("smlaltt", 14000e0, fbc000b0, 4, (RRnpc, RRnpc, RRnpc, RRnpc), smlal, t_mlal),
  21357. TCE("smulbb", 1600080, fb10f000, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21358. TCE("smultb", 16000a0, fb10f020, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21359. TCE("smulbt", 16000c0, fb10f010, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21360. TCE("smultt", 16000e0, fb10f030, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21361. TCE("smulwb", 12000a0, fb30f000, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21362. TCE("smulwt", 12000e0, fb30f010, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21363. TCE("qadd", 1000050, fa80f080, 3, (RRnpc, RRnpc, RRnpc), rd_rm_rn, t_simd2),
  21364. TCE("qdadd", 1400050, fa80f090, 3, (RRnpc, RRnpc, RRnpc), rd_rm_rn, t_simd2),
  21365. TCE("qsub", 1200050, fa80f0a0, 3, (RRnpc, RRnpc, RRnpc), rd_rm_rn, t_simd2),
  21366. TCE("qdsub", 1600050, fa80f0b0, 3, (RRnpc, RRnpc, RRnpc), rd_rm_rn, t_simd2),
  21367. #undef ARM_VARIANT
  21368. #define ARM_VARIANT & arm_ext_v5e /* ARM Architecture 5TE. */
  21369. #undef THUMB_VARIANT
  21370. #define THUMB_VARIANT & arm_ext_v6t2
  21371. TUF("pld", 450f000, f810f000, 1, (ADDR), pld, t_pld),
  21372. TC3("ldrd", 00000d0, e8500000, 3, (RRnpc_npcsp, oRRnpc_npcsp, ADDRGLDRS),
  21373. ldrd, t_ldstd),
  21374. TC3("strd", 00000f0, e8400000, 3, (RRnpc_npcsp, oRRnpc_npcsp,
  21375. ADDRGLDRS), ldrd, t_ldstd),
  21376. TCE("mcrr", c400000, ec400000, 5, (RCP, I15b, RRnpc, RRnpc, RCN), co_reg2c, co_reg2c),
  21377. TCE("mrrc", c500000, ec500000, 5, (RCP, I15b, RRnpc, RRnpc, RCN), co_reg2c, co_reg2c),
  21378. #undef ARM_VARIANT
  21379. #define ARM_VARIANT & arm_ext_v5j /* ARM Architecture 5TEJ. */
  21380. TCE("bxj", 12fff20, f3c08f00, 1, (RR), bxj, t_bxj),
  21381. #undef ARM_VARIANT
  21382. #define ARM_VARIANT & arm_ext_v6 /* ARM V6. */
  21383. #undef THUMB_VARIANT
  21384. #define THUMB_VARIANT & arm_ext_v6
  21385. TUF("cpsie", 1080000, b660, 2, (CPSF, oI31b), cpsi, t_cpsi),
  21386. TUF("cpsid", 10c0000, b670, 2, (CPSF, oI31b), cpsi, t_cpsi),
  21387. tCE("rev", 6bf0f30, _rev, 2, (RRnpc, RRnpc), rd_rm, t_rev),
  21388. tCE("rev16", 6bf0fb0, _rev16, 2, (RRnpc, RRnpc), rd_rm, t_rev),
  21389. tCE("revsh", 6ff0fb0, _revsh, 2, (RRnpc, RRnpc), rd_rm, t_rev),
  21390. tCE("sxth", 6bf0070, _sxth, 3, (RRnpc, RRnpc, oROR), sxth, t_sxth),
  21391. tCE("uxth", 6ff0070, _uxth, 3, (RRnpc, RRnpc, oROR), sxth, t_sxth),
  21392. tCE("sxtb", 6af0070, _sxtb, 3, (RRnpc, RRnpc, oROR), sxth, t_sxth),
  21393. tCE("uxtb", 6ef0070, _uxtb, 3, (RRnpc, RRnpc, oROR), sxth, t_sxth),
  21394. TUF("setend", 1010000, b650, 1, (ENDI), setend, t_setend),
  21395. #undef THUMB_VARIANT
  21396. #define THUMB_VARIANT & arm_ext_v6t2_v8m
  21397. TCE("ldrex", 1900f9f, e8500f00, 2, (RRnpc_npcsp, ADDR), ldrex, t_ldrex),
  21398. TCE("strex", 1800f90, e8400000, 3, (RRnpc_npcsp, RRnpc_npcsp, ADDR),
  21399. strex, t_strex),
  21400. #undef THUMB_VARIANT
  21401. #define THUMB_VARIANT & arm_ext_v6t2
  21402. TUF("mcrr2", c400000, fc400000, 5, (RCP, I15b, RRnpc, RRnpc, RCN), co_reg2c, co_reg2c),
  21403. TUF("mrrc2", c500000, fc500000, 5, (RCP, I15b, RRnpc, RRnpc, RCN), co_reg2c, co_reg2c),
  21404. TCE("ssat", 6a00010, f3000000, 4, (RRnpc, I32, RRnpc, oSHllar),ssat, t_ssat),
  21405. TCE("usat", 6e00010, f3800000, 4, (RRnpc, I31, RRnpc, oSHllar),usat, t_usat),
  21406. /* ARM V6 not included in V7M. */
  21407. #undef THUMB_VARIANT
  21408. #define THUMB_VARIANT & arm_ext_v6_notm
  21409. TUF("rfeia", 8900a00, e990c000, 1, (RRw), rfe, rfe),
  21410. TUF("rfe", 8900a00, e990c000, 1, (RRw), rfe, rfe),
  21411. UF(rfeib, 9900a00, 1, (RRw), rfe),
  21412. UF(rfeda, 8100a00, 1, (RRw), rfe),
  21413. TUF("rfedb", 9100a00, e810c000, 1, (RRw), rfe, rfe),
  21414. TUF("rfefd", 8900a00, e990c000, 1, (RRw), rfe, rfe),
  21415. UF(rfefa, 8100a00, 1, (RRw), rfe),
  21416. TUF("rfeea", 9100a00, e810c000, 1, (RRw), rfe, rfe),
  21417. UF(rfeed, 9900a00, 1, (RRw), rfe),
  21418. TUF("srsia", 8c00500, e980c000, 2, (oRRw, I31w), srs, srs),
  21419. TUF("srs", 8c00500, e980c000, 2, (oRRw, I31w), srs, srs),
  21420. TUF("srsea", 8c00500, e980c000, 2, (oRRw, I31w), srs, srs),
  21421. UF(srsib, 9c00500, 2, (oRRw, I31w), srs),
  21422. UF(srsfa, 9c00500, 2, (oRRw, I31w), srs),
  21423. UF(srsda, 8400500, 2, (oRRw, I31w), srs),
  21424. UF(srsed, 8400500, 2, (oRRw, I31w), srs),
  21425. TUF("srsdb", 9400500, e800c000, 2, (oRRw, I31w), srs, srs),
  21426. TUF("srsfd", 9400500, e800c000, 2, (oRRw, I31w), srs, srs),
  21427. TUF("cps", 1020000, f3af8100, 1, (I31b), imm0, t_cps),
  21428. /* ARM V6 not included in V7M (eg. integer SIMD). */
  21429. #undef THUMB_VARIANT
  21430. #define THUMB_VARIANT & arm_ext_v6_dsp
  21431. TCE("pkhbt", 6800010, eac00000, 4, (RRnpc, RRnpc, RRnpc, oSHll), pkhbt, t_pkhbt),
  21432. TCE("pkhtb", 6800050, eac00020, 4, (RRnpc, RRnpc, RRnpc, oSHar), pkhtb, t_pkhtb),
  21433. TCE("qadd16", 6200f10, fa90f010, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21434. TCE("qadd8", 6200f90, fa80f010, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21435. TCE("qasx", 6200f30, faa0f010, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21436. /* Old name for QASX. */
  21437. TCE("qaddsubx",6200f30, faa0f010, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21438. TCE("qsax", 6200f50, fae0f010, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21439. /* Old name for QSAX. */
  21440. TCE("qsubaddx",6200f50, fae0f010, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21441. TCE("qsub16", 6200f70, fad0f010, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21442. TCE("qsub8", 6200ff0, fac0f010, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21443. TCE("sadd16", 6100f10, fa90f000, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21444. TCE("sadd8", 6100f90, fa80f000, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21445. TCE("sasx", 6100f30, faa0f000, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21446. /* Old name for SASX. */
  21447. TCE("saddsubx",6100f30, faa0f000, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21448. TCE("shadd16", 6300f10, fa90f020, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21449. TCE("shadd8", 6300f90, fa80f020, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21450. TCE("shasx", 6300f30, faa0f020, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21451. /* Old name for SHASX. */
  21452. TCE("shaddsubx", 6300f30, faa0f020, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21453. TCE("shsax", 6300f50, fae0f020, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21454. /* Old name for SHSAX. */
  21455. TCE("shsubaddx", 6300f50, fae0f020, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21456. TCE("shsub16", 6300f70, fad0f020, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21457. TCE("shsub8", 6300ff0, fac0f020, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21458. TCE("ssax", 6100f50, fae0f000, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21459. /* Old name for SSAX. */
  21460. TCE("ssubaddx",6100f50, fae0f000, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21461. TCE("ssub16", 6100f70, fad0f000, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21462. TCE("ssub8", 6100ff0, fac0f000, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21463. TCE("uadd16", 6500f10, fa90f040, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21464. TCE("uadd8", 6500f90, fa80f040, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21465. TCE("uasx", 6500f30, faa0f040, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21466. /* Old name for UASX. */
  21467. TCE("uaddsubx",6500f30, faa0f040, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21468. TCE("uhadd16", 6700f10, fa90f060, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21469. TCE("uhadd8", 6700f90, fa80f060, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21470. TCE("uhasx", 6700f30, faa0f060, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21471. /* Old name for UHASX. */
  21472. TCE("uhaddsubx", 6700f30, faa0f060, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21473. TCE("uhsax", 6700f50, fae0f060, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21474. /* Old name for UHSAX. */
  21475. TCE("uhsubaddx", 6700f50, fae0f060, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21476. TCE("uhsub16", 6700f70, fad0f060, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21477. TCE("uhsub8", 6700ff0, fac0f060, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21478. TCE("uqadd16", 6600f10, fa90f050, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21479. TCE("uqadd8", 6600f90, fa80f050, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21480. TCE("uqasx", 6600f30, faa0f050, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21481. /* Old name for UQASX. */
  21482. TCE("uqaddsubx", 6600f30, faa0f050, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21483. TCE("uqsax", 6600f50, fae0f050, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21484. /* Old name for UQSAX. */
  21485. TCE("uqsubaddx", 6600f50, fae0f050, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21486. TCE("uqsub16", 6600f70, fad0f050, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21487. TCE("uqsub8", 6600ff0, fac0f050, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21488. TCE("usub16", 6500f70, fad0f040, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21489. TCE("usax", 6500f50, fae0f040, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21490. /* Old name for USAX. */
  21491. TCE("usubaddx",6500f50, fae0f040, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21492. TCE("usub8", 6500ff0, fac0f040, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21493. TCE("sxtah", 6b00070, fa00f080, 4, (RRnpc, RRnpc, RRnpc, oROR), sxtah, t_sxtah),
  21494. TCE("sxtab16", 6800070, fa20f080, 4, (RRnpc, RRnpc, RRnpc, oROR), sxtah, t_sxtah),
  21495. TCE("sxtab", 6a00070, fa40f080, 4, (RRnpc, RRnpc, RRnpc, oROR), sxtah, t_sxtah),
  21496. TCE("sxtb16", 68f0070, fa2ff080, 3, (RRnpc, RRnpc, oROR), sxth, t_sxth),
  21497. TCE("uxtah", 6f00070, fa10f080, 4, (RRnpc, RRnpc, RRnpc, oROR), sxtah, t_sxtah),
  21498. TCE("uxtab16", 6c00070, fa30f080, 4, (RRnpc, RRnpc, RRnpc, oROR), sxtah, t_sxtah),
  21499. TCE("uxtab", 6e00070, fa50f080, 4, (RRnpc, RRnpc, RRnpc, oROR), sxtah, t_sxtah),
  21500. TCE("uxtb16", 6cf0070, fa3ff080, 3, (RRnpc, RRnpc, oROR), sxth, t_sxth),
  21501. TCE("sel", 6800fb0, faa0f080, 3, (RRnpc, RRnpc, RRnpc), rd_rn_rm, t_simd),
  21502. TCE("smlad", 7000010, fb200000, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
  21503. TCE("smladx", 7000030, fb200010, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
  21504. TCE("smlald", 7400010, fbc000c0, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smlal,t_mlal),
  21505. TCE("smlaldx", 7400030, fbc000d0, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smlal,t_mlal),
  21506. TCE("smlsd", 7000050, fb400000, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
  21507. TCE("smlsdx", 7000070, fb400010, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
  21508. TCE("smlsld", 7400050, fbd000c0, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smlal,t_mlal),
  21509. TCE("smlsldx", 7400070, fbd000d0, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smlal,t_mlal),
  21510. TCE("smmla", 7500010, fb500000, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
  21511. TCE("smmlar", 7500030, fb500010, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
  21512. TCE("smmls", 75000d0, fb600000, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
  21513. TCE("smmlsr", 75000f0, fb600010, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
  21514. TCE("smmul", 750f010, fb50f000, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21515. TCE("smmulr", 750f030, fb50f010, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21516. TCE("smuad", 700f010, fb20f000, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21517. TCE("smuadx", 700f030, fb20f010, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21518. TCE("smusd", 700f050, fb40f000, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21519. TCE("smusdx", 700f070, fb40f010, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21520. TCE("ssat16", 6a00f30, f3200000, 3, (RRnpc, I16, RRnpc), ssat16, t_ssat16),
  21521. TCE("umaal", 0400090, fbe00060, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smlal, t_mlal),
  21522. TCE("usad8", 780f010, fb70f000, 3, (RRnpc, RRnpc, RRnpc), smul, t_simd),
  21523. TCE("usada8", 7800010, fb700000, 4, (RRnpc, RRnpc, RRnpc, RRnpc),smla, t_mla),
  21524. TCE("usat16", 6e00f30, f3a00000, 3, (RRnpc, I15, RRnpc), usat16, t_usat16),
  21525. #undef ARM_VARIANT
  21526. #define ARM_VARIANT & arm_ext_v6k_v6t2
  21527. #undef THUMB_VARIANT
  21528. #define THUMB_VARIANT & arm_ext_v6k_v6t2
  21529. tCE("yield", 320f001, _yield, 0, (), noargs, t_hint),
  21530. tCE("wfe", 320f002, _wfe, 0, (), noargs, t_hint),
  21531. tCE("wfi", 320f003, _wfi, 0, (), noargs, t_hint),
  21532. tCE("sev", 320f004, _sev, 0, (), noargs, t_hint),
  21533. #undef THUMB_VARIANT
  21534. #define THUMB_VARIANT & arm_ext_v6_notm
  21535. TCE("ldrexd", 1b00f9f, e8d0007f, 3, (RRnpc_npcsp, oRRnpc_npcsp, RRnpcb),
  21536. ldrexd, t_ldrexd),
  21537. TCE("strexd", 1a00f90, e8c00070, 4, (RRnpc_npcsp, RRnpc_npcsp, oRRnpc_npcsp,
  21538. RRnpcb), strexd, t_strexd),
  21539. #undef THUMB_VARIANT
  21540. #define THUMB_VARIANT & arm_ext_v6t2_v8m
  21541. TCE("ldrexb", 1d00f9f, e8d00f4f, 2, (RRnpc_npcsp,RRnpcb),
  21542. rd_rn, rd_rn),
  21543. TCE("ldrexh", 1f00f9f, e8d00f5f, 2, (RRnpc_npcsp, RRnpcb),
  21544. rd_rn, rd_rn),
  21545. TCE("strexb", 1c00f90, e8c00f40, 3, (RRnpc_npcsp, RRnpc_npcsp, ADDR),
  21546. strex, t_strexbh),
  21547. TCE("strexh", 1e00f90, e8c00f50, 3, (RRnpc_npcsp, RRnpc_npcsp, ADDR),
  21548. strex, t_strexbh),
  21549. TUF("clrex", 57ff01f, f3bf8f2f, 0, (), noargs, noargs),
  21550. #undef ARM_VARIANT
  21551. #define ARM_VARIANT & arm_ext_sec
  21552. #undef THUMB_VARIANT
  21553. #define THUMB_VARIANT & arm_ext_sec
  21554. TCE("smc", 1600070, f7f08000, 1, (EXPi), smc, t_smc),
  21555. #undef ARM_VARIANT
  21556. #define ARM_VARIANT & arm_ext_virt
  21557. #undef THUMB_VARIANT
  21558. #define THUMB_VARIANT & arm_ext_virt
  21559. TCE("hvc", 1400070, f7e08000, 1, (EXPi), hvc, t_hvc),
  21560. TCE("eret", 160006e, f3de8f00, 0, (), noargs, noargs),
  21561. #undef ARM_VARIANT
  21562. #define ARM_VARIANT & arm_ext_pan
  21563. #undef THUMB_VARIANT
  21564. #define THUMB_VARIANT & arm_ext_pan
  21565. TUF("setpan", 1100000, b610, 1, (I7), setpan, t_setpan),
  21566. #undef ARM_VARIANT
  21567. #define ARM_VARIANT & arm_ext_v6t2
  21568. #undef THUMB_VARIANT
  21569. #define THUMB_VARIANT & arm_ext_v6t2
  21570. TCE("bfc", 7c0001f, f36f0000, 3, (RRnpc, I31, I32), bfc, t_bfc),
  21571. TCE("bfi", 7c00010, f3600000, 4, (RRnpc, RRnpc_I0, I31, I32), bfi, t_bfi),
  21572. TCE("sbfx", 7a00050, f3400000, 4, (RR, RR, I31, I32), bfx, t_bfx),
  21573. TCE("ubfx", 7e00050, f3c00000, 4, (RR, RR, I31, I32), bfx, t_bfx),
  21574. TCE("mls", 0600090, fb000010, 4, (RRnpc, RRnpc, RRnpc, RRnpc), mlas, t_mla),
  21575. TCE("rbit", 6ff0f30, fa90f0a0, 2, (RR, RR), rd_rm, t_rbit),
  21576. TC3("ldrht", 03000b0, f8300e00, 2, (RRnpc_npcsp, ADDR), ldsttv4, t_ldstt),
  21577. TC3("ldrsht", 03000f0, f9300e00, 2, (RRnpc_npcsp, ADDR), ldsttv4, t_ldstt),
  21578. TC3("ldrsbt", 03000d0, f9100e00, 2, (RRnpc_npcsp, ADDR), ldsttv4, t_ldstt),
  21579. TC3("strht", 02000b0, f8200e00, 2, (RRnpc_npcsp, ADDR), ldsttv4, t_ldstt),
  21580. #undef ARM_VARIANT
  21581. #define ARM_VARIANT & arm_ext_v3
  21582. #undef THUMB_VARIANT
  21583. #define THUMB_VARIANT & arm_ext_v6t2
  21584. TUE("csdb", 320f014, f3af8014, 0, (), noargs, t_csdb),
  21585. TUF("ssbb", 57ff040, f3bf8f40, 0, (), noargs, t_csdb),
  21586. TUF("pssbb", 57ff044, f3bf8f44, 0, (), noargs, t_csdb),
  21587. #undef ARM_VARIANT
  21588. #define ARM_VARIANT & arm_ext_v6t2
  21589. #undef THUMB_VARIANT
  21590. #define THUMB_VARIANT & arm_ext_v6t2_v8m
  21591. TCE("movw", 3000000, f2400000, 2, (RRnpc, HALF), mov16, t_mov16),
  21592. TCE("movt", 3400000, f2c00000, 2, (RRnpc, HALF), mov16, t_mov16),
  21593. /* Thumb-only instructions. */
  21594. #undef ARM_VARIANT
  21595. #define ARM_VARIANT NULL
  21596. TUE("cbnz", 0, b900, 2, (RR, EXP), 0, t_cbz),
  21597. TUE("cbz", 0, b100, 2, (RR, EXP), 0, t_cbz),
  21598. /* ARM does not really have an IT instruction, so always allow it.
  21599. The opcode is copied from Thumb in order to allow warnings in
  21600. -mimplicit-it=[never | arm] modes. */
  21601. #undef ARM_VARIANT
  21602. #define ARM_VARIANT & arm_ext_v1
  21603. #undef THUMB_VARIANT
  21604. #define THUMB_VARIANT & arm_ext_v6t2
  21605. TUE("it", bf08, bf08, 1, (COND), it, t_it),
  21606. TUE("itt", bf0c, bf0c, 1, (COND), it, t_it),
  21607. TUE("ite", bf04, bf04, 1, (COND), it, t_it),
  21608. TUE("ittt", bf0e, bf0e, 1, (COND), it, t_it),
  21609. TUE("itet", bf06, bf06, 1, (COND), it, t_it),
  21610. TUE("itte", bf0a, bf0a, 1, (COND), it, t_it),
  21611. TUE("itee", bf02, bf02, 1, (COND), it, t_it),
  21612. TUE("itttt", bf0f, bf0f, 1, (COND), it, t_it),
  21613. TUE("itett", bf07, bf07, 1, (COND), it, t_it),
  21614. TUE("ittet", bf0b, bf0b, 1, (COND), it, t_it),
  21615. TUE("iteet", bf03, bf03, 1, (COND), it, t_it),
  21616. TUE("ittte", bf0d, bf0d, 1, (COND), it, t_it),
  21617. TUE("itete", bf05, bf05, 1, (COND), it, t_it),
  21618. TUE("ittee", bf09, bf09, 1, (COND), it, t_it),
  21619. TUE("iteee", bf01, bf01, 1, (COND), it, t_it),
  21620. /* ARM/Thumb-2 instructions with no Thumb-1 equivalent. */
  21621. TC3("rrx", 01a00060, ea4f0030, 2, (RR, RR), rd_rm, t_rrx),
  21622. TC3("rrxs", 01b00060, ea5f0030, 2, (RR, RR), rd_rm, t_rrx),
  21623. /* Thumb2 only instructions. */
  21624. #undef ARM_VARIANT
  21625. #define ARM_VARIANT NULL
  21626. TCE("addw", 0, f2000000, 3, (RR, RR, EXPi), 0, t_add_sub_w),
  21627. TCE("subw", 0, f2a00000, 3, (RR, RR, EXPi), 0, t_add_sub_w),
  21628. TCE("orn", 0, ea600000, 3, (RR, oRR, SH), 0, t_orn),
  21629. TCE("orns", 0, ea700000, 3, (RR, oRR, SH), 0, t_orn),
  21630. TCE("tbb", 0, e8d0f000, 1, (TB), 0, t_tb),
  21631. TCE("tbh", 0, e8d0f010, 1, (TB), 0, t_tb),
  21632. /* Hardware division instructions. */
  21633. #undef ARM_VARIANT
  21634. #define ARM_VARIANT & arm_ext_adiv
  21635. #undef THUMB_VARIANT
  21636. #define THUMB_VARIANT & arm_ext_div
  21637. TCE("sdiv", 710f010, fb90f0f0, 3, (RR, oRR, RR), div, t_div),
  21638. TCE("udiv", 730f010, fbb0f0f0, 3, (RR, oRR, RR), div, t_div),
  21639. /* ARM V6M/V7 instructions. */
  21640. #undef ARM_VARIANT
  21641. #define ARM_VARIANT & arm_ext_barrier
  21642. #undef THUMB_VARIANT
  21643. #define THUMB_VARIANT & arm_ext_barrier
  21644. TUF("dmb", 57ff050, f3bf8f50, 1, (oBARRIER_I15), barrier, barrier),
  21645. TUF("dsb", 57ff040, f3bf8f40, 1, (oBARRIER_I15), barrier, barrier),
  21646. TUF("isb", 57ff060, f3bf8f60, 1, (oBARRIER_I15), barrier, barrier),
  21647. /* ARM V7 instructions. */
  21648. #undef ARM_VARIANT
  21649. #define ARM_VARIANT & arm_ext_v7
  21650. #undef THUMB_VARIANT
  21651. #define THUMB_VARIANT & arm_ext_v7
  21652. TUF("pli", 450f000, f910f000, 1, (ADDR), pli, t_pld),
  21653. TCE("dbg", 320f0f0, f3af80f0, 1, (I15), dbg, t_dbg),
  21654. #undef ARM_VARIANT
  21655. #define ARM_VARIANT & arm_ext_mp
  21656. #undef THUMB_VARIANT
  21657. #define THUMB_VARIANT & arm_ext_mp
  21658. TUF("pldw", 410f000, f830f000, 1, (ADDR), pld, t_pld),
  21659. /* ARMv8 instructions. */
  21660. #undef ARM_VARIANT
  21661. #define ARM_VARIANT & arm_ext_v8
  21662. /* Instructions shared between armv8-a and armv8-m. */
  21663. #undef THUMB_VARIANT
  21664. #define THUMB_VARIANT & arm_ext_atomics
  21665. TCE("lda", 1900c9f, e8d00faf, 2, (RRnpc, RRnpcb), rd_rn, rd_rn),
  21666. TCE("ldab", 1d00c9f, e8d00f8f, 2, (RRnpc, RRnpcb), rd_rn, rd_rn),
  21667. TCE("ldah", 1f00c9f, e8d00f9f, 2, (RRnpc, RRnpcb), rd_rn, rd_rn),
  21668. TCE("stl", 180fc90, e8c00faf, 2, (RRnpc, RRnpcb), rm_rn, rd_rn),
  21669. TCE("stlb", 1c0fc90, e8c00f8f, 2, (RRnpc, RRnpcb), rm_rn, rd_rn),
  21670. TCE("stlh", 1e0fc90, e8c00f9f, 2, (RRnpc, RRnpcb), rm_rn, rd_rn),
  21671. TCE("ldaex", 1900e9f, e8d00fef, 2, (RRnpc, RRnpcb), rd_rn, rd_rn),
  21672. TCE("ldaexb", 1d00e9f, e8d00fcf, 2, (RRnpc,RRnpcb), rd_rn, rd_rn),
  21673. TCE("ldaexh", 1f00e9f, e8d00fdf, 2, (RRnpc, RRnpcb), rd_rn, rd_rn),
  21674. TCE("stlex", 1800e90, e8c00fe0, 3, (RRnpc, RRnpc, RRnpcb),
  21675. stlex, t_stlex),
  21676. TCE("stlexb", 1c00e90, e8c00fc0, 3, (RRnpc, RRnpc, RRnpcb),
  21677. stlex, t_stlex),
  21678. TCE("stlexh", 1e00e90, e8c00fd0, 3, (RRnpc, RRnpc, RRnpcb),
  21679. stlex, t_stlex),
  21680. #undef THUMB_VARIANT
  21681. #define THUMB_VARIANT & arm_ext_v8
  21682. tCE("sevl", 320f005, _sevl, 0, (), noargs, t_hint),
  21683. TCE("ldaexd", 1b00e9f, e8d000ff, 3, (RRnpc, oRRnpc, RRnpcb),
  21684. ldrexd, t_ldrexd),
  21685. TCE("stlexd", 1a00e90, e8c000f0, 4, (RRnpc, RRnpc, oRRnpc, RRnpcb),
  21686. strexd, t_strexd),
  21687. #undef THUMB_VARIANT
  21688. #define THUMB_VARIANT & arm_ext_v8r
  21689. #undef ARM_VARIANT
  21690. #define ARM_VARIANT & arm_ext_v8r
  21691. /* ARMv8-R instructions. */
  21692. TUF("dfb", 57ff04c, f3bf8f4c, 0, (), noargs, noargs),
  21693. /* Defined in V8 but is in undefined encoding space for earlier
  21694. architectures. However earlier architectures are required to treat
  21695. this instruction as a semihosting trap as well. Hence while not explicitly
  21696. defined as such, it is in fact correct to define the instruction for all
  21697. architectures. */
  21698. #undef THUMB_VARIANT
  21699. #define THUMB_VARIANT & arm_ext_v1
  21700. #undef ARM_VARIANT
  21701. #define ARM_VARIANT & arm_ext_v1
  21702. TUE("hlt", 1000070, ba80, 1, (oIffffb), bkpt, t_hlt),
  21703. /* ARMv8 T32 only. */
  21704. #undef ARM_VARIANT
  21705. #define ARM_VARIANT NULL
  21706. TUF("dcps1", 0, f78f8001, 0, (), noargs, noargs),
  21707. TUF("dcps2", 0, f78f8002, 0, (), noargs, noargs),
  21708. TUF("dcps3", 0, f78f8003, 0, (), noargs, noargs),
  21709. /* FP for ARMv8. */
  21710. #undef ARM_VARIANT
  21711. #define ARM_VARIANT & fpu_vfp_ext_armv8xd
  21712. #undef THUMB_VARIANT
  21713. #define THUMB_VARIANT & fpu_vfp_ext_armv8xd
  21714. nUF(vseleq, _vseleq, 3, (RVSD, RVSD, RVSD), vsel),
  21715. nUF(vselvs, _vselvs, 3, (RVSD, RVSD, RVSD), vsel),
  21716. nUF(vselge, _vselge, 3, (RVSD, RVSD, RVSD), vsel),
  21717. nUF(vselgt, _vselgt, 3, (RVSD, RVSD, RVSD), vsel),
  21718. nCE(vrintr, _vrintr, 2, (RNSDQ, oRNSDQ), vrintr),
  21719. mnCE(vrintz, _vrintr, 2, (RNSDQMQ, oRNSDQMQ), vrintz),
  21720. mnCE(vrintx, _vrintr, 2, (RNSDQMQ, oRNSDQMQ), vrintx),
  21721. mnUF(vrinta, _vrinta, 2, (RNSDQMQ, oRNSDQMQ), vrinta),
  21722. mnUF(vrintn, _vrinta, 2, (RNSDQMQ, oRNSDQMQ), vrintn),
  21723. mnUF(vrintp, _vrinta, 2, (RNSDQMQ, oRNSDQMQ), vrintp),
  21724. mnUF(vrintm, _vrinta, 2, (RNSDQMQ, oRNSDQMQ), vrintm),
  21725. /* Crypto v1 extensions. */
  21726. #undef ARM_VARIANT
  21727. #define ARM_VARIANT & fpu_crypto_ext_armv8
  21728. #undef THUMB_VARIANT
  21729. #define THUMB_VARIANT & fpu_crypto_ext_armv8
  21730. nUF(aese, _aes, 2, (RNQ, RNQ), aese),
  21731. nUF(aesd, _aes, 2, (RNQ, RNQ), aesd),
  21732. nUF(aesmc, _aes, 2, (RNQ, RNQ), aesmc),
  21733. nUF(aesimc, _aes, 2, (RNQ, RNQ), aesimc),
  21734. nUF(sha1c, _sha3op, 3, (RNQ, RNQ, RNQ), sha1c),
  21735. nUF(sha1p, _sha3op, 3, (RNQ, RNQ, RNQ), sha1p),
  21736. nUF(sha1m, _sha3op, 3, (RNQ, RNQ, RNQ), sha1m),
  21737. nUF(sha1su0, _sha3op, 3, (RNQ, RNQ, RNQ), sha1su0),
  21738. nUF(sha256h, _sha3op, 3, (RNQ, RNQ, RNQ), sha256h),
  21739. nUF(sha256h2, _sha3op, 3, (RNQ, RNQ, RNQ), sha256h2),
  21740. nUF(sha256su1, _sha3op, 3, (RNQ, RNQ, RNQ), sha256su1),
  21741. nUF(sha1h, _sha1h, 2, (RNQ, RNQ), sha1h),
  21742. nUF(sha1su1, _sha2op, 2, (RNQ, RNQ), sha1su1),
  21743. nUF(sha256su0, _sha2op, 2, (RNQ, RNQ), sha256su0),
  21744. #undef ARM_VARIANT
  21745. #define ARM_VARIANT & arm_ext_crc
  21746. #undef THUMB_VARIANT
  21747. #define THUMB_VARIANT & arm_ext_crc
  21748. TUEc("crc32b", 1000040, fac0f080, 3, (RR, oRR, RR), crc32b),
  21749. TUEc("crc32h", 1200040, fac0f090, 3, (RR, oRR, RR), crc32h),
  21750. TUEc("crc32w", 1400040, fac0f0a0, 3, (RR, oRR, RR), crc32w),
  21751. TUEc("crc32cb",1000240, fad0f080, 3, (RR, oRR, RR), crc32cb),
  21752. TUEc("crc32ch",1200240, fad0f090, 3, (RR, oRR, RR), crc32ch),
  21753. TUEc("crc32cw",1400240, fad0f0a0, 3, (RR, oRR, RR), crc32cw),
  21754. /* ARMv8.2 RAS extension. */
  21755. #undef ARM_VARIANT
  21756. #define ARM_VARIANT & arm_ext_ras
  21757. #undef THUMB_VARIANT
  21758. #define THUMB_VARIANT & arm_ext_ras
  21759. TUE ("esb", 320f010, f3af8010, 0, (), noargs, noargs),
  21760. #undef ARM_VARIANT
  21761. #define ARM_VARIANT & arm_ext_v8_3
  21762. #undef THUMB_VARIANT
  21763. #define THUMB_VARIANT & arm_ext_v8_3
  21764. NCE (vjcvt, eb90bc0, 2, (RVS, RVD), vjcvt),
  21765. #undef ARM_VARIANT
  21766. #define ARM_VARIANT & fpu_neon_ext_dotprod
  21767. #undef THUMB_VARIANT
  21768. #define THUMB_VARIANT & fpu_neon_ext_dotprod
  21769. NUF (vsdot, d00, 3, (RNDQ, RNDQ, RNDQ_RNSC), neon_dotproduct_s),
  21770. NUF (vudot, d00, 3, (RNDQ, RNDQ, RNDQ_RNSC), neon_dotproduct_u),
  21771. #undef ARM_VARIANT
  21772. #define ARM_VARIANT & fpu_fpa_ext_v1 /* Core FPA instruction set (V1). */
  21773. #undef THUMB_VARIANT
  21774. #define THUMB_VARIANT NULL
  21775. cCE("wfs", e200110, 1, (RR), rd),
  21776. cCE("rfs", e300110, 1, (RR), rd),
  21777. cCE("wfc", e400110, 1, (RR), rd),
  21778. cCE("rfc", e500110, 1, (RR), rd),
  21779. cCL("ldfs", c100100, 2, (RF, ADDRGLDC), rd_cpaddr),
  21780. cCL("ldfd", c108100, 2, (RF, ADDRGLDC), rd_cpaddr),
  21781. cCL("ldfe", c500100, 2, (RF, ADDRGLDC), rd_cpaddr),
  21782. cCL("ldfp", c508100, 2, (RF, ADDRGLDC), rd_cpaddr),
  21783. cCL("stfs", c000100, 2, (RF, ADDRGLDC), rd_cpaddr),
  21784. cCL("stfd", c008100, 2, (RF, ADDRGLDC), rd_cpaddr),
  21785. cCL("stfe", c400100, 2, (RF, ADDRGLDC), rd_cpaddr),
  21786. cCL("stfp", c408100, 2, (RF, ADDRGLDC), rd_cpaddr),
  21787. cCL("mvfs", e008100, 2, (RF, RF_IF), rd_rm),
  21788. cCL("mvfsp", e008120, 2, (RF, RF_IF), rd_rm),
  21789. cCL("mvfsm", e008140, 2, (RF, RF_IF), rd_rm),
  21790. cCL("mvfsz", e008160, 2, (RF, RF_IF), rd_rm),
  21791. cCL("mvfd", e008180, 2, (RF, RF_IF), rd_rm),
  21792. cCL("mvfdp", e0081a0, 2, (RF, RF_IF), rd_rm),
  21793. cCL("mvfdm", e0081c0, 2, (RF, RF_IF), rd_rm),
  21794. cCL("mvfdz", e0081e0, 2, (RF, RF_IF), rd_rm),
  21795. cCL("mvfe", e088100, 2, (RF, RF_IF), rd_rm),
  21796. cCL("mvfep", e088120, 2, (RF, RF_IF), rd_rm),
  21797. cCL("mvfem", e088140, 2, (RF, RF_IF), rd_rm),
  21798. cCL("mvfez", e088160, 2, (RF, RF_IF), rd_rm),
  21799. cCL("mnfs", e108100, 2, (RF, RF_IF), rd_rm),
  21800. cCL("mnfsp", e108120, 2, (RF, RF_IF), rd_rm),
  21801. cCL("mnfsm", e108140, 2, (RF, RF_IF), rd_rm),
  21802. cCL("mnfsz", e108160, 2, (RF, RF_IF), rd_rm),
  21803. cCL("mnfd", e108180, 2, (RF, RF_IF), rd_rm),
  21804. cCL("mnfdp", e1081a0, 2, (RF, RF_IF), rd_rm),
  21805. cCL("mnfdm", e1081c0, 2, (RF, RF_IF), rd_rm),
  21806. cCL("mnfdz", e1081e0, 2, (RF, RF_IF), rd_rm),
  21807. cCL("mnfe", e188100, 2, (RF, RF_IF), rd_rm),
  21808. cCL("mnfep", e188120, 2, (RF, RF_IF), rd_rm),
  21809. cCL("mnfem", e188140, 2, (RF, RF_IF), rd_rm),
  21810. cCL("mnfez", e188160, 2, (RF, RF_IF), rd_rm),
  21811. cCL("abss", e208100, 2, (RF, RF_IF), rd_rm),
  21812. cCL("abssp", e208120, 2, (RF, RF_IF), rd_rm),
  21813. cCL("abssm", e208140, 2, (RF, RF_IF), rd_rm),
  21814. cCL("abssz", e208160, 2, (RF, RF_IF), rd_rm),
  21815. cCL("absd", e208180, 2, (RF, RF_IF), rd_rm),
  21816. cCL("absdp", e2081a0, 2, (RF, RF_IF), rd_rm),
  21817. cCL("absdm", e2081c0, 2, (RF, RF_IF), rd_rm),
  21818. cCL("absdz", e2081e0, 2, (RF, RF_IF), rd_rm),
  21819. cCL("abse", e288100, 2, (RF, RF_IF), rd_rm),
  21820. cCL("absep", e288120, 2, (RF, RF_IF), rd_rm),
  21821. cCL("absem", e288140, 2, (RF, RF_IF), rd_rm),
  21822. cCL("absez", e288160, 2, (RF, RF_IF), rd_rm),
  21823. cCL("rnds", e308100, 2, (RF, RF_IF), rd_rm),
  21824. cCL("rndsp", e308120, 2, (RF, RF_IF), rd_rm),
  21825. cCL("rndsm", e308140, 2, (RF, RF_IF), rd_rm),
  21826. cCL("rndsz", e308160, 2, (RF, RF_IF), rd_rm),
  21827. cCL("rndd", e308180, 2, (RF, RF_IF), rd_rm),
  21828. cCL("rnddp", e3081a0, 2, (RF, RF_IF), rd_rm),
  21829. cCL("rnddm", e3081c0, 2, (RF, RF_IF), rd_rm),
  21830. cCL("rnddz", e3081e0, 2, (RF, RF_IF), rd_rm),
  21831. cCL("rnde", e388100, 2, (RF, RF_IF), rd_rm),
  21832. cCL("rndep", e388120, 2, (RF, RF_IF), rd_rm),
  21833. cCL("rndem", e388140, 2, (RF, RF_IF), rd_rm),
  21834. cCL("rndez", e388160, 2, (RF, RF_IF), rd_rm),
  21835. cCL("sqts", e408100, 2, (RF, RF_IF), rd_rm),
  21836. cCL("sqtsp", e408120, 2, (RF, RF_IF), rd_rm),
  21837. cCL("sqtsm", e408140, 2, (RF, RF_IF), rd_rm),
  21838. cCL("sqtsz", e408160, 2, (RF, RF_IF), rd_rm),
  21839. cCL("sqtd", e408180, 2, (RF, RF_IF), rd_rm),
  21840. cCL("sqtdp", e4081a0, 2, (RF, RF_IF), rd_rm),
  21841. cCL("sqtdm", e4081c0, 2, (RF, RF_IF), rd_rm),
  21842. cCL("sqtdz", e4081e0, 2, (RF, RF_IF), rd_rm),
  21843. cCL("sqte", e488100, 2, (RF, RF_IF), rd_rm),
  21844. cCL("sqtep", e488120, 2, (RF, RF_IF), rd_rm),
  21845. cCL("sqtem", e488140, 2, (RF, RF_IF), rd_rm),
  21846. cCL("sqtez", e488160, 2, (RF, RF_IF), rd_rm),
  21847. cCL("logs", e508100, 2, (RF, RF_IF), rd_rm),
  21848. cCL("logsp", e508120, 2, (RF, RF_IF), rd_rm),
  21849. cCL("logsm", e508140, 2, (RF, RF_IF), rd_rm),
  21850. cCL("logsz", e508160, 2, (RF, RF_IF), rd_rm),
  21851. cCL("logd", e508180, 2, (RF, RF_IF), rd_rm),
  21852. cCL("logdp", e5081a0, 2, (RF, RF_IF), rd_rm),
  21853. cCL("logdm", e5081c0, 2, (RF, RF_IF), rd_rm),
  21854. cCL("logdz", e5081e0, 2, (RF, RF_IF), rd_rm),
  21855. cCL("loge", e588100, 2, (RF, RF_IF), rd_rm),
  21856. cCL("logep", e588120, 2, (RF, RF_IF), rd_rm),
  21857. cCL("logem", e588140, 2, (RF, RF_IF), rd_rm),
  21858. cCL("logez", e588160, 2, (RF, RF_IF), rd_rm),
  21859. cCL("lgns", e608100, 2, (RF, RF_IF), rd_rm),
  21860. cCL("lgnsp", e608120, 2, (RF, RF_IF), rd_rm),
  21861. cCL("lgnsm", e608140, 2, (RF, RF_IF), rd_rm),
  21862. cCL("lgnsz", e608160, 2, (RF, RF_IF), rd_rm),
  21863. cCL("lgnd", e608180, 2, (RF, RF_IF), rd_rm),
  21864. cCL("lgndp", e6081a0, 2, (RF, RF_IF), rd_rm),
  21865. cCL("lgndm", e6081c0, 2, (RF, RF_IF), rd_rm),
  21866. cCL("lgndz", e6081e0, 2, (RF, RF_IF), rd_rm),
  21867. cCL("lgne", e688100, 2, (RF, RF_IF), rd_rm),
  21868. cCL("lgnep", e688120, 2, (RF, RF_IF), rd_rm),
  21869. cCL("lgnem", e688140, 2, (RF, RF_IF), rd_rm),
  21870. cCL("lgnez", e688160, 2, (RF, RF_IF), rd_rm),
  21871. cCL("exps", e708100, 2, (RF, RF_IF), rd_rm),
  21872. cCL("expsp", e708120, 2, (RF, RF_IF), rd_rm),
  21873. cCL("expsm", e708140, 2, (RF, RF_IF), rd_rm),
  21874. cCL("expsz", e708160, 2, (RF, RF_IF), rd_rm),
  21875. cCL("expd", e708180, 2, (RF, RF_IF), rd_rm),
  21876. cCL("expdp", e7081a0, 2, (RF, RF_IF), rd_rm),
  21877. cCL("expdm", e7081c0, 2, (RF, RF_IF), rd_rm),
  21878. cCL("expdz", e7081e0, 2, (RF, RF_IF), rd_rm),
  21879. cCL("expe", e788100, 2, (RF, RF_IF), rd_rm),
  21880. cCL("expep", e788120, 2, (RF, RF_IF), rd_rm),
  21881. cCL("expem", e788140, 2, (RF, RF_IF), rd_rm),
  21882. cCL("expdz", e788160, 2, (RF, RF_IF), rd_rm), /* NOTE(review): duplicate of "expdz" at e7081e0 above; encoding e788160 completes the expe/expep/expem run (e788100/120/140), so this was presumably meant to be "expez" -- kept byte-identical since renaming changes the mnemonics the assembler accepts.  */
  21883. cCL("sins", e808100, 2, (RF, RF_IF), rd_rm),
  21884. cCL("sinsp", e808120, 2, (RF, RF_IF), rd_rm),
  21885. cCL("sinsm", e808140, 2, (RF, RF_IF), rd_rm),
  21886. cCL("sinsz", e808160, 2, (RF, RF_IF), rd_rm),
  21887. cCL("sind", e808180, 2, (RF, RF_IF), rd_rm),
  21888. cCL("sindp", e8081a0, 2, (RF, RF_IF), rd_rm),
  21889. cCL("sindm", e8081c0, 2, (RF, RF_IF), rd_rm),
  21890. cCL("sindz", e8081e0, 2, (RF, RF_IF), rd_rm),
  21891. cCL("sine", e888100, 2, (RF, RF_IF), rd_rm),
  21892. cCL("sinep", e888120, 2, (RF, RF_IF), rd_rm),
  21893. cCL("sinem", e888140, 2, (RF, RF_IF), rd_rm),
  21894. cCL("sinez", e888160, 2, (RF, RF_IF), rd_rm),
  21895. cCL("coss", e908100, 2, (RF, RF_IF), rd_rm),
  21896. cCL("cossp", e908120, 2, (RF, RF_IF), rd_rm),
  21897. cCL("cossm", e908140, 2, (RF, RF_IF), rd_rm),
  21898. cCL("cossz", e908160, 2, (RF, RF_IF), rd_rm),
  21899. cCL("cosd", e908180, 2, (RF, RF_IF), rd_rm),
  21900. cCL("cosdp", e9081a0, 2, (RF, RF_IF), rd_rm),
  21901. cCL("cosdm", e9081c0, 2, (RF, RF_IF), rd_rm),
  21902. cCL("cosdz", e9081e0, 2, (RF, RF_IF), rd_rm),
  21903. cCL("cose", e988100, 2, (RF, RF_IF), rd_rm),
  21904. cCL("cosep", e988120, 2, (RF, RF_IF), rd_rm),
  21905. cCL("cosem", e988140, 2, (RF, RF_IF), rd_rm),
  21906. cCL("cosez", e988160, 2, (RF, RF_IF), rd_rm),
  21907. cCL("tans", ea08100, 2, (RF, RF_IF), rd_rm),
  21908. cCL("tansp", ea08120, 2, (RF, RF_IF), rd_rm),
  21909. cCL("tansm", ea08140, 2, (RF, RF_IF), rd_rm),
  21910. cCL("tansz", ea08160, 2, (RF, RF_IF), rd_rm),
  21911. cCL("tand", ea08180, 2, (RF, RF_IF), rd_rm),
  21912. cCL("tandp", ea081a0, 2, (RF, RF_IF), rd_rm),
  21913. cCL("tandm", ea081c0, 2, (RF, RF_IF), rd_rm),
  21914. cCL("tandz", ea081e0, 2, (RF, RF_IF), rd_rm),
  21915. cCL("tane", ea88100, 2, (RF, RF_IF), rd_rm),
  21916. cCL("tanep", ea88120, 2, (RF, RF_IF), rd_rm),
  21917. cCL("tanem", ea88140, 2, (RF, RF_IF), rd_rm),
  21918. cCL("tanez", ea88160, 2, (RF, RF_IF), rd_rm),
  21919. cCL("asns", eb08100, 2, (RF, RF_IF), rd_rm),
  21920. cCL("asnsp", eb08120, 2, (RF, RF_IF), rd_rm),
  21921. cCL("asnsm", eb08140, 2, (RF, RF_IF), rd_rm),
  21922. cCL("asnsz", eb08160, 2, (RF, RF_IF), rd_rm),
  21923. cCL("asnd", eb08180, 2, (RF, RF_IF), rd_rm),
  21924. cCL("asndp", eb081a0, 2, (RF, RF_IF), rd_rm),
  21925. cCL("asndm", eb081c0, 2, (RF, RF_IF), rd_rm),
  21926. cCL("asndz", eb081e0, 2, (RF, RF_IF), rd_rm),
  21927. cCL("asne", eb88100, 2, (RF, RF_IF), rd_rm),
  21928. cCL("asnep", eb88120, 2, (RF, RF_IF), rd_rm),
  21929. cCL("asnem", eb88140, 2, (RF, RF_IF), rd_rm),
  21930. cCL("asnez", eb88160, 2, (RF, RF_IF), rd_rm),
  21931. cCL("acss", ec08100, 2, (RF, RF_IF), rd_rm),
  21932. cCL("acssp", ec08120, 2, (RF, RF_IF), rd_rm),
  21933. cCL("acssm", ec08140, 2, (RF, RF_IF), rd_rm),
  21934. cCL("acssz", ec08160, 2, (RF, RF_IF), rd_rm),
  21935. cCL("acsd", ec08180, 2, (RF, RF_IF), rd_rm),
  21936. cCL("acsdp", ec081a0, 2, (RF, RF_IF), rd_rm),
  21937. cCL("acsdm", ec081c0, 2, (RF, RF_IF), rd_rm),
  21938. cCL("acsdz", ec081e0, 2, (RF, RF_IF), rd_rm),
  21939. cCL("acse", ec88100, 2, (RF, RF_IF), rd_rm),
  21940. cCL("acsep", ec88120, 2, (RF, RF_IF), rd_rm),
  21941. cCL("acsem", ec88140, 2, (RF, RF_IF), rd_rm),
  21942. cCL("acsez", ec88160, 2, (RF, RF_IF), rd_rm),
  21943. cCL("atns", ed08100, 2, (RF, RF_IF), rd_rm),
  21944. cCL("atnsp", ed08120, 2, (RF, RF_IF), rd_rm),
  21945. cCL("atnsm", ed08140, 2, (RF, RF_IF), rd_rm),
  21946. cCL("atnsz", ed08160, 2, (RF, RF_IF), rd_rm),
  21947. cCL("atnd", ed08180, 2, (RF, RF_IF), rd_rm),
  21948. cCL("atndp", ed081a0, 2, (RF, RF_IF), rd_rm),
  21949. cCL("atndm", ed081c0, 2, (RF, RF_IF), rd_rm),
  21950. cCL("atndz", ed081e0, 2, (RF, RF_IF), rd_rm),
  21951. cCL("atne", ed88100, 2, (RF, RF_IF), rd_rm),
  21952. cCL("atnep", ed88120, 2, (RF, RF_IF), rd_rm),
  21953. cCL("atnem", ed88140, 2, (RF, RF_IF), rd_rm),
  21954. cCL("atnez", ed88160, 2, (RF, RF_IF), rd_rm),
  21955. cCL("urds", ee08100, 2, (RF, RF_IF), rd_rm),
  21956. cCL("urdsp", ee08120, 2, (RF, RF_IF), rd_rm),
  21957. cCL("urdsm", ee08140, 2, (RF, RF_IF), rd_rm),
  21958. cCL("urdsz", ee08160, 2, (RF, RF_IF), rd_rm),
  21959. cCL("urdd", ee08180, 2, (RF, RF_IF), rd_rm),
  21960. cCL("urddp", ee081a0, 2, (RF, RF_IF), rd_rm),
  21961. cCL("urddm", ee081c0, 2, (RF, RF_IF), rd_rm),
  21962. cCL("urddz", ee081e0, 2, (RF, RF_IF), rd_rm),
  21963. cCL("urde", ee88100, 2, (RF, RF_IF), rd_rm),
  21964. cCL("urdep", ee88120, 2, (RF, RF_IF), rd_rm),
  21965. cCL("urdem", ee88140, 2, (RF, RF_IF), rd_rm),
  21966. cCL("urdez", ee88160, 2, (RF, RF_IF), rd_rm),
  21967. cCL("nrms", ef08100, 2, (RF, RF_IF), rd_rm),
  21968. cCL("nrmsp", ef08120, 2, (RF, RF_IF), rd_rm),
  21969. cCL("nrmsm", ef08140, 2, (RF, RF_IF), rd_rm),
  21970. cCL("nrmsz", ef08160, 2, (RF, RF_IF), rd_rm),
  21971. cCL("nrmd", ef08180, 2, (RF, RF_IF), rd_rm),
  21972. cCL("nrmdp", ef081a0, 2, (RF, RF_IF), rd_rm),
  21973. cCL("nrmdm", ef081c0, 2, (RF, RF_IF), rd_rm),
  21974. cCL("nrmdz", ef081e0, 2, (RF, RF_IF), rd_rm),
  21975. cCL("nrme", ef88100, 2, (RF, RF_IF), rd_rm),
  21976. cCL("nrmep", ef88120, 2, (RF, RF_IF), rd_rm),
  21977. cCL("nrmem", ef88140, 2, (RF, RF_IF), rd_rm),
  21978. cCL("nrmez", ef88160, 2, (RF, RF_IF), rd_rm),
  21979. cCL("adfs", e000100, 3, (RF, RF, RF_IF), rd_rn_rm),
  21980. cCL("adfsp", e000120, 3, (RF, RF, RF_IF), rd_rn_rm),
  21981. cCL("adfsm", e000140, 3, (RF, RF, RF_IF), rd_rn_rm),
  21982. cCL("adfsz", e000160, 3, (RF, RF, RF_IF), rd_rn_rm),
  21983. cCL("adfd", e000180, 3, (RF, RF, RF_IF), rd_rn_rm),
  21984. cCL("adfdp", e0001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  21985. cCL("adfdm", e0001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  21986. cCL("adfdz", e0001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  21987. cCL("adfe", e080100, 3, (RF, RF, RF_IF), rd_rn_rm),
  21988. cCL("adfep", e080120, 3, (RF, RF, RF_IF), rd_rn_rm),
  21989. cCL("adfem", e080140, 3, (RF, RF, RF_IF), rd_rn_rm),
  21990. cCL("adfez", e080160, 3, (RF, RF, RF_IF), rd_rn_rm),
  21991. cCL("sufs", e200100, 3, (RF, RF, RF_IF), rd_rn_rm),
  21992. cCL("sufsp", e200120, 3, (RF, RF, RF_IF), rd_rn_rm),
  21993. cCL("sufsm", e200140, 3, (RF, RF, RF_IF), rd_rn_rm),
  21994. cCL("sufsz", e200160, 3, (RF, RF, RF_IF), rd_rn_rm),
  21995. cCL("sufd", e200180, 3, (RF, RF, RF_IF), rd_rn_rm),
  21996. cCL("sufdp", e2001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  21997. cCL("sufdm", e2001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  21998. cCL("sufdz", e2001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  21999. cCL("sufe", e280100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22000. cCL("sufep", e280120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22001. cCL("sufem", e280140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22002. cCL("sufez", e280160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22003. cCL("rsfs", e300100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22004. cCL("rsfsp", e300120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22005. cCL("rsfsm", e300140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22006. cCL("rsfsz", e300160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22007. cCL("rsfd", e300180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22008. cCL("rsfdp", e3001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22009. cCL("rsfdm", e3001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22010. cCL("rsfdz", e3001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22011. cCL("rsfe", e380100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22012. cCL("rsfep", e380120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22013. cCL("rsfem", e380140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22014. cCL("rsfez", e380160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22015. cCL("mufs", e100100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22016. cCL("mufsp", e100120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22017. cCL("mufsm", e100140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22018. cCL("mufsz", e100160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22019. cCL("mufd", e100180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22020. cCL("mufdp", e1001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22021. cCL("mufdm", e1001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22022. cCL("mufdz", e1001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22023. cCL("mufe", e180100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22024. cCL("mufep", e180120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22025. cCL("mufem", e180140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22026. cCL("mufez", e180160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22027. cCL("dvfs", e400100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22028. cCL("dvfsp", e400120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22029. cCL("dvfsm", e400140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22030. cCL("dvfsz", e400160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22031. cCL("dvfd", e400180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22032. cCL("dvfdp", e4001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22033. cCL("dvfdm", e4001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22034. cCL("dvfdz", e4001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22035. cCL("dvfe", e480100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22036. cCL("dvfep", e480120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22037. cCL("dvfem", e480140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22038. cCL("dvfez", e480160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22039. cCL("rdfs", e500100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22040. cCL("rdfsp", e500120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22041. cCL("rdfsm", e500140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22042. cCL("rdfsz", e500160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22043. cCL("rdfd", e500180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22044. cCL("rdfdp", e5001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22045. cCL("rdfdm", e5001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22046. cCL("rdfdz", e5001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22047. cCL("rdfe", e580100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22048. cCL("rdfep", e580120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22049. cCL("rdfem", e580140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22050. cCL("rdfez", e580160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22051. cCL("pows", e600100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22052. cCL("powsp", e600120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22053. cCL("powsm", e600140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22054. cCL("powsz", e600160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22055. cCL("powd", e600180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22056. cCL("powdp", e6001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22057. cCL("powdm", e6001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22058. cCL("powdz", e6001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22059. cCL("powe", e680100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22060. cCL("powep", e680120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22061. cCL("powem", e680140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22062. cCL("powez", e680160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22063. cCL("rpws", e700100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22064. cCL("rpwsp", e700120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22065. cCL("rpwsm", e700140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22066. cCL("rpwsz", e700160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22067. cCL("rpwd", e700180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22068. cCL("rpwdp", e7001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22069. cCL("rpwdm", e7001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22070. cCL("rpwdz", e7001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22071. cCL("rpwe", e780100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22072. cCL("rpwep", e780120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22073. cCL("rpwem", e780140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22074. cCL("rpwez", e780160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22075. cCL("rmfs", e800100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22076. cCL("rmfsp", e800120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22077. cCL("rmfsm", e800140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22078. cCL("rmfsz", e800160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22079. cCL("rmfd", e800180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22080. cCL("rmfdp", e8001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22081. cCL("rmfdm", e8001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22082. cCL("rmfdz", e8001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22083. cCL("rmfe", e880100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22084. cCL("rmfep", e880120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22085. cCL("rmfem", e880140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22086. cCL("rmfez", e880160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22087. cCL("fmls", e900100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22088. cCL("fmlsp", e900120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22089. cCL("fmlsm", e900140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22090. cCL("fmlsz", e900160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22091. cCL("fmld", e900180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22092. cCL("fmldp", e9001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22093. cCL("fmldm", e9001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22094. cCL("fmldz", e9001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22095. cCL("fmle", e980100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22096. cCL("fmlep", e980120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22097. cCL("fmlem", e980140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22098. cCL("fmlez", e980160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22099. cCL("fdvs", ea00100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22100. cCL("fdvsp", ea00120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22101. cCL("fdvsm", ea00140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22102. cCL("fdvsz", ea00160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22103. cCL("fdvd", ea00180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22104. cCL("fdvdp", ea001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22105. cCL("fdvdm", ea001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22106. cCL("fdvdz", ea001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22107. cCL("fdve", ea80100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22108. cCL("fdvep", ea80120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22109. cCL("fdvem", ea80140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22110. cCL("fdvez", ea80160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22111. cCL("frds", eb00100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22112. cCL("frdsp", eb00120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22113. cCL("frdsm", eb00140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22114. cCL("frdsz", eb00160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22115. cCL("frdd", eb00180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22116. cCL("frddp", eb001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22117. cCL("frddm", eb001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22118. cCL("frddz", eb001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22119. cCL("frde", eb80100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22120. cCL("frdep", eb80120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22121. cCL("frdem", eb80140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22122. cCL("frdez", eb80160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22123. cCL("pols", ec00100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22124. cCL("polsp", ec00120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22125. cCL("polsm", ec00140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22126. cCL("polsz", ec00160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22127. cCL("pold", ec00180, 3, (RF, RF, RF_IF), rd_rn_rm),
  22128. cCL("poldp", ec001a0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22129. cCL("poldm", ec001c0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22130. cCL("poldz", ec001e0, 3, (RF, RF, RF_IF), rd_rn_rm),
  22131. cCL("pole", ec80100, 3, (RF, RF, RF_IF), rd_rn_rm),
  22132. cCL("polep", ec80120, 3, (RF, RF, RF_IF), rd_rn_rm),
  22133. cCL("polem", ec80140, 3, (RF, RF, RF_IF), rd_rn_rm),
  22134. cCL("polez", ec80160, 3, (RF, RF, RF_IF), rd_rn_rm),
  22135. cCE("cmf", e90f110, 2, (RF, RF_IF), fpa_cmp),
  22136. C3E("cmfe", ed0f110, 2, (RF, RF_IF), fpa_cmp),
  22137. cCE("cnf", eb0f110, 2, (RF, RF_IF), fpa_cmp),
  22138. C3E("cnfe", ef0f110, 2, (RF, RF_IF), fpa_cmp),
  22139. cCL("flts", e000110, 2, (RF, RR), rn_rd),
  22140. cCL("fltsp", e000130, 2, (RF, RR), rn_rd),
  22141. cCL("fltsm", e000150, 2, (RF, RR), rn_rd),
  22142. cCL("fltsz", e000170, 2, (RF, RR), rn_rd),
  22143. cCL("fltd", e000190, 2, (RF, RR), rn_rd),
  22144. cCL("fltdp", e0001b0, 2, (RF, RR), rn_rd),
  22145. cCL("fltdm", e0001d0, 2, (RF, RR), rn_rd),
  22146. cCL("fltdz", e0001f0, 2, (RF, RR), rn_rd),
  22147. cCL("flte", e080110, 2, (RF, RR), rn_rd),
  22148. cCL("fltep", e080130, 2, (RF, RR), rn_rd),
  22149. cCL("fltem", e080150, 2, (RF, RR), rn_rd),
  22150. cCL("fltez", e080170, 2, (RF, RR), rn_rd),
  22151. /* The implementation of the FIX instruction is broken on some
  22152. assemblers, in that it accepts a precision specifier as well as a
  22153. rounding specifier, despite the fact that this is meaningless.
  22154. To be more compatible, we accept it as well, though of course it
  22155. does not set any bits. */
  22156. cCE("fix", e100110, 2, (RR, RF), rd_rm),
  22157. cCL("fixp", e100130, 2, (RR, RF), rd_rm),
  22158. cCL("fixm", e100150, 2, (RR, RF), rd_rm),
  22159. cCL("fixz", e100170, 2, (RR, RF), rd_rm),
  22160. cCL("fixsp", e100130, 2, (RR, RF), rd_rm),
  22161. cCL("fixsm", e100150, 2, (RR, RF), rd_rm),
  22162. cCL("fixsz", e100170, 2, (RR, RF), rd_rm),
  22163. cCL("fixdp", e100130, 2, (RR, RF), rd_rm),
  22164. cCL("fixdm", e100150, 2, (RR, RF), rd_rm),
  22165. cCL("fixdz", e100170, 2, (RR, RF), rd_rm),
  22166. cCL("fixep", e100130, 2, (RR, RF), rd_rm),
  22167. cCL("fixem", e100150, 2, (RR, RF), rd_rm),
  22168. cCL("fixez", e100170, 2, (RR, RF), rd_rm),
  22169. /* Instructions that were new with the real FPA, call them V2. */
  22170. #undef ARM_VARIANT
  22171. #define ARM_VARIANT & fpu_fpa_ext_v2
  22172. cCE("lfm", c100200, 3, (RF, I4b, ADDR), fpa_ldmstm),
  22173. cCL("lfmfd", c900200, 3, (RF, I4b, ADDR), fpa_ldmstm),
  22174. cCL("lfmea", d100200, 3, (RF, I4b, ADDR), fpa_ldmstm),
  22175. cCE("sfm", c000200, 3, (RF, I4b, ADDR), fpa_ldmstm),
  22176. cCL("sfmfd", d000200, 3, (RF, I4b, ADDR), fpa_ldmstm),
  22177. cCL("sfmea", c800200, 3, (RF, I4b, ADDR), fpa_ldmstm),
  22178. #undef ARM_VARIANT
  22179. #define ARM_VARIANT & fpu_vfp_ext_v1xd /* VFP V1xD (single precision). */
  22180. #undef THUMB_VARIANT
  22181. #define THUMB_VARIANT & arm_ext_v6t2
  22182. mcCE(vmrs, ef00a10, 2, (APSR_RR, RVC), vmrs),
  22183. mcCE(vmsr, ee00a10, 2, (RVC, RR), vmsr),
  22184. mcCE(fldd, d100b00, 2, (RVD, ADDRGLDC), vfp_dp_ldst),
  22185. mcCE(fstd, d000b00, 2, (RVD, ADDRGLDC), vfp_dp_ldst),
  22186. mcCE(flds, d100a00, 2, (RVS, ADDRGLDC), vfp_sp_ldst),
  22187. mcCE(fsts, d000a00, 2, (RVS, ADDRGLDC), vfp_sp_ldst),
  22188. /* Memory operations. */
  22189. mcCE(fldmias, c900a00, 2, (RRnpctw, VRSLST), vfp_sp_ldstmia),
  22190. mcCE(fldmdbs, d300a00, 2, (RRnpctw, VRSLST), vfp_sp_ldstmdb),
  22191. mcCE(fstmias, c800a00, 2, (RRnpctw, VRSLST), vfp_sp_ldstmia),
  22192. mcCE(fstmdbs, d200a00, 2, (RRnpctw, VRSLST), vfp_sp_ldstmdb),
  22193. #undef THUMB_VARIANT
  22194. /* Moves and type conversions. */
  22195. cCE("fmstat", ef1fa10, 0, (), noargs),
  22196. cCE("fsitos", eb80ac0, 2, (RVS, RVS), vfp_sp_monadic),
  22197. cCE("fuitos", eb80a40, 2, (RVS, RVS), vfp_sp_monadic),
  22198. cCE("ftosis", ebd0a40, 2, (RVS, RVS), vfp_sp_monadic),
  22199. cCE("ftosizs", ebd0ac0, 2, (RVS, RVS), vfp_sp_monadic),
  22200. cCE("ftouis", ebc0a40, 2, (RVS, RVS), vfp_sp_monadic),
  22201. cCE("ftouizs", ebc0ac0, 2, (RVS, RVS), vfp_sp_monadic),
  22202. cCE("fmrx", ef00a10, 2, (RR, RVC), rd_rn),
  22203. cCE("fmxr", ee00a10, 2, (RVC, RR), rn_rd),
  22204. /* Memory operations. */
  22205. cCE("fldmfds", c900a00, 2, (RRnpctw, VRSLST), vfp_sp_ldstmia),
  22206. cCE("fldmeas", d300a00, 2, (RRnpctw, VRSLST), vfp_sp_ldstmdb),
  22207. cCE("fldmiax", c900b00, 2, (RRnpctw, VRDLST), vfp_xp_ldstmia),
  22208. cCE("fldmfdx", c900b00, 2, (RRnpctw, VRDLST), vfp_xp_ldstmia),
  22209. cCE("fldmdbx", d300b00, 2, (RRnpctw, VRDLST), vfp_xp_ldstmdb),
  22210. cCE("fldmeax", d300b00, 2, (RRnpctw, VRDLST), vfp_xp_ldstmdb),
  22211. cCE("fstmeas", c800a00, 2, (RRnpctw, VRSLST), vfp_sp_ldstmia),
  22212. cCE("fstmfds", d200a00, 2, (RRnpctw, VRSLST), vfp_sp_ldstmdb),
  22213. cCE("fstmiax", c800b00, 2, (RRnpctw, VRDLST), vfp_xp_ldstmia),
  22214. cCE("fstmeax", c800b00, 2, (RRnpctw, VRDLST), vfp_xp_ldstmia),
  22215. cCE("fstmdbx", d200b00, 2, (RRnpctw, VRDLST), vfp_xp_ldstmdb),
  22216. cCE("fstmfdx", d200b00, 2, (RRnpctw, VRDLST), vfp_xp_ldstmdb),
  22217. /* Monadic operations. */
  22218. cCE("fabss", eb00ac0, 2, (RVS, RVS), vfp_sp_monadic),
  22219. cCE("fnegs", eb10a40, 2, (RVS, RVS), vfp_sp_monadic),
  22220. cCE("fsqrts", eb10ac0, 2, (RVS, RVS), vfp_sp_monadic),
  22221. /* Dyadic operations. */
  22222. cCE("fadds", e300a00, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22223. cCE("fsubs", e300a40, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22224. cCE("fmuls", e200a00, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22225. cCE("fdivs", e800a00, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22226. cCE("fmacs", e000a00, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22227. cCE("fmscs", e100a00, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22228. cCE("fnmuls", e200a40, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22229. cCE("fnmacs", e000a40, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22230. cCE("fnmscs", e100a40, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22231. /* Comparisons. */
  22232. cCE("fcmps", eb40a40, 2, (RVS, RVS), vfp_sp_monadic),
  22233. cCE("fcmpzs", eb50a40, 1, (RVS), vfp_sp_compare_z),
  22234. cCE("fcmpes", eb40ac0, 2, (RVS, RVS), vfp_sp_monadic),
  22235. cCE("fcmpezs", eb50ac0, 1, (RVS), vfp_sp_compare_z),
  22236. /* Double precision load/store are still present on single precision
  22237. implementations. */
  22238. cCE("fldmiad", c900b00, 2, (RRnpctw, VRDLST), vfp_dp_ldstmia),
  22239. cCE("fldmfdd", c900b00, 2, (RRnpctw, VRDLST), vfp_dp_ldstmia),
  22240. cCE("fldmdbd", d300b00, 2, (RRnpctw, VRDLST), vfp_dp_ldstmdb),
  22241. cCE("fldmead", d300b00, 2, (RRnpctw, VRDLST), vfp_dp_ldstmdb),
  22242. cCE("fstmiad", c800b00, 2, (RRnpctw, VRDLST), vfp_dp_ldstmia),
  22243. cCE("fstmead", c800b00, 2, (RRnpctw, VRDLST), vfp_dp_ldstmia),
  22244. cCE("fstmdbd", d200b00, 2, (RRnpctw, VRDLST), vfp_dp_ldstmdb),
  22245. cCE("fstmfdd", d200b00, 2, (RRnpctw, VRDLST), vfp_dp_ldstmdb),
  22246. #undef ARM_VARIANT
  22247. #define ARM_VARIANT & fpu_vfp_ext_v1 /* VFP V1 (Double precision). */
  22248. /* Moves and type conversions. */
  22249. cCE("fcvtds", eb70ac0, 2, (RVD, RVS), vfp_dp_sp_cvt),
  22250. cCE("fcvtsd", eb70bc0, 2, (RVS, RVD), vfp_sp_dp_cvt),
  22251. cCE("fmdhr", e200b10, 2, (RVD, RR), vfp_dp_rn_rd),
  22252. cCE("fmdlr", e000b10, 2, (RVD, RR), vfp_dp_rn_rd),
  22253. cCE("fmrdh", e300b10, 2, (RR, RVD), vfp_dp_rd_rn),
  22254. cCE("fmrdl", e100b10, 2, (RR, RVD), vfp_dp_rd_rn),
  22255. cCE("fsitod", eb80bc0, 2, (RVD, RVS), vfp_dp_sp_cvt),
  22256. cCE("fuitod", eb80b40, 2, (RVD, RVS), vfp_dp_sp_cvt),
  22257. cCE("ftosid", ebd0b40, 2, (RVS, RVD), vfp_sp_dp_cvt),
  22258. cCE("ftosizd", ebd0bc0, 2, (RVS, RVD), vfp_sp_dp_cvt),
  22259. cCE("ftouid", ebc0b40, 2, (RVS, RVD), vfp_sp_dp_cvt),
  22260. cCE("ftouizd", ebc0bc0, 2, (RVS, RVD), vfp_sp_dp_cvt),
  22261. /* Monadic operations. */
  22262. cCE("fabsd", eb00bc0, 2, (RVD, RVD), vfp_dp_rd_rm),
  22263. cCE("fnegd", eb10b40, 2, (RVD, RVD), vfp_dp_rd_rm),
  22264. cCE("fsqrtd", eb10bc0, 2, (RVD, RVD), vfp_dp_rd_rm),
  22265. /* Dyadic operations. */
  22266. cCE("faddd", e300b00, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22267. cCE("fsubd", e300b40, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22268. cCE("fmuld", e200b00, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22269. cCE("fdivd", e800b00, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22270. cCE("fmacd", e000b00, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22271. cCE("fmscd", e100b00, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22272. cCE("fnmuld", e200b40, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22273. cCE("fnmacd", e000b40, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22274. cCE("fnmscd", e100b40, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22275. /* Comparisons. */
  22276. cCE("fcmpd", eb40b40, 2, (RVD, RVD), vfp_dp_rd_rm),
  22277. cCE("fcmpzd", eb50b40, 1, (RVD), vfp_dp_rd),
  22278. cCE("fcmped", eb40bc0, 2, (RVD, RVD), vfp_dp_rd_rm),
  22279. cCE("fcmpezd", eb50bc0, 1, (RVD), vfp_dp_rd),
  22280. /* Instructions which may belong to either the Neon or VFP instruction sets.
  22281. Individual encoder functions perform additional architecture checks. */
  22282. #undef ARM_VARIANT
  22283. #define ARM_VARIANT & fpu_vfp_ext_v1xd
  22284. #undef THUMB_VARIANT
  22285. #define THUMB_VARIANT & arm_ext_v6t2
  22286. NCE(vldm, c900b00, 2, (RRnpctw, VRSDLST), neon_ldm_stm),
  22287. NCE(vldmia, c900b00, 2, (RRnpctw, VRSDLST), neon_ldm_stm),
  22288. NCE(vldmdb, d100b00, 2, (RRnpctw, VRSDLST), neon_ldm_stm),
  22289. NCE(vstm, c800b00, 2, (RRnpctw, VRSDLST), neon_ldm_stm),
  22290. NCE(vstmia, c800b00, 2, (RRnpctw, VRSDLST), neon_ldm_stm),
  22291. NCE(vstmdb, d000b00, 2, (RRnpctw, VRSDLST), neon_ldm_stm),
  22292. NCE(vpop, 0, 1, (VRSDLST), vfp_nsyn_pop),
  22293. NCE(vpush, 0, 1, (VRSDLST), vfp_nsyn_push),
  22294. #undef THUMB_VARIANT
  22295. #define THUMB_VARIANT & fpu_vfp_ext_v1xd
  22296. /* These mnemonics are unique to VFP. */
  22297. NCE(vsqrt, 0, 2, (RVSD, RVSD), vfp_nsyn_sqrt),
  22298. NCE(vdiv, 0, 3, (RVSD, RVSD, RVSD), vfp_nsyn_div),
  22299. nCE(vnmul, _vnmul, 3, (RVSD, RVSD, RVSD), vfp_nsyn_nmul),
  22300. nCE(vnmla, _vnmla, 3, (RVSD, RVSD, RVSD), vfp_nsyn_nmul),
  22301. nCE(vnmls, _vnmls, 3, (RVSD, RVSD, RVSD), vfp_nsyn_nmul),
  22302. NCE(vcvtz, 0, 2, (RVSD, RVSD), vfp_nsyn_cvtz),
  22303. /* Mnemonics shared by Neon and VFP. */
  22304. nCEF(vmls, _vmls, 3, (RNSDQ, oRNSDQ, RNSDQ_RNSC), neon_mac_maybe_scalar),
  22305. mnCEF(vcvt, _vcvt, 3, (RNSDQMQ, RNSDQMQ, oI32z), neon_cvt),
  22306. nCEF(vcvtr, _vcvt, 2, (RNSDQ, RNSDQ), neon_cvtr),
  22307. MNCEF(vcvtb, eb20a40, 3, (RVSDMQ, RVSDMQ, oI32b), neon_cvtb),
  22308. MNCEF(vcvtt, eb20a40, 3, (RVSDMQ, RVSDMQ, oI32b), neon_cvtt),
  22309. /* NOTE: All VMOV encoding is special-cased! */
  22310. NCE(vmovq, 0, 1, (VMOV), neon_mov),
  22311. #undef THUMB_VARIANT
  22312. /* Could be either VLDR/VSTR or VLDR/VSTR (system register) which are guarded
  22313. by different feature bits. Since we are setting the Thumb guard, we can
  22314. require Thumb-1 which makes it a nop guard and set the right feature bit in
  22315. do_vldr_vstr (). */
  22316. #define THUMB_VARIANT & arm_ext_v4t
  22317. NCE(vldr, d100b00, 2, (VLDR, ADDRGLDC), vldr_vstr),
  22318. NCE(vstr, d000b00, 2, (VLDR, ADDRGLDC), vldr_vstr),
  22319. #undef ARM_VARIANT
  22320. #define ARM_VARIANT & arm_ext_fp16
  22321. #undef THUMB_VARIANT
  22322. #define THUMB_VARIANT & arm_ext_fp16
  22323. /* New instructions added from v8.2, allowing the extraction and insertion of
  22324. the upper 16 bits of a 32-bit vector register. */
  22325. NCE (vmovx, eb00a40, 2, (RVS, RVS), neon_movhf),
  22326. NCE (vins, eb00ac0, 2, (RVS, RVS), neon_movhf),
  22327. /* New backported fma/fms instructions optional in v8.2. */
  22328. NUF (vfmsl, 810, 3, (RNDQ, RNSD, RNSD_RNSC), neon_vfmsl),
  22329. NUF (vfmal, 810, 3, (RNDQ, RNSD, RNSD_RNSC), neon_vfmal),
  22330. #undef THUMB_VARIANT
  22331. #define THUMB_VARIANT & fpu_neon_ext_v1
  22332. #undef ARM_VARIANT
  22333. #define ARM_VARIANT & fpu_neon_ext_v1
  22334. /* Data processing with three registers of the same length. */
  22335. /* integer ops, valid types S8 S16 S32 U8 U16 U32. */
  22336. NUF(vaba, 0000710, 3, (RNDQ, RNDQ, RNDQ), neon_dyadic_i_su),
  22337. NUF(vabaq, 0000710, 3, (RNQ, RNQ, RNQ), neon_dyadic_i_su),
  22338. NUF(vhaddq, 0000000, 3, (RNQ, oRNQ, RNQ), neon_dyadic_i_su),
  22339. NUF(vrhaddq, 0000100, 3, (RNQ, oRNQ, RNQ), neon_dyadic_i_su),
  22340. NUF(vhsubq, 0000200, 3, (RNQ, oRNQ, RNQ), neon_dyadic_i_su),
  22341. /* integer ops, valid types S8 S16 S32 S64 U8 U16 U32 U64. */
  22342. NUF(vqaddq, 0000010, 3, (RNQ, oRNQ, RNQ), neon_dyadic_i64_su),
  22343. NUF(vqsubq, 0000210, 3, (RNQ, oRNQ, RNQ), neon_dyadic_i64_su),
  22344. NUF(vrshlq, 0000500, 3, (RNQ, oRNQ, RNQ), neon_rshl),
  22345. NUF(vqrshlq, 0000510, 3, (RNQ, oRNQ, RNQ), neon_rshl),
  22346. /* If not immediate, fall back to neon_dyadic_i64_su.
  22347. shl should accept I8 I16 I32 I64,
  22348. qshl should accept S8 S16 S32 S64 U8 U16 U32 U64. */
  22349. nUF(vshlq, _vshl, 3, (RNQ, oRNQ, RNDQ_I63b), neon_shl),
  22350. nUF(vqshlq, _vqshl, 3, (RNQ, oRNQ, RNDQ_I63b), neon_qshl),
  22351. /* Logic ops, types optional & ignored. */
  22352. nUF(vandq, _vand, 3, (RNQ, oRNQ, RNDQ_Ibig), neon_logic),
  22353. nUF(vbicq, _vbic, 3, (RNQ, oRNQ, RNDQ_Ibig), neon_logic),
  22354. nUF(vorrq, _vorr, 3, (RNQ, oRNQ, RNDQ_Ibig), neon_logic),
  22355. nUF(vornq, _vorn, 3, (RNQ, oRNQ, RNDQ_Ibig), neon_logic),
  22356. nUF(veorq, _veor, 3, (RNQ, oRNQ, RNQ), neon_logic),
  22357. /* Bitfield ops, untyped. */
  22358. NUF(vbsl, 1100110, 3, (RNDQ, RNDQ, RNDQ), neon_bitfield),
  22359. NUF(vbslq, 1100110, 3, (RNQ, RNQ, RNQ), neon_bitfield),
  22360. NUF(vbit, 1200110, 3, (RNDQ, RNDQ, RNDQ), neon_bitfield),
  22361. NUF(vbitq, 1200110, 3, (RNQ, RNQ, RNQ), neon_bitfield),
  22362. NUF(vbif, 1300110, 3, (RNDQ, RNDQ, RNDQ), neon_bitfield),
  22363. NUF(vbifq, 1300110, 3, (RNQ, RNQ, RNQ), neon_bitfield),
  22364. /* Int and float variants, types S8 S16 S32 U8 U16 U32 F16 F32. */
  22365. nUF(vabdq, _vabd, 3, (RNQ, oRNQ, RNQ), neon_dyadic_if_su),
  22366. nUF(vmaxq, _vmax, 3, (RNQ, oRNQ, RNQ), neon_dyadic_if_su),
  22367. nUF(vminq, _vmin, 3, (RNQ, oRNQ, RNQ), neon_dyadic_if_su),
  22368. /* Comparisons. Types S8 S16 S32 U8 U16 U32 F32. Non-immediate versions fall
  22369. back to neon_dyadic_if_su. */
  22370. nUF(vcge, _vcge, 3, (RNDQ, oRNDQ, RNDQ_I0), neon_cmp),
  22371. nUF(vcgeq, _vcge, 3, (RNQ, oRNQ, RNDQ_I0), neon_cmp),
  22372. nUF(vcgt, _vcgt, 3, (RNDQ, oRNDQ, RNDQ_I0), neon_cmp),
  22373. nUF(vcgtq, _vcgt, 3, (RNQ, oRNQ, RNDQ_I0), neon_cmp),
  22374. nUF(vclt, _vclt, 3, (RNDQ, oRNDQ, RNDQ_I0), neon_cmp_inv),
  22375. nUF(vcltq, _vclt, 3, (RNQ, oRNQ, RNDQ_I0), neon_cmp_inv),
  22376. nUF(vcle, _vcle, 3, (RNDQ, oRNDQ, RNDQ_I0), neon_cmp_inv),
  22377. nUF(vcleq, _vcle, 3, (RNQ, oRNQ, RNDQ_I0), neon_cmp_inv),
  22378. /* Comparison. Type I8 I16 I32 F32. */
  22379. nUF(vceq, _vceq, 3, (RNDQ, oRNDQ, RNDQ_I0), neon_ceq),
  22380. nUF(vceqq, _vceq, 3, (RNQ, oRNQ, RNDQ_I0), neon_ceq),
  22381. /* As above, D registers only. */
  22382. nUF(vpmax, _vpmax, 3, (RND, oRND, RND), neon_dyadic_if_su_d),
  22383. nUF(vpmin, _vpmin, 3, (RND, oRND, RND), neon_dyadic_if_su_d),
  22384. /* Int and float variants, signedness unimportant. */
  22385. nUF(vmlaq, _vmla, 3, (RNQ, oRNQ, RNDQ_RNSC), neon_mac_maybe_scalar),
  22386. nUF(vmlsq, _vmls, 3, (RNQ, oRNQ, RNDQ_RNSC), neon_mac_maybe_scalar),
  22387. nUF(vpadd, _vpadd, 3, (RND, oRND, RND), neon_dyadic_if_i_d),
  22388. /* Add/sub take types I8 I16 I32 I64 F32. */
  22389. nUF(vaddq, _vadd, 3, (RNQ, oRNQ, RNQ), neon_addsub_if_i),
  22390. nUF(vsubq, _vsub, 3, (RNQ, oRNQ, RNQ), neon_addsub_if_i),
  22391. /* vtst takes sizes 8, 16, 32. */
  22392. NUF(vtst, 0000810, 3, (RNDQ, oRNDQ, RNDQ), neon_tst),
  22393. NUF(vtstq, 0000810, 3, (RNQ, oRNQ, RNQ), neon_tst),
  22394. /* VMUL takes I8 I16 I32 F32 P8. */
  22395. nUF(vmulq, _vmul, 3, (RNQ, oRNQ, RNDQ_RNSC), neon_mul),
  22396. /* VQD{R}MULH takes S16 S32. */
  22397. nUF(vqdmulhq, _vqdmulh, 3, (RNQ, oRNQ, RNDQ_RNSC), neon_qdmulh),
  22398. nUF(vqrdmulhq, _vqrdmulh, 3, (RNQ, oRNQ, RNDQ_RNSC), neon_qdmulh),
  22399. NUF(vacge, 0000e10, 3, (RNDQ, oRNDQ, RNDQ), neon_fcmp_absolute),
  22400. NUF(vacgeq, 0000e10, 3, (RNQ, oRNQ, RNQ), neon_fcmp_absolute),
  22401. NUF(vacgt, 0200e10, 3, (RNDQ, oRNDQ, RNDQ), neon_fcmp_absolute),
  22402. NUF(vacgtq, 0200e10, 3, (RNQ, oRNQ, RNQ), neon_fcmp_absolute),
  22403. NUF(vaclt, 0200e10, 3, (RNDQ, oRNDQ, RNDQ), neon_fcmp_absolute_inv),
  22404. NUF(vacltq, 0200e10, 3, (RNQ, oRNQ, RNQ), neon_fcmp_absolute_inv),
  22405. NUF(vacle, 0000e10, 3, (RNDQ, oRNDQ, RNDQ), neon_fcmp_absolute_inv),
  22406. NUF(vacleq, 0000e10, 3, (RNQ, oRNQ, RNQ), neon_fcmp_absolute_inv),
  22407. NUF(vrecps, 0000f10, 3, (RNDQ, oRNDQ, RNDQ), neon_step),
  22408. NUF(vrecpsq, 0000f10, 3, (RNQ, oRNQ, RNQ), neon_step),
  22409. NUF(vrsqrts, 0200f10, 3, (RNDQ, oRNDQ, RNDQ), neon_step),
  22410. NUF(vrsqrtsq, 0200f10, 3, (RNQ, oRNQ, RNQ), neon_step),
  22411. /* ARM v8.1 extension. */
  22412. nUF (vqrdmlahq, _vqrdmlah, 3, (RNQ, oRNQ, RNDQ_RNSC), neon_qrdmlah),
  22413. nUF (vqrdmlsh, _vqrdmlsh, 3, (RNDQ, oRNDQ, RNDQ_RNSC), neon_qrdmlah),
  22414. nUF (vqrdmlshq, _vqrdmlsh, 3, (RNQ, oRNQ, RNDQ_RNSC), neon_qrdmlah),
  22415. /* Two address, int/float. Types S8 S16 S32 F32. */
  22416. NUF(vabsq, 1b10300, 2, (RNQ, RNQ), neon_abs_neg),
  22417. NUF(vnegq, 1b10380, 2, (RNQ, RNQ), neon_abs_neg),
  22418. /* Data processing with two registers and a shift amount. */
  22419. /* Right shifts, and variants with rounding.
  22420. Types accepted S8 S16 S32 S64 U8 U16 U32 U64. */
  22421. NUF(vshrq, 0800010, 3, (RNQ, oRNQ, I64z), neon_rshift_round_imm),
  22422. NUF(vrshrq, 0800210, 3, (RNQ, oRNQ, I64z), neon_rshift_round_imm),
  22423. NUF(vsra, 0800110, 3, (RNDQ, oRNDQ, I64), neon_rshift_round_imm),
  22424. NUF(vsraq, 0800110, 3, (RNQ, oRNQ, I64), neon_rshift_round_imm),
  22425. NUF(vrsra, 0800310, 3, (RNDQ, oRNDQ, I64), neon_rshift_round_imm),
  22426. NUF(vrsraq, 0800310, 3, (RNQ, oRNQ, I64), neon_rshift_round_imm),
  22427. /* Shift and insert. Sizes accepted 8 16 32 64. */
  22428. NUF(vsliq, 1800510, 3, (RNQ, oRNQ, I63), neon_sli),
  22429. NUF(vsriq, 1800410, 3, (RNQ, oRNQ, I64), neon_sri),
  22430. /* QSHL{U} immediate accepts S8 S16 S32 S64 U8 U16 U32 U64. */
  22431. NUF(vqshluq, 1800610, 3, (RNQ, oRNQ, I63), neon_qshlu_imm),
  22432. /* Right shift immediate, saturating & narrowing, with rounding variants.
  22433. Types accepted S16 S32 S64 U16 U32 U64. */
  22434. NUF(vqshrn, 0800910, 3, (RND, RNQ, I32z), neon_rshift_sat_narrow),
  22435. NUF(vqrshrn, 0800950, 3, (RND, RNQ, I32z), neon_rshift_sat_narrow),
  22436. /* As above, unsigned. Types accepted S16 S32 S64. */
  22437. NUF(vqshrun, 0800810, 3, (RND, RNQ, I32z), neon_rshift_sat_narrow_u),
  22438. NUF(vqrshrun, 0800850, 3, (RND, RNQ, I32z), neon_rshift_sat_narrow_u),
  22439. /* Right shift narrowing. Types accepted I16 I32 I64. */
  22440. NUF(vshrn, 0800810, 3, (RND, RNQ, I32z), neon_rshift_narrow),
  22441. NUF(vrshrn, 0800850, 3, (RND, RNQ, I32z), neon_rshift_narrow),
  22442. /* Special case. Types S8 S16 S32 U8 U16 U32. Handles max shift variant. */
  22443. nUF(vshll, _vshll, 3, (RNQ, RND, I32), neon_shll),
  22444. /* CVT with optional immediate for fixed-point variant. */
  22445. nUF(vcvtq, _vcvt, 3, (RNQ, RNQ, oI32b), neon_cvt),
  22446. nUF(vmvnq, _vmvn, 2, (RNQ, RNDQ_Ibig), neon_mvn),
  22447. /* Data processing, three registers of different lengths. */
  22448. /* Dyadic, long insns. Types S8 S16 S32 U8 U16 U32. */
  22449. NUF(vabal, 0800500, 3, (RNQ, RND, RND), neon_abal),
  22450. /* If not scalar, fall back to neon_dyadic_long.
  22451. Vector types as above, scalar types S16 S32 U16 U32. */
  22452. nUF(vmlal, _vmlal, 3, (RNQ, RND, RND_RNSC), neon_mac_maybe_scalar_long),
  22453. nUF(vmlsl, _vmlsl, 3, (RNQ, RND, RND_RNSC), neon_mac_maybe_scalar_long),
  22454. /* Dyadic, widening insns. Types S8 S16 S32 U8 U16 U32. */
  22455. NUF(vaddw, 0800100, 3, (RNQ, oRNQ, RND), neon_dyadic_wide),
  22456. NUF(vsubw, 0800300, 3, (RNQ, oRNQ, RND), neon_dyadic_wide),
  22457. /* Dyadic, narrowing insns. Types I16 I32 I64. */
  22458. NUF(vaddhn, 0800400, 3, (RND, RNQ, RNQ), neon_dyadic_narrow),
  22459. NUF(vraddhn, 1800400, 3, (RND, RNQ, RNQ), neon_dyadic_narrow),
  22460. NUF(vsubhn, 0800600, 3, (RND, RNQ, RNQ), neon_dyadic_narrow),
  22461. NUF(vrsubhn, 1800600, 3, (RND, RNQ, RNQ), neon_dyadic_narrow),
  22462. /* Saturating doubling multiplies. Types S16 S32. */
  22463. nUF(vqdmlal, _vqdmlal, 3, (RNQ, RND, RND_RNSC), neon_mul_sat_scalar_long),
  22464. nUF(vqdmlsl, _vqdmlsl, 3, (RNQ, RND, RND_RNSC), neon_mul_sat_scalar_long),
  22465. nUF(vqdmull, _vqdmull, 3, (RNQ, RND, RND_RNSC), neon_mul_sat_scalar_long),
  22466. /* VMULL. Vector types S8 S16 S32 U8 U16 U32 P8, scalar types
  22467. S16 S32 U16 U32. */
  22468. nUF(vmull, _vmull, 3, (RNQ, RND, RND_RNSC), neon_vmull),
  22469. /* Extract. Size 8. */
  22470. NUF(vext, 0b00000, 4, (RNDQ, oRNDQ, RNDQ, I15), neon_ext),
  22471. NUF(vextq, 0b00000, 4, (RNQ, oRNQ, RNQ, I15), neon_ext),
  22472. /* Two registers, miscellaneous. */
  22473. /* Reverse. Sizes 8 16 32 (must be < size in opcode). */
  22474. NUF(vrev64q, 1b00000, 2, (RNQ, RNQ), neon_rev),
  22475. NUF(vrev32q, 1b00080, 2, (RNQ, RNQ), neon_rev),
  22476. NUF(vrev16q, 1b00100, 2, (RNQ, RNQ), neon_rev),
  22477. /* Vector replicate. Sizes 8 16 32. */
  22478. nCE(vdupq, _vdup, 2, (RNQ, RR_RNSC), neon_dup),
  22479. /* VMOVL. Types S8 S16 S32 U8 U16 U32. */
  22480. NUF(vmovl, 0800a10, 2, (RNQ, RND), neon_movl),
  22481. /* VMOVN. Types I16 I32 I64. */
  22482. nUF(vmovn, _vmovn, 2, (RND, RNQ), neon_movn),
  22483. /* VQMOVN. Types S16 S32 S64 U16 U32 U64. */
  22484. nUF(vqmovn, _vqmovn, 2, (RND, RNQ), neon_qmovn),
  22485. /* VQMOVUN. Types S16 S32 S64. */
  22486. nUF(vqmovun, _vqmovun, 2, (RND, RNQ), neon_qmovun),
  22487. /* VZIP / VUZP. Sizes 8 16 32. */
  22488. NUF(vzip, 1b20180, 2, (RNDQ, RNDQ), neon_zip_uzp),
  22489. NUF(vzipq, 1b20180, 2, (RNQ, RNQ), neon_zip_uzp),
  22490. NUF(vuzp, 1b20100, 2, (RNDQ, RNDQ), neon_zip_uzp),
  22491. NUF(vuzpq, 1b20100, 2, (RNQ, RNQ), neon_zip_uzp),
  22492. /* VQABS / VQNEG. Types S8 S16 S32. */
  22493. NUF(vqabsq, 1b00700, 2, (RNQ, RNQ), neon_sat_abs_neg),
  22494. NUF(vqnegq, 1b00780, 2, (RNQ, RNQ), neon_sat_abs_neg),
  22495. /* Pairwise, lengthening. Types S8 S16 S32 U8 U16 U32. */
  22496. NUF(vpadal, 1b00600, 2, (RNDQ, RNDQ), neon_pair_long),
  22497. NUF(vpadalq, 1b00600, 2, (RNQ, RNQ), neon_pair_long),
  22498. NUF(vpaddl, 1b00200, 2, (RNDQ, RNDQ), neon_pair_long),
  22499. NUF(vpaddlq, 1b00200, 2, (RNQ, RNQ), neon_pair_long),
  22500. /* Reciprocal estimates. Types U32 F16 F32. */
  22501. NUF(vrecpe, 1b30400, 2, (RNDQ, RNDQ), neon_recip_est),
  22502. NUF(vrecpeq, 1b30400, 2, (RNQ, RNQ), neon_recip_est),
  22503. NUF(vrsqrte, 1b30480, 2, (RNDQ, RNDQ), neon_recip_est),
  22504. NUF(vrsqrteq, 1b30480, 2, (RNQ, RNQ), neon_recip_est),
  22505. /* VCLS. Types S8 S16 S32. */
  22506. NUF(vclsq, 1b00400, 2, (RNQ, RNQ), neon_cls),
  22507. /* VCLZ. Types I8 I16 I32. */
  22508. NUF(vclzq, 1b00480, 2, (RNQ, RNQ), neon_clz),
  22509. /* VCNT. Size 8. */
  22510. NUF(vcnt, 1b00500, 2, (RNDQ, RNDQ), neon_cnt),
  22511. NUF(vcntq, 1b00500, 2, (RNQ, RNQ), neon_cnt),
  22512. /* Two address, untyped. */
  22513. NUF(vswp, 1b20000, 2, (RNDQ, RNDQ), neon_swp),
  22514. NUF(vswpq, 1b20000, 2, (RNQ, RNQ), neon_swp),
  22515. /* VTRN. Sizes 8 16 32. */
  22516. nUF(vtrn, _vtrn, 2, (RNDQ, RNDQ), neon_trn),
  22517. nUF(vtrnq, _vtrn, 2, (RNQ, RNQ), neon_trn),
  22518. /* Table lookup. Size 8. */
  22519. NUF(vtbl, 1b00800, 3, (RND, NRDLST, RND), neon_tbl_tbx),
  22520. NUF(vtbx, 1b00840, 3, (RND, NRDLST, RND), neon_tbl_tbx),
  22521. #undef THUMB_VARIANT
  22522. #define THUMB_VARIANT & fpu_vfp_v3_or_neon_ext
  22523. #undef ARM_VARIANT
  22524. #define ARM_VARIANT & fpu_vfp_v3_or_neon_ext
  22525. /* Neon element/structure load/store. */
  22526. nUF(vld1, _vld1, 2, (NSTRLST, ADDR), neon_ldx_stx),
  22527. nUF(vst1, _vst1, 2, (NSTRLST, ADDR), neon_ldx_stx),
  22528. nUF(vld2, _vld2, 2, (NSTRLST, ADDR), neon_ldx_stx),
  22529. nUF(vst2, _vst2, 2, (NSTRLST, ADDR), neon_ldx_stx),
  22530. nUF(vld3, _vld3, 2, (NSTRLST, ADDR), neon_ldx_stx),
  22531. nUF(vst3, _vst3, 2, (NSTRLST, ADDR), neon_ldx_stx),
  22532. nUF(vld4, _vld4, 2, (NSTRLST, ADDR), neon_ldx_stx),
  22533. nUF(vst4, _vst4, 2, (NSTRLST, ADDR), neon_ldx_stx),
  22534. #undef THUMB_VARIANT
  22535. #define THUMB_VARIANT & fpu_vfp_ext_v3xd
  22536. #undef ARM_VARIANT
  22537. #define ARM_VARIANT & fpu_vfp_ext_v3xd
  22538. cCE("fconsts", eb00a00, 2, (RVS, I255), vfp_sp_const),
  22539. cCE("fshtos", eba0a40, 2, (RVS, I16z), vfp_sp_conv_16),
  22540. cCE("fsltos", eba0ac0, 2, (RVS, I32), vfp_sp_conv_32),
  22541. cCE("fuhtos", ebb0a40, 2, (RVS, I16z), vfp_sp_conv_16),
  22542. cCE("fultos", ebb0ac0, 2, (RVS, I32), vfp_sp_conv_32),
  22543. cCE("ftoshs", ebe0a40, 2, (RVS, I16z), vfp_sp_conv_16),
  22544. cCE("ftosls", ebe0ac0, 2, (RVS, I32), vfp_sp_conv_32),
  22545. cCE("ftouhs", ebf0a40, 2, (RVS, I16z), vfp_sp_conv_16),
  22546. cCE("ftouls", ebf0ac0, 2, (RVS, I32), vfp_sp_conv_32),
  22547. #undef THUMB_VARIANT
  22548. #define THUMB_VARIANT & fpu_vfp_ext_v3
  22549. #undef ARM_VARIANT
  22550. #define ARM_VARIANT & fpu_vfp_ext_v3
  22551. cCE("fconstd", eb00b00, 2, (RVD, I255), vfp_dp_const),
  22552. cCE("fshtod", eba0b40, 2, (RVD, I16z), vfp_dp_conv_16),
  22553. cCE("fsltod", eba0bc0, 2, (RVD, I32), vfp_dp_conv_32),
  22554. cCE("fuhtod", ebb0b40, 2, (RVD, I16z), vfp_dp_conv_16),
  22555. cCE("fultod", ebb0bc0, 2, (RVD, I32), vfp_dp_conv_32),
  22556. cCE("ftoshd", ebe0b40, 2, (RVD, I16z), vfp_dp_conv_16),
  22557. cCE("ftosld", ebe0bc0, 2, (RVD, I32), vfp_dp_conv_32),
  22558. cCE("ftouhd", ebf0b40, 2, (RVD, I16z), vfp_dp_conv_16),
  22559. cCE("ftould", ebf0bc0, 2, (RVD, I32), vfp_dp_conv_32),
  22560. #undef ARM_VARIANT
  22561. #define ARM_VARIANT & fpu_vfp_ext_fma
  22562. #undef THUMB_VARIANT
  22563. #define THUMB_VARIANT & fpu_vfp_ext_fma
  22564. /* Mnemonics shared by Neon, VFP, MVE and BF16. These are included in the
  22565. VFP FMA variant; NEON and VFP FMA always includes the NEON
  22566. FMA instructions. */
  22567. mnCEF(vfma, _vfma, 3, (RNSDQMQ, oRNSDQMQ, RNSDQMQR), neon_fmac),
  22568. TUF ("vfmat", c300850, fc300850, 3, (RNSDQMQ, oRNSDQMQ, RNSDQ_RNSC_MQ_RR), mve_vfma, mve_vfma),
  22569. mnCEF(vfms, _vfms, 3, (RNSDQMQ, oRNSDQMQ, RNSDQMQ), neon_fmac),
  22570. /* ffmas/ffmad/ffmss/ffmsd are dummy mnemonics to satisfy gas;
  22571. the v form should always be used. */
  22572. cCE("ffmas", ea00a00, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22573. cCE("ffnmas", ea00a40, 3, (RVS, RVS, RVS), vfp_sp_dyadic),
  22574. cCE("ffmad", ea00b00, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22575. cCE("ffnmad", ea00b40, 3, (RVD, RVD, RVD), vfp_dp_rd_rn_rm),
  22576. nCE(vfnma, _vfnma, 3, (RVSD, RVSD, RVSD), vfp_nsyn_nmul),
  22577. nCE(vfnms, _vfnms, 3, (RVSD, RVSD, RVSD), vfp_nsyn_nmul),
  22578. #undef THUMB_VARIANT
  22579. #undef ARM_VARIANT
  22580. #define ARM_VARIANT & arm_cext_xscale /* Intel XScale extensions. */
  22581. cCE("mia", e200010, 3, (RXA, RRnpc, RRnpc), xsc_mia),
  22582. cCE("miaph", e280010, 3, (RXA, RRnpc, RRnpc), xsc_mia),
  22583. cCE("miabb", e2c0010, 3, (RXA, RRnpc, RRnpc), xsc_mia),
  22584. cCE("miabt", e2d0010, 3, (RXA, RRnpc, RRnpc), xsc_mia),
  22585. cCE("miatb", e2e0010, 3, (RXA, RRnpc, RRnpc), xsc_mia),
  22586. cCE("miatt", e2f0010, 3, (RXA, RRnpc, RRnpc), xsc_mia),
  22587. cCE("mar", c400000, 3, (RXA, RRnpc, RRnpc), xsc_mar),
  22588. cCE("mra", c500000, 3, (RRnpc, RRnpc, RXA), xsc_mra),
  22589. #undef ARM_VARIANT
  22590. #define ARM_VARIANT & arm_cext_iwmmxt /* Intel Wireless MMX technology. */
  22591. cCE("tandcb", e13f130, 1, (RR), iwmmxt_tandorc),
  22592. cCE("tandch", e53f130, 1, (RR), iwmmxt_tandorc),
  22593. cCE("tandcw", e93f130, 1, (RR), iwmmxt_tandorc),
  22594. cCE("tbcstb", e400010, 2, (RIWR, RR), rn_rd),
  22595. cCE("tbcsth", e400050, 2, (RIWR, RR), rn_rd),
  22596. cCE("tbcstw", e400090, 2, (RIWR, RR), rn_rd),
  22597. cCE("textrcb", e130170, 2, (RR, I7), iwmmxt_textrc),
  22598. cCE("textrch", e530170, 2, (RR, I7), iwmmxt_textrc),
  22599. cCE("textrcw", e930170, 2, (RR, I7), iwmmxt_textrc),
  22600. cCE("textrmub",e100070, 3, (RR, RIWR, I7), iwmmxt_textrm),
  22601. cCE("textrmuh",e500070, 3, (RR, RIWR, I7), iwmmxt_textrm),
  22602. cCE("textrmuw",e900070, 3, (RR, RIWR, I7), iwmmxt_textrm),
  22603. cCE("textrmsb",e100078, 3, (RR, RIWR, I7), iwmmxt_textrm),
  22604. cCE("textrmsh",e500078, 3, (RR, RIWR, I7), iwmmxt_textrm),
  22605. cCE("textrmsw",e900078, 3, (RR, RIWR, I7), iwmmxt_textrm),
  22606. cCE("tinsrb", e600010, 3, (RIWR, RR, I7), iwmmxt_tinsr),
  22607. cCE("tinsrh", e600050, 3, (RIWR, RR, I7), iwmmxt_tinsr),
  22608. cCE("tinsrw", e600090, 3, (RIWR, RR, I7), iwmmxt_tinsr),
  22609. cCE("tmcr", e000110, 2, (RIWC_RIWG, RR), rn_rd),
  22610. cCE("tmcrr", c400000, 3, (RIWR, RR, RR), rm_rd_rn),
  22611. cCE("tmia", e200010, 3, (RIWR, RR, RR), iwmmxt_tmia),
  22612. cCE("tmiaph", e280010, 3, (RIWR, RR, RR), iwmmxt_tmia),
  22613. cCE("tmiabb", e2c0010, 3, (RIWR, RR, RR), iwmmxt_tmia),
  22614. cCE("tmiabt", e2d0010, 3, (RIWR, RR, RR), iwmmxt_tmia),
  22615. cCE("tmiatb", e2e0010, 3, (RIWR, RR, RR), iwmmxt_tmia),
  22616. cCE("tmiatt", e2f0010, 3, (RIWR, RR, RR), iwmmxt_tmia),
  22617. cCE("tmovmskb",e100030, 2, (RR, RIWR), rd_rn),
  22618. cCE("tmovmskh",e500030, 2, (RR, RIWR), rd_rn),
  22619. cCE("tmovmskw",e900030, 2, (RR, RIWR), rd_rn),
  22620. cCE("tmrc", e100110, 2, (RR, RIWC_RIWG), rd_rn),
  22621. cCE("tmrrc", c500000, 3, (RR, RR, RIWR), rd_rn_rm),
  22622. cCE("torcb", e13f150, 1, (RR), iwmmxt_tandorc),
  22623. cCE("torch", e53f150, 1, (RR), iwmmxt_tandorc),
  22624. cCE("torcw", e93f150, 1, (RR), iwmmxt_tandorc),
  22625. cCE("waccb", e0001c0, 2, (RIWR, RIWR), rd_rn),
  22626. cCE("wacch", e4001c0, 2, (RIWR, RIWR), rd_rn),
  22627. cCE("waccw", e8001c0, 2, (RIWR, RIWR), rd_rn),
  22628. cCE("waddbss", e300180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22629. cCE("waddb", e000180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22630. cCE("waddbus", e100180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22631. cCE("waddhss", e700180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22632. cCE("waddh", e400180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22633. cCE("waddhus", e500180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22634. cCE("waddwss", eb00180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22635. cCE("waddw", e800180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22636. cCE("waddwus", e900180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22637. cCE("waligni", e000020, 4, (RIWR, RIWR, RIWR, I7), iwmmxt_waligni),
  22638. cCE("walignr0",e800020, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22639. cCE("walignr1",e900020, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22640. cCE("walignr2",ea00020, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22641. cCE("walignr3",eb00020, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22642. cCE("wand", e200000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22643. cCE("wandn", e300000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22644. cCE("wavg2b", e800000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22645. cCE("wavg2br", e900000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22646. cCE("wavg2h", ec00000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22647. cCE("wavg2hr", ed00000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22648. cCE("wcmpeqb", e000060, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22649. cCE("wcmpeqh", e400060, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22650. cCE("wcmpeqw", e800060, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22651. cCE("wcmpgtub",e100060, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22652. cCE("wcmpgtuh",e500060, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22653. cCE("wcmpgtuw",e900060, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22654. cCE("wcmpgtsb",e300060, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22655. cCE("wcmpgtsh",e700060, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22656. cCE("wcmpgtsw",eb00060, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22657. cCE("wldrb", c100000, 2, (RIWR, ADDR), iwmmxt_wldstbh),
  22658. cCE("wldrh", c500000, 2, (RIWR, ADDR), iwmmxt_wldstbh),
  22659. cCE("wldrw", c100100, 2, (RIWR_RIWC, ADDR), iwmmxt_wldstw),
  22660. cCE("wldrd", c500100, 2, (RIWR, ADDR), iwmmxt_wldstd),
  22661. cCE("wmacs", e600100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22662. cCE("wmacsz", e700100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22663. cCE("wmacu", e400100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22664. cCE("wmacuz", e500100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22665. cCE("wmadds", ea00100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22666. cCE("wmaddu", e800100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22667. cCE("wmaxsb", e200160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22668. cCE("wmaxsh", e600160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22669. cCE("wmaxsw", ea00160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22670. cCE("wmaxub", e000160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22671. cCE("wmaxuh", e400160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22672. cCE("wmaxuw", e800160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22673. cCE("wminsb", e300160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22674. cCE("wminsh", e700160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22675. cCE("wminsw", eb00160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22676. cCE("wminub", e100160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22677. cCE("wminuh", e500160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22678. cCE("wminuw", e900160, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22679. cCE("wmov", e000000, 2, (RIWR, RIWR), iwmmxt_wmov),
  22680. cCE("wmulsm", e300100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22681. cCE("wmulsl", e200100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22682. cCE("wmulum", e100100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22683. cCE("wmulul", e000100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22684. cCE("wor", e000000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22685. cCE("wpackhss",e700080, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22686. cCE("wpackhus",e500080, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22687. cCE("wpackwss",eb00080, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22688. cCE("wpackwus",e900080, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22689. cCE("wpackdss",ef00080, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22690. cCE("wpackdus",ed00080, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22691. cCE("wrorh", e700040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22692. cCE("wrorhg", e700148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22693. cCE("wrorw", eb00040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22694. cCE("wrorwg", eb00148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22695. cCE("wrord", ef00040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22696. cCE("wrordg", ef00148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22697. cCE("wsadb", e000120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22698. cCE("wsadbz", e100120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22699. cCE("wsadh", e400120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22700. cCE("wsadhz", e500120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22701. cCE("wshufh", e0001e0, 3, (RIWR, RIWR, I255), iwmmxt_wshufh),
  22702. cCE("wsllh", e500040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22703. cCE("wsllhg", e500148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22704. cCE("wsllw", e900040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22705. cCE("wsllwg", e900148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22706. cCE("wslld", ed00040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22707. cCE("wslldg", ed00148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22708. cCE("wsrah", e400040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22709. cCE("wsrahg", e400148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22710. cCE("wsraw", e800040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22711. cCE("wsrawg", e800148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22712. cCE("wsrad", ec00040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22713. cCE("wsradg", ec00148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22714. cCE("wsrlh", e600040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22715. cCE("wsrlhg", e600148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22716. cCE("wsrlw", ea00040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22717. cCE("wsrlwg", ea00148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22718. cCE("wsrld", ee00040, 3, (RIWR, RIWR, RIWR_I32z),iwmmxt_wrwrwr_or_imm5),
  22719. cCE("wsrldg", ee00148, 3, (RIWR, RIWR, RIWG), rd_rn_rm),
  22720. cCE("wstrb", c000000, 2, (RIWR, ADDR), iwmmxt_wldstbh),
  22721. cCE("wstrh", c400000, 2, (RIWR, ADDR), iwmmxt_wldstbh),
  22722. cCE("wstrw", c000100, 2, (RIWR_RIWC, ADDR), iwmmxt_wldstw),
  22723. cCE("wstrd", c400100, 2, (RIWR, ADDR), iwmmxt_wldstd),
  22724. cCE("wsubbss", e3001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22725. cCE("wsubb", e0001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22726. cCE("wsubbus", e1001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22727. cCE("wsubhss", e7001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22728. cCE("wsubh", e4001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22729. cCE("wsubhus", e5001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22730. cCE("wsubwss", eb001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22731. cCE("wsubw", e8001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22732. cCE("wsubwus", e9001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22733. cCE("wunpckehub",e0000c0, 2, (RIWR, RIWR), rd_rn),
  22734. cCE("wunpckehuh",e4000c0, 2, (RIWR, RIWR), rd_rn),
  22735. cCE("wunpckehuw",e8000c0, 2, (RIWR, RIWR), rd_rn),
  22736. cCE("wunpckehsb",e2000c0, 2, (RIWR, RIWR), rd_rn),
  22737. cCE("wunpckehsh",e6000c0, 2, (RIWR, RIWR), rd_rn),
  22738. cCE("wunpckehsw",ea000c0, 2, (RIWR, RIWR), rd_rn),
  22739. cCE("wunpckihb", e1000c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22740. cCE("wunpckihh", e5000c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22741. cCE("wunpckihw", e9000c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22742. cCE("wunpckelub",e0000e0, 2, (RIWR, RIWR), rd_rn),
  22743. cCE("wunpckeluh",e4000e0, 2, (RIWR, RIWR), rd_rn),
  22744. cCE("wunpckeluw",e8000e0, 2, (RIWR, RIWR), rd_rn),
  22745. cCE("wunpckelsb",e2000e0, 2, (RIWR, RIWR), rd_rn),
  22746. cCE("wunpckelsh",e6000e0, 2, (RIWR, RIWR), rd_rn),
  22747. cCE("wunpckelsw",ea000e0, 2, (RIWR, RIWR), rd_rn),
  22748. cCE("wunpckilb", e1000e0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22749. cCE("wunpckilh", e5000e0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22750. cCE("wunpckilw", e9000e0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22751. cCE("wxor", e100000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22752. cCE("wzero", e300000, 1, (RIWR), iwmmxt_wzero),
  22753. #undef ARM_VARIANT
  22754. #define ARM_VARIANT & arm_cext_iwmmxt2 /* Intel Wireless MMX technology, version 2. */
  22755. cCE("torvscb", e12f190, 1, (RR), iwmmxt_tandorc),
  22756. cCE("torvsch", e52f190, 1, (RR), iwmmxt_tandorc),
  22757. cCE("torvscw", e92f190, 1, (RR), iwmmxt_tandorc),
  22758. cCE("wabsb", e2001c0, 2, (RIWR, RIWR), rd_rn),
  22759. cCE("wabsh", e6001c0, 2, (RIWR, RIWR), rd_rn),
  22760. cCE("wabsw", ea001c0, 2, (RIWR, RIWR), rd_rn),
  22761. cCE("wabsdiffb", e1001c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22762. cCE("wabsdiffh", e5001c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22763. cCE("wabsdiffw", e9001c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22764. cCE("waddbhusl", e2001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22765. cCE("waddbhusm", e6001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22766. cCE("waddhc", e600180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22767. cCE("waddwc", ea00180, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22768. cCE("waddsubhx", ea001a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22769. cCE("wavg4", e400000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22770. cCE("wavg4r", e500000, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22771. cCE("wmaddsn", ee00100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22772. cCE("wmaddsx", eb00100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22773. cCE("wmaddun", ec00100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22774. cCE("wmaddux", e900100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22775. cCE("wmerge", e000080, 4, (RIWR, RIWR, RIWR, I7), iwmmxt_wmerge),
  22776. cCE("wmiabb", e0000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22777. cCE("wmiabt", e1000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22778. cCE("wmiatb", e2000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22779. cCE("wmiatt", e3000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22780. cCE("wmiabbn", e4000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22781. cCE("wmiabtn", e5000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22782. cCE("wmiatbn", e6000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22783. cCE("wmiattn", e7000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22784. cCE("wmiawbb", e800120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22785. cCE("wmiawbt", e900120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22786. cCE("wmiawtb", ea00120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22787. cCE("wmiawtt", eb00120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22788. cCE("wmiawbbn", ec00120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22789. cCE("wmiawbtn", ed00120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22790. cCE("wmiawtbn", ee00120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22791. cCE("wmiawttn", ef00120, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22792. cCE("wmulsmr", ef00100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22793. cCE("wmulumr", ed00100, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22794. cCE("wmulwumr", ec000c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22795. cCE("wmulwsmr", ee000c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22796. cCE("wmulwum", ed000c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22797. cCE("wmulwsm", ef000c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22798. cCE("wmulwl", eb000c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22799. cCE("wqmiabb", e8000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22800. cCE("wqmiabt", e9000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22801. cCE("wqmiatb", ea000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22802. cCE("wqmiatt", eb000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22803. cCE("wqmiabbn", ec000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22804. cCE("wqmiabtn", ed000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22805. cCE("wqmiatbn", ee000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22806. cCE("wqmiattn", ef000a0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22807. cCE("wqmulm", e100080, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22808. cCE("wqmulmr", e300080, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22809. cCE("wqmulwm", ec000e0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22810. cCE("wqmulwmr", ee000e0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22811. cCE("wsubaddhx", ed001c0, 3, (RIWR, RIWR, RIWR), rd_rn_rm),
  22812. #undef ARM_VARIANT
  22813. #define ARM_VARIANT & arm_cext_maverick /* Cirrus Maverick instructions. */
  22814. cCE("cfldrs", c100400, 2, (RMF, ADDRGLDC), rd_cpaddr),
  22815. cCE("cfldrd", c500400, 2, (RMD, ADDRGLDC), rd_cpaddr),
  22816. cCE("cfldr32", c100500, 2, (RMFX, ADDRGLDC), rd_cpaddr),
  22817. cCE("cfldr64", c500500, 2, (RMDX, ADDRGLDC), rd_cpaddr),
  22818. cCE("cfstrs", c000400, 2, (RMF, ADDRGLDC), rd_cpaddr),
  22819. cCE("cfstrd", c400400, 2, (RMD, ADDRGLDC), rd_cpaddr),
  22820. cCE("cfstr32", c000500, 2, (RMFX, ADDRGLDC), rd_cpaddr),
  22821. cCE("cfstr64", c400500, 2, (RMDX, ADDRGLDC), rd_cpaddr),
  22822. cCE("cfmvsr", e000450, 2, (RMF, RR), rn_rd),
  22823. cCE("cfmvrs", e100450, 2, (RR, RMF), rd_rn),
  22824. cCE("cfmvdlr", e000410, 2, (RMD, RR), rn_rd),
  22825. cCE("cfmvrdl", e100410, 2, (RR, RMD), rd_rn),
  22826. cCE("cfmvdhr", e000430, 2, (RMD, RR), rn_rd),
  22827. cCE("cfmvrdh", e100430, 2, (RR, RMD), rd_rn),
  22828. cCE("cfmv64lr",e000510, 2, (RMDX, RR), rn_rd),
  22829. cCE("cfmvr64l",e100510, 2, (RR, RMDX), rd_rn),
  22830. cCE("cfmv64hr",e000530, 2, (RMDX, RR), rn_rd),
  22831. cCE("cfmvr64h",e100530, 2, (RR, RMDX), rd_rn),
  22832. cCE("cfmval32",e200440, 2, (RMAX, RMFX), rd_rn),
  22833. cCE("cfmv32al",e100440, 2, (RMFX, RMAX), rd_rn),
  22834. cCE("cfmvam32",e200460, 2, (RMAX, RMFX), rd_rn),
  22835. cCE("cfmv32am",e100460, 2, (RMFX, RMAX), rd_rn),
  22836. cCE("cfmvah32",e200480, 2, (RMAX, RMFX), rd_rn),
  22837. cCE("cfmv32ah",e100480, 2, (RMFX, RMAX), rd_rn),
  22838. cCE("cfmva32", e2004a0, 2, (RMAX, RMFX), rd_rn),
  22839. cCE("cfmv32a", e1004a0, 2, (RMFX, RMAX), rd_rn),
  22840. cCE("cfmva64", e2004c0, 2, (RMAX, RMDX), rd_rn),
  22841. cCE("cfmv64a", e1004c0, 2, (RMDX, RMAX), rd_rn),
  22842. cCE("cfmvsc32",e2004e0, 2, (RMDS, RMDX), mav_dspsc),
  22843. cCE("cfmv32sc",e1004e0, 2, (RMDX, RMDS), rd),
  22844. cCE("cfcpys", e000400, 2, (RMF, RMF), rd_rn),
  22845. cCE("cfcpyd", e000420, 2, (RMD, RMD), rd_rn),
  22846. cCE("cfcvtsd", e000460, 2, (RMD, RMF), rd_rn),
  22847. cCE("cfcvtds", e000440, 2, (RMF, RMD), rd_rn),
  22848. cCE("cfcvt32s",e000480, 2, (RMF, RMFX), rd_rn),
  22849. cCE("cfcvt32d",e0004a0, 2, (RMD, RMFX), rd_rn),
  22850. cCE("cfcvt64s",e0004c0, 2, (RMF, RMDX), rd_rn),
  22851. cCE("cfcvt64d",e0004e0, 2, (RMD, RMDX), rd_rn),
  22852. cCE("cfcvts32",e100580, 2, (RMFX, RMF), rd_rn),
  22853. cCE("cfcvtd32",e1005a0, 2, (RMFX, RMD), rd_rn),
  22854. cCE("cftruncs32",e1005c0, 2, (RMFX, RMF), rd_rn),
  22855. cCE("cftruncd32",e1005e0, 2, (RMFX, RMD), rd_rn),
  22856. cCE("cfrshl32",e000550, 3, (RMFX, RMFX, RR), mav_triple),
  22857. cCE("cfrshl64",e000570, 3, (RMDX, RMDX, RR), mav_triple),
  22858. cCE("cfsh32", e000500, 3, (RMFX, RMFX, I63s), mav_shift),
  22859. cCE("cfsh64", e200500, 3, (RMDX, RMDX, I63s), mav_shift),
  22860. cCE("cfcmps", e100490, 3, (RR, RMF, RMF), rd_rn_rm),
  22861. cCE("cfcmpd", e1004b0, 3, (RR, RMD, RMD), rd_rn_rm),
  22862. cCE("cfcmp32", e100590, 3, (RR, RMFX, RMFX), rd_rn_rm),
  22863. cCE("cfcmp64", e1005b0, 3, (RR, RMDX, RMDX), rd_rn_rm),
  22864. cCE("cfabss", e300400, 2, (RMF, RMF), rd_rn),
  22865. cCE("cfabsd", e300420, 2, (RMD, RMD), rd_rn),
  22866. cCE("cfnegs", e300440, 2, (RMF, RMF), rd_rn),
  22867. cCE("cfnegd", e300460, 2, (RMD, RMD), rd_rn),
  22868. cCE("cfadds", e300480, 3, (RMF, RMF, RMF), rd_rn_rm),
  22869. cCE("cfaddd", e3004a0, 3, (RMD, RMD, RMD), rd_rn_rm),
  22870. cCE("cfsubs", e3004c0, 3, (RMF, RMF, RMF), rd_rn_rm),
  22871. cCE("cfsubd", e3004e0, 3, (RMD, RMD, RMD), rd_rn_rm),
  22872. cCE("cfmuls", e100400, 3, (RMF, RMF, RMF), rd_rn_rm),
  22873. cCE("cfmuld", e100420, 3, (RMD, RMD, RMD), rd_rn_rm),
  22874. cCE("cfabs32", e300500, 2, (RMFX, RMFX), rd_rn),
  22875. cCE("cfabs64", e300520, 2, (RMDX, RMDX), rd_rn),
  22876. cCE("cfneg32", e300540, 2, (RMFX, RMFX), rd_rn),
  22877. cCE("cfneg64", e300560, 2, (RMDX, RMDX), rd_rn),
  22878. cCE("cfadd32", e300580, 3, (RMFX, RMFX, RMFX), rd_rn_rm),
  22879. cCE("cfadd64", e3005a0, 3, (RMDX, RMDX, RMDX), rd_rn_rm),
  22880. cCE("cfsub32", e3005c0, 3, (RMFX, RMFX, RMFX), rd_rn_rm),
  22881. cCE("cfsub64", e3005e0, 3, (RMDX, RMDX, RMDX), rd_rn_rm),
  22882. cCE("cfmul32", e100500, 3, (RMFX, RMFX, RMFX), rd_rn_rm),
  22883. cCE("cfmul64", e100520, 3, (RMDX, RMDX, RMDX), rd_rn_rm),
  22884. cCE("cfmac32", e100540, 3, (RMFX, RMFX, RMFX), rd_rn_rm),
  22885. cCE("cfmsc32", e100560, 3, (RMFX, RMFX, RMFX), rd_rn_rm),
  22886. cCE("cfmadd32",e000600, 4, (RMAX, RMFX, RMFX, RMFX), mav_quad),
  22887. cCE("cfmsub32",e100600, 4, (RMAX, RMFX, RMFX, RMFX), mav_quad),
  22888. cCE("cfmadda32", e200600, 4, (RMAX, RMAX, RMFX, RMFX), mav_quad),
  22889. cCE("cfmsuba32", e300600, 4, (RMAX, RMAX, RMFX, RMFX), mav_quad),
  22890. /* ARMv8.5-A instructions. */
  22891. #undef ARM_VARIANT
  22892. #define ARM_VARIANT & arm_ext_sb
  22893. #undef THUMB_VARIANT
  22894. #define THUMB_VARIANT & arm_ext_sb
  22895. TUF("sb", 57ff070, f3bf8f70, 0, (), noargs, noargs),
  22896. #undef ARM_VARIANT
  22897. #define ARM_VARIANT & arm_ext_predres
  22898. #undef THUMB_VARIANT
  22899. #define THUMB_VARIANT & arm_ext_predres
  22900. CE("cfprctx", e070f93, 1, (RRnpc), rd),
  22901. CE("dvprctx", e070fb3, 1, (RRnpc), rd),
  22902. CE("cpprctx", e070ff3, 1, (RRnpc), rd),
  22903. /* ARMv8-M instructions. */
  22904. #undef ARM_VARIANT
  22905. #define ARM_VARIANT NULL
  22906. #undef THUMB_VARIANT
  22907. #define THUMB_VARIANT & arm_ext_v8m
  22908. ToU("sg", e97fe97f, 0, (), noargs),
  22909. ToC("blxns", 4784, 1, (RRnpc), t_blx),
  22910. ToC("bxns", 4704, 1, (RRnpc), t_bx),
  22911. ToC("tt", e840f000, 2, (RRnpc, RRnpc), tt),
  22912. ToC("ttt", e840f040, 2, (RRnpc, RRnpc), tt),
  22913. ToC("tta", e840f080, 2, (RRnpc, RRnpc), tt),
  22914. ToC("ttat", e840f0c0, 2, (RRnpc, RRnpc), tt),
  22915. /* FP for ARMv8-M Mainline. Enabled for ARMv8-M Mainline because the
  22916. instructions behave as nop if no VFP is present. */
  22917. #undef THUMB_VARIANT
  22918. #define THUMB_VARIANT & arm_ext_v8m_main
  22919. ToC("vlldm", ec300a00, 1, (RRnpc), rn),
  22920. ToC("vlstm", ec200a00, 1, (RRnpc), rn),
  22921. /* Armv8.1-M Mainline instructions. */
  22922. #undef THUMB_VARIANT
  22923. #define THUMB_VARIANT & arm_ext_v8_1m_main
  22924. toU("aut", _aut, 3, (R12, LR, SP), t_pacbti),
  22925. toU("autg", _autg, 3, (RR, RR, RR), t_pacbti_nonop),
  22926. ToU("bti", f3af800f, 0, (), noargs),
  22927. toU("bxaut", _bxaut, 3, (RR, RR, RR), t_pacbti_nonop),
  22928. toU("pac", _pac, 3, (R12, LR, SP), t_pacbti),
  22929. toU("pacbti", _pacbti, 3, (R12, LR, SP), t_pacbti),
  22930. toU("pacg", _pacg, 3, (RR, RR, RR), t_pacbti_pacg),
  22931. toU("cinc", _cinc, 3, (RRnpcsp, RR_ZR, COND), t_cond),
  22932. toU("cinv", _cinv, 3, (RRnpcsp, RR_ZR, COND), t_cond),
  22933. toU("cneg", _cneg, 3, (RRnpcsp, RR_ZR, COND), t_cond),
  22934. toU("csel", _csel, 4, (RRnpcsp, RR_ZR, RR_ZR, COND), t_cond),
  22935. toU("csetm", _csetm, 2, (RRnpcsp, COND), t_cond),
  22936. toU("cset", _cset, 2, (RRnpcsp, COND), t_cond),
  22937. toU("csinc", _csinc, 4, (RRnpcsp, RR_ZR, RR_ZR, COND), t_cond),
  22938. toU("csinv", _csinv, 4, (RRnpcsp, RR_ZR, RR_ZR, COND), t_cond),
  22939. toU("csneg", _csneg, 4, (RRnpcsp, RR_ZR, RR_ZR, COND), t_cond),
  22940. toC("bf", _bf, 2, (EXPs, EXPs), t_branch_future),
  22941. toU("bfcsel", _bfcsel, 4, (EXPs, EXPs, EXPs, COND), t_branch_future),
  22942. toC("bfx", _bfx, 2, (EXPs, RRnpcsp), t_branch_future),
  22943. toC("bfl", _bfl, 2, (EXPs, EXPs), t_branch_future),
  22944. toC("bflx", _bflx, 2, (EXPs, RRnpcsp), t_branch_future),
  22945. toU("dls", _dls, 2, (LR, RRnpcsp), t_loloop),
  22946. toU("wls", _wls, 3, (LR, RRnpcsp, EXP), t_loloop),
  22947. toU("le", _le, 2, (oLR, EXP), t_loloop),
  22948. ToC("clrm", e89f0000, 1, (CLRMLST), t_clrm),
  22949. ToC("vscclrm", ec9f0a00, 1, (VRSDVLST), t_vscclrm),
  22950. #undef THUMB_VARIANT
  22951. #define THUMB_VARIANT & mve_ext
  22952. ToC("lsll", ea50010d, 3, (RRe, RRo, RRnpcsp_I32), mve_scalar_shift),
  22953. ToC("lsrl", ea50011f, 3, (RRe, RRo, I32), mve_scalar_shift),
  22954. ToC("asrl", ea50012d, 3, (RRe, RRo, RRnpcsp_I32), mve_scalar_shift),
  22955. ToC("uqrshll", ea51010d, 4, (RRe, RRo, I48_I64, RRnpcsp), mve_scalar_shift1),
  22956. ToC("sqrshrl", ea51012d, 4, (RRe, RRo, I48_I64, RRnpcsp), mve_scalar_shift1),
  22957. ToC("uqshll", ea51010f, 3, (RRe, RRo, I32), mve_scalar_shift),
  22958. ToC("urshrl", ea51011f, 3, (RRe, RRo, I32), mve_scalar_shift),
  22959. ToC("srshrl", ea51012f, 3, (RRe, RRo, I32), mve_scalar_shift),
  22960. ToC("sqshll", ea51013f, 3, (RRe, RRo, I32), mve_scalar_shift),
  22961. ToC("uqrshl", ea500f0d, 2, (RRnpcsp, RRnpcsp), mve_scalar_shift),
  22962. ToC("sqrshr", ea500f2d, 2, (RRnpcsp, RRnpcsp), mve_scalar_shift),
  22963. ToC("uqshl", ea500f0f, 2, (RRnpcsp, I32), mve_scalar_shift),
  22964. ToC("urshr", ea500f1f, 2, (RRnpcsp, I32), mve_scalar_shift),
  22965. ToC("srshr", ea500f2f, 2, (RRnpcsp, I32), mve_scalar_shift),
  22966. ToC("sqshl", ea500f3f, 2, (RRnpcsp, I32), mve_scalar_shift),
  22967. ToC("vpt", ee410f00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22968. ToC("vptt", ee018f00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22969. ToC("vpte", ee418f00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22970. ToC("vpttt", ee014f00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22971. ToC("vptte", ee01cf00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22972. ToC("vptet", ee41cf00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22973. ToC("vptee", ee414f00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22974. ToC("vptttt", ee012f00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22975. ToC("vpttte", ee016f00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22976. ToC("vpttet", ee01ef00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22977. ToC("vpttee", ee01af00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22978. ToC("vptett", ee41af00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22979. ToC("vptete", ee41ef00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22980. ToC("vpteet", ee416f00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22981. ToC("vpteee", ee412f00, 3, (COND, RMQ, RMQRZ), mve_vpt),
  22982. ToC("vpst", fe710f4d, 0, (), mve_vpt),
  22983. ToC("vpstt", fe318f4d, 0, (), mve_vpt),
  22984. ToC("vpste", fe718f4d, 0, (), mve_vpt),
  22985. ToC("vpsttt", fe314f4d, 0, (), mve_vpt),
  22986. ToC("vpstte", fe31cf4d, 0, (), mve_vpt),
  22987. ToC("vpstet", fe71cf4d, 0, (), mve_vpt),
  22988. ToC("vpstee", fe714f4d, 0, (), mve_vpt),
  22989. ToC("vpstttt", fe312f4d, 0, (), mve_vpt),
  22990. ToC("vpsttte", fe316f4d, 0, (), mve_vpt),
  22991. ToC("vpsttet", fe31ef4d, 0, (), mve_vpt),
  22992. ToC("vpsttee", fe31af4d, 0, (), mve_vpt),
  22993. ToC("vpstett", fe71af4d, 0, (), mve_vpt),
  22994. ToC("vpstete", fe71ef4d, 0, (), mve_vpt),
  22995. ToC("vpsteet", fe716f4d, 0, (), mve_vpt),
  22996. ToC("vpsteee", fe712f4d, 0, (), mve_vpt),
  22997. /* MVE and MVE FP only. */
  22998. mToC("vhcadd", ee000f00, 4, (RMQ, RMQ, RMQ, EXPi), mve_vhcadd),
  22999. mCEF(vctp, _vctp, 1, (RRnpc), mve_vctp),
  23000. mCEF(vadc, _vadc, 3, (RMQ, RMQ, RMQ), mve_vadc),
  23001. mCEF(vadci, _vadci, 3, (RMQ, RMQ, RMQ), mve_vadc),
  23002. mToC("vsbc", fe300f00, 3, (RMQ, RMQ, RMQ), mve_vsbc),
  23003. mToC("vsbci", fe301f00, 3, (RMQ, RMQ, RMQ), mve_vsbc),
  23004. mCEF(vmullb, _vmullb, 3, (RMQ, RMQ, RMQ), mve_vmull),
  23005. mCEF(vabav, _vabav, 3, (RRnpcsp, RMQ, RMQ), mve_vabav),
  23006. mCEF(vmladav, _vmladav, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23007. mCEF(vmladava, _vmladava, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23008. mCEF(vmladavx, _vmladavx, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23009. mCEF(vmladavax, _vmladavax, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23010. mCEF(vmlav, _vmladav, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23011. mCEF(vmlava, _vmladava, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23012. mCEF(vmlsdav, _vmlsdav, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23013. mCEF(vmlsdava, _vmlsdava, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23014. mCEF(vmlsdavx, _vmlsdavx, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23015. mCEF(vmlsdavax, _vmlsdavax, 3, (RRe, RMQ, RMQ), mve_vmladav),
  23016. mCEF(vst20, _vst20, 2, (MSTRLST2, ADDRMVE), mve_vst_vld),
  23017. mCEF(vst21, _vst21, 2, (MSTRLST2, ADDRMVE), mve_vst_vld),
  23018. mCEF(vst40, _vst40, 2, (MSTRLST4, ADDRMVE), mve_vst_vld),
  23019. mCEF(vst41, _vst41, 2, (MSTRLST4, ADDRMVE), mve_vst_vld),
  23020. mCEF(vst42, _vst42, 2, (MSTRLST4, ADDRMVE), mve_vst_vld),
  23021. mCEF(vst43, _vst43, 2, (MSTRLST4, ADDRMVE), mve_vst_vld),
  23022. mCEF(vld20, _vld20, 2, (MSTRLST2, ADDRMVE), mve_vst_vld),
  23023. mCEF(vld21, _vld21, 2, (MSTRLST2, ADDRMVE), mve_vst_vld),
  23024. mCEF(vld40, _vld40, 2, (MSTRLST4, ADDRMVE), mve_vst_vld),
  23025. mCEF(vld41, _vld41, 2, (MSTRLST4, ADDRMVE), mve_vst_vld),
  23026. mCEF(vld42, _vld42, 2, (MSTRLST4, ADDRMVE), mve_vst_vld),
  23027. mCEF(vld43, _vld43, 2, (MSTRLST4, ADDRMVE), mve_vst_vld),
  23028. mCEF(vstrb, _vstrb, 2, (RMQ, ADDRMVE), mve_vstr_vldr),
  23029. mCEF(vstrh, _vstrh, 2, (RMQ, ADDRMVE), mve_vstr_vldr),
  23030. mCEF(vstrw, _vstrw, 2, (RMQ, ADDRMVE), mve_vstr_vldr),
  23031. mCEF(vstrd, _vstrd, 2, (RMQ, ADDRMVE), mve_vstr_vldr),
  23032. mCEF(vldrb, _vldrb, 2, (RMQ, ADDRMVE), mve_vstr_vldr),
  23033. mCEF(vldrh, _vldrh, 2, (RMQ, ADDRMVE), mve_vstr_vldr),
  23034. mCEF(vldrw, _vldrw, 2, (RMQ, ADDRMVE), mve_vstr_vldr),
  23035. mCEF(vldrd, _vldrd, 2, (RMQ, ADDRMVE), mve_vstr_vldr),
  23036. mCEF(vmovnt, _vmovnt, 2, (RMQ, RMQ), mve_movn),
  23037. mCEF(vmovnb, _vmovnb, 2, (RMQ, RMQ), mve_movn),
  23038. mCEF(vbrsr, _vbrsr, 3, (RMQ, RMQ, RR), mve_vbrsr),
  23039. mCEF(vaddlv, _vaddlv, 3, (RRe, RRo, RMQ), mve_vaddlv),
  23040. mCEF(vaddlva, _vaddlva, 3, (RRe, RRo, RMQ), mve_vaddlv),
  23041. mCEF(vaddv, _vaddv, 2, (RRe, RMQ), mve_vaddv),
  23042. mCEF(vaddva, _vaddva, 2, (RRe, RMQ), mve_vaddv),
  23043. mCEF(vddup, _vddup, 3, (RMQ, RRe, EXPi), mve_viddup),
  23044. mCEF(vdwdup, _vdwdup, 4, (RMQ, RRe, RR, EXPi), mve_viddup),
  23045. mCEF(vidup, _vidup, 3, (RMQ, RRe, EXPi), mve_viddup),
  23046. mCEF(viwdup, _viwdup, 4, (RMQ, RRe, RR, EXPi), mve_viddup),
  23047. mToC("vmaxa", ee330e81, 2, (RMQ, RMQ), mve_vmaxa_vmina),
  23048. mToC("vmina", ee331e81, 2, (RMQ, RMQ), mve_vmaxa_vmina),
  23049. mCEF(vmaxv, _vmaxv, 2, (RR, RMQ), mve_vmaxv),
  23050. mCEF(vmaxav, _vmaxav, 2, (RR, RMQ), mve_vmaxv),
  23051. mCEF(vminv, _vminv, 2, (RR, RMQ), mve_vmaxv),
  23052. mCEF(vminav, _vminav, 2, (RR, RMQ), mve_vmaxv),
  23053. mCEF(vmlaldav, _vmlaldav, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23054. mCEF(vmlaldava, _vmlaldava, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23055. mCEF(vmlaldavx, _vmlaldavx, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23056. mCEF(vmlaldavax, _vmlaldavax, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23057. mCEF(vmlalv, _vmlaldav, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23058. mCEF(vmlalva, _vmlaldava, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23059. mCEF(vmlsldav, _vmlsldav, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23060. mCEF(vmlsldava, _vmlsldava, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23061. mCEF(vmlsldavx, _vmlsldavx, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23062. mCEF(vmlsldavax, _vmlsldavax, 4, (RRe, RRo, RMQ, RMQ), mve_vmlaldav),
  23063. mToC("vrmlaldavh", ee800f00, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23064. mToC("vrmlaldavha",ee800f20, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23065. mCEF(vrmlaldavhx, _vrmlaldavhx, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23066. mCEF(vrmlaldavhax, _vrmlaldavhax, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23067. mToC("vrmlalvh", ee800f00, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23068. mToC("vrmlalvha", ee800f20, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23069. mCEF(vrmlsldavh, _vrmlsldavh, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23070. mCEF(vrmlsldavha, _vrmlsldavha, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23071. mCEF(vrmlsldavhx, _vrmlsldavhx, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23072. mCEF(vrmlsldavhax, _vrmlsldavhax, 4, (RRe, RR, RMQ, RMQ), mve_vrmlaldavh),
  23073. mToC("vmlas", ee011e40, 3, (RMQ, RMQ, RR), mve_vmlas),
  23074. mToC("vmulh", ee010e01, 3, (RMQ, RMQ, RMQ), mve_vmulh),
  23075. mToC("vrmulh", ee011e01, 3, (RMQ, RMQ, RMQ), mve_vmulh),
  23076. mToC("vpnot", fe310f4d, 0, (), mve_vpnot),
  23077. mToC("vpsel", fe310f01, 3, (RMQ, RMQ, RMQ), mve_vpsel),
  23078. mToC("vqdmladh", ee000e00, 3, (RMQ, RMQ, RMQ), mve_vqdmladh),
  23079. mToC("vqdmladhx", ee001e00, 3, (RMQ, RMQ, RMQ), mve_vqdmladh),
  23080. mToC("vqrdmladh", ee000e01, 3, (RMQ, RMQ, RMQ), mve_vqdmladh),
  23081. mToC("vqrdmladhx",ee001e01, 3, (RMQ, RMQ, RMQ), mve_vqdmladh),
  23082. mToC("vqdmlsdh", fe000e00, 3, (RMQ, RMQ, RMQ), mve_vqdmladh),
  23083. mToC("vqdmlsdhx", fe001e00, 3, (RMQ, RMQ, RMQ), mve_vqdmladh),
  23084. mToC("vqrdmlsdh", fe000e01, 3, (RMQ, RMQ, RMQ), mve_vqdmladh),
  23085. mToC("vqrdmlsdhx",fe001e01, 3, (RMQ, RMQ, RMQ), mve_vqdmladh),
  23086. mToC("vqdmlah", ee000e60, 3, (RMQ, RMQ, RR), mve_vqdmlah),
  23087. mToC("vqdmlash", ee001e60, 3, (RMQ, RMQ, RR), mve_vqdmlah),
  23088. mToC("vqrdmlash", ee001e40, 3, (RMQ, RMQ, RR), mve_vqdmlah),
  23089. mToC("vqdmullt", ee301f00, 3, (RMQ, RMQ, RMQRR), mve_vqdmull),
  23090. mToC("vqdmullb", ee300f00, 3, (RMQ, RMQ, RMQRR), mve_vqdmull),
  23091. mCEF(vqmovnt, _vqmovnt, 2, (RMQ, RMQ), mve_vqmovn),
  23092. mCEF(vqmovnb, _vqmovnb, 2, (RMQ, RMQ), mve_vqmovn),
  23093. mCEF(vqmovunt, _vqmovunt, 2, (RMQ, RMQ), mve_vqmovn),
  23094. mCEF(vqmovunb, _vqmovunb, 2, (RMQ, RMQ), mve_vqmovn),
  23095. mCEF(vshrnt, _vshrnt, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23096. mCEF(vshrnb, _vshrnb, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23097. mCEF(vrshrnt, _vrshrnt, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23098. mCEF(vrshrnb, _vrshrnb, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23099. mCEF(vqshrnt, _vqrshrnt, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23100. mCEF(vqshrnb, _vqrshrnb, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23101. mCEF(vqshrunt, _vqrshrunt, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23102. mCEF(vqshrunb, _vqrshrunb, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23103. mCEF(vqrshrnt, _vqrshrnt, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23104. mCEF(vqrshrnb, _vqrshrnb, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23105. mCEF(vqrshrunt, _vqrshrunt, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23106. mCEF(vqrshrunb, _vqrshrunb, 3, (RMQ, RMQ, I32z), mve_vshrn),
  23107. mToC("vshlc", eea00fc0, 3, (RMQ, RR, I32z), mve_vshlc),
  23108. mToC("vshllt", ee201e00, 3, (RMQ, RMQ, I32), mve_vshll),
  23109. mToC("vshllb", ee200e00, 3, (RMQ, RMQ, I32), mve_vshll),
  23110. toU("dlstp", _dlstp, 2, (LR, RR), t_loloop),
  23111. toU("wlstp", _wlstp, 3, (LR, RR, EXP), t_loloop),
  23112. toU("letp", _letp, 2, (LR, EXP), t_loloop),
  23113. toU("lctp", _lctp, 0, (), t_loloop),
  23114. #undef THUMB_VARIANT
  23115. #define THUMB_VARIANT & mve_fp_ext
  23116. mToC("vcmul", ee300e00, 4, (RMQ, RMQ, RMQ, EXPi), mve_vcmul),
  23117. mToC("vfmas", ee311e40, 3, (RMQ, RMQ, RR), mve_vfmas),
  23118. mToC("vmaxnma", ee3f0e81, 2, (RMQ, RMQ), mve_vmaxnma_vminnma),
  23119. mToC("vminnma", ee3f1e81, 2, (RMQ, RMQ), mve_vmaxnma_vminnma),
  23120. mToC("vmaxnmv", eeee0f00, 2, (RR, RMQ), mve_vmaxnmv),
  23121. mToC("vmaxnmav",eeec0f00, 2, (RR, RMQ), mve_vmaxnmv),
  23122. mToC("vminnmv", eeee0f80, 2, (RR, RMQ), mve_vmaxnmv),
  23123. mToC("vminnmav",eeec0f80, 2, (RR, RMQ), mve_vmaxnmv),
  23124. #undef ARM_VARIANT
  23125. #define ARM_VARIANT & fpu_vfp_ext_v1
  23126. #undef THUMB_VARIANT
  23127. #define THUMB_VARIANT & arm_ext_v6t2
  23128. mcCE(fcpyd, eb00b40, 2, (RVD, RVD), vfp_dp_rd_rm),
  23129. #undef ARM_VARIANT
  23130. #define ARM_VARIANT & fpu_vfp_ext_v1xd
  23131. mnCEF(vmla, _vmla, 3, (RNSDQMQ, oRNSDQMQ, RNSDQ_RNSC_MQ_RR), neon_mac_maybe_scalar),
  23132. mnCEF(vmul, _vmul, 3, (RNSDQMQ, oRNSDQMQ, RNSDQ_RNSC_MQ_RR), neon_mul),
  23133. MNCE(vmov, 0, 1, (VMOV), neon_mov),
  23134. mcCE(fmrs, e100a10, 2, (RR, RVS), vfp_reg_from_sp),
  23135. mcCE(fmsr, e000a10, 2, (RVS, RR), vfp_sp_from_reg),
  23136. mcCE(fcpys, eb00a40, 2, (RVS, RVS), vfp_sp_monadic),
  23137. mCEF(vmullt, _vmullt, 3, (RNSDQMQ, oRNSDQMQ, RNSDQ_RNSC_MQ), mve_vmull),
  23138. mnCEF(vadd, _vadd, 3, (RNSDQMQ, oRNSDQMQ, RNSDQMQR), neon_addsub_if_i),
  23139. mnCEF(vsub, _vsub, 3, (RNSDQMQ, oRNSDQMQ, RNSDQMQR), neon_addsub_if_i),
  23140. MNCEF(vabs, 1b10300, 2, (RNSDQMQ, RNSDQMQ), neon_abs_neg),
  23141. MNCEF(vneg, 1b10380, 2, (RNSDQMQ, RNSDQMQ), neon_abs_neg),
  23142. mCEF(vmovlt, _vmovlt, 1, (VMOV), mve_movl),
  23143. mCEF(vmovlb, _vmovlb, 1, (VMOV), mve_movl),
  23144. mnCE(vcmp, _vcmp, 3, (RVSD_COND, RSVDMQ_FI0, oRMQRZ), vfp_nsyn_cmp),
  23145. mnCE(vcmpe, _vcmpe, 3, (RVSD_COND, RSVDMQ_FI0, oRMQRZ), vfp_nsyn_cmp),
  23146. #undef ARM_VARIANT
  23147. #define ARM_VARIANT & fpu_vfp_ext_v2
  23148. mcCE(fmsrr, c400a10, 3, (VRSLST, RR, RR), vfp_sp2_from_reg2),
  23149. mcCE(fmrrs, c500a10, 3, (RR, RR, VRSLST), vfp_reg2_from_sp2),
  23150. mcCE(fmdrr, c400b10, 3, (RVD, RR, RR), vfp_dp_rm_rd_rn),
  23151. mcCE(fmrrd, c500b10, 3, (RR, RR, RVD), vfp_dp_rd_rn_rm),
  23152. #undef ARM_VARIANT
  23153. #define ARM_VARIANT & fpu_vfp_ext_armv8xd
  23154. mnUF(vcvta, _vcvta, 2, (RNSDQMQ, oRNSDQMQ), neon_cvta),
  23155. mnUF(vcvtp, _vcvta, 2, (RNSDQMQ, oRNSDQMQ), neon_cvtp),
  23156. mnUF(vcvtn, _vcvta, 3, (RNSDQMQ, oRNSDQMQ, oI32z), neon_cvtn),
  23157. mnUF(vcvtm, _vcvta, 2, (RNSDQMQ, oRNSDQMQ), neon_cvtm),
  23158. mnUF(vmaxnm, _vmaxnm, 3, (RNSDQMQ, oRNSDQMQ, RNSDQMQ), vmaxnm),
  23159. mnUF(vminnm, _vminnm, 3, (RNSDQMQ, oRNSDQMQ, RNSDQMQ), vmaxnm),
  23160. #undef ARM_VARIANT
  23161. #define ARM_VARIANT & fpu_neon_ext_v1
  23162. mnUF(vabd, _vabd, 3, (RNDQMQ, oRNDQMQ, RNDQMQ), neon_dyadic_if_su),
  23163. mnUF(vabdl, _vabdl, 3, (RNQMQ, RNDMQ, RNDMQ), neon_dyadic_long),
  23164. mnUF(vaddl, _vaddl, 3, (RNSDQMQ, oRNSDMQ, RNSDMQR), neon_dyadic_long),
  23165. mnUF(vsubl, _vsubl, 3, (RNSDQMQ, oRNSDMQ, RNSDMQR), neon_dyadic_long),
  23166. mnUF(vand, _vand, 3, (RNDQMQ, oRNDQMQ, RNDQMQ_Ibig), neon_logic),
  23167. mnUF(vbic, _vbic, 3, (RNDQMQ, oRNDQMQ, RNDQMQ_Ibig), neon_logic),
  23168. mnUF(vorr, _vorr, 3, (RNDQMQ, oRNDQMQ, RNDQMQ_Ibig), neon_logic),
  23169. mnUF(vorn, _vorn, 3, (RNDQMQ, oRNDQMQ, RNDQMQ_Ibig), neon_logic),
  23170. mnUF(veor, _veor, 3, (RNDQMQ, oRNDQMQ, RNDQMQ), neon_logic),
  23171. MNUF(vcls, 1b00400, 2, (RNDQMQ, RNDQMQ), neon_cls),
  23172. MNUF(vclz, 1b00480, 2, (RNDQMQ, RNDQMQ), neon_clz),
  23173. mnCE(vdup, _vdup, 2, (RNDQMQ, RR_RNSC), neon_dup),
  23174. MNUF(vhadd, 00000000, 3, (RNDQMQ, oRNDQMQ, RNDQMQR), neon_dyadic_i_su),
  23175. MNUF(vrhadd, 00000100, 3, (RNDQMQ, oRNDQMQ, RNDQMQ), neon_dyadic_i_su),
  23176. MNUF(vhsub, 00000200, 3, (RNDQMQ, oRNDQMQ, RNDQMQR), neon_dyadic_i_su),
  23177. mnUF(vmin, _vmin, 3, (RNDQMQ, oRNDQMQ, RNDQMQ), neon_dyadic_if_su),
  23178. mnUF(vmax, _vmax, 3, (RNDQMQ, oRNDQMQ, RNDQMQ), neon_dyadic_if_su),
  23179. MNUF(vqadd, 0000010, 3, (RNDQMQ, oRNDQMQ, RNDQMQR), neon_dyadic_i64_su),
  23180. MNUF(vqsub, 0000210, 3, (RNDQMQ, oRNDQMQ, RNDQMQR), neon_dyadic_i64_su),
  23181. mnUF(vmvn, _vmvn, 2, (RNDQMQ, RNDQMQ_Ibig), neon_mvn),
  23182. MNUF(vqabs, 1b00700, 2, (RNDQMQ, RNDQMQ), neon_sat_abs_neg),
  23183. MNUF(vqneg, 1b00780, 2, (RNDQMQ, RNDQMQ), neon_sat_abs_neg),
  23184. mnUF(vqrdmlah, _vqrdmlah,3, (RNDQMQ, oRNDQMQ, RNDQ_RNSC_RR), neon_qrdmlah),
  23185. mnUF(vqdmulh, _vqdmulh, 3, (RNDQMQ, oRNDQMQ, RNDQMQ_RNSC_RR), neon_qdmulh),
  23186. mnUF(vqrdmulh, _vqrdmulh,3, (RNDQMQ, oRNDQMQ, RNDQMQ_RNSC_RR), neon_qdmulh),
  23187. MNUF(vqrshl, 0000510, 3, (RNDQMQ, oRNDQMQ, RNDQMQR), neon_rshl),
  23188. MNUF(vrshl, 0000500, 3, (RNDQMQ, oRNDQMQ, RNDQMQR), neon_rshl),
  23189. MNUF(vshr, 0800010, 3, (RNDQMQ, oRNDQMQ, I64z), neon_rshift_round_imm),
  23190. MNUF(vrshr, 0800210, 3, (RNDQMQ, oRNDQMQ, I64z), neon_rshift_round_imm),
  23191. MNUF(vsli, 1800510, 3, (RNDQMQ, oRNDQMQ, I63), neon_sli),
  23192. MNUF(vsri, 1800410, 3, (RNDQMQ, oRNDQMQ, I64z), neon_sri),
  23193. MNUF(vrev64, 1b00000, 2, (RNDQMQ, RNDQMQ), neon_rev),
  23194. MNUF(vrev32, 1b00080, 2, (RNDQMQ, RNDQMQ), neon_rev),
  23195. MNUF(vrev16, 1b00100, 2, (RNDQMQ, RNDQMQ), neon_rev),
  23196. mnUF(vshl, _vshl, 3, (RNDQMQ, oRNDQMQ, RNDQMQ_I63b_RR), neon_shl),
  23197. mnUF(vqshl, _vqshl, 3, (RNDQMQ, oRNDQMQ, RNDQMQ_I63b_RR), neon_qshl),
  23198. MNUF(vqshlu, 1800610, 3, (RNDQMQ, oRNDQMQ, I63), neon_qshlu_imm),
  23199. #undef ARM_VARIANT
  23200. #define ARM_VARIANT & arm_ext_v8_3
  23201. #undef THUMB_VARIANT
  23202. #define THUMB_VARIANT & arm_ext_v6t2_v8m
  23203. MNUF (vcadd, 0, 4, (RNDQMQ, RNDQMQ, RNDQMQ, EXPi), vcadd),
  23204. MNUF (vcmla, 0, 4, (RNDQMQ, RNDQMQ, RNDQMQ_RNSC, EXPi), vcmla),
  23205. #undef ARM_VARIANT
  23206. #define ARM_VARIANT &arm_ext_bf16
  23207. #undef THUMB_VARIANT
  23208. #define THUMB_VARIANT &arm_ext_bf16
  23209. TUF ("vdot", c000d00, fc000d00, 3, (RNDQ, RNDQ, RNDQ_RNSC), vdot, vdot),
  23210. TUF ("vmmla", c000c40, fc000c40, 3, (RNQ, RNQ, RNQ), vmmla, vmmla),
  23211. TUF ("vfmab", c300810, fc300810, 3, (RNDQ, RNDQ, RNDQ_RNSC), bfloat_vfma, bfloat_vfma),
  23212. #undef ARM_VARIANT
  23213. #define ARM_VARIANT &arm_ext_i8mm
  23214. #undef THUMB_VARIANT
  23215. #define THUMB_VARIANT &arm_ext_i8mm
  23216. TUF ("vsmmla", c200c40, fc200c40, 3, (RNQ, RNQ, RNQ), vsmmla, vsmmla),
  23217. TUF ("vummla", c200c50, fc200c50, 3, (RNQ, RNQ, RNQ), vummla, vummla),
  23218. TUF ("vusmmla", ca00c40, fca00c40, 3, (RNQ, RNQ, RNQ), vsmmla, vsmmla),
  23219. TUF ("vusdot", c800d00, fc800d00, 3, (RNDQ, RNDQ, RNDQ_RNSC), vusdot, vusdot),
  23220. TUF ("vsudot", c800d10, fc800d10, 3, (RNDQ, RNDQ, RNSC), vsudot, vsudot),
  23221. #undef ARM_VARIANT
  23222. #undef THUMB_VARIANT
  23223. #define THUMB_VARIANT &arm_ext_cde
  23224. ToC ("cx1", ee000000, 3, (RCP, APSR_RR, I8191), cx1),
  23225. ToC ("cx1a", fe000000, 3, (RCP, APSR_RR, I8191), cx1a),
  23226. ToC ("cx1d", ee000040, 4, (RCP, RR, APSR_RR, I8191), cx1d),
  23227. ToC ("cx1da", fe000040, 4, (RCP, RR, APSR_RR, I8191), cx1da),
  23228. ToC ("cx2", ee400000, 4, (RCP, APSR_RR, APSR_RR, I511), cx2),
  23229. ToC ("cx2a", fe400000, 4, (RCP, APSR_RR, APSR_RR, I511), cx2a),
  23230. ToC ("cx2d", ee400040, 5, (RCP, RR, APSR_RR, APSR_RR, I511), cx2d),
  23231. ToC ("cx2da", fe400040, 5, (RCP, RR, APSR_RR, APSR_RR, I511), cx2da),
  23232. ToC ("cx3", ee800000, 5, (RCP, APSR_RR, APSR_RR, APSR_RR, I63), cx3),
  23233. ToC ("cx3a", fe800000, 5, (RCP, APSR_RR, APSR_RR, APSR_RR, I63), cx3a),
  23234. ToC ("cx3d", ee800040, 6, (RCP, RR, APSR_RR, APSR_RR, APSR_RR, I63), cx3d),
  23235. ToC ("cx3da", fe800040, 6, (RCP, RR, APSR_RR, APSR_RR, APSR_RR, I63), cx3da),
  23236. mToC ("vcx1", ec200000, 3, (RCP, RNSDMQ, I4095), vcx1),
  23237. mToC ("vcx1a", fc200000, 3, (RCP, RNSDMQ, I4095), vcx1),
  23238. mToC ("vcx2", ec300000, 4, (RCP, RNSDMQ, RNSDMQ, I127), vcx2),
  23239. mToC ("vcx2a", fc300000, 4, (RCP, RNSDMQ, RNSDMQ, I127), vcx2),
  23240. mToC ("vcx3", ec800000, 5, (RCP, RNSDMQ, RNSDMQ, RNSDMQ, I15), vcx3),
  23241. mToC ("vcx3a", fc800000, 5, (RCP, RNSDMQ, RNSDMQ, RNSDMQ, I15), vcx3),
  23242. };
  23243. #undef ARM_VARIANT
  23244. #undef THUMB_VARIANT
  23245. #undef TCE
  23246. #undef TUE
  23247. #undef TUF
  23248. #undef TCC
  23249. #undef cCE
  23250. #undef cCL
  23251. #undef C3E
  23252. #undef C3
  23253. #undef CE
  23254. #undef CM
  23255. #undef CL
  23256. #undef UE
  23257. #undef UF
  23258. #undef UT
  23259. #undef NUF
  23260. #undef nUF
  23261. #undef NCE
  23262. #undef nCE
  23263. #undef OPS0
  23264. #undef OPS1
  23265. #undef OPS2
  23266. #undef OPS3
  23267. #undef OPS4
  23268. #undef OPS5
  23269. #undef OPS6
  23270. #undef do_0
  23271. #undef ToC
  23272. #undef toC
  23273. #undef ToU
  23274. #undef toU
  23275. /* MD interface: bits in the object file. */
  23276. /* Turn an integer of n bytes (in val) into a stream of bytes appropriate
  23277. for use in the a.out file, and stores them in the array pointed to by buf.
  23278. This knows about the endian-ness of the target machine and does
  23279. THE RIGHT THING, whatever it is. Possible values for n are 1 (byte)
  23280. 2 (short) and 4 (long) Floating numbers are put out as a series of
  23281. LITTLENUMS (shorts, here at least). */
  23282. void
  23283. md_number_to_chars (char * buf, valueT val, int n)
  23284. {
  23285. if (target_big_endian)
  23286. number_to_chars_bigendian (buf, val, n);
  23287. else
  23288. number_to_chars_littleendian (buf, val, n);
  23289. }
  23290. static valueT
  23291. md_chars_to_number (char * buf, int n)
  23292. {
  23293. valueT result = 0;
  23294. unsigned char * where = (unsigned char *) buf;
  23295. if (target_big_endian)
  23296. {
  23297. while (n--)
  23298. {
  23299. result <<= 8;
  23300. result |= (*where++ & 255);
  23301. }
  23302. }
  23303. else
  23304. {
  23305. while (n--)
  23306. {
  23307. result <<= 8;
  23308. result |= (where[n] & 255);
  23309. }
  23310. }
  23311. return result;
  23312. }
  23313. /* MD interface: Sections. */
  23314. /* Calculate the maximum variable size (i.e., excluding fr_fix)
  23315. that an rs_machine_dependent frag may reach. */
  23316. unsigned int
  23317. arm_frag_max_var (fragS *fragp)
  23318. {
  23319. /* We only use rs_machine_dependent for variable-size Thumb instructions,
  23320. which are either THUMB_SIZE (2) or INSN_SIZE (4).
  23321. Note that we generate relaxable instructions even for cases that don't
  23322. really need it, like an immediate that's a trivial constant. So we're
  23323. overestimating the instruction size for some of those cases. Rather
  23324. than putting more intelligence here, it would probably be better to
  23325. avoid generating a relaxation frag in the first place when it can be
  23326. determined up front that a short instruction will suffice. */
  23327. gas_assert (fragp->fr_type == rs_machine_dependent);
  23328. return INSN_SIZE;
  23329. }
  23330. /* Estimate the size of a frag before relaxing. Assume everything fits in
  23331. 2 bytes. */
  23332. int
  23333. md_estimate_size_before_relax (fragS * fragp,
  23334. segT segtype ATTRIBUTE_UNUSED)
  23335. {
  23336. fragp->fr_var = 2;
  23337. return 2;
  23338. }
/* Convert a machine dependent frag.  Called once relaxation has
   settled: rewrite the narrow (16-bit) Thumb instruction in place as
   its wide (32-bit) equivalent when fr_var == 4, and emit the fixup
   that will fill in the immediate/offset field.  */

void
md_convert_frag (bfd *abfd, segT asec ATTRIBUTE_UNUSED, fragS *fragp)
{
  unsigned long insn;
  unsigned long old_op;
  char *buf;
  expressionS exp;
  fixS *fixp;
  int reloc_type;
  int pc_rel;
  int opcode;

  buf = fragp->fr_literal + fragp->fr_fix;

  /* The 16-bit opcode originally emitted for this frag; its register
     fields are transplanted into the 32-bit encoding below.  */
  old_op = bfd_get_16 (abfd, buf);
  if (fragp->fr_symbol)
    {
      exp.X_op = O_symbol;
      exp.X_add_symbol = fragp->fr_symbol;
    }
  else
    {
      exp.X_op = O_constant;
    }
  exp.X_add_number = fragp->fr_offset;
  opcode = fragp->fr_subtype;
  switch (opcode)
    {
    case T_MNEM_ldr_pc:
    case T_MNEM_ldr_pc2:
    case T_MNEM_ldr_sp:
    case T_MNEM_str_sp:
    case T_MNEM_ldr:
    case T_MNEM_ldrb:
    case T_MNEM_ldrh:
    case T_MNEM_str:
    case T_MNEM_strb:
    case T_MNEM_strh:
      if (fragp->fr_var == 4)
	{
	  insn = THUMB_OP32 (opcode);
	  /* NOTE(review): top nibble 4 or 9 appears to select the
	     literal-pool / SP-relative forms, which carry Rd in bits
	     8-10; the register-offset forms carry Rd in bits 0-2 and
	     Rn in bits 3-5 — confirm against the Thumb encoding
	     tables.  */
	  if ((old_op >> 12) == 4 || (old_op >> 12) == 9)
	    {
	      insn |= (old_op & 0x700) << 4;
	    }
	  else
	    {
	      insn |= (old_op & 7) << 12;
	      insn |= (old_op & 0x38) << 13;
	    }
	  insn |= 0x00000c00;
	  put_thumb32_insn (buf, insn);
	  reloc_type = BFD_RELOC_ARM_T32_OFFSET_IMM;
	}
      else
	{
	  reloc_type = BFD_RELOC_ARM_THUMB_OFFSET;
	}
      /* Only the second literal-pool load form is PC-relative.  */
      pc_rel = (opcode == T_MNEM_ldr_pc2);
      break;

    case T_MNEM_adr:
      /* Thumb bits should be set in the frag handling so we process them
	 after all symbols have been seen.  PR gas/25235.  */
      if (exp.X_op == O_symbol
	  && exp.X_add_symbol != NULL
	  && S_IS_DEFINED (exp.X_add_symbol)
	  && THUMB_IS_FUNC (exp.X_add_symbol))
	exp.X_add_number |= 1;

      if (fragp->fr_var == 4)
	{
	  insn = THUMB_OP32 (opcode);
	  insn |= (old_op & 0xf0) << 4;
	  put_thumb32_insn (buf, insn);
	  reloc_type = BFD_RELOC_ARM_T32_ADD_PC12;
	}
      else
	{
	  reloc_type = BFD_RELOC_ARM_THUMB_ADD;
	  /* The narrow form is relative to the word-aligned PC, which
	     is 4 bytes ahead of the instruction.  */
	  exp.X_add_number -= 4;
	}
      pc_rel = 1;
      break;

    case T_MNEM_mov:
    case T_MNEM_movs:
    case T_MNEM_cmp:
    case T_MNEM_cmn:
      if (fragp->fr_var == 4)
	{
	  /* mov/movs keep Rd in the destination field (shift 0);
	     cmp/cmn put the register in the Rn field (shift 8).  */
	  int r0off = (opcode == T_MNEM_mov
		       || opcode == T_MNEM_movs) ? 0 : 8;
	  insn = THUMB_OP32 (opcode);
	  insn = (insn & 0xe1ffffff) | 0x10000000;
	  insn |= (old_op & 0x700) << r0off;
	  put_thumb32_insn (buf, insn);
	  reloc_type = BFD_RELOC_ARM_T32_IMMEDIATE;
	}
      else
	{
	  reloc_type = BFD_RELOC_ARM_THUMB_IMM;
	}
      pc_rel = 0;
      break;

    case T_MNEM_b:
      if (fragp->fr_var == 4)
	{
	  insn = THUMB_OP32(opcode);
	  put_thumb32_insn (buf, insn);
	  reloc_type = BFD_RELOC_THUMB_PCREL_BRANCH25;
	}
      else
	reloc_type = BFD_RELOC_THUMB_PCREL_BRANCH12;
      pc_rel = 1;
      break;

    case T_MNEM_bcond:
      if (fragp->fr_var == 4)
	{
	  insn = THUMB_OP32(opcode);
	  /* Carry the condition code over into the wide encoding.  */
	  insn |= (old_op & 0xf00) << 14;
	  put_thumb32_insn (buf, insn);
	  reloc_type = BFD_RELOC_THUMB_PCREL_BRANCH20;
	}
      else
	reloc_type = BFD_RELOC_THUMB_PCREL_BRANCH9;
      pc_rel = 1;
      break;

    case T_MNEM_add_sp:
    case T_MNEM_add_pc:
    case T_MNEM_inc_sp:
    case T_MNEM_dec_sp:
      if (fragp->fr_var == 4)
	{
	  /* ??? Choose between add and addw.  */
	  insn = THUMB_OP32 (opcode);
	  insn |= (old_op & 0xf0) << 4;
	  put_thumb32_insn (buf, insn);
	  if (opcode == T_MNEM_add_pc)
	    reloc_type = BFD_RELOC_ARM_T32_IMM12;
	  else
	    reloc_type = BFD_RELOC_ARM_T32_ADD_IMM;
	}
      else
	reloc_type = BFD_RELOC_ARM_THUMB_ADD;
      pc_rel = 0;
      break;

    case T_MNEM_addi:
    case T_MNEM_addis:
    case T_MNEM_subi:
    case T_MNEM_subis:
      if (fragp->fr_var == 4)
	{
	  insn = THUMB_OP32 (opcode);
	  insn |= (old_op & 0xf0) << 4;
	  insn |= (old_op & 0xf) << 16;
	  put_thumb32_insn (buf, insn);
	  /* Bit 20 distinguishes the flag-setting form; pick the
	     matching relocation.  */
	  if (insn & (1 << 20))
	    reloc_type = BFD_RELOC_ARM_T32_ADD_IMM;
	  else
	    reloc_type = BFD_RELOC_ARM_T32_IMMEDIATE;
	}
      else
	reloc_type = BFD_RELOC_ARM_THUMB_ADD;
      pc_rel = 0;
      break;

    default:
      abort ();
    }

  /* Attach the fixup carrying the symbol/offset; it resolves the
     immediate field of whichever encoding was chosen above.  */
  fixp = fix_new_exp (fragp, fragp->fr_fix, fragp->fr_var, &exp, pc_rel,
		      (enum bfd_reloc_code_real) reloc_type);
  fixp->fx_file = fragp->fr_file;
  fixp->fx_line = fragp->fr_line;
  fragp->fr_fix += fragp->fr_var;

  /* Set whether we use thumb-2 ISA based on final relaxation results.  */
  if (thumb_mode && fragp->fr_var == 4 && no_cpu_selected ()
      && !ARM_CPU_HAS_FEATURE (thumb_arch_used, arm_arch_t2))
    ARM_MERGE_FEATURE_SETS (arm_arch_used, thumb_arch_used, arm_ext_v6t2);
}
  23514. /* Return the size of a relaxable immediate operand instruction.
  23515. SHIFT and SIZE specify the form of the allowable immediate. */
  23516. static int
  23517. relax_immediate (fragS *fragp, int size, int shift)
  23518. {
  23519. offsetT offset;
  23520. offsetT mask;
  23521. offsetT low;
  23522. /* ??? Should be able to do better than this. */
  23523. if (fragp->fr_symbol)
  23524. return 4;
  23525. low = (1 << shift) - 1;
  23526. mask = (1 << (shift + size)) - (1 << shift);
  23527. offset = fragp->fr_offset;
  23528. /* Force misaligned offsets to 32-bit variant. */
  23529. if (offset & low)
  23530. return 4;
  23531. if (offset & ~mask)
  23532. return 4;
  23533. return 2;
  23534. }
/* Get the address of a symbol during relaxation.  STRETCH is the
   cumulative movement of the current pass; it is applied to symbols
   whose frags have not yet been processed on this pass.  */

static addressT
relaxed_symbol_addr (fragS *fragp, long stretch)
{
  fragS *sym_frag;
  addressT addr;
  symbolS *sym;

  sym = fragp->fr_symbol;
  sym_frag = symbol_get_frag (sym);
  know (S_GET_SEGMENT (sym) != absolute_section
	|| sym_frag == &zero_address_frag);
  addr = S_GET_VALUE (sym) + fragp->fr_offset;

  /* If frag has yet to be reached on this pass, assume it will
     move by STRETCH just as we did.  If this is not so, it will
     be because some frag between grows, and that will force
     another pass.  */

  if (stretch != 0
      && sym_frag->relax_marker != fragp->relax_marker)
    {
      fragS *f;

      /* Adjust stretch for any alignment frag.  Note that if have
	 been expanding the earlier code, the symbol may be
	 defined in what appears to be an earlier frag.  FIXME:
	 This doesn't handle the fr_subtype field, which specifies
	 a maximum number of bytes to skip when doing an
	 alignment.  */
      for (f = fragp; f != NULL && f != sym_frag; f = f->fr_next)
	{
	  if (f->fr_type == rs_align || f->fr_type == rs_align_code)
	    {
	      /* Round the stretch toward zero to the alignment
		 boundary the frag enforces (fr_offset is the log2
		 alignment here).  */
	      if (stretch < 0)
		stretch = - ((- stretch)
			     & ~ ((1 << (int) f->fr_offset) - 1));
	      else
		stretch &= ~ ((1 << (int) f->fr_offset) - 1);
	      /* Once an alignment frag absorbs the whole stretch the
		 symbol will not move relative to us.  */
	      if (stretch == 0)
		break;
	    }
	}
      /* f == NULL means the symbol's frag was not found ahead of us,
	 so it lies behind and does not move with the stretch.  */
      if (f != NULL)
	addr += stretch;
    }

  return addr;
}
  23579. /* Return the size of a relaxable adr pseudo-instruction or PC-relative
  23580. load. */
  23581. static int
  23582. relax_adr (fragS *fragp, asection *sec, long stretch)
  23583. {
  23584. addressT addr;
  23585. offsetT val;
  23586. /* Assume worst case for symbols not known to be in the same section. */
  23587. if (fragp->fr_symbol == NULL
  23588. || !S_IS_DEFINED (fragp->fr_symbol)
  23589. || sec != S_GET_SEGMENT (fragp->fr_symbol)
  23590. || S_IS_WEAK (fragp->fr_symbol)
  23591. || THUMB_IS_FUNC (fragp->fr_symbol))
  23592. return 4;
  23593. val = relaxed_symbol_addr (fragp, stretch);
  23594. addr = fragp->fr_address + fragp->fr_fix;
  23595. addr = (addr + 4) & ~3;
  23596. /* Force misaligned targets to 32-bit variant. */
  23597. if (val & 3)
  23598. return 4;
  23599. val -= addr;
  23600. if (val < 0 || val > 1020)
  23601. return 4;
  23602. return 2;
  23603. }
  23604. /* Return the size of a relaxable add/sub immediate instruction. */
  23605. static int
  23606. relax_addsub (fragS *fragp, asection *sec)
  23607. {
  23608. char *buf;
  23609. int op;
  23610. buf = fragp->fr_literal + fragp->fr_fix;
  23611. op = bfd_get_16(sec->owner, buf);
  23612. if ((op & 0xf) == ((op >> 4) & 0xf))
  23613. return relax_immediate (fragp, 8, 0);
  23614. else
  23615. return relax_immediate (fragp, 3, 0);
  23616. }
  23617. /* Return TRUE iff the definition of symbol S could be pre-empted
  23618. (overridden) at link or load time. */
  23619. static bool
  23620. symbol_preemptible (symbolS *s)
  23621. {
  23622. /* Weak symbols can always be pre-empted. */
  23623. if (S_IS_WEAK (s))
  23624. return true;
  23625. /* Non-global symbols cannot be pre-empted. */
  23626. if (! S_IS_EXTERNAL (s))
  23627. return false;
  23628. #ifdef OBJ_ELF
  23629. /* In ELF, a global symbol can be marked protected, or private. In that
  23630. case it can't be pre-empted (other definitions in the same link unit
  23631. would violate the ODR). */
  23632. if (ELF_ST_VISIBILITY (S_GET_OTHER (s)) > STV_DEFAULT)
  23633. return false;
  23634. #endif
  23635. /* Other global symbols might be pre-empted. */
  23636. return true;
  23637. }
  23638. /* Return the size of a relaxable branch instruction. BITS is the
  23639. size of the offset field in the narrow instruction. */
  23640. static int
  23641. relax_branch (fragS *fragp, asection *sec, int bits, long stretch)
  23642. {
  23643. addressT addr;
  23644. offsetT val;
  23645. offsetT limit;
  23646. /* Assume worst case for symbols not known to be in the same section. */
  23647. if (!S_IS_DEFINED (fragp->fr_symbol)
  23648. || sec != S_GET_SEGMENT (fragp->fr_symbol)
  23649. || S_IS_WEAK (fragp->fr_symbol))
  23650. return 4;
  23651. #ifdef OBJ_ELF
  23652. /* A branch to a function in ARM state will require interworking. */
  23653. if (S_IS_DEFINED (fragp->fr_symbol)
  23654. && ARM_IS_FUNC (fragp->fr_symbol))
  23655. return 4;
  23656. #endif
  23657. if (symbol_preemptible (fragp->fr_symbol))
  23658. return 4;
  23659. val = relaxed_symbol_addr (fragp, stretch);
  23660. addr = fragp->fr_address + fragp->fr_fix + 4;
  23661. val -= addr;
  23662. /* Offset is a signed value *2 */
  23663. limit = 1 << bits;
  23664. if (val >= limit || val < -limit)
  23665. return 4;
  23666. return 2;
  23667. }
/* Relax a machine dependent frag.  This returns the amount by which
   the current size of the frag should change.  */

int
arm_relax_frag (asection *sec, fragS *fragp, long stretch)
{
  int oldsize;
  int newsize;

  oldsize = fragp->fr_var;

  /* Dispatch on the Thumb mnemonic stored in fr_subtype.  The
     (size, shift) pairs passed to relax_immediate describe the narrow
     encoding's immediate: SIZE bits, scaled by 1 << SHIFT.  */
  switch (fragp->fr_subtype)
    {
    case T_MNEM_ldr_pc2:
      newsize = relax_adr (fragp, sec, stretch);
      break;

    case T_MNEM_ldr_pc:
    case T_MNEM_ldr_sp:
    case T_MNEM_str_sp:
      /* 8-bit immediate, word-scaled.  */
      newsize = relax_immediate (fragp, 8, 2);
      break;

    case T_MNEM_ldr:
    case T_MNEM_str:
      /* 5-bit immediate, word-scaled.  */
      newsize = relax_immediate (fragp, 5, 2);
      break;

    case T_MNEM_ldrh:
    case T_MNEM_strh:
      /* 5-bit immediate, halfword-scaled.  */
      newsize = relax_immediate (fragp, 5, 1);
      break;

    case T_MNEM_ldrb:
    case T_MNEM_strb:
      /* 5-bit immediate, byte-scaled.  */
      newsize = relax_immediate (fragp, 5, 0);
      break;

    case T_MNEM_adr:
      newsize = relax_adr (fragp, sec, stretch);
      break;

    case T_MNEM_mov:
    case T_MNEM_movs:
    case T_MNEM_cmp:
    case T_MNEM_cmn:
      /* 8-bit unscaled immediate.  */
      newsize = relax_immediate (fragp, 8, 0);
      break;

    case T_MNEM_b:
      /* Unconditional branch: 11-bit signed offset.  */
      newsize = relax_branch (fragp, sec, 11, stretch);
      break;

    case T_MNEM_bcond:
      /* Conditional branch: 8-bit signed offset.  */
      newsize = relax_branch (fragp, sec, 8, stretch);
      break;

    case T_MNEM_add_sp:
    case T_MNEM_add_pc:
      newsize = relax_immediate (fragp, 8, 2);
      break;

    case T_MNEM_inc_sp:
    case T_MNEM_dec_sp:
      /* SP adjustment: 7-bit immediate, word-scaled.  */
      newsize = relax_immediate (fragp, 7, 2);
      break;

    case T_MNEM_addi:
    case T_MNEM_addis:
    case T_MNEM_subi:
    case T_MNEM_subis:
      newsize = relax_addsub (fragp, sec);
      break;

    default:
      abort ();
    }

  fragp->fr_var = newsize;
  /* Freeze wide instructions that are at or before the same location as
     in the previous pass.  This avoids infinite loops.
     Don't freeze them unconditionally because targets may be artificially
     misaligned by the expansion of preceding frags.  */
  if (stretch <= 0 && newsize > 2)
    {
      md_convert_frag (sec->owner, sec, fragp);
      frag_wane (fragp);
    }

  return newsize - oldsize;
}
  23742. /* Round up a section size to the appropriate boundary. */
  23743. valueT
  23744. md_section_align (segT segment ATTRIBUTE_UNUSED,
  23745. valueT size)
  23746. {
  23747. return size;
  23748. }
/* This is called from HANDLE_ALIGN in write.c.  Fill in the contents
   of an rs_align_code fragment: zero bytes up to the first noop
   boundary, then ARM or Thumb noop instructions of the appropriate
   width and byte order.  */

void
arm_handle_align (fragS * fragP)
{
  /* Noop encodings, indexed by [architecture variant][endianness].
     The ARMv1 entry is "mov r0, r0"; ARMv6k and later have a real
     NOP.  */
  static unsigned char const arm_noop[2][2][4] =
    {
      {  /* ARMv1 */
	{0x00, 0x00, 0xa0, 0xe1},  /* LE */
	{0xe1, 0xa0, 0x00, 0x00},  /* BE */
      },
      {  /* ARMv6k */
	{0x00, 0xf0, 0x20, 0xe3},  /* LE */
	{0xe3, 0x20, 0xf0, 0x00},  /* BE */
      },
    };
  static unsigned char const thumb_noop[2][2][2] =
    {
      {  /* Thumb-1 */
	{0xc0, 0x46},  /* LE */
	{0x46, 0xc0},  /* BE */
      },
      {  /* Thumb-2 */
	{0x00, 0xbf},  /* LE */
	{0xbf, 0x00}   /* BE */
      }
    };
  static unsigned char const wide_thumb_noop[2][4] =
    {  /* Wide Thumb-2 */
      {0xaf, 0xf3, 0x00, 0x80},  /* LE */
      {0xf3, 0xaf, 0x80, 0x00},  /* BE */
    };

  unsigned bytes, fix, noop_size;
  char * p;
  const unsigned char * noop;
  const unsigned char *narrow_noop = NULL;
#ifdef OBJ_ELF
  enum mstate state;
#endif

  if (fragP->fr_type != rs_align_code)
    return;

  /* Total padding this frag must supply.  */
  bytes = fragP->fr_next->fr_address - fragP->fr_address - fragP->fr_fix;
  p = fragP->fr_literal + fragP->fr_fix;
  fix = 0;

  if (bytes > MAX_MEM_FOR_RS_ALIGN_CODE)
    bytes &= MAX_MEM_FOR_RS_ALIGN_CODE;

  /* arm_init_frag must have recorded the ARM/Thumb state.  */
  gas_assert ((fragP->tc_frag_data.thumb_mode & MODE_RECORDED) != 0);

  if (fragP->tc_frag_data.thumb_mode & (~ MODE_RECORDED))
    {
      /* Thumb code: use the Thumb-2 noops (narrow + wide) when the
	 selected CPU supports them, else the Thumb-1 "mov r8, r8".  */
      if (ARM_CPU_HAS_FEATURE (selected_cpu_name[0]
			       ? selected_cpu : arm_arch_none, arm_ext_v6t2))
	{
	  narrow_noop = thumb_noop[1][target_big_endian];
	  noop = wide_thumb_noop[target_big_endian];
	}
      else
	noop = thumb_noop[0][target_big_endian];
      noop_size = 2;
#ifdef OBJ_ELF
      state = MAP_THUMB;
#endif
    }
  else
    {
      noop = arm_noop[ARM_CPU_HAS_FEATURE (selected_cpu_name[0]
					   ? selected_cpu : arm_arch_none,
					   arm_ext_v6k) != 0]
		     [target_big_endian];
      noop_size = 4;
#ifdef OBJ_ELF
      state = MAP_ARM;
#endif
    }

  fragP->fr_var = noop_size;

  /* Pad any leading misalignment with zero bytes (marked as data for
     ELF mapping symbols) so the noops start on a noop boundary.  */
  if (bytes & (noop_size - 1))
    {
      fix = bytes & (noop_size - 1);
#ifdef OBJ_ELF
      insert_data_mapping_symbol (state, fragP->fr_fix, fragP, fix);
#endif
      memset (p, 0, fix);
      p += fix;
      bytes -= fix;
    }

  if (narrow_noop)
    {
      if (bytes & noop_size)
	{
	  /* Insert a narrow noop.  */
	  memcpy (p, narrow_noop, noop_size);
	  p += noop_size;
	  bytes -= noop_size;
	  fix += noop_size;
	}

      /* Use wide noops for the remainder */
      noop_size = 4;
    }

  while (bytes >= noop_size)
    {
      memcpy (p, noop, noop_size);
      p += noop_size;
      bytes -= noop_size;
      fix += noop_size;
    }

  fragP->fr_fix += fix;
}
  23855. /* Called from md_do_align. Used to create an alignment
  23856. frag in a code section. */
  23857. void
  23858. arm_frag_align_code (int n, int max)
  23859. {
  23860. char * p;
  23861. /* We assume that there will never be a requirement
  23862. to support alignments greater than MAX_MEM_FOR_RS_ALIGN_CODE bytes. */
  23863. if (max > MAX_MEM_FOR_RS_ALIGN_CODE)
  23864. {
  23865. char err_msg[128];
  23866. sprintf (err_msg,
  23867. _("alignments greater than %d bytes not supported in .text sections."),
  23868. MAX_MEM_FOR_RS_ALIGN_CODE + 1);
  23869. as_fatal ("%s", err_msg);
  23870. }
  23871. p = frag_var (rs_align_code,
  23872. MAX_MEM_FOR_RS_ALIGN_CODE,
  23873. 1,
  23874. (relax_substateT) max,
  23875. (symbolS *) NULL,
  23876. (offsetT) n,
  23877. (char *) NULL);
  23878. *p = 0;
  23879. }
  23880. /* Perform target specific initialisation of a frag.
  23881. Note - despite the name this initialisation is not done when the frag
  23882. is created, but only when its type is assigned. A frag can be created
  23883. and used a long time before its type is set, so beware of assuming that
  23884. this initialisation is performed first. */
  23885. #ifndef OBJ_ELF
/* Non-ELF variant: no mapping symbols are needed, so only record the
   current instruction-set state in the frag.  */
void
arm_init_frag (fragS * fragP, int max_chars ATTRIBUTE_UNUSED)
{
  /* Record whether this frag is in an ARM or a THUMB area.  */
  fragP->tc_frag_data.thumb_mode = thumb_mode | MODE_RECORDED;
}
  23892. #else /* OBJ_ELF is defined. */
/* ELF variant: record the ARM/Thumb state in the frag and emit a
   mapping-state symbol where one is required.  */
void
arm_init_frag (fragS * fragP, int max_chars)
{
  bool frag_thumb_mode;

  /* If the current ARM vs THUMB mode has not already
     been recorded into this frag then do so now.  */
  if ((fragP->tc_frag_data.thumb_mode & MODE_RECORDED) == 0)
    fragP->tc_frag_data.thumb_mode = thumb_mode | MODE_RECORDED;

  /* PR 21809: Do not set a mapping state for debug sections
     - it just confuses other tools.  */
  if (bfd_section_flags (now_seg) & SEC_DEBUGGING)
    return;

  /* XOR with MODE_RECORDED strips the recorded flag, leaving just the
     thumb bit.  */
  frag_thumb_mode = fragP->tc_frag_data.thumb_mode ^ MODE_RECORDED;

  /* Record a mapping symbol for alignment frags.  We will delete this
     later if the alignment ends up empty.  */
  switch (fragP->fr_type)
    {
    case rs_align:
    case rs_align_test:
    case rs_fill:
      /* Alignment/fill padding is data.  */
      mapping_state_2 (MAP_DATA, max_chars);
      break;
    case rs_align_code:
      /* Code alignment is padded with noops in the recorded state.  */
      mapping_state_2 (frag_thumb_mode ? MAP_THUMB : MAP_ARM, max_chars);
      break;
    default:
      break;
    }
}
  23922. /* When we change sections we need to issue a new mapping symbol. */
  23923. void
  23924. arm_elf_change_section (void)
  23925. {
  23926. /* Link an unlinked unwind index table section to the .text section. */
  23927. if (elf_section_type (now_seg) == SHT_ARM_EXIDX
  23928. && elf_linked_to_section (now_seg) == NULL)
  23929. elf_linked_to_section (now_seg) = text_section;
  23930. }
  23931. int
  23932. arm_elf_section_type (const char * str, size_t len)
  23933. {
  23934. if (len == 5 && startswith (str, "exidx"))
  23935. return SHT_ARM_EXIDX;
  23936. return -1;
  23937. }
  23938. /* Code to deal with unwinding tables. */
  23939. static void add_unwind_adjustsp (offsetT);
  23940. /* Generate any deferred unwind frame offset. */
  23941. static void
  23942. flush_pending_unwind (void)
  23943. {
  23944. offsetT offset;
  23945. offset = unwind.pending_offset;
  23946. unwind.pending_offset = 0;
  23947. if (offset != 0)
  23948. add_unwind_adjustsp (offset);
  23949. }
  23950. /* Add an opcode to this list for this function. Two-byte opcodes should
  23951. be passed as op[0] << 8 | op[1]. The list of opcodes is built in reverse
  23952. order. */
  23953. static void
  23954. add_unwind_opcode (valueT op, int length)
  23955. {
  23956. /* Add any deferred stack adjustment. */
  23957. if (unwind.pending_offset)
  23958. flush_pending_unwind ();
  23959. unwind.sp_restored = 0;
  23960. if (unwind.opcode_count + length > unwind.opcode_alloc)
  23961. {
  23962. unwind.opcode_alloc += ARM_OPCODE_CHUNK_SIZE;
  23963. if (unwind.opcodes)
  23964. unwind.opcodes = XRESIZEVEC (unsigned char, unwind.opcodes,
  23965. unwind.opcode_alloc);
  23966. else
  23967. unwind.opcodes = XNEWVEC (unsigned char, unwind.opcode_alloc);
  23968. }
  23969. while (length > 0)
  23970. {
  23971. length--;
  23972. unwind.opcodes[unwind.opcode_count] = op & 0xff;
  23973. op >>= 8;
  23974. unwind.opcode_count++;
  23975. }
  23976. }
/* Add unwind opcodes to adjust the stack pointer by OFFSET bytes.
   Positive offsets use the short (0x00-0x3f), two-byte (0x3f + arg)
   or long (0xb2 + uleb128) "vsp += x" encodings; negative offsets use
   the 0x40-0x7f "vsp -= x" encoding, repeated as needed.  */

static void
add_unwind_adjustsp (offsetT offset)
{
  valueT op;

  if (offset > 0x200)
    {
      /* We need at most 5 bytes to hold a 32-bit value in a uleb128.  */
      char bytes[5];
      int n;
      valueT o;

      /* Long form: 0xb2, uleb128.  */
      /* This might not fit in a word so add the individual bytes,
	 remembering the list is built in reverse order.  */
      o = (valueT) ((offset - 0x204) >> 2);
      /* A zero operand still needs one uleb128 byte.  */
      if (o == 0)
	add_unwind_opcode (0, 1);

      /* Calculate the uleb128 encoding of the offset.  */
      n = 0;
      while (o)
	{
	  bytes[n] = o & 0x7f;
	  o >>= 7;
	  if (o)
	    bytes[n] |= 0x80;
	  n++;
	}

      /* Add the insn.  Bytes go in reversed because the opcode list
	 itself is built in reverse.  */
      for (; n; n--)
	add_unwind_opcode (bytes[n - 1], 1);
      add_unwind_opcode (0xb2, 1);
    }
  else if (offset > 0x100)
    {
      /* Two short opcodes.  */
      add_unwind_opcode (0x3f, 1);
      op = (offset - 0x104) >> 2;
      add_unwind_opcode (op, 1);
    }
  else if (offset > 0)
    {
      /* Short opcode.  */
      op = (offset - 4) >> 2;
      add_unwind_opcode (op, 1);
    }
  else if (offset < 0)
    {
      offset = -offset;
      /* Each "vsp -= x" opcode covers at most 0x100 bytes; chain them
	 for larger decrements.  */
      while (offset > 0x100)
	{
	  add_unwind_opcode (0x7f, 1);
	  offset -= 0x100;
	}
      op = ((offset - 4) >> 2) | 0x40;
      add_unwind_opcode (op, 1);
    }
}
  24034. /* Finish the list of unwind opcodes for this function. */
  24035. static void
  24036. finish_unwind_opcodes (void)
  24037. {
  24038. valueT op;
  24039. if (unwind.fp_used)
  24040. {
  24041. /* Adjust sp as necessary. */
  24042. unwind.pending_offset += unwind.fp_offset - unwind.frame_size;
  24043. flush_pending_unwind ();
  24044. /* After restoring sp from the frame pointer. */
  24045. op = 0x90 | unwind.fp_reg;
  24046. add_unwind_opcode (op, 1);
  24047. }
  24048. else
  24049. flush_pending_unwind ();
  24050. }
/* Start an exception table entry.  If idx is nonzero this is an index table
   entry.  Switches the current section to the unwind section that pairs
   with TEXT_SEG, creating it if necessary.  */
static void
start_unwind_section (const segT text_seg, int idx)
{
  const char * text_name;
  const char * prefix;
  const char * prefix_once;
  struct elf_section_match match;
  char * sec_name;
  int type;
  int flags;
  int linkonce;

  /* Choose the section-name prefix and section type: the index table
     gets SHT_ARM_EXIDX, the unwind data section plain SHT_PROGBITS.  */
  if (idx)
    {
      prefix = ELF_STRING_ARM_unwind;
      prefix_once = ELF_STRING_ARM_unwind_once;
      type = SHT_ARM_EXIDX;
    }
  else
    {
      prefix = ELF_STRING_ARM_unwind_info;
      prefix_once = ELF_STRING_ARM_unwind_info_once;
      type = SHT_PROGBITS;
    }

  text_name = segment_name (text_seg);
  /* Unwind sections for the plain .text section use the bare prefix
     with no suffix appended.  */
  if (streq (text_name, ".text"))
    text_name = "";

  /* Old-style linkonce text sections use the "once" prefix plus the
     suffix after ".gnu.linkonce.t.".  */
  if (startswith (text_name, ".gnu.linkonce.t."))
    {
      prefix = prefix_once;
      text_name += strlen (".gnu.linkonce.t.");
    }

  sec_name = concat (prefix, text_name, (char *) NULL);

  flags = SHF_ALLOC;
  linkonce = 0;
  memset (&match, 0, sizeof (match));

  /* Handle COMDAT group.  */
  if (prefix != prefix_once && (text_seg->flags & SEC_LINK_ONCE) != 0)
    {
      match.group_name = elf_group_name (text_seg);
      if (match.group_name == NULL)
	{
	  as_bad (_("Group section `%s' has no group signature"),
		  segment_name (text_seg));
	  ignore_rest_of_line ();
	  return;
	}
      flags |= SHF_GROUP;
      linkonce = 1;
    }

  obj_elf_change_section (sec_name, type, flags, 0, &match,
			  linkonce, 0);

  /* Set the section link for index tables.  */
  if (idx)
    elf_linked_to_section (now_seg) = text_seg;
}
/* Start an unwind table entry.  HAVE_DATA is nonzero if we have additional
   personality routine data.  Returns zero, or the index table value for
   an inline entry.  */

static valueT
create_unwind_entry (int have_data)
{
  int size;
  addressT where;
  char *ptr;
  /* The current word of data.  */
  valueT data;
  /* The number of bytes left in this word.  */
  int n;

  finish_unwind_opcodes ();

  /* Remember the current text section.  */
  unwind.saved_seg = now_seg;
  unwind.saved_subseg = now_subseg;

  start_unwind_section (now_seg, 0);

  if (unwind.personality_routine == NULL)
    {
      /* personality_index == -2 marks a .cantunwind frame: no unwind
	 data is emitted at all.  */
      if (unwind.personality_index == -2)
	{
	  if (have_data)
	    as_bad (_("handlerdata in cantunwind frame"));
	  return 1; /* EXIDX_CANTUNWIND.  */
	}

      /* Use a default personality routine if none is specified.
	 Index 0 can only hold three opcodes inline.  */
      if (unwind.personality_index == -1)
	{
	  if (unwind.opcode_count > 3)
	    unwind.personality_index = 1;
	  else
	    unwind.personality_index = 0;
	}

      /* Space for the personality routine entry.  */
      if (unwind.personality_index == 0)
	{
	  if (unwind.opcode_count > 3)
	    as_bad (_("too many unwind opcodes for personality routine 0"));

	  if (!have_data)
	    {
	      /* All the data is inline in the index table: 0x80 tag byte
		 followed by up to three opcodes, padded with 0xb0.  */
	      data = 0x80;
	      n = 3;
	      while (unwind.opcode_count > 0)
		{
		  unwind.opcode_count--;
		  data = (data << 8) | unwind.opcodes[unwind.opcode_count];
		  n--;
		}

	      /* Pad with "finish" opcodes.  */
	      while (n--)
		data = (data << 8) | 0xb0;

	      return data;
	    }
	  size = 0;
	}
      else
	/* We get two opcodes "free" in the first word.  */
	size = unwind.opcode_count - 2;
    }
  else
    {
      /* PR 16765: Missing or misplaced unwind directives can trigger this.  */
      if (unwind.personality_index != -1)
	{
	  as_bad (_("attempt to recreate an unwind entry"));
	  return 1;
	}

      /* An extra byte is required for the opcode count.  */
      size = unwind.opcode_count + 1;
    }

  /* Convert the byte count to a word count, rounding up.  The word
     count must fit in the 8-bit length field.  */
  size = (size + 3) >> 2;
  if (size > 0xff)
    as_bad (_("too many unwind opcodes"));

  /* Word-align the entry (2^2 bytes).  */
  frag_align (2, 0, 0);
  record_alignment (now_seg, 2);
  unwind.table_entry = expr_build_dot ();

  /* Allocate the table entry.  */
  ptr = frag_more ((size << 2) + 4);
  /* PR 13449: Zero the table entries in case some of them are not used.  */
  memset (ptr, 0, (size << 2) + 4);
  where = frag_now_fix () - ((size << 2) + 4);

  switch (unwind.personality_index)
    {
    case -1:
      /* ??? Should this be a PLT generating relocation?  */
      /* Custom personality routine: emit a PREL31 reference to it as
	 the first word of the entry.  */
      fix_new (frag_now, where, 4, unwind.personality_routine, 0, 1,
	       BFD_RELOC_ARM_PREL31);

      where += 4;
      ptr += 4;

      /* Set the first byte to the number of additional words.  */
      data = size > 0 ? size - 1 : 0;
      n = 3;
      break;

      /* ABI defined personality routines.  */
    case 0:
      /* Three opcodes bytes are packed into the first word.  */
      data = 0x80;
      n = 3;
      break;

    case 1:
    case 2:
      /* The size and first two opcode bytes go in the first word.  */
      data = ((0x80 + unwind.personality_index) << 8) | size;
      n = 2;
      break;

    default:
      /* Should never happen.  */
      abort ();
    }

  /* Pack the opcodes into words (MSB first), reversing the list at the same
     time.  */
  while (unwind.opcode_count > 0)
    {
      if (n == 0)
	{
	  md_number_to_chars (ptr, data, 4);
	  ptr += 4;
	  n = 4;
	  data = 0;
	}
      unwind.opcode_count--;
      n--;
      data = (data << 8) | unwind.opcodes[unwind.opcode_count];
    }

  /* Finish off the last word.  */
  if (n < 4)
    {
      /* Pad with "finish" opcodes.  */
      while (n--)
	data = (data << 8) | 0xb0;

      md_number_to_chars (ptr, data, 4);
    }

  if (!have_data)
    {
      /* Add an empty descriptor if there is no user-specified data.  */
      ptr = frag_more (4);
      md_number_to_chars (ptr, 0, 4);
    }

  return 0;
}
/* Initialize the DWARF-2 unwind information for this procedure.  */

void
tc_arm_frame_initial_instructions (void)
{
  /* The initial CFA is the stack pointer with zero offset.  */
  cfi_add_CFA_def_cfa (REG_SP, 0);
}
  24257. #endif /* OBJ_ELF */
  24258. /* Convert REGNAME to a DWARF-2 register number. */
  24259. int
  24260. tc_arm_regname_to_dw2regnum (char *regname)
  24261. {
  24262. int reg = arm_reg_parse (&regname, REG_TYPE_RN);
  24263. if (reg != FAIL)
  24264. return reg;
  24265. /* PR 16694: Allow VFP registers as well. */
  24266. reg = arm_reg_parse (&regname, REG_TYPE_VFS);
  24267. if (reg != FAIL)
  24268. return 64 + reg;
  24269. reg = arm_reg_parse (&regname, REG_TYPE_VFD);
  24270. if (reg != FAIL)
  24271. return reg + 256;
  24272. return FAIL;
  24273. }
  24274. #ifdef TE_PE
  24275. void
  24276. tc_pe_dwarf2_emit_offset (symbolS *symbol, unsigned int size)
  24277. {
  24278. expressionS exp;
  24279. exp.X_op = O_secrel;
  24280. exp.X_add_symbol = symbol;
  24281. exp.X_add_number = 0;
  24282. emit_expr (&exp, size);
  24283. }
  24284. #endif
/* MD interface: Symbol and relocation handling.  */

/* Return the address within the segment that a PC-relative fixup is
   relative to.  For ARM, PC-relative fixups applied to instructions
   are generally relative to the location of the fixup plus 8 bytes.
   Thumb branches are offset by 4, and Thumb loads relative to PC
   require special handling.  */

long
md_pcrel_from_section (fixS * fixP, segT seg)
{
  /* The address of the fixup itself within the segment.  */
  offsetT base = fixP->fx_where + fixP->fx_frag->fr_address;

  /* If this is pc-relative and we are going to emit a relocation
     then we just want to put out any pipeline compensation that the linker
     will need.  Otherwise we want to use the calculated base.
     For WinCE we skip the bias for externals as well, since this
     is how the MS ARM-CE assembler behaves and we want to be compatible.  */
  if (fixP->fx_pcrel
      && ((fixP->fx_addsy && S_GET_SEGMENT (fixP->fx_addsy) != seg)
	  || (arm_force_relocation (fixP)
#ifdef TE_WINCE
	      && !S_IS_EXTERNAL (fixP->fx_addsy)
#endif
	      )))
    base = 0;

  switch (fixP->fx_r_type)
    {
      /* PC relative addressing on the Thumb is slightly odd as the
	 bottom two bits of the PC are forced to zero for the
	 calculation.  This happens *after* application of the
	 pipeline offset.  However, Thumb adrl already adjusts for
	 this, so we need not do it again.  */
    case BFD_RELOC_ARM_THUMB_ADD:
      return base & ~3;

    case BFD_RELOC_ARM_THUMB_OFFSET:
    case BFD_RELOC_ARM_T32_OFFSET_IMM:
    case BFD_RELOC_ARM_T32_ADD_PC12:
    case BFD_RELOC_ARM_T32_CP_OFF_IMM:
      return (base + 4) & ~3;

      /* Thumb branches are simply offset by +4.  */
    case BFD_RELOC_THUMB_PCREL_BRANCH5:
    case BFD_RELOC_THUMB_PCREL_BRANCH7:
    case BFD_RELOC_THUMB_PCREL_BRANCH9:
    case BFD_RELOC_THUMB_PCREL_BRANCH12:
    case BFD_RELOC_THUMB_PCREL_BRANCH20:
    case BFD_RELOC_THUMB_PCREL_BRANCH25:
    case BFD_RELOC_THUMB_PCREL_BFCSEL:
    case BFD_RELOC_ARM_THUMB_BF17:
    case BFD_RELOC_ARM_THUMB_BF19:
    case BFD_RELOC_ARM_THUMB_BF13:
    case BFD_RELOC_ARM_THUMB_LOOP12:
      return base + 4;

    case BFD_RELOC_THUMB_PCREL_BRANCH23:
      /* For a same-section, non-forced reference to an ARM-mode
	 function on v5t+, use the real fixup address rather than the
	 zeroed base.  NOTE(review): presumably this pairs with an
	 interworking fixup elsewhere; that code is not visible here.  */
      if (fixP->fx_addsy
	  && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
	  && !S_FORCE_RELOC (fixP->fx_addsy, true)
	  && ARM_IS_FUNC (fixP->fx_addsy)
	  && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t))
	base = fixP->fx_where + fixP->fx_frag->fr_address;
      return base + 4;

      /* BLX is like branches above, but forces the low two bits of PC to
	 zero.  */
    case BFD_RELOC_THUMB_PCREL_BLX:
      if (fixP->fx_addsy
	  && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
	  && !S_FORCE_RELOC (fixP->fx_addsy, true)
	  && THUMB_IS_FUNC (fixP->fx_addsy)
	  && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t))
	base = fixP->fx_where + fixP->fx_frag->fr_address;
      return (base + 4) & ~3;

      /* ARM mode branches are offset by +8.  However, the Windows CE
	 loader expects the relocation not to take this into account.  */
    case BFD_RELOC_ARM_PCREL_BLX:
      if (fixP->fx_addsy
	  && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
	  && !S_FORCE_RELOC (fixP->fx_addsy, true)
	  && ARM_IS_FUNC (fixP->fx_addsy)
	  && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t))
	base = fixP->fx_where + fixP->fx_frag->fr_address;
      return base + 8;

    case BFD_RELOC_ARM_PCREL_CALL:
      if (fixP->fx_addsy
	  && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
	  && !S_FORCE_RELOC (fixP->fx_addsy, true)
	  && THUMB_IS_FUNC (fixP->fx_addsy)
	  && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t))
	base = fixP->fx_where + fixP->fx_frag->fr_address;
      return base + 8;

    case BFD_RELOC_ARM_PCREL_BRANCH:
    case BFD_RELOC_ARM_PCREL_JUMP:
    case BFD_RELOC_ARM_PLT32:
#ifdef TE_WINCE
      /* When handling fixups immediately, because we have already
	 discovered the value of a symbol, or the address of the frag involved
	 we must account for the offset by +8, as the OS loader will never see the reloc.
	 see fixup_segment() in write.c
	 The S_IS_EXTERNAL test handles the case of global symbols.
	 Those need the calculated base, not just the pipe compensation the linker will need.  */
      if (fixP->fx_pcrel
	  && fixP->fx_addsy != NULL
	  && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
	  && (S_IS_EXTERNAL (fixP->fx_addsy) || !arm_force_relocation (fixP)))
	return base + 8;
      return base;
#else
      return base + 8;
#endif

      /* ARM mode loads relative to PC are also offset by +8.  Unlike
	 branches, the Windows CE loader *does* expect the relocation
	 to take this into account.  */
    case BFD_RELOC_ARM_OFFSET_IMM:
    case BFD_RELOC_ARM_OFFSET_IMM8:
    case BFD_RELOC_ARM_HWLITERAL:
    case BFD_RELOC_ARM_LITERAL:
    case BFD_RELOC_ARM_CP_OFF_IMM:
      return base + 8;

      /* Other PC-relative relocations are un-offset.  */
    default:
      return base;
    }
}
/* When true, warn if an assignment creates a symbol whose name matches
   an ARM mnemonic (PR 18347; see arm_tc_equal_in_insn).  Presumably
   cleared by a command-line option -- not visible in this chunk.  */
static bool flag_warn_syms = true;
  24405. bool
  24406. arm_tc_equal_in_insn (int c ATTRIBUTE_UNUSED, char * name)
  24407. {
  24408. /* PR 18347 - Warn if the user attempts to create a symbol with the same
  24409. name as an ARM instruction. Whilst strictly speaking it is allowed, it
  24410. does mean that the resulting code might be very confusing to the reader.
  24411. Also this warning can be triggered if the user omits an operand before
  24412. an immediate address, eg:
  24413. LDR =foo
  24414. GAS treats this as an assignment of the value of the symbol foo to a
  24415. symbol LDR, and so (without this code) it will not issue any kind of
  24416. warning or error message.
  24417. Note - ARM instructions are case-insensitive but the strings in the hash
  24418. table are all stored in lower case, so we must first ensure that name is
  24419. lower case too. */
  24420. if (flag_warn_syms && arm_ops_hsh)
  24421. {
  24422. char * nbuf = strdup (name);
  24423. char * p;
  24424. for (p = nbuf; *p; p++)
  24425. *p = TOLOWER (*p);
  24426. if (str_hash_find (arm_ops_hsh, nbuf) != NULL)
  24427. {
  24428. static htab_t already_warned = NULL;
  24429. if (already_warned == NULL)
  24430. already_warned = str_htab_create ();
  24431. /* Only warn about the symbol once. To keep the code
  24432. simple we let str_hash_insert do the lookup for us. */
  24433. if (str_hash_find (already_warned, nbuf) == NULL)
  24434. {
  24435. as_warn (_("[-mwarn-syms]: Assignment makes a symbol match an ARM instruction: %s"), name);
  24436. str_hash_insert (already_warned, nbuf, NULL, 0);
  24437. }
  24438. }
  24439. else
  24440. free (nbuf);
  24441. }
  24442. return false;
  24443. }
  24444. /* Under ELF we need to default _GLOBAL_OFFSET_TABLE.
  24445. Otherwise we have no need to default values of symbols. */
  24446. symbolS *
  24447. md_undefined_symbol (char * name ATTRIBUTE_UNUSED)
  24448. {
  24449. #ifdef OBJ_ELF
  24450. if (name[0] == '_' && name[1] == 'G'
  24451. && streq (name, GLOBAL_OFFSET_TABLE_NAME))
  24452. {
  24453. if (!GOT_symbol)
  24454. {
  24455. if (symbol_find (name))
  24456. as_bad (_("GOT already in the symbol table"));
  24457. GOT_symbol = symbol_new (name, undefined_section,
  24458. &zero_address_frag, 0);
  24459. }
  24460. return GOT_symbol;
  24461. }
  24462. #endif
  24463. return NULL;
  24464. }
  24465. /* Subroutine of md_apply_fix. Check to see if an immediate can be
  24466. computed as two separate immediate values, added together. We
  24467. already know that this value cannot be computed by just one ARM
  24468. instruction. */
  24469. static unsigned int
  24470. validate_immediate_twopart (unsigned int val,
  24471. unsigned int * highpart)
  24472. {
  24473. unsigned int a;
  24474. unsigned int i;
  24475. for (i = 0; i < 32; i += 2)
  24476. if (((a = rotate_left (val, i)) & 0xff) != 0)
  24477. {
  24478. if (a & 0xff00)
  24479. {
  24480. if (a & ~ 0xffff)
  24481. continue;
  24482. * highpart = (a >> 8) | ((i + 24) << 7);
  24483. }
  24484. else if (a & 0xff0000)
  24485. {
  24486. if (a & 0xff000000)
  24487. continue;
  24488. * highpart = (a >> 16) | ((i + 16) << 7);
  24489. }
  24490. else
  24491. {
  24492. gas_assert (a & 0xff000000);
  24493. * highpart = (a >> 24) | ((i + 8) << 7);
  24494. }
  24495. return (a & 0xff) | (i << 7);
  24496. }
  24497. return FAIL;
  24498. }
  24499. static int
  24500. validate_offset_imm (unsigned int val, int hwse)
  24501. {
  24502. if ((hwse && val > 255) || val > 4095)
  24503. return FAIL;
  24504. return val;
  24505. }
  24506. /* Subroutine of md_apply_fix. Do those data_ops which can take a
  24507. negative immediate constant by altering the instruction. A bit of
  24508. a hack really.
  24509. MOV <-> MVN
  24510. AND <-> BIC
  24511. ADC <-> SBC
  24512. by inverting the second operand, and
  24513. ADD <-> SUB
  24514. CMP <-> CMN
  24515. by negating the second operand. */
  24516. static int
  24517. negate_data_op (unsigned long * instruction,
  24518. unsigned long value)
  24519. {
  24520. int op, new_inst;
  24521. unsigned long negated, inverted;
  24522. negated = encode_arm_immediate (-value);
  24523. inverted = encode_arm_immediate (~value);
  24524. op = (*instruction >> DATA_OP_SHIFT) & 0xf;
  24525. switch (op)
  24526. {
  24527. /* First negates. */
  24528. case OPCODE_SUB: /* ADD <-> SUB */
  24529. new_inst = OPCODE_ADD;
  24530. value = negated;
  24531. break;
  24532. case OPCODE_ADD:
  24533. new_inst = OPCODE_SUB;
  24534. value = negated;
  24535. break;
  24536. case OPCODE_CMP: /* CMP <-> CMN */
  24537. new_inst = OPCODE_CMN;
  24538. value = negated;
  24539. break;
  24540. case OPCODE_CMN:
  24541. new_inst = OPCODE_CMP;
  24542. value = negated;
  24543. break;
  24544. /* Now Inverted ops. */
  24545. case OPCODE_MOV: /* MOV <-> MVN */
  24546. new_inst = OPCODE_MVN;
  24547. value = inverted;
  24548. break;
  24549. case OPCODE_MVN:
  24550. new_inst = OPCODE_MOV;
  24551. value = inverted;
  24552. break;
  24553. case OPCODE_AND: /* AND <-> BIC */
  24554. new_inst = OPCODE_BIC;
  24555. value = inverted;
  24556. break;
  24557. case OPCODE_BIC:
  24558. new_inst = OPCODE_AND;
  24559. value = inverted;
  24560. break;
  24561. case OPCODE_ADC: /* ADC <-> SBC */
  24562. new_inst = OPCODE_SBC;
  24563. value = inverted;
  24564. break;
  24565. case OPCODE_SBC:
  24566. new_inst = OPCODE_ADC;
  24567. value = inverted;
  24568. break;
  24569. /* We cannot do anything. */
  24570. default:
  24571. return FAIL;
  24572. }
  24573. if (value == (unsigned) FAIL)
  24574. return FAIL;
  24575. *instruction &= OPCODE_MASK;
  24576. *instruction |= new_inst << DATA_OP_SHIFT;
  24577. return value;
  24578. }
  24579. /* Like negate_data_op, but for Thumb-2. */
  24580. static unsigned int
  24581. thumb32_negate_data_op (valueT *instruction, unsigned int value)
  24582. {
  24583. unsigned int op, new_inst;
  24584. unsigned int rd;
  24585. unsigned int negated, inverted;
  24586. negated = encode_thumb32_immediate (-value);
  24587. inverted = encode_thumb32_immediate (~value);
  24588. rd = (*instruction >> 8) & 0xf;
  24589. op = (*instruction >> T2_DATA_OP_SHIFT) & 0xf;
  24590. switch (op)
  24591. {
  24592. /* ADD <-> SUB. Includes CMP <-> CMN. */
  24593. case T2_OPCODE_SUB:
  24594. new_inst = T2_OPCODE_ADD;
  24595. value = negated;
  24596. break;
  24597. case T2_OPCODE_ADD:
  24598. new_inst = T2_OPCODE_SUB;
  24599. value = negated;
  24600. break;
  24601. /* ORR <-> ORN. Includes MOV <-> MVN. */
  24602. case T2_OPCODE_ORR:
  24603. new_inst = T2_OPCODE_ORN;
  24604. value = inverted;
  24605. break;
  24606. case T2_OPCODE_ORN:
  24607. new_inst = T2_OPCODE_ORR;
  24608. value = inverted;
  24609. break;
  24610. /* AND <-> BIC. TST has no inverted equivalent. */
  24611. case T2_OPCODE_AND:
  24612. new_inst = T2_OPCODE_BIC;
  24613. if (rd == 15)
  24614. value = FAIL;
  24615. else
  24616. value = inverted;
  24617. break;
  24618. case T2_OPCODE_BIC:
  24619. new_inst = T2_OPCODE_AND;
  24620. value = inverted;
  24621. break;
  24622. /* ADC <-> SBC */
  24623. case T2_OPCODE_ADC:
  24624. new_inst = T2_OPCODE_SBC;
  24625. value = inverted;
  24626. break;
  24627. case T2_OPCODE_SBC:
  24628. new_inst = T2_OPCODE_ADC;
  24629. value = inverted;
  24630. break;
  24631. /* We cannot do anything. */
  24632. default:
  24633. return FAIL;
  24634. }
  24635. if (value == (unsigned int)FAIL)
  24636. return FAIL;
  24637. *instruction &= T2_OPCODE_MASK;
  24638. *instruction |= new_inst << T2_DATA_OP_SHIFT;
  24639. return value;
  24640. }
  24641. /* Read a 32-bit thumb instruction from buf. */
  24642. static unsigned long
  24643. get_thumb32_insn (char * buf)
  24644. {
  24645. unsigned long insn;
  24646. insn = md_chars_to_number (buf, THUMB_SIZE) << 16;
  24647. insn |= md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
  24648. return insn;
  24649. }
  24650. /* We usually want to set the low bit on the address of thumb function
  24651. symbols. In particular .word foo - . should have the low bit set.
  24652. Generic code tries to fold the difference of two symbols to
  24653. a constant. Prevent this and force a relocation when the first symbols
  24654. is a thumb function. */
  24655. bool
  24656. arm_optimize_expr (expressionS *l, operatorT op, expressionS *r)
  24657. {
  24658. if (op == O_subtract
  24659. && l->X_op == O_symbol
  24660. && r->X_op == O_symbol
  24661. && THUMB_IS_FUNC (l->X_add_symbol))
  24662. {
  24663. l->X_op = O_subtract;
  24664. l->X_op_symbol = r->X_add_symbol;
  24665. l->X_add_number -= r->X_add_number;
  24666. return true;
  24667. }
  24668. /* Process as normal. */
  24669. return false;
  24670. }
/* Encode Thumb2 unconditional branches and calls.  The encoding
   for the 2 are identical for the immediate values.  BUF points at the
   two instruction halfwords; VALUE is the signed byte offset.  */

static void
encode_thumb2_b_bl_offset (char * buf, offsetT value)
{
#define T2I1I2MASK  ((1 << 13) | (1 << 11))
  offsetT newval;
  offsetT newval2;
  addressT S, I1, I2, lo, hi;

  /* Split the offset into the fields of the encoding: sign bit S,
     the two high "inverted" bits I1/I2, and the high and low
     immediate halves.  */
  S = (value >> 24) & 0x01;
  I1 = (value >> 23) & 0x01;
  I2 = (value >> 22) & 0x01;
  hi = (value >> 12) & 0x3ff;
  lo = (value >> 1) & 0x7ff;

  newval = md_chars_to_number (buf, THUMB_SIZE);
  newval2 = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
  newval |= (S << 10) | hi;
  newval2 &= ~T2I1I2MASK;
  /* J1 = I1 EOR (NOT S) and J2 = I2 EOR (NOT S): the trailing XOR with
     the mask supplies the NOT on both single-bit fields at once.  */
  newval2 |= (((I1 ^ S) << 13) | ((I2 ^ S) << 11) | lo) ^ T2I1I2MASK;
  md_number_to_chars (buf, newval, THUMB_SIZE);
  md_number_to_chars (buf + THUMB_SIZE, newval2, THUMB_SIZE);
}
  24693. void
  24694. md_apply_fix (fixS * fixP,
  24695. valueT * valP,
  24696. segT seg)
  24697. {
  24698. valueT value = * valP;
  24699. valueT newval;
  24700. unsigned int newimm;
  24701. unsigned long temp;
  24702. int sign;
  24703. char * buf = fixP->fx_where + fixP->fx_frag->fr_literal;
  24704. gas_assert (fixP->fx_r_type <= BFD_RELOC_UNUSED);
  24705. /* Note whether this will delete the relocation. */
  24706. if (fixP->fx_addsy == 0 && !fixP->fx_pcrel)
  24707. fixP->fx_done = 1;
  24708. /* On a 64-bit host, silently truncate 'value' to 32 bits for
  24709. consistency with the behaviour on 32-bit hosts. Remember value
  24710. for emit_reloc. */
  24711. value &= 0xffffffff;
  24712. value ^= 0x80000000;
  24713. value -= 0x80000000;
  24714. *valP = value;
  24715. fixP->fx_addnumber = value;
  24716. /* Same treatment for fixP->fx_offset. */
  24717. fixP->fx_offset &= 0xffffffff;
  24718. fixP->fx_offset ^= 0x80000000;
  24719. fixP->fx_offset -= 0x80000000;
  24720. switch (fixP->fx_r_type)
  24721. {
  24722. case BFD_RELOC_NONE:
  24723. /* This will need to go in the object file. */
  24724. fixP->fx_done = 0;
  24725. break;
  24726. case BFD_RELOC_ARM_IMMEDIATE:
  24727. /* We claim that this fixup has been processed here,
  24728. even if in fact we generate an error because we do
  24729. not have a reloc for it, so tc_gen_reloc will reject it. */
  24730. fixP->fx_done = 1;
  24731. if (fixP->fx_addsy)
  24732. {
  24733. const char *msg = 0;
  24734. if (! S_IS_DEFINED (fixP->fx_addsy))
  24735. msg = _("undefined symbol %s used as an immediate value");
  24736. else if (S_GET_SEGMENT (fixP->fx_addsy) != seg)
  24737. msg = _("symbol %s is in a different section");
  24738. else if (S_IS_WEAK (fixP->fx_addsy))
  24739. msg = _("symbol %s is weak and may be overridden later");
  24740. if (msg)
  24741. {
  24742. as_bad_where (fixP->fx_file, fixP->fx_line,
  24743. msg, S_GET_NAME (fixP->fx_addsy));
  24744. break;
  24745. }
  24746. }
  24747. temp = md_chars_to_number (buf, INSN_SIZE);
  24748. /* If the offset is negative, we should use encoding A2 for ADR. */
  24749. if ((temp & 0xfff0000) == 0x28f0000 && (offsetT) value < 0)
  24750. newimm = negate_data_op (&temp, value);
  24751. else
  24752. {
  24753. newimm = encode_arm_immediate (value);
  24754. /* If the instruction will fail, see if we can fix things up by
  24755. changing the opcode. */
  24756. if (newimm == (unsigned int) FAIL)
  24757. newimm = negate_data_op (&temp, value);
  24758. /* MOV accepts both ARM modified immediate (A1 encoding) and
  24759. UINT16 (A2 encoding) when possible, MOVW only accepts UINT16.
  24760. When disassembling, MOV is preferred when there is no encoding
  24761. overlap. */
  24762. if (newimm == (unsigned int) FAIL
  24763. && ((temp >> DATA_OP_SHIFT) & 0xf) == OPCODE_MOV
  24764. && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2)
  24765. && !((temp >> SBIT_SHIFT) & 0x1)
  24766. && value <= 0xffff)
  24767. {
  24768. /* Clear bits[23:20] to change encoding from A1 to A2. */
  24769. temp &= 0xff0fffff;
  24770. /* Encoding high 4bits imm. Code below will encode the remaining
  24771. low 12bits. */
  24772. temp |= (value & 0x0000f000) << 4;
  24773. newimm = value & 0x00000fff;
  24774. }
  24775. }
  24776. if (newimm == (unsigned int) FAIL)
  24777. {
  24778. as_bad_where (fixP->fx_file, fixP->fx_line,
  24779. _("invalid constant (%lx) after fixup"),
  24780. (unsigned long) value);
  24781. break;
  24782. }
  24783. newimm |= (temp & 0xfffff000);
  24784. md_number_to_chars (buf, (valueT) newimm, INSN_SIZE);
  24785. break;
  24786. case BFD_RELOC_ARM_ADRL_IMMEDIATE:
  24787. {
  24788. unsigned int highpart = 0;
  24789. unsigned int newinsn = 0xe1a00000; /* nop. */
  24790. if (fixP->fx_addsy)
  24791. {
  24792. const char *msg = 0;
  24793. if (! S_IS_DEFINED (fixP->fx_addsy))
  24794. msg = _("undefined symbol %s used as an immediate value");
  24795. else if (S_GET_SEGMENT (fixP->fx_addsy) != seg)
  24796. msg = _("symbol %s is in a different section");
  24797. else if (S_IS_WEAK (fixP->fx_addsy))
  24798. msg = _("symbol %s is weak and may be overridden later");
  24799. if (msg)
  24800. {
  24801. as_bad_where (fixP->fx_file, fixP->fx_line,
  24802. msg, S_GET_NAME (fixP->fx_addsy));
  24803. break;
  24804. }
  24805. }
  24806. newimm = encode_arm_immediate (value);
  24807. temp = md_chars_to_number (buf, INSN_SIZE);
  24808. /* If the instruction will fail, see if we can fix things up by
  24809. changing the opcode. */
  24810. if (newimm == (unsigned int) FAIL
  24811. && (newimm = negate_data_op (& temp, value)) == (unsigned int) FAIL)
  24812. {
  24813. /* No ? OK - try using two ADD instructions to generate
  24814. the value. */
  24815. newimm = validate_immediate_twopart (value, & highpart);
  24816. /* Yes - then make sure that the second instruction is
  24817. also an add. */
  24818. if (newimm != (unsigned int) FAIL)
  24819. newinsn = temp;
  24820. /* Still No ? Try using a negated value. */
  24821. else if ((newimm = validate_immediate_twopart (- value, & highpart)) != (unsigned int) FAIL)
  24822. temp = newinsn = (temp & OPCODE_MASK) | OPCODE_SUB << DATA_OP_SHIFT;
  24823. /* Otherwise - give up. */
  24824. else
  24825. {
  24826. as_bad_where (fixP->fx_file, fixP->fx_line,
  24827. _("unable to compute ADRL instructions for PC offset of 0x%lx"),
  24828. (long) value);
  24829. break;
  24830. }
  24831. /* Replace the first operand in the 2nd instruction (which
  24832. is the PC) with the destination register. We have
  24833. already added in the PC in the first instruction and we
  24834. do not want to do it again. */
  24835. newinsn &= ~ 0xf0000;
  24836. newinsn |= ((newinsn & 0x0f000) << 4);
  24837. }
  24838. newimm |= (temp & 0xfffff000);
  24839. md_number_to_chars (buf, (valueT) newimm, INSN_SIZE);
  24840. highpart |= (newinsn & 0xfffff000);
  24841. md_number_to_chars (buf + INSN_SIZE, (valueT) highpart, INSN_SIZE);
  24842. }
  24843. break;
  24844. case BFD_RELOC_ARM_OFFSET_IMM:
  24845. if (!fixP->fx_done && seg->use_rela_p)
  24846. value = 0;
  24847. /* Fall through. */
  24848. case BFD_RELOC_ARM_LITERAL:
  24849. sign = (offsetT) value > 0;
  24850. if ((offsetT) value < 0)
  24851. value = - value;
  24852. if (validate_offset_imm (value, 0) == FAIL)
  24853. {
  24854. if (fixP->fx_r_type == BFD_RELOC_ARM_LITERAL)
  24855. as_bad_where (fixP->fx_file, fixP->fx_line,
  24856. _("invalid literal constant: pool needs to be closer"));
  24857. else
  24858. as_bad_where (fixP->fx_file, fixP->fx_line,
  24859. _("bad immediate value for offset (%ld)"),
  24860. (long) value);
  24861. break;
  24862. }
  24863. newval = md_chars_to_number (buf, INSN_SIZE);
  24864. if (value == 0)
  24865. newval &= 0xfffff000;
  24866. else
  24867. {
  24868. newval &= 0xff7ff000;
  24869. newval |= value | (sign ? INDEX_UP : 0);
  24870. }
  24871. md_number_to_chars (buf, newval, INSN_SIZE);
  24872. break;
  24873. case BFD_RELOC_ARM_OFFSET_IMM8:
  24874. case BFD_RELOC_ARM_HWLITERAL:
  24875. sign = (offsetT) value > 0;
  24876. if ((offsetT) value < 0)
  24877. value = - value;
  24878. if (validate_offset_imm (value, 1) == FAIL)
  24879. {
  24880. if (fixP->fx_r_type == BFD_RELOC_ARM_HWLITERAL)
  24881. as_bad_where (fixP->fx_file, fixP->fx_line,
  24882. _("invalid literal constant: pool needs to be closer"));
  24883. else
  24884. as_bad_where (fixP->fx_file, fixP->fx_line,
  24885. _("bad immediate value for 8-bit offset (%ld)"),
  24886. (long) value);
  24887. break;
  24888. }
  24889. newval = md_chars_to_number (buf, INSN_SIZE);
  24890. if (value == 0)
  24891. newval &= 0xfffff0f0;
  24892. else
  24893. {
  24894. newval &= 0xff7ff0f0;
  24895. newval |= ((value >> 4) << 8) | (value & 0xf) | (sign ? INDEX_UP : 0);
  24896. }
  24897. md_number_to_chars (buf, newval, INSN_SIZE);
  24898. break;
  24899. case BFD_RELOC_ARM_T32_OFFSET_U8:
  24900. if (value > 1020 || value % 4 != 0)
  24901. as_bad_where (fixP->fx_file, fixP->fx_line,
  24902. _("bad immediate value for offset (%ld)"), (long) value);
  24903. value /= 4;
  24904. newval = md_chars_to_number (buf+2, THUMB_SIZE);
  24905. newval |= value;
  24906. md_number_to_chars (buf+2, newval, THUMB_SIZE);
  24907. break;
  24908. case BFD_RELOC_ARM_T32_OFFSET_IMM:
  24909. /* This is a complicated relocation used for all varieties of Thumb32
  24910. load/store instruction with immediate offset:
  24911. 1110 100P u1WL NNNN XXXX YYYY iiii iiii - +/-(U) pre/post(P) 8-bit,
  24912. *4, optional writeback(W)
  24913. (doubleword load/store)
  24914. 1111 100S uTTL 1111 XXXX iiii iiii iiii - +/-(U) 12-bit PC-rel
  24915. 1111 100S 0TTL NNNN XXXX 1Pu1 iiii iiii - +/-(U) pre/post(P) 8-bit
  24916. 1111 100S 0TTL NNNN XXXX 1110 iiii iiii - positive 8-bit (T instruction)
  24917. 1111 100S 1TTL NNNN XXXX iiii iiii iiii - positive 12-bit
  24918. 1111 100S 0TTL NNNN XXXX 1100 iiii iiii - negative 8-bit
  24919. Uppercase letters indicate bits that are already encoded at
  24920. this point. Lowercase letters are our problem. For the
  24921. second block of instructions, the secondary opcode nybble
  24922. (bits 8..11) is present, and bit 23 is zero, even if this is
  24923. a PC-relative operation. */
  24924. newval = md_chars_to_number (buf, THUMB_SIZE);
  24925. newval <<= 16;
  24926. newval |= md_chars_to_number (buf+THUMB_SIZE, THUMB_SIZE);
  24927. if ((newval & 0xf0000000) == 0xe0000000)
  24928. {
  24929. /* Doubleword load/store: 8-bit offset, scaled by 4. */
  24930. if ((offsetT) value >= 0)
  24931. newval |= (1 << 23);
  24932. else
  24933. value = -value;
  24934. if (value % 4 != 0)
  24935. {
  24936. as_bad_where (fixP->fx_file, fixP->fx_line,
  24937. _("offset not a multiple of 4"));
  24938. break;
  24939. }
  24940. value /= 4;
  24941. if (value > 0xff)
  24942. {
  24943. as_bad_where (fixP->fx_file, fixP->fx_line,
  24944. _("offset out of range"));
  24945. break;
  24946. }
  24947. newval &= ~0xff;
  24948. }
  24949. else if ((newval & 0x000f0000) == 0x000f0000)
  24950. {
  24951. /* PC-relative, 12-bit offset. */
  24952. if ((offsetT) value >= 0)
  24953. newval |= (1 << 23);
  24954. else
  24955. value = -value;
  24956. if (value > 0xfff)
  24957. {
  24958. as_bad_where (fixP->fx_file, fixP->fx_line,
  24959. _("offset out of range"));
  24960. break;
  24961. }
  24962. newval &= ~0xfff;
  24963. }
  24964. else if ((newval & 0x00000100) == 0x00000100)
  24965. {
  24966. /* Writeback: 8-bit, +/- offset. */
  24967. if ((offsetT) value >= 0)
  24968. newval |= (1 << 9);
  24969. else
  24970. value = -value;
  24971. if (value > 0xff)
  24972. {
  24973. as_bad_where (fixP->fx_file, fixP->fx_line,
  24974. _("offset out of range"));
  24975. break;
  24976. }
  24977. newval &= ~0xff;
  24978. }
  24979. else if ((newval & 0x00000f00) == 0x00000e00)
  24980. {
  24981. /* T-instruction: positive 8-bit offset. */
  24982. if (value > 0xff)
  24983. {
  24984. as_bad_where (fixP->fx_file, fixP->fx_line,
  24985. _("offset out of range"));
  24986. break;
  24987. }
  24988. newval &= ~0xff;
  24989. newval |= value;
  24990. }
  24991. else
  24992. {
  24993. /* Positive 12-bit or negative 8-bit offset. */
  24994. unsigned int limit;
  24995. if ((offsetT) value >= 0)
  24996. {
  24997. newval |= (1 << 23);
  24998. limit = 0xfff;
  24999. }
  25000. else
  25001. {
  25002. value = -value;
  25003. limit = 0xff;
  25004. }
  25005. if (value > limit)
  25006. {
  25007. as_bad_where (fixP->fx_file, fixP->fx_line,
  25008. _("offset out of range"));
  25009. break;
  25010. }
  25011. newval &= ~limit;
  25012. }
  25013. newval |= value;
  25014. md_number_to_chars (buf, (newval >> 16) & 0xffff, THUMB_SIZE);
  25015. md_number_to_chars (buf + THUMB_SIZE, newval & 0xffff, THUMB_SIZE);
  25016. break;
  25017. case BFD_RELOC_ARM_SHIFT_IMM:
  25018. newval = md_chars_to_number (buf, INSN_SIZE);
  25019. if (value > 32
  25020. || (value == 32
  25021. && (((newval & 0x60) == 0) || (newval & 0x60) == 0x60)))
  25022. {
  25023. as_bad_where (fixP->fx_file, fixP->fx_line,
  25024. _("shift expression is too large"));
  25025. break;
  25026. }
  25027. if (value == 0)
  25028. /* Shifts of zero must be done as lsl. */
  25029. newval &= ~0x60;
  25030. else if (value == 32)
  25031. value = 0;
  25032. newval &= 0xfffff07f;
  25033. newval |= (value & 0x1f) << 7;
  25034. md_number_to_chars (buf, newval, INSN_SIZE);
  25035. break;
  25036. case BFD_RELOC_ARM_T32_IMMEDIATE:
  25037. case BFD_RELOC_ARM_T32_ADD_IMM:
  25038. case BFD_RELOC_ARM_T32_IMM12:
  25039. case BFD_RELOC_ARM_T32_ADD_PC12:
  25040. /* We claim that this fixup has been processed here,
  25041. even if in fact we generate an error because we do
  25042. not have a reloc for it, so tc_gen_reloc will reject it. */
  25043. fixP->fx_done = 1;
  25044. if (fixP->fx_addsy
  25045. && ! S_IS_DEFINED (fixP->fx_addsy))
  25046. {
  25047. as_bad_where (fixP->fx_file, fixP->fx_line,
  25048. _("undefined symbol %s used as an immediate value"),
  25049. S_GET_NAME (fixP->fx_addsy));
  25050. break;
  25051. }
  25052. newval = md_chars_to_number (buf, THUMB_SIZE);
  25053. newval <<= 16;
  25054. newval |= md_chars_to_number (buf+2, THUMB_SIZE);
  25055. newimm = FAIL;
  25056. if ((fixP->fx_r_type == BFD_RELOC_ARM_T32_IMMEDIATE
  25057. /* ARMv8-M Baseline MOV will reach here, but it doesn't support
  25058. Thumb2 modified immediate encoding (T2). */
  25059. && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2))
  25060. || fixP->fx_r_type == BFD_RELOC_ARM_T32_ADD_IMM)
  25061. {
  25062. newimm = encode_thumb32_immediate (value);
  25063. if (newimm == (unsigned int) FAIL)
  25064. newimm = thumb32_negate_data_op (&newval, value);
  25065. }
  25066. if (newimm == (unsigned int) FAIL)
  25067. {
  25068. if (fixP->fx_r_type != BFD_RELOC_ARM_T32_IMMEDIATE)
  25069. {
  25070. /* Turn add/sum into addw/subw. */
  25071. if (fixP->fx_r_type == BFD_RELOC_ARM_T32_ADD_IMM)
  25072. newval = (newval & 0xfeffffff) | 0x02000000;
  25073. /* No flat 12-bit imm encoding for addsw/subsw. */
  25074. if ((newval & 0x00100000) == 0)
  25075. {
  25076. /* 12 bit immediate for addw/subw. */
  25077. if ((offsetT) value < 0)
  25078. {
  25079. value = -value;
  25080. newval ^= 0x00a00000;
  25081. }
  25082. if (value > 0xfff)
  25083. newimm = (unsigned int) FAIL;
  25084. else
  25085. newimm = value;
  25086. }
  25087. }
  25088. else
  25089. {
  25090. /* MOV accepts both Thumb2 modified immediate (T2 encoding) and
  25091. UINT16 (T3 encoding), MOVW only accepts UINT16. When
  25092. disassembling, MOV is preferred when there is no encoding
  25093. overlap. */
  25094. if (((newval >> T2_DATA_OP_SHIFT) & 0xf) == T2_OPCODE_ORR
  25095. /* NOTE: MOV uses the ORR opcode in Thumb 2 mode
  25096. but with the Rn field [19:16] set to 1111. */
  25097. && (((newval >> 16) & 0xf) == 0xf)
  25098. && ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2_v8m)
  25099. && !((newval >> T2_SBIT_SHIFT) & 0x1)
  25100. && value <= 0xffff)
  25101. {
  25102. /* Toggle bit[25] to change encoding from T2 to T3. */
  25103. newval ^= 1 << 25;
  25104. /* Clear bits[19:16]. */
  25105. newval &= 0xfff0ffff;
  25106. /* Encoding high 4bits imm. Code below will encode the
  25107. remaining low 12bits. */
  25108. newval |= (value & 0x0000f000) << 4;
  25109. newimm = value & 0x00000fff;
  25110. }
  25111. }
  25112. }
  25113. if (newimm == (unsigned int)FAIL)
  25114. {
  25115. as_bad_where (fixP->fx_file, fixP->fx_line,
  25116. _("invalid constant (%lx) after fixup"),
  25117. (unsigned long) value);
  25118. break;
  25119. }
  25120. newval |= (newimm & 0x800) << 15;
  25121. newval |= (newimm & 0x700) << 4;
  25122. newval |= (newimm & 0x0ff);
  25123. md_number_to_chars (buf, (valueT) ((newval >> 16) & 0xffff), THUMB_SIZE);
  25124. md_number_to_chars (buf+2, (valueT) (newval & 0xffff), THUMB_SIZE);
  25125. break;
  25126. case BFD_RELOC_ARM_SMC:
  25127. if (value > 0xf)
  25128. as_bad_where (fixP->fx_file, fixP->fx_line,
  25129. _("invalid smc expression"));
  25130. newval = md_chars_to_number (buf, INSN_SIZE);
  25131. newval |= (value & 0xf);
  25132. md_number_to_chars (buf, newval, INSN_SIZE);
  25133. break;
  25134. case BFD_RELOC_ARM_HVC:
  25135. if (value > 0xffff)
  25136. as_bad_where (fixP->fx_file, fixP->fx_line,
  25137. _("invalid hvc expression"));
  25138. newval = md_chars_to_number (buf, INSN_SIZE);
  25139. newval |= (value & 0xf) | ((value & 0xfff0) << 4);
  25140. md_number_to_chars (buf, newval, INSN_SIZE);
  25141. break;
  25142. case BFD_RELOC_ARM_SWI:
  25143. if (fixP->tc_fix_data != 0)
  25144. {
  25145. if (value > 0xff)
  25146. as_bad_where (fixP->fx_file, fixP->fx_line,
  25147. _("invalid swi expression"));
  25148. newval = md_chars_to_number (buf, THUMB_SIZE);
  25149. newval |= value;
  25150. md_number_to_chars (buf, newval, THUMB_SIZE);
  25151. }
  25152. else
  25153. {
  25154. if (value > 0x00ffffff)
  25155. as_bad_where (fixP->fx_file, fixP->fx_line,
  25156. _("invalid swi expression"));
  25157. newval = md_chars_to_number (buf, INSN_SIZE);
  25158. newval |= value;
  25159. md_number_to_chars (buf, newval, INSN_SIZE);
  25160. }
  25161. break;
  25162. case BFD_RELOC_ARM_MULTI:
  25163. if (value > 0xffff)
  25164. as_bad_where (fixP->fx_file, fixP->fx_line,
  25165. _("invalid expression in load/store multiple"));
  25166. newval = value | md_chars_to_number (buf, INSN_SIZE);
  25167. md_number_to_chars (buf, newval, INSN_SIZE);
  25168. break;
  25169. #ifdef OBJ_ELF
  25170. case BFD_RELOC_ARM_PCREL_CALL:
  25171. if (ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t)
  25172. && fixP->fx_addsy
  25173. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  25174. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  25175. && THUMB_IS_FUNC (fixP->fx_addsy))
  25176. /* Flip the bl to blx. This is a simple flip
  25177. bit here because we generate PCREL_CALL for
  25178. unconditional bls. */
  25179. {
  25180. newval = md_chars_to_number (buf, INSN_SIZE);
  25181. newval = newval | 0x10000000;
  25182. md_number_to_chars (buf, newval, INSN_SIZE);
  25183. temp = 1;
  25184. fixP->fx_done = 1;
  25185. }
  25186. else
  25187. temp = 3;
  25188. goto arm_branch_common;
  25189. case BFD_RELOC_ARM_PCREL_JUMP:
  25190. if (ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t)
  25191. && fixP->fx_addsy
  25192. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  25193. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  25194. && THUMB_IS_FUNC (fixP->fx_addsy))
  25195. {
  25196. /* This would map to a bl<cond>, b<cond>,
  25197. b<always> to a Thumb function. We
  25198. need to force a relocation for this particular
  25199. case. */
  25200. newval = md_chars_to_number (buf, INSN_SIZE);
  25201. fixP->fx_done = 0;
  25202. }
  25203. /* Fall through. */
  25204. case BFD_RELOC_ARM_PLT32:
  25205. #endif
  25206. case BFD_RELOC_ARM_PCREL_BRANCH:
  25207. temp = 3;
  25208. goto arm_branch_common;
  25209. case BFD_RELOC_ARM_PCREL_BLX:
  25210. temp = 1;
  25211. if (ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t)
  25212. && fixP->fx_addsy
  25213. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  25214. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  25215. && ARM_IS_FUNC (fixP->fx_addsy))
  25216. {
  25217. /* Flip the blx to a bl and warn. */
  25218. const char *name = S_GET_NAME (fixP->fx_addsy);
  25219. newval = 0xeb000000;
  25220. as_warn_where (fixP->fx_file, fixP->fx_line,
  25221. _("blx to '%s' an ARM ISA state function changed to bl"),
  25222. name);
  25223. md_number_to_chars (buf, newval, INSN_SIZE);
  25224. temp = 3;
  25225. fixP->fx_done = 1;
  25226. }
  25227. #ifdef OBJ_ELF
  25228. if (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4)
  25229. fixP->fx_r_type = BFD_RELOC_ARM_PCREL_CALL;
  25230. #endif
  25231. arm_branch_common:
  25232. /* We are going to store value (shifted right by two) in the
  25233. instruction, in a 24 bit, signed field. Bits 26 through 32 either
  25234. all clear or all set and bit 0 must be clear. For B/BL bit 1 must
  25235. also be clear. */
  25236. if (value & temp)
  25237. as_bad_where (fixP->fx_file, fixP->fx_line,
  25238. _("misaligned branch destination"));
  25239. if ((value & 0xfe000000) != 0
  25240. && (value & 0xfe000000) != 0xfe000000)
  25241. as_bad_where (fixP->fx_file, fixP->fx_line, BAD_RANGE);
  25242. if (fixP->fx_done || !seg->use_rela_p)
  25243. {
  25244. newval = md_chars_to_number (buf, INSN_SIZE);
  25245. newval |= (value >> 2) & 0x00ffffff;
  25246. /* Set the H bit on BLX instructions. */
  25247. if (temp == 1)
  25248. {
  25249. if (value & 2)
  25250. newval |= 0x01000000;
  25251. else
  25252. newval &= ~0x01000000;
  25253. }
  25254. md_number_to_chars (buf, newval, INSN_SIZE);
  25255. }
  25256. break;
  25257. case BFD_RELOC_THUMB_PCREL_BRANCH7: /* CBZ */
  25258. /* CBZ can only branch forward. */
  25259. /* Attempts to use CBZ to branch to the next instruction
  25260. (which, strictly speaking, are prohibited) will be turned into
  25261. no-ops.
  25262. FIXME: It may be better to remove the instruction completely and
  25263. perform relaxation. */
  25264. if ((offsetT) value == -2)
  25265. {
  25266. newval = md_chars_to_number (buf, THUMB_SIZE);
  25267. newval = 0xbf00; /* NOP encoding T1 */
  25268. md_number_to_chars (buf, newval, THUMB_SIZE);
  25269. }
  25270. else
  25271. {
  25272. if (value & ~0x7e)
  25273. as_bad_where (fixP->fx_file, fixP->fx_line, BAD_RANGE);
  25274. if (fixP->fx_done || !seg->use_rela_p)
  25275. {
  25276. newval = md_chars_to_number (buf, THUMB_SIZE);
  25277. newval |= ((value & 0x3e) << 2) | ((value & 0x40) << 3);
  25278. md_number_to_chars (buf, newval, THUMB_SIZE);
  25279. }
  25280. }
  25281. break;
  25282. case BFD_RELOC_THUMB_PCREL_BRANCH9: /* Conditional branch. */
  25283. if (out_of_range_p (value, 8))
  25284. as_bad_where (fixP->fx_file, fixP->fx_line, BAD_RANGE);
  25285. if (fixP->fx_done || !seg->use_rela_p)
  25286. {
  25287. newval = md_chars_to_number (buf, THUMB_SIZE);
  25288. newval |= (value & 0x1ff) >> 1;
  25289. md_number_to_chars (buf, newval, THUMB_SIZE);
  25290. }
  25291. break;
  25292. case BFD_RELOC_THUMB_PCREL_BRANCH12: /* Unconditional branch. */
  25293. if (out_of_range_p (value, 11))
  25294. as_bad_where (fixP->fx_file, fixP->fx_line, BAD_RANGE);
  25295. if (fixP->fx_done || !seg->use_rela_p)
  25296. {
  25297. newval = md_chars_to_number (buf, THUMB_SIZE);
  25298. newval |= (value & 0xfff) >> 1;
  25299. md_number_to_chars (buf, newval, THUMB_SIZE);
  25300. }
  25301. break;
  25302. /* This relocation is misnamed, it should be BRANCH21. */
  25303. case BFD_RELOC_THUMB_PCREL_BRANCH20:
  25304. if (fixP->fx_addsy
  25305. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  25306. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  25307. && ARM_IS_FUNC (fixP->fx_addsy)
  25308. && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t))
  25309. {
  25310. /* Force a relocation for a branch 20 bits wide. */
  25311. fixP->fx_done = 0;
  25312. }
  25313. if (out_of_range_p (value, 20))
  25314. as_bad_where (fixP->fx_file, fixP->fx_line,
  25315. _("conditional branch out of range"));
  25316. if (fixP->fx_done || !seg->use_rela_p)
  25317. {
  25318. offsetT newval2;
  25319. addressT S, J1, J2, lo, hi;
  25320. S = (value & 0x00100000) >> 20;
  25321. J2 = (value & 0x00080000) >> 19;
  25322. J1 = (value & 0x00040000) >> 18;
  25323. hi = (value & 0x0003f000) >> 12;
  25324. lo = (value & 0x00000ffe) >> 1;
  25325. newval = md_chars_to_number (buf, THUMB_SIZE);
  25326. newval2 = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
  25327. newval |= (S << 10) | hi;
  25328. newval2 |= (J1 << 13) | (J2 << 11) | lo;
  25329. md_number_to_chars (buf, newval, THUMB_SIZE);
  25330. md_number_to_chars (buf + THUMB_SIZE, newval2, THUMB_SIZE);
  25331. }
  25332. break;
  25333. case BFD_RELOC_THUMB_PCREL_BLX:
  25334. /* If there is a blx from a thumb state function to
  25335. another thumb function flip this to a bl and warn
  25336. about it. */
  25337. if (fixP->fx_addsy
  25338. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  25339. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  25340. && THUMB_IS_FUNC (fixP->fx_addsy))
  25341. {
  25342. const char *name = S_GET_NAME (fixP->fx_addsy);
  25343. as_warn_where (fixP->fx_file, fixP->fx_line,
  25344. _("blx to Thumb func '%s' from Thumb ISA state changed to bl"),
  25345. name);
  25346. newval = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
  25347. newval = newval | 0x1000;
  25348. md_number_to_chars (buf+THUMB_SIZE, newval, THUMB_SIZE);
  25349. fixP->fx_r_type = BFD_RELOC_THUMB_PCREL_BRANCH23;
  25350. fixP->fx_done = 1;
  25351. }
  25352. goto thumb_bl_common;
  25353. case BFD_RELOC_THUMB_PCREL_BRANCH23:
  25354. /* A bl from Thumb state ISA to an internal ARM state function
  25355. is converted to a blx. */
  25356. if (fixP->fx_addsy
  25357. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  25358. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  25359. && ARM_IS_FUNC (fixP->fx_addsy)
  25360. && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t))
  25361. {
  25362. newval = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
  25363. newval = newval & ~0x1000;
  25364. md_number_to_chars (buf+THUMB_SIZE, newval, THUMB_SIZE);
  25365. fixP->fx_r_type = BFD_RELOC_THUMB_PCREL_BLX;
  25366. fixP->fx_done = 1;
  25367. }
  25368. thumb_bl_common:
  25369. if (fixP->fx_r_type == BFD_RELOC_THUMB_PCREL_BLX)
  25370. /* For a BLX instruction, make sure that the relocation is rounded up
  25371. to a word boundary. This follows the semantics of the instruction
  25372. which specifies that bit 1 of the target address will come from bit
  25373. 1 of the base address. */
  25374. value = (value + 3) & ~ 3;
  25375. #ifdef OBJ_ELF
  25376. if (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4
  25377. && fixP->fx_r_type == BFD_RELOC_THUMB_PCREL_BLX)
  25378. fixP->fx_r_type = BFD_RELOC_THUMB_PCREL_BRANCH23;
  25379. #endif
  25380. if (out_of_range_p (value, 22))
  25381. {
  25382. if (!(ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v6t2)))
  25383. as_bad_where (fixP->fx_file, fixP->fx_line, BAD_RANGE);
  25384. else if (out_of_range_p (value, 24))
  25385. as_bad_where (fixP->fx_file, fixP->fx_line,
  25386. _("Thumb2 branch out of range"));
  25387. }
  25388. if (fixP->fx_done || !seg->use_rela_p)
  25389. encode_thumb2_b_bl_offset (buf, value);
  25390. break;
  25391. case BFD_RELOC_THUMB_PCREL_BRANCH25:
  25392. if (out_of_range_p (value, 24))
  25393. as_bad_where (fixP->fx_file, fixP->fx_line, BAD_RANGE);
  25394. if (fixP->fx_done || !seg->use_rela_p)
  25395. encode_thumb2_b_bl_offset (buf, value);
  25396. break;
  25397. case BFD_RELOC_8:
  25398. if (fixP->fx_done || !seg->use_rela_p)
  25399. *buf = value;
  25400. break;
  25401. case BFD_RELOC_16:
  25402. if (fixP->fx_done || !seg->use_rela_p)
  25403. md_number_to_chars (buf, value, 2);
  25404. break;
  25405. #ifdef OBJ_ELF
  25406. case BFD_RELOC_ARM_TLS_CALL:
  25407. case BFD_RELOC_ARM_THM_TLS_CALL:
  25408. case BFD_RELOC_ARM_TLS_DESCSEQ:
  25409. case BFD_RELOC_ARM_THM_TLS_DESCSEQ:
  25410. case BFD_RELOC_ARM_TLS_GOTDESC:
  25411. case BFD_RELOC_ARM_TLS_GD32:
  25412. case BFD_RELOC_ARM_TLS_LE32:
  25413. case BFD_RELOC_ARM_TLS_IE32:
  25414. case BFD_RELOC_ARM_TLS_LDM32:
  25415. case BFD_RELOC_ARM_TLS_LDO32:
  25416. S_SET_THREAD_LOCAL (fixP->fx_addsy);
  25417. break;
  25418. /* Same handling as above, but with the arm_fdpic guard. */
  25419. case BFD_RELOC_ARM_TLS_GD32_FDPIC:
  25420. case BFD_RELOC_ARM_TLS_IE32_FDPIC:
  25421. case BFD_RELOC_ARM_TLS_LDM32_FDPIC:
  25422. if (arm_fdpic)
  25423. {
  25424. S_SET_THREAD_LOCAL (fixP->fx_addsy);
  25425. }
  25426. else
  25427. {
  25428. as_bad_where (fixP->fx_file, fixP->fx_line,
  25429. _("Relocation supported only in FDPIC mode"));
  25430. }
  25431. break;
  25432. case BFD_RELOC_ARM_GOT32:
  25433. case BFD_RELOC_ARM_GOTOFF:
  25434. break;
  25435. case BFD_RELOC_ARM_GOT_PREL:
  25436. if (fixP->fx_done || !seg->use_rela_p)
  25437. md_number_to_chars (buf, value, 4);
  25438. break;
  25439. case BFD_RELOC_ARM_TARGET2:
  25440. /* TARGET2 is not partial-inplace, so we need to write the
  25441. addend here for REL targets, because it won't be written out
  25442. during reloc processing later. */
  25443. if (fixP->fx_done || !seg->use_rela_p)
  25444. md_number_to_chars (buf, fixP->fx_offset, 4);
  25445. break;
  25446. /* Relocations for FDPIC. */
  25447. case BFD_RELOC_ARM_GOTFUNCDESC:
  25448. case BFD_RELOC_ARM_GOTOFFFUNCDESC:
  25449. case BFD_RELOC_ARM_FUNCDESC:
  25450. if (arm_fdpic)
  25451. {
  25452. if (fixP->fx_done || !seg->use_rela_p)
  25453. md_number_to_chars (buf, 0, 4);
  25454. }
  25455. else
  25456. {
  25457. as_bad_where (fixP->fx_file, fixP->fx_line,
  25458. _("Relocation supported only in FDPIC mode"));
  25459. }
  25460. break;
  25461. #endif
  25462. case BFD_RELOC_RVA:
  25463. case BFD_RELOC_32:
  25464. case BFD_RELOC_ARM_TARGET1:
  25465. case BFD_RELOC_ARM_ROSEGREL32:
  25466. case BFD_RELOC_ARM_SBREL32:
  25467. case BFD_RELOC_32_PCREL:
  25468. #ifdef TE_PE
  25469. case BFD_RELOC_32_SECREL:
  25470. #endif
  25471. if (fixP->fx_done || !seg->use_rela_p)
  25472. #ifdef TE_WINCE
  25473. /* For WinCE we only do this for pcrel fixups. */
  25474. if (fixP->fx_done || fixP->fx_pcrel)
  25475. #endif
  25476. md_number_to_chars (buf, value, 4);
  25477. break;
  25478. #ifdef OBJ_ELF
  25479. case BFD_RELOC_ARM_PREL31:
  25480. if (fixP->fx_done || !seg->use_rela_p)
  25481. {
  25482. newval = md_chars_to_number (buf, 4) & 0x80000000;
  25483. if ((value ^ (value >> 1)) & 0x40000000)
  25484. {
  25485. as_bad_where (fixP->fx_file, fixP->fx_line,
  25486. _("rel31 relocation overflow"));
  25487. }
  25488. newval |= value & 0x7fffffff;
  25489. md_number_to_chars (buf, newval, 4);
  25490. }
  25491. break;
  25492. #endif
  25493. case BFD_RELOC_ARM_CP_OFF_IMM:
  25494. case BFD_RELOC_ARM_T32_CP_OFF_IMM:
  25495. case BFD_RELOC_ARM_T32_VLDR_VSTR_OFF_IMM:
  25496. if (fixP->fx_r_type == BFD_RELOC_ARM_CP_OFF_IMM)
  25497. newval = md_chars_to_number (buf, INSN_SIZE);
  25498. else
  25499. newval = get_thumb32_insn (buf);
  25500. if ((newval & 0x0f200f00) == 0x0d000900)
  25501. {
  25502. /* This is a fp16 vstr/vldr. The immediate offset in the mnemonic
  25503. has permitted values that are multiples of 2, in the range -510
  25504. to 510. */
  25505. if (value + 510 > 510 + 510 || (value & 1))
  25506. as_bad_where (fixP->fx_file, fixP->fx_line,
  25507. _("co-processor offset out of range"));
  25508. }
  25509. else if ((newval & 0xfe001f80) == 0xec000f80)
  25510. {
  25511. if (value + 511 > 512 + 511 || (value & 3))
  25512. as_bad_where (fixP->fx_file, fixP->fx_line,
  25513. _("co-processor offset out of range"));
  25514. }
  25515. else if (value + 1023 > 1023 + 1023 || (value & 3))
  25516. as_bad_where (fixP->fx_file, fixP->fx_line,
  25517. _("co-processor offset out of range"));
  25518. cp_off_common:
  25519. sign = (offsetT) value > 0;
  25520. if ((offsetT) value < 0)
  25521. value = -value;
  25522. if (fixP->fx_r_type == BFD_RELOC_ARM_CP_OFF_IMM
  25523. || fixP->fx_r_type == BFD_RELOC_ARM_CP_OFF_IMM_S2)
  25524. newval = md_chars_to_number (buf, INSN_SIZE);
  25525. else
  25526. newval = get_thumb32_insn (buf);
  25527. if (value == 0)
  25528. {
  25529. if (fixP->fx_r_type == BFD_RELOC_ARM_T32_VLDR_VSTR_OFF_IMM)
  25530. newval &= 0xffffff80;
  25531. else
  25532. newval &= 0xffffff00;
  25533. }
  25534. else
  25535. {
  25536. if (fixP->fx_r_type == BFD_RELOC_ARM_T32_VLDR_VSTR_OFF_IMM)
  25537. newval &= 0xff7fff80;
  25538. else
  25539. newval &= 0xff7fff00;
  25540. if ((newval & 0x0f200f00) == 0x0d000900)
  25541. {
  25542. /* This is a fp16 vstr/vldr.
  25543. It requires the immediate offset in the instruction is shifted
  25544. left by 1 to be a half-word offset.
  25545. Here, left shift by 1 first, and later right shift by 2
  25546. should get the right offset. */
  25547. value <<= 1;
  25548. }
  25549. newval |= (value >> 2) | (sign ? INDEX_UP : 0);
  25550. }
  25551. if (fixP->fx_r_type == BFD_RELOC_ARM_CP_OFF_IMM
  25552. || fixP->fx_r_type == BFD_RELOC_ARM_CP_OFF_IMM_S2)
  25553. md_number_to_chars (buf, newval, INSN_SIZE);
  25554. else
  25555. put_thumb32_insn (buf, newval);
  25556. break;
  25557. case BFD_RELOC_ARM_CP_OFF_IMM_S2:
  25558. case BFD_RELOC_ARM_T32_CP_OFF_IMM_S2:
  25559. if (value + 255 > 255 + 255)
  25560. as_bad_where (fixP->fx_file, fixP->fx_line,
  25561. _("co-processor offset out of range"));
  25562. value *= 4;
  25563. goto cp_off_common;
  25564. case BFD_RELOC_ARM_THUMB_OFFSET:
  25565. newval = md_chars_to_number (buf, THUMB_SIZE);
  25566. /* Exactly what ranges, and where the offset is inserted depends
  25567. on the type of instruction, we can establish this from the
  25568. top 4 bits. */
  25569. switch (newval >> 12)
  25570. {
  25571. case 4: /* PC load. */
  25572. /* Thumb PC loads are somewhat odd, bit 1 of the PC is
  25573. forced to zero for these loads; md_pcrel_from has already
  25574. compensated for this. */
  25575. if (value & 3)
  25576. as_bad_where (fixP->fx_file, fixP->fx_line,
  25577. _("invalid offset, target not word aligned (0x%08lX)"),
  25578. (((unsigned long) fixP->fx_frag->fr_address
  25579. + (unsigned long) fixP->fx_where) & ~3)
  25580. + (unsigned long) value);
  25581. else if (get_recorded_alignment (seg) < 2)
  25582. as_warn_where (fixP->fx_file, fixP->fx_line,
  25583. _("section does not have enough alignment to ensure safe PC-relative loads"));
  25584. if (value & ~0x3fc)
  25585. as_bad_where (fixP->fx_file, fixP->fx_line,
  25586. _("invalid offset, value too big (0x%08lX)"),
  25587. (long) value);
  25588. newval |= value >> 2;
  25589. break;
  25590. case 9: /* SP load/store. */
  25591. if (value & ~0x3fc)
  25592. as_bad_where (fixP->fx_file, fixP->fx_line,
  25593. _("invalid offset, value too big (0x%08lX)"),
  25594. (long) value);
  25595. newval |= value >> 2;
  25596. break;
  25597. case 6: /* Word load/store. */
  25598. if (value & ~0x7c)
  25599. as_bad_where (fixP->fx_file, fixP->fx_line,
  25600. _("invalid offset, value too big (0x%08lX)"),
  25601. (long) value);
  25602. newval |= value << 4; /* 6 - 2. */
  25603. break;
  25604. case 7: /* Byte load/store. */
  25605. if (value & ~0x1f)
  25606. as_bad_where (fixP->fx_file, fixP->fx_line,
  25607. _("invalid offset, value too big (0x%08lX)"),
  25608. (long) value);
  25609. newval |= value << 6;
  25610. break;
  25611. case 8: /* Halfword load/store. */
  25612. if (value & ~0x3e)
  25613. as_bad_where (fixP->fx_file, fixP->fx_line,
  25614. _("invalid offset, value too big (0x%08lX)"),
  25615. (long) value);
  25616. newval |= value << 5; /* 6 - 1. */
  25617. break;
  25618. default:
  25619. as_bad_where (fixP->fx_file, fixP->fx_line,
  25620. "Unable to process relocation for thumb opcode: %lx",
  25621. (unsigned long) newval);
  25622. break;
  25623. }
  25624. md_number_to_chars (buf, newval, THUMB_SIZE);
  25625. break;
  25626. case BFD_RELOC_ARM_THUMB_ADD:
  25627. /* This is a complicated relocation, since we use it for all of
  25628. the following immediate relocations:
  25629. 3bit ADD/SUB
  25630. 8bit ADD/SUB
  25631. 9bit ADD/SUB SP word-aligned
  25632. 10bit ADD PC/SP word-aligned
  25633. The type of instruction being processed is encoded in the
  25634. instruction field:
  25635. 0x8000 SUB
  25636. 0x00F0 Rd
  25637. 0x000F Rs
  25638. */
  25639. newval = md_chars_to_number (buf, THUMB_SIZE);
  25640. {
  25641. int rd = (newval >> 4) & 0xf;
  25642. int rs = newval & 0xf;
  25643. int subtract = !!(newval & 0x8000);
  25644. /* Check for HI regs, only very restricted cases allowed:
  25645. Adjusting SP, and using PC or SP to get an address. */
  25646. if ((rd > 7 && (rd != REG_SP || rs != REG_SP))
  25647. || (rs > 7 && rs != REG_SP && rs != REG_PC))
  25648. as_bad_where (fixP->fx_file, fixP->fx_line,
  25649. _("invalid Hi register with immediate"));
  25650. /* If value is negative, choose the opposite instruction. */
  25651. if ((offsetT) value < 0)
  25652. {
  25653. value = -value;
  25654. subtract = !subtract;
  25655. if ((offsetT) value < 0)
  25656. as_bad_where (fixP->fx_file, fixP->fx_line,
  25657. _("immediate value out of range"));
  25658. }
  25659. if (rd == REG_SP)
  25660. {
  25661. if (value & ~0x1fc)
  25662. as_bad_where (fixP->fx_file, fixP->fx_line,
  25663. _("invalid immediate for stack address calculation"));
  25664. newval = subtract ? T_OPCODE_SUB_ST : T_OPCODE_ADD_ST;
  25665. newval |= value >> 2;
  25666. }
  25667. else if (rs == REG_PC || rs == REG_SP)
  25668. {
  25669. /* PR gas/18541. If the addition is for a defined symbol
  25670. within range of an ADR instruction then accept it. */
  25671. if (subtract
  25672. && value == 4
  25673. && fixP->fx_addsy != NULL)
  25674. {
  25675. subtract = 0;
  25676. if (! S_IS_DEFINED (fixP->fx_addsy)
  25677. || S_GET_SEGMENT (fixP->fx_addsy) != seg
  25678. || S_IS_WEAK (fixP->fx_addsy))
  25679. {
  25680. as_bad_where (fixP->fx_file, fixP->fx_line,
  25681. _("address calculation needs a strongly defined nearby symbol"));
  25682. }
  25683. else
  25684. {
  25685. offsetT v = fixP->fx_where + fixP->fx_frag->fr_address;
  25686. /* Round up to the next 4-byte boundary. */
  25687. if (v & 3)
  25688. v = (v + 3) & ~ 3;
  25689. else
  25690. v += 4;
  25691. v = S_GET_VALUE (fixP->fx_addsy) - v;
  25692. if (v & ~0x3fc)
  25693. {
  25694. as_bad_where (fixP->fx_file, fixP->fx_line,
  25695. _("symbol too far away"));
  25696. }
  25697. else
  25698. {
  25699. fixP->fx_done = 1;
  25700. value = v;
  25701. }
  25702. }
  25703. }
  25704. if (subtract || value & ~0x3fc)
  25705. as_bad_where (fixP->fx_file, fixP->fx_line,
  25706. _("invalid immediate for address calculation (value = 0x%08lX)"),
  25707. (unsigned long) (subtract ? - value : value));
  25708. newval = (rs == REG_PC ? T_OPCODE_ADD_PC : T_OPCODE_ADD_SP);
  25709. newval |= rd << 8;
  25710. newval |= value >> 2;
  25711. }
  25712. else if (rs == rd)
  25713. {
  25714. if (value & ~0xff)
  25715. as_bad_where (fixP->fx_file, fixP->fx_line,
  25716. _("immediate value out of range"));
  25717. newval = subtract ? T_OPCODE_SUB_I8 : T_OPCODE_ADD_I8;
  25718. newval |= (rd << 8) | value;
  25719. }
  25720. else
  25721. {
  25722. if (value & ~0x7)
  25723. as_bad_where (fixP->fx_file, fixP->fx_line,
  25724. _("immediate value out of range"));
  25725. newval = subtract ? T_OPCODE_SUB_I3 : T_OPCODE_ADD_I3;
  25726. newval |= rd | (rs << 3) | (value << 6);
  25727. }
  25728. }
  25729. md_number_to_chars (buf, newval, THUMB_SIZE);
  25730. break;
  25731. case BFD_RELOC_ARM_THUMB_IMM:
  25732. newval = md_chars_to_number (buf, THUMB_SIZE);
  25733. if (value > 255)
  25734. as_bad_where (fixP->fx_file, fixP->fx_line,
  25735. _("invalid immediate: %ld is out of range"),
  25736. (long) value);
  25737. newval |= value;
  25738. md_number_to_chars (buf, newval, THUMB_SIZE);
  25739. break;
  25740. case BFD_RELOC_ARM_THUMB_SHIFT:
  25741. /* 5bit shift value (0..32). LSL cannot take 32. */
  25742. newval = md_chars_to_number (buf, THUMB_SIZE) & 0xf83f;
  25743. temp = newval & 0xf800;
  25744. if (value > 32 || (value == 32 && temp == T_OPCODE_LSL_I))
  25745. as_bad_where (fixP->fx_file, fixP->fx_line,
  25746. _("invalid shift value: %ld"), (long) value);
  25747. /* Shifts of zero must be encoded as LSL. */
  25748. if (value == 0)
  25749. newval = (newval & 0x003f) | T_OPCODE_LSL_I;
  25750. /* Shifts of 32 are encoded as zero. */
  25751. else if (value == 32)
  25752. value = 0;
  25753. newval |= value << 6;
  25754. md_number_to_chars (buf, newval, THUMB_SIZE);
  25755. break;
  25756. case BFD_RELOC_VTABLE_INHERIT:
  25757. case BFD_RELOC_VTABLE_ENTRY:
  25758. fixP->fx_done = 0;
  25759. return;
  25760. case BFD_RELOC_ARM_MOVW:
  25761. case BFD_RELOC_ARM_MOVT:
  25762. case BFD_RELOC_ARM_THUMB_MOVW:
  25763. case BFD_RELOC_ARM_THUMB_MOVT:
  25764. if (fixP->fx_done || !seg->use_rela_p)
  25765. {
  25766. /* REL format relocations are limited to a 16-bit addend. */
  25767. if (!fixP->fx_done)
  25768. {
  25769. if (value + 0x8000 > 0x7fff + 0x8000)
  25770. as_bad_where (fixP->fx_file, fixP->fx_line,
  25771. _("offset out of range"));
  25772. }
  25773. else if (fixP->fx_r_type == BFD_RELOC_ARM_MOVT
  25774. || fixP->fx_r_type == BFD_RELOC_ARM_THUMB_MOVT)
  25775. {
  25776. value >>= 16;
  25777. }
  25778. if (fixP->fx_r_type == BFD_RELOC_ARM_THUMB_MOVW
  25779. || fixP->fx_r_type == BFD_RELOC_ARM_THUMB_MOVT)
  25780. {
  25781. newval = get_thumb32_insn (buf);
  25782. newval &= 0xfbf08f00;
  25783. newval |= (value & 0xf000) << 4;
  25784. newval |= (value & 0x0800) << 15;
  25785. newval |= (value & 0x0700) << 4;
  25786. newval |= (value & 0x00ff);
  25787. put_thumb32_insn (buf, newval);
  25788. }
  25789. else
  25790. {
  25791. newval = md_chars_to_number (buf, 4);
  25792. newval &= 0xfff0f000;
  25793. newval |= value & 0x0fff;
  25794. newval |= (value & 0xf000) << 4;
  25795. md_number_to_chars (buf, newval, 4);
  25796. }
  25797. }
  25798. return;
  25799. case BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC:
  25800. case BFD_RELOC_ARM_THUMB_ALU_ABS_G1_NC:
  25801. case BFD_RELOC_ARM_THUMB_ALU_ABS_G2_NC:
  25802. case BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC:
  25803. gas_assert (!fixP->fx_done);
  25804. {
  25805. bfd_vma insn;
  25806. bool is_mov;
  25807. bfd_vma encoded_addend = value;
  25808. /* Check that addend can be encoded in instruction. */
  25809. if (!seg->use_rela_p && value > 255)
  25810. as_bad_where (fixP->fx_file, fixP->fx_line,
  25811. _("the offset 0x%08lX is not representable"),
  25812. (unsigned long) encoded_addend);
  25813. /* Extract the instruction. */
  25814. insn = md_chars_to_number (buf, THUMB_SIZE);
  25815. is_mov = (insn & 0xf800) == 0x2000;
  25816. /* Encode insn. */
  25817. if (is_mov)
  25818. {
  25819. if (!seg->use_rela_p)
  25820. insn |= encoded_addend;
  25821. }
  25822. else
  25823. {
  25824. int rd, rs;
  25825. /* Extract the instruction. */
  25826. /* Encoding is the following
  25827. 0x8000 SUB
  25828. 0x00F0 Rd
  25829. 0x000F Rs
  25830. */
  25831. /* The following conditions must be true :
  25832. - ADD
  25833. - Rd == Rs
  25834. - Rd <= 7
  25835. */
  25836. rd = (insn >> 4) & 0xf;
  25837. rs = insn & 0xf;
  25838. if ((insn & 0x8000) || (rd != rs) || rd > 7)
  25839. as_bad_where (fixP->fx_file, fixP->fx_line,
  25840. _("Unable to process relocation for thumb opcode: %lx"),
  25841. (unsigned long) insn);
  25842. /* Encode as ADD immediate8 thumb 1 code. */
  25843. insn = 0x3000 | (rd << 8);
  25844. /* Place the encoded addend into the first 8 bits of the
  25845. instruction. */
  25846. if (!seg->use_rela_p)
  25847. insn |= encoded_addend;
  25848. }
  25849. /* Update the instruction. */
  25850. md_number_to_chars (buf, insn, THUMB_SIZE);
  25851. }
  25852. break;
  25853. case BFD_RELOC_ARM_ALU_PC_G0_NC:
  25854. case BFD_RELOC_ARM_ALU_PC_G0:
  25855. case BFD_RELOC_ARM_ALU_PC_G1_NC:
  25856. case BFD_RELOC_ARM_ALU_PC_G1:
  25857. case BFD_RELOC_ARM_ALU_PC_G2:
  25858. case BFD_RELOC_ARM_ALU_SB_G0_NC:
  25859. case BFD_RELOC_ARM_ALU_SB_G0:
  25860. case BFD_RELOC_ARM_ALU_SB_G1_NC:
  25861. case BFD_RELOC_ARM_ALU_SB_G1:
  25862. case BFD_RELOC_ARM_ALU_SB_G2:
  25863. gas_assert (!fixP->fx_done);
  25864. if (!seg->use_rela_p)
  25865. {
  25866. bfd_vma insn;
  25867. bfd_vma encoded_addend;
  25868. bfd_vma addend_abs = llabs ((offsetT) value);
  25869. /* Check that the absolute value of the addend can be
  25870. expressed as an 8-bit constant plus a rotation. */
  25871. encoded_addend = encode_arm_immediate (addend_abs);
  25872. if (encoded_addend == (unsigned int) FAIL)
  25873. as_bad_where (fixP->fx_file, fixP->fx_line,
  25874. _("the offset 0x%08lX is not representable"),
  25875. (unsigned long) addend_abs);
  25876. /* Extract the instruction. */
  25877. insn = md_chars_to_number (buf, INSN_SIZE);
  25878. /* If the addend is positive, use an ADD instruction.
  25879. Otherwise use a SUB. Take care not to destroy the S bit. */
  25880. insn &= 0xff1fffff;
  25881. if ((offsetT) value < 0)
  25882. insn |= 1 << 22;
  25883. else
  25884. insn |= 1 << 23;
  25885. /* Place the encoded addend into the first 12 bits of the
  25886. instruction. */
  25887. insn &= 0xfffff000;
  25888. insn |= encoded_addend;
  25889. /* Update the instruction. */
  25890. md_number_to_chars (buf, insn, INSN_SIZE);
  25891. }
  25892. break;
  25893. case BFD_RELOC_ARM_LDR_PC_G0:
  25894. case BFD_RELOC_ARM_LDR_PC_G1:
  25895. case BFD_RELOC_ARM_LDR_PC_G2:
  25896. case BFD_RELOC_ARM_LDR_SB_G0:
  25897. case BFD_RELOC_ARM_LDR_SB_G1:
  25898. case BFD_RELOC_ARM_LDR_SB_G2:
  25899. gas_assert (!fixP->fx_done);
  25900. if (!seg->use_rela_p)
  25901. {
  25902. bfd_vma insn;
  25903. bfd_vma addend_abs = llabs ((offsetT) value);
  25904. /* Check that the absolute value of the addend can be
  25905. encoded in 12 bits. */
  25906. if (addend_abs >= 0x1000)
  25907. as_bad_where (fixP->fx_file, fixP->fx_line,
  25908. _("bad offset 0x%08lX (only 12 bits available for the magnitude)"),
  25909. (unsigned long) addend_abs);
  25910. /* Extract the instruction. */
  25911. insn = md_chars_to_number (buf, INSN_SIZE);
  25912. /* If the addend is negative, clear bit 23 of the instruction.
  25913. Otherwise set it. */
  25914. if ((offsetT) value < 0)
  25915. insn &= ~(1 << 23);
  25916. else
  25917. insn |= 1 << 23;
  25918. /* Place the absolute value of the addend into the first 12 bits
  25919. of the instruction. */
  25920. insn &= 0xfffff000;
  25921. insn |= addend_abs;
  25922. /* Update the instruction. */
  25923. md_number_to_chars (buf, insn, INSN_SIZE);
  25924. }
  25925. break;
  25926. case BFD_RELOC_ARM_LDRS_PC_G0:
  25927. case BFD_RELOC_ARM_LDRS_PC_G1:
  25928. case BFD_RELOC_ARM_LDRS_PC_G2:
  25929. case BFD_RELOC_ARM_LDRS_SB_G0:
  25930. case BFD_RELOC_ARM_LDRS_SB_G1:
  25931. case BFD_RELOC_ARM_LDRS_SB_G2:
  25932. gas_assert (!fixP->fx_done);
  25933. if (!seg->use_rela_p)
  25934. {
  25935. bfd_vma insn;
  25936. bfd_vma addend_abs = llabs ((offsetT) value);
  25937. /* Check that the absolute value of the addend can be
  25938. encoded in 8 bits. */
  25939. if (addend_abs >= 0x100)
  25940. as_bad_where (fixP->fx_file, fixP->fx_line,
  25941. _("bad offset 0x%08lX (only 8 bits available for the magnitude)"),
  25942. (unsigned long) addend_abs);
  25943. /* Extract the instruction. */
  25944. insn = md_chars_to_number (buf, INSN_SIZE);
  25945. /* If the addend is negative, clear bit 23 of the instruction.
  25946. Otherwise set it. */
  25947. if ((offsetT) value < 0)
  25948. insn &= ~(1 << 23);
  25949. else
  25950. insn |= 1 << 23;
  25951. /* Place the first four bits of the absolute value of the addend
  25952. into the first 4 bits of the instruction, and the remaining
  25953. four into bits 8 .. 11. */
  25954. insn &= 0xfffff0f0;
  25955. insn |= (addend_abs & 0xf) | ((addend_abs & 0xf0) << 4);
  25956. /* Update the instruction. */
  25957. md_number_to_chars (buf, insn, INSN_SIZE);
  25958. }
  25959. break;
  25960. case BFD_RELOC_ARM_LDC_PC_G0:
  25961. case BFD_RELOC_ARM_LDC_PC_G1:
  25962. case BFD_RELOC_ARM_LDC_PC_G2:
  25963. case BFD_RELOC_ARM_LDC_SB_G0:
  25964. case BFD_RELOC_ARM_LDC_SB_G1:
  25965. case BFD_RELOC_ARM_LDC_SB_G2:
  25966. gas_assert (!fixP->fx_done);
  25967. if (!seg->use_rela_p)
  25968. {
  25969. bfd_vma insn;
  25970. bfd_vma addend_abs = llabs ((offsetT) value);
  25971. /* Check that the absolute value of the addend is a multiple of
  25972. four and, when divided by four, fits in 8 bits. */
  25973. if (addend_abs & 0x3)
  25974. as_bad_where (fixP->fx_file, fixP->fx_line,
  25975. _("bad offset 0x%08lX (must be word-aligned)"),
  25976. (unsigned long) addend_abs);
  25977. if ((addend_abs >> 2) > 0xff)
  25978. as_bad_where (fixP->fx_file, fixP->fx_line,
  25979. _("bad offset 0x%08lX (must be an 8-bit number of words)"),
  25980. (unsigned long) addend_abs);
  25981. /* Extract the instruction. */
  25982. insn = md_chars_to_number (buf, INSN_SIZE);
  25983. /* If the addend is negative, clear bit 23 of the instruction.
  25984. Otherwise set it. */
  25985. if ((offsetT) value < 0)
  25986. insn &= ~(1 << 23);
  25987. else
  25988. insn |= 1 << 23;
  25989. /* Place the addend (divided by four) into the first eight
  25990. bits of the instruction. */
  25991. insn &= 0xfffffff0;
  25992. insn |= addend_abs >> 2;
  25993. /* Update the instruction. */
  25994. md_number_to_chars (buf, insn, INSN_SIZE);
  25995. }
  25996. break;
  25997. case BFD_RELOC_THUMB_PCREL_BRANCH5:
  25998. if (fixP->fx_addsy
  25999. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  26000. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  26001. && ARM_IS_FUNC (fixP->fx_addsy)
  26002. && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v8_1m_main))
  26003. {
  26004. /* Force a relocation for a branch 5 bits wide. */
  26005. fixP->fx_done = 0;
  26006. }
  26007. if (v8_1_branch_value_check (value, 5, false) == FAIL)
  26008. as_bad_where (fixP->fx_file, fixP->fx_line,
  26009. BAD_BRANCH_OFF);
  26010. if (fixP->fx_done || !seg->use_rela_p)
  26011. {
  26012. addressT boff = value >> 1;
  26013. newval = md_chars_to_number (buf, THUMB_SIZE);
  26014. newval |= (boff << 7);
  26015. md_number_to_chars (buf, newval, THUMB_SIZE);
  26016. }
  26017. break;
  26018. case BFD_RELOC_THUMB_PCREL_BFCSEL:
  26019. if (fixP->fx_addsy
  26020. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  26021. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  26022. && ARM_IS_FUNC (fixP->fx_addsy)
  26023. && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v8_1m_main))
  26024. {
  26025. fixP->fx_done = 0;
  26026. }
  26027. if ((value & ~0x7f) && ((value & ~0x3f) != (valueT) ~0x3f))
  26028. as_bad_where (fixP->fx_file, fixP->fx_line,
  26029. _("branch out of range"));
  26030. if (fixP->fx_done || !seg->use_rela_p)
  26031. {
  26032. newval = md_chars_to_number (buf, THUMB_SIZE);
  26033. addressT boff = ((newval & 0x0780) >> 7) << 1;
  26034. addressT diff = value - boff;
  26035. if (diff == 4)
  26036. {
  26037. newval |= 1 << 1; /* T bit. */
  26038. }
  26039. else if (diff != 2)
  26040. {
  26041. as_bad_where (fixP->fx_file, fixP->fx_line,
  26042. _("out of range label-relative fixup value"));
  26043. }
  26044. md_number_to_chars (buf, newval, THUMB_SIZE);
  26045. }
  26046. break;
  26047. case BFD_RELOC_ARM_THUMB_BF17:
  26048. if (fixP->fx_addsy
  26049. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  26050. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  26051. && ARM_IS_FUNC (fixP->fx_addsy)
  26052. && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v8_1m_main))
  26053. {
  26054. /* Force a relocation for a branch 17 bits wide. */
  26055. fixP->fx_done = 0;
  26056. }
  26057. if (v8_1_branch_value_check (value, 17, true) == FAIL)
  26058. as_bad_where (fixP->fx_file, fixP->fx_line,
  26059. BAD_BRANCH_OFF);
  26060. if (fixP->fx_done || !seg->use_rela_p)
  26061. {
  26062. offsetT newval2;
  26063. addressT immA, immB, immC;
  26064. immA = (value & 0x0001f000) >> 12;
  26065. immB = (value & 0x00000ffc) >> 2;
  26066. immC = (value & 0x00000002) >> 1;
  26067. newval = md_chars_to_number (buf, THUMB_SIZE);
  26068. newval2 = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
  26069. newval |= immA;
  26070. newval2 |= (immC << 11) | (immB << 1);
  26071. md_number_to_chars (buf, newval, THUMB_SIZE);
  26072. md_number_to_chars (buf + THUMB_SIZE, newval2, THUMB_SIZE);
  26073. }
  26074. break;
  26075. case BFD_RELOC_ARM_THUMB_BF19:
  26076. if (fixP->fx_addsy
  26077. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  26078. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  26079. && ARM_IS_FUNC (fixP->fx_addsy)
  26080. && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v8_1m_main))
  26081. {
  26082. /* Force a relocation for a branch 19 bits wide. */
  26083. fixP->fx_done = 0;
  26084. }
  26085. if (v8_1_branch_value_check (value, 19, true) == FAIL)
  26086. as_bad_where (fixP->fx_file, fixP->fx_line,
  26087. BAD_BRANCH_OFF);
  26088. if (fixP->fx_done || !seg->use_rela_p)
  26089. {
  26090. offsetT newval2;
  26091. addressT immA, immB, immC;
  26092. immA = (value & 0x0007f000) >> 12;
  26093. immB = (value & 0x00000ffc) >> 2;
  26094. immC = (value & 0x00000002) >> 1;
  26095. newval = md_chars_to_number (buf, THUMB_SIZE);
  26096. newval2 = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
  26097. newval |= immA;
  26098. newval2 |= (immC << 11) | (immB << 1);
  26099. md_number_to_chars (buf, newval, THUMB_SIZE);
  26100. md_number_to_chars (buf + THUMB_SIZE, newval2, THUMB_SIZE);
  26101. }
  26102. break;
  26103. case BFD_RELOC_ARM_THUMB_BF13:
  26104. if (fixP->fx_addsy
  26105. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  26106. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  26107. && ARM_IS_FUNC (fixP->fx_addsy)
  26108. && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v8_1m_main))
  26109. {
  26110. /* Force a relocation for a branch 13 bits wide. */
  26111. fixP->fx_done = 0;
  26112. }
  26113. if (v8_1_branch_value_check (value, 13, true) == FAIL)
  26114. as_bad_where (fixP->fx_file, fixP->fx_line,
  26115. BAD_BRANCH_OFF);
  26116. if (fixP->fx_done || !seg->use_rela_p)
  26117. {
  26118. offsetT newval2;
  26119. addressT immA, immB, immC;
  26120. immA = (value & 0x00001000) >> 12;
  26121. immB = (value & 0x00000ffc) >> 2;
  26122. immC = (value & 0x00000002) >> 1;
  26123. newval = md_chars_to_number (buf, THUMB_SIZE);
  26124. newval2 = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
  26125. newval |= immA;
  26126. newval2 |= (immC << 11) | (immB << 1);
  26127. md_number_to_chars (buf, newval, THUMB_SIZE);
  26128. md_number_to_chars (buf + THUMB_SIZE, newval2, THUMB_SIZE);
  26129. }
  26130. break;
  26131. case BFD_RELOC_ARM_THUMB_LOOP12:
  26132. if (fixP->fx_addsy
  26133. && (S_GET_SEGMENT (fixP->fx_addsy) == seg)
  26134. && !S_FORCE_RELOC (fixP->fx_addsy, true)
  26135. && ARM_IS_FUNC (fixP->fx_addsy)
  26136. && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v8_1m_main))
  26137. {
  26138. /* Force a relocation for a branch 12 bits wide. */
  26139. fixP->fx_done = 0;
  26140. }
  26141. bfd_vma insn = get_thumb32_insn (buf);
  26142. /* le lr, <label>, le <label> or letp lr, <label> */
  26143. if (((insn & 0xffffffff) == 0xf00fc001)
  26144. || ((insn & 0xffffffff) == 0xf02fc001)
  26145. || ((insn & 0xffffffff) == 0xf01fc001))
  26146. value = -value;
  26147. if (v8_1_branch_value_check (value, 12, false) == FAIL)
  26148. as_bad_where (fixP->fx_file, fixP->fx_line,
  26149. BAD_BRANCH_OFF);
  26150. if (fixP->fx_done || !seg->use_rela_p)
  26151. {
  26152. addressT imml, immh;
  26153. immh = (value & 0x00000ffc) >> 2;
  26154. imml = (value & 0x00000002) >> 1;
  26155. newval = md_chars_to_number (buf + THUMB_SIZE, THUMB_SIZE);
  26156. newval |= (imml << 11) | (immh << 1);
  26157. md_number_to_chars (buf + THUMB_SIZE, newval, THUMB_SIZE);
  26158. }
  26159. break;
  26160. case BFD_RELOC_ARM_V4BX:
  26161. /* This will need to go in the object file. */
  26162. fixP->fx_done = 0;
  26163. break;
  26164. case BFD_RELOC_UNUSED:
  26165. default:
  26166. as_bad_where (fixP->fx_file, fixP->fx_line,
  26167. _("bad relocation fixup type (%d)"), fixP->fx_r_type);
  26168. }
  26169. }
/* Translate internal representation of relocation info to BFD target
   format.  Returns a freshly allocated arelent, or NULL (after issuing
   a diagnostic) for fixups that should have been resolved internally
   or that this object format cannot represent.  */

arelent *
tc_gen_reloc (asection *section, fixS *fixp)
{
  arelent * reloc;
  bfd_reloc_code_real_type code;

  reloc = XNEW (arelent);

  reloc->sym_ptr_ptr = XNEW (asymbol *);
  *reloc->sym_ptr_ptr = symbol_get_bfdsym (fixp->fx_addsy);
  reloc->address = fixp->fx_frag->fr_address + fixp->fx_where;

  if (fixp->fx_pcrel)
    {
      if (section->use_rela_p)
	fixp->fx_offset -= md_pcrel_from_section (fixp, section);
      else
	fixp->fx_offset = reloc->address;
    }
  reloc->addend = fixp->fx_offset;

  switch (fixp->fx_r_type)
    {
      /* The following cases form a fall-through chain: each data/MOVx
	 relocation is upgraded to its PC-relative variant when the
	 fixup is pcrel; otherwise control falls through to the shared
	 "code = fixp->fx_r_type" at the end of the chain.  */
    case BFD_RELOC_8:
      if (fixp->fx_pcrel)
	{
	  code = BFD_RELOC_8_PCREL;
	  break;
	}
      /* Fall through.  */

    case BFD_RELOC_16:
      if (fixp->fx_pcrel)
	{
	  code = BFD_RELOC_16_PCREL;
	  break;
	}
      /* Fall through.  */

    case BFD_RELOC_32:
      if (fixp->fx_pcrel)
	{
	  code = BFD_RELOC_32_PCREL;
	  break;
	}
      /* Fall through.  */

    case BFD_RELOC_ARM_MOVW:
      if (fixp->fx_pcrel)
	{
	  code = BFD_RELOC_ARM_MOVW_PCREL;
	  break;
	}
      /* Fall through.  */

    case BFD_RELOC_ARM_MOVT:
      if (fixp->fx_pcrel)
	{
	  code = BFD_RELOC_ARM_MOVT_PCREL;
	  break;
	}
      /* Fall through.  */

    case BFD_RELOC_ARM_THUMB_MOVW:
      if (fixp->fx_pcrel)
	{
	  code = BFD_RELOC_ARM_THUMB_MOVW_PCREL;
	  break;
	}
      /* Fall through.  */

    case BFD_RELOC_ARM_THUMB_MOVT:
      if (fixp->fx_pcrel)
	{
	  code = BFD_RELOC_ARM_THUMB_MOVT_PCREL;
	  break;
	}
      /* Fall through.  */

      /* These are emitted exactly as-is.  */
    case BFD_RELOC_NONE:
    case BFD_RELOC_ARM_PCREL_BRANCH:
    case BFD_RELOC_ARM_PCREL_BLX:
    case BFD_RELOC_RVA:
    case BFD_RELOC_THUMB_PCREL_BRANCH7:
    case BFD_RELOC_THUMB_PCREL_BRANCH9:
    case BFD_RELOC_THUMB_PCREL_BRANCH12:
    case BFD_RELOC_THUMB_PCREL_BRANCH20:
    case BFD_RELOC_THUMB_PCREL_BRANCH23:
    case BFD_RELOC_THUMB_PCREL_BRANCH25:
    case BFD_RELOC_VTABLE_ENTRY:
    case BFD_RELOC_VTABLE_INHERIT:
#ifdef TE_PE
    case BFD_RELOC_32_SECREL:
#endif
      code = fixp->fx_r_type;
      break;

    case BFD_RELOC_THUMB_PCREL_BLX:
#ifdef OBJ_ELF
      /* EABI v4 and later expresses BLX as a plain BRANCH23; the
	 linker performs any needed mode change.  */
      if (EF_ARM_EABI_VERSION (meabi_flags) >= EF_ARM_EABI_VER4)
	code = BFD_RELOC_THUMB_PCREL_BRANCH23;
      else
#endif
	code = BFD_RELOC_THUMB_PCREL_BLX;
      break;

    case BFD_RELOC_ARM_LITERAL:
    case BFD_RELOC_ARM_HWLITERAL:
      /* If this is called then a literal has
	 been referenced across a section boundary.  */
      as_bad_where (fixp->fx_file, fixp->fx_line,
		    _("literal referenced across section boundary"));
      return NULL;

#ifdef OBJ_ELF
    case BFD_RELOC_ARM_TLS_CALL:
    case BFD_RELOC_ARM_THM_TLS_CALL:
    case BFD_RELOC_ARM_TLS_DESCSEQ:
    case BFD_RELOC_ARM_THM_TLS_DESCSEQ:
    case BFD_RELOC_ARM_GOT32:
    case BFD_RELOC_ARM_GOTOFF:
    case BFD_RELOC_ARM_GOT_PREL:
    case BFD_RELOC_ARM_PLT32:
    case BFD_RELOC_ARM_TARGET1:
    case BFD_RELOC_ARM_ROSEGREL32:
    case BFD_RELOC_ARM_SBREL32:
    case BFD_RELOC_ARM_PREL31:
    case BFD_RELOC_ARM_TARGET2:
    case BFD_RELOC_ARM_TLS_LDO32:
    case BFD_RELOC_ARM_PCREL_CALL:
    case BFD_RELOC_ARM_PCREL_JUMP:
    case BFD_RELOC_ARM_ALU_PC_G0_NC:
    case BFD_RELOC_ARM_ALU_PC_G0:
    case BFD_RELOC_ARM_ALU_PC_G1_NC:
    case BFD_RELOC_ARM_ALU_PC_G1:
    case BFD_RELOC_ARM_ALU_PC_G2:
    case BFD_RELOC_ARM_LDR_PC_G0:
    case BFD_RELOC_ARM_LDR_PC_G1:
    case BFD_RELOC_ARM_LDR_PC_G2:
    case BFD_RELOC_ARM_LDRS_PC_G0:
    case BFD_RELOC_ARM_LDRS_PC_G1:
    case BFD_RELOC_ARM_LDRS_PC_G2:
    case BFD_RELOC_ARM_LDC_PC_G0:
    case BFD_RELOC_ARM_LDC_PC_G1:
    case BFD_RELOC_ARM_LDC_PC_G2:
    case BFD_RELOC_ARM_ALU_SB_G0_NC:
    case BFD_RELOC_ARM_ALU_SB_G0:
    case BFD_RELOC_ARM_ALU_SB_G1_NC:
    case BFD_RELOC_ARM_ALU_SB_G1:
    case BFD_RELOC_ARM_ALU_SB_G2:
    case BFD_RELOC_ARM_LDR_SB_G0:
    case BFD_RELOC_ARM_LDR_SB_G1:
    case BFD_RELOC_ARM_LDR_SB_G2:
    case BFD_RELOC_ARM_LDRS_SB_G0:
    case BFD_RELOC_ARM_LDRS_SB_G1:
    case BFD_RELOC_ARM_LDRS_SB_G2:
    case BFD_RELOC_ARM_LDC_SB_G0:
    case BFD_RELOC_ARM_LDC_SB_G1:
    case BFD_RELOC_ARM_LDC_SB_G2:
    case BFD_RELOC_ARM_V4BX:
    case BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC:
    case BFD_RELOC_ARM_THUMB_ALU_ABS_G1_NC:
    case BFD_RELOC_ARM_THUMB_ALU_ABS_G2_NC:
    case BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC:
    case BFD_RELOC_ARM_GOTFUNCDESC:
    case BFD_RELOC_ARM_GOTOFFFUNCDESC:
    case BFD_RELOC_ARM_FUNCDESC:
    case BFD_RELOC_ARM_THUMB_BF17:
    case BFD_RELOC_ARM_THUMB_BF19:
    case BFD_RELOC_ARM_THUMB_BF13:
      code = fixp->fx_r_type;
      break;

    case BFD_RELOC_ARM_TLS_GOTDESC:
    case BFD_RELOC_ARM_TLS_GD32:
    case BFD_RELOC_ARM_TLS_GD32_FDPIC:
    case BFD_RELOC_ARM_TLS_LE32:
    case BFD_RELOC_ARM_TLS_IE32:
    case BFD_RELOC_ARM_TLS_IE32_FDPIC:
    case BFD_RELOC_ARM_TLS_LDM32:
    case BFD_RELOC_ARM_TLS_LDM32_FDPIC:
      /* BFD will include the symbol's address in the addend.
	 But we don't want that, so subtract it out again here.  */
      if (!S_IS_COMMON (fixp->fx_addsy))
	reloc->addend -= (*reloc->sym_ptr_ptr)->value;
      code = fixp->fx_r_type;
      break;
#endif

    case BFD_RELOC_ARM_IMMEDIATE:
      as_bad_where (fixp->fx_file, fixp->fx_line,
		    _("internal relocation (type: IMMEDIATE) not fixed up"));
      return NULL;

    case BFD_RELOC_ARM_ADRL_IMMEDIATE:
      as_bad_where (fixp->fx_file, fixp->fx_line,
		    _("ADRL used for a symbol not defined in the same file"));
      return NULL;

    case BFD_RELOC_THUMB_PCREL_BRANCH5:
    case BFD_RELOC_THUMB_PCREL_BFCSEL:
    case BFD_RELOC_ARM_THUMB_LOOP12:
      as_bad_where (fixp->fx_file, fixp->fx_line,
		    _("%s used for a symbol not defined in the same file"),
		    bfd_get_reloc_code_name (fixp->fx_r_type));
      return NULL;

    case BFD_RELOC_ARM_OFFSET_IMM:
      if (section->use_rela_p)
	{
	  code = fixp->fx_r_type;
	  break;
	}

      if (fixp->fx_addsy != NULL
	  && !S_IS_DEFINED (fixp->fx_addsy)
	  && S_IS_LOCAL (fixp->fx_addsy))
	{
	  as_bad_where (fixp->fx_file, fixp->fx_line,
			_("undefined local label `%s'"),
			S_GET_NAME (fixp->fx_addsy));
	  return NULL;
	}

      as_bad_where (fixp->fx_file, fixp->fx_line,
		    _("internal_relocation (type: OFFSET_IMM) not fixed up"));
      return NULL;

    default:
      {
	const char * type;

	/* Map a handful of internal-only relocation types to names
	   for a more helpful diagnostic.  */
	switch (fixp->fx_r_type)
	  {
	  case BFD_RELOC_NONE:		   type = "NONE";	  break;
	  case BFD_RELOC_ARM_OFFSET_IMM8:  type = "OFFSET_IMM8";  break;
	  case BFD_RELOC_ARM_SHIFT_IMM:	   type = "SHIFT_IMM";	  break;
	  case BFD_RELOC_ARM_SMC:	   type = "SMC";	  break;
	  case BFD_RELOC_ARM_SWI:	   type = "SWI";	  break;
	  case BFD_RELOC_ARM_MULTI:	   type = "MULTI";	  break;
	  case BFD_RELOC_ARM_CP_OFF_IMM:   type = "CP_OFF_IMM";	  break;
	  case BFD_RELOC_ARM_T32_OFFSET_IMM: type = "T32_OFFSET_IMM"; break;
	  case BFD_RELOC_ARM_T32_CP_OFF_IMM: type = "T32_CP_OFF_IMM"; break;
	  case BFD_RELOC_ARM_THUMB_ADD:	   type = "THUMB_ADD";	  break;
	  case BFD_RELOC_ARM_THUMB_SHIFT:  type = "THUMB_SHIFT";  break;
	  case BFD_RELOC_ARM_THUMB_IMM:	   type = "THUMB_IMM";	  break;
	  case BFD_RELOC_ARM_THUMB_OFFSET: type = "THUMB_OFFSET"; break;
	  default:			   type = _("<unknown>"); break;
	  }
	as_bad_where (fixp->fx_file, fixp->fx_line,
		      _("cannot represent %s relocation in this object file format"),
		      type);
	return NULL;
      }
    }

#ifdef OBJ_ELF
  /* A 32-bit reference to _GLOBAL_OFFSET_TABLE_ becomes a GOTPC
     relocation with the place itself as the addend.  */
  if ((code == BFD_RELOC_32_PCREL || code == BFD_RELOC_32)
      && GOT_symbol
      && fixp->fx_addsy == GOT_symbol)
    {
      code = BFD_RELOC_ARM_GOTPC;
      reloc->addend = fixp->fx_offset = reloc->address;
    }
#endif

  reloc->howto = bfd_reloc_type_lookup (stdoutput, code);

  if (reloc->howto == NULL)
    {
      as_bad_where (fixp->fx_file, fixp->fx_line,
		    _("cannot represent %s relocation in this object file format"),
		    bfd_get_reloc_code_name (code));
      return NULL;
    }

  /* HACK: Since arm ELF uses Rel instead of Rela, encode the
     vtable entry to be used in the relocation's section offset.  */
  if (fixp->fx_r_type == BFD_RELOC_VTABLE_ENTRY)
    reloc->address = fixp->fx_offset;

  return reloc;
}
  26427. /* This fix_new is called by cons via TC_CONS_FIX_NEW. */
  26428. void
  26429. cons_fix_new_arm (fragS * frag,
  26430. int where,
  26431. int size,
  26432. expressionS * exp,
  26433. bfd_reloc_code_real_type reloc)
  26434. {
  26435. int pcrel = 0;
  26436. /* Pick a reloc.
  26437. FIXME: @@ Should look at CPU word size. */
  26438. switch (size)
  26439. {
  26440. case 1:
  26441. reloc = BFD_RELOC_8;
  26442. break;
  26443. case 2:
  26444. reloc = BFD_RELOC_16;
  26445. break;
  26446. case 4:
  26447. default:
  26448. reloc = BFD_RELOC_32;
  26449. break;
  26450. case 8:
  26451. reloc = BFD_RELOC_64;
  26452. break;
  26453. }
  26454. #ifdef TE_PE
  26455. if (exp->X_op == O_secrel)
  26456. {
  26457. exp->X_op = O_symbol;
  26458. reloc = BFD_RELOC_32_SECREL;
  26459. }
  26460. #endif
  26461. fix_new_exp (frag, where, size, exp, pcrel, reloc);
  26462. }
  26463. #if defined (OBJ_COFF)
  26464. void
  26465. arm_validate_fix (fixS * fixP)
  26466. {
  26467. /* If the destination of the branch is a defined symbol which does not have
  26468. the THUMB_FUNC attribute, then we must be calling a function which has
  26469. the (interfacearm) attribute. We look for the Thumb entry point to that
  26470. function and change the branch to refer to that function instead. */
  26471. if (fixP->fx_r_type == BFD_RELOC_THUMB_PCREL_BRANCH23
  26472. && fixP->fx_addsy != NULL
  26473. && S_IS_DEFINED (fixP->fx_addsy)
  26474. && ! THUMB_IS_FUNC (fixP->fx_addsy))
  26475. {
  26476. fixP->fx_addsy = find_real_start (fixP->fx_addsy);
  26477. }
  26478. }
  26479. #endif
  26480. int
  26481. arm_force_relocation (struct fix * fixp)
  26482. {
  26483. #if defined (OBJ_COFF) && defined (TE_PE)
  26484. if (fixp->fx_r_type == BFD_RELOC_RVA)
  26485. return 1;
  26486. #endif
  26487. /* In case we have a call or a branch to a function in ARM ISA mode from
  26488. a thumb function or vice-versa force the relocation. These relocations
  26489. are cleared off for some cores that might have blx and simple transformations
  26490. are possible. */
  26491. #ifdef OBJ_ELF
  26492. switch (fixp->fx_r_type)
  26493. {
  26494. case BFD_RELOC_ARM_PCREL_JUMP:
  26495. case BFD_RELOC_ARM_PCREL_CALL:
  26496. case BFD_RELOC_THUMB_PCREL_BLX:
  26497. if (THUMB_IS_FUNC (fixp->fx_addsy))
  26498. return 1;
  26499. break;
  26500. case BFD_RELOC_ARM_PCREL_BLX:
  26501. case BFD_RELOC_THUMB_PCREL_BRANCH25:
  26502. case BFD_RELOC_THUMB_PCREL_BRANCH20:
  26503. case BFD_RELOC_THUMB_PCREL_BRANCH23:
  26504. if (ARM_IS_FUNC (fixp->fx_addsy))
  26505. return 1;
  26506. break;
  26507. default:
  26508. break;
  26509. }
  26510. #endif
  26511. /* Resolve these relocations even if the symbol is extern or weak.
  26512. Technically this is probably wrong due to symbol preemption.
  26513. In practice these relocations do not have enough range to be useful
  26514. at dynamic link time, and some code (e.g. in the Linux kernel)
  26515. expects these references to be resolved. */
  26516. if (fixp->fx_r_type == BFD_RELOC_ARM_IMMEDIATE
  26517. || fixp->fx_r_type == BFD_RELOC_ARM_OFFSET_IMM
  26518. || fixp->fx_r_type == BFD_RELOC_ARM_OFFSET_IMM8
  26519. || fixp->fx_r_type == BFD_RELOC_ARM_ADRL_IMMEDIATE
  26520. || fixp->fx_r_type == BFD_RELOC_ARM_CP_OFF_IMM
  26521. || fixp->fx_r_type == BFD_RELOC_ARM_CP_OFF_IMM_S2
  26522. || fixp->fx_r_type == BFD_RELOC_ARM_THUMB_OFFSET
  26523. || fixp->fx_r_type == BFD_RELOC_THUMB_PCREL_BRANCH12
  26524. || fixp->fx_r_type == BFD_RELOC_ARM_T32_ADD_IMM
  26525. || fixp->fx_r_type == BFD_RELOC_ARM_T32_IMMEDIATE
  26526. || fixp->fx_r_type == BFD_RELOC_ARM_T32_IMM12
  26527. || fixp->fx_r_type == BFD_RELOC_ARM_T32_OFFSET_IMM
  26528. || fixp->fx_r_type == BFD_RELOC_ARM_T32_ADD_PC12
  26529. || fixp->fx_r_type == BFD_RELOC_ARM_T32_CP_OFF_IMM
  26530. || fixp->fx_r_type == BFD_RELOC_ARM_T32_CP_OFF_IMM_S2)
  26531. return 0;
  26532. /* Always leave these relocations for the linker. */
  26533. if ((fixp->fx_r_type >= BFD_RELOC_ARM_ALU_PC_G0_NC
  26534. && fixp->fx_r_type <= BFD_RELOC_ARM_LDC_SB_G2)
  26535. || fixp->fx_r_type == BFD_RELOC_ARM_LDR_PC_G0)
  26536. return 1;
  26537. /* Always generate relocations against function symbols. */
  26538. if (fixp->fx_r_type == BFD_RELOC_32
  26539. && fixp->fx_addsy
  26540. && (symbol_get_bfdsym (fixp->fx_addsy)->flags & BSF_FUNCTION))
  26541. return 1;
  26542. return generic_force_reloc (fixp);
  26543. }
  26544. #if defined (OBJ_ELF) || defined (OBJ_COFF)
  26545. /* Relocations against function names must be left unadjusted,
  26546. so that the linker can use this information to generate interworking
  26547. stubs. The MIPS version of this function
  26548. also prevents relocations that are mips-16 specific, but I do not
  26549. know why it does this.
  26550. FIXME:
  26551. There is one other problem that ought to be addressed here, but
  26552. which currently is not: Taking the address of a label (rather
  26553. than a function) and then later jumping to that address. Such
  26554. addresses also ought to have their bottom bit set (assuming that
  26555. they reside in Thumb code), but at the moment they will not. */
  26556. bool
  26557. arm_fix_adjustable (fixS * fixP)
  26558. {
  26559. if (fixP->fx_addsy == NULL)
  26560. return 1;
  26561. /* Preserve relocations against symbols with function type. */
  26562. if (symbol_get_bfdsym (fixP->fx_addsy)->flags & BSF_FUNCTION)
  26563. return false;
  26564. if (THUMB_IS_FUNC (fixP->fx_addsy)
  26565. && fixP->fx_subsy == NULL)
  26566. return false;
  26567. /* We need the symbol name for the VTABLE entries. */
  26568. if ( fixP->fx_r_type == BFD_RELOC_VTABLE_INHERIT
  26569. || fixP->fx_r_type == BFD_RELOC_VTABLE_ENTRY)
  26570. return false;
  26571. /* Don't allow symbols to be discarded on GOT related relocs. */
  26572. if (fixP->fx_r_type == BFD_RELOC_ARM_PLT32
  26573. || fixP->fx_r_type == BFD_RELOC_ARM_GOT32
  26574. || fixP->fx_r_type == BFD_RELOC_ARM_GOTOFF
  26575. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_GD32
  26576. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_GD32_FDPIC
  26577. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_LE32
  26578. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_IE32
  26579. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_IE32_FDPIC
  26580. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_LDM32
  26581. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_LDM32_FDPIC
  26582. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_LDO32
  26583. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_GOTDESC
  26584. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_CALL
  26585. || fixP->fx_r_type == BFD_RELOC_ARM_THM_TLS_CALL
  26586. || fixP->fx_r_type == BFD_RELOC_ARM_TLS_DESCSEQ
  26587. || fixP->fx_r_type == BFD_RELOC_ARM_THM_TLS_DESCSEQ
  26588. || fixP->fx_r_type == BFD_RELOC_ARM_TARGET2)
  26589. return false;
  26590. /* Similarly for group relocations. */
  26591. if ((fixP->fx_r_type >= BFD_RELOC_ARM_ALU_PC_G0_NC
  26592. && fixP->fx_r_type <= BFD_RELOC_ARM_LDC_SB_G2)
  26593. || fixP->fx_r_type == BFD_RELOC_ARM_LDR_PC_G0)
  26594. return false;
  26595. /* MOVW/MOVT REL relocations have limited offsets, so keep the symbols. */
  26596. if (fixP->fx_r_type == BFD_RELOC_ARM_MOVW
  26597. || fixP->fx_r_type == BFD_RELOC_ARM_MOVT
  26598. || fixP->fx_r_type == BFD_RELOC_ARM_MOVW_PCREL
  26599. || fixP->fx_r_type == BFD_RELOC_ARM_MOVT_PCREL
  26600. || fixP->fx_r_type == BFD_RELOC_ARM_THUMB_MOVW
  26601. || fixP->fx_r_type == BFD_RELOC_ARM_THUMB_MOVT
  26602. || fixP->fx_r_type == BFD_RELOC_ARM_THUMB_MOVW_PCREL
  26603. || fixP->fx_r_type == BFD_RELOC_ARM_THUMB_MOVT_PCREL)
  26604. return false;
  26605. /* BFD_RELOC_ARM_THUMB_ALU_ABS_Gx_NC relocations have VERY limited
  26606. offsets, so keep these symbols. */
  26607. if (fixP->fx_r_type >= BFD_RELOC_ARM_THUMB_ALU_ABS_G0_NC
  26608. && fixP->fx_r_type <= BFD_RELOC_ARM_THUMB_ALU_ABS_G3_NC)
  26609. return false;
  26610. return true;
  26611. }
  26612. #endif /* defined (OBJ_ELF) || defined (OBJ_COFF) */
  26613. #ifdef OBJ_ELF
  26614. const char *
  26615. elf32_arm_target_format (void)
  26616. {
  26617. #if defined (TE_VXWORKS)
  26618. return (target_big_endian
  26619. ? "elf32-bigarm-vxworks"
  26620. : "elf32-littlearm-vxworks");
  26621. #elif defined (TE_NACL)
  26622. return (target_big_endian
  26623. ? "elf32-bigarm-nacl"
  26624. : "elf32-littlearm-nacl");
  26625. #else
  26626. if (arm_fdpic)
  26627. {
  26628. if (target_big_endian)
  26629. return "elf32-bigarm-fdpic";
  26630. else
  26631. return "elf32-littlearm-fdpic";
  26632. }
  26633. else
  26634. {
  26635. if (target_big_endian)
  26636. return "elf32-bigarm";
  26637. else
  26638. return "elf32-littlearm";
  26639. }
  26640. #endif
  26641. }
/* Per-symbol hook run while the symbol table is written out; the ARM
   target adds nothing of its own here and simply delegates to the
   generic ELF symbol frobbing.  */
void
armelf_frob_symbol (symbolS * symp,
		    int * puntp)
{
  elf_frob_symbol (symp, puntp);
}
  26648. #endif
  26649. /* MD interface: Finalization. */
  26650. void
  26651. arm_cleanup (void)
  26652. {
  26653. literal_pool * pool;
  26654. /* Ensure that all the predication blocks are properly closed. */
  26655. check_pred_blocks_finished ();
  26656. for (pool = list_of_pools; pool; pool = pool->next)
  26657. {
  26658. /* Put it at the end of the relevant section. */
  26659. subseg_set (pool->section, pool->sub_section);
  26660. #ifdef OBJ_ELF
  26661. arm_elf_change_section ();
  26662. #endif
  26663. s_ltorg (0);
  26664. }
  26665. }
  26666. #ifdef OBJ_ELF
/* Remove any excess mapping symbols generated for alignment frags in
   SEC.  We may have created a mapping symbol before a zero byte
   alignment; remove it if there's a mapping symbol after the
   alignment.  Called via bfd_map_over_sections.  */
static void
check_mapping_symbols (bfd *abfd ATTRIBUTE_UNUSED, asection *sec,
		       void *dummy ATTRIBUTE_UNUSED)
{
  segment_info_type *seginfo = seg_info (sec);
  fragS *fragp;

  /* Nothing to do for sections without a frag chain.  */
  if (seginfo == NULL || seginfo->frchainP == NULL)
    return;

  /* Examine the last mapping symbol recorded for each frag.  */
  for (fragp = seginfo->frchainP->frch_root;
       fragp != NULL;
       fragp = fragp->fr_next)
    {
      symbolS *sym = fragp->tc_frag_data.last_map;
      fragS *next = fragp->fr_next;

      /* Variable-sized frags have been converted to fixed size by
	 this point.  But if this was variable-sized to start with,
	 there will be a fixed-size frag after it.  So don't handle
	 next == NULL.  */
      if (sym == NULL || next == NULL)
	continue;

      if (S_GET_VALUE (sym) < next->fr_address)
	/* Not at the end of this frag.  */
	continue;
      know (S_GET_VALUE (sym) == next->fr_address);

      /* SYM sits exactly on the boundary with the following frag(s);
	 scan forward to decide whether a later mapping symbol makes it
	 redundant.  */
      do
	{
	  if (next->tc_frag_data.first_map != NULL)
	    {
	      /* Next frag starts with a mapping symbol.  Discard this
		 one.  */
	      symbol_remove (sym, &symbol_rootP, &symbol_lastP);
	      break;
	    }

	  if (next->fr_next == NULL)
	    {
	      /* This mapping symbol is at the end of the section.  Discard
		 it.  */
	      know (next->fr_fix == 0 && next->fr_var == 0);
	      symbol_remove (sym, &symbol_rootP, &symbol_lastP);
	      break;
	    }

	  /* As long as we have empty frags without any mapping symbols,
	     keep looking.  */
	  /* If the next frag is non-empty and does not start with a
	     mapping symbol, then this mapping symbol is required.  */
	  if (next->fr_address != next->fr_next->fr_address)
	    break;

	  next = next->fr_next;
	}
      while (next != NULL);
    }
}
  26723. #endif
/* Adjust the symbol table.  This marks Thumb symbols as distinct from
   ARM ones.  The COFF branch rewrites storage classes; the ELF branch
   sets the per-symbol branch type / STT_ARM_16BIT info and prunes
   redundant mapping symbols.  */
void
arm_adjust_symtab (void)
{
#ifdef OBJ_COFF
  symbolS * sym;

  for (sym = symbol_rootP; sym != NULL; sym = symbol_next (sym))
    {
      if (ARM_IS_THUMB (sym))
	{
	  if (THUMB_IS_FUNC (sym))
	    {
	      /* Mark the symbol as a Thumb function.  */
	      if (   S_GET_STORAGE_CLASS (sym) == C_STAT
		  || S_GET_STORAGE_CLASS (sym) == C_LABEL)  /* This can happen!  */
		S_SET_STORAGE_CLASS (sym, C_THUMBSTATFUNC);
	      else if (S_GET_STORAGE_CLASS (sym) == C_EXT)
		S_SET_STORAGE_CLASS (sym, C_THUMBEXTFUNC);
	      else
		as_bad (_("%s: unexpected function type: %d"),
			S_GET_NAME (sym), S_GET_STORAGE_CLASS (sym));
	    }
	  else switch (S_GET_STORAGE_CLASS (sym))
	    {
	    case C_EXT:
	      S_SET_STORAGE_CLASS (sym, C_THUMBEXT);
	      break;
	    case C_STAT:
	      S_SET_STORAGE_CLASS (sym, C_THUMBSTAT);
	      break;
	    case C_LABEL:
	      S_SET_STORAGE_CLASS (sym, C_THUMBLABEL);
	      break;
	    default:
	      /* Do nothing.  */
	      break;
	    }
	}

      /* Interworking symbols get all COFF flag bits set.  */
      if (ARM_IS_INTERWORK (sym))
	coffsymbol (symbol_get_bfdsym (sym))->native->u.syment.n_flags = 0xFF;
    }
#endif
#ifdef OBJ_ELF
  symbolS * sym;
  char	    bind;

  for (sym = symbol_rootP; sym != NULL; sym = symbol_next (sym))
    {
      if (ARM_IS_THUMB (sym))
	{
	  elf_symbol_type * elf_sym;

	  elf_sym = elf_symbol (symbol_get_bfdsym (sym));
	  bind = ELF_ST_BIND (elf_sym->internal_elf_sym.st_info);

	  /* Mapping symbols ($a, $t, $d, ...) are left untouched.  */
	  if (! bfd_is_arm_special_symbol_name (elf_sym->symbol.name,
						BFD_ARM_SPECIAL_SYM_TYPE_ANY))
	    {
	      /* If it's a .thumb_func, declare it as so,
		 otherwise tag label as .code 16.  */
	      if (THUMB_IS_FUNC (sym))
		ARM_SET_SYM_BRANCH_TYPE (elf_sym->internal_elf_sym.st_target_internal,
					 ST_BRANCH_TO_THUMB);
	      else if (EF_ARM_EABI_VERSION (meabi_flags) < EF_ARM_EABI_VER4)
		elf_sym->internal_elf_sym.st_info =
		  ELF_ST_INFO (bind, STT_ARM_16BIT);
	    }
	}
    }

  /* Remove any overlapping mapping symbols generated by alignment frags.  */
  bfd_map_over_sections (stdoutput, check_mapping_symbols, (char *) 0);
  /* Now do generic ELF adjustments.  */
  elf_adjust_symtab ();
#endif
}
  26797. /* MD interface: Initialization. */
  26798. static void
  26799. set_constant_flonums (void)
  26800. {
  26801. int i;
  26802. for (i = 0; i < NUM_FLOAT_VALS; i++)
  26803. if (atof_ieee ((char *) fp_const[i], 'x', fp_values[i]) == NULL)
  26804. abort ();
  26805. }
  26806. /* Auto-select Thumb mode if it's the only available instruction set for the
  26807. given architecture. */
  26808. static void
  26809. autoselect_thumb_from_cpu_variant (void)
  26810. {
  26811. if (!ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v1))
  26812. opcode_select (16);
  26813. }
/* MD interface: Initialization.  Build the opcode/operand lookup
   tables, resolve the command-line CPU/architecture/FPU selection into
   cpu_variant, record the object-file private flags, and tell BFD
   which ARM machine variant we are assembling for.  */
void
md_begin (void)
{
  unsigned mach;
  unsigned int i;

  /* Create the hash tables used to look up mnemonics, condition codes,
     shift names, PSR names, register names, relocation names and
     barrier options while parsing.  */
  arm_ops_hsh = str_htab_create ();
  arm_cond_hsh = str_htab_create ();
  arm_vcond_hsh = str_htab_create ();
  arm_shift_hsh = str_htab_create ();
  arm_psr_hsh = str_htab_create ();
  arm_v7m_psr_hsh = str_htab_create ();
  arm_reg_hsh = str_htab_create ();
  arm_reloc_hsh = str_htab_create ();
  arm_barrier_opt_hsh = str_htab_create ();

  /* Populate the tables.  For insns, only the first entry with a given
     template name is inserted (duplicates share one hash entry).  */
  for (i = 0; i < sizeof (insns) / sizeof (struct asm_opcode); i++)
    if (str_hash_find (arm_ops_hsh, insns[i].template_name) == NULL)
      str_hash_insert (arm_ops_hsh, insns[i].template_name, insns + i, 0);
  for (i = 0; i < sizeof (conds) / sizeof (struct asm_cond); i++)
    str_hash_insert (arm_cond_hsh, conds[i].template_name, conds + i, 0);
  for (i = 0; i < sizeof (vconds) / sizeof (struct asm_cond); i++)
    str_hash_insert (arm_vcond_hsh, vconds[i].template_name, vconds + i, 0);
  for (i = 0; i < sizeof (shift_names) / sizeof (struct asm_shift_name); i++)
    str_hash_insert (arm_shift_hsh, shift_names[i].name, shift_names + i, 0);
  for (i = 0; i < sizeof (psrs) / sizeof (struct asm_psr); i++)
    str_hash_insert (arm_psr_hsh, psrs[i].template_name, psrs + i, 0);
  for (i = 0; i < sizeof (v7m_psrs) / sizeof (struct asm_psr); i++)
    str_hash_insert (arm_v7m_psr_hsh, v7m_psrs[i].template_name,
		     v7m_psrs + i, 0);
  for (i = 0; i < sizeof (reg_names) / sizeof (struct reg_entry); i++)
    str_hash_insert (arm_reg_hsh, reg_names[i].name, reg_names + i, 0);
  for (i = 0;
       i < sizeof (barrier_opt_names) / sizeof (struct asm_barrier_opt);
       i++)
    str_hash_insert (arm_barrier_opt_hsh, barrier_opt_names[i].template_name,
		     barrier_opt_names + i, 0);

#ifdef OBJ_ELF
  for (i = 0; i < ARRAY_SIZE (reloc_names); i++)
    {
      struct reloc_entry * entry = reloc_names + i;

      if (arm_is_eabi () && entry->reloc == BFD_RELOC_ARM_PLT32)
	/* This makes encode_branch() use the EABI versions of this relocation.  */
	entry->reloc = BFD_RELOC_UNUSED;

      str_hash_insert (arm_reloc_hsh, entry->name, entry, 0);
    }
#endif

  set_constant_flonums ();

  /* Set the cpu variant based on the command-line options.  We prefer
     -mcpu= over -march= if both are set (as for GCC); and we prefer
     -mfpu= over any other way of setting the floating point unit.
     Use of legacy options with new options are faulted.  */
  if (legacy_cpu)
    {
      if (mcpu_cpu_opt || march_cpu_opt)
	as_bad (_("use of old and new-style options to set CPU type"));

      selected_arch = *legacy_cpu;
    }
  else if (mcpu_cpu_opt)
    {
      selected_arch = *mcpu_cpu_opt;
      selected_ext = *mcpu_ext_opt;
    }
  else if (march_cpu_opt)
    {
      selected_arch = *march_cpu_opt;
      selected_ext = *march_ext_opt;
    }
  ARM_MERGE_FEATURE_SETS (selected_cpu, selected_arch, selected_ext);

  if (legacy_fpu)
    {
      if (mfpu_opt)
	as_bad (_("use of old and new-style options to set FPU type"));

      selected_fpu = *legacy_fpu;
    }
  else if (mfpu_opt)
    selected_fpu = *mfpu_opt;
  else
    {
#if !(defined (EABI_DEFAULT) || defined (TE_LINUX) \
	|| defined (TE_NetBSD) || defined (TE_VXWORKS))
      /* Some environments specify a default FPU.  If they don't, infer it
	 from the processor.  */
      if (mcpu_fpu_opt)
	selected_fpu = *mcpu_fpu_opt;
      else if (march_fpu_opt)
	selected_fpu = *march_fpu_opt;
#else
      selected_fpu = fpu_default;
#endif
    }

  /* If no FPU was chosen at all, fall back to the per-target default
     (when a CPU was selected) or to FPA.  */
  if (ARM_FEATURE_ZERO (selected_fpu))
    {
      if (!no_cpu_selected ())
	selected_fpu = fpu_default;
      else
	selected_fpu = fpu_arch_fpa;
    }

#ifdef CPU_DEFAULT
  if (ARM_FEATURE_ZERO (selected_arch))
    {
      selected_arch = cpu_default;
      selected_cpu = selected_arch;
    }
  ARM_MERGE_FEATURE_SETS (cpu_variant, selected_cpu, selected_fpu);
#else
  /* Auto-detection of feature mode: allow all features in cpu_variant but
     leave selected_cpu unset.  It will be set in
     aeabi_set_public_attributes () after all instructions have been
     processed and we can decide what CPU should be selected.  */
  if (ARM_FEATURE_ZERO (selected_arch))
    ARM_MERGE_FEATURE_SETS (cpu_variant, arm_arch_any, selected_fpu);
  else
    ARM_MERGE_FEATURE_SETS (cpu_variant, selected_cpu, selected_fpu);
#endif

  autoselect_thumb_from_cpu_variant ();

  arm_arch_used = thumb_arch_used = arm_arch_none;

#if defined OBJ_COFF || defined OBJ_ELF
  {
    unsigned int flags = 0;

#if defined OBJ_ELF
    flags = meabi_flags;

    switch (meabi_flags)
      {
      case EF_ARM_EABI_UNKNOWN:
#endif
	/* Set the flags in the private structure.  */
	if (uses_apcs_26)      flags |= F_APCS26;
	if (support_interwork) flags |= F_INTERWORK;
	if (uses_apcs_float)   flags |= F_APCS_FLOAT;
	if (pic_code)	       flags |= F_PIC;
	if (!ARM_CPU_HAS_FEATURE (cpu_variant, fpu_any_hard))
	  flags |= F_SOFT_FLOAT;

	switch (mfloat_abi_opt)
	  {
	  case ARM_FLOAT_ABI_SOFT:
	  case ARM_FLOAT_ABI_SOFTFP:
	    flags |= F_SOFT_FLOAT;
	    break;

	  case ARM_FLOAT_ABI_HARD:
	    if (flags & F_SOFT_FLOAT)
	      as_bad (_("hard-float conflicts with specified fpu"));
	    break;
	  }

	/* Using pure-endian doubles (even if soft-float).  */
	if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_endian_pure))
	  flags |= F_VFP_FLOAT;

#if defined OBJ_ELF
	if (ARM_CPU_HAS_FEATURE (cpu_variant, fpu_arch_maverick))
	  flags |= EF_ARM_MAVERICK_FLOAT;
	break;

      case EF_ARM_EABI_VER4:
      case EF_ARM_EABI_VER5:
	/* No additional flags to set.  */
	break;

      default:
	abort ();
      }
#endif
    bfd_set_private_flags (stdoutput, flags);

    /* We have run out of flags in the COFF header to encode the
       status of ATPCS support, so instead we create a dummy,
       empty, debug section called .arm.atpcs.  */
    if (atpcs)
      {
	asection * sec;

	sec = bfd_make_section (stdoutput, ".arm.atpcs");

	if (sec != NULL)
	  {
	    bfd_set_section_flags (sec, SEC_READONLY | SEC_DEBUGGING);
	    bfd_set_section_size (sec, 0);
	    bfd_set_section_contents (stdoutput, sec, NULL, 0, 0);
	  }
      }
  }
#endif

  /* Record the CPU type as well.  The chain runs from the most to the
     least specific feature, so the first match wins.  */
  if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_cext_iwmmxt2))
    mach = bfd_mach_arm_iWMMXt2;
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_cext_iwmmxt))
    mach = bfd_mach_arm_iWMMXt;
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_cext_xscale))
    mach = bfd_mach_arm_XScale;
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_cext_maverick))
    mach = bfd_mach_arm_ep9312;
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v5e))
    mach = bfd_mach_arm_5TE;
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v5))
    {
      if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v4t))
	mach = bfd_mach_arm_5T;
      else
	mach = bfd_mach_arm_5;
    }
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v4))
    {
      if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v4t))
	mach = bfd_mach_arm_4T;
      else
	mach = bfd_mach_arm_4;
    }
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v3m))
    mach = bfd_mach_arm_3M;
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v3))
    mach = bfd_mach_arm_3;
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v2s))
    mach = bfd_mach_arm_2a;
  else if (ARM_CPU_HAS_FEATURE (cpu_variant, arm_ext_v2))
    mach = bfd_mach_arm_2;
  else
    mach = bfd_mach_arm_unknown;

  bfd_set_arch_mach (stdoutput, TARGET_ARCH, mach);
}
  27025. /* Command line processing. */
  27026. /* md_parse_option
  27027. Invocation line includes a switch not recognized by the base assembler.
  27028. See if it's a processor-specific option.
  27029. This routine is somewhat complicated by the need for backwards
  27030. compatibility (since older releases of gcc can't be changed).
  27031. The new options try to make the interface as compatible as
  27032. possible with GCC.
  27033. New options (supported) are:
  27034. -mcpu=<cpu name> Assemble for selected processor
  27035. -march=<architecture name> Assemble for selected architecture
  27036. -mfpu=<fpu architecture> Assemble for selected FPU.
  27037. -EB/-mbig-endian Big-endian
  27038. -EL/-mlittle-endian Little-endian
  27039. -k Generate PIC code
  27040. -mthumb Start in Thumb mode
  27041. -mthumb-interwork Code supports ARM/Thumb interworking
  27042. -m[no-]warn-deprecated Warn about deprecated features
  27043. -m[no-]warn-syms Warn when symbols match instructions
  27044. For now we will also provide support for:
  27045. -mapcs-32 32-bit Program counter
  27046. -mapcs-26 26-bit Program counter
   -mapcs-float		  Floats passed in FP registers
  27048. -mapcs-reentrant Reentrant code
  27049. -matpcs
  27050. (sometime these will probably be replaced with -mapcs=<list of options>
  27051. and -matpcs=<list of options>)
  27052. The remaining options are only supported for back-wards compatibility.
  27053. Cpu variants, the arm part is optional:
  27054. -m[arm]1 Currently not supported.
  27055. -m[arm]2, -m[arm]250 Arm 2 and Arm 250 processor
  27056. -m[arm]3 Arm 3 processor
  27057. -m[arm]6[xx], Arm 6 processors
  27058. -m[arm]7[xx][t][[d]m] Arm 7 processors
  27059. -m[arm]8[10] Arm 8 processors
  27060. -m[arm]9[20][tdmi] Arm 9 processors
  27061. -mstrongarm[110[0]] StrongARM processors
  27062. -mxscale XScale processors
  27063. -m[arm]v[2345[t[e]]] Arm architectures
  27064. -mall All (except the ARM1)
  27065. FP variants:
  27066. -mfpa10, -mfpa11 FPA10 and 11 co-processor instructions
  27067. -mfpe-old (No float load/store multiples)
  27068. -mvfpxd VFP Single precision
  27069. -mvfp All VFP
  27070. -mno-fpu Disable all floating point instructions
  27071. The following CPU names are recognized:
  27072. arm1, arm2, arm250, arm3, arm6, arm600, arm610, arm620,
  27073. arm7, arm7m, arm7d, arm7dm, arm7di, arm7dmi, arm70, arm700,
  27074. arm700i, arm710 arm710t, arm720, arm720t, arm740t, arm710c,
  27075. arm7100, arm7500, arm7500fe, arm7tdmi, arm8, arm810, arm9,
  27076. arm920, arm920t, arm940t, arm946, arm966, arm9tdmi, arm9e,
  27077. arm10t arm10e, arm1020t, arm1020e, arm10200e,
  27078. strongarm, strongarm110, strongarm1100, strongarm1110, xscale.
  27079. */
/* Short options: -m takes an argument, -k (PIC) takes none.  */
const char * md_shortopts = "m:k";

/* Endianness switches are only offered when the target supports the
   corresponding byte order: both for bi-endian targets, otherwise only
   the one matching the default.  */
#ifdef ARM_BI_ENDIAN
#define OPTION_EB (OPTION_MD_BASE + 0)
#define OPTION_EL (OPTION_MD_BASE + 1)
#else
#if TARGET_BYTES_BIG_ENDIAN
#define OPTION_EB (OPTION_MD_BASE + 0)
#else
#define OPTION_EL (OPTION_MD_BASE + 1)
#endif
#endif
#define OPTION_FIX_V4BX (OPTION_MD_BASE + 2)
#define OPTION_FDPIC (OPTION_MD_BASE + 3)

/* Long options handled by md_parse_option.  */
struct option md_longopts[] =
{
#ifdef OPTION_EB
  {"EB", no_argument, NULL, OPTION_EB},
#endif
#ifdef OPTION_EL
  {"EL", no_argument, NULL, OPTION_EL},
#endif
  {"fix-v4bx", no_argument, NULL, OPTION_FIX_V4BX},
#ifdef OBJ_ELF
  {"fdpic", no_argument, NULL, OPTION_FDPIC},
#endif
  {NULL, no_argument, NULL, 0}
};

size_t md_longopts_size = sizeof (md_longopts);
  27108. struct arm_option_table
  27109. {
  27110. const char * option; /* Option name to match. */
  27111. const char * help; /* Help information. */
  27112. int * var; /* Variable to change. */
  27113. int value; /* What to change it to. */
  27114. const char * deprecated; /* If non-null, print this message. */
  27115. };
  27116. struct arm_option_table arm_opts[] =
  27117. {
  27118. {"k", N_("generate PIC code"), &pic_code, 1, NULL},
  27119. {"mthumb", N_("assemble Thumb code"), &thumb_mode, 1, NULL},
  27120. {"mthumb-interwork", N_("support ARM/Thumb interworking"),
  27121. &support_interwork, 1, NULL},
  27122. {"mapcs-32", N_("code uses 32-bit program counter"), &uses_apcs_26, 0, NULL},
  27123. {"mapcs-26", N_("code uses 26-bit program counter"), &uses_apcs_26, 1, NULL},
  27124. {"mapcs-float", N_("floating point args are in fp regs"), &uses_apcs_float,
  27125. 1, NULL},
  27126. {"mapcs-reentrant", N_("re-entrant code"), &pic_code, 1, NULL},
  27127. {"matpcs", N_("code is ATPCS conformant"), &atpcs, 1, NULL},
  27128. {"mbig-endian", N_("assemble for big-endian"), &target_big_endian, 1, NULL},
  27129. {"mlittle-endian", N_("assemble for little-endian"), &target_big_endian, 0,
  27130. NULL},
  27131. /* These are recognized by the assembler, but have no affect on code. */
  27132. {"mapcs-frame", N_("use frame pointer"), NULL, 0, NULL},
  27133. {"mapcs-stack-check", N_("use stack size checking"), NULL, 0, NULL},
  27134. {"mwarn-deprecated", NULL, &warn_on_deprecated, 1, NULL},
  27135. {"mno-warn-deprecated", N_("do not warn on use of deprecated feature"),
  27136. &warn_on_deprecated, 0, NULL},
  27137. {"mwarn-restrict-it", N_("warn about performance deprecated IT instructions"
  27138. " in ARMv8-A and ARMv8-R"), &warn_on_restrict_it, 1, NULL},
  27139. {"mno-warn-restrict-it", NULL, &warn_on_restrict_it, 0, NULL},
  27140. {"mwarn-syms", N_("warn about symbols that match instruction names [default]"), (int *) (& flag_warn_syms), true, NULL},
  27141. {"mno-warn-syms", N_("disable warnings about symobls that match instructions"), (int *) (& flag_warn_syms), false, NULL},
  27142. {NULL, NULL, NULL, 0, NULL}
  27143. };
/* Table entry describing a legacy (deprecated) CPU/FPU command-line
   option: when OPTION matches, *VAR is pointed at VALUE and the
   DEPRECATED message suggesting the modern replacement is printed.  */
struct arm_legacy_option_table
{
  const char *              option;		/* Option name to match.  */
  const arm_feature_set **  var;		/* Variable to change.  */
  const arm_feature_set     value;		/* What to change it to.  */
  const char *              deprecated;		/* If non-null, print this message.  */
};
/* Legacy -m<cpu>/-m<arch>/-m<fpu> options kept only for backwards
   compatibility; each sets legacy_cpu or legacy_fpu and prints a
   deprecation message naming the modern -mcpu=/-march=/-mfpu= form.  */
const struct arm_legacy_option_table arm_legacy_opts[] =
{
  /* DON'T add any new processors to this list -- we want the whole list
     to go away...  Add them to the processors table instead.  */
  {"marm1",	 &legacy_cpu, ARM_ARCH_V1,  N_("use -mcpu=arm1")},
  {"m1",	 &legacy_cpu, ARM_ARCH_V1,  N_("use -mcpu=arm1")},
  {"marm2",	 &legacy_cpu, ARM_ARCH_V2,  N_("use -mcpu=arm2")},
  {"m2",	 &legacy_cpu, ARM_ARCH_V2,  N_("use -mcpu=arm2")},
  {"marm250",	 &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm250")},
  {"m250",	 &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm250")},
  {"marm3",	 &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm3")},
  {"m3",	 &legacy_cpu, ARM_ARCH_V2S, N_("use -mcpu=arm3")},
  {"marm6",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm6")},
  {"m6",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm6")},
  {"marm600",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm600")},
  {"m600",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm600")},
  {"marm610",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm610")},
  {"m610",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm610")},
  {"marm620",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm620")},
  {"m620",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm620")},
  {"marm7",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7")},
  {"m7",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7")},
  {"marm70",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm70")},
  {"m70",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm70")},
  {"marm700",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm700")},
  {"m700",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm700")},
  {"marm700i",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm700i")},
  {"m700i",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm700i")},
  {"marm710",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm710")},
  {"m710",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm710")},
  {"marm710c",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm710c")},
  {"m710c",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm710c")},
  {"marm720",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm720")},
  {"m720",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm720")},
  {"marm7d",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7d")},
  {"m7d",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7d")},
  {"marm7di",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7di")},
  {"m7di",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7di")},
  {"marm7m",	 &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7m")},
  {"m7m",	 &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7m")},
  {"marm7dm",	 &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dm")},
  {"m7dm",	 &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dm")},
  {"marm7dmi",	 &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dmi")},
  {"m7dmi",	 &legacy_cpu, ARM_ARCH_V3M, N_("use -mcpu=arm7dmi")},
  {"marm7100",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7100")},
  {"m7100",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7100")},
  {"marm7500",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7500")},
  {"m7500",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7500")},
  {"marm7500fe", &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7500fe")},
  {"m7500fe",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -mcpu=arm7500fe")},
  {"marm7t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
  {"m7t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
  {"marm7tdmi",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
  {"m7tdmi",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm7tdmi")},
  {"marm710t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm710t")},
  {"m710t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm710t")},
  {"marm720t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm720t")},
  {"m720t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm720t")},
  {"marm740t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm740t")},
  {"m740t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm740t")},
  {"marm8",	 &legacy_cpu, ARM_ARCH_V4,  N_("use -mcpu=arm8")},
  {"m8",	 &legacy_cpu, ARM_ARCH_V4,  N_("use -mcpu=arm8")},
  {"marm810",	 &legacy_cpu, ARM_ARCH_V4,  N_("use -mcpu=arm810")},
  {"m810",	 &legacy_cpu, ARM_ARCH_V4,  N_("use -mcpu=arm810")},
  {"marm9",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9")},
  {"m9",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9")},
  {"marm9tdmi",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9tdmi")},
  {"m9tdmi",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm9tdmi")},
  {"marm920",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm920")},
  {"m920",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm920")},
  {"marm940",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm940")},
  {"m940",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -mcpu=arm940")},
  {"mstrongarm", &legacy_cpu, ARM_ARCH_V4,  N_("use -mcpu=strongarm")},
  {"mstrongarm110", &legacy_cpu, ARM_ARCH_V4,
   N_("use -mcpu=strongarm110")},
  {"mstrongarm1100", &legacy_cpu, ARM_ARCH_V4,
   N_("use -mcpu=strongarm1100")},
  {"mstrongarm1110", &legacy_cpu, ARM_ARCH_V4,
   N_("use -mcpu=strongarm1110")},
  {"mxscale",	 &legacy_cpu, ARM_ARCH_XSCALE, N_("use -mcpu=xscale")},
  {"miwmmxt",	 &legacy_cpu, ARM_ARCH_IWMMXT, N_("use -mcpu=iwmmxt")},
  {"mall",	 &legacy_cpu, ARM_ANY,	    N_("use -mcpu=all")},

  /* Architecture variants -- don't add any more to this list either.  */
  {"mv2",	 &legacy_cpu, ARM_ARCH_V2,  N_("use -march=armv2")},
  {"marmv2",	 &legacy_cpu, ARM_ARCH_V2,  N_("use -march=armv2")},
  {"mv2a",	 &legacy_cpu, ARM_ARCH_V2S, N_("use -march=armv2a")},
  {"marmv2a",	 &legacy_cpu, ARM_ARCH_V2S, N_("use -march=armv2a")},
  {"mv3",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -march=armv3")},
  {"marmv3",	 &legacy_cpu, ARM_ARCH_V3,  N_("use -march=armv3")},
  {"mv3m",	 &legacy_cpu, ARM_ARCH_V3M, N_("use -march=armv3m")},
  {"marmv3m",	 &legacy_cpu, ARM_ARCH_V3M, N_("use -march=armv3m")},
  {"mv4",	 &legacy_cpu, ARM_ARCH_V4,  N_("use -march=armv4")},
  {"marmv4",	 &legacy_cpu, ARM_ARCH_V4,  N_("use -march=armv4")},
  {"mv4t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -march=armv4t")},
  {"marmv4t",	 &legacy_cpu, ARM_ARCH_V4T, N_("use -march=armv4t")},
  {"mv5",	 &legacy_cpu, ARM_ARCH_V5,  N_("use -march=armv5")},
  {"marmv5",	 &legacy_cpu, ARM_ARCH_V5,  N_("use -march=armv5")},
  {"mv5t",	 &legacy_cpu, ARM_ARCH_V5T, N_("use -march=armv5t")},
  {"marmv5t",	 &legacy_cpu, ARM_ARCH_V5T, N_("use -march=armv5t")},
  {"mv5e",	 &legacy_cpu, ARM_ARCH_V5TE, N_("use -march=armv5te")},
  {"marmv5e",	 &legacy_cpu, ARM_ARCH_V5TE, N_("use -march=armv5te")},

  /* Floating point variants -- don't add any more to this list either.  */
  {"mfpe-old",	 &legacy_fpu, FPU_ARCH_FPE, N_("use -mfpu=fpe")},
  {"mfpa10",	 &legacy_fpu, FPU_ARCH_FPA, N_("use -mfpu=fpa10")},
  {"mfpa11",	 &legacy_fpu, FPU_ARCH_FPA, N_("use -mfpu=fpa11")},
  {"mno-fpu",	 &legacy_fpu, ARM_ARCH_NONE,
   N_("use either -mfpu=softfpa or -mfpu=softvfp")},

  {NULL, NULL, ARM_ARCH_NONE, NULL}
};
/* Table entry describing one -mcpu= value: the feature sets it implies
   and the canonical name reported in build attributes.  */
struct arm_cpu_option_table
{
  const char *		name;		/* CPU name accepted by -mcpu=.  */
  size_t		name_len;	/* Length of NAME (precomputed).  */
  const arm_feature_set	value;		/* Architecture feature set.  */
  const arm_feature_set	ext;		/* Extension feature set.  */
  /* For some CPUs we assume an FPU unless the user explicitly sets
     -mfpu=...	*/
  const arm_feature_set	default_fpu;
  /* The canonical name of the CPU, or NULL to use NAME converted to upper
     case.  */
  const char *		canonical_name;
};

/* This list should, at a minimum, contain all the cpu names
   recognized by GCC.  */
#define ARM_CPU_OPT(N, CN, V, E, DF) { N, sizeof (N) - 1, V, E, DF, CN }
  27276. static const struct arm_cpu_option_table arm_cpus[] =
  27277. {
  27278. ARM_CPU_OPT ("all", NULL, ARM_ANY,
  27279. ARM_ARCH_NONE,
  27280. FPU_ARCH_FPA),
  27281. ARM_CPU_OPT ("arm1", NULL, ARM_ARCH_V1,
  27282. ARM_ARCH_NONE,
  27283. FPU_ARCH_FPA),
  27284. ARM_CPU_OPT ("arm2", NULL, ARM_ARCH_V2,
  27285. ARM_ARCH_NONE,
  27286. FPU_ARCH_FPA),
  27287. ARM_CPU_OPT ("arm250", NULL, ARM_ARCH_V2S,
  27288. ARM_ARCH_NONE,
  27289. FPU_ARCH_FPA),
  27290. ARM_CPU_OPT ("arm3", NULL, ARM_ARCH_V2S,
  27291. ARM_ARCH_NONE,
  27292. FPU_ARCH_FPA),
  27293. ARM_CPU_OPT ("arm6", NULL, ARM_ARCH_V3,
  27294. ARM_ARCH_NONE,
  27295. FPU_ARCH_FPA),
  27296. ARM_CPU_OPT ("arm60", NULL, ARM_ARCH_V3,
  27297. ARM_ARCH_NONE,
  27298. FPU_ARCH_FPA),
  27299. ARM_CPU_OPT ("arm600", NULL, ARM_ARCH_V3,
  27300. ARM_ARCH_NONE,
  27301. FPU_ARCH_FPA),
  27302. ARM_CPU_OPT ("arm610", NULL, ARM_ARCH_V3,
  27303. ARM_ARCH_NONE,
  27304. FPU_ARCH_FPA),
  27305. ARM_CPU_OPT ("arm620", NULL, ARM_ARCH_V3,
  27306. ARM_ARCH_NONE,
  27307. FPU_ARCH_FPA),
  27308. ARM_CPU_OPT ("arm7", NULL, ARM_ARCH_V3,
  27309. ARM_ARCH_NONE,
  27310. FPU_ARCH_FPA),
  27311. ARM_CPU_OPT ("arm7m", NULL, ARM_ARCH_V3M,
  27312. ARM_ARCH_NONE,
  27313. FPU_ARCH_FPA),
  27314. ARM_CPU_OPT ("arm7d", NULL, ARM_ARCH_V3,
  27315. ARM_ARCH_NONE,
  27316. FPU_ARCH_FPA),
  27317. ARM_CPU_OPT ("arm7dm", NULL, ARM_ARCH_V3M,
  27318. ARM_ARCH_NONE,
  27319. FPU_ARCH_FPA),
  27320. ARM_CPU_OPT ("arm7di", NULL, ARM_ARCH_V3,
  27321. ARM_ARCH_NONE,
  27322. FPU_ARCH_FPA),
  27323. ARM_CPU_OPT ("arm7dmi", NULL, ARM_ARCH_V3M,
  27324. ARM_ARCH_NONE,
  27325. FPU_ARCH_FPA),
  27326. ARM_CPU_OPT ("arm70", NULL, ARM_ARCH_V3,
  27327. ARM_ARCH_NONE,
  27328. FPU_ARCH_FPA),
  27329. ARM_CPU_OPT ("arm700", NULL, ARM_ARCH_V3,
  27330. ARM_ARCH_NONE,
  27331. FPU_ARCH_FPA),
  27332. ARM_CPU_OPT ("arm700i", NULL, ARM_ARCH_V3,
  27333. ARM_ARCH_NONE,
  27334. FPU_ARCH_FPA),
  27335. ARM_CPU_OPT ("arm710", NULL, ARM_ARCH_V3,
  27336. ARM_ARCH_NONE,
  27337. FPU_ARCH_FPA),
  27338. ARM_CPU_OPT ("arm710t", NULL, ARM_ARCH_V4T,
  27339. ARM_ARCH_NONE,
  27340. FPU_ARCH_FPA),
  27341. ARM_CPU_OPT ("arm720", NULL, ARM_ARCH_V3,
  27342. ARM_ARCH_NONE,
  27343. FPU_ARCH_FPA),
  27344. ARM_CPU_OPT ("arm720t", NULL, ARM_ARCH_V4T,
  27345. ARM_ARCH_NONE,
  27346. FPU_ARCH_FPA),
  27347. ARM_CPU_OPT ("arm740t", NULL, ARM_ARCH_V4T,
  27348. ARM_ARCH_NONE,
  27349. FPU_ARCH_FPA),
  27350. ARM_CPU_OPT ("arm710c", NULL, ARM_ARCH_V3,
  27351. ARM_ARCH_NONE,
  27352. FPU_ARCH_FPA),
  27353. ARM_CPU_OPT ("arm7100", NULL, ARM_ARCH_V3,
  27354. ARM_ARCH_NONE,
  27355. FPU_ARCH_FPA),
  27356. ARM_CPU_OPT ("arm7500", NULL, ARM_ARCH_V3,
  27357. ARM_ARCH_NONE,
  27358. FPU_ARCH_FPA),
  27359. ARM_CPU_OPT ("arm7500fe", NULL, ARM_ARCH_V3,
  27360. ARM_ARCH_NONE,
  27361. FPU_ARCH_FPA),
  27362. ARM_CPU_OPT ("arm7t", NULL, ARM_ARCH_V4T,
  27363. ARM_ARCH_NONE,
  27364. FPU_ARCH_FPA),
  27365. ARM_CPU_OPT ("arm7tdmi", NULL, ARM_ARCH_V4T,
  27366. ARM_ARCH_NONE,
  27367. FPU_ARCH_FPA),
  27368. ARM_CPU_OPT ("arm7tdmi-s", NULL, ARM_ARCH_V4T,
  27369. ARM_ARCH_NONE,
  27370. FPU_ARCH_FPA),
  27371. ARM_CPU_OPT ("arm8", NULL, ARM_ARCH_V4,
  27372. ARM_ARCH_NONE,
  27373. FPU_ARCH_FPA),
  27374. ARM_CPU_OPT ("arm810", NULL, ARM_ARCH_V4,
  27375. ARM_ARCH_NONE,
  27376. FPU_ARCH_FPA),
  27377. ARM_CPU_OPT ("strongarm", NULL, ARM_ARCH_V4,
  27378. ARM_ARCH_NONE,
  27379. FPU_ARCH_FPA),
  27380. ARM_CPU_OPT ("strongarm1", NULL, ARM_ARCH_V4,
  27381. ARM_ARCH_NONE,
  27382. FPU_ARCH_FPA),
  27383. ARM_CPU_OPT ("strongarm110", NULL, ARM_ARCH_V4,
  27384. ARM_ARCH_NONE,
  27385. FPU_ARCH_FPA),
  27386. ARM_CPU_OPT ("strongarm1100", NULL, ARM_ARCH_V4,
  27387. ARM_ARCH_NONE,
  27388. FPU_ARCH_FPA),
  27389. ARM_CPU_OPT ("strongarm1110", NULL, ARM_ARCH_V4,
  27390. ARM_ARCH_NONE,
  27391. FPU_ARCH_FPA),
  27392. ARM_CPU_OPT ("arm9", NULL, ARM_ARCH_V4T,
  27393. ARM_ARCH_NONE,
  27394. FPU_ARCH_FPA),
  27395. ARM_CPU_OPT ("arm920", "ARM920T", ARM_ARCH_V4T,
  27396. ARM_ARCH_NONE,
  27397. FPU_ARCH_FPA),
  27398. ARM_CPU_OPT ("arm920t", NULL, ARM_ARCH_V4T,
  27399. ARM_ARCH_NONE,
  27400. FPU_ARCH_FPA),
  27401. ARM_CPU_OPT ("arm922t", NULL, ARM_ARCH_V4T,
  27402. ARM_ARCH_NONE,
  27403. FPU_ARCH_FPA),
  27404. ARM_CPU_OPT ("arm940t", NULL, ARM_ARCH_V4T,
  27405. ARM_ARCH_NONE,
  27406. FPU_ARCH_FPA),
  27407. ARM_CPU_OPT ("arm9tdmi", NULL, ARM_ARCH_V4T,
  27408. ARM_ARCH_NONE,
  27409. FPU_ARCH_FPA),
  27410. ARM_CPU_OPT ("fa526", NULL, ARM_ARCH_V4,
  27411. ARM_ARCH_NONE,
  27412. FPU_ARCH_FPA),
  27413. ARM_CPU_OPT ("fa626", NULL, ARM_ARCH_V4,
  27414. ARM_ARCH_NONE,
  27415. FPU_ARCH_FPA),
  27416. /* For V5 or later processors we default to using VFP; but the user
  27417. should really set the FPU type explicitly. */
  27418. ARM_CPU_OPT ("arm9e-r0", NULL, ARM_ARCH_V5TExP,
  27419. ARM_ARCH_NONE,
  27420. FPU_ARCH_VFP_V2),
  27421. ARM_CPU_OPT ("arm9e", NULL, ARM_ARCH_V5TE,
  27422. ARM_ARCH_NONE,
  27423. FPU_ARCH_VFP_V2),
  27424. ARM_CPU_OPT ("arm926ej", "ARM926EJ-S", ARM_ARCH_V5TEJ,
  27425. ARM_ARCH_NONE,
  27426. FPU_ARCH_VFP_V2),
  27427. ARM_CPU_OPT ("arm926ejs", "ARM926EJ-S", ARM_ARCH_V5TEJ,
  27428. ARM_ARCH_NONE,
  27429. FPU_ARCH_VFP_V2),
  27430. ARM_CPU_OPT ("arm926ej-s", NULL, ARM_ARCH_V5TEJ,
  27431. ARM_ARCH_NONE,
  27432. FPU_ARCH_VFP_V2),
  27433. ARM_CPU_OPT ("arm946e-r0", NULL, ARM_ARCH_V5TExP,
  27434. ARM_ARCH_NONE,
  27435. FPU_ARCH_VFP_V2),
  27436. ARM_CPU_OPT ("arm946e", "ARM946E-S", ARM_ARCH_V5TE,
  27437. ARM_ARCH_NONE,
  27438. FPU_ARCH_VFP_V2),
  27439. ARM_CPU_OPT ("arm946e-s", NULL, ARM_ARCH_V5TE,
  27440. ARM_ARCH_NONE,
  27441. FPU_ARCH_VFP_V2),
  27442. ARM_CPU_OPT ("arm966e-r0", NULL, ARM_ARCH_V5TExP,
  27443. ARM_ARCH_NONE,
  27444. FPU_ARCH_VFP_V2),
  27445. ARM_CPU_OPT ("arm966e", "ARM966E-S", ARM_ARCH_V5TE,
  27446. ARM_ARCH_NONE,
  27447. FPU_ARCH_VFP_V2),
  27448. ARM_CPU_OPT ("arm966e-s", NULL, ARM_ARCH_V5TE,
  27449. ARM_ARCH_NONE,
  27450. FPU_ARCH_VFP_V2),
  27451. ARM_CPU_OPT ("arm968e-s", NULL, ARM_ARCH_V5TE,
  27452. ARM_ARCH_NONE,
  27453. FPU_ARCH_VFP_V2),
  27454. ARM_CPU_OPT ("arm10t", NULL, ARM_ARCH_V5T,
  27455. ARM_ARCH_NONE,
  27456. FPU_ARCH_VFP_V1),
  27457. ARM_CPU_OPT ("arm10tdmi", NULL, ARM_ARCH_V5T,
  27458. ARM_ARCH_NONE,
  27459. FPU_ARCH_VFP_V1),
  27460. ARM_CPU_OPT ("arm10e", NULL, ARM_ARCH_V5TE,
  27461. ARM_ARCH_NONE,
  27462. FPU_ARCH_VFP_V2),
  27463. ARM_CPU_OPT ("arm1020", "ARM1020E", ARM_ARCH_V5TE,
  27464. ARM_ARCH_NONE,
  27465. FPU_ARCH_VFP_V2),
  27466. ARM_CPU_OPT ("arm1020t", NULL, ARM_ARCH_V5T,
  27467. ARM_ARCH_NONE,
  27468. FPU_ARCH_VFP_V1),
  27469. ARM_CPU_OPT ("arm1020e", NULL, ARM_ARCH_V5TE,
  27470. ARM_ARCH_NONE,
  27471. FPU_ARCH_VFP_V2),
  27472. ARM_CPU_OPT ("arm1022e", NULL, ARM_ARCH_V5TE,
  27473. ARM_ARCH_NONE,
  27474. FPU_ARCH_VFP_V2),
  27475. ARM_CPU_OPT ("arm1026ejs", "ARM1026EJ-S", ARM_ARCH_V5TEJ,
  27476. ARM_ARCH_NONE,
  27477. FPU_ARCH_VFP_V2),
  27478. ARM_CPU_OPT ("arm1026ej-s", NULL, ARM_ARCH_V5TEJ,
  27479. ARM_ARCH_NONE,
  27480. FPU_ARCH_VFP_V2),
  27481. ARM_CPU_OPT ("fa606te", NULL, ARM_ARCH_V5TE,
  27482. ARM_ARCH_NONE,
  27483. FPU_ARCH_VFP_V2),
  27484. ARM_CPU_OPT ("fa616te", NULL, ARM_ARCH_V5TE,
  27485. ARM_ARCH_NONE,
  27486. FPU_ARCH_VFP_V2),
  27487. ARM_CPU_OPT ("fa626te", NULL, ARM_ARCH_V5TE,
  27488. ARM_ARCH_NONE,
  27489. FPU_ARCH_VFP_V2),
  27490. ARM_CPU_OPT ("fmp626", NULL, ARM_ARCH_V5TE,
  27491. ARM_ARCH_NONE,
  27492. FPU_ARCH_VFP_V2),
  27493. ARM_CPU_OPT ("fa726te", NULL, ARM_ARCH_V5TE,
  27494. ARM_ARCH_NONE,
  27495. FPU_ARCH_VFP_V2),
  27496. ARM_CPU_OPT ("arm1136js", "ARM1136J-S", ARM_ARCH_V6,
  27497. ARM_ARCH_NONE,
  27498. FPU_NONE),
  27499. ARM_CPU_OPT ("arm1136j-s", NULL, ARM_ARCH_V6,
  27500. ARM_ARCH_NONE,
  27501. FPU_NONE),
  27502. ARM_CPU_OPT ("arm1136jfs", "ARM1136JF-S", ARM_ARCH_V6,
  27503. ARM_ARCH_NONE,
  27504. FPU_ARCH_VFP_V2),
  27505. ARM_CPU_OPT ("arm1136jf-s", NULL, ARM_ARCH_V6,
  27506. ARM_ARCH_NONE,
  27507. FPU_ARCH_VFP_V2),
  27508. ARM_CPU_OPT ("mpcore", "MPCore", ARM_ARCH_V6K,
  27509. ARM_ARCH_NONE,
  27510. FPU_ARCH_VFP_V2),
  27511. ARM_CPU_OPT ("mpcorenovfp", "MPCore", ARM_ARCH_V6K,
  27512. ARM_ARCH_NONE,
  27513. FPU_NONE),
  27514. ARM_CPU_OPT ("arm1156t2-s", NULL, ARM_ARCH_V6T2,
  27515. ARM_ARCH_NONE,
  27516. FPU_NONE),
  27517. ARM_CPU_OPT ("arm1156t2f-s", NULL, ARM_ARCH_V6T2,
  27518. ARM_ARCH_NONE,
  27519. FPU_ARCH_VFP_V2),
  27520. ARM_CPU_OPT ("arm1176jz-s", NULL, ARM_ARCH_V6KZ,
  27521. ARM_ARCH_NONE,
  27522. FPU_NONE),
  27523. ARM_CPU_OPT ("arm1176jzf-s", NULL, ARM_ARCH_V6KZ,
  27524. ARM_ARCH_NONE,
  27525. FPU_ARCH_VFP_V2),
  27526. ARM_CPU_OPT ("cortex-a5", "Cortex-A5", ARM_ARCH_V7A,
  27527. ARM_FEATURE_CORE_LOW (ARM_EXT_MP | ARM_EXT_SEC),
  27528. FPU_NONE),
  27529. ARM_CPU_OPT ("cortex-a7", "Cortex-A7", ARM_ARCH_V7VE,
  27530. ARM_ARCH_NONE,
  27531. FPU_ARCH_NEON_VFP_V4),
  27532. ARM_CPU_OPT ("cortex-a8", "Cortex-A8", ARM_ARCH_V7A,
  27533. ARM_FEATURE_CORE_LOW (ARM_EXT_SEC),
  27534. ARM_FEATURE_COPROC (FPU_VFP_V3 | FPU_NEON_EXT_V1)),
  27535. ARM_CPU_OPT ("cortex-a9", "Cortex-A9", ARM_ARCH_V7A,
  27536. ARM_FEATURE_CORE_LOW (ARM_EXT_MP | ARM_EXT_SEC),
  27537. ARM_FEATURE_COPROC (FPU_VFP_V3 | FPU_NEON_EXT_V1)),
  27538. ARM_CPU_OPT ("cortex-a12", "Cortex-A12", ARM_ARCH_V7VE,
  27539. ARM_ARCH_NONE,
  27540. FPU_ARCH_NEON_VFP_V4),
  27541. ARM_CPU_OPT ("cortex-a15", "Cortex-A15", ARM_ARCH_V7VE,
  27542. ARM_ARCH_NONE,
  27543. FPU_ARCH_NEON_VFP_V4),
  27544. ARM_CPU_OPT ("cortex-a17", "Cortex-A17", ARM_ARCH_V7VE,
  27545. ARM_ARCH_NONE,
  27546. FPU_ARCH_NEON_VFP_V4),
  27547. ARM_CPU_OPT ("cortex-a32", "Cortex-A32", ARM_ARCH_V8A,
  27548. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27549. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8),
  27550. ARM_CPU_OPT ("cortex-a35", "Cortex-A35", ARM_ARCH_V8A,
  27551. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27552. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8),
  27553. ARM_CPU_OPT ("cortex-a53", "Cortex-A53", ARM_ARCH_V8A,
  27554. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27555. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8),
  27556. ARM_CPU_OPT ("cortex-a55", "Cortex-A55", ARM_ARCH_V8_2A,
  27557. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST),
  27558. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_DOTPROD),
  27559. ARM_CPU_OPT ("cortex-a57", "Cortex-A57", ARM_ARCH_V8A,
  27560. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27561. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8),
  27562. ARM_CPU_OPT ("cortex-a72", "Cortex-A72", ARM_ARCH_V8A,
  27563. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27564. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8),
  27565. ARM_CPU_OPT ("cortex-a73", "Cortex-A73", ARM_ARCH_V8A,
  27566. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27567. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8),
  27568. ARM_CPU_OPT ("cortex-a75", "Cortex-A75", ARM_ARCH_V8_2A,
  27569. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST),
  27570. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_DOTPROD),
  27571. ARM_CPU_OPT ("cortex-a76", "Cortex-A76", ARM_ARCH_V8_2A,
  27572. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST),
  27573. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_DOTPROD),
  27574. ARM_CPU_OPT ("cortex-a76ae", "Cortex-A76AE", ARM_ARCH_V8_2A,
  27575. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST),
  27576. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_DOTPROD),
  27577. ARM_CPU_OPT ("cortex-a77", "Cortex-A77", ARM_ARCH_V8_2A,
  27578. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST),
  27579. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_DOTPROD),
  27580. ARM_CPU_OPT ("cortex-a78", "Cortex-A78", ARM_ARCH_V8_2A,
  27581. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST | ARM_EXT2_SB),
  27582. FPU_ARCH_DOTPROD_NEON_VFP_ARMV8),
  27583. ARM_CPU_OPT ("cortex-a78ae", "Cortex-A78AE", ARM_ARCH_V8_2A,
  27584. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST | ARM_EXT2_SB),
  27585. FPU_ARCH_DOTPROD_NEON_VFP_ARMV8),
  27586. ARM_CPU_OPT ("cortex-a78c", "Cortex-A78C", ARM_ARCH_V8_2A,
  27587. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST | ARM_EXT2_SB),
  27588. FPU_ARCH_DOTPROD_NEON_VFP_ARMV8),
  27589. ARM_CPU_OPT ("cortex-a710", "Cortex-A710", ARM_ARCH_V9A,
  27590. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST
  27591. | ARM_EXT2_BF16
  27592. | ARM_EXT2_I8MM),
  27593. FPU_ARCH_DOTPROD_NEON_VFP_ARMV8),
  27594. ARM_CPU_OPT ("ares", "Ares", ARM_ARCH_V8_2A,
  27595. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST),
  27596. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_DOTPROD),
  27597. ARM_CPU_OPT ("cortex-r4", "Cortex-R4", ARM_ARCH_V7R,
  27598. ARM_ARCH_NONE,
  27599. FPU_NONE),
  27600. ARM_CPU_OPT ("cortex-r4f", "Cortex-R4F", ARM_ARCH_V7R,
  27601. ARM_ARCH_NONE,
  27602. FPU_ARCH_VFP_V3D16),
  27603. ARM_CPU_OPT ("cortex-r5", "Cortex-R5", ARM_ARCH_V7R,
  27604. ARM_FEATURE_CORE_LOW (ARM_EXT_ADIV),
  27605. FPU_NONE),
  27606. ARM_CPU_OPT ("cortex-r7", "Cortex-R7", ARM_ARCH_V7R,
  27607. ARM_FEATURE_CORE_LOW (ARM_EXT_ADIV),
  27608. FPU_ARCH_VFP_V3D16),
  27609. ARM_CPU_OPT ("cortex-r8", "Cortex-R8", ARM_ARCH_V7R,
  27610. ARM_FEATURE_CORE_LOW (ARM_EXT_ADIV),
  27611. FPU_ARCH_VFP_V3D16),
  27612. ARM_CPU_OPT ("cortex-r52", "Cortex-R52", ARM_ARCH_V8R,
  27613. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27614. FPU_ARCH_NEON_VFP_ARMV8),
  27615. ARM_CPU_OPT ("cortex-r52plus", "Cortex-R52+", ARM_ARCH_V8R,
  27616. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27617. FPU_ARCH_NEON_VFP_ARMV8),
  27618. ARM_CPU_OPT ("cortex-m35p", "Cortex-M35P", ARM_ARCH_V8M_MAIN,
  27619. ARM_FEATURE_CORE_LOW (ARM_EXT_V5ExP | ARM_EXT_V6_DSP),
  27620. FPU_NONE),
  27621. ARM_CPU_OPT ("cortex-m33", "Cortex-M33", ARM_ARCH_V8M_MAIN,
  27622. ARM_FEATURE_CORE_LOW (ARM_EXT_V5ExP | ARM_EXT_V6_DSP),
  27623. FPU_NONE),
  27624. ARM_CPU_OPT ("cortex-m23", "Cortex-M23", ARM_ARCH_V8M_BASE,
  27625. ARM_ARCH_NONE,
  27626. FPU_NONE),
  27627. ARM_CPU_OPT ("cortex-m7", "Cortex-M7", ARM_ARCH_V7EM,
  27628. ARM_ARCH_NONE,
  27629. FPU_NONE),
  27630. ARM_CPU_OPT ("cortex-m4", "Cortex-M4", ARM_ARCH_V7EM,
  27631. ARM_ARCH_NONE,
  27632. FPU_NONE),
  27633. ARM_CPU_OPT ("cortex-m3", "Cortex-M3", ARM_ARCH_V7M,
  27634. ARM_ARCH_NONE,
  27635. FPU_NONE),
  27636. ARM_CPU_OPT ("cortex-m1", "Cortex-M1", ARM_ARCH_V6SM,
  27637. ARM_ARCH_NONE,
  27638. FPU_NONE),
  27639. ARM_CPU_OPT ("cortex-m0", "Cortex-M0", ARM_ARCH_V6SM,
  27640. ARM_ARCH_NONE,
  27641. FPU_NONE),
  27642. ARM_CPU_OPT ("cortex-m0plus", "Cortex-M0+", ARM_ARCH_V6SM,
  27643. ARM_ARCH_NONE,
  27644. FPU_NONE),
  27645. ARM_CPU_OPT ("cortex-x1", "Cortex-X1", ARM_ARCH_V8_2A,
  27646. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST | ARM_EXT2_SB),
  27647. FPU_ARCH_DOTPROD_NEON_VFP_ARMV8),
  27648. ARM_CPU_OPT ("exynos-m1", "Samsung Exynos M1", ARM_ARCH_V8A,
  27649. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27650. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8),
  27651. ARM_CPU_OPT ("neoverse-n1", "Neoverse N1", ARM_ARCH_V8_2A,
  27652. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST),
  27653. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_DOTPROD),
  27654. ARM_CPU_OPT ("neoverse-n2", "Neoverse N2", ARM_ARCH_V8_5A,
  27655. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST
  27656. | ARM_EXT2_BF16
  27657. | ARM_EXT2_I8MM),
  27658. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_4),
  27659. ARM_CPU_OPT ("neoverse-v1", "Neoverse V1", ARM_ARCH_V8_4A,
  27660. ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST
  27661. | ARM_EXT2_BF16
  27662. | ARM_EXT2_I8MM),
  27663. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_4),
  27664. /* ??? XSCALE is really an architecture. */
  27665. ARM_CPU_OPT ("xscale", NULL, ARM_ARCH_XSCALE,
  27666. ARM_ARCH_NONE,
  27667. FPU_ARCH_VFP_V2),
  27668. /* ??? iwmmxt is not a processor. */
  27669. ARM_CPU_OPT ("iwmmxt", NULL, ARM_ARCH_IWMMXT,
  27670. ARM_ARCH_NONE,
  27671. FPU_ARCH_VFP_V2),
  27672. ARM_CPU_OPT ("iwmmxt2", NULL, ARM_ARCH_IWMMXT2,
  27673. ARM_ARCH_NONE,
  27674. FPU_ARCH_VFP_V2),
  27675. ARM_CPU_OPT ("i80200", NULL, ARM_ARCH_XSCALE,
  27676. ARM_ARCH_NONE,
  27677. FPU_ARCH_VFP_V2),
  27678. /* Maverick. */
  27679. ARM_CPU_OPT ("ep9312", "ARM920T",
  27680. ARM_FEATURE_LOW (ARM_AEXT_V4T, ARM_CEXT_MAVERICK),
  27681. ARM_ARCH_NONE, FPU_ARCH_MAVERICK),
  27682. /* Marvell processors. */
  27683. ARM_CPU_OPT ("marvell-pj4", NULL, ARM_ARCH_V7A,
  27684. ARM_FEATURE_CORE_LOW (ARM_EXT_MP | ARM_EXT_SEC),
  27685. FPU_ARCH_VFP_V3D16),
  27686. ARM_CPU_OPT ("marvell-whitney", NULL, ARM_ARCH_V7A,
  27687. ARM_FEATURE_CORE_LOW (ARM_EXT_MP | ARM_EXT_SEC),
  27688. FPU_ARCH_NEON_VFP_V4),
  27689. /* APM X-Gene family. */
  27690. ARM_CPU_OPT ("xgene1", "APM X-Gene 1", ARM_ARCH_V8A,
  27691. ARM_ARCH_NONE,
  27692. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8),
  27693. ARM_CPU_OPT ("xgene2", "APM X-Gene 2", ARM_ARCH_V8A,
  27694. ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC),
  27695. FPU_ARCH_CRYPTO_NEON_VFP_ARMV8),
  27696. { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE, ARM_ARCH_NONE, NULL }
  27697. };
  27698. #undef ARM_CPU_OPT
/* One architecture-specific extension option (e.g. "+fp", "+simd").
   MERGE holds the feature bits enabled by "+NAME"; CLEAR holds the bits
   disabled by "+noNAME".  */
struct arm_ext_table
{
  const char *		  name;	      /* Extension name, without the '+'.  */
  size_t		  name_len;   /* Length of NAME, precomputed.  */
  const arm_feature_set	  merge;      /* Bits set by "+NAME".  */
  const arm_feature_set	  clear;      /* Bits cleared by "+noNAME".  */
};
/* One -march=/.arch architecture option: its feature-set value, the FPU
   assumed when none is specified, and an optional table of the extensions
   ("+ext") valid for that architecture (NULL if none).  */
struct arm_arch_option_table
{
  const char *			  name;	       /* Architecture name.  */
  size_t			  name_len;    /* Length of NAME, precomputed.  */
  const arm_feature_set		  value;       /* Architecture feature set.  */
  const arm_feature_set		  default_fpu; /* FPU assumed by default.  */
  const struct arm_ext_table *	  ext_table;   /* Valid "+ext" options, or NULL.  */
};
/* Used to add support for +E and +noE extension.  */
#define ARM_EXT(E, M, C) { E, sizeof (E) - 1, M, C }
/* Used to add support for a +E extension.  */
#define ARM_ADD(E, M) { E, sizeof(E) - 1, M, ARM_ARCH_NONE }
/* Used to add support for a +noE extension.  */
#define ARM_REMOVE(E, C) { E, sizeof(E) -1, ARM_ARCH_NONE, C }
/* Every FP-related feature bit: all coprocessor bits except the FPU
   endianness ones, plus the FP16 core bits.  Used as the CLEAR set so
   that "+nofp" removes any FP variant.  */
#define ALL_FP ARM_FEATURE (0, ARM_EXT2_FP16_INST | ARM_EXT2_FP16_FML, \
			    ~0 & ~FPU_ENDIAN_PURE)
/* Extensions valid for -march=armv5te (and the v5TE..v6 variants that
   share this table): only "+fp"/"+nofp".  */
static const struct arm_ext_table armv5te_ext_table[] =
{
  ARM_EXT ("fp", FPU_ARCH_VFP_V2, ALL_FP),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv7: only "+fp"/"+nofp".  */
static const struct arm_ext_table armv7_ext_table[] =
{
  ARM_EXT ("fp", FPU_ARCH_VFP_V3D16, ALL_FP),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv7ve.  */
static const struct arm_ext_table armv7ve_ext_table[] =
{
  ARM_EXT ("fp", FPU_ARCH_VFP_V4D16, ALL_FP),
  ARM_ADD ("vfpv3-d16", FPU_ARCH_VFP_V3D16),
  ARM_ADD ("vfpv3", FPU_ARCH_VFP_V3),
  ARM_ADD ("vfpv3-d16-fp16", FPU_ARCH_VFP_V3D16_FP16),
  ARM_ADD ("vfpv3-fp16", FPU_ARCH_VFP_V3_FP16),
  ARM_ADD ("vfpv4-d16", FPU_ARCH_VFP_V4D16),	/* Alias for +fp.  */
  ARM_ADD ("vfpv4", FPU_ARCH_VFP_V4),
  ARM_EXT ("simd", FPU_ARCH_NEON_VFP_V4,
	   ARM_FEATURE_COPROC (FPU_NEON_EXT_V1 | FPU_NEON_EXT_FMA)),
  /* Aliases for +simd.  */
  ARM_ADD ("neon-vfpv4", FPU_ARCH_NEON_VFP_V4),
  ARM_ADD ("neon", FPU_ARCH_VFP_V3_PLUS_NEON_V1),
  ARM_ADD ("neon-vfpv3", FPU_ARCH_VFP_V3_PLUS_NEON_V1),
  ARM_ADD ("neon-fp16", FPU_ARCH_NEON_FP16),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv7-a (and "armv7a").  */
static const struct arm_ext_table armv7a_ext_table[] =
{
  ARM_EXT ("fp", FPU_ARCH_VFP_V3D16, ALL_FP),
  ARM_ADD ("vfpv3-d16", FPU_ARCH_VFP_V3D16),	/* Alias for +fp.  */
  ARM_ADD ("vfpv3", FPU_ARCH_VFP_V3),
  ARM_ADD ("vfpv3-d16-fp16", FPU_ARCH_VFP_V3D16_FP16),
  ARM_ADD ("vfpv3-fp16", FPU_ARCH_VFP_V3_FP16),
  ARM_ADD ("vfpv4-d16", FPU_ARCH_VFP_V4D16),
  ARM_ADD ("vfpv4", FPU_ARCH_VFP_V4),
  ARM_EXT ("simd", FPU_ARCH_VFP_V3_PLUS_NEON_V1,
	   ARM_FEATURE_COPROC (FPU_NEON_EXT_V1 | FPU_NEON_EXT_FMA)),
  /* Aliases for +simd.  */
  ARM_ADD ("neon", FPU_ARCH_VFP_V3_PLUS_NEON_V1),
  ARM_ADD ("neon-vfpv3", FPU_ARCH_VFP_V3_PLUS_NEON_V1),
  ARM_ADD ("neon-fp16", FPU_ARCH_NEON_FP16),
  ARM_ADD ("neon-vfpv4", FPU_ARCH_NEON_VFP_V4),
  ARM_ADD ("mp", ARM_FEATURE_CORE_LOW (ARM_EXT_MP)),
  ARM_ADD ("sec", ARM_FEATURE_CORE_LOW (ARM_EXT_SEC)),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv7-r (and "armv7r").  */
static const struct arm_ext_table armv7r_ext_table[] =
{
  ARM_ADD ("fp.sp", FPU_ARCH_VFP_V3xD),
  ARM_ADD ("vfpv3xd", FPU_ARCH_VFP_V3xD),	/* Alias for +fp.sp.  */
  ARM_EXT ("fp", FPU_ARCH_VFP_V3D16, ALL_FP),
  ARM_ADD ("vfpv3-d16", FPU_ARCH_VFP_V3D16),	/* Alias for +fp.  */
  ARM_ADD ("vfpv3xd-fp16", FPU_ARCH_VFP_V3xD_FP16),
  ARM_ADD ("vfpv3-d16-fp16", FPU_ARCH_VFP_V3D16_FP16),
  ARM_EXT ("idiv", ARM_FEATURE_CORE_LOW (ARM_EXT_ADIV | ARM_EXT_DIV),
	   ARM_FEATURE_CORE_LOW (ARM_EXT_ADIV | ARM_EXT_DIV)),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv7e-m.  */
static const struct arm_ext_table armv7em_ext_table[] =
{
  ARM_EXT ("fp", FPU_ARCH_VFP_V4_SP_D16, ALL_FP),
  /* Alias for +fp, used to be known as fpv4-sp-d16.  */
  ARM_ADD ("vfpv4-sp-d16", FPU_ARCH_VFP_V4_SP_D16),
  ARM_ADD ("fpv5", FPU_ARCH_VFP_V5_SP_D16),
  ARM_ADD ("fp.dp", FPU_ARCH_VFP_V5D16),
  ARM_ADD ("fpv5-d16", FPU_ARCH_VFP_V5D16),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv8-a.  */
static const struct arm_ext_table armv8a_ext_table[] =
{
  ARM_ADD ("crc", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC)),
  ARM_ADD ("simd", FPU_ARCH_NEON_VFP_ARMV8),
  ARM_EXT ("crypto", FPU_ARCH_CRYPTO_NEON_VFP_ARMV8,
	   ARM_FEATURE_COPROC (FPU_CRYPTO_ARMV8)),
  /* Armv8-a does not allow an FP implementation without SIMD, so the user
     should use the +simd option to turn on FP.  */
  ARM_REMOVE ("fp", ALL_FP),
  ARM_ADD ("sb", ARM_FEATURE_CORE_HIGH (ARM_EXT2_SB)),
  ARM_ADD ("predres", ARM_FEATURE_CORE_HIGH (ARM_EXT2_PREDRES)),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv8.1-a.  */
static const struct arm_ext_table armv81a_ext_table[] =
{
  ARM_ADD ("simd", FPU_ARCH_NEON_VFP_ARMV8_1),
  ARM_EXT ("crypto", FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_1,
	   ARM_FEATURE_COPROC (FPU_CRYPTO_ARMV8)),
  /* Armv8-a does not allow an FP implementation without SIMD, so the user
     should use the +simd option to turn on FP.  */
  ARM_REMOVE ("fp", ALL_FP),
  ARM_ADD ("sb", ARM_FEATURE_CORE_HIGH (ARM_EXT2_SB)),
  ARM_ADD ("predres", ARM_FEATURE_CORE_HIGH (ARM_EXT2_PREDRES)),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv8.2-a (also shared by armv8.3-a).  */
static const struct arm_ext_table armv82a_ext_table[] =
{
  ARM_ADD ("simd", FPU_ARCH_NEON_VFP_ARMV8_1),
  ARM_ADD ("fp16", FPU_ARCH_NEON_VFP_ARMV8_2_FP16),
  ARM_ADD ("fp16fml", FPU_ARCH_NEON_VFP_ARMV8_2_FP16FML),
  ARM_ADD ("bf16", ARM_FEATURE_CORE_HIGH (ARM_EXT2_BF16)),
  ARM_ADD ("i8mm", ARM_FEATURE_CORE_HIGH (ARM_EXT2_I8MM)),
  ARM_EXT ("crypto", FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_1,
	   ARM_FEATURE_COPROC (FPU_CRYPTO_ARMV8)),
  ARM_ADD ("dotprod", FPU_ARCH_DOTPROD_NEON_VFP_ARMV8),
  /* Armv8-a does not allow an FP implementation without SIMD, so the user
     should use the +simd option to turn on FP.  */
  ARM_REMOVE ("fp", ALL_FP),
  ARM_ADD ("sb", ARM_FEATURE_CORE_HIGH (ARM_EXT2_SB)),
  ARM_ADD ("predres", ARM_FEATURE_CORE_HIGH (ARM_EXT2_PREDRES)),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv8.4-a.  */
static const struct arm_ext_table armv84a_ext_table[] =
{
  ARM_ADD ("simd", FPU_ARCH_DOTPROD_NEON_VFP_ARMV8),
  ARM_ADD ("fp16", FPU_ARCH_NEON_VFP_ARMV8_4_FP16FML),
  ARM_ADD ("bf16", ARM_FEATURE_CORE_HIGH (ARM_EXT2_BF16)),
  ARM_ADD ("i8mm", ARM_FEATURE_CORE_HIGH (ARM_EXT2_I8MM)),
  ARM_EXT ("crypto", FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_4,
	   ARM_FEATURE_COPROC (FPU_CRYPTO_ARMV8)),
  /* Armv8-a does not allow an FP implementation without SIMD, so the user
     should use the +simd option to turn on FP.  */
  ARM_REMOVE ("fp", ALL_FP),
  ARM_ADD ("sb", ARM_FEATURE_CORE_HIGH (ARM_EXT2_SB)),
  ARM_ADD ("predres", ARM_FEATURE_CORE_HIGH (ARM_EXT2_PREDRES)),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv8.5-a.  No "+sb"/"+predres" entries:
   those features are part of the v8.5 architecture itself.  */
static const struct arm_ext_table armv85a_ext_table[] =
{
  ARM_ADD ("simd", FPU_ARCH_DOTPROD_NEON_VFP_ARMV8),
  ARM_ADD ("fp16", FPU_ARCH_NEON_VFP_ARMV8_4_FP16FML),
  ARM_ADD ("bf16", ARM_FEATURE_CORE_HIGH (ARM_EXT2_BF16)),
  ARM_ADD ("i8mm", ARM_FEATURE_CORE_HIGH (ARM_EXT2_I8MM)),
  ARM_EXT ("crypto", FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_4,
	   ARM_FEATURE_COPROC (FPU_CRYPTO_ARMV8)),
  /* Armv8-a does not allow an FP implementation without SIMD, so the user
     should use the +simd option to turn on FP.  */
  ARM_REMOVE ("fp", ALL_FP),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv8.6-a.  */
static const struct arm_ext_table armv86a_ext_table[] =
{
  ARM_ADD ("i8mm", ARM_FEATURE_CORE_HIGH (ARM_EXT2_I8MM)),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};

/* Armv8.7-a and armv8.8-a offer the same "+ext" set as armv8.6-a.  */
#define armv87a_ext_table armv86a_ext_table
#define armv88a_ext_table armv87a_ext_table
/* Extensions valid for -march=armv9-a.  */
static const struct arm_ext_table armv9a_ext_table[] =
{
  ARM_ADD ("simd", FPU_ARCH_DOTPROD_NEON_VFP_ARMV8),
  ARM_ADD ("fp16", FPU_ARCH_NEON_VFP_ARMV8_4_FP16FML),
  ARM_ADD ("bf16", ARM_FEATURE_CORE_HIGH (ARM_EXT2_BF16)),
  ARM_ADD ("i8mm", ARM_FEATURE_CORE_HIGH (ARM_EXT2_I8MM)),
  ARM_EXT ("crypto", FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_4,
	   ARM_FEATURE_COPROC (FPU_CRYPTO_ARMV8)),
  /* Armv9-a does not allow an FP implementation without SIMD, so the user
     should use the +simd option to turn on FP.  */
  ARM_REMOVE ("fp", ALL_FP),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};

/* Armv9.1-a through armv9.3-a share the armv8.6-a "+ext" set.  */
#define armv91a_ext_table armv86a_ext_table
#define armv92a_ext_table armv91a_ext_table
#define armv93a_ext_table armv92a_ext_table
/* The eight "+cdecpN" Custom Datapath Extension coprocessor options,
   shared between the v8-M mainline and v8.1-M mainline extension tables.
   Each enables the generic CDE bit plus the per-coprocessor bit.  */
#define CDE_EXTENSIONS \
  ARM_ADD ("cdecp0", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE | ARM_EXT2_CDE0)), \
  ARM_ADD ("cdecp1", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE | ARM_EXT2_CDE1)), \
  ARM_ADD ("cdecp2", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE | ARM_EXT2_CDE2)), \
  ARM_ADD ("cdecp3", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE | ARM_EXT2_CDE3)), \
  ARM_ADD ("cdecp4", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE | ARM_EXT2_CDE4)), \
  ARM_ADD ("cdecp5", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE | ARM_EXT2_CDE5)), \
  ARM_ADD ("cdecp6", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE | ARM_EXT2_CDE6)), \
  ARM_ADD ("cdecp7", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CDE | ARM_EXT2_CDE7))
/* Extensions valid for -march=armv8-m.main.  */
static const struct arm_ext_table armv8m_main_ext_table[] =
{
  ARM_EXT ("dsp", ARM_FEATURE_CORE_LOW (ARM_AEXT_V8M_MAIN_DSP),
	   ARM_FEATURE_CORE_LOW (ARM_AEXT_V8M_MAIN_DSP)),
  ARM_EXT ("fp", FPU_ARCH_VFP_V5_SP_D16, ALL_FP),
  ARM_ADD ("fp.dp", FPU_ARCH_VFP_V5D16),
  CDE_EXTENSIONS,
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
/* Extensions valid for -march=armv8.1-m.main, including MVE (Helium)
   and PACBTI.  */
static const struct arm_ext_table armv8_1m_main_ext_table[] =
{
  ARM_EXT ("dsp", ARM_FEATURE_CORE_LOW (ARM_AEXT_V8M_MAIN_DSP),
	   ARM_FEATURE_CORE_LOW (ARM_AEXT_V8M_MAIN_DSP)),
  /* "+fp" is single precision plus FP16 scalar instructions.  */
  ARM_EXT ("fp",
	   ARM_FEATURE (0, ARM_EXT2_FP16_INST,
			FPU_VFP_V5_SP_D16 | FPU_VFP_EXT_FP16 | FPU_VFP_EXT_FMA),
	   ALL_FP),
  ARM_ADD ("fp.dp",
	   ARM_FEATURE (0, ARM_EXT2_FP16_INST,
			FPU_VFP_V5D16 | FPU_VFP_EXT_FP16 | FPU_VFP_EXT_FMA)),
  /* "+nomve" also clears the MVE-FP bit, since MVE-FP implies MVE.  */
  ARM_EXT ("mve", ARM_FEATURE (ARM_AEXT_V8M_MAIN_DSP, ARM_EXT2_MVE, 0),
	   ARM_FEATURE_CORE_HIGH (ARM_EXT2_MVE | ARM_EXT2_MVE_FP)),
  ARM_ADD ("mve.fp",
	   ARM_FEATURE (ARM_AEXT_V8M_MAIN_DSP,
			ARM_EXT2_FP16_INST | ARM_EXT2_MVE | ARM_EXT2_MVE_FP,
			FPU_VFP_V5_SP_D16 | FPU_VFP_EXT_FP16 | FPU_VFP_EXT_FMA)),
  CDE_EXTENSIONS,
  ARM_ADD ("pacbti", ARM_FEATURE_CORE_HIGH_HIGH (ARM_AEXT3_V8_1M_MAIN_PACBTI)),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};

#undef CDE_EXTENSIONS
/* Extensions valid for -march=armv8-r.  */
static const struct arm_ext_table armv8r_ext_table[] =
{
  ARM_ADD ("crc", ARM_FEATURE_CORE_HIGH (ARM_EXT2_CRC)),
  ARM_ADD ("simd", FPU_ARCH_NEON_VFP_ARMV8),
  ARM_EXT ("crypto", FPU_ARCH_CRYPTO_NEON_VFP_ARMV8,
	   ARM_FEATURE_COPROC (FPU_CRYPTO_ARMV8)),
  ARM_REMOVE ("fp", ALL_FP),
  ARM_ADD ("fp.sp", FPU_ARCH_VFP_V5_SP_D16),
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE }
};
  27935. /* This list should, at a minimum, contain all the architecture names
  27936. recognized by GCC. */
  27937. #define ARM_ARCH_OPT(N, V, DF) { N, sizeof (N) - 1, V, DF, NULL }
  27938. #define ARM_ARCH_OPT2(N, V, DF, ext) \
  27939. { N, sizeof (N) - 1, V, DF, ext##_ext_table }
  27940. static const struct arm_arch_option_table arm_archs[] =
  27941. {
  27942. ARM_ARCH_OPT ("all", ARM_ANY, FPU_ARCH_FPA),
  27943. ARM_ARCH_OPT ("armv1", ARM_ARCH_V1, FPU_ARCH_FPA),
  27944. ARM_ARCH_OPT ("armv2", ARM_ARCH_V2, FPU_ARCH_FPA),
  27945. ARM_ARCH_OPT ("armv2a", ARM_ARCH_V2S, FPU_ARCH_FPA),
  27946. ARM_ARCH_OPT ("armv2s", ARM_ARCH_V2S, FPU_ARCH_FPA),
  27947. ARM_ARCH_OPT ("armv3", ARM_ARCH_V3, FPU_ARCH_FPA),
  27948. ARM_ARCH_OPT ("armv3m", ARM_ARCH_V3M, FPU_ARCH_FPA),
  27949. ARM_ARCH_OPT ("armv4", ARM_ARCH_V4, FPU_ARCH_FPA),
  27950. ARM_ARCH_OPT ("armv4xm", ARM_ARCH_V4xM, FPU_ARCH_FPA),
  27951. ARM_ARCH_OPT ("armv4t", ARM_ARCH_V4T, FPU_ARCH_FPA),
  27952. ARM_ARCH_OPT ("armv4txm", ARM_ARCH_V4TxM, FPU_ARCH_FPA),
  27953. ARM_ARCH_OPT ("armv5", ARM_ARCH_V5, FPU_ARCH_VFP),
  27954. ARM_ARCH_OPT ("armv5t", ARM_ARCH_V5T, FPU_ARCH_VFP),
  27955. ARM_ARCH_OPT ("armv5txm", ARM_ARCH_V5TxM, FPU_ARCH_VFP),
  27956. ARM_ARCH_OPT2 ("armv5te", ARM_ARCH_V5TE, FPU_ARCH_VFP, armv5te),
  27957. ARM_ARCH_OPT2 ("armv5texp", ARM_ARCH_V5TExP, FPU_ARCH_VFP, armv5te),
  27958. ARM_ARCH_OPT2 ("armv5tej", ARM_ARCH_V5TEJ, FPU_ARCH_VFP, armv5te),
  27959. ARM_ARCH_OPT2 ("armv6", ARM_ARCH_V6, FPU_ARCH_VFP, armv5te),
  27960. ARM_ARCH_OPT2 ("armv6j", ARM_ARCH_V6, FPU_ARCH_VFP, armv5te),
  27961. ARM_ARCH_OPT2 ("armv6k", ARM_ARCH_V6K, FPU_ARCH_VFP, armv5te),
  27962. ARM_ARCH_OPT2 ("armv6z", ARM_ARCH_V6Z, FPU_ARCH_VFP, armv5te),
  27963. /* The official spelling of this variant is ARMv6KZ, the name "armv6zk" is
  27964. kept to preserve existing behaviour. */
  27965. ARM_ARCH_OPT2 ("armv6kz", ARM_ARCH_V6KZ, FPU_ARCH_VFP, armv5te),
  27966. ARM_ARCH_OPT2 ("armv6zk", ARM_ARCH_V6KZ, FPU_ARCH_VFP, armv5te),
  27967. ARM_ARCH_OPT2 ("armv6t2", ARM_ARCH_V6T2, FPU_ARCH_VFP, armv5te),
  27968. ARM_ARCH_OPT2 ("armv6kt2", ARM_ARCH_V6KT2, FPU_ARCH_VFP, armv5te),
  27969. ARM_ARCH_OPT2 ("armv6zt2", ARM_ARCH_V6ZT2, FPU_ARCH_VFP, armv5te),
  27970. /* The official spelling of this variant is ARMv6KZ, the name "armv6zkt2" is
  27971. kept to preserve existing behaviour. */
  27972. ARM_ARCH_OPT2 ("armv6kzt2", ARM_ARCH_V6KZT2, FPU_ARCH_VFP, armv5te),
  27973. ARM_ARCH_OPT2 ("armv6zkt2", ARM_ARCH_V6KZT2, FPU_ARCH_VFP, armv5te),
  27974. ARM_ARCH_OPT ("armv6-m", ARM_ARCH_V6M, FPU_ARCH_VFP),
  27975. ARM_ARCH_OPT ("armv6s-m", ARM_ARCH_V6SM, FPU_ARCH_VFP),
  27976. ARM_ARCH_OPT2 ("armv7", ARM_ARCH_V7, FPU_ARCH_VFP, armv7),
  27977. /* The official spelling of the ARMv7 profile variants is the dashed form.
  27978. Accept the non-dashed form for compatibility with old toolchains. */
  27979. ARM_ARCH_OPT2 ("armv7a", ARM_ARCH_V7A, FPU_ARCH_VFP, armv7a),
  27980. ARM_ARCH_OPT2 ("armv7ve", ARM_ARCH_V7VE, FPU_ARCH_VFP, armv7ve),
  27981. ARM_ARCH_OPT2 ("armv7r", ARM_ARCH_V7R, FPU_ARCH_VFP, armv7r),
  27982. ARM_ARCH_OPT ("armv7m", ARM_ARCH_V7M, FPU_ARCH_VFP),
  27983. ARM_ARCH_OPT2 ("armv7-a", ARM_ARCH_V7A, FPU_ARCH_VFP, armv7a),
  27984. ARM_ARCH_OPT2 ("armv7-r", ARM_ARCH_V7R, FPU_ARCH_VFP, armv7r),
  27985. ARM_ARCH_OPT ("armv7-m", ARM_ARCH_V7M, FPU_ARCH_VFP),
  27986. ARM_ARCH_OPT2 ("armv7e-m", ARM_ARCH_V7EM, FPU_ARCH_VFP, armv7em),
  27987. ARM_ARCH_OPT ("armv8-m.base", ARM_ARCH_V8M_BASE, FPU_ARCH_VFP),
  27988. ARM_ARCH_OPT2 ("armv8-m.main", ARM_ARCH_V8M_MAIN, FPU_ARCH_VFP,
  27989. armv8m_main),
  27990. ARM_ARCH_OPT2 ("armv8.1-m.main", ARM_ARCH_V8_1M_MAIN, FPU_ARCH_VFP,
  27991. armv8_1m_main),
  27992. ARM_ARCH_OPT2 ("armv8-a", ARM_ARCH_V8A, FPU_ARCH_VFP, armv8a),
  27993. ARM_ARCH_OPT2 ("armv8.1-a", ARM_ARCH_V8_1A, FPU_ARCH_VFP, armv81a),
  27994. ARM_ARCH_OPT2 ("armv8.2-a", ARM_ARCH_V8_2A, FPU_ARCH_VFP, armv82a),
  27995. ARM_ARCH_OPT2 ("armv8.3-a", ARM_ARCH_V8_3A, FPU_ARCH_VFP, armv82a),
  27996. ARM_ARCH_OPT2 ("armv8-r", ARM_ARCH_V8R, FPU_ARCH_VFP, armv8r),
  27997. ARM_ARCH_OPT2 ("armv8.4-a", ARM_ARCH_V8_4A, FPU_ARCH_VFP, armv84a),
  27998. ARM_ARCH_OPT2 ("armv8.5-a", ARM_ARCH_V8_5A, FPU_ARCH_VFP, armv85a),
  27999. ARM_ARCH_OPT2 ("armv8.6-a", ARM_ARCH_V8_6A, FPU_ARCH_VFP, armv86a),
  28000. ARM_ARCH_OPT2 ("armv8.7-a", ARM_ARCH_V8_7A, FPU_ARCH_VFP, armv87a),
  28001. ARM_ARCH_OPT2 ("armv8.8-a", ARM_ARCH_V8_8A, FPU_ARCH_VFP, armv88a),
  28002. ARM_ARCH_OPT2 ("armv9-a", ARM_ARCH_V9A, FPU_ARCH_VFP, armv9a),
  28003. ARM_ARCH_OPT2 ("armv9.1-a", ARM_ARCH_V9_1A, FPU_ARCH_VFP, armv91a),
  28004. ARM_ARCH_OPT2 ("armv9.2-a", ARM_ARCH_V9_2A, FPU_ARCH_VFP, armv92a),
  28005. ARM_ARCH_OPT2 ("armv9.3-a", ARM_ARCH_V9_2A, FPU_ARCH_VFP, armv93a),
  28006. ARM_ARCH_OPT ("xscale", ARM_ARCH_XSCALE, FPU_ARCH_VFP),
  28007. ARM_ARCH_OPT ("iwmmxt", ARM_ARCH_IWMMXT, FPU_ARCH_VFP),
  28008. ARM_ARCH_OPT ("iwmmxt2", ARM_ARCH_IWMMXT2, FPU_ARCH_VFP),
  28009. { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE, NULL }
  28010. };
  28011. #undef ARM_ARCH_OPT
/* ISA extensions in the co-processor and main instruction set space.
   Architecture-independent description of a "+ext" option: the features
   it merges in, the features "+noext" clears, and which architectures
   accept it.  */
struct arm_option_extension_value_table
{
  const char *		  name;		  /* Extension name, without the '+'.  */
  size_t		  name_len;	  /* Length of NAME, precomputed.  */
  const arm_feature_set	  merge_value;	  /* Bits set by "+NAME".  */
  const arm_feature_set	  clear_value;	  /* Bits cleared by "+noNAME".  */
  /* List of architectures for which an extension is available.  ARM_ARCH_NONE
     indicates that an extension is available for all architectures while
     ARM_ANY marks an empty entry.  */
  const arm_feature_set	  allowed_archs[2];
};
/* The following table must be in alphabetical order with a NULL last entry.  */

/* N: extension name, M: features merged by "+N", C: features cleared by
   "+noN", AA/AA1/AA2: architecture(s) the extension is allowed for.  */
#define ARM_EXT_OPT(N, M, C, AA) { N, sizeof (N) - 1, M, C, { AA, ARM_ANY } }
#define ARM_EXT_OPT2(N, M, C, AA1, AA2) { N, sizeof (N) - 1, M, C, {AA1, AA2} }

/* DEPRECATED: Refrain from using this table to add any new extensions, instead
   use the context sensitive approach using arm_ext_table's.  */
static const struct arm_option_extension_value_table arm_extensions[] =
{
  ARM_EXT_OPT ("crc",	 ARM_FEATURE_CORE_HIGH(ARM_EXT2_CRC),
			 ARM_FEATURE_CORE_HIGH(ARM_EXT2_CRC),
			 ARM_FEATURE_CORE_LOW (ARM_EXT_V8)),
  ARM_EXT_OPT ("crypto", FPU_ARCH_CRYPTO_NEON_VFP_ARMV8,
			 ARM_FEATURE_COPROC (FPU_CRYPTO_ARMV8),
			 ARM_FEATURE_CORE_LOW (ARM_EXT_V8)),
  ARM_EXT_OPT ("dotprod", FPU_ARCH_DOTPROD_NEON_VFP_ARMV8,
			  ARM_FEATURE_COPROC (FPU_NEON_EXT_DOTPROD),
			  ARM_ARCH_V8_2A),
  ARM_EXT_OPT ("dsp",	ARM_FEATURE_CORE_LOW (ARM_EXT_V5ExP | ARM_EXT_V6_DSP),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V5ExP | ARM_EXT_V6_DSP),
			ARM_FEATURE_CORE (ARM_EXT_V7M, ARM_EXT2_V8M)),
  ARM_EXT_OPT ("fp",     FPU_ARCH_VFP_ARMV8, ARM_FEATURE_COPROC (FPU_VFP_ARMV8),
			 ARM_FEATURE_CORE_LOW (ARM_EXT_V8)),
  ARM_EXT_OPT ("fp16",  ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST),
			ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST),
			ARM_ARCH_V8_2A),
  ARM_EXT_OPT ("fp16fml",  ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST
						  | ARM_EXT2_FP16_FML),
			   ARM_FEATURE_CORE_HIGH (ARM_EXT2_FP16_INST
						  | ARM_EXT2_FP16_FML),
			   ARM_ARCH_V8_2A),
  ARM_EXT_OPT2 ("idiv",	ARM_FEATURE_CORE_LOW (ARM_EXT_ADIV | ARM_EXT_DIV),
			ARM_FEATURE_CORE_LOW (ARM_EXT_ADIV | ARM_EXT_DIV),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V7A),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V7R)),
  /* Duplicate entry for the purpose of allowing ARMv7 to match in presence of
     Thumb divide instruction.  Due to this having the same name as the
     previous entry, this will be ignored when doing command-line parsing and
     only considered by build attribute selection code.  */
  ARM_EXT_OPT ("idiv",	ARM_FEATURE_CORE_LOW (ARM_EXT_DIV),
			ARM_FEATURE_CORE_LOW (ARM_EXT_DIV),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V7)),
  ARM_EXT_OPT ("iwmmxt",ARM_FEATURE_COPROC (ARM_CEXT_IWMMXT),
			ARM_FEATURE_COPROC (ARM_CEXT_IWMMXT), ARM_ARCH_NONE),
  ARM_EXT_OPT ("iwmmxt2", ARM_FEATURE_COPROC (ARM_CEXT_IWMMXT2),
			  ARM_FEATURE_COPROC (ARM_CEXT_IWMMXT2), ARM_ARCH_NONE),
  ARM_EXT_OPT ("maverick", ARM_FEATURE_COPROC (ARM_CEXT_MAVERICK),
			   ARM_FEATURE_COPROC (ARM_CEXT_MAVERICK),
			   ARM_ARCH_NONE),
  ARM_EXT_OPT2 ("mp",	ARM_FEATURE_CORE_LOW (ARM_EXT_MP),
			ARM_FEATURE_CORE_LOW (ARM_EXT_MP),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V7A),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V7R)),
  ARM_EXT_OPT ("os",	ARM_FEATURE_CORE_LOW (ARM_EXT_OS),
			ARM_FEATURE_CORE_LOW (ARM_EXT_OS),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V6M)),
  ARM_EXT_OPT ("pan",	ARM_FEATURE_CORE_HIGH (ARM_EXT2_PAN),
			ARM_FEATURE (ARM_EXT_V8, ARM_EXT2_PAN, 0),
			ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8A)),
  ARM_EXT_OPT ("predres", ARM_FEATURE_CORE_HIGH (ARM_EXT2_PREDRES),
			ARM_FEATURE_CORE_HIGH (ARM_EXT2_PREDRES),
			ARM_ARCH_V8A),
  ARM_EXT_OPT ("ras",	ARM_FEATURE_CORE_HIGH (ARM_EXT2_RAS),
			ARM_FEATURE (ARM_EXT_V8, ARM_EXT2_RAS, 0),
			ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8A)),
  ARM_EXT_OPT ("rdma",  FPU_ARCH_NEON_VFP_ARMV8_1,
			ARM_FEATURE_COPROC (FPU_NEON_ARMV8 | FPU_NEON_EXT_RDMA),
			ARM_FEATURE_CORE_HIGH (ARM_EXT2_V8A)),
  ARM_EXT_OPT ("sb",	ARM_FEATURE_CORE_HIGH (ARM_EXT2_SB),
			ARM_FEATURE_CORE_HIGH (ARM_EXT2_SB),
			ARM_ARCH_V8A),
  ARM_EXT_OPT2 ("sec",	ARM_FEATURE_CORE_LOW (ARM_EXT_SEC),
			ARM_FEATURE_CORE_LOW (ARM_EXT_SEC),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V6K),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V7A)),
  ARM_EXT_OPT ("simd",  FPU_ARCH_NEON_VFP_ARMV8,
			ARM_FEATURE_COPROC (FPU_NEON_ARMV8),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V8)),
  ARM_EXT_OPT ("virt",	ARM_FEATURE_CORE_LOW (ARM_EXT_VIRT | ARM_EXT_ADIV
				     | ARM_EXT_DIV),
			ARM_FEATURE_CORE_LOW (ARM_EXT_VIRT),
			ARM_FEATURE_CORE_LOW (ARM_EXT_V7A)),
  ARM_EXT_OPT ("xscale",ARM_FEATURE_COPROC (ARM_CEXT_XSCALE),
			ARM_FEATURE_COPROC (ARM_CEXT_XSCALE), ARM_ARCH_NONE),
  /* Sentinel entry marking the end of the table.  */
  { NULL, 0, ARM_ARCH_NONE, ARM_ARCH_NONE, { ARM_ARCH_NONE, ARM_ARCH_NONE } }
};
#undef ARM_EXT_OPT
/* ISA floating-point and Advanced SIMD extensions.  */
struct arm_option_fpu_value_table
{
  /* FPU name as accepted by -mfpu=, eg. "vfpv3-d16".  */
  const char * name;
  /* Feature set enabled for that FPU.  */
  const arm_feature_set value;
};
/* This list should, at a minimum, contain all the fpu names
   recognized by GCC.  Matched by exact string comparison in
   arm_parse_fpu, so no name_len field is needed here.  */
static const struct arm_option_fpu_value_table arm_fpus[] =
{
  {"softfpa",		FPU_NONE},
  {"fpe",		FPU_ARCH_FPE},
  {"fpe2",		FPU_ARCH_FPE},
  {"fpe3",		FPU_ARCH_FPA},	/* Third release supports LFM/SFM.  */
  {"fpa",		FPU_ARCH_FPA},
  {"fpa10",		FPU_ARCH_FPA},
  {"fpa11",		FPU_ARCH_FPA},
  {"arm7500fe",		FPU_ARCH_FPA},
  {"softvfp",		FPU_ARCH_VFP},
  {"softvfp+vfp",	FPU_ARCH_VFP_V2},
  {"vfp",		FPU_ARCH_VFP_V2},
  {"vfp9",		FPU_ARCH_VFP_V2},
  {"vfp3",		FPU_ARCH_VFP_V3}, /* Undocumented, use vfpv3.  */
  {"vfp10",		FPU_ARCH_VFP_V2},
  {"vfp10-r0",		FPU_ARCH_VFP_V1},
  {"vfpxd",		FPU_ARCH_VFP_V1xD},
  {"vfpv2",		FPU_ARCH_VFP_V2},
  {"vfpv3",		FPU_ARCH_VFP_V3},
  {"vfpv3-fp16",	FPU_ARCH_VFP_V3_FP16},
  {"vfpv3-d16",		FPU_ARCH_VFP_V3D16},
  {"vfpv3-d16-fp16",	FPU_ARCH_VFP_V3D16_FP16},
  {"vfpv3xd",		FPU_ARCH_VFP_V3xD},
  {"vfpv3xd-fp16",	FPU_ARCH_VFP_V3xD_FP16},
  {"arm1020t",		FPU_ARCH_VFP_V1},
  {"arm1020e",		FPU_ARCH_VFP_V2},
  {"arm1136jfs",	FPU_ARCH_VFP_V2}, /* Undocumented, use arm1136jf-s.  */
  {"arm1136jf-s",	FPU_ARCH_VFP_V2},
  {"maverick",		FPU_ARCH_MAVERICK},
  {"neon",		FPU_ARCH_VFP_V3_PLUS_NEON_V1},
  {"neon-vfpv3",	FPU_ARCH_VFP_V3_PLUS_NEON_V1},
  {"neon-fp16",		FPU_ARCH_NEON_FP16},
  {"vfpv4",		FPU_ARCH_VFP_V4},
  {"vfpv4-d16",		FPU_ARCH_VFP_V4D16},
  {"fpv4-sp-d16",	FPU_ARCH_VFP_V4_SP_D16},
  {"fpv5-d16",		FPU_ARCH_VFP_V5D16},
  {"fpv5-sp-d16",	FPU_ARCH_VFP_V5_SP_D16},
  {"neon-vfpv4",	FPU_ARCH_NEON_VFP_V4},
  {"fp-armv8",		FPU_ARCH_VFP_ARMV8},
  {"neon-fp-armv8",	FPU_ARCH_NEON_VFP_ARMV8},
  {"crypto-neon-fp-armv8",
			FPU_ARCH_CRYPTO_NEON_VFP_ARMV8},
  {"neon-fp-armv8.1",	FPU_ARCH_NEON_VFP_ARMV8_1},
  {"crypto-neon-fp-armv8.1",
			FPU_ARCH_CRYPTO_NEON_VFP_ARMV8_1},
  {NULL,		ARM_ARCH_NONE}
};
/* Generic name -> integer value mapping used for simple option tables
   (float ABI, EABI version).  */
struct arm_option_value_table
{
  /* Option value name as written on the command line.  */
  const char *name;
  /* Corresponding numeric value.  */
  long value;
};
/* Accepted arguments for -mfloat-abi=.  */
static const struct arm_option_value_table arm_float_abis[] =
{
  {"hard",	ARM_FLOAT_ABI_HARD},
  {"softfp",	ARM_FLOAT_ABI_SOFTFP},
  {"soft",	ARM_FLOAT_ABI_SOFT},
  {NULL,	0}
};
#ifdef OBJ_ELF
/* We only know how to output GNU and ver 4/5 (AAELF) formats.  */
static const struct arm_option_value_table arm_eabis[] =
{
  {"gnu",	EF_ARM_EABI_UNKNOWN},
  {"4",		EF_ARM_EABI_VER4},
  {"5",		EF_ARM_EABI_VER5},
  {NULL,	0}
};
#endif
/* Entry for a multi-character option (eg. "mcpu=") that takes a
   sub-argument parsed by a dedicated callback.  */
struct arm_long_option_table
{
  const char *option;			/* Substring to match.  */
  const char *help;			/* Help information.  */
  bool (*func) (const char *subopt);	/* Function to decode sub-option.  */
  const char *deprecated;		/* If non-null, print this message.  */
};
/* Parse a '+ext1+noext2...' suffix STR against base feature set *OPT_SET,
   updating *EXT_SET with each extension added or removed.  If EXT_TABLE is
   non-NULL it is searched first (the context-sensitive per-architecture
   table); the legacy global ARM_EXTENSIONS table is the fallback.  Returns
   false (after issuing a diagnostic via as_bad) on any parse error.  */
static bool
arm_parse_extension (const char *str, const arm_feature_set *opt_set,
		     arm_feature_set *ext_set,
		     const struct arm_ext_table *ext_table)
{
  /* We insist on extensions being specified in alphabetical order, and with
     extensions being added before being removed.  We achieve this by having
     the global ARM_EXTENSIONS table in alphabetical order, and using the
     ADDING_VALUE variable to indicate whether we are adding an extension (1)
     or removing it (0) and only allowing it to change in the order
     -1 -> 1 -> 0.  */
  const struct arm_option_extension_value_table * opt = NULL;
  const arm_feature_set arm_any = ARM_ANY;
  int adding_value = -1;

  while (str != NULL && *str != 0)
    {
      const char *ext;
      size_t len;

      /* Every extension must be introduced by a '+'.  */
      if (*str != '+')
	{
	  as_bad (_("invalid architectural extension"));
	  return false;
	}

      str++;
      ext = strchr (str, '+');

      /* LEN covers just this extension's name.  */
      if (ext != NULL)
	len = ext - str;
      else
	len = strlen (str);

      /* A "no" prefix flips us into removal mode and rewinds the table
	 cursor (removals restart the alphabetical scan).  */
      if (len >= 2 && startswith (str, "no"))
	{
	  if (adding_value != 0)
	    {
	      adding_value = 0;
	      opt = arm_extensions;
	    }

	  len -= 2;
	  str += 2;
	}
      else if (len > 0)
	{
	  if (adding_value == -1)
	    {
	      adding_value = 1;
	      opt = arm_extensions;
	    }
	  else if (adding_value != 1)
	    {
	      as_bad (_("must specify extensions to add before specifying "
			"those to remove"));
	      return false;
	    }
	}

      if (len == 0)
	{
	  as_bad (_("missing architectural extension"));
	  return false;
	}

      gas_assert (adding_value != -1);
      gas_assert (opt != NULL);

      /* Prefer the context-sensitive per-architecture extension table when
	 one was supplied.  */
      if (ext_table != NULL)
	{
	  const struct arm_ext_table * ext_opt = ext_table;
	  bool found = false;
	  for (; ext_opt->name != NULL; ext_opt++)
	    if (ext_opt->name_len == len
		&& strncmp (ext_opt->name, str, len) == 0)
	      {
		if (adding_value)
		  {
		    if (ARM_FEATURE_ZERO (ext_opt->merge))
			/* TODO: Option not supported.  When we remove the
			   legacy table this case should error out.  */
		      continue;

		    ARM_MERGE_FEATURE_SETS (*ext_set, *ext_set, ext_opt->merge);
		  }
		else
		  {
		    if (ARM_FEATURE_ZERO (ext_opt->clear))
			/* TODO: Option not supported.  When we remove the
			   legacy table this case should error out.  */
		      continue;
		    ARM_CLEAR_FEATURE (*ext_set, *ext_set, ext_opt->clear);
		  }
		found = true;
		break;
	      }
	  if (found)
	    {
	      str = ext;
	      continue;
	    }
	}

      /* Scan over the options table trying to find an exact match.  */
      for (; opt->name != NULL; opt++)
	if (opt->name_len == len && strncmp (opt->name, str, len) == 0)
	  {
	    int i, nb_allowed_archs =
	      sizeof (opt->allowed_archs) / sizeof (opt->allowed_archs[0]);
	    /* Check we can apply the extension to this architecture.  */
	    for (i = 0; i < nb_allowed_archs; i++)
	      {
		/* Empty entry.  */
		if (ARM_FEATURE_EQUAL (opt->allowed_archs[i], arm_any))
		  continue;
		if (ARM_FSET_CPU_SUBSET (opt->allowed_archs[i], *opt_set))
		  break;
	      }
	    if (i == nb_allowed_archs)
	      {
		as_bad (_("extension does not apply to the base architecture"));
		return false;
	      }

	    /* Add or remove the extension.  */
	    if (adding_value)
	      ARM_MERGE_FEATURE_SETS (*ext_set, *ext_set, opt->merge_value);
	    else
	      ARM_CLEAR_FEATURE (*ext_set, *ext_set, opt->clear_value);

	    /* Allowing Thumb division instructions for ARMv7 in autodetection
	       rely on this break so that duplicate extensions (extensions
	       with the same name as a previous extension in the list) are not
	       considered for command-line parsing.  */
	    break;
	  }

      if (opt->name == NULL)
	{
	  /* Did we fail to find an extension because it wasn't specified in
	     alphabetical order, or because it does not exist?  */
	  for (opt = arm_extensions; opt->name != NULL; opt++)
	    if (opt->name_len == len && strncmp (opt->name, str, len) == 0)
	      break;

	  if (opt->name == NULL)
	    as_bad (_("unknown architectural extension `%s'"), str);
	  else
	    as_bad (_("architectural extensions must be specified in "
		      "alphabetical order"));

	  return false;
	}
      else
	{
	  /* We should skip the extension we've just matched the next time
	     round.  */
	  opt++;
	}

      str = ext;
    };

  return true;
}
  28341. static bool
  28342. arm_parse_fp16_opt (const char *str)
  28343. {
  28344. if (strcasecmp (str, "ieee") == 0)
  28345. fp16_format = ARM_FP16_FORMAT_IEEE;
  28346. else if (strcasecmp (str, "alternative") == 0)
  28347. fp16_format = ARM_FP16_FORMAT_ALTERNATIVE;
  28348. else
  28349. {
  28350. as_bad (_("unrecognised float16 format \"%s\""), str);
  28351. return false;
  28352. }
  28353. return true;
  28354. }
  28355. static bool
  28356. arm_parse_cpu (const char *str)
  28357. {
  28358. const struct arm_cpu_option_table *opt;
  28359. const char *ext = strchr (str, '+');
  28360. size_t len;
  28361. if (ext != NULL)
  28362. len = ext - str;
  28363. else
  28364. len = strlen (str);
  28365. if (len == 0)
  28366. {
  28367. as_bad (_("missing cpu name `%s'"), str);
  28368. return false;
  28369. }
  28370. for (opt = arm_cpus; opt->name != NULL; opt++)
  28371. if (opt->name_len == len && strncmp (opt->name, str, len) == 0)
  28372. {
  28373. mcpu_cpu_opt = &opt->value;
  28374. if (mcpu_ext_opt == NULL)
  28375. mcpu_ext_opt = XNEW (arm_feature_set);
  28376. *mcpu_ext_opt = opt->ext;
  28377. mcpu_fpu_opt = &opt->default_fpu;
  28378. if (opt->canonical_name)
  28379. {
  28380. gas_assert (sizeof selected_cpu_name > strlen (opt->canonical_name));
  28381. strcpy (selected_cpu_name, opt->canonical_name);
  28382. }
  28383. else
  28384. {
  28385. size_t i;
  28386. if (len >= sizeof selected_cpu_name)
  28387. len = (sizeof selected_cpu_name) - 1;
  28388. for (i = 0; i < len; i++)
  28389. selected_cpu_name[i] = TOUPPER (opt->name[i]);
  28390. selected_cpu_name[i] = 0;
  28391. }
  28392. if (ext != NULL)
  28393. return arm_parse_extension (ext, mcpu_cpu_opt, mcpu_ext_opt, NULL);
  28394. return true;
  28395. }
  28396. as_bad (_("unknown cpu `%s'"), str);
  28397. return false;
  28398. }
  28399. static bool
  28400. arm_parse_arch (const char *str)
  28401. {
  28402. const struct arm_arch_option_table *opt;
  28403. const char *ext = strchr (str, '+');
  28404. size_t len;
  28405. if (ext != NULL)
  28406. len = ext - str;
  28407. else
  28408. len = strlen (str);
  28409. if (len == 0)
  28410. {
  28411. as_bad (_("missing architecture name `%s'"), str);
  28412. return false;
  28413. }
  28414. for (opt = arm_archs; opt->name != NULL; opt++)
  28415. if (opt->name_len == len && strncmp (opt->name, str, len) == 0)
  28416. {
  28417. march_cpu_opt = &opt->value;
  28418. if (march_ext_opt == NULL)
  28419. march_ext_opt = XNEW (arm_feature_set);
  28420. *march_ext_opt = arm_arch_none;
  28421. march_fpu_opt = &opt->default_fpu;
  28422. selected_ctx_ext_table = opt->ext_table;
  28423. strcpy (selected_cpu_name, opt->name);
  28424. if (ext != NULL)
  28425. return arm_parse_extension (ext, march_cpu_opt, march_ext_opt,
  28426. opt->ext_table);
  28427. return true;
  28428. }
  28429. as_bad (_("unknown architecture `%s'\n"), str);
  28430. return false;
  28431. }
  28432. static bool
  28433. arm_parse_fpu (const char * str)
  28434. {
  28435. const struct arm_option_fpu_value_table * opt;
  28436. for (opt = arm_fpus; opt->name != NULL; opt++)
  28437. if (streq (opt->name, str))
  28438. {
  28439. mfpu_opt = &opt->value;
  28440. return true;
  28441. }
  28442. as_bad (_("unknown floating point format `%s'\n"), str);
  28443. return false;
  28444. }
  28445. static bool
  28446. arm_parse_float_abi (const char * str)
  28447. {
  28448. const struct arm_option_value_table * opt;
  28449. for (opt = arm_float_abis; opt->name != NULL; opt++)
  28450. if (streq (opt->name, str))
  28451. {
  28452. mfloat_abi_opt = opt->value;
  28453. return true;
  28454. }
  28455. as_bad (_("unknown floating point abi `%s'\n"), str);
  28456. return false;
  28457. }
  28458. #ifdef OBJ_ELF
  28459. static bool
  28460. arm_parse_eabi (const char * str)
  28461. {
  28462. const struct arm_option_value_table *opt;
  28463. for (opt = arm_eabis; opt->name != NULL; opt++)
  28464. if (streq (opt->name, str))
  28465. {
  28466. meabi_flags = opt->value;
  28467. return true;
  28468. }
  28469. as_bad (_("unknown EABI `%s'\n"), str);
  28470. return false;
  28471. }
  28472. #endif
  28473. static bool
  28474. arm_parse_it_mode (const char * str)
  28475. {
  28476. bool ret = true;
  28477. if (streq ("arm", str))
  28478. implicit_it_mode = IMPLICIT_IT_MODE_ARM;
  28479. else if (streq ("thumb", str))
  28480. implicit_it_mode = IMPLICIT_IT_MODE_THUMB;
  28481. else if (streq ("always", str))
  28482. implicit_it_mode = IMPLICIT_IT_MODE_ALWAYS;
  28483. else if (streq ("never", str))
  28484. implicit_it_mode = IMPLICIT_IT_MODE_NEVER;
  28485. else
  28486. {
  28487. as_bad (_("unknown implicit IT mode `%s', should be "\
  28488. "arm, thumb, always, or never."), str);
  28489. ret = false;
  28490. }
  28491. return ret;
  28492. }
  28493. static bool
  28494. arm_ccs_mode (const char * unused ATTRIBUTE_UNUSED)
  28495. {
  28496. codecomposer_syntax = true;
  28497. arm_comment_chars[0] = ';';
  28498. arm_line_separator_chars[0] = 0;
  28499. return true;
  28500. }
/* Multi-character options and their sub-option parsers.  Matched by
   md_parse_option after the single-character and legacy tables.  */
struct arm_long_option_table arm_long_opts[] =
{
  {"mcpu=", N_("<cpu name>\t  assemble for CPU <cpu name>"),
   arm_parse_cpu, NULL},
  {"march=", N_("<arch name>\t  assemble for architecture <arch name>"),
   arm_parse_arch, NULL},
  {"mfpu=", N_("<fpu name>\t  assemble for FPU architecture <fpu name>"),
   arm_parse_fpu, NULL},
  {"mfloat-abi=", N_("<abi>\t  assemble for floating point ABI <abi>"),
   arm_parse_float_abi, NULL},
#ifdef OBJ_ELF
  {"meabi=", N_("<ver>\t\t  assemble for eabi version <ver>"),
   arm_parse_eabi, NULL},
#endif
  {"mimplicit-it=", N_("<mode>\t  controls implicit insertion of IT instructions"),
   arm_parse_it_mode, NULL},
  {"mccs", N_("\t\t\t  TI CodeComposer Studio syntax compatibility mode"),
   arm_ccs_mode, NULL},
  {"mfp16-format=",
   N_("[ieee|alternative]\n\
                          set the encoding for half precision floating point "
			  "numbers to IEEE\n\
                          or Arm alternative format."),
   arm_parse_fp16_opt, NULL },
  {NULL, NULL, 0, NULL}
};
/* GAS target hook: handle command-line option C with argument ARG.
   Tries, in order: the fixed cases below, the single-character ARM_OPTS
   table, the legacy ARM_LEGACY_OPTS table, and finally the long-option
   ARM_LONG_OPTS table.  Returns non-zero if the option was consumed.  */
int
md_parse_option (int c, const char * arg)
{
  struct arm_option_table *opt;
  const struct arm_legacy_option_table *fopt;
  struct arm_long_option_table *lopt;

  switch (c)
    {
#ifdef OPTION_EB
    case OPTION_EB:
      target_big_endian = 1;
      break;
#endif

#ifdef OPTION_EL
    case OPTION_EL:
      target_big_endian = 0;
      break;
#endif

    case OPTION_FIX_V4BX:
      fix_v4bx = true;
      break;

#ifdef OBJ_ELF
    case OPTION_FDPIC:
      arm_fdpic = true;
      break;
#endif /* OBJ_ELF */

    case 'a':
      /* Listing option.  Just ignore these, we don't support additional
	 ones.	*/
      return 0;

    default:
      /* Single-character options: C is the first character, ARG (if any)
	 must match the rest of the option string exactly.  */
      for (opt = arm_opts; opt->option != NULL; opt++)
	{
	  if (c == opt->option[0]
	      && ((arg == NULL && opt->option[1] == 0)
		  || streq (arg, opt->option + 1)))
	    {
	      /* If the option is deprecated, tell the user.  */
	      if (warn_on_deprecated && opt->deprecated != NULL)
		as_tsktsk (_("option `-%c%s' is deprecated: %s"), c,
			   arg ? arg : "", _(opt->deprecated));

	      if (opt->var != NULL)
		*opt->var = opt->value;

	      return 1;
	    }
	}

      for (fopt = arm_legacy_opts; fopt->option != NULL; fopt++)
	{
	  if (c == fopt->option[0]
	      && ((arg == NULL && fopt->option[1] == 0)
		  || streq (arg, fopt->option + 1)))
	    {
	      /* If the option is deprecated, tell the user.  */
	      if (warn_on_deprecated && fopt->deprecated != NULL)
		as_tsktsk (_("option `-%c%s' is deprecated: %s"), c,
			   arg ? arg : "", _(fopt->deprecated));

	      if (fopt->var != NULL)
		*fopt->var = &fopt->value;

	      return 1;
	    }
	}

      for (lopt = arm_long_opts; lopt->option != NULL; lopt++)
	{
	  /* These options are expected to have an argument.  */
	  if (c == lopt->option[0]
	      && arg != NULL
	      && strncmp (arg, lopt->option + 1,
			  strlen (lopt->option + 1)) == 0)
	    {
	      /* If the option is deprecated, tell the user.  */
	      if (warn_on_deprecated && lopt->deprecated != NULL)
		as_tsktsk (_("option `-%c%s' is deprecated: %s"), c, arg,
			   _(lopt->deprecated));

	      /* Call the sub-option parser.  The "- 1" skips the option's
		 first character, which getopt already consumed as C.  */
	      return lopt->func (arg + strlen (lopt->option) - 1);
	    }
	}

      return 0;
    }

  return 1;
}
/* GAS target hook: print the ARM-specific option summary to FP for
   "as --help".  */
void
md_show_usage (FILE * fp)
{
  struct arm_option_table *opt;
  struct arm_long_option_table *lopt;

  fprintf (fp, _(" ARM-specific assembler options:\n"));

  /* Table-driven options first, then the fixed ones handled directly in
     md_parse_option.  */
  for (opt = arm_opts; opt->option != NULL; opt++)
    if (opt->help != NULL)
      fprintf (fp, "  -%-23s%s\n", opt->option, _(opt->help));

  for (lopt = arm_long_opts; lopt->option != NULL; lopt++)
    if (lopt->help != NULL)
      fprintf (fp, "  -%s%s\n", lopt->option, _(lopt->help));

#ifdef OPTION_EB
  fprintf (fp, _("\
  -EB                     assemble code for a big-endian cpu\n"));
#endif

#ifdef OPTION_EL
  fprintf (fp, _("\
  -EL                     assemble code for a little-endian cpu\n"));
#endif

  fprintf (fp, _("\
  --fix-v4bx              Allow BX in ARMv4 code\n"));

#ifdef OBJ_ELF
  fprintf (fp, _("\
  --fdpic                 generate an FDPIC object file\n"));
#endif /* OBJ_ELF */
}
  28635. #ifdef OBJ_ELF
/* Pairs an EABI Tag_CPU_arch attribute value with the architecture
   feature set it corresponds to.  */
typedef struct
{
  /* Tag_CPU_arch build attribute value (TAG_CPU_ARCH_*), -1 terminates.  */
  int val;
  /* Feature set of the architecture mapped to VAL.  */
  arm_feature_set flags;
} cpu_arch_ver_table;
/* Mapping from CPU features to EABI CPU arch values.  Table must be sorted
   chronologically for architectures, with an exception for ARMv6-M and
   ARMv6S-M due to legacy reasons.  No new architecture should have a
   special case.  This allows for build attribute selection results to be
   stable when new architectures are added.  */
static const cpu_arch_ver_table cpu_arch_ver[] =
{
    {TAG_CPU_ARCH_PRE_V4,     ARM_ARCH_V1},
    {TAG_CPU_ARCH_PRE_V4,     ARM_ARCH_V2},
    {TAG_CPU_ARCH_PRE_V4,     ARM_ARCH_V2S},
    {TAG_CPU_ARCH_PRE_V4,     ARM_ARCH_V3},
    {TAG_CPU_ARCH_PRE_V4,     ARM_ARCH_V3M},
    {TAG_CPU_ARCH_V4,	      ARM_ARCH_V4xM},
    {TAG_CPU_ARCH_V4,	      ARM_ARCH_V4},
    {TAG_CPU_ARCH_V4T,	      ARM_ARCH_V4TxM},
    {TAG_CPU_ARCH_V4T,	      ARM_ARCH_V4T},
    {TAG_CPU_ARCH_V5T,	      ARM_ARCH_V5xM},
    {TAG_CPU_ARCH_V5T,	      ARM_ARCH_V5},
    {TAG_CPU_ARCH_V5T,	      ARM_ARCH_V5TxM},
    {TAG_CPU_ARCH_V5T,	      ARM_ARCH_V5T},
    {TAG_CPU_ARCH_V5TE,	      ARM_ARCH_V5TExP},
    {TAG_CPU_ARCH_V5TE,	      ARM_ARCH_V5TE},
    {TAG_CPU_ARCH_V5TEJ,      ARM_ARCH_V5TEJ},
    {TAG_CPU_ARCH_V6,	      ARM_ARCH_V6},
    {TAG_CPU_ARCH_V6KZ,	      ARM_ARCH_V6Z},
    {TAG_CPU_ARCH_V6KZ,	      ARM_ARCH_V6KZ},
    {TAG_CPU_ARCH_V6K,	      ARM_ARCH_V6K},
    {TAG_CPU_ARCH_V6T2,	      ARM_ARCH_V6T2},
    {TAG_CPU_ARCH_V6T2,	      ARM_ARCH_V6KT2},
    {TAG_CPU_ARCH_V6T2,	      ARM_ARCH_V6ZT2},
    {TAG_CPU_ARCH_V6T2,	      ARM_ARCH_V6KZT2},

    /* When assembling a file with only ARMv6-M or ARMv6S-M instruction, GNU as
       always selected build attributes to match those of ARMv6-M
       (resp. ARMv6S-M).  However, due to these architectures being a strict
       subset of ARMv7-M in terms of instructions available, ARMv7-M attributes
       would be selected when fully respecting chronology of architectures.
       It is thus necessary to make a special case of ARMv6-M and ARMv6S-M and
       move them before ARMv7 architectures.  */
    {TAG_CPU_ARCH_V6_M,	      ARM_ARCH_V6M},
    {TAG_CPU_ARCH_V6S_M,      ARM_ARCH_V6SM},

    {TAG_CPU_ARCH_V7,	      ARM_ARCH_V7},
    {TAG_CPU_ARCH_V7,	      ARM_ARCH_V7A},
    {TAG_CPU_ARCH_V7,	      ARM_ARCH_V7R},
    {TAG_CPU_ARCH_V7,	      ARM_ARCH_V7M},
    {TAG_CPU_ARCH_V7,	      ARM_ARCH_V7VE},
    {TAG_CPU_ARCH_V7E_M,      ARM_ARCH_V7EM},
    {TAG_CPU_ARCH_V8,	      ARM_ARCH_V8A},
    {TAG_CPU_ARCH_V8,	      ARM_ARCH_V8_1A},
    {TAG_CPU_ARCH_V8,	      ARM_ARCH_V8_2A},
    {TAG_CPU_ARCH_V8,	      ARM_ARCH_V8_3A},
    {TAG_CPU_ARCH_V8M_BASE,   ARM_ARCH_V8M_BASE},
    {TAG_CPU_ARCH_V8M_MAIN,   ARM_ARCH_V8M_MAIN},
    {TAG_CPU_ARCH_V8R,	      ARM_ARCH_V8R},
    {TAG_CPU_ARCH_V8,	      ARM_ARCH_V8_4A},
    {TAG_CPU_ARCH_V8,	      ARM_ARCH_V8_5A},
    {TAG_CPU_ARCH_V8_1M_MAIN, ARM_ARCH_V8_1M_MAIN},
    {TAG_CPU_ARCH_V8,	      ARM_ARCH_V8_6A},
    {TAG_CPU_ARCH_V8,	      ARM_ARCH_V8_7A},
    {TAG_CPU_ARCH_V8,	      ARM_ARCH_V8_8A},
    {TAG_CPU_ARCH_V9,	      ARM_ARCH_V9A},
    {TAG_CPU_ARCH_V9,	      ARM_ARCH_V9_1A},
    {TAG_CPU_ARCH_V9,	      ARM_ARCH_V9_2A},
    {TAG_CPU_ARCH_V9,	      ARM_ARCH_V9_3A},
    /* -1 terminates the table.  */
    {-1,		      ARM_ARCH_NONE}
};
  28706. /* Set an attribute if it has not already been set by the user. */
  28707. static void
  28708. aeabi_set_attribute_int (int tag, int value)
  28709. {
  28710. if (tag < 1
  28711. || tag >= NUM_KNOWN_OBJ_ATTRIBUTES
  28712. || !attributes_set_explicitly[tag])
  28713. bfd_elf_add_proc_attr_int (stdoutput, tag, value);
  28714. }
  28715. static void
  28716. aeabi_set_attribute_string (int tag, const char *value)
  28717. {
  28718. if (tag < 1
  28719. || tag >= NUM_KNOWN_OBJ_ATTRIBUTES
  28720. || !attributes_set_explicitly[tag])
  28721. bfd_elf_add_proc_attr_string (stdoutput, tag, value);
  28722. }
/* Return whether features in the *NEEDED feature set are available via
   extensions for the architecture whose feature set is *ARCH_FSET.  */
static bool
have_ext_for_needed_feat_p (const arm_feature_set *arch_fset,
			    const arm_feature_set *needed)
{
  int i, nb_allowed_archs;
  arm_feature_set ext_fset;
  const struct arm_option_extension_value_table *opt;

  /* Accumulate every feature reachable via some applicable extension.  */
  ext_fset = arm_arch_none;
  for (opt = arm_extensions; opt->name != NULL; opt++)
    {
      /* Extension does not provide any feature we need.  */
      if (!ARM_CPU_HAS_FEATURE (*needed, opt->merge_value))
	continue;

      nb_allowed_archs =
	sizeof (opt->allowed_archs) / sizeof (opt->allowed_archs[0]);
      for (i = 0; i < nb_allowed_archs; i++)
	{
	  /* Empty entry.  */
	  if (ARM_FEATURE_EQUAL (opt->allowed_archs[i], arm_arch_any))
	    break;

	  /* Extension is available, add it.  */
	  if (ARM_FSET_CPU_SUBSET (opt->allowed_archs[i], *arch_fset))
	    ARM_MERGE_FEATURE_SETS (ext_fset, ext_fset, opt->merge_value);
	}
    }

  /* Can we enable all features in *needed?  */
  return ARM_FSET_CPU_SUBSET (*needed, ext_fset);
}
/* Select value for Tag_CPU_arch and Tag_CPU_arch_profile build attributes for
   a given architecture feature set *ARCH_EXT_FSET including extension feature
   set *EXT_FSET.  Selection logic used depend on EXACT_MATCH:
   - if true, check for an exact match of the architecture modulo extensions;
   - otherwise, select build attribute value of the first superset
     architecture released so that results remains stable when new
     architectures are added.
   For -march/-mcpu=all the build attribute value of the most featureful
   architecture is returned.  Tag_CPU_arch_profile result is returned in
   PROFILE.  Returns -1 when no suitable architecture can be found.  */
static int
get_aeabi_cpu_arch_from_fset (const arm_feature_set *arch_ext_fset,
			      const arm_feature_set *ext_fset,
			      char *profile, int exact_match)
{
  arm_feature_set arch_fset;
  const cpu_arch_ver_table *p_ver, *p_ver_ret = NULL;

  /* Select most featureful architecture with all its extensions if building
     for -march=all as the feature sets used to set build attributes.  */
  if (ARM_FEATURE_EQUAL (*arch_ext_fset, arm_arch_any))
    {
      /* Force revisiting of decision for each new architecture.  */
      gas_assert (MAX_TAG_CPU_ARCH <= TAG_CPU_ARCH_V9);
      *profile = 'A';
      return TAG_CPU_ARCH_V9;
    }

  /* ARCH_FSET is the user architecture with the extension features
     stripped out.  */
  ARM_CLEAR_FEATURE (arch_fset, *arch_ext_fset, *ext_fset);

  for (p_ver = cpu_arch_ver; p_ver->val != -1; p_ver++)
    {
      arm_feature_set known_arch_fset;

      /* Compare CPU feature bits only; FPU bits are irrelevant here.  */
      ARM_CLEAR_FEATURE (known_arch_fset, p_ver->flags, fpu_any);
      if (exact_match)
	{
	  /* Base architecture match user-specified architecture and
	     extensions, eg. ARMv6S-M matching -march=armv6-m+os.  */
	  if (ARM_FEATURE_EQUAL (*arch_ext_fset, known_arch_fset))
	    {
	      p_ver_ret = p_ver;
	      goto found;
	    }
	  /* Base architecture match user-specified architecture only
	     (eg. ARMv6-M in the same case as above).  Record it in case we
	     find a match with above condition.  */
	  else if (p_ver_ret == NULL
		   && ARM_FEATURE_EQUAL (arch_fset, known_arch_fset))
	    p_ver_ret = p_ver;
	}
      else
	{

	  /* Architecture has all features wanted.  */
	  if (ARM_FSET_CPU_SUBSET (arch_fset, known_arch_fset))
	    {
	      arm_feature_set added_fset;

	      /* Compute features added by this architecture over the one
		 recorded in p_ver_ret.  */
	      if (p_ver_ret != NULL)
		ARM_CLEAR_FEATURE (added_fset, known_arch_fset,
				   p_ver_ret->flags);
	      /* First architecture that match incl. with extensions, or the
		 only difference in features over the recorded match is
		 features that were optional and are now mandatory.  */
	      if (p_ver_ret == NULL
		  || ARM_FSET_CPU_SUBSET (added_fset, arch_fset))
		{
		  p_ver_ret = p_ver;
		  goto found;
		}
	    }
	  else if (p_ver_ret == NULL)
	    {
	      arm_feature_set needed_ext_fset;

	      ARM_CLEAR_FEATURE (needed_ext_fset, arch_fset, known_arch_fset);

	      /* Architecture has all features needed when using some
		 extensions.  Record it and continue searching in case there
		 exist an architecture providing all needed features without
		 the need for extensions (eg. ARMv6S-M Vs ARMv6-M with
		 OS extension).  */
	      if (have_ext_for_needed_feat_p (&known_arch_fset,
					      &needed_ext_fset))
		p_ver_ret = p_ver;
	    }
	}
    }

  if (p_ver_ret == NULL)
    return -1;

found:
  /* Tag_CPU_arch_profile.  */
  if (!ARM_CPU_HAS_FEATURE (p_ver_ret->flags, arm_ext_v8r)
      && (ARM_CPU_HAS_FEATURE (p_ver_ret->flags, arm_ext_v7a)
	  || ARM_CPU_HAS_FEATURE (p_ver_ret->flags, arm_ext_v8)
	  || (ARM_CPU_HAS_FEATURE (p_ver_ret->flags, arm_ext_atomics)
	      && !ARM_CPU_HAS_FEATURE (p_ver_ret->flags, arm_ext_v8m_m_only))))
    *profile = 'A';
  else if (ARM_CPU_HAS_FEATURE (p_ver_ret->flags, arm_ext_v7r)
	   || ARM_CPU_HAS_FEATURE (p_ver_ret->flags, arm_ext_v8r))
    *profile = 'R';
  else if (ARM_CPU_HAS_FEATURE (p_ver_ret->flags, arm_ext_m))
    *profile = 'M';
  else
    *profile = '\0';
  return p_ver_ret->val;
}
/* Set the public EABI object attributes.

   Compute and record the .ARM.attributes build-attribute values
   (Tag_CPU_arch, Tag_CPU_arch_profile, the ISA/FP/SIMD usage tags, etc.)
   from either the auto-detected instruction feature sets or the
   user-selected CPU/architecture/extension/FPU.

   Side effect: in autodetection mode this also sets selected_arch,
   selected_ext and selected_cpu, which later relaxation code relies on.  */

static void
aeabi_set_public_attributes (void)
{
  char profile = '\0';
  int arch = -1;
  int virt_sec = 0;
  int fp16_optional = 0;
  int skip_exact_match = 0;
  arm_feature_set flags, flags_arch, flags_ext;

  /* Autodetection mode, choose the architecture based the instructions
     actually used.  */
  if (no_cpu_selected ())
    {
      ARM_MERGE_FEATURE_SETS (flags, arm_arch_used, thumb_arch_used);

      /* If any ARM (resp. Thumb) instruction was seen, the baseline
	 ARM (resp. Thumb) feature bit must be present too.  */
      if (ARM_CPU_HAS_FEATURE (arm_arch_used, arm_arch_any))
	ARM_MERGE_FEATURE_SETS (flags, flags, arm_ext_v1);

      if (ARM_CPU_HAS_FEATURE (thumb_arch_used, arm_arch_any))
	ARM_MERGE_FEATURE_SETS (flags, flags, arm_ext_v4t);

      /* Code run during relaxation relies on selected_cpu being set.  */
      ARM_CLEAR_FEATURE (flags_arch, flags, fpu_any);
      flags_ext = arm_arch_none;
      ARM_CLEAR_FEATURE (selected_arch, flags_arch, flags_ext);
      selected_ext = flags_ext;
      selected_cpu = flags;
    }
  /* Otherwise, choose the architecture based on the capabilities of the
     requested cpu.  */
  else
    {
      ARM_MERGE_FEATURE_SETS (flags_arch, selected_arch, selected_ext);
      ARM_CLEAR_FEATURE (flags_arch, flags_arch, fpu_any);
      flags_ext = selected_ext;
      flags = selected_cpu;
    }

  ARM_MERGE_FEATURE_SETS (flags, flags, selected_fpu);

  /* Allow the user to override the reported architecture.  */
  if (!ARM_FEATURE_ZERO (selected_object_arch))
    {
      ARM_CLEAR_FEATURE (flags_arch, selected_object_arch, fpu_any);
      flags_ext = arm_arch_none;
    }
  else
    skip_exact_match = ARM_FEATURE_EQUAL (selected_cpu, arm_arch_any);

  /* When this function is run again after relaxation has happened there is no
     way to determine whether an architecture or CPU was specified by the user:
     - selected_cpu is set above for relaxation to work;
     - march_cpu_opt is not set if only -mcpu or .cpu is used;
     - mcpu_cpu_opt is set to arm_arch_any for autodetection.
     Therefore, if not in -march=all case we first try an exact match and fall
     back to autodetection.  */
  if (!skip_exact_match)
    arch = get_aeabi_cpu_arch_from_fset (&flags_arch, &flags_ext, &profile, 1);
  if (arch == -1)
    arch = get_aeabi_cpu_arch_from_fset (&flags_arch, &flags_ext, &profile, 0);
  if (arch == -1)
    as_bad (_("no architecture contains all the instructions used\n"));

  /* Tag_CPU_name.  "armvN..." names are reported uppercased, with the
     "armv" prefix stripped.  */
  if (selected_cpu_name[0])
    {
      char *q;

      q = selected_cpu_name;
      if (startswith (q, "armv"))
	{
	  int i;

	  q += 4;
	  for (i = 0; q[i]; i++)
	    q[i] = TOUPPER (q[i]);
	}
      aeabi_set_attribute_string (Tag_CPU_name, q);
    }

  /* Tag_CPU_arch.  */
  aeabi_set_attribute_int (Tag_CPU_arch, arch);

  /* Tag_CPU_arch_profile.  PROFILE was filled in by
     get_aeabi_cpu_arch_from_fset above; '\0' means no profile.  */
  if (profile != '\0')
    aeabi_set_attribute_int (Tag_CPU_arch_profile, profile);

  /* Tag_DSP_extension.  */
  if (ARM_CPU_HAS_FEATURE (selected_ext, arm_ext_dsp))
    aeabi_set_attribute_int (Tag_DSP_extension, 1);

  /* Strip the FP feature bits: FP capabilities are reported separately
     via the Tag_VFP_* / SIMD tags below.  */
  ARM_CLEAR_FEATURE (flags_arch, flags, fpu_any);

  /* Tag_ARM_ISA_use.  */
  if (ARM_CPU_HAS_FEATURE (flags, arm_ext_v1)
      || ARM_FEATURE_ZERO (flags_arch))
    aeabi_set_attribute_int (Tag_ARM_ISA_use, 1);

  /* Tag_THUMB_ISA_use.  */
  if (ARM_CPU_HAS_FEATURE (flags, arm_ext_v4t)
      || ARM_FEATURE_ZERO (flags_arch))
    {
      int thumb_isa_use;

      if (!ARM_CPU_HAS_FEATURE (flags, arm_ext_v8)
	  && ARM_CPU_HAS_FEATURE (flags, arm_ext_v8m_m_only))
	thumb_isa_use = 3;
      else if (ARM_CPU_HAS_FEATURE (flags, arm_arch_t2))
	thumb_isa_use = 2;
      else
	thumb_isa_use = 1;
      aeabi_set_attribute_int (Tag_THUMB_ISA_use, thumb_isa_use);
    }

  /* Tag_VFP_arch.  Branches are ordered from most to least capable FP
     feature set; FP16_OPTIONAL marks levels where half precision is an
     optional extension (reported via Tag_VFP_HP_extension below).  */
  if (ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_armv8xd))
    aeabi_set_attribute_int (Tag_VFP_arch,
			     ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_d32)
			     ? 7 : 8);
  else if (ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_fma))
    aeabi_set_attribute_int (Tag_VFP_arch,
			     ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_d32)
			     ? 5 : 6);
  else if (ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_d32))
    {
      fp16_optional = 1;
      aeabi_set_attribute_int (Tag_VFP_arch, 3);
    }
  else if (ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_v3xd))
    {
      aeabi_set_attribute_int (Tag_VFP_arch, 4);
      fp16_optional = 1;
    }
  else if (ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_v2))
    aeabi_set_attribute_int (Tag_VFP_arch, 2);
  else if (ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_v1)
	   || ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_v1xd))
    aeabi_set_attribute_int (Tag_VFP_arch, 1);

  /* Tag_ABI_HardFP_use.  Single precision only (v1xd without full v1).  */
  if (ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_v1xd)
      && !ARM_CPU_HAS_FEATURE (flags, fpu_vfp_ext_v1))
    aeabi_set_attribute_int (Tag_ABI_HardFP_use, 1);

  /* Tag_WMMX_arch.  */
  if (ARM_CPU_HAS_FEATURE (flags, arm_cext_iwmmxt2))
    aeabi_set_attribute_int (Tag_WMMX_arch, 2);
  else if (ARM_CPU_HAS_FEATURE (flags, arm_cext_iwmmxt))
    aeabi_set_attribute_int (Tag_WMMX_arch, 1);

  /* Tag_Advanced_SIMD_arch (formerly Tag_NEON_arch).  */
  if (ARM_CPU_HAS_FEATURE (flags, fpu_neon_ext_v8_1))
    aeabi_set_attribute_int (Tag_Advanced_SIMD_arch, 4);
  else if (ARM_CPU_HAS_FEATURE (flags, fpu_neon_ext_armv8))
    aeabi_set_attribute_int (Tag_Advanced_SIMD_arch, 3);
  else if (ARM_CPU_HAS_FEATURE (flags, fpu_neon_ext_v1))
    {
      if (ARM_CPU_HAS_FEATURE (flags, fpu_neon_ext_fma))
	{
	  aeabi_set_attribute_int (Tag_Advanced_SIMD_arch, 2);
	}
      else
	{
	  aeabi_set_attribute_int (Tag_Advanced_SIMD_arch, 1);
	  fp16_optional = 1;
	}
    }

  /* Tag_MVE_arch.  */
  if (ARM_CPU_HAS_FEATURE (flags, mve_fp_ext))
    aeabi_set_attribute_int (Tag_MVE_arch, 2);
  else if (ARM_CPU_HAS_FEATURE (flags, mve_ext))
    aeabi_set_attribute_int (Tag_MVE_arch, 1);

  /* Tag_VFP_HP_extension (formerly Tag_NEON_FP16_arch).  */
  if (ARM_CPU_HAS_FEATURE (flags, fpu_vfp_fp16) && fp16_optional)
    aeabi_set_attribute_int (Tag_VFP_HP_extension, 1);

  /* Tag_DIV_use.

     We set Tag_DIV_use to two when integer divide instructions have been used
     in ARM state, or when Thumb integer divide instructions have been used,
     but we have no architecture profile set, nor have we any ARM instructions.

     For ARMv8-A and ARMv8-M we set the tag to 0 as integer divide is implied
     by the base architecture.

     For new architectures we will have to check these tests.  */
  gas_assert (arch <= TAG_CPU_ARCH_V9);
  if (ARM_CPU_HAS_FEATURE (flags, arm_ext_v8)
      || ARM_CPU_HAS_FEATURE (flags, arm_ext_v8m))
    aeabi_set_attribute_int (Tag_DIV_use, 0);
  else if (ARM_CPU_HAS_FEATURE (flags, arm_ext_adiv)
	   || (profile == '\0'
	       && ARM_CPU_HAS_FEATURE (flags, arm_ext_div)
	       && !ARM_CPU_HAS_FEATURE (arm_arch_used, arm_arch_any)))
    aeabi_set_attribute_int (Tag_DIV_use, 2);

  /* Tag_MP_extension_use.  */
  if (ARM_CPU_HAS_FEATURE (flags, arm_ext_mp))
    aeabi_set_attribute_int (Tag_MPextension_use, 1);

  /* Tag_Virtualization_use.  Bit 0: security (TrustZone) extensions,
     bit 1: virtualization extensions.  */
  if (ARM_CPU_HAS_FEATURE (flags, arm_ext_sec))
    virt_sec |= 1;
  if (ARM_CPU_HAS_FEATURE (flags, arm_ext_virt))
    virt_sec |= 2;
  if (virt_sec != 0)
    aeabi_set_attribute_int (Tag_Virtualization_use, virt_sec);

  /* Tag_ABI_FP_16bit_format, if an explicit fp16 format was requested.  */
  if (fp16_format != ARM_FP16_FORMAT_DEFAULT)
    aeabi_set_attribute_int (Tag_ABI_FP_16bit_format, fp16_format);
}
  29039. /* Post relaxation hook. Recompute ARM attributes now that relaxation is
  29040. finished and free extension feature bits which will not be used anymore. */
  29041. void
  29042. arm_md_post_relax (void)
  29043. {
  29044. aeabi_set_public_attributes ();
  29045. XDELETE (mcpu_ext_opt);
  29046. mcpu_ext_opt = NULL;
  29047. XDELETE (march_ext_opt);
  29048. march_ext_opt = NULL;
  29049. }
  29050. /* Add the default contents for the .ARM.attributes section. */
  29051. void
  29052. arm_md_end (void)
  29053. {
  29054. if (EF_ARM_EABI_VERSION (meabi_flags) < EF_ARM_EABI_VER4)
  29055. return;
  29056. aeabi_set_public_attributes ();
  29057. }
  29058. #endif /* OBJ_ELF */
  29059. /* Parse a .cpu directive. */
  29060. static void
  29061. s_arm_cpu (int ignored ATTRIBUTE_UNUSED)
  29062. {
  29063. const struct arm_cpu_option_table *opt;
  29064. char *name;
  29065. char saved_char;
  29066. name = input_line_pointer;
  29067. while (*input_line_pointer && !ISSPACE (*input_line_pointer))
  29068. input_line_pointer++;
  29069. saved_char = *input_line_pointer;
  29070. *input_line_pointer = 0;
  29071. /* Skip the first "all" entry. */
  29072. for (opt = arm_cpus + 1; opt->name != NULL; opt++)
  29073. if (streq (opt->name, name))
  29074. {
  29075. selected_arch = opt->value;
  29076. selected_ext = opt->ext;
  29077. ARM_MERGE_FEATURE_SETS (selected_cpu, selected_arch, selected_ext);
  29078. if (opt->canonical_name)
  29079. strcpy (selected_cpu_name, opt->canonical_name);
  29080. else
  29081. {
  29082. int i;
  29083. for (i = 0; opt->name[i]; i++)
  29084. selected_cpu_name[i] = TOUPPER (opt->name[i]);
  29085. selected_cpu_name[i] = 0;
  29086. }
  29087. ARM_MERGE_FEATURE_SETS (cpu_variant, selected_cpu, selected_fpu);
  29088. *input_line_pointer = saved_char;
  29089. demand_empty_rest_of_line ();
  29090. return;
  29091. }
  29092. as_bad (_("unknown cpu `%s'"), name);
  29093. *input_line_pointer = saved_char;
  29094. ignore_rest_of_line ();
  29095. }
  29096. /* Parse a .arch directive. */
  29097. static void
  29098. s_arm_arch (int ignored ATTRIBUTE_UNUSED)
  29099. {
  29100. const struct arm_arch_option_table *opt;
  29101. char saved_char;
  29102. char *name;
  29103. name = input_line_pointer;
  29104. while (*input_line_pointer && !ISSPACE (*input_line_pointer))
  29105. input_line_pointer++;
  29106. saved_char = *input_line_pointer;
  29107. *input_line_pointer = 0;
  29108. /* Skip the first "all" entry. */
  29109. for (opt = arm_archs + 1; opt->name != NULL; opt++)
  29110. if (streq (opt->name, name))
  29111. {
  29112. selected_arch = opt->value;
  29113. selected_ctx_ext_table = opt->ext_table;
  29114. selected_ext = arm_arch_none;
  29115. selected_cpu = selected_arch;
  29116. strcpy (selected_cpu_name, opt->name);
  29117. ARM_MERGE_FEATURE_SETS (cpu_variant, selected_cpu, selected_fpu);
  29118. *input_line_pointer = saved_char;
  29119. demand_empty_rest_of_line ();
  29120. return;
  29121. }
  29122. as_bad (_("unknown architecture `%s'\n"), name);
  29123. *input_line_pointer = saved_char;
  29124. ignore_rest_of_line ();
  29125. }
  29126. /* Parse a .object_arch directive. */
  29127. static void
  29128. s_arm_object_arch (int ignored ATTRIBUTE_UNUSED)
  29129. {
  29130. const struct arm_arch_option_table *opt;
  29131. char saved_char;
  29132. char *name;
  29133. name = input_line_pointer;
  29134. while (*input_line_pointer && !ISSPACE (*input_line_pointer))
  29135. input_line_pointer++;
  29136. saved_char = *input_line_pointer;
  29137. *input_line_pointer = 0;
  29138. /* Skip the first "all" entry. */
  29139. for (opt = arm_archs + 1; opt->name != NULL; opt++)
  29140. if (streq (opt->name, name))
  29141. {
  29142. selected_object_arch = opt->value;
  29143. *input_line_pointer = saved_char;
  29144. demand_empty_rest_of_line ();
  29145. return;
  29146. }
  29147. as_bad (_("unknown architecture `%s'\n"), name);
  29148. *input_line_pointer = saved_char;
  29149. ignore_rest_of_line ();
  29150. }
  29151. /* Parse a .arch_extension directive. */
  29152. static void
  29153. s_arm_arch_extension (int ignored ATTRIBUTE_UNUSED)
  29154. {
  29155. const struct arm_option_extension_value_table *opt;
  29156. char saved_char;
  29157. char *name;
  29158. int adding_value = 1;
  29159. name = input_line_pointer;
  29160. while (*input_line_pointer && !ISSPACE (*input_line_pointer))
  29161. input_line_pointer++;
  29162. saved_char = *input_line_pointer;
  29163. *input_line_pointer = 0;
  29164. if (strlen (name) >= 2
  29165. && startswith (name, "no"))
  29166. {
  29167. adding_value = 0;
  29168. name += 2;
  29169. }
  29170. /* Check the context specific extension table */
  29171. if (selected_ctx_ext_table)
  29172. {
  29173. const struct arm_ext_table * ext_opt;
  29174. for (ext_opt = selected_ctx_ext_table; ext_opt->name != NULL; ext_opt++)
  29175. {
  29176. if (streq (ext_opt->name, name))
  29177. {
  29178. if (adding_value)
  29179. {
  29180. if (ARM_FEATURE_ZERO (ext_opt->merge))
  29181. /* TODO: Option not supported. When we remove the
  29182. legacy table this case should error out. */
  29183. continue;
  29184. ARM_MERGE_FEATURE_SETS (selected_ext, selected_ext,
  29185. ext_opt->merge);
  29186. }
  29187. else
  29188. ARM_CLEAR_FEATURE (selected_ext, selected_ext, ext_opt->clear);
  29189. ARM_MERGE_FEATURE_SETS (selected_cpu, selected_arch, selected_ext);
  29190. ARM_MERGE_FEATURE_SETS (cpu_variant, selected_cpu, selected_fpu);
  29191. *input_line_pointer = saved_char;
  29192. demand_empty_rest_of_line ();
  29193. return;
  29194. }
  29195. }
  29196. }
  29197. for (opt = arm_extensions; opt->name != NULL; opt++)
  29198. if (streq (opt->name, name))
  29199. {
  29200. int i, nb_allowed_archs =
  29201. sizeof (opt->allowed_archs) / sizeof (opt->allowed_archs[i]);
  29202. for (i = 0; i < nb_allowed_archs; i++)
  29203. {
  29204. /* Empty entry. */
  29205. if (ARM_CPU_IS_ANY (opt->allowed_archs[i]))
  29206. continue;
  29207. if (ARM_FSET_CPU_SUBSET (opt->allowed_archs[i], selected_arch))
  29208. break;
  29209. }
  29210. if (i == nb_allowed_archs)
  29211. {
  29212. as_bad (_("architectural extension `%s' is not allowed for the "
  29213. "current base architecture"), name);
  29214. break;
  29215. }
  29216. if (adding_value)
  29217. ARM_MERGE_FEATURE_SETS (selected_ext, selected_ext,
  29218. opt->merge_value);
  29219. else
  29220. ARM_CLEAR_FEATURE (selected_ext, selected_ext, opt->clear_value);
  29221. ARM_MERGE_FEATURE_SETS (selected_cpu, selected_arch, selected_ext);
  29222. ARM_MERGE_FEATURE_SETS (cpu_variant, selected_cpu, selected_fpu);
  29223. *input_line_pointer = saved_char;
  29224. demand_empty_rest_of_line ();
  29225. /* Allowing Thumb division instructions for ARMv7 in autodetection rely
  29226. on this return so that duplicate extensions (extensions with the
  29227. same name as a previous extension in the list) are not considered
  29228. for command-line parsing. */
  29229. return;
  29230. }
  29231. if (opt->name == NULL)
  29232. as_bad (_("unknown architecture extension `%s'\n"), name);
  29233. *input_line_pointer = saved_char;
  29234. ignore_rest_of_line ();
  29235. }
  29236. /* Parse a .fpu directive. */
  29237. static void
  29238. s_arm_fpu (int ignored ATTRIBUTE_UNUSED)
  29239. {
  29240. const struct arm_option_fpu_value_table *opt;
  29241. char saved_char;
  29242. char *name;
  29243. name = input_line_pointer;
  29244. while (*input_line_pointer && !ISSPACE (*input_line_pointer))
  29245. input_line_pointer++;
  29246. saved_char = *input_line_pointer;
  29247. *input_line_pointer = 0;
  29248. for (opt = arm_fpus; opt->name != NULL; opt++)
  29249. if (streq (opt->name, name))
  29250. {
  29251. selected_fpu = opt->value;
  29252. ARM_CLEAR_FEATURE (selected_cpu, selected_cpu, fpu_any);
  29253. #ifndef CPU_DEFAULT
  29254. if (no_cpu_selected ())
  29255. ARM_MERGE_FEATURE_SETS (cpu_variant, arm_arch_any, selected_fpu);
  29256. else
  29257. #endif
  29258. ARM_MERGE_FEATURE_SETS (cpu_variant, selected_cpu, selected_fpu);
  29259. *input_line_pointer = saved_char;
  29260. demand_empty_rest_of_line ();
  29261. return;
  29262. }
  29263. as_bad (_("unknown floating point format `%s'\n"), name);
  29264. *input_line_pointer = saved_char;
  29265. ignore_rest_of_line ();
  29266. }
/* Copy symbol information.

   Propagate the ARM-specific per-symbol flag word (ARM_GET_FLAG) from
   SRC to DEST so DEST carries the same target-specific markings.  */

void
arm_copy_symbol_attributes (symbolS *dest, symbolS *src)
{
  ARM_GET_FLAG (dest) = ARM_GET_FLAG (src);
}
  29273. #ifdef OBJ_ELF
  29274. /* Given a symbolic attribute NAME, return the proper integer value.
  29275. Returns -1 if the attribute is not known. */
  29276. int
  29277. arm_convert_symbolic_attribute (const char *name)
  29278. {
  29279. static const struct
  29280. {
  29281. const char * name;
  29282. const int tag;
  29283. }
  29284. attribute_table[] =
  29285. {
  29286. /* When you modify this table you should
  29287. also modify the list in doc/c-arm.texi. */
  29288. #define T(tag) {#tag, tag}
  29289. T (Tag_CPU_raw_name),
  29290. T (Tag_CPU_name),
  29291. T (Tag_CPU_arch),
  29292. T (Tag_CPU_arch_profile),
  29293. T (Tag_ARM_ISA_use),
  29294. T (Tag_THUMB_ISA_use),
  29295. T (Tag_FP_arch),
  29296. T (Tag_VFP_arch),
  29297. T (Tag_WMMX_arch),
  29298. T (Tag_Advanced_SIMD_arch),
  29299. T (Tag_PCS_config),
  29300. T (Tag_ABI_PCS_R9_use),
  29301. T (Tag_ABI_PCS_RW_data),
  29302. T (Tag_ABI_PCS_RO_data),
  29303. T (Tag_ABI_PCS_GOT_use),
  29304. T (Tag_ABI_PCS_wchar_t),
  29305. T (Tag_ABI_FP_rounding),
  29306. T (Tag_ABI_FP_denormal),
  29307. T (Tag_ABI_FP_exceptions),
  29308. T (Tag_ABI_FP_user_exceptions),
  29309. T (Tag_ABI_FP_number_model),
  29310. T (Tag_ABI_align_needed),
  29311. T (Tag_ABI_align8_needed),
  29312. T (Tag_ABI_align_preserved),
  29313. T (Tag_ABI_align8_preserved),
  29314. T (Tag_ABI_enum_size),
  29315. T (Tag_ABI_HardFP_use),
  29316. T (Tag_ABI_VFP_args),
  29317. T (Tag_ABI_WMMX_args),
  29318. T (Tag_ABI_optimization_goals),
  29319. T (Tag_ABI_FP_optimization_goals),
  29320. T (Tag_compatibility),
  29321. T (Tag_CPU_unaligned_access),
  29322. T (Tag_FP_HP_extension),
  29323. T (Tag_VFP_HP_extension),
  29324. T (Tag_ABI_FP_16bit_format),
  29325. T (Tag_MPextension_use),
  29326. T (Tag_DIV_use),
  29327. T (Tag_nodefaults),
  29328. T (Tag_also_compatible_with),
  29329. T (Tag_conformance),
  29330. T (Tag_T2EE_use),
  29331. T (Tag_Virtualization_use),
  29332. T (Tag_DSP_extension),
  29333. T (Tag_MVE_arch),
  29334. T (Tag_PAC_extension),
  29335. T (Tag_BTI_extension),
  29336. T (Tag_BTI_use),
  29337. T (Tag_PACRET_use),
  29338. /* We deliberately do not include Tag_MPextension_use_legacy. */
  29339. #undef T
  29340. };
  29341. unsigned int i;
  29342. if (name == NULL)
  29343. return -1;
  29344. for (i = 0; i < ARRAY_SIZE (attribute_table); i++)
  29345. if (streq (name, attribute_table[i].name))
  29346. return attribute_table[i].tag;
  29347. return -1;
  29348. }
  29349. /* Apply sym value for relocations only in the case that they are for
  29350. local symbols in the same segment as the fixup and you have the
  29351. respective architectural feature for blx and simple switches. */
  29352. int
  29353. arm_apply_sym_value (struct fix * fixP, segT this_seg)
  29354. {
  29355. if (fixP->fx_addsy
  29356. && ARM_CPU_HAS_FEATURE (selected_cpu, arm_ext_v5t)
  29357. /* PR 17444: If the local symbol is in a different section then a reloc
  29358. will always be generated for it, so applying the symbol value now
  29359. will result in a double offset being stored in the relocation. */
  29360. && (S_GET_SEGMENT (fixP->fx_addsy) == this_seg)
  29361. && !S_FORCE_RELOC (fixP->fx_addsy, true))
  29362. {
  29363. switch (fixP->fx_r_type)
  29364. {
  29365. case BFD_RELOC_ARM_PCREL_BLX:
  29366. case BFD_RELOC_THUMB_PCREL_BRANCH23:
  29367. if (ARM_IS_FUNC (fixP->fx_addsy))
  29368. return 1;
  29369. break;
  29370. case BFD_RELOC_ARM_PCREL_CALL:
  29371. case BFD_RELOC_THUMB_PCREL_BLX:
  29372. if (THUMB_IS_FUNC (fixP->fx_addsy))
  29373. return 1;
  29374. break;
  29375. default:
  29376. break;
  29377. }
  29378. }
  29379. return 0;
  29380. }
  29381. #endif /* OBJ_ELF */