- #
- # D. J. Bernstein
- # Public domain.
- #
- # qhasm: int64 r11_caller
- # qhasm: int64 r12_caller
- # qhasm: int64 r13_caller
- # qhasm: int64 r14_caller
- # qhasm: int64 r15_caller
- # qhasm: int64 rbx_caller
- # qhasm: int64 rbp_caller
- # qhasm: caller r11_caller
- # qhasm: caller r12_caller
- # qhasm: caller r13_caller
- # qhasm: caller r14_caller
- # qhasm: caller r15_caller
- # qhasm: caller rbx_caller
- # qhasm: caller rbp_caller
- # qhasm: stack64 r11_stack
- # qhasm: stack64 r12_stack
- # qhasm: stack64 r13_stack
- # qhasm: stack64 r14_stack
- # qhasm: stack64 r15_stack
- # qhasm: stack64 rbx_stack
- # qhasm: stack64 rbp_stack
- # qhasm: int64 a
- # qhasm: int64 arg1
- # qhasm: int64 arg2
- # qhasm: int64 arg3
- # qhasm: int64 arg4
- # qhasm: int64 arg5
- # qhasm: input arg1
- # qhasm: input arg2
- # qhasm: input arg3
- # qhasm: input arg4
- # qhasm: input arg5
- # qhasm: int64 k
- # qhasm: int64 kbits
- # qhasm: int64 iv
- # qhasm: int64 i
- # qhasm: stack128 x0
- # qhasm: stack128 x1
- # qhasm: stack128 x2
- # qhasm: stack128 x3
- # qhasm: int64 m
- # qhasm: int64 out
- # qhasm: int64 bytes
- # qhasm: stack32 eax_stack
- # qhasm: stack32 ebx_stack
- # qhasm: stack32 esi_stack
- # qhasm: stack32 edi_stack
- # qhasm: stack32 ebp_stack
- # qhasm: int6464 diag0
- # qhasm: int6464 diag1
- # qhasm: int6464 diag2
- # qhasm: int6464 diag3
- # qhasm: int6464 a0
- # qhasm: int6464 a1
- # qhasm: int6464 a2
- # qhasm: int6464 a3
- # qhasm: int6464 a4
- # qhasm: int6464 a5
- # qhasm: int6464 a6
- # qhasm: int6464 a7
- # qhasm: int6464 b0
- # qhasm: int6464 b1
- # qhasm: int6464 b2
- # qhasm: int6464 b3
- # qhasm: int6464 b4
- # qhasm: int6464 b5
- # qhasm: int6464 b6
- # qhasm: int6464 b7
- # qhasm: int6464 z0
- # qhasm: int6464 z1
- # qhasm: int6464 z2
- # qhasm: int6464 z3
- # qhasm: int6464 z4
- # qhasm: int6464 z5
- # qhasm: int6464 z6
- # qhasm: int6464 z7
- # qhasm: int6464 z8
- # qhasm: int6464 z9
- # qhasm: int6464 z10
- # qhasm: int6464 z11
- # qhasm: int6464 z12
- # qhasm: int6464 z13
- # qhasm: int6464 z14
- # qhasm: int6464 z15
- # qhasm: stack128 z0_stack
- # qhasm: stack128 z1_stack
- # qhasm: stack128 z2_stack
- # qhasm: stack128 z3_stack
- # qhasm: stack128 z4_stack
- # qhasm: stack128 z5_stack
- # qhasm: stack128 z6_stack
- # qhasm: stack128 z7_stack
- # qhasm: stack128 z8_stack
- # qhasm: stack128 z9_stack
- # qhasm: stack128 z10_stack
- # qhasm: stack128 z11_stack
- # qhasm: stack128 z12_stack
- # qhasm: stack128 z13_stack
- # qhasm: stack128 z14_stack
- # qhasm: stack128 z15_stack
- # qhasm: int6464 y0
- # qhasm: int6464 y1
- # qhasm: int6464 y2
- # qhasm: int6464 y3
- # qhasm: int6464 y4
- # qhasm: int6464 y5
- # qhasm: int6464 y6
- # qhasm: int6464 y7
- # qhasm: int6464 y8
- # qhasm: int6464 y9
- # qhasm: int6464 y10
- # qhasm: int6464 y11
- # qhasm: int6464 y12
- # qhasm: int6464 y13
- # qhasm: int6464 y14
- # qhasm: int6464 y15
- # qhasm: int6464 r0
- # qhasm: int6464 r1
- # qhasm: int6464 r2
- # qhasm: int6464 r3
- # qhasm: int6464 r4
- # qhasm: int6464 r5
- # qhasm: int6464 r6
- # qhasm: int6464 r7
- # qhasm: int6464 r8
- # qhasm: int6464 r9
- # qhasm: int6464 r10
- # qhasm: int6464 r11
- # qhasm: int6464 r12
- # qhasm: int6464 r13
- # qhasm: int6464 r14
- # qhasm: int6464 r15
- # qhasm: stack128 orig0
- # qhasm: stack128 orig1
- # qhasm: stack128 orig2
- # qhasm: stack128 orig3
- # qhasm: stack128 orig4
- # qhasm: stack128 orig5
- # qhasm: stack128 orig6
- # qhasm: stack128 orig7
- # qhasm: stack128 orig8
- # qhasm: stack128 orig9
- # qhasm: stack128 orig10
- # qhasm: stack128 orig11
- # qhasm: stack128 orig12
- # qhasm: stack128 orig13
- # qhasm: stack128 orig14
- # qhasm: stack128 orig15
- # qhasm: int64 in0
- # qhasm: int64 in1
- # qhasm: int64 in2
- # qhasm: int64 in3
- # qhasm: int64 in4
- # qhasm: int64 in5
- # qhasm: int64 in6
- # qhasm: int64 in7
- # qhasm: int64 in8
- # qhasm: int64 in9
- # qhasm: int64 in10
- # qhasm: int64 in11
- # qhasm: int64 in12
- # qhasm: int64 in13
- # qhasm: int64 in14
- # qhasm: int64 in15
- # qhasm: stack512 tmp
- # qhasm: int64 ctarget
- # qhasm: stack64 bytes_backup
- # qhasm: enter crypto_stream_salsa20_amd64_xmm6
- .text
- .p2align 5
- .globl _crypto_stream_salsa20_amd64_xmm6
- .globl crypto_stream_salsa20_amd64_xmm6
- _crypto_stream_salsa20_amd64_xmm6:
- crypto_stream_salsa20_amd64_xmm6:
- mov %rsp,%r11
- and $31,%r11
- add $480,%r11
- sub %r11,%rsp
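The four instructions above reserve the 480-byte frame and align `%rsp` to a 32-byte boundary in one step: `%r11` receives the current misalignment plus 480 (itself a multiple of 32), so subtracting it leaves an aligned stack with at least 480 bytes of scratch space. A minimal C sketch of the arithmetic (the function name is ours, for illustration only):

```c
#include <stdint.h>

/* Sketch of the prologue's frame computation: r11 = (rsp & 31) + 480,
   then rsp -= r11. Because 480 = 15 * 32, the result is 32-byte
   aligned and at least 480 bytes below the caller's stack pointer. */
uint64_t aligned_frame(uint64_t rsp) {
    uint64_t r11 = (rsp & 31) + 480;
    return rsp - r11;
}
```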
- # qhasm: r11_stack = r11_caller
- # asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1
- # asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp)
- movq %r11,352(%rsp)
- # qhasm: r12_stack = r12_caller
- # asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2
- # asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp)
- movq %r12,360(%rsp)
- # qhasm: r13_stack = r13_caller
- # asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3
- # asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp)
- movq %r13,368(%rsp)
- # qhasm: r14_stack = r14_caller
- # asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4
- # asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp)
- movq %r14,376(%rsp)
- # qhasm: r15_stack = r15_caller
- # asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5
- # asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp)
- movq %r15,384(%rsp)
- # qhasm: rbx_stack = rbx_caller
- # asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6
- # asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp)
- movq %rbx,392(%rsp)
- # qhasm: rbp_stack = rbp_caller
- # asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7
- # asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp)
- movq %rbp,400(%rsp)
- # qhasm: bytes = arg2
- # asm 1: mov <arg2=int64#2,>bytes=int64#6
- # asm 2: mov <arg2=%rsi,>bytes=%r9
- mov %rsi,%r9
- # qhasm: out = arg1
- # asm 1: mov <arg1=int64#1,>out=int64#1
- # asm 2: mov <arg1=%rdi,>out=%rdi
- mov %rdi,%rdi
- # qhasm: m = out
- # asm 1: mov <out=int64#1,>m=int64#2
- # asm 2: mov <out=%rdi,>m=%rsi
- mov %rdi,%rsi
- # qhasm: iv = arg3
- # asm 1: mov <arg3=int64#3,>iv=int64#3
- # asm 2: mov <arg3=%rdx,>iv=%rdx
- mov %rdx,%rdx
- # qhasm: k = arg4
- # asm 1: mov <arg4=int64#4,>k=int64#8
- # asm 2: mov <arg4=%rcx,>k=%r10
- mov %rcx,%r10
- # qhasm: unsigned>? bytes - 0
- # asm 1: cmp $0,<bytes=int64#6
- # asm 2: cmp $0,<bytes=%r9
- cmp $0,%r9
- # comment:fp stack unchanged by jump
- # qhasm: goto done if !unsigned>
- jbe ._done
- # qhasm: a = 0
- # asm 1: mov $0,>a=int64#7
- # asm 2: mov $0,>a=%rax
- mov $0,%rax
- # qhasm: i = bytes
- # asm 1: mov <bytes=int64#6,>i=int64#4
- # asm 2: mov <bytes=%r9,>i=%rcx
- mov %r9,%rcx
- # qhasm: while (i) { *out++ = a; --i }
- rep stosb
- # qhasm: out -= bytes
- # asm 1: sub <bytes=int64#6,<out=int64#1
- # asm 2: sub <bytes=%r9,<out=%rdi
- sub %r9,%rdi
- # comment:fp stack unchanged by jump
- # qhasm: goto start
- jmp ._start
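This entry point produces raw keystream by reusing the xor core: it zero-fills the output buffer (`rep stosb` with `%al` = 0), rewinds `out`, aliases `m` to `out`, and falls through to the same `._start` label used by the xor variant, so the keystream is obtained by xoring zeros. A hedged C sketch of that control flow (function names are illustrative, not the library's API):

```c
#include <stddef.h>
#include <string.h>

/* hypothetical prototype mirroring the xor entry point */
int stream_xor(unsigned char *out, const unsigned char *m, size_t bytes,
               const unsigned char *iv, const unsigned char *k);

/* hypothetical wrapper mirroring the non-xor entry's control flow */
int stream(unsigned char *out, size_t bytes,
           const unsigned char *iv, const unsigned char *k) {
    if (bytes == 0) return 0;          /* the jbe ._done check */
    memset(out, 0, bytes);             /* rep stosb, then out -= bytes */
    return stream_xor(out, out, bytes, iv, k);   /* jmp ._start */
}
```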
- # qhasm: enter crypto_stream_salsa20_amd64_xmm6_xor
- .text
- .p2align 5
- .globl _crypto_stream_salsa20_amd64_xmm6_xor
- .globl crypto_stream_salsa20_amd64_xmm6_xor
- _crypto_stream_salsa20_amd64_xmm6_xor:
- crypto_stream_salsa20_amd64_xmm6_xor:
- mov %rsp,%r11
- and $31,%r11
- add $480,%r11
- sub %r11,%rsp
- # qhasm: r11_stack = r11_caller
- # asm 1: movq <r11_caller=int64#9,>r11_stack=stack64#1
- # asm 2: movq <r11_caller=%r11,>r11_stack=352(%rsp)
- movq %r11,352(%rsp)
- # qhasm: r12_stack = r12_caller
- # asm 1: movq <r12_caller=int64#10,>r12_stack=stack64#2
- # asm 2: movq <r12_caller=%r12,>r12_stack=360(%rsp)
- movq %r12,360(%rsp)
- # qhasm: r13_stack = r13_caller
- # asm 1: movq <r13_caller=int64#11,>r13_stack=stack64#3
- # asm 2: movq <r13_caller=%r13,>r13_stack=368(%rsp)
- movq %r13,368(%rsp)
- # qhasm: r14_stack = r14_caller
- # asm 1: movq <r14_caller=int64#12,>r14_stack=stack64#4
- # asm 2: movq <r14_caller=%r14,>r14_stack=376(%rsp)
- movq %r14,376(%rsp)
- # qhasm: r15_stack = r15_caller
- # asm 1: movq <r15_caller=int64#13,>r15_stack=stack64#5
- # asm 2: movq <r15_caller=%r15,>r15_stack=384(%rsp)
- movq %r15,384(%rsp)
- # qhasm: rbx_stack = rbx_caller
- # asm 1: movq <rbx_caller=int64#14,>rbx_stack=stack64#6
- # asm 2: movq <rbx_caller=%rbx,>rbx_stack=392(%rsp)
- movq %rbx,392(%rsp)
- # qhasm: rbp_stack = rbp_caller
- # asm 1: movq <rbp_caller=int64#15,>rbp_stack=stack64#7
- # asm 2: movq <rbp_caller=%rbp,>rbp_stack=400(%rsp)
- movq %rbp,400(%rsp)
- # qhasm: out = arg1
- # asm 1: mov <arg1=int64#1,>out=int64#1
- # asm 2: mov <arg1=%rdi,>out=%rdi
- mov %rdi,%rdi
- # qhasm: m = arg2
- # asm 1: mov <arg2=int64#2,>m=int64#2
- # asm 2: mov <arg2=%rsi,>m=%rsi
- mov %rsi,%rsi
- # qhasm: bytes = arg3
- # asm 1: mov <arg3=int64#3,>bytes=int64#6
- # asm 2: mov <arg3=%rdx,>bytes=%r9
- mov %rdx,%r9
- # qhasm: iv = arg4
- # asm 1: mov <arg4=int64#4,>iv=int64#3
- # asm 2: mov <arg4=%rcx,>iv=%rdx
- mov %rcx,%rdx
- # qhasm: k = arg5
- # asm 1: mov <arg5=int64#5,>k=int64#8
- # asm 2: mov <arg5=%r8,>k=%r10
- mov %r8,%r10
- # qhasm: unsigned>? bytes - 0
- # asm 1: cmp $0,<bytes=int64#6
- # asm 2: cmp $0,<bytes=%r9
- cmp $0,%r9
- # comment:fp stack unchanged by jump
- # qhasm: goto done if !unsigned>
- jbe ._done
- # comment:fp stack unchanged by fallthrough
- # qhasm: start:
- ._start:
- # qhasm: in12 = *(uint32 *) (k + 20)
- # asm 1: movl 20(<k=int64#8),>in12=int64#4d
- # asm 2: movl 20(<k=%r10),>in12=%ecx
- movl 20(%r10),%ecx
- # qhasm: in1 = *(uint32 *) (k + 0)
- # asm 1: movl 0(<k=int64#8),>in1=int64#5d
- # asm 2: movl 0(<k=%r10),>in1=%r8d
- movl 0(%r10),%r8d
- # qhasm: in6 = *(uint32 *) (iv + 0)
- # asm 1: movl 0(<iv=int64#3),>in6=int64#7d
- # asm 2: movl 0(<iv=%rdx),>in6=%eax
- movl 0(%rdx),%eax
- # qhasm: in11 = *(uint32 *) (k + 16)
- # asm 1: movl 16(<k=int64#8),>in11=int64#9d
- # asm 2: movl 16(<k=%r10),>in11=%r11d
- movl 16(%r10),%r11d
- # qhasm: ((uint32 *)&x1)[0] = in12
- # asm 1: movl <in12=int64#4d,>x1=stack128#1
- # asm 2: movl <in12=%ecx,>x1=0(%rsp)
- movl %ecx,0(%rsp)
- # qhasm: ((uint32 *)&x1)[1] = in1
- # asm 1: movl <in1=int64#5d,4+<x1=stack128#1
- # asm 2: movl <in1=%r8d,4+<x1=0(%rsp)
- movl %r8d,4+0(%rsp)
- # qhasm: ((uint32 *)&x1)[2] = in6
- # asm 1: movl <in6=int64#7d,8+<x1=stack128#1
- # asm 2: movl <in6=%eax,8+<x1=0(%rsp)
- movl %eax,8+0(%rsp)
- # qhasm: ((uint32 *)&x1)[3] = in11
- # asm 1: movl <in11=int64#9d,12+<x1=stack128#1
- # asm 2: movl <in11=%r11d,12+<x1=0(%rsp)
- movl %r11d,12+0(%rsp)
- # qhasm: in8 = 0
- # asm 1: mov $0,>in8=int64#4
- # asm 2: mov $0,>in8=%rcx
- mov $0,%rcx
- # qhasm: in13 = *(uint32 *) (k + 24)
- # asm 1: movl 24(<k=int64#8),>in13=int64#5d
- # asm 2: movl 24(<k=%r10),>in13=%r8d
- movl 24(%r10),%r8d
- # qhasm: in2 = *(uint32 *) (k + 4)
- # asm 1: movl 4(<k=int64#8),>in2=int64#7d
- # asm 2: movl 4(<k=%r10),>in2=%eax
- movl 4(%r10),%eax
- # qhasm: in7 = *(uint32 *) (iv + 4)
- # asm 1: movl 4(<iv=int64#3),>in7=int64#3d
- # asm 2: movl 4(<iv=%rdx),>in7=%edx
- movl 4(%rdx),%edx
- # qhasm: ((uint32 *)&x2)[0] = in8
- # asm 1: movl <in8=int64#4d,>x2=stack128#2
- # asm 2: movl <in8=%ecx,>x2=16(%rsp)
- movl %ecx,16(%rsp)
- # qhasm: ((uint32 *)&x2)[1] = in13
- # asm 1: movl <in13=int64#5d,4+<x2=stack128#2
- # asm 2: movl <in13=%r8d,4+<x2=16(%rsp)
- movl %r8d,4+16(%rsp)
- # qhasm: ((uint32 *)&x2)[2] = in2
- # asm 1: movl <in2=int64#7d,8+<x2=stack128#2
- # asm 2: movl <in2=%eax,8+<x2=16(%rsp)
- movl %eax,8+16(%rsp)
- # qhasm: ((uint32 *)&x2)[3] = in7
- # asm 1: movl <in7=int64#3d,12+<x2=stack128#2
- # asm 2: movl <in7=%edx,12+<x2=16(%rsp)
- movl %edx,12+16(%rsp)
- # qhasm: in4 = *(uint32 *) (k + 12)
- # asm 1: movl 12(<k=int64#8),>in4=int64#3d
- # asm 2: movl 12(<k=%r10),>in4=%edx
- movl 12(%r10),%edx
- # qhasm: in9 = 0
- # asm 1: mov $0,>in9=int64#4
- # asm 2: mov $0,>in9=%rcx
- mov $0,%rcx
- # qhasm: in14 = *(uint32 *) (k + 28)
- # asm 1: movl 28(<k=int64#8),>in14=int64#5d
- # asm 2: movl 28(<k=%r10),>in14=%r8d
- movl 28(%r10),%r8d
- # qhasm: in3 = *(uint32 *) (k + 8)
- # asm 1: movl 8(<k=int64#8),>in3=int64#7d
- # asm 2: movl 8(<k=%r10),>in3=%eax
- movl 8(%r10),%eax
- # qhasm: ((uint32 *)&x3)[0] = in4
- # asm 1: movl <in4=int64#3d,>x3=stack128#3
- # asm 2: movl <in4=%edx,>x3=32(%rsp)
- movl %edx,32(%rsp)
- # qhasm: ((uint32 *)&x3)[1] = in9
- # asm 1: movl <in9=int64#4d,4+<x3=stack128#3
- # asm 2: movl <in9=%ecx,4+<x3=32(%rsp)
- movl %ecx,4+32(%rsp)
- # qhasm: ((uint32 *)&x3)[2] = in14
- # asm 1: movl <in14=int64#5d,8+<x3=stack128#3
- # asm 2: movl <in14=%r8d,8+<x3=32(%rsp)
- movl %r8d,8+32(%rsp)
- # qhasm: ((uint32 *)&x3)[3] = in3
- # asm 1: movl <in3=int64#7d,12+<x3=stack128#3
- # asm 2: movl <in3=%eax,12+<x3=32(%rsp)
- movl %eax,12+32(%rsp)
- # qhasm: in0 = 1634760805
- # asm 1: mov $1634760805,>in0=int64#3
- # asm 2: mov $1634760805,>in0=%rdx
- mov $1634760805,%rdx
- # qhasm: in5 = 857760878
- # asm 1: mov $857760878,>in5=int64#4
- # asm 2: mov $857760878,>in5=%rcx
- mov $857760878,%rcx
- # qhasm: in10 = 2036477234
- # asm 1: mov $2036477234,>in10=int64#5
- # asm 2: mov $2036477234,>in10=%r8
- mov $2036477234,%r8
- # qhasm: in15 = 1797285236
- # asm 1: mov $1797285236,>in15=int64#7
- # asm 2: mov $1797285236,>in15=%rax
- mov $1797285236,%rax
- # qhasm: ((uint32 *)&x0)[0] = in0
- # asm 1: movl <in0=int64#3d,>x0=stack128#4
- # asm 2: movl <in0=%edx,>x0=48(%rsp)
- movl %edx,48(%rsp)
- # qhasm: ((uint32 *)&x0)[1] = in5
- # asm 1: movl <in5=int64#4d,4+<x0=stack128#4
- # asm 2: movl <in5=%ecx,4+<x0=48(%rsp)
- movl %ecx,4+48(%rsp)
- # qhasm: ((uint32 *)&x0)[2] = in10
- # asm 1: movl <in10=int64#5d,8+<x0=stack128#4
- # asm 2: movl <in10=%r8d,8+<x0=48(%rsp)
- movl %r8d,8+48(%rsp)
- # qhasm: ((uint32 *)&x0)[3] = in15
- # asm 1: movl <in15=int64#7d,12+<x0=stack128#4
- # asm 2: movl <in15=%eax,12+<x0=48(%rsp)
- movl %eax,12+48(%rsp)
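The loads above assemble the Salsa20 state by diagonals rather than rows: `x0` holds words {0,5,10,15} (the four "expand 32-byte k" constants; 1634760805, 857760878, 2036477234 and 1797285236 are 0x61707865, 0x3320646e, 0x79622d32 and 0x6b206574), `x1` holds {12,1,6,11}, `x2` holds {8,13,2,7}, and `x3` holds {4,9,14,3}. A C sketch of the mapping (the helper is ours):

```c
#include <stdint.h>

static const uint32_t sigma[4] = {       /* "expa" "nd 3" "2-by" "te k" */
    0x61707865, 0x3320646e, 0x79622d32, 0x6b206574
};

/* s[] is the usual row-major Salsa20 state: constants on the main
   diagonal, key in words 1..4 and 11..14, iv in 6..7, counter in 8..9. */
void load_diagonals(const uint32_t s[16],
                    uint32_t x0[4], uint32_t x1[4],
                    uint32_t x2[4], uint32_t x3[4]) {
    for (int i = 0; i < 4; i++) {
        x0[i] = s[(5 * i) % 16];          /* 0, 5, 10, 15 */
        x1[i] = s[(12 + 5 * i) % 16];     /* 12, 1, 6, 11 */
        x2[i] = s[(8 + 5 * i) % 16];      /* 8, 13, 2, 7  */
        x3[i] = s[(4 + 5 * i) % 16];      /* 4, 9, 14, 3  */
    }
}
```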
- # qhasm: unsigned<? bytes - 256
- # asm 1: cmp $256,<bytes=int64#6
- # asm 2: cmp $256,<bytes=%r9
- cmp $256,%r9
- # comment:fp stack unchanged by jump
- # qhasm: goto bytesbetween1and255 if unsigned<
- jb ._bytesbetween1and255
- # qhasm: z0 = x0
- # asm 1: movdqa <x0=stack128#4,>z0=int6464#1
- # asm 2: movdqa <x0=48(%rsp),>z0=%xmm0
- movdqa 48(%rsp),%xmm0
- # qhasm: z5 = z0[1,1,1,1]
- # asm 1: pshufd $0x55,<z0=int6464#1,>z5=int6464#2
- # asm 2: pshufd $0x55,<z0=%xmm0,>z5=%xmm1
- pshufd $0x55,%xmm0,%xmm1
- # qhasm: z10 = z0[2,2,2,2]
- # asm 1: pshufd $0xaa,<z0=int6464#1,>z10=int6464#3
- # asm 2: pshufd $0xaa,<z0=%xmm0,>z10=%xmm2
- pshufd $0xaa,%xmm0,%xmm2
- # qhasm: z15 = z0[3,3,3,3]
- # asm 1: pshufd $0xff,<z0=int6464#1,>z15=int6464#4
- # asm 2: pshufd $0xff,<z0=%xmm0,>z15=%xmm3
- pshufd $0xff,%xmm0,%xmm3
- # qhasm: z0 = z0[0,0,0,0]
- # asm 1: pshufd $0x00,<z0=int6464#1,>z0=int6464#1
- # asm 2: pshufd $0x00,<z0=%xmm0,>z0=%xmm0
- pshufd $0x00,%xmm0,%xmm0
- # qhasm: orig5 = z5
- # asm 1: movdqa <z5=int6464#2,>orig5=stack128#5
- # asm 2: movdqa <z5=%xmm1,>orig5=64(%rsp)
- movdqa %xmm1,64(%rsp)
- # qhasm: orig10 = z10
- # asm 1: movdqa <z10=int6464#3,>orig10=stack128#6
- # asm 2: movdqa <z10=%xmm2,>orig10=80(%rsp)
- movdqa %xmm2,80(%rsp)
- # qhasm: orig15 = z15
- # asm 1: movdqa <z15=int6464#4,>orig15=stack128#7
- # asm 2: movdqa <z15=%xmm3,>orig15=96(%rsp)
- movdqa %xmm3,96(%rsp)
- # qhasm: orig0 = z0
- # asm 1: movdqa <z0=int6464#1,>orig0=stack128#8
- # asm 2: movdqa <z0=%xmm0,>orig0=112(%rsp)
- movdqa %xmm0,112(%rsp)
- # qhasm: z1 = x1
- # asm 1: movdqa <x1=stack128#1,>z1=int6464#1
- # asm 2: movdqa <x1=0(%rsp),>z1=%xmm0
- movdqa 0(%rsp),%xmm0
- # qhasm: z6 = z1[2,2,2,2]
- # asm 1: pshufd $0xaa,<z1=int6464#1,>z6=int6464#2
- # asm 2: pshufd $0xaa,<z1=%xmm0,>z6=%xmm1
- pshufd $0xaa,%xmm0,%xmm1
- # qhasm: z11 = z1[3,3,3,3]
- # asm 1: pshufd $0xff,<z1=int6464#1,>z11=int6464#3
- # asm 2: pshufd $0xff,<z1=%xmm0,>z11=%xmm2
- pshufd $0xff,%xmm0,%xmm2
- # qhasm: z12 = z1[0,0,0,0]
- # asm 1: pshufd $0x00,<z1=int6464#1,>z12=int6464#4
- # asm 2: pshufd $0x00,<z1=%xmm0,>z12=%xmm3
- pshufd $0x00,%xmm0,%xmm3
- # qhasm: z1 = z1[1,1,1,1]
- # asm 1: pshufd $0x55,<z1=int6464#1,>z1=int6464#1
- # asm 2: pshufd $0x55,<z1=%xmm0,>z1=%xmm0
- pshufd $0x55,%xmm0,%xmm0
- # qhasm: orig6 = z6
- # asm 1: movdqa <z6=int6464#2,>orig6=stack128#9
- # asm 2: movdqa <z6=%xmm1,>orig6=128(%rsp)
- movdqa %xmm1,128(%rsp)
- # qhasm: orig11 = z11
- # asm 1: movdqa <z11=int6464#3,>orig11=stack128#10
- # asm 2: movdqa <z11=%xmm2,>orig11=144(%rsp)
- movdqa %xmm2,144(%rsp)
- # qhasm: orig12 = z12
- # asm 1: movdqa <z12=int6464#4,>orig12=stack128#11
- # asm 2: movdqa <z12=%xmm3,>orig12=160(%rsp)
- movdqa %xmm3,160(%rsp)
- # qhasm: orig1 = z1
- # asm 1: movdqa <z1=int6464#1,>orig1=stack128#12
- # asm 2: movdqa <z1=%xmm0,>orig1=176(%rsp)
- movdqa %xmm0,176(%rsp)
- # qhasm: z2 = x2
- # asm 1: movdqa <x2=stack128#2,>z2=int6464#1
- # asm 2: movdqa <x2=16(%rsp),>z2=%xmm0
- movdqa 16(%rsp),%xmm0
- # qhasm: z7 = z2[3,3,3,3]
- # asm 1: pshufd $0xff,<z2=int6464#1,>z7=int6464#2
- # asm 2: pshufd $0xff,<z2=%xmm0,>z7=%xmm1
- pshufd $0xff,%xmm0,%xmm1
- # qhasm: z13 = z2[1,1,1,1]
- # asm 1: pshufd $0x55,<z2=int6464#1,>z13=int6464#3
- # asm 2: pshufd $0x55,<z2=%xmm0,>z13=%xmm2
- pshufd $0x55,%xmm0,%xmm2
- # qhasm: z2 = z2[2,2,2,2]
- # asm 1: pshufd $0xaa,<z2=int6464#1,>z2=int6464#1
- # asm 2: pshufd $0xaa,<z2=%xmm0,>z2=%xmm0
- pshufd $0xaa,%xmm0,%xmm0
- # qhasm: orig7 = z7
- # asm 1: movdqa <z7=int6464#2,>orig7=stack128#13
- # asm 2: movdqa <z7=%xmm1,>orig7=192(%rsp)
- movdqa %xmm1,192(%rsp)
- # qhasm: orig13 = z13
- # asm 1: movdqa <z13=int6464#3,>orig13=stack128#14
- # asm 2: movdqa <z13=%xmm2,>orig13=208(%rsp)
- movdqa %xmm2,208(%rsp)
- # qhasm: orig2 = z2
- # asm 1: movdqa <z2=int6464#1,>orig2=stack128#15
- # asm 2: movdqa <z2=%xmm0,>orig2=224(%rsp)
- movdqa %xmm0,224(%rsp)
- # qhasm: z3 = x3
- # asm 1: movdqa <x3=stack128#3,>z3=int6464#1
- # asm 2: movdqa <x3=32(%rsp),>z3=%xmm0
- movdqa 32(%rsp),%xmm0
- # qhasm: z4 = z3[0,0,0,0]
- # asm 1: pshufd $0x00,<z3=int6464#1,>z4=int6464#2
- # asm 2: pshufd $0x00,<z3=%xmm0,>z4=%xmm1
- pshufd $0x00,%xmm0,%xmm1
- # qhasm: z14 = z3[2,2,2,2]
- # asm 1: pshufd $0xaa,<z3=int6464#1,>z14=int6464#3
- # asm 2: pshufd $0xaa,<z3=%xmm0,>z14=%xmm2
- pshufd $0xaa,%xmm0,%xmm2
- # qhasm: z3 = z3[3,3,3,3]
- # asm 1: pshufd $0xff,<z3=int6464#1,>z3=int6464#1
- # asm 2: pshufd $0xff,<z3=%xmm0,>z3=%xmm0
- pshufd $0xff,%xmm0,%xmm0
- # qhasm: orig4 = z4
- # asm 1: movdqa <z4=int6464#2,>orig4=stack128#16
- # asm 2: movdqa <z4=%xmm1,>orig4=240(%rsp)
- movdqa %xmm1,240(%rsp)
- # qhasm: orig14 = z14
- # asm 1: movdqa <z14=int6464#3,>orig14=stack128#17
- # asm 2: movdqa <z14=%xmm2,>orig14=256(%rsp)
- movdqa %xmm2,256(%rsp)
- # qhasm: orig3 = z3
- # asm 1: movdqa <z3=int6464#1,>orig3=stack128#18
- # asm 2: movdqa <z3=%xmm0,>orig3=272(%rsp)
- movdqa %xmm0,272(%rsp)
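Each `pshufd` above broadcasts one 32-bit state word into all four lanes of an xmm register, and the results are parked in `orig0`..`orig15`. That layout lets the main loop compute four independent Salsa20 blocks in parallel, one per lane. A scalar sketch of the broadcast (the struct and function are illustrative):

```c
#include <stdint.h>

typedef struct { uint32_t lane[4]; } vec128;

/* pshufd with imm8 = 0x00, 0x55, 0xaa or 0xff copies lane i of the
   source into every lane of the destination (i = imm8 & 3). */
static vec128 broadcast(vec128 v, int i) {
    vec128 r;
    for (int j = 0; j < 4; j++) r.lane[j] = v.lane[i];
    return r;
}
```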
- # qhasm: bytesatleast256:
- ._bytesatleast256:
- # qhasm: in8 = ((uint32 *)&x2)[0]
- # asm 1: movl <x2=stack128#2,>in8=int64#3d
- # asm 2: movl <x2=16(%rsp),>in8=%edx
- movl 16(%rsp),%edx
- # qhasm: in9 = ((uint32 *)&x3)[1]
- # asm 1: movl 4+<x3=stack128#3,>in9=int64#4d
- # asm 2: movl 4+<x3=32(%rsp),>in9=%ecx
- movl 4+32(%rsp),%ecx
- # qhasm: ((uint32 *) &orig8)[0] = in8
- # asm 1: movl <in8=int64#3d,>orig8=stack128#19
- # asm 2: movl <in8=%edx,>orig8=288(%rsp)
- movl %edx,288(%rsp)
- # qhasm: ((uint32 *) &orig9)[0] = in9
- # asm 1: movl <in9=int64#4d,>orig9=stack128#20
- # asm 2: movl <in9=%ecx,>orig9=304(%rsp)
- movl %ecx,304(%rsp)
- # qhasm: in8 += 1
- # asm 1: add $1,<in8=int64#3
- # asm 2: add $1,<in8=%rdx
- add $1,%rdx
- # qhasm: in9 <<= 32
- # asm 1: shl $32,<in9=int64#4
- # asm 2: shl $32,<in9=%rcx
- shl $32,%rcx
- # qhasm: in8 += in9
- # asm 1: add <in9=int64#4,<in8=int64#3
- # asm 2: add <in9=%rcx,<in8=%rdx
- add %rcx,%rdx
- # qhasm: in9 = in8
- # asm 1: mov <in8=int64#3,>in9=int64#4
- # asm 2: mov <in8=%rdx,>in9=%rcx
- mov %rdx,%rcx
- # qhasm: (uint64) in9 >>= 32
- # asm 1: shr $32,<in9=int64#4
- # asm 2: shr $32,<in9=%rcx
- shr $32,%rcx
- # qhasm: ((uint32 *) &orig8)[1] = in8
- # asm 1: movl <in8=int64#3d,4+<orig8=stack128#19
- # asm 2: movl <in8=%edx,4+<orig8=288(%rsp)
- movl %edx,4+288(%rsp)
- # qhasm: ((uint32 *) &orig9)[1] = in9
- # asm 1: movl <in9=int64#4d,4+<orig9=stack128#20
- # asm 2: movl <in9=%ecx,4+<orig9=304(%rsp)
- movl %ecx,4+304(%rsp)
- # qhasm: in8 += 1
- # asm 1: add $1,<in8=int64#3
- # asm 2: add $1,<in8=%rdx
- add $1,%rdx
- # qhasm: in9 <<= 32
- # asm 1: shl $32,<in9=int64#4
- # asm 2: shl $32,<in9=%rcx
- shl $32,%rcx
- # qhasm: in8 += in9
- # asm 1: add <in9=int64#4,<in8=int64#3
- # asm 2: add <in9=%rcx,<in8=%rdx
- add %rcx,%rdx
- # qhasm: in9 = in8
- # asm 1: mov <in8=int64#3,>in9=int64#4
- # asm 2: mov <in8=%rdx,>in9=%rcx
- mov %rdx,%rcx
- # qhasm: (uint64) in9 >>= 32
- # asm 1: shr $32,<in9=int64#4
- # asm 2: shr $32,<in9=%rcx
- shr $32,%rcx
- # qhasm: ((uint32 *) &orig8)[2] = in8
- # asm 1: movl <in8=int64#3d,8+<orig8=stack128#19
- # asm 2: movl <in8=%edx,8+<orig8=288(%rsp)
- movl %edx,8+288(%rsp)
- # qhasm: ((uint32 *) &orig9)[2] = in9
- # asm 1: movl <in9=int64#4d,8+<orig9=stack128#20
- # asm 2: movl <in9=%ecx,8+<orig9=304(%rsp)
- movl %ecx,8+304(%rsp)
- # qhasm: in8 += 1
- # asm 1: add $1,<in8=int64#3
- # asm 2: add $1,<in8=%rdx
- add $1,%rdx
- # qhasm: in9 <<= 32
- # asm 1: shl $32,<in9=int64#4
- # asm 2: shl $32,<in9=%rcx
- shl $32,%rcx
- # qhasm: in8 += in9
- # asm 1: add <in9=int64#4,<in8=int64#3
- # asm 2: add <in9=%rcx,<in8=%rdx
- add %rcx,%rdx
- # qhasm: in9 = in8
- # asm 1: mov <in8=int64#3,>in9=int64#4
- # asm 2: mov <in8=%rdx,>in9=%rcx
- mov %rdx,%rcx
- # qhasm: (uint64) in9 >>= 32
- # asm 1: shr $32,<in9=int64#4
- # asm 2: shr $32,<in9=%rcx
- shr $32,%rcx
- # qhasm: ((uint32 *) &orig8)[3] = in8
- # asm 1: movl <in8=int64#3d,12+<orig8=stack128#19
- # asm 2: movl <in8=%edx,12+<orig8=288(%rsp)
- movl %edx,12+288(%rsp)
- # qhasm: ((uint32 *) &orig9)[3] = in9
- # asm 1: movl <in9=int64#4d,12+<orig9=stack128#20
- # asm 2: movl <in9=%ecx,12+<orig9=304(%rsp)
- movl %ecx,12+304(%rsp)
- # qhasm: in8 += 1
- # asm 1: add $1,<in8=int64#3
- # asm 2: add $1,<in8=%rdx
- add $1,%rdx
- # qhasm: in9 <<= 32
- # asm 1: shl $32,<in9=int64#4
- # asm 2: shl $32,<in9=%rcx
- shl $32,%rcx
- # qhasm: in8 += in9
- # asm 1: add <in9=int64#4,<in8=int64#3
- # asm 2: add <in9=%rcx,<in8=%rdx
- add %rcx,%rdx
- # qhasm: in9 = in8
- # asm 1: mov <in8=int64#3,>in9=int64#4
- # asm 2: mov <in8=%rdx,>in9=%rcx
- mov %rdx,%rcx
- # qhasm: (uint64) in9 >>= 32
- # asm 1: shr $32,<in9=int64#4
- # asm 2: shr $32,<in9=%rcx
- shr $32,%rcx
- # qhasm: ((uint32 *)&x2)[0] = in8
- # asm 1: movl <in8=int64#3d,>x2=stack128#2
- # asm 2: movl <in8=%edx,>x2=16(%rsp)
- movl %edx,16(%rsp)
- # qhasm: ((uint32 *)&x3)[1] = in9
- # asm 1: movl <in9=int64#4d,4+<x3=stack128#3
- # asm 2: movl <in9=%ecx,4+<x3=32(%rsp)
- movl %ecx,4+32(%rsp)
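Words 8 and 9 form the 64-bit block counter, kept as the low lanes of `x2` and `x3`. The sequence above stores the current counter into lane 0 of `orig8`/`orig9`, then three times packs the two halves into one 64-bit value, adds 1 (so the carry from word 8 into word 9 is handled by the hardware adder), splits it again, and stores the next lane; a final increment is written back to `x2`/`x3` for the next 256-byte batch. A C sketch of the same arithmetic (the helper name is ours):

```c
#include <stdint.h>

/* Gives the four parallel blocks counters c, c+1, c+2, c+3 and
   saves c+4 for the next iteration of the 256-byte loop. */
void setup_counters(uint32_t *word8, uint32_t *word9,
                    uint32_t orig8[4], uint32_t orig9[4]) {
    uint64_t c = ((uint64_t)*word9 << 32) | *word8;
    for (int i = 0; i < 4; i++) {
        orig8[i] = (uint32_t)c;           /* low lane for block i */
        orig9[i] = (uint32_t)(c >> 32);   /* high lane for block i */
        c++;                              /* 64-bit add handles the carry */
    }
    *word8 = (uint32_t)c;
    *word9 = (uint32_t)(c >> 32);
}
```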
- # qhasm: bytes_backup = bytes
- # asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8
- # asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp)
- movq %r9,408(%rsp)
- # qhasm: i = 20
- # asm 1: mov $20,>i=int64#3
- # asm 2: mov $20,>i=%rdx
- mov $20,%rdx
- # qhasm: z5 = orig5
- # asm 1: movdqa <orig5=stack128#5,>z5=int6464#1
- # asm 2: movdqa <orig5=64(%rsp),>z5=%xmm0
- movdqa 64(%rsp),%xmm0
- # qhasm: z10 = orig10
- # asm 1: movdqa <orig10=stack128#6,>z10=int6464#2
- # asm 2: movdqa <orig10=80(%rsp),>z10=%xmm1
- movdqa 80(%rsp),%xmm1
- # qhasm: z15 = orig15
- # asm 1: movdqa <orig15=stack128#7,>z15=int6464#3
- # asm 2: movdqa <orig15=96(%rsp),>z15=%xmm2
- movdqa 96(%rsp),%xmm2
- # qhasm: z14 = orig14
- # asm 1: movdqa <orig14=stack128#17,>z14=int6464#4
- # asm 2: movdqa <orig14=256(%rsp),>z14=%xmm3
- movdqa 256(%rsp),%xmm3
- # qhasm: z3 = orig3
- # asm 1: movdqa <orig3=stack128#18,>z3=int6464#5
- # asm 2: movdqa <orig3=272(%rsp),>z3=%xmm4
- movdqa 272(%rsp),%xmm4
- # qhasm: z6 = orig6
- # asm 1: movdqa <orig6=stack128#9,>z6=int6464#6
- # asm 2: movdqa <orig6=128(%rsp),>z6=%xmm5
- movdqa 128(%rsp),%xmm5
- # qhasm: z11 = orig11
- # asm 1: movdqa <orig11=stack128#10,>z11=int6464#7
- # asm 2: movdqa <orig11=144(%rsp),>z11=%xmm6
- movdqa 144(%rsp),%xmm6
- # qhasm: z1 = orig1
- # asm 1: movdqa <orig1=stack128#12,>z1=int6464#8
- # asm 2: movdqa <orig1=176(%rsp),>z1=%xmm7
- movdqa 176(%rsp),%xmm7
- # qhasm: z7 = orig7
- # asm 1: movdqa <orig7=stack128#13,>z7=int6464#9
- # asm 2: movdqa <orig7=192(%rsp),>z7=%xmm8
- movdqa 192(%rsp),%xmm8
- # qhasm: z13 = orig13
- # asm 1: movdqa <orig13=stack128#14,>z13=int6464#10
- # asm 2: movdqa <orig13=208(%rsp),>z13=%xmm9
- movdqa 208(%rsp),%xmm9
- # qhasm: z2 = orig2
- # asm 1: movdqa <orig2=stack128#15,>z2=int6464#11
- # asm 2: movdqa <orig2=224(%rsp),>z2=%xmm10
- movdqa 224(%rsp),%xmm10
- # qhasm: z9 = orig9
- # asm 1: movdqa <orig9=stack128#20,>z9=int6464#12
- # asm 2: movdqa <orig9=304(%rsp),>z9=%xmm11
- movdqa 304(%rsp),%xmm11
- # qhasm: z0 = orig0
- # asm 1: movdqa <orig0=stack128#8,>z0=int6464#13
- # asm 2: movdqa <orig0=112(%rsp),>z0=%xmm12
- movdqa 112(%rsp),%xmm12
- # qhasm: z12 = orig12
- # asm 1: movdqa <orig12=stack128#11,>z12=int6464#14
- # asm 2: movdqa <orig12=160(%rsp),>z12=%xmm13
- movdqa 160(%rsp),%xmm13
- # qhasm: z4 = orig4
- # asm 1: movdqa <orig4=stack128#16,>z4=int6464#15
- # asm 2: movdqa <orig4=240(%rsp),>z4=%xmm14
- movdqa 240(%rsp),%xmm14
- # qhasm: z8 = orig8
- # asm 1: movdqa <orig8=stack128#19,>z8=int6464#16
- # asm 2: movdqa <orig8=288(%rsp),>z8=%xmm15
- movdqa 288(%rsp),%xmm15
- # qhasm: mainloop1:
- ._mainloop1:
- # qhasm: z10_stack = z10
- # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21
- # asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp)
- movdqa %xmm1,320(%rsp)
- # qhasm: z15_stack = z15
- # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22
- # asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp)
- movdqa %xmm2,336(%rsp)
- # qhasm: y4 = z12
- # asm 1: movdqa <z12=int6464#14,>y4=int6464#2
- # asm 2: movdqa <z12=%xmm13,>y4=%xmm1
- movdqa %xmm13,%xmm1
- # qhasm: uint32323232 y4 += z0
- # asm 1: paddd <z0=int6464#13,<y4=int6464#2
- # asm 2: paddd <z0=%xmm12,<y4=%xmm1
- paddd %xmm12,%xmm1
- # qhasm: r4 = y4
- # asm 1: movdqa <y4=int6464#2,>r4=int6464#3
- # asm 2: movdqa <y4=%xmm1,>r4=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y4 <<= 7
- # asm 1: pslld $7,<y4=int6464#2
- # asm 2: pslld $7,<y4=%xmm1
- pslld $7,%xmm1
- # qhasm: z4 ^= y4
- # asm 1: pxor <y4=int6464#2,<z4=int6464#15
- # asm 2: pxor <y4=%xmm1,<z4=%xmm14
- pxor %xmm1,%xmm14
- # qhasm: uint32323232 r4 >>= 25
- # asm 1: psrld $25,<r4=int6464#3
- # asm 2: psrld $25,<r4=%xmm2
- psrld $25,%xmm2
- # qhasm: z4 ^= r4
- # asm 1: pxor <r4=int6464#3,<z4=int6464#15
- # asm 2: pxor <r4=%xmm2,<z4=%xmm14
- pxor %xmm2,%xmm14
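This seven-instruction group is the basic Salsa20 operation `z4 ^= rotl32(z12 + z0, 7)`: SSE2 has no vector rotate, so the sum is shifted left by 7 and right by 25 and both results are xored in. The same pattern repeats below with the other rotation distances 9, 13 and 18, and `i = 20` above counts the rounds. A scalar reference for one quarter-round, assuming the standard Salsa20 definition:

```c
#include <stdint.h>

static uint32_t rotl32(uint32_t x, int k) {
    return (x << k) | (x >> (32 - k));   /* pslld k, psrld 32-k, two pxor */
}

/* Standard Salsa20 quarter-round; the vector code runs four copies of
   this, one per xmm lane, on each column or row group of state words. */
static void quarterround(uint32_t *y0, uint32_t *y1,
                         uint32_t *y2, uint32_t *y3) {
    *y1 ^= rotl32(*y0 + *y3, 7);
    *y2 ^= rotl32(*y1 + *y0, 9);
    *y3 ^= rotl32(*y2 + *y1, 13);
    *y0 ^= rotl32(*y3 + *y2, 18);
}
```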
- # qhasm: y9 = z1
- # asm 1: movdqa <z1=int6464#8,>y9=int6464#2
- # asm 2: movdqa <z1=%xmm7,>y9=%xmm1
- movdqa %xmm7,%xmm1
- # qhasm: uint32323232 y9 += z5
- # asm 1: paddd <z5=int6464#1,<y9=int6464#2
- # asm 2: paddd <z5=%xmm0,<y9=%xmm1
- paddd %xmm0,%xmm1
- # qhasm: r9 = y9
- # asm 1: movdqa <y9=int6464#2,>r9=int6464#3
- # asm 2: movdqa <y9=%xmm1,>r9=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y9 <<= 7
- # asm 1: pslld $7,<y9=int6464#2
- # asm 2: pslld $7,<y9=%xmm1
- pslld $7,%xmm1
- # qhasm: z9 ^= y9
- # asm 1: pxor <y9=int6464#2,<z9=int6464#12
- # asm 2: pxor <y9=%xmm1,<z9=%xmm11
- pxor %xmm1,%xmm11
- # qhasm: uint32323232 r9 >>= 25
- # asm 1: psrld $25,<r9=int6464#3
- # asm 2: psrld $25,<r9=%xmm2
- psrld $25,%xmm2
- # qhasm: z9 ^= r9
- # asm 1: pxor <r9=int6464#3,<z9=int6464#12
- # asm 2: pxor <r9=%xmm2,<z9=%xmm11
- pxor %xmm2,%xmm11
- # qhasm: y8 = z0
- # asm 1: movdqa <z0=int6464#13,>y8=int6464#2
- # asm 2: movdqa <z0=%xmm12,>y8=%xmm1
- movdqa %xmm12,%xmm1
- # qhasm: uint32323232 y8 += z4
- # asm 1: paddd <z4=int6464#15,<y8=int6464#2
- # asm 2: paddd <z4=%xmm14,<y8=%xmm1
- paddd %xmm14,%xmm1
- # qhasm: r8 = y8
- # asm 1: movdqa <y8=int6464#2,>r8=int6464#3
- # asm 2: movdqa <y8=%xmm1,>r8=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y8 <<= 9
- # asm 1: pslld $9,<y8=int6464#2
- # asm 2: pslld $9,<y8=%xmm1
- pslld $9,%xmm1
- # qhasm: z8 ^= y8
- # asm 1: pxor <y8=int6464#2,<z8=int6464#16
- # asm 2: pxor <y8=%xmm1,<z8=%xmm15
- pxor %xmm1,%xmm15
- # qhasm: uint32323232 r8 >>= 23
- # asm 1: psrld $23,<r8=int6464#3
- # asm 2: psrld $23,<r8=%xmm2
- psrld $23,%xmm2
- # qhasm: z8 ^= r8
- # asm 1: pxor <r8=int6464#3,<z8=int6464#16
- # asm 2: pxor <r8=%xmm2,<z8=%xmm15
- pxor %xmm2,%xmm15
- # qhasm: y13 = z5
- # asm 1: movdqa <z5=int6464#1,>y13=int6464#2
- # asm 2: movdqa <z5=%xmm0,>y13=%xmm1
- movdqa %xmm0,%xmm1
- # qhasm: uint32323232 y13 += z9
- # asm 1: paddd <z9=int6464#12,<y13=int6464#2
- # asm 2: paddd <z9=%xmm11,<y13=%xmm1
- paddd %xmm11,%xmm1
- # qhasm: r13 = y13
- # asm 1: movdqa <y13=int6464#2,>r13=int6464#3
- # asm 2: movdqa <y13=%xmm1,>r13=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y13 <<= 9
- # asm 1: pslld $9,<y13=int6464#2
- # asm 2: pslld $9,<y13=%xmm1
- pslld $9,%xmm1
- # qhasm: z13 ^= y13
- # asm 1: pxor <y13=int6464#2,<z13=int6464#10
- # asm 2: pxor <y13=%xmm1,<z13=%xmm9
- pxor %xmm1,%xmm9
- # qhasm: uint32323232 r13 >>= 23
- # asm 1: psrld $23,<r13=int6464#3
- # asm 2: psrld $23,<r13=%xmm2
- psrld $23,%xmm2
- # qhasm: z13 ^= r13
- # asm 1: pxor <r13=int6464#3,<z13=int6464#10
- # asm 2: pxor <r13=%xmm2,<z13=%xmm9
- pxor %xmm2,%xmm9
- # qhasm: y12 = z4
- # asm 1: movdqa <z4=int6464#15,>y12=int6464#2
- # asm 2: movdqa <z4=%xmm14,>y12=%xmm1
- movdqa %xmm14,%xmm1
- # qhasm: uint32323232 y12 += z8
- # asm 1: paddd <z8=int6464#16,<y12=int6464#2
- # asm 2: paddd <z8=%xmm15,<y12=%xmm1
- paddd %xmm15,%xmm1
- # qhasm: r12 = y12
- # asm 1: movdqa <y12=int6464#2,>r12=int6464#3
- # asm 2: movdqa <y12=%xmm1,>r12=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y12 <<= 13
- # asm 1: pslld $13,<y12=int6464#2
- # asm 2: pslld $13,<y12=%xmm1
- pslld $13,%xmm1
- # qhasm: z12 ^= y12
- # asm 1: pxor <y12=int6464#2,<z12=int6464#14
- # asm 2: pxor <y12=%xmm1,<z12=%xmm13
- pxor %xmm1,%xmm13
- # qhasm: uint32323232 r12 >>= 19
- # asm 1: psrld $19,<r12=int6464#3
- # asm 2: psrld $19,<r12=%xmm2
- psrld $19,%xmm2
- # qhasm: z12 ^= r12
- # asm 1: pxor <r12=int6464#3,<z12=int6464#14
- # asm 2: pxor <r12=%xmm2,<z12=%xmm13
- pxor %xmm2,%xmm13
- # qhasm: y1 = z9
- # asm 1: movdqa <z9=int6464#12,>y1=int6464#2
- # asm 2: movdqa <z9=%xmm11,>y1=%xmm1
- movdqa %xmm11,%xmm1
- # qhasm: uint32323232 y1 += z13
- # asm 1: paddd <z13=int6464#10,<y1=int6464#2
- # asm 2: paddd <z13=%xmm9,<y1=%xmm1
- paddd %xmm9,%xmm1
- # qhasm: r1 = y1
- # asm 1: movdqa <y1=int6464#2,>r1=int6464#3
- # asm 2: movdqa <y1=%xmm1,>r1=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y1 <<= 13
- # asm 1: pslld $13,<y1=int6464#2
- # asm 2: pslld $13,<y1=%xmm1
- pslld $13,%xmm1
- # qhasm: z1 ^= y1
- # asm 1: pxor <y1=int6464#2,<z1=int6464#8
- # asm 2: pxor <y1=%xmm1,<z1=%xmm7
- pxor %xmm1,%xmm7
- # qhasm: uint32323232 r1 >>= 19
- # asm 1: psrld $19,<r1=int6464#3
- # asm 2: psrld $19,<r1=%xmm2
- psrld $19,%xmm2
- # qhasm: z1 ^= r1
- # asm 1: pxor <r1=int6464#3,<z1=int6464#8
- # asm 2: pxor <r1=%xmm2,<z1=%xmm7
- pxor %xmm2,%xmm7
- # qhasm: y0 = z8
- # asm 1: movdqa <z8=int6464#16,>y0=int6464#2
- # asm 2: movdqa <z8=%xmm15,>y0=%xmm1
- movdqa %xmm15,%xmm1
- # qhasm: uint32323232 y0 += z12
- # asm 1: paddd <z12=int6464#14,<y0=int6464#2
- # asm 2: paddd <z12=%xmm13,<y0=%xmm1
- paddd %xmm13,%xmm1
- # qhasm: r0 = y0
- # asm 1: movdqa <y0=int6464#2,>r0=int6464#3
- # asm 2: movdqa <y0=%xmm1,>r0=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y0 <<= 18
- # asm 1: pslld $18,<y0=int6464#2
- # asm 2: pslld $18,<y0=%xmm1
- pslld $18,%xmm1
- # qhasm: z0 ^= y0
- # asm 1: pxor <y0=int6464#2,<z0=int6464#13
- # asm 2: pxor <y0=%xmm1,<z0=%xmm12
- pxor %xmm1,%xmm12
- # qhasm: uint32323232 r0 >>= 14
- # asm 1: psrld $14,<r0=int6464#3
- # asm 2: psrld $14,<r0=%xmm2
- psrld $14,%xmm2
- # qhasm: z0 ^= r0
- # asm 1: pxor <r0=int6464#3,<z0=int6464#13
- # asm 2: pxor <r0=%xmm2,<z0=%xmm12
- pxor %xmm2,%xmm12
- # qhasm: z10 = z10_stack
- # asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2
- # asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1
- movdqa 320(%rsp),%xmm1
- # qhasm: z0_stack = z0
- # asm 1: movdqa <z0=int6464#13,>z0_stack=stack128#21
- # asm 2: movdqa <z0=%xmm12,>z0_stack=320(%rsp)
- movdqa %xmm12,320(%rsp)
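The sixteen 4-lane state vectors plus the two round temporaries exceed the sixteen xmm registers, so the loop rotates two vectors at a time through the stack slots `stack128#21` and `#22`: here `z10` is reloaded from slot 21 an instant before `z0` is parked in the same slot. A small sketch of the recycling pattern (types and names are ours):

```c
#include <stdint.h>

typedef struct { uint32_t w[4]; } vec128;

/* slot21 models stack128 #21 (320(%rsp)): reload its current tenant,
   then immediately reuse the slot for the vector going dormant. */
static vec128 swap_spill(vec128 *slot21, vec128 parked) {
    vec128 reloaded = *slot21;   /* movdqa 320(%rsp),%xmm1  (z10) */
    *slot21 = parked;            /* movdqa %xmm12,320(%rsp) (z0)  */
    return reloaded;
}
```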
- # qhasm: y5 = z13
- # asm 1: movdqa <z13=int6464#10,>y5=int6464#3
- # asm 2: movdqa <z13=%xmm9,>y5=%xmm2
- movdqa %xmm9,%xmm2
- # qhasm: uint32323232 y5 += z1
- # asm 1: paddd <z1=int6464#8,<y5=int6464#3
- # asm 2: paddd <z1=%xmm7,<y5=%xmm2
- paddd %xmm7,%xmm2
- # qhasm: r5 = y5
- # asm 1: movdqa <y5=int6464#3,>r5=int6464#13
- # asm 2: movdqa <y5=%xmm2,>r5=%xmm12
- movdqa %xmm2,%xmm12
- # qhasm: uint32323232 y5 <<= 18
- # asm 1: pslld $18,<y5=int6464#3
- # asm 2: pslld $18,<y5=%xmm2
- pslld $18,%xmm2
- # qhasm: z5 ^= y5
- # asm 1: pxor <y5=int6464#3,<z5=int6464#1
- # asm 2: pxor <y5=%xmm2,<z5=%xmm0
- pxor %xmm2,%xmm0
- # qhasm: uint32323232 r5 >>= 14
- # asm 1: psrld $14,<r5=int6464#13
- # asm 2: psrld $14,<r5=%xmm12
- psrld $14,%xmm12
- # qhasm: z5 ^= r5
- # asm 1: pxor <r5=int6464#13,<z5=int6464#1
- # asm 2: pxor <r5=%xmm12,<z5=%xmm0
- pxor %xmm12,%xmm0
- # qhasm: y14 = z6
- # asm 1: movdqa <z6=int6464#6,>y14=int6464#3
- # asm 2: movdqa <z6=%xmm5,>y14=%xmm2
- movdqa %xmm5,%xmm2
- # qhasm: uint32323232 y14 += z10
- # asm 1: paddd <z10=int6464#2,<y14=int6464#3
- # asm 2: paddd <z10=%xmm1,<y14=%xmm2
- paddd %xmm1,%xmm2
- # qhasm: r14 = y14
- # asm 1: movdqa <y14=int6464#3,>r14=int6464#13
- # asm 2: movdqa <y14=%xmm2,>r14=%xmm12
- movdqa %xmm2,%xmm12
- # qhasm: uint32323232 y14 <<= 7
- # asm 1: pslld $7,<y14=int6464#3
- # asm 2: pslld $7,<y14=%xmm2
- pslld $7,%xmm2
- # qhasm: z14 ^= y14
- # asm 1: pxor <y14=int6464#3,<z14=int6464#4
- # asm 2: pxor <y14=%xmm2,<z14=%xmm3
- pxor %xmm2,%xmm3
- # qhasm: uint32323232 r14 >>= 25
- # asm 1: psrld $25,<r14=int6464#13
- # asm 2: psrld $25,<r14=%xmm12
- psrld $25,%xmm12
- # qhasm: z14 ^= r14
- # asm 1: pxor <r14=int6464#13,<z14=int6464#4
- # asm 2: pxor <r14=%xmm12,<z14=%xmm3
- pxor %xmm12,%xmm3
- # qhasm: z15 = z15_stack
- # asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3
- # asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2
- movdqa 336(%rsp),%xmm2
- # qhasm: z5_stack = z5
- # asm 1: movdqa <z5=int6464#1,>z5_stack=stack128#22
- # asm 2: movdqa <z5=%xmm0,>z5_stack=336(%rsp)
- movdqa %xmm0,336(%rsp)
- # qhasm: y3 = z11
- # asm 1: movdqa <z11=int6464#7,>y3=int6464#1
- # asm 2: movdqa <z11=%xmm6,>y3=%xmm0
- movdqa %xmm6,%xmm0
- # qhasm: uint32323232 y3 += z15
- # asm 1: paddd <z15=int6464#3,<y3=int6464#1
- # asm 2: paddd <z15=%xmm2,<y3=%xmm0
- paddd %xmm2,%xmm0
- # qhasm: r3 = y3
- # asm 1: movdqa <y3=int6464#1,>r3=int6464#13
- # asm 2: movdqa <y3=%xmm0,>r3=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y3 <<= 7
- # asm 1: pslld $7,<y3=int6464#1
- # asm 2: pslld $7,<y3=%xmm0
- pslld $7,%xmm0
- # qhasm: z3 ^= y3
- # asm 1: pxor <y3=int6464#1,<z3=int6464#5
- # asm 2: pxor <y3=%xmm0,<z3=%xmm4
- pxor %xmm0,%xmm4
- # qhasm: uint32323232 r3 >>= 25
- # asm 1: psrld $25,<r3=int6464#13
- # asm 2: psrld $25,<r3=%xmm12
- psrld $25,%xmm12
- # qhasm: z3 ^= r3
- # asm 1: pxor <r3=int6464#13,<z3=int6464#5
- # asm 2: pxor <r3=%xmm12,<z3=%xmm4
- pxor %xmm12,%xmm4
- # qhasm: y2 = z10
- # asm 1: movdqa <z10=int6464#2,>y2=int6464#1
- # asm 2: movdqa <z10=%xmm1,>y2=%xmm0
- movdqa %xmm1,%xmm0
- # qhasm: uint32323232 y2 += z14
- # asm 1: paddd <z14=int6464#4,<y2=int6464#1
- # asm 2: paddd <z14=%xmm3,<y2=%xmm0
- paddd %xmm3,%xmm0
- # qhasm: r2 = y2
- # asm 1: movdqa <y2=int6464#1,>r2=int6464#13
- # asm 2: movdqa <y2=%xmm0,>r2=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y2 <<= 9
- # asm 1: pslld $9,<y2=int6464#1
- # asm 2: pslld $9,<y2=%xmm0
- pslld $9,%xmm0
- # qhasm: z2 ^= y2
- # asm 1: pxor <y2=int6464#1,<z2=int6464#11
- # asm 2: pxor <y2=%xmm0,<z2=%xmm10
- pxor %xmm0,%xmm10
- # qhasm: uint32323232 r2 >>= 23
- # asm 1: psrld $23,<r2=int6464#13
- # asm 2: psrld $23,<r2=%xmm12
- psrld $23,%xmm12
- # qhasm: z2 ^= r2
- # asm 1: pxor <r2=int6464#13,<z2=int6464#11
- # asm 2: pxor <r2=%xmm12,<z2=%xmm10
- pxor %xmm12,%xmm10
- # qhasm: y7 = z15
- # asm 1: movdqa <z15=int6464#3,>y7=int6464#1
- # asm 2: movdqa <z15=%xmm2,>y7=%xmm0
- movdqa %xmm2,%xmm0
- # qhasm: uint32323232 y7 += z3
- # asm 1: paddd <z3=int6464#5,<y7=int6464#1
- # asm 2: paddd <z3=%xmm4,<y7=%xmm0
- paddd %xmm4,%xmm0
- # qhasm: r7 = y7
- # asm 1: movdqa <y7=int6464#1,>r7=int6464#13
- # asm 2: movdqa <y7=%xmm0,>r7=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y7 <<= 9
- # asm 1: pslld $9,<y7=int6464#1
- # asm 2: pslld $9,<y7=%xmm0
- pslld $9,%xmm0
- # qhasm: z7 ^= y7
- # asm 1: pxor <y7=int6464#1,<z7=int6464#9
- # asm 2: pxor <y7=%xmm0,<z7=%xmm8
- pxor %xmm0,%xmm8
- # qhasm: uint32323232 r7 >>= 23
- # asm 1: psrld $23,<r7=int6464#13
- # asm 2: psrld $23,<r7=%xmm12
- psrld $23,%xmm12
- # qhasm: z7 ^= r7
- # asm 1: pxor <r7=int6464#13,<z7=int6464#9
- # asm 2: pxor <r7=%xmm12,<z7=%xmm8
- pxor %xmm12,%xmm8
- # qhasm: y6 = z14
- # asm 1: movdqa <z14=int6464#4,>y6=int6464#1
- # asm 2: movdqa <z14=%xmm3,>y6=%xmm0
- movdqa %xmm3,%xmm0
- # qhasm: uint32323232 y6 += z2
- # asm 1: paddd <z2=int6464#11,<y6=int6464#1
- # asm 2: paddd <z2=%xmm10,<y6=%xmm0
- paddd %xmm10,%xmm0
- # qhasm: r6 = y6
- # asm 1: movdqa <y6=int6464#1,>r6=int6464#13
- # asm 2: movdqa <y6=%xmm0,>r6=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y6 <<= 13
- # asm 1: pslld $13,<y6=int6464#1
- # asm 2: pslld $13,<y6=%xmm0
- pslld $13,%xmm0
- # qhasm: z6 ^= y6
- # asm 1: pxor <y6=int6464#1,<z6=int6464#6
- # asm 2: pxor <y6=%xmm0,<z6=%xmm5
- pxor %xmm0,%xmm5
- # qhasm: uint32323232 r6 >>= 19
- # asm 1: psrld $19,<r6=int6464#13
- # asm 2: psrld $19,<r6=%xmm12
- psrld $19,%xmm12
- # qhasm: z6 ^= r6
- # asm 1: pxor <r6=int6464#13,<z6=int6464#6
- # asm 2: pxor <r6=%xmm12,<z6=%xmm5
- pxor %xmm12,%xmm5
- # qhasm: y11 = z3
- # asm 1: movdqa <z3=int6464#5,>y11=int6464#1
- # asm 2: movdqa <z3=%xmm4,>y11=%xmm0
- movdqa %xmm4,%xmm0
- # qhasm: uint32323232 y11 += z7
- # asm 1: paddd <z7=int6464#9,<y11=int6464#1
- # asm 2: paddd <z7=%xmm8,<y11=%xmm0
- paddd %xmm8,%xmm0
- # qhasm: r11 = y11
- # asm 1: movdqa <y11=int6464#1,>r11=int6464#13
- # asm 2: movdqa <y11=%xmm0,>r11=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y11 <<= 13
- # asm 1: pslld $13,<y11=int6464#1
- # asm 2: pslld $13,<y11=%xmm0
- pslld $13,%xmm0
- # qhasm: z11 ^= y11
- # asm 1: pxor <y11=int6464#1,<z11=int6464#7
- # asm 2: pxor <y11=%xmm0,<z11=%xmm6
- pxor %xmm0,%xmm6
- # qhasm: uint32323232 r11 >>= 19
- # asm 1: psrld $19,<r11=int6464#13
- # asm 2: psrld $19,<r11=%xmm12
- psrld $19,%xmm12
- # qhasm: z11 ^= r11
- # asm 1: pxor <r11=int6464#13,<z11=int6464#7
- # asm 2: pxor <r11=%xmm12,<z11=%xmm6
- pxor %xmm12,%xmm6
- # qhasm: y10 = z2
- # asm 1: movdqa <z2=int6464#11,>y10=int6464#1
- # asm 2: movdqa <z2=%xmm10,>y10=%xmm0
- movdqa %xmm10,%xmm0
- # qhasm: uint32323232 y10 += z6
- # asm 1: paddd <z6=int6464#6,<y10=int6464#1
- # asm 2: paddd <z6=%xmm5,<y10=%xmm0
- paddd %xmm5,%xmm0
- # qhasm: r10 = y10
- # asm 1: movdqa <y10=int6464#1,>r10=int6464#13
- # asm 2: movdqa <y10=%xmm0,>r10=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y10 <<= 18
- # asm 1: pslld $18,<y10=int6464#1
- # asm 2: pslld $18,<y10=%xmm0
- pslld $18,%xmm0
- # qhasm: z10 ^= y10
- # asm 1: pxor <y10=int6464#1,<z10=int6464#2
- # asm 2: pxor <y10=%xmm0,<z10=%xmm1
- pxor %xmm0,%xmm1
- # qhasm: uint32323232 r10 >>= 14
- # asm 1: psrld $14,<r10=int6464#13
- # asm 2: psrld $14,<r10=%xmm12
- psrld $14,%xmm12
- # qhasm: z10 ^= r10
- # asm 1: pxor <r10=int6464#13,<z10=int6464#2
- # asm 2: pxor <r10=%xmm12,<z10=%xmm1
- pxor %xmm12,%xmm1
- # qhasm: z0 = z0_stack
- # asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#1
- # asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm0
- movdqa 320(%rsp),%xmm0
- # qhasm: z10_stack = z10
- # asm 1: movdqa <z10=int6464#2,>z10_stack=stack128#21
- # asm 2: movdqa <z10=%xmm1,>z10_stack=320(%rsp)
- movdqa %xmm1,320(%rsp)
- # qhasm: y1 = z3
- # asm 1: movdqa <z3=int6464#5,>y1=int6464#2
- # asm 2: movdqa <z3=%xmm4,>y1=%xmm1
- movdqa %xmm4,%xmm1
- # qhasm: uint32323232 y1 += z0
- # asm 1: paddd <z0=int6464#1,<y1=int6464#2
- # asm 2: paddd <z0=%xmm0,<y1=%xmm1
- paddd %xmm0,%xmm1
- # qhasm: r1 = y1
- # asm 1: movdqa <y1=int6464#2,>r1=int6464#13
- # asm 2: movdqa <y1=%xmm1,>r1=%xmm12
- movdqa %xmm1,%xmm12
- # qhasm: uint32323232 y1 <<= 7
- # asm 1: pslld $7,<y1=int6464#2
- # asm 2: pslld $7,<y1=%xmm1
- pslld $7,%xmm1
- # qhasm: z1 ^= y1
- # asm 1: pxor <y1=int6464#2,<z1=int6464#8
- # asm 2: pxor <y1=%xmm1,<z1=%xmm7
- pxor %xmm1,%xmm7
- # qhasm: uint32323232 r1 >>= 25
- # asm 1: psrld $25,<r1=int6464#13
- # asm 2: psrld $25,<r1=%xmm12
- psrld $25,%xmm12
- # qhasm: z1 ^= r1
- # asm 1: pxor <r1=int6464#13,<z1=int6464#8
- # asm 2: pxor <r1=%xmm12,<z1=%xmm7
- pxor %xmm12,%xmm7
- # qhasm: y15 = z7
- # asm 1: movdqa <z7=int6464#9,>y15=int6464#2
- # asm 2: movdqa <z7=%xmm8,>y15=%xmm1
- movdqa %xmm8,%xmm1
- # qhasm: uint32323232 y15 += z11
- # asm 1: paddd <z11=int6464#7,<y15=int6464#2
- # asm 2: paddd <z11=%xmm6,<y15=%xmm1
- paddd %xmm6,%xmm1
- # qhasm: r15 = y15
- # asm 1: movdqa <y15=int6464#2,>r15=int6464#13
- # asm 2: movdqa <y15=%xmm1,>r15=%xmm12
- movdqa %xmm1,%xmm12
- # qhasm: uint32323232 y15 <<= 18
- # asm 1: pslld $18,<y15=int6464#2
- # asm 2: pslld $18,<y15=%xmm1
- pslld $18,%xmm1
- # qhasm: z15 ^= y15
- # asm 1: pxor <y15=int6464#2,<z15=int6464#3
- # asm 2: pxor <y15=%xmm1,<z15=%xmm2
- pxor %xmm1,%xmm2
- # qhasm: uint32323232 r15 >>= 14
- # asm 1: psrld $14,<r15=int6464#13
- # asm 2: psrld $14,<r15=%xmm12
- psrld $14,%xmm12
- # qhasm: z15 ^= r15
- # asm 1: pxor <r15=int6464#13,<z15=int6464#3
- # asm 2: pxor <r15=%xmm12,<z15=%xmm2
- pxor %xmm12,%xmm2
- # qhasm: z5 = z5_stack
- # asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#13
- # asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm12
- movdqa 336(%rsp),%xmm12
- # qhasm: z15_stack = z15
- # asm 1: movdqa <z15=int6464#3,>z15_stack=stack128#22
- # asm 2: movdqa <z15=%xmm2,>z15_stack=336(%rsp)
- movdqa %xmm2,336(%rsp)
- # qhasm: y6 = z4
- # asm 1: movdqa <z4=int6464#15,>y6=int6464#2
- # asm 2: movdqa <z4=%xmm14,>y6=%xmm1
- movdqa %xmm14,%xmm1
- # qhasm: uint32323232 y6 += z5
- # asm 1: paddd <z5=int6464#13,<y6=int6464#2
- # asm 2: paddd <z5=%xmm12,<y6=%xmm1
- paddd %xmm12,%xmm1
- # qhasm: r6 = y6
- # asm 1: movdqa <y6=int6464#2,>r6=int6464#3
- # asm 2: movdqa <y6=%xmm1,>r6=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y6 <<= 7
- # asm 1: pslld $7,<y6=int6464#2
- # asm 2: pslld $7,<y6=%xmm1
- pslld $7,%xmm1
- # qhasm: z6 ^= y6
- # asm 1: pxor <y6=int6464#2,<z6=int6464#6
- # asm 2: pxor <y6=%xmm1,<z6=%xmm5
- pxor %xmm1,%xmm5
- # qhasm: uint32323232 r6 >>= 25
- # asm 1: psrld $25,<r6=int6464#3
- # asm 2: psrld $25,<r6=%xmm2
- psrld $25,%xmm2
- # qhasm: z6 ^= r6
- # asm 1: pxor <r6=int6464#3,<z6=int6464#6
- # asm 2: pxor <r6=%xmm2,<z6=%xmm5
- pxor %xmm2,%xmm5
- # qhasm: y2 = z0
- # asm 1: movdqa <z0=int6464#1,>y2=int6464#2
- # asm 2: movdqa <z0=%xmm0,>y2=%xmm1
- movdqa %xmm0,%xmm1
- # qhasm: uint32323232 y2 += z1
- # asm 1: paddd <z1=int6464#8,<y2=int6464#2
- # asm 2: paddd <z1=%xmm7,<y2=%xmm1
- paddd %xmm7,%xmm1
- # qhasm: r2 = y2
- # asm 1: movdqa <y2=int6464#2,>r2=int6464#3
- # asm 2: movdqa <y2=%xmm1,>r2=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y2 <<= 9
- # asm 1: pslld $9,<y2=int6464#2
- # asm 2: pslld $9,<y2=%xmm1
- pslld $9,%xmm1
- # qhasm: z2 ^= y2
- # asm 1: pxor <y2=int6464#2,<z2=int6464#11
- # asm 2: pxor <y2=%xmm1,<z2=%xmm10
- pxor %xmm1,%xmm10
- # qhasm: uint32323232 r2 >>= 23
- # asm 1: psrld $23,<r2=int6464#3
- # asm 2: psrld $23,<r2=%xmm2
- psrld $23,%xmm2
- # qhasm: z2 ^= r2
- # asm 1: pxor <r2=int6464#3,<z2=int6464#11
- # asm 2: pxor <r2=%xmm2,<z2=%xmm10
- pxor %xmm2,%xmm10
- # qhasm: y7 = z5
- # asm 1: movdqa <z5=int6464#13,>y7=int6464#2
- # asm 2: movdqa <z5=%xmm12,>y7=%xmm1
- movdqa %xmm12,%xmm1
- # qhasm: uint32323232 y7 += z6
- # asm 1: paddd <z6=int6464#6,<y7=int6464#2
- # asm 2: paddd <z6=%xmm5,<y7=%xmm1
- paddd %xmm5,%xmm1
- # qhasm: r7 = y7
- # asm 1: movdqa <y7=int6464#2,>r7=int6464#3
- # asm 2: movdqa <y7=%xmm1,>r7=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y7 <<= 9
- # asm 1: pslld $9,<y7=int6464#2
- # asm 2: pslld $9,<y7=%xmm1
- pslld $9,%xmm1
- # qhasm: z7 ^= y7
- # asm 1: pxor <y7=int6464#2,<z7=int6464#9
- # asm 2: pxor <y7=%xmm1,<z7=%xmm8
- pxor %xmm1,%xmm8
- # qhasm: uint32323232 r7 >>= 23
- # asm 1: psrld $23,<r7=int6464#3
- # asm 2: psrld $23,<r7=%xmm2
- psrld $23,%xmm2
- # qhasm: z7 ^= r7
- # asm 1: pxor <r7=int6464#3,<z7=int6464#9
- # asm 2: pxor <r7=%xmm2,<z7=%xmm8
- pxor %xmm2,%xmm8
- # qhasm: y3 = z1
- # asm 1: movdqa <z1=int6464#8,>y3=int6464#2
- # asm 2: movdqa <z1=%xmm7,>y3=%xmm1
- movdqa %xmm7,%xmm1
- # qhasm: uint32323232 y3 += z2
- # asm 1: paddd <z2=int6464#11,<y3=int6464#2
- # asm 2: paddd <z2=%xmm10,<y3=%xmm1
- paddd %xmm10,%xmm1
- # qhasm: r3 = y3
- # asm 1: movdqa <y3=int6464#2,>r3=int6464#3
- # asm 2: movdqa <y3=%xmm1,>r3=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y3 <<= 13
- # asm 1: pslld $13,<y3=int6464#2
- # asm 2: pslld $13,<y3=%xmm1
- pslld $13,%xmm1
- # qhasm: z3 ^= y3
- # asm 1: pxor <y3=int6464#2,<z3=int6464#5
- # asm 2: pxor <y3=%xmm1,<z3=%xmm4
- pxor %xmm1,%xmm4
- # qhasm: uint32323232 r3 >>= 19
- # asm 1: psrld $19,<r3=int6464#3
- # asm 2: psrld $19,<r3=%xmm2
- psrld $19,%xmm2
- # qhasm: z3 ^= r3
- # asm 1: pxor <r3=int6464#3,<z3=int6464#5
- # asm 2: pxor <r3=%xmm2,<z3=%xmm4
- pxor %xmm2,%xmm4
- # qhasm: y4 = z6
- # asm 1: movdqa <z6=int6464#6,>y4=int6464#2
- # asm 2: movdqa <z6=%xmm5,>y4=%xmm1
- movdqa %xmm5,%xmm1
- # qhasm: uint32323232 y4 += z7
- # asm 1: paddd <z7=int6464#9,<y4=int6464#2
- # asm 2: paddd <z7=%xmm8,<y4=%xmm1
- paddd %xmm8,%xmm1
- # qhasm: r4 = y4
- # asm 1: movdqa <y4=int6464#2,>r4=int6464#3
- # asm 2: movdqa <y4=%xmm1,>r4=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y4 <<= 13
- # asm 1: pslld $13,<y4=int6464#2
- # asm 2: pslld $13,<y4=%xmm1
- pslld $13,%xmm1
- # qhasm: z4 ^= y4
- # asm 1: pxor <y4=int6464#2,<z4=int6464#15
- # asm 2: pxor <y4=%xmm1,<z4=%xmm14
- pxor %xmm1,%xmm14
- # qhasm: uint32323232 r4 >>= 19
- # asm 1: psrld $19,<r4=int6464#3
- # asm 2: psrld $19,<r4=%xmm2
- psrld $19,%xmm2
- # qhasm: z4 ^= r4
- # asm 1: pxor <r4=int6464#3,<z4=int6464#15
- # asm 2: pxor <r4=%xmm2,<z4=%xmm14
- pxor %xmm2,%xmm14
- # qhasm: y0 = z2
- # asm 1: movdqa <z2=int6464#11,>y0=int6464#2
- # asm 2: movdqa <z2=%xmm10,>y0=%xmm1
- movdqa %xmm10,%xmm1
- # qhasm: uint32323232 y0 += z3
- # asm 1: paddd <z3=int6464#5,<y0=int6464#2
- # asm 2: paddd <z3=%xmm4,<y0=%xmm1
- paddd %xmm4,%xmm1
- # qhasm: r0 = y0
- # asm 1: movdqa <y0=int6464#2,>r0=int6464#3
- # asm 2: movdqa <y0=%xmm1,>r0=%xmm2
- movdqa %xmm1,%xmm2
- # qhasm: uint32323232 y0 <<= 18
- # asm 1: pslld $18,<y0=int6464#2
- # asm 2: pslld $18,<y0=%xmm1
- pslld $18,%xmm1
- # qhasm: z0 ^= y0
- # asm 1: pxor <y0=int6464#2,<z0=int6464#1
- # asm 2: pxor <y0=%xmm1,<z0=%xmm0
- pxor %xmm1,%xmm0
- # qhasm: uint32323232 r0 >>= 14
- # asm 1: psrld $14,<r0=int6464#3
- # asm 2: psrld $14,<r0=%xmm2
- psrld $14,%xmm2
- # qhasm: z0 ^= r0
- # asm 1: pxor <r0=int6464#3,<z0=int6464#1
- # asm 2: pxor <r0=%xmm2,<z0=%xmm0
- pxor %xmm2,%xmm0
- # qhasm: z10 = z10_stack
- # asm 1: movdqa <z10_stack=stack128#21,>z10=int6464#2
- # asm 2: movdqa <z10_stack=320(%rsp),>z10=%xmm1
- movdqa 320(%rsp),%xmm1
- # qhasm: z0_stack = z0
- # asm 1: movdqa <z0=int6464#1,>z0_stack=stack128#21
- # asm 2: movdqa <z0=%xmm0,>z0_stack=320(%rsp)
- movdqa %xmm0,320(%rsp)
- # qhasm: y5 = z7
- # asm 1: movdqa <z7=int6464#9,>y5=int6464#1
- # asm 2: movdqa <z7=%xmm8,>y5=%xmm0
- movdqa %xmm8,%xmm0
- # qhasm: uint32323232 y5 += z4
- # asm 1: paddd <z4=int6464#15,<y5=int6464#1
- # asm 2: paddd <z4=%xmm14,<y5=%xmm0
- paddd %xmm14,%xmm0
- # qhasm: r5 = y5
- # asm 1: movdqa <y5=int6464#1,>r5=int6464#3
- # asm 2: movdqa <y5=%xmm0,>r5=%xmm2
- movdqa %xmm0,%xmm2
- # qhasm: uint32323232 y5 <<= 18
- # asm 1: pslld $18,<y5=int6464#1
- # asm 2: pslld $18,<y5=%xmm0
- pslld $18,%xmm0
- # qhasm: z5 ^= y5
- # asm 1: pxor <y5=int6464#1,<z5=int6464#13
- # asm 2: pxor <y5=%xmm0,<z5=%xmm12
- pxor %xmm0,%xmm12
- # qhasm: uint32323232 r5 >>= 14
- # asm 1: psrld $14,<r5=int6464#3
- # asm 2: psrld $14,<r5=%xmm2
- psrld $14,%xmm2
- # qhasm: z5 ^= r5
- # asm 1: pxor <r5=int6464#3,<z5=int6464#13
- # asm 2: pxor <r5=%xmm2,<z5=%xmm12
- pxor %xmm2,%xmm12
- # qhasm: y11 = z9
- # asm 1: movdqa <z9=int6464#12,>y11=int6464#1
- # asm 2: movdqa <z9=%xmm11,>y11=%xmm0
- movdqa %xmm11,%xmm0
- # qhasm: uint32323232 y11 += z10
- # asm 1: paddd <z10=int6464#2,<y11=int6464#1
- # asm 2: paddd <z10=%xmm1,<y11=%xmm0
- paddd %xmm1,%xmm0
- # qhasm: r11 = y11
- # asm 1: movdqa <y11=int6464#1,>r11=int6464#3
- # asm 2: movdqa <y11=%xmm0,>r11=%xmm2
- movdqa %xmm0,%xmm2
- # qhasm: uint32323232 y11 <<= 7
- # asm 1: pslld $7,<y11=int6464#1
- # asm 2: pslld $7,<y11=%xmm0
- pslld $7,%xmm0
- # qhasm: z11 ^= y11
- # asm 1: pxor <y11=int6464#1,<z11=int6464#7
- # asm 2: pxor <y11=%xmm0,<z11=%xmm6
- pxor %xmm0,%xmm6
- # qhasm: uint32323232 r11 >>= 25
- # asm 1: psrld $25,<r11=int6464#3
- # asm 2: psrld $25,<r11=%xmm2
- psrld $25,%xmm2
- # qhasm: z11 ^= r11
- # asm 1: pxor <r11=int6464#3,<z11=int6464#7
- # asm 2: pxor <r11=%xmm2,<z11=%xmm6
- pxor %xmm2,%xmm6
- # qhasm: z15 = z15_stack
- # asm 1: movdqa <z15_stack=stack128#22,>z15=int6464#3
- # asm 2: movdqa <z15_stack=336(%rsp),>z15=%xmm2
- movdqa 336(%rsp),%xmm2
- # qhasm: z5_stack = z5
- # asm 1: movdqa <z5=int6464#13,>z5_stack=stack128#22
- # asm 2: movdqa <z5=%xmm12,>z5_stack=336(%rsp)
- movdqa %xmm12,336(%rsp)
- # qhasm: y12 = z14
- # asm 1: movdqa <z14=int6464#4,>y12=int6464#1
- # asm 2: movdqa <z14=%xmm3,>y12=%xmm0
- movdqa %xmm3,%xmm0
- # qhasm: uint32323232 y12 += z15
- # asm 1: paddd <z15=int6464#3,<y12=int6464#1
- # asm 2: paddd <z15=%xmm2,<y12=%xmm0
- paddd %xmm2,%xmm0
- # qhasm: r12 = y12
- # asm 1: movdqa <y12=int6464#1,>r12=int6464#13
- # asm 2: movdqa <y12=%xmm0,>r12=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y12 <<= 7
- # asm 1: pslld $7,<y12=int6464#1
- # asm 2: pslld $7,<y12=%xmm0
- pslld $7,%xmm0
- # qhasm: z12 ^= y12
- # asm 1: pxor <y12=int6464#1,<z12=int6464#14
- # asm 2: pxor <y12=%xmm0,<z12=%xmm13
- pxor %xmm0,%xmm13
- # qhasm: uint32323232 r12 >>= 25
- # asm 1: psrld $25,<r12=int6464#13
- # asm 2: psrld $25,<r12=%xmm12
- psrld $25,%xmm12
- # qhasm: z12 ^= r12
- # asm 1: pxor <r12=int6464#13,<z12=int6464#14
- # asm 2: pxor <r12=%xmm12,<z12=%xmm13
- pxor %xmm12,%xmm13
- # qhasm: y8 = z10
- # asm 1: movdqa <z10=int6464#2,>y8=int6464#1
- # asm 2: movdqa <z10=%xmm1,>y8=%xmm0
- movdqa %xmm1,%xmm0
- # qhasm: uint32323232 y8 += z11
- # asm 1: paddd <z11=int6464#7,<y8=int6464#1
- # asm 2: paddd <z11=%xmm6,<y8=%xmm0
- paddd %xmm6,%xmm0
- # qhasm: r8 = y8
- # asm 1: movdqa <y8=int6464#1,>r8=int6464#13
- # asm 2: movdqa <y8=%xmm0,>r8=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y8 <<= 9
- # asm 1: pslld $9,<y8=int6464#1
- # asm 2: pslld $9,<y8=%xmm0
- pslld $9,%xmm0
- # qhasm: z8 ^= y8
- # asm 1: pxor <y8=int6464#1,<z8=int6464#16
- # asm 2: pxor <y8=%xmm0,<z8=%xmm15
- pxor %xmm0,%xmm15
- # qhasm: uint32323232 r8 >>= 23
- # asm 1: psrld $23,<r8=int6464#13
- # asm 2: psrld $23,<r8=%xmm12
- psrld $23,%xmm12
- # qhasm: z8 ^= r8
- # asm 1: pxor <r8=int6464#13,<z8=int6464#16
- # asm 2: pxor <r8=%xmm12,<z8=%xmm15
- pxor %xmm12,%xmm15
- # qhasm: y13 = z15
- # asm 1: movdqa <z15=int6464#3,>y13=int6464#1
- # asm 2: movdqa <z15=%xmm2,>y13=%xmm0
- movdqa %xmm2,%xmm0
- # qhasm: uint32323232 y13 += z12
- # asm 1: paddd <z12=int6464#14,<y13=int6464#1
- # asm 2: paddd <z12=%xmm13,<y13=%xmm0
- paddd %xmm13,%xmm0
- # qhasm: r13 = y13
- # asm 1: movdqa <y13=int6464#1,>r13=int6464#13
- # asm 2: movdqa <y13=%xmm0,>r13=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y13 <<= 9
- # asm 1: pslld $9,<y13=int6464#1
- # asm 2: pslld $9,<y13=%xmm0
- pslld $9,%xmm0
- # qhasm: z13 ^= y13
- # asm 1: pxor <y13=int6464#1,<z13=int6464#10
- # asm 2: pxor <y13=%xmm0,<z13=%xmm9
- pxor %xmm0,%xmm9
- # qhasm: uint32323232 r13 >>= 23
- # asm 1: psrld $23,<r13=int6464#13
- # asm 2: psrld $23,<r13=%xmm12
- psrld $23,%xmm12
- # qhasm: z13 ^= r13
- # asm 1: pxor <r13=int6464#13,<z13=int6464#10
- # asm 2: pxor <r13=%xmm12,<z13=%xmm9
- pxor %xmm12,%xmm9
- # qhasm: y9 = z11
- # asm 1: movdqa <z11=int6464#7,>y9=int6464#1
- # asm 2: movdqa <z11=%xmm6,>y9=%xmm0
- movdqa %xmm6,%xmm0
- # qhasm: uint32323232 y9 += z8
- # asm 1: paddd <z8=int6464#16,<y9=int6464#1
- # asm 2: paddd <z8=%xmm15,<y9=%xmm0
- paddd %xmm15,%xmm0
- # qhasm: r9 = y9
- # asm 1: movdqa <y9=int6464#1,>r9=int6464#13
- # asm 2: movdqa <y9=%xmm0,>r9=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y9 <<= 13
- # asm 1: pslld $13,<y9=int6464#1
- # asm 2: pslld $13,<y9=%xmm0
- pslld $13,%xmm0
- # qhasm: z9 ^= y9
- # asm 1: pxor <y9=int6464#1,<z9=int6464#12
- # asm 2: pxor <y9=%xmm0,<z9=%xmm11
- pxor %xmm0,%xmm11
- # qhasm: uint32323232 r9 >>= 19
- # asm 1: psrld $19,<r9=int6464#13
- # asm 2: psrld $19,<r9=%xmm12
- psrld $19,%xmm12
- # qhasm: z9 ^= r9
- # asm 1: pxor <r9=int6464#13,<z9=int6464#12
- # asm 2: pxor <r9=%xmm12,<z9=%xmm11
- pxor %xmm12,%xmm11
- # qhasm: y14 = z12
- # asm 1: movdqa <z12=int6464#14,>y14=int6464#1
- # asm 2: movdqa <z12=%xmm13,>y14=%xmm0
- movdqa %xmm13,%xmm0
- # qhasm: uint32323232 y14 += z13
- # asm 1: paddd <z13=int6464#10,<y14=int6464#1
- # asm 2: paddd <z13=%xmm9,<y14=%xmm0
- paddd %xmm9,%xmm0
- # qhasm: r14 = y14
- # asm 1: movdqa <y14=int6464#1,>r14=int6464#13
- # asm 2: movdqa <y14=%xmm0,>r14=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y14 <<= 13
- # asm 1: pslld $13,<y14=int6464#1
- # asm 2: pslld $13,<y14=%xmm0
- pslld $13,%xmm0
- # qhasm: z14 ^= y14
- # asm 1: pxor <y14=int6464#1,<z14=int6464#4
- # asm 2: pxor <y14=%xmm0,<z14=%xmm3
- pxor %xmm0,%xmm3
- # qhasm: uint32323232 r14 >>= 19
- # asm 1: psrld $19,<r14=int6464#13
- # asm 2: psrld $19,<r14=%xmm12
- psrld $19,%xmm12
- # qhasm: z14 ^= r14
- # asm 1: pxor <r14=int6464#13,<z14=int6464#4
- # asm 2: pxor <r14=%xmm12,<z14=%xmm3
- pxor %xmm12,%xmm3
- # qhasm: y10 = z8
- # asm 1: movdqa <z8=int6464#16,>y10=int6464#1
- # asm 2: movdqa <z8=%xmm15,>y10=%xmm0
- movdqa %xmm15,%xmm0
- # qhasm: uint32323232 y10 += z9
- # asm 1: paddd <z9=int6464#12,<y10=int6464#1
- # asm 2: paddd <z9=%xmm11,<y10=%xmm0
- paddd %xmm11,%xmm0
- # qhasm: r10 = y10
- # asm 1: movdqa <y10=int6464#1,>r10=int6464#13
- # asm 2: movdqa <y10=%xmm0,>r10=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y10 <<= 18
- # asm 1: pslld $18,<y10=int6464#1
- # asm 2: pslld $18,<y10=%xmm0
- pslld $18,%xmm0
- # qhasm: z10 ^= y10
- # asm 1: pxor <y10=int6464#1,<z10=int6464#2
- # asm 2: pxor <y10=%xmm0,<z10=%xmm1
- pxor %xmm0,%xmm1
- # qhasm: uint32323232 r10 >>= 14
- # asm 1: psrld $14,<r10=int6464#13
- # asm 2: psrld $14,<r10=%xmm12
- psrld $14,%xmm12
- # qhasm: z10 ^= r10
- # asm 1: pxor <r10=int6464#13,<z10=int6464#2
- # asm 2: pxor <r10=%xmm12,<z10=%xmm1
- pxor %xmm12,%xmm1
- # qhasm: y15 = z13
- # asm 1: movdqa <z13=int6464#10,>y15=int6464#1
- # asm 2: movdqa <z13=%xmm9,>y15=%xmm0
- movdqa %xmm9,%xmm0
- # qhasm: uint32323232 y15 += z14
- # asm 1: paddd <z14=int6464#4,<y15=int6464#1
- # asm 2: paddd <z14=%xmm3,<y15=%xmm0
- paddd %xmm3,%xmm0
- # qhasm: r15 = y15
- # asm 1: movdqa <y15=int6464#1,>r15=int6464#13
- # asm 2: movdqa <y15=%xmm0,>r15=%xmm12
- movdqa %xmm0,%xmm12
- # qhasm: uint32323232 y15 <<= 18
- # asm 1: pslld $18,<y15=int6464#1
- # asm 2: pslld $18,<y15=%xmm0
- pslld $18,%xmm0
- # qhasm: z15 ^= y15
- # asm 1: pxor <y15=int6464#1,<z15=int6464#3
- # asm 2: pxor <y15=%xmm0,<z15=%xmm2
- pxor %xmm0,%xmm2
- # qhasm: uint32323232 r15 >>= 14
- # asm 1: psrld $14,<r15=int6464#13
- # asm 2: psrld $14,<r15=%xmm12
- psrld $14,%xmm12
- # qhasm: z15 ^= r15
- # asm 1: pxor <r15=int6464#13,<z15=int6464#3
- # asm 2: pxor <r15=%xmm12,<z15=%xmm2
- pxor %xmm12,%xmm2
- # qhasm: z0 = z0_stack
- # asm 1: movdqa <z0_stack=stack128#21,>z0=int6464#13
- # asm 2: movdqa <z0_stack=320(%rsp),>z0=%xmm12
- movdqa 320(%rsp),%xmm12
- # qhasm: z5 = z5_stack
- # asm 1: movdqa <z5_stack=stack128#22,>z5=int6464#1
- # asm 2: movdqa <z5_stack=336(%rsp),>z5=%xmm0
- movdqa 336(%rsp),%xmm0
- # qhasm: unsigned>? i -= 2
- # asm 1: sub $2,<i=int64#3
- # asm 2: sub $2,<i=%rdx
- sub $2,%rdx
- # comment:fp stack unchanged by jump
- # qhasm: goto mainloop1 if unsigned>
- ja ._mainloop1
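
The ja ._mainloop1 above closes the vectorized round loop: each of the sixteen z registers holds the same Salsa20 state word for four consecutive 64-byte blocks, one block per 32-bit lane, so every paddd/movdqa/pslld/psrld/pxor group performs one add-rotate-xor step for all four blocks at once. Because sixteen state words plus temporaries do not fit in sixteen xmm registers, z0 and z10 take turns in the spill slot at 320(%rsp), and z5 and z15 in the one at 336(%rsp). For reference, a minimal scalar C sketch of the quarter-round that each such group implements; the rotation counts 7, 9, 13, 18 match the pslld/psrld immediates above:

    #include <stdint.h>

    static uint32_t rotl32(uint32_t x, int c)
    {
        return (x << c) | (x >> (32 - c));
    }

    /* One Salsa20 quarter-round: pslld $c plus psrld $(32-c), each xored
       into the target, is a rotate-left by c.  The row rounds above are
       quarterround(&z0,&z1,&z2,&z3), quarterround(&z5,&z6,&z7,&z4), and
       so on; the column rounds apply the same pattern down the columns. */
    static void quarterround(uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d)
    {
        *b ^= rotl32(*a + *d, 7);
        *c ^= rotl32(*b + *a, 9);
        *d ^= rotl32(*c + *b, 13);
        *a ^= rotl32(*d + *c, 18);
    }
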
- # qhasm: uint32323232 z0 += orig0
- # asm 1: paddd <orig0=stack128#8,<z0=int6464#13
- # asm 2: paddd <orig0=112(%rsp),<z0=%xmm12
- paddd 112(%rsp),%xmm12
- # qhasm: uint32323232 z1 += orig1
- # asm 1: paddd <orig1=stack128#12,<z1=int6464#8
- # asm 2: paddd <orig1=176(%rsp),<z1=%xmm7
- paddd 176(%rsp),%xmm7
- # qhasm: uint32323232 z2 += orig2
- # asm 1: paddd <orig2=stack128#15,<z2=int6464#11
- # asm 2: paddd <orig2=224(%rsp),<z2=%xmm10
- paddd 224(%rsp),%xmm10
- # qhasm: uint32323232 z3 += orig3
- # asm 1: paddd <orig3=stack128#18,<z3=int6464#5
- # asm 2: paddd <orig3=272(%rsp),<z3=%xmm4
- paddd 272(%rsp),%xmm4
- # qhasm: in0 = z0
- # asm 1: movd <z0=int6464#13,>in0=int64#3
- # asm 2: movd <z0=%xmm12,>in0=%rdx
- movd %xmm12,%rdx
- # qhasm: in1 = z1
- # asm 1: movd <z1=int6464#8,>in1=int64#4
- # asm 2: movd <z1=%xmm7,>in1=%rcx
- movd %xmm7,%rcx
- # qhasm: in2 = z2
- # asm 1: movd <z2=int6464#11,>in2=int64#5
- # asm 2: movd <z2=%xmm10,>in2=%r8
- movd %xmm10,%r8
- # qhasm: in3 = z3
- # asm 1: movd <z3=int6464#5,>in3=int64#6
- # asm 2: movd <z3=%xmm4,>in3=%r9
- movd %xmm4,%r9
- # qhasm: z0 <<<= 96
- # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13
- # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12
- pshufd $0x39,%xmm12,%xmm12
- # qhasm: z1 <<<= 96
- # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8
- # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7
- pshufd $0x39,%xmm7,%xmm7
- # qhasm: z2 <<<= 96
- # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11
- # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10
- pshufd $0x39,%xmm10,%xmm10
- # qhasm: z3 <<<= 96
- # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5
- # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4
- pshufd $0x39,%xmm4,%xmm4
- # qhasm: (uint32) in0 ^= *(uint32 *) (m + 0)
- # asm 1: xorl 0(<m=int64#2),<in0=int64#3d
- # asm 2: xorl 0(<m=%rsi),<in0=%edx
- xorl 0(%rsi),%edx
- # qhasm: (uint32) in1 ^= *(uint32 *) (m + 4)
- # asm 1: xorl 4(<m=int64#2),<in1=int64#4d
- # asm 2: xorl 4(<m=%rsi),<in1=%ecx
- xorl 4(%rsi),%ecx
- # qhasm: (uint32) in2 ^= *(uint32 *) (m + 8)
- # asm 1: xorl 8(<m=int64#2),<in2=int64#5d
- # asm 2: xorl 8(<m=%rsi),<in2=%r8d
- xorl 8(%rsi),%r8d
- # qhasm: (uint32) in3 ^= *(uint32 *) (m + 12)
- # asm 1: xorl 12(<m=int64#2),<in3=int64#6d
- # asm 2: xorl 12(<m=%rsi),<in3=%r9d
- xorl 12(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 0) = in0
- # asm 1: movl <in0=int64#3d,0(<out=int64#1)
- # asm 2: movl <in0=%edx,0(<out=%rdi)
- movl %edx,0(%rdi)
- # qhasm: *(uint32 *) (out + 4) = in1
- # asm 1: movl <in1=int64#4d,4(<out=int64#1)
- # asm 2: movl <in1=%ecx,4(<out=%rdi)
- movl %ecx,4(%rdi)
- # qhasm: *(uint32 *) (out + 8) = in2
- # asm 1: movl <in2=int64#5d,8(<out=int64#1)
- # asm 2: movl <in2=%r8d,8(<out=%rdi)
- movl %r8d,8(%rdi)
- # qhasm: *(uint32 *) (out + 12) = in3
- # asm 1: movl <in3=int64#6d,12(<out=int64#1)
- # asm 2: movl <in3=%r9d,12(<out=%rdi)
- movl %r9d,12(%rdi)
- # qhasm: in0 = z0
- # asm 1: movd <z0=int6464#13,>in0=int64#3
- # asm 2: movd <z0=%xmm12,>in0=%rdx
- movd %xmm12,%rdx
- # qhasm: in1 = z1
- # asm 1: movd <z1=int6464#8,>in1=int64#4
- # asm 2: movd <z1=%xmm7,>in1=%rcx
- movd %xmm7,%rcx
- # qhasm: in2 = z2
- # asm 1: movd <z2=int6464#11,>in2=int64#5
- # asm 2: movd <z2=%xmm10,>in2=%r8
- movd %xmm10,%r8
- # qhasm: in3 = z3
- # asm 1: movd <z3=int6464#5,>in3=int64#6
- # asm 2: movd <z3=%xmm4,>in3=%r9
- movd %xmm4,%r9
- # qhasm: z0 <<<= 96
- # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13
- # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12
- pshufd $0x39,%xmm12,%xmm12
- # qhasm: z1 <<<= 96
- # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8
- # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7
- pshufd $0x39,%xmm7,%xmm7
- # qhasm: z2 <<<= 96
- # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11
- # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10
- pshufd $0x39,%xmm10,%xmm10
- # qhasm: z3 <<<= 96
- # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5
- # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4
- pshufd $0x39,%xmm4,%xmm4
- # qhasm: (uint32) in0 ^= *(uint32 *) (m + 64)
- # asm 1: xorl 64(<m=int64#2),<in0=int64#3d
- # asm 2: xorl 64(<m=%rsi),<in0=%edx
- xorl 64(%rsi),%edx
- # qhasm: (uint32) in1 ^= *(uint32 *) (m + 68)
- # asm 1: xorl 68(<m=int64#2),<in1=int64#4d
- # asm 2: xorl 68(<m=%rsi),<in1=%ecx
- xorl 68(%rsi),%ecx
- # qhasm: (uint32) in2 ^= *(uint32 *) (m + 72)
- # asm 1: xorl 72(<m=int64#2),<in2=int64#5d
- # asm 2: xorl 72(<m=%rsi),<in2=%r8d
- xorl 72(%rsi),%r8d
- # qhasm: (uint32) in3 ^= *(uint32 *) (m + 76)
- # asm 1: xorl 76(<m=int64#2),<in3=int64#6d
- # asm 2: xorl 76(<m=%rsi),<in3=%r9d
- xorl 76(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 64) = in0
- # asm 1: movl <in0=int64#3d,64(<out=int64#1)
- # asm 2: movl <in0=%edx,64(<out=%rdi)
- movl %edx,64(%rdi)
- # qhasm: *(uint32 *) (out + 68) = in1
- # asm 1: movl <in1=int64#4d,68(<out=int64#1)
- # asm 2: movl <in1=%ecx,68(<out=%rdi)
- movl %ecx,68(%rdi)
- # qhasm: *(uint32 *) (out + 72) = in2
- # asm 1: movl <in2=int64#5d,72(<out=int64#1)
- # asm 2: movl <in2=%r8d,72(<out=%rdi)
- movl %r8d,72(%rdi)
- # qhasm: *(uint32 *) (out + 76) = in3
- # asm 1: movl <in3=int64#6d,76(<out=int64#1)
- # asm 2: movl <in3=%r9d,76(<out=%rdi)
- movl %r9d,76(%rdi)
- # qhasm: in0 = z0
- # asm 1: movd <z0=int6464#13,>in0=int64#3
- # asm 2: movd <z0=%xmm12,>in0=%rdx
- movd %xmm12,%rdx
- # qhasm: in1 = z1
- # asm 1: movd <z1=int6464#8,>in1=int64#4
- # asm 2: movd <z1=%xmm7,>in1=%rcx
- movd %xmm7,%rcx
- # qhasm: in2 = z2
- # asm 1: movd <z2=int6464#11,>in2=int64#5
- # asm 2: movd <z2=%xmm10,>in2=%r8
- movd %xmm10,%r8
- # qhasm: in3 = z3
- # asm 1: movd <z3=int6464#5,>in3=int64#6
- # asm 2: movd <z3=%xmm4,>in3=%r9
- movd %xmm4,%r9
- # qhasm: z0 <<<= 96
- # asm 1: pshufd $0x39,<z0=int6464#13,<z0=int6464#13
- # asm 2: pshufd $0x39,<z0=%xmm12,<z0=%xmm12
- pshufd $0x39,%xmm12,%xmm12
- # qhasm: z1 <<<= 96
- # asm 1: pshufd $0x39,<z1=int6464#8,<z1=int6464#8
- # asm 2: pshufd $0x39,<z1=%xmm7,<z1=%xmm7
- pshufd $0x39,%xmm7,%xmm7
- # qhasm: z2 <<<= 96
- # asm 1: pshufd $0x39,<z2=int6464#11,<z2=int6464#11
- # asm 2: pshufd $0x39,<z2=%xmm10,<z2=%xmm10
- pshufd $0x39,%xmm10,%xmm10
- # qhasm: z3 <<<= 96
- # asm 1: pshufd $0x39,<z3=int6464#5,<z3=int6464#5
- # asm 2: pshufd $0x39,<z3=%xmm4,<z3=%xmm4
- pshufd $0x39,%xmm4,%xmm4
- # qhasm: (uint32) in0 ^= *(uint32 *) (m + 128)
- # asm 1: xorl 128(<m=int64#2),<in0=int64#3d
- # asm 2: xorl 128(<m=%rsi),<in0=%edx
- xorl 128(%rsi),%edx
- # qhasm: (uint32) in1 ^= *(uint32 *) (m + 132)
- # asm 1: xorl 132(<m=int64#2),<in1=int64#4d
- # asm 2: xorl 132(<m=%rsi),<in1=%ecx
- xorl 132(%rsi),%ecx
- # qhasm: (uint32) in2 ^= *(uint32 *) (m + 136)
- # asm 1: xorl 136(<m=int64#2),<in2=int64#5d
- # asm 2: xorl 136(<m=%rsi),<in2=%r8d
- xorl 136(%rsi),%r8d
- # qhasm: (uint32) in3 ^= *(uint32 *) (m + 140)
- # asm 1: xorl 140(<m=int64#2),<in3=int64#6d
- # asm 2: xorl 140(<m=%rsi),<in3=%r9d
- xorl 140(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 128) = in0
- # asm 1: movl <in0=int64#3d,128(<out=int64#1)
- # asm 2: movl <in0=%edx,128(<out=%rdi)
- movl %edx,128(%rdi)
- # qhasm: *(uint32 *) (out + 132) = in1
- # asm 1: movl <in1=int64#4d,132(<out=int64#1)
- # asm 2: movl <in1=%ecx,132(<out=%rdi)
- movl %ecx,132(%rdi)
- # qhasm: *(uint32 *) (out + 136) = in2
- # asm 1: movl <in2=int64#5d,136(<out=int64#1)
- # asm 2: movl <in2=%r8d,136(<out=%rdi)
- movl %r8d,136(%rdi)
- # qhasm: *(uint32 *) (out + 140) = in3
- # asm 1: movl <in3=int64#6d,140(<out=int64#1)
- # asm 2: movl <in3=%r9d,140(<out=%rdi)
- movl %r9d,140(%rdi)
- # qhasm: in0 = z0
- # asm 1: movd <z0=int6464#13,>in0=int64#3
- # asm 2: movd <z0=%xmm12,>in0=%rdx
- movd %xmm12,%rdx
- # qhasm: in1 = z1
- # asm 1: movd <z1=int6464#8,>in1=int64#4
- # asm 2: movd <z1=%xmm7,>in1=%rcx
- movd %xmm7,%rcx
- # qhasm: in2 = z2
- # asm 1: movd <z2=int6464#11,>in2=int64#5
- # asm 2: movd <z2=%xmm10,>in2=%r8
- movd %xmm10,%r8
- # qhasm: in3 = z3
- # asm 1: movd <z3=int6464#5,>in3=int64#6
- # asm 2: movd <z3=%xmm4,>in3=%r9
- movd %xmm4,%r9
- # qhasm: (uint32) in0 ^= *(uint32 *) (m + 192)
- # asm 1: xorl 192(<m=int64#2),<in0=int64#3d
- # asm 2: xorl 192(<m=%rsi),<in0=%edx
- xorl 192(%rsi),%edx
- # qhasm: (uint32) in1 ^= *(uint32 *) (m + 196)
- # asm 1: xorl 196(<m=int64#2),<in1=int64#4d
- # asm 2: xorl 196(<m=%rsi),<in1=%ecx
- xorl 196(%rsi),%ecx
- # qhasm: (uint32) in2 ^= *(uint32 *) (m + 200)
- # asm 1: xorl 200(<m=int64#2),<in2=int64#5d
- # asm 2: xorl 200(<m=%rsi),<in2=%r8d
- xorl 200(%rsi),%r8d
- # qhasm: (uint32) in3 ^= *(uint32 *) (m + 204)
- # asm 1: xorl 204(<m=int64#2),<in3=int64#6d
- # asm 2: xorl 204(<m=%rsi),<in3=%r9d
- xorl 204(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 192) = in0
- # asm 1: movl <in0=int64#3d,192(<out=int64#1)
- # asm 2: movl <in0=%edx,192(<out=%rdi)
- movl %edx,192(%rdi)
- # qhasm: *(uint32 *) (out + 196) = in1
- # asm 1: movl <in1=int64#4d,196(<out=int64#1)
- # asm 2: movl <in1=%ecx,196(<out=%rdi)
- movl %ecx,196(%rdi)
- # qhasm: *(uint32 *) (out + 200) = in2
- # asm 1: movl <in2=int64#5d,200(<out=int64#1)
- # asm 2: movl <in2=%r8d,200(<out=%rdi)
- movl %r8d,200(%rdi)
- # qhasm: *(uint32 *) (out + 204) = in3
- # asm 1: movl <in3=int64#6d,204(<out=int64#1)
- # asm 2: movl <in3=%r9d,204(<out=%rdi)
- movl %r9d,204(%rdi)
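
After the loop exits, the paddd orig0..orig3 stanza adds the saved input state back into z0..z3, which then hold words 0..3 of the four finished keystream blocks, one block per 32-bit lane. Each movd extracts the lowest lane and pshufd $0x39 rotates the next block's lane down, so the four extractions of a given word are xored into the message at offsets 0, 64, 128 and 192: the position of that word in four consecutive 64-byte blocks. A C sketch of the extract/xor/store walk for one word (illustrative names; the real code keeps the four lanes in an xmm register throughout):

    #include <stdint.h>
    #include <string.h>

    /* z[lane] holds the finished value of state word w for block 'lane'.
       Mirrors one movd / xorl / movl column of the code above. */
    static void xor_word_into_output(const uint32_t z[4], int w,
                                     const uint8_t *m, uint8_t *out)
    {
        for (int blk = 0; blk < 4; blk++) {
            size_t off = 64 * (size_t)blk + 4 * (size_t)w;
            uint32_t v;
            memcpy(&v, m + off, 4);     /* xorl off(%rsi),%e.. */
            v ^= z[blk];
            memcpy(out + off, &v, 4);   /* movl %e..,off(%rdi) */
        }
    }
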
- # qhasm: uint32323232 z4 += orig4
- # asm 1: paddd <orig4=stack128#16,<z4=int6464#15
- # asm 2: paddd <orig4=240(%rsp),<z4=%xmm14
- paddd 240(%rsp),%xmm14
- # qhasm: uint32323232 z5 += orig5
- # asm 1: paddd <orig5=stack128#5,<z5=int6464#1
- # asm 2: paddd <orig5=64(%rsp),<z5=%xmm0
- paddd 64(%rsp),%xmm0
- # qhasm: uint32323232 z6 += orig6
- # asm 1: paddd <orig6=stack128#9,<z6=int6464#6
- # asm 2: paddd <orig6=128(%rsp),<z6=%xmm5
- paddd 128(%rsp),%xmm5
- # qhasm: uint32323232 z7 += orig7
- # asm 1: paddd <orig7=stack128#13,<z7=int6464#9
- # asm 2: paddd <orig7=192(%rsp),<z7=%xmm8
- paddd 192(%rsp),%xmm8
- # qhasm: in4 = z4
- # asm 1: movd <z4=int6464#15,>in4=int64#3
- # asm 2: movd <z4=%xmm14,>in4=%rdx
- movd %xmm14,%rdx
- # qhasm: in5 = z5
- # asm 1: movd <z5=int6464#1,>in5=int64#4
- # asm 2: movd <z5=%xmm0,>in5=%rcx
- movd %xmm0,%rcx
- # qhasm: in6 = z6
- # asm 1: movd <z6=int6464#6,>in6=int64#5
- # asm 2: movd <z6=%xmm5,>in6=%r8
- movd %xmm5,%r8
- # qhasm: in7 = z7
- # asm 1: movd <z7=int6464#9,>in7=int64#6
- # asm 2: movd <z7=%xmm8,>in7=%r9
- movd %xmm8,%r9
- # qhasm: z4 <<<= 96
- # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15
- # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14
- pshufd $0x39,%xmm14,%xmm14
- # qhasm: z5 <<<= 96
- # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1
- # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0
- pshufd $0x39,%xmm0,%xmm0
- # qhasm: z6 <<<= 96
- # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6
- # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5
- pshufd $0x39,%xmm5,%xmm5
- # qhasm: z7 <<<= 96
- # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9
- # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8
- pshufd $0x39,%xmm8,%xmm8
- # qhasm: (uint32) in4 ^= *(uint32 *) (m + 16)
- # asm 1: xorl 16(<m=int64#2),<in4=int64#3d
- # asm 2: xorl 16(<m=%rsi),<in4=%edx
- xorl 16(%rsi),%edx
- # qhasm: (uint32) in5 ^= *(uint32 *) (m + 20)
- # asm 1: xorl 20(<m=int64#2),<in5=int64#4d
- # asm 2: xorl 20(<m=%rsi),<in5=%ecx
- xorl 20(%rsi),%ecx
- # qhasm: (uint32) in6 ^= *(uint32 *) (m + 24)
- # asm 1: xorl 24(<m=int64#2),<in6=int64#5d
- # asm 2: xorl 24(<m=%rsi),<in6=%r8d
- xorl 24(%rsi),%r8d
- # qhasm: (uint32) in7 ^= *(uint32 *) (m + 28)
- # asm 1: xorl 28(<m=int64#2),<in7=int64#6d
- # asm 2: xorl 28(<m=%rsi),<in7=%r9d
- xorl 28(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 16) = in4
- # asm 1: movl <in4=int64#3d,16(<out=int64#1)
- # asm 2: movl <in4=%edx,16(<out=%rdi)
- movl %edx,16(%rdi)
- # qhasm: *(uint32 *) (out + 20) = in5
- # asm 1: movl <in5=int64#4d,20(<out=int64#1)
- # asm 2: movl <in5=%ecx,20(<out=%rdi)
- movl %ecx,20(%rdi)
- # qhasm: *(uint32 *) (out + 24) = in6
- # asm 1: movl <in6=int64#5d,24(<out=int64#1)
- # asm 2: movl <in6=%r8d,24(<out=%rdi)
- movl %r8d,24(%rdi)
- # qhasm: *(uint32 *) (out + 28) = in7
- # asm 1: movl <in7=int64#6d,28(<out=int64#1)
- # asm 2: movl <in7=%r9d,28(<out=%rdi)
- movl %r9d,28(%rdi)
- # qhasm: in4 = z4
- # asm 1: movd <z4=int6464#15,>in4=int64#3
- # asm 2: movd <z4=%xmm14,>in4=%rdx
- movd %xmm14,%rdx
- # qhasm: in5 = z5
- # asm 1: movd <z5=int6464#1,>in5=int64#4
- # asm 2: movd <z5=%xmm0,>in5=%rcx
- movd %xmm0,%rcx
- # qhasm: in6 = z6
- # asm 1: movd <z6=int6464#6,>in6=int64#5
- # asm 2: movd <z6=%xmm5,>in6=%r8
- movd %xmm5,%r8
- # qhasm: in7 = z7
- # asm 1: movd <z7=int6464#9,>in7=int64#6
- # asm 2: movd <z7=%xmm8,>in7=%r9
- movd %xmm8,%r9
- # qhasm: z4 <<<= 96
- # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15
- # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14
- pshufd $0x39,%xmm14,%xmm14
- # qhasm: z5 <<<= 96
- # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1
- # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0
- pshufd $0x39,%xmm0,%xmm0
- # qhasm: z6 <<<= 96
- # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6
- # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5
- pshufd $0x39,%xmm5,%xmm5
- # qhasm: z7 <<<= 96
- # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9
- # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8
- pshufd $0x39,%xmm8,%xmm8
- # qhasm: (uint32) in4 ^= *(uint32 *) (m + 80)
- # asm 1: xorl 80(<m=int64#2),<in4=int64#3d
- # asm 2: xorl 80(<m=%rsi),<in4=%edx
- xorl 80(%rsi),%edx
- # qhasm: (uint32) in5 ^= *(uint32 *) (m + 84)
- # asm 1: xorl 84(<m=int64#2),<in5=int64#4d
- # asm 2: xorl 84(<m=%rsi),<in5=%ecx
- xorl 84(%rsi),%ecx
- # qhasm: (uint32) in6 ^= *(uint32 *) (m + 88)
- # asm 1: xorl 88(<m=int64#2),<in6=int64#5d
- # asm 2: xorl 88(<m=%rsi),<in6=%r8d
- xorl 88(%rsi),%r8d
- # qhasm: (uint32) in7 ^= *(uint32 *) (m + 92)
- # asm 1: xorl 92(<m=int64#2),<in7=int64#6d
- # asm 2: xorl 92(<m=%rsi),<in7=%r9d
- xorl 92(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 80) = in4
- # asm 1: movl <in4=int64#3d,80(<out=int64#1)
- # asm 2: movl <in4=%edx,80(<out=%rdi)
- movl %edx,80(%rdi)
- # qhasm: *(uint32 *) (out + 84) = in5
- # asm 1: movl <in5=int64#4d,84(<out=int64#1)
- # asm 2: movl <in5=%ecx,84(<out=%rdi)
- movl %ecx,84(%rdi)
- # qhasm: *(uint32 *) (out + 88) = in6
- # asm 1: movl <in6=int64#5d,88(<out=int64#1)
- # asm 2: movl <in6=%r8d,88(<out=%rdi)
- movl %r8d,88(%rdi)
- # qhasm: *(uint32 *) (out + 92) = in7
- # asm 1: movl <in7=int64#6d,92(<out=int64#1)
- # asm 2: movl <in7=%r9d,92(<out=%rdi)
- movl %r9d,92(%rdi)
- # qhasm: in4 = z4
- # asm 1: movd <z4=int6464#15,>in4=int64#3
- # asm 2: movd <z4=%xmm14,>in4=%rdx
- movd %xmm14,%rdx
- # qhasm: in5 = z5
- # asm 1: movd <z5=int6464#1,>in5=int64#4
- # asm 2: movd <z5=%xmm0,>in5=%rcx
- movd %xmm0,%rcx
- # qhasm: in6 = z6
- # asm 1: movd <z6=int6464#6,>in6=int64#5
- # asm 2: movd <z6=%xmm5,>in6=%r8
- movd %xmm5,%r8
- # qhasm: in7 = z7
- # asm 1: movd <z7=int6464#9,>in7=int64#6
- # asm 2: movd <z7=%xmm8,>in7=%r9
- movd %xmm8,%r9
- # qhasm: z4 <<<= 96
- # asm 1: pshufd $0x39,<z4=int6464#15,<z4=int6464#15
- # asm 2: pshufd $0x39,<z4=%xmm14,<z4=%xmm14
- pshufd $0x39,%xmm14,%xmm14
- # qhasm: z5 <<<= 96
- # asm 1: pshufd $0x39,<z5=int6464#1,<z5=int6464#1
- # asm 2: pshufd $0x39,<z5=%xmm0,<z5=%xmm0
- pshufd $0x39,%xmm0,%xmm0
- # qhasm: z6 <<<= 96
- # asm 1: pshufd $0x39,<z6=int6464#6,<z6=int6464#6
- # asm 2: pshufd $0x39,<z6=%xmm5,<z6=%xmm5
- pshufd $0x39,%xmm5,%xmm5
- # qhasm: z7 <<<= 96
- # asm 1: pshufd $0x39,<z7=int6464#9,<z7=int6464#9
- # asm 2: pshufd $0x39,<z7=%xmm8,<z7=%xmm8
- pshufd $0x39,%xmm8,%xmm8
- # qhasm: (uint32) in4 ^= *(uint32 *) (m + 144)
- # asm 1: xorl 144(<m=int64#2),<in4=int64#3d
- # asm 2: xorl 144(<m=%rsi),<in4=%edx
- xorl 144(%rsi),%edx
- # qhasm: (uint32) in5 ^= *(uint32 *) (m + 148)
- # asm 1: xorl 148(<m=int64#2),<in5=int64#4d
- # asm 2: xorl 148(<m=%rsi),<in5=%ecx
- xorl 148(%rsi),%ecx
- # qhasm: (uint32) in6 ^= *(uint32 *) (m + 152)
- # asm 1: xorl 152(<m=int64#2),<in6=int64#5d
- # asm 2: xorl 152(<m=%rsi),<in6=%r8d
- xorl 152(%rsi),%r8d
- # qhasm: (uint32) in7 ^= *(uint32 *) (m + 156)
- # asm 1: xorl 156(<m=int64#2),<in7=int64#6d
- # asm 2: xorl 156(<m=%rsi),<in7=%r9d
- xorl 156(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 144) = in4
- # asm 1: movl <in4=int64#3d,144(<out=int64#1)
- # asm 2: movl <in4=%edx,144(<out=%rdi)
- movl %edx,144(%rdi)
- # qhasm: *(uint32 *) (out + 148) = in5
- # asm 1: movl <in5=int64#4d,148(<out=int64#1)
- # asm 2: movl <in5=%ecx,148(<out=%rdi)
- movl %ecx,148(%rdi)
- # qhasm: *(uint32 *) (out + 152) = in6
- # asm 1: movl <in6=int64#5d,152(<out=int64#1)
- # asm 2: movl <in6=%r8d,152(<out=%rdi)
- movl %r8d,152(%rdi)
- # qhasm: *(uint32 *) (out + 156) = in7
- # asm 1: movl <in7=int64#6d,156(<out=int64#1)
- # asm 2: movl <in7=%r9d,156(<out=%rdi)
- movl %r9d,156(%rdi)
- # qhasm: in4 = z4
- # asm 1: movd <z4=int6464#15,>in4=int64#3
- # asm 2: movd <z4=%xmm14,>in4=%rdx
- movd %xmm14,%rdx
- # qhasm: in5 = z5
- # asm 1: movd <z5=int6464#1,>in5=int64#4
- # asm 2: movd <z5=%xmm0,>in5=%rcx
- movd %xmm0,%rcx
- # qhasm: in6 = z6
- # asm 1: movd <z6=int6464#6,>in6=int64#5
- # asm 2: movd <z6=%xmm5,>in6=%r8
- movd %xmm5,%r8
- # qhasm: in7 = z7
- # asm 1: movd <z7=int6464#9,>in7=int64#6
- # asm 2: movd <z7=%xmm8,>in7=%r9
- movd %xmm8,%r9
- # qhasm: (uint32) in4 ^= *(uint32 *) (m + 208)
- # asm 1: xorl 208(<m=int64#2),<in4=int64#3d
- # asm 2: xorl 208(<m=%rsi),<in4=%edx
- xorl 208(%rsi),%edx
- # qhasm: (uint32) in5 ^= *(uint32 *) (m + 212)
- # asm 1: xorl 212(<m=int64#2),<in5=int64#4d
- # asm 2: xorl 212(<m=%rsi),<in5=%ecx
- xorl 212(%rsi),%ecx
- # qhasm: (uint32) in6 ^= *(uint32 *) (m + 216)
- # asm 1: xorl 216(<m=int64#2),<in6=int64#5d
- # asm 2: xorl 216(<m=%rsi),<in6=%r8d
- xorl 216(%rsi),%r8d
- # qhasm: (uint32) in7 ^= *(uint32 *) (m + 220)
- # asm 1: xorl 220(<m=int64#2),<in7=int64#6d
- # asm 2: xorl 220(<m=%rsi),<in7=%r9d
- xorl 220(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 208) = in4
- # asm 1: movl <in4=int64#3d,208(<out=int64#1)
- # asm 2: movl <in4=%edx,208(<out=%rdi)
- movl %edx,208(%rdi)
- # qhasm: *(uint32 *) (out + 212) = in5
- # asm 1: movl <in5=int64#4d,212(<out=int64#1)
- # asm 2: movl <in5=%ecx,212(<out=%rdi)
- movl %ecx,212(%rdi)
- # qhasm: *(uint32 *) (out + 216) = in6
- # asm 1: movl <in6=int64#5d,216(<out=int64#1)
- # asm 2: movl <in6=%r8d,216(<out=%rdi)
- movl %r8d,216(%rdi)
- # qhasm: *(uint32 *) (out + 220) = in7
- # asm 1: movl <in7=int64#6d,220(<out=int64#1)
- # asm 2: movl <in7=%r9d,220(<out=%rdi)
- movl %r9d,220(%rdi)
- # qhasm: uint32323232 z8 += orig8
- # asm 1: paddd <orig8=stack128#19,<z8=int6464#16
- # asm 2: paddd <orig8=288(%rsp),<z8=%xmm15
- paddd 288(%rsp),%xmm15
- # qhasm: uint32323232 z9 += orig9
- # asm 1: paddd <orig9=stack128#20,<z9=int6464#12
- # asm 2: paddd <orig9=304(%rsp),<z9=%xmm11
- paddd 304(%rsp),%xmm11
- # qhasm: uint32323232 z10 += orig10
- # asm 1: paddd <orig10=stack128#6,<z10=int6464#2
- # asm 2: paddd <orig10=80(%rsp),<z10=%xmm1
- paddd 80(%rsp),%xmm1
- # qhasm: uint32323232 z11 += orig11
- # asm 1: paddd <orig11=stack128#10,<z11=int6464#7
- # asm 2: paddd <orig11=144(%rsp),<z11=%xmm6
- paddd 144(%rsp),%xmm6
- # qhasm: in8 = z8
- # asm 1: movd <z8=int6464#16,>in8=int64#3
- # asm 2: movd <z8=%xmm15,>in8=%rdx
- movd %xmm15,%rdx
- # qhasm: in9 = z9
- # asm 1: movd <z9=int6464#12,>in9=int64#4
- # asm 2: movd <z9=%xmm11,>in9=%rcx
- movd %xmm11,%rcx
- # qhasm: in10 = z10
- # asm 1: movd <z10=int6464#2,>in10=int64#5
- # asm 2: movd <z10=%xmm1,>in10=%r8
- movd %xmm1,%r8
- # qhasm: in11 = z11
- # asm 1: movd <z11=int6464#7,>in11=int64#6
- # asm 2: movd <z11=%xmm6,>in11=%r9
- movd %xmm6,%r9
- # qhasm: z8 <<<= 96
- # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16
- # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15
- pshufd $0x39,%xmm15,%xmm15
- # qhasm: z9 <<<= 96
- # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12
- # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11
- pshufd $0x39,%xmm11,%xmm11
- # qhasm: z10 <<<= 96
- # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2
- # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1
- pshufd $0x39,%xmm1,%xmm1
- # qhasm: z11 <<<= 96
- # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7
- # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6
- pshufd $0x39,%xmm6,%xmm6
- # qhasm: (uint32) in8 ^= *(uint32 *) (m + 32)
- # asm 1: xorl 32(<m=int64#2),<in8=int64#3d
- # asm 2: xorl 32(<m=%rsi),<in8=%edx
- xorl 32(%rsi),%edx
- # qhasm: (uint32) in9 ^= *(uint32 *) (m + 36)
- # asm 1: xorl 36(<m=int64#2),<in9=int64#4d
- # asm 2: xorl 36(<m=%rsi),<in9=%ecx
- xorl 36(%rsi),%ecx
- # qhasm: (uint32) in10 ^= *(uint32 *) (m + 40)
- # asm 1: xorl 40(<m=int64#2),<in10=int64#5d
- # asm 2: xorl 40(<m=%rsi),<in10=%r8d
- xorl 40(%rsi),%r8d
- # qhasm: (uint32) in11 ^= *(uint32 *) (m + 44)
- # asm 1: xorl 44(<m=int64#2),<in11=int64#6d
- # asm 2: xorl 44(<m=%rsi),<in11=%r9d
- xorl 44(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 32) = in8
- # asm 1: movl <in8=int64#3d,32(<out=int64#1)
- # asm 2: movl <in8=%edx,32(<out=%rdi)
- movl %edx,32(%rdi)
- # qhasm: *(uint32 *) (out + 36) = in9
- # asm 1: movl <in9=int64#4d,36(<out=int64#1)
- # asm 2: movl <in9=%ecx,36(<out=%rdi)
- movl %ecx,36(%rdi)
- # qhasm: *(uint32 *) (out + 40) = in10
- # asm 1: movl <in10=int64#5d,40(<out=int64#1)
- # asm 2: movl <in10=%r8d,40(<out=%rdi)
- movl %r8d,40(%rdi)
- # qhasm: *(uint32 *) (out + 44) = in11
- # asm 1: movl <in11=int64#6d,44(<out=int64#1)
- # asm 2: movl <in11=%r9d,44(<out=%rdi)
- movl %r9d,44(%rdi)
- # qhasm: in8 = z8
- # asm 1: movd <z8=int6464#16,>in8=int64#3
- # asm 2: movd <z8=%xmm15,>in8=%rdx
- movd %xmm15,%rdx
- # qhasm: in9 = z9
- # asm 1: movd <z9=int6464#12,>in9=int64#4
- # asm 2: movd <z9=%xmm11,>in9=%rcx
- movd %xmm11,%rcx
- # qhasm: in10 = z10
- # asm 1: movd <z10=int6464#2,>in10=int64#5
- # asm 2: movd <z10=%xmm1,>in10=%r8
- movd %xmm1,%r8
- # qhasm: in11 = z11
- # asm 1: movd <z11=int6464#7,>in11=int64#6
- # asm 2: movd <z11=%xmm6,>in11=%r9
- movd %xmm6,%r9
- # qhasm: z8 <<<= 96
- # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16
- # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15
- pshufd $0x39,%xmm15,%xmm15
- # qhasm: z9 <<<= 96
- # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12
- # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11
- pshufd $0x39,%xmm11,%xmm11
- # qhasm: z10 <<<= 96
- # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2
- # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1
- pshufd $0x39,%xmm1,%xmm1
- # qhasm: z11 <<<= 96
- # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7
- # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6
- pshufd $0x39,%xmm6,%xmm6
- # qhasm: (uint32) in8 ^= *(uint32 *) (m + 96)
- # asm 1: xorl 96(<m=int64#2),<in8=int64#3d
- # asm 2: xorl 96(<m=%rsi),<in8=%edx
- xorl 96(%rsi),%edx
- # qhasm: (uint32) in9 ^= *(uint32 *) (m + 100)
- # asm 1: xorl 100(<m=int64#2),<in9=int64#4d
- # asm 2: xorl 100(<m=%rsi),<in9=%ecx
- xorl 100(%rsi),%ecx
- # qhasm: (uint32) in10 ^= *(uint32 *) (m + 104)
- # asm 1: xorl 104(<m=int64#2),<in10=int64#5d
- # asm 2: xorl 104(<m=%rsi),<in10=%r8d
- xorl 104(%rsi),%r8d
- # qhasm: (uint32) in11 ^= *(uint32 *) (m + 108)
- # asm 1: xorl 108(<m=int64#2),<in11=int64#6d
- # asm 2: xorl 108(<m=%rsi),<in11=%r9d
- xorl 108(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 96) = in8
- # asm 1: movl <in8=int64#3d,96(<out=int64#1)
- # asm 2: movl <in8=%edx,96(<out=%rdi)
- movl %edx,96(%rdi)
- # qhasm: *(uint32 *) (out + 100) = in9
- # asm 1: movl <in9=int64#4d,100(<out=int64#1)
- # asm 2: movl <in9=%ecx,100(<out=%rdi)
- movl %ecx,100(%rdi)
- # qhasm: *(uint32 *) (out + 104) = in10
- # asm 1: movl <in10=int64#5d,104(<out=int64#1)
- # asm 2: movl <in10=%r8d,104(<out=%rdi)
- movl %r8d,104(%rdi)
- # qhasm: *(uint32 *) (out + 108) = in11
- # asm 1: movl <in11=int64#6d,108(<out=int64#1)
- # asm 2: movl <in11=%r9d,108(<out=%rdi)
- movl %r9d,108(%rdi)
- # qhasm: in8 = z8
- # asm 1: movd <z8=int6464#16,>in8=int64#3
- # asm 2: movd <z8=%xmm15,>in8=%rdx
- movd %xmm15,%rdx
- # qhasm: in9 = z9
- # asm 1: movd <z9=int6464#12,>in9=int64#4
- # asm 2: movd <z9=%xmm11,>in9=%rcx
- movd %xmm11,%rcx
- # qhasm: in10 = z10
- # asm 1: movd <z10=int6464#2,>in10=int64#5
- # asm 2: movd <z10=%xmm1,>in10=%r8
- movd %xmm1,%r8
- # qhasm: in11 = z11
- # asm 1: movd <z11=int6464#7,>in11=int64#6
- # asm 2: movd <z11=%xmm6,>in11=%r9
- movd %xmm6,%r9
- # qhasm: z8 <<<= 96
- # asm 1: pshufd $0x39,<z8=int6464#16,<z8=int6464#16
- # asm 2: pshufd $0x39,<z8=%xmm15,<z8=%xmm15
- pshufd $0x39,%xmm15,%xmm15
- # qhasm: z9 <<<= 96
- # asm 1: pshufd $0x39,<z9=int6464#12,<z9=int6464#12
- # asm 2: pshufd $0x39,<z9=%xmm11,<z9=%xmm11
- pshufd $0x39,%xmm11,%xmm11
- # qhasm: z10 <<<= 96
- # asm 1: pshufd $0x39,<z10=int6464#2,<z10=int6464#2
- # asm 2: pshufd $0x39,<z10=%xmm1,<z10=%xmm1
- pshufd $0x39,%xmm1,%xmm1
- # qhasm: z11 <<<= 96
- # asm 1: pshufd $0x39,<z11=int6464#7,<z11=int6464#7
- # asm 2: pshufd $0x39,<z11=%xmm6,<z11=%xmm6
- pshufd $0x39,%xmm6,%xmm6
- # qhasm: (uint32) in8 ^= *(uint32 *) (m + 160)
- # asm 1: xorl 160(<m=int64#2),<in8=int64#3d
- # asm 2: xorl 160(<m=%rsi),<in8=%edx
- xorl 160(%rsi),%edx
- # qhasm: (uint32) in9 ^= *(uint32 *) (m + 164)
- # asm 1: xorl 164(<m=int64#2),<in9=int64#4d
- # asm 2: xorl 164(<m=%rsi),<in9=%ecx
- xorl 164(%rsi),%ecx
- # qhasm: (uint32) in10 ^= *(uint32 *) (m + 168)
- # asm 1: xorl 168(<m=int64#2),<in10=int64#5d
- # asm 2: xorl 168(<m=%rsi),<in10=%r8d
- xorl 168(%rsi),%r8d
- # qhasm: (uint32) in11 ^= *(uint32 *) (m + 172)
- # asm 1: xorl 172(<m=int64#2),<in11=int64#6d
- # asm 2: xorl 172(<m=%rsi),<in11=%r9d
- xorl 172(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 160) = in8
- # asm 1: movl <in8=int64#3d,160(<out=int64#1)
- # asm 2: movl <in8=%edx,160(<out=%rdi)
- movl %edx,160(%rdi)
- # qhasm: *(uint32 *) (out + 164) = in9
- # asm 1: movl <in9=int64#4d,164(<out=int64#1)
- # asm 2: movl <in9=%ecx,164(<out=%rdi)
- movl %ecx,164(%rdi)
- # qhasm: *(uint32 *) (out + 168) = in10
- # asm 1: movl <in10=int64#5d,168(<out=int64#1)
- # asm 2: movl <in10=%r8d,168(<out=%rdi)
- movl %r8d,168(%rdi)
- # qhasm: *(uint32 *) (out + 172) = in11
- # asm 1: movl <in11=int64#6d,172(<out=int64#1)
- # asm 2: movl <in11=%r9d,172(<out=%rdi)
- movl %r9d,172(%rdi)
- # qhasm: in8 = z8
- # asm 1: movd <z8=int6464#16,>in8=int64#3
- # asm 2: movd <z8=%xmm15,>in8=%rdx
- movd %xmm15,%rdx
- # qhasm: in9 = z9
- # asm 1: movd <z9=int6464#12,>in9=int64#4
- # asm 2: movd <z9=%xmm11,>in9=%rcx
- movd %xmm11,%rcx
- # qhasm: in10 = z10
- # asm 1: movd <z10=int6464#2,>in10=int64#5
- # asm 2: movd <z10=%xmm1,>in10=%r8
- movd %xmm1,%r8
- # qhasm: in11 = z11
- # asm 1: movd <z11=int6464#7,>in11=int64#6
- # asm 2: movd <z11=%xmm6,>in11=%r9
- movd %xmm6,%r9
- # qhasm: (uint32) in8 ^= *(uint32 *) (m + 224)
- # asm 1: xorl 224(<m=int64#2),<in8=int64#3d
- # asm 2: xorl 224(<m=%rsi),<in8=%edx
- xorl 224(%rsi),%edx
- # qhasm: (uint32) in9 ^= *(uint32 *) (m + 228)
- # asm 1: xorl 228(<m=int64#2),<in9=int64#4d
- # asm 2: xorl 228(<m=%rsi),<in9=%ecx
- xorl 228(%rsi),%ecx
- # qhasm: (uint32) in10 ^= *(uint32 *) (m + 232)
- # asm 1: xorl 232(<m=int64#2),<in10=int64#5d
- # asm 2: xorl 232(<m=%rsi),<in10=%r8d
- xorl 232(%rsi),%r8d
- # qhasm: (uint32) in11 ^= *(uint32 *) (m + 236)
- # asm 1: xorl 236(<m=int64#2),<in11=int64#6d
- # asm 2: xorl 236(<m=%rsi),<in11=%r9d
- xorl 236(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 224) = in8
- # asm 1: movl <in8=int64#3d,224(<out=int64#1)
- # asm 2: movl <in8=%edx,224(<out=%rdi)
- movl %edx,224(%rdi)
- # qhasm: *(uint32 *) (out + 228) = in9
- # asm 1: movl <in9=int64#4d,228(<out=int64#1)
- # asm 2: movl <in9=%ecx,228(<out=%rdi)
- movl %ecx,228(%rdi)
- # qhasm: *(uint32 *) (out + 232) = in10
- # asm 1: movl <in10=int64#5d,232(<out=int64#1)
- # asm 2: movl <in10=%r8d,232(<out=%rdi)
- movl %r8d,232(%rdi)
- # qhasm: *(uint32 *) (out + 236) = in11
- # asm 1: movl <in11=int64#6d,236(<out=int64#1)
- # asm 2: movl <in11=%r9d,236(<out=%rdi)
- movl %r9d,236(%rdi)
- # qhasm: uint32323232 z12 += orig12
- # asm 1: paddd <orig12=stack128#11,<z12=int6464#14
- # asm 2: paddd <orig12=160(%rsp),<z12=%xmm13
- paddd 160(%rsp),%xmm13
- # qhasm: uint32323232 z13 += orig13
- # asm 1: paddd <orig13=stack128#14,<z13=int6464#10
- # asm 2: paddd <orig13=208(%rsp),<z13=%xmm9
- paddd 208(%rsp),%xmm9
- # qhasm: uint32323232 z14 += orig14
- # asm 1: paddd <orig14=stack128#17,<z14=int6464#4
- # asm 2: paddd <orig14=256(%rsp),<z14=%xmm3
- paddd 256(%rsp),%xmm3
- # qhasm: uint32323232 z15 += orig15
- # asm 1: paddd <orig15=stack128#7,<z15=int6464#3
- # asm 2: paddd <orig15=96(%rsp),<z15=%xmm2
- paddd 96(%rsp),%xmm2
- # qhasm: in12 = z12
- # asm 1: movd <z12=int6464#14,>in12=int64#3
- # asm 2: movd <z12=%xmm13,>in12=%rdx
- movd %xmm13,%rdx
- # qhasm: in13 = z13
- # asm 1: movd <z13=int6464#10,>in13=int64#4
- # asm 2: movd <z13=%xmm9,>in13=%rcx
- movd %xmm9,%rcx
- # qhasm: in14 = z14
- # asm 1: movd <z14=int6464#4,>in14=int64#5
- # asm 2: movd <z14=%xmm3,>in14=%r8
- movd %xmm3,%r8
- # qhasm: in15 = z15
- # asm 1: movd <z15=int6464#3,>in15=int64#6
- # asm 2: movd <z15=%xmm2,>in15=%r9
- movd %xmm2,%r9
- # qhasm: z12 <<<= 96
- # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14
- # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13
- pshufd $0x39,%xmm13,%xmm13
- # qhasm: z13 <<<= 96
- # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10
- # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9
- pshufd $0x39,%xmm9,%xmm9
- # qhasm: z14 <<<= 96
- # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4
- # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3
- pshufd $0x39,%xmm3,%xmm3
- # qhasm: z15 <<<= 96
- # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3
- # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2
- pshufd $0x39,%xmm2,%xmm2
- # qhasm: (uint32) in12 ^= *(uint32 *) (m + 48)
- # asm 1: xorl 48(<m=int64#2),<in12=int64#3d
- # asm 2: xorl 48(<m=%rsi),<in12=%edx
- xorl 48(%rsi),%edx
- # qhasm: (uint32) in13 ^= *(uint32 *) (m + 52)
- # asm 1: xorl 52(<m=int64#2),<in13=int64#4d
- # asm 2: xorl 52(<m=%rsi),<in13=%ecx
- xorl 52(%rsi),%ecx
- # qhasm: (uint32) in14 ^= *(uint32 *) (m + 56)
- # asm 1: xorl 56(<m=int64#2),<in14=int64#5d
- # asm 2: xorl 56(<m=%rsi),<in14=%r8d
- xorl 56(%rsi),%r8d
- # qhasm: (uint32) in15 ^= *(uint32 *) (m + 60)
- # asm 1: xorl 60(<m=int64#2),<in15=int64#6d
- # asm 2: xorl 60(<m=%rsi),<in15=%r9d
- xorl 60(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 48) = in12
- # asm 1: movl <in12=int64#3d,48(<out=int64#1)
- # asm 2: movl <in12=%edx,48(<out=%rdi)
- movl %edx,48(%rdi)
- # qhasm: *(uint32 *) (out + 52) = in13
- # asm 1: movl <in13=int64#4d,52(<out=int64#1)
- # asm 2: movl <in13=%ecx,52(<out=%rdi)
- movl %ecx,52(%rdi)
- # qhasm: *(uint32 *) (out + 56) = in14
- # asm 1: movl <in14=int64#5d,56(<out=int64#1)
- # asm 2: movl <in14=%r8d,56(<out=%rdi)
- movl %r8d,56(%rdi)
- # qhasm: *(uint32 *) (out + 60) = in15
- # asm 1: movl <in15=int64#6d,60(<out=int64#1)
- # asm 2: movl <in15=%r9d,60(<out=%rdi)
- movl %r9d,60(%rdi)
- # qhasm: in12 = z12
- # asm 1: movd <z12=int6464#14,>in12=int64#3
- # asm 2: movd <z12=%xmm13,>in12=%rdx
- movd %xmm13,%rdx
- # qhasm: in13 = z13
- # asm 1: movd <z13=int6464#10,>in13=int64#4
- # asm 2: movd <z13=%xmm9,>in13=%rcx
- movd %xmm9,%rcx
- # qhasm: in14 = z14
- # asm 1: movd <z14=int6464#4,>in14=int64#5
- # asm 2: movd <z14=%xmm3,>in14=%r8
- movd %xmm3,%r8
- # qhasm: in15 = z15
- # asm 1: movd <z15=int6464#3,>in15=int64#6
- # asm 2: movd <z15=%xmm2,>in15=%r9
- movd %xmm2,%r9
- # qhasm: z12 <<<= 96
- # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14
- # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13
- pshufd $0x39,%xmm13,%xmm13
- # qhasm: z13 <<<= 96
- # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10
- # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9
- pshufd $0x39,%xmm9,%xmm9
- # qhasm: z14 <<<= 96
- # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4
- # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3
- pshufd $0x39,%xmm3,%xmm3
- # qhasm: z15 <<<= 96
- # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3
- # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2
- pshufd $0x39,%xmm2,%xmm2
- # qhasm: (uint32) in12 ^= *(uint32 *) (m + 112)
- # asm 1: xorl 112(<m=int64#2),<in12=int64#3d
- # asm 2: xorl 112(<m=%rsi),<in12=%edx
- xorl 112(%rsi),%edx
- # qhasm: (uint32) in13 ^= *(uint32 *) (m + 116)
- # asm 1: xorl 116(<m=int64#2),<in13=int64#4d
- # asm 2: xorl 116(<m=%rsi),<in13=%ecx
- xorl 116(%rsi),%ecx
- # qhasm: (uint32) in14 ^= *(uint32 *) (m + 120)
- # asm 1: xorl 120(<m=int64#2),<in14=int64#5d
- # asm 2: xorl 120(<m=%rsi),<in14=%r8d
- xorl 120(%rsi),%r8d
- # qhasm: (uint32) in15 ^= *(uint32 *) (m + 124)
- # asm 1: xorl 124(<m=int64#2),<in15=int64#6d
- # asm 2: xorl 124(<m=%rsi),<in15=%r9d
- xorl 124(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 112) = in12
- # asm 1: movl <in12=int64#3d,112(<out=int64#1)
- # asm 2: movl <in12=%edx,112(<out=%rdi)
- movl %edx,112(%rdi)
- # qhasm: *(uint32 *) (out + 116) = in13
- # asm 1: movl <in13=int64#4d,116(<out=int64#1)
- # asm 2: movl <in13=%ecx,116(<out=%rdi)
- movl %ecx,116(%rdi)
- # qhasm: *(uint32 *) (out + 120) = in14
- # asm 1: movl <in14=int64#5d,120(<out=int64#1)
- # asm 2: movl <in14=%r8d,120(<out=%rdi)
- movl %r8d,120(%rdi)
- # qhasm: *(uint32 *) (out + 124) = in15
- # asm 1: movl <in15=int64#6d,124(<out=int64#1)
- # asm 2: movl <in15=%r9d,124(<out=%rdi)
- movl %r9d,124(%rdi)
- # qhasm: in12 = z12
- # asm 1: movd <z12=int6464#14,>in12=int64#3
- # asm 2: movd <z12=%xmm13,>in12=%rdx
- movd %xmm13,%rdx
- # qhasm: in13 = z13
- # asm 1: movd <z13=int6464#10,>in13=int64#4
- # asm 2: movd <z13=%xmm9,>in13=%rcx
- movd %xmm9,%rcx
- # qhasm: in14 = z14
- # asm 1: movd <z14=int6464#4,>in14=int64#5
- # asm 2: movd <z14=%xmm3,>in14=%r8
- movd %xmm3,%r8
- # qhasm: in15 = z15
- # asm 1: movd <z15=int6464#3,>in15=int64#6
- # asm 2: movd <z15=%xmm2,>in15=%r9
- movd %xmm2,%r9
- # qhasm: z12 <<<= 96
- # asm 1: pshufd $0x39,<z12=int6464#14,<z12=int6464#14
- # asm 2: pshufd $0x39,<z12=%xmm13,<z12=%xmm13
- pshufd $0x39,%xmm13,%xmm13
- # qhasm: z13 <<<= 96
- # asm 1: pshufd $0x39,<z13=int6464#10,<z13=int6464#10
- # asm 2: pshufd $0x39,<z13=%xmm9,<z13=%xmm9
- pshufd $0x39,%xmm9,%xmm9
- # qhasm: z14 <<<= 96
- # asm 1: pshufd $0x39,<z14=int6464#4,<z14=int6464#4
- # asm 2: pshufd $0x39,<z14=%xmm3,<z14=%xmm3
- pshufd $0x39,%xmm3,%xmm3
- # qhasm: z15 <<<= 96
- # asm 1: pshufd $0x39,<z15=int6464#3,<z15=int6464#3
- # asm 2: pshufd $0x39,<z15=%xmm2,<z15=%xmm2
- pshufd $0x39,%xmm2,%xmm2
- # qhasm: (uint32) in12 ^= *(uint32 *) (m + 176)
- # asm 1: xorl 176(<m=int64#2),<in12=int64#3d
- # asm 2: xorl 176(<m=%rsi),<in12=%edx
- xorl 176(%rsi),%edx
- # qhasm: (uint32) in13 ^= *(uint32 *) (m + 180)
- # asm 1: xorl 180(<m=int64#2),<in13=int64#4d
- # asm 2: xorl 180(<m=%rsi),<in13=%ecx
- xorl 180(%rsi),%ecx
- # qhasm: (uint32) in14 ^= *(uint32 *) (m + 184)
- # asm 1: xorl 184(<m=int64#2),<in14=int64#5d
- # asm 2: xorl 184(<m=%rsi),<in14=%r8d
- xorl 184(%rsi),%r8d
- # qhasm: (uint32) in15 ^= *(uint32 *) (m + 188)
- # asm 1: xorl 188(<m=int64#2),<in15=int64#6d
- # asm 2: xorl 188(<m=%rsi),<in15=%r9d
- xorl 188(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 176) = in12
- # asm 1: movl <in12=int64#3d,176(<out=int64#1)
- # asm 2: movl <in12=%edx,176(<out=%rdi)
- movl %edx,176(%rdi)
- # qhasm: *(uint32 *) (out + 180) = in13
- # asm 1: movl <in13=int64#4d,180(<out=int64#1)
- # asm 2: movl <in13=%ecx,180(<out=%rdi)
- movl %ecx,180(%rdi)
- # qhasm: *(uint32 *) (out + 184) = in14
- # asm 1: movl <in14=int64#5d,184(<out=int64#1)
- # asm 2: movl <in14=%r8d,184(<out=%rdi)
- movl %r8d,184(%rdi)
- # qhasm: *(uint32 *) (out + 188) = in15
- # asm 1: movl <in15=int64#6d,188(<out=int64#1)
- # asm 2: movl <in15=%r9d,188(<out=%rdi)
- movl %r9d,188(%rdi)
- # qhasm: in12 = z12
- # asm 1: movd <z12=int6464#14,>in12=int64#3
- # asm 2: movd <z12=%xmm13,>in12=%rdx
- movd %xmm13,%rdx
- # qhasm: in13 = z13
- # asm 1: movd <z13=int6464#10,>in13=int64#4
- # asm 2: movd <z13=%xmm9,>in13=%rcx
- movd %xmm9,%rcx
- # qhasm: in14 = z14
- # asm 1: movd <z14=int6464#4,>in14=int64#5
- # asm 2: movd <z14=%xmm3,>in14=%r8
- movd %xmm3,%r8
- # qhasm: in15 = z15
- # asm 1: movd <z15=int6464#3,>in15=int64#6
- # asm 2: movd <z15=%xmm2,>in15=%r9
- movd %xmm2,%r9
- # qhasm: (uint32) in12 ^= *(uint32 *) (m + 240)
- # asm 1: xorl 240(<m=int64#2),<in12=int64#3d
- # asm 2: xorl 240(<m=%rsi),<in12=%edx
- xorl 240(%rsi),%edx
- # qhasm: (uint32) in13 ^= *(uint32 *) (m + 244)
- # asm 1: xorl 244(<m=int64#2),<in13=int64#4d
- # asm 2: xorl 244(<m=%rsi),<in13=%ecx
- xorl 244(%rsi),%ecx
- # qhasm: (uint32) in14 ^= *(uint32 *) (m + 248)
- # asm 1: xorl 248(<m=int64#2),<in14=int64#5d
- # asm 2: xorl 248(<m=%rsi),<in14=%r8d
- xorl 248(%rsi),%r8d
- # qhasm: (uint32) in15 ^= *(uint32 *) (m + 252)
- # asm 1: xorl 252(<m=int64#2),<in15=int64#6d
- # asm 2: xorl 252(<m=%rsi),<in15=%r9d
- xorl 252(%rsi),%r9d
- # qhasm: *(uint32 *) (out + 240) = in12
- # asm 1: movl <in12=int64#3d,240(<out=int64#1)
- # asm 2: movl <in12=%edx,240(<out=%rdi)
- movl %edx,240(%rdi)
- # qhasm: *(uint32 *) (out + 244) = in13
- # asm 1: movl <in13=int64#4d,244(<out=int64#1)
- # asm 2: movl <in13=%ecx,244(<out=%rdi)
- movl %ecx,244(%rdi)
- # qhasm: *(uint32 *) (out + 248) = in14
- # asm 1: movl <in14=int64#5d,248(<out=int64#1)
- # asm 2: movl <in14=%r8d,248(<out=%rdi)
- movl %r8d,248(%rdi)
- # qhasm: *(uint32 *) (out + 252) = in15
- # asm 1: movl <in15=int64#6d,252(<out=int64#1)
- # asm 2: movl <in15=%r9d,252(<out=%rdi)
- movl %r9d,252(%rdi)
- # qhasm: bytes = bytes_backup
- # asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6
- # asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9
- movq 408(%rsp),%r9
- # qhasm: bytes -= 256
- # asm 1: sub $256,<bytes=int64#6
- # asm 2: sub $256,<bytes=%r9
- sub $256,%r9
- # qhasm: m += 256
- # asm 1: add $256,<m=int64#2
- # asm 2: add $256,<m=%rsi
- add $256,%rsi
- # qhasm: out += 256
- # asm 1: add $256,<out=int64#1
- # asm 2: add $256,<out=%rdi
- add $256,%rdi
- # qhasm: unsigned<? bytes - 256
- # asm 1: cmp $256,<bytes=int64#6
- # asm 2: cmp $256,<bytes=%r9
- cmp $256,%r9
- # comment:fp stack unchanged by jump
- # qhasm: goto bytesatleast256 if !unsigned<
- jae ._bytesatleast256
- # qhasm: unsigned>? bytes - 0
- # asm 1: cmp $0,<bytes=int64#6
- # asm 2: cmp $0,<bytes=%r9
- cmp $0,%r9
- # comment:fp stack unchanged by jump
- # qhasm: goto done if !unsigned>
- jbe ._done
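
This stanza is the tail of the 256-byte main loop: the byte count is reloaded from its stack backup, m and out advance past the four blocks just written, and control returns to ._bytesatleast256 while at least 256 bytes remain; with nothing left it jumps to ._done, and otherwise it falls through to the 1-to-255-byte tail below. The shape in C, with hypothetical stand-ins for the surrounding assembly:

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical stand-ins for the assembly above and below. */
    static void four_blocks(uint8_t *out, const uint8_t *m) { (void)out; (void)m; }
    static void short_tail(uint8_t *out, const uint8_t *m, size_t n)
    { (void)out; (void)m; (void)n; }

    static void outer_loop(uint8_t *out, const uint8_t *m, size_t bytes)
    {
        do {                                /* entered with bytes >= 256 */
            four_blocks(out, m);
            bytes -= 256; m += 256; out += 256;
        } while (bytes >= 256);             /* cmp $256 / jae ._bytesatleast256 */
        if (bytes > 0)                      /* cmp $0 / jbe ._done */
            short_tail(out, m, bytes);      /* ._bytesbetween1and255 */
    }
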
- # comment:fp stack unchanged by fallthrough
- # qhasm: bytesbetween1and255:
- ._bytesbetween1and255:
- # qhasm: unsigned<? bytes - 64
- # asm 1: cmp $64,<bytes=int64#6
- # asm 2: cmp $64,<bytes=%r9
- cmp $64,%r9
- # comment:fp stack unchanged by jump
- # qhasm: goto nocopy if !unsigned<
- jae ._nocopy
- # qhasm: ctarget = out
- # asm 1: mov <out=int64#1,>ctarget=int64#3
- # asm 2: mov <out=%rdi,>ctarget=%rdx
- mov %rdi,%rdx
- # qhasm: out = &tmp
- # asm 1: leaq <tmp=stack512#1,>out=int64#1
- # asm 2: leaq <tmp=416(%rsp),>out=%rdi
- leaq 416(%rsp),%rdi
- # qhasm: i = bytes
- # asm 1: mov <bytes=int64#6,>i=int64#4
- # asm 2: mov <bytes=%r9,>i=%rcx
- mov %r9,%rcx
- # qhasm: while (i) { *out++ = *m++; --i }
- rep movsb
- # qhasm: out = &tmp
- # asm 1: leaq <tmp=stack512#1,>out=int64#1
- # asm 2: leaq <tmp=416(%rsp),>out=%rdi
- leaq 416(%rsp),%rdi
- # qhasm: m = &tmp
- # asm 1: leaq <tmp=stack512#1,>m=int64#2
- # asm 2: leaq <tmp=416(%rsp),>m=%rsi
- leaq 416(%rsp),%rsi
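
When fewer than 64 bytes remain, the real output pointer is parked in ctarget, the leftover message bytes are copied into the 64-byte tmp area at 416(%rsp) with rep movsb, and both m and out are redirected at tmp, so the single-block code below can always read and write a full 64 bytes; the partial result is copied back out to ctarget only after the block is finished, outside this excerpt. A sketch of the redirection (illustrative names, caller-provided buffer):

    #include <stdint.h>
    #include <string.h>

    /* tmp plays the role of the stack area at 416(%rsp). */
    static void redirect_tail(const uint8_t **m, uint8_t **out,
                              uint8_t **ctarget, uint8_t tmp[64], size_t bytes)
    {
        if (bytes < 64) {               /* cmp $64 / jae ._nocopy */
            *ctarget = *out;
            memcpy(tmp, *m, bytes);     /* rep movsb */
            *m = tmp;
            *out = tmp;
        }
    }
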
- # comment:fp stack unchanged by fallthrough
- # qhasm: nocopy:
- ._nocopy:
- # qhasm: bytes_backup = bytes
- # asm 1: movq <bytes=int64#6,>bytes_backup=stack64#8
- # asm 2: movq <bytes=%r9,>bytes_backup=408(%rsp)
- movq %r9,408(%rsp)
- # qhasm: diag0 = x0
- # asm 1: movdqa <x0=stack128#4,>diag0=int6464#1
- # asm 2: movdqa <x0=48(%rsp),>diag0=%xmm0
- movdqa 48(%rsp),%xmm0
- # qhasm: diag1 = x1
- # asm 1: movdqa <x1=stack128#1,>diag1=int6464#2
- # asm 2: movdqa <x1=0(%rsp),>diag1=%xmm1
- movdqa 0(%rsp),%xmm1
- # qhasm: diag2 = x2
- # asm 1: movdqa <x2=stack128#2,>diag2=int6464#3
- # asm 2: movdqa <x2=16(%rsp),>diag2=%xmm2
- movdqa 16(%rsp),%xmm2
- # qhasm: diag3 = x3
- # asm 1: movdqa <x3=stack128#3,>diag3=int6464#4
- # asm 2: movdqa <x3=32(%rsp),>diag3=%xmm3
- movdqa 32(%rsp),%xmm3
- # qhasm: a0 = diag1
- # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5
- # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4
- movdqa %xmm1,%xmm4
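
From here on, the state is processed one 64-byte block at a time in diagonal form. As the extraction order further down confirms, the four vectors hold diag0 = (x0, x5, x10, x15), diag1 = (x12, x1, x6, x11), diag2 = (x8, x13, x2, x7), and diag3 = (x4, x9, x14, x3): lane k of each vector carries one word of column k, so a column round needs no shuffles at all, and the pshufd instructions only re-align the lanes between the two halves of each double round.
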
- # qhasm: i = 20
- # asm 1: mov $20,>i=int64#4
- # asm 2: mov $20,>i=%rcx
- mov $20,%rcx
- # qhasm: mainloop2:
- ._mainloop2:
- # qhasm: uint32323232 a0 += diag0
- # asm 1: paddd <diag0=int6464#1,<a0=int6464#5
- # asm 2: paddd <diag0=%xmm0,<a0=%xmm4
- paddd %xmm0,%xmm4
- # qhasm: a1 = diag0
- # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6
- # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5
- movdqa %xmm0,%xmm5
- # qhasm: b0 = a0
- # asm 1: movdqa <a0=int6464#5,>b0=int6464#7
- # asm 2: movdqa <a0=%xmm4,>b0=%xmm6
- movdqa %xmm4,%xmm6
- # qhasm: uint32323232 a0 <<= 7
- # asm 1: pslld $7,<a0=int6464#5
- # asm 2: pslld $7,<a0=%xmm4
- pslld $7,%xmm4
- # qhasm: uint32323232 b0 >>= 25
- # asm 1: psrld $25,<b0=int6464#7
- # asm 2: psrld $25,<b0=%xmm6
- psrld $25,%xmm6
- # qhasm: diag3 ^= a0
- # asm 1: pxor <a0=int6464#5,<diag3=int6464#4
- # asm 2: pxor <a0=%xmm4,<diag3=%xmm3
- pxor %xmm4,%xmm3
- # qhasm: diag3 ^= b0
- # asm 1: pxor <b0=int6464#7,<diag3=int6464#4
- # asm 2: pxor <b0=%xmm6,<diag3=%xmm3
- pxor %xmm6,%xmm3
- # qhasm: uint32323232 a1 += diag3
- # asm 1: paddd <diag3=int6464#4,<a1=int6464#6
- # asm 2: paddd <diag3=%xmm3,<a1=%xmm5
- paddd %xmm3,%xmm5
- # qhasm: a2 = diag3
- # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5
- # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4
- movdqa %xmm3,%xmm4
- # qhasm: b1 = a1
- # asm 1: movdqa <a1=int6464#6,>b1=int6464#7
- # asm 2: movdqa <a1=%xmm5,>b1=%xmm6
- movdqa %xmm5,%xmm6
- # qhasm: uint32323232 a1 <<= 9
- # asm 1: pslld $9,<a1=int6464#6
- # asm 2: pslld $9,<a1=%xmm5
- pslld $9,%xmm5
- # qhasm: uint32323232 b1 >>= 23
- # asm 1: psrld $23,<b1=int6464#7
- # asm 2: psrld $23,<b1=%xmm6
- psrld $23,%xmm6
- # qhasm: diag2 ^= a1
- # asm 1: pxor <a1=int6464#6,<diag2=int6464#3
- # asm 2: pxor <a1=%xmm5,<diag2=%xmm2
- pxor %xmm5,%xmm2
- # qhasm: diag3 <<<= 32
- # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4
- # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3
- pshufd $0x93,%xmm3,%xmm3
- # qhasm: diag2 ^= b1
- # asm 1: pxor <b1=int6464#7,<diag2=int6464#3
- # asm 2: pxor <b1=%xmm6,<diag2=%xmm2
- pxor %xmm6,%xmm2
- # qhasm: uint32323232 a2 += diag2
- # asm 1: paddd <diag2=int6464#3,<a2=int6464#5
- # asm 2: paddd <diag2=%xmm2,<a2=%xmm4
- paddd %xmm2,%xmm4
- # qhasm: a3 = diag2
- # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6
- # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5
- movdqa %xmm2,%xmm5
- # qhasm: b2 = a2
- # asm 1: movdqa <a2=int6464#5,>b2=int6464#7
- # asm 2: movdqa <a2=%xmm4,>b2=%xmm6
- movdqa %xmm4,%xmm6
- # qhasm: uint32323232 a2 <<= 13
- # asm 1: pslld $13,<a2=int6464#5
- # asm 2: pslld $13,<a2=%xmm4
- pslld $13,%xmm4
- # qhasm: uint32323232 b2 >>= 19
- # asm 1: psrld $19,<b2=int6464#7
- # asm 2: psrld $19,<b2=%xmm6
- psrld $19,%xmm6
- # qhasm: diag1 ^= a2
- # asm 1: pxor <a2=int6464#5,<diag1=int6464#2
- # asm 2: pxor <a2=%xmm4,<diag1=%xmm1
- pxor %xmm4,%xmm1
- # qhasm: diag2 <<<= 64
- # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3
- # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2
- pshufd $0x4e,%xmm2,%xmm2
- # qhasm: diag1 ^= b2
- # asm 1: pxor <b2=int6464#7,<diag1=int6464#2
- # asm 2: pxor <b2=%xmm6,<diag1=%xmm1
- pxor %xmm6,%xmm1
- # qhasm: uint32323232 a3 += diag1
- # asm 1: paddd <diag1=int6464#2,<a3=int6464#6
- # asm 2: paddd <diag1=%xmm1,<a3=%xmm5
- paddd %xmm1,%xmm5
- # qhasm: a4 = diag3
- # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5
- # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4
- movdqa %xmm3,%xmm4
- # qhasm: b3 = a3
- # asm 1: movdqa <a3=int6464#6,>b3=int6464#7
- # asm 2: movdqa <a3=%xmm5,>b3=%xmm6
- movdqa %xmm5,%xmm6
- # qhasm: uint32323232 a3 <<= 18
- # asm 1: pslld $18,<a3=int6464#6
- # asm 2: pslld $18,<a3=%xmm5
- pslld $18,%xmm5
- # qhasm: uint32323232 b3 >>= 14
- # asm 1: psrld $14,<b3=int6464#7
- # asm 2: psrld $14,<b3=%xmm6
- psrld $14,%xmm6
- # qhasm: diag0 ^= a3
- # asm 1: pxor <a3=int6464#6,<diag0=int6464#1
- # asm 2: pxor <a3=%xmm5,<diag0=%xmm0
- pxor %xmm5,%xmm0
- # qhasm: diag1 <<<= 96
- # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
- # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
- pshufd $0x39,%xmm1,%xmm1
- # qhasm: diag0 ^= b3
- # asm 1: pxor <b3=int6464#7,<diag0=int6464#1
- # asm 2: pxor <b3=%xmm6,<diag0=%xmm0
- pxor %xmm6,%xmm0
- # qhasm: uint32323232 a4 += diag0
- # asm 1: paddd <diag0=int6464#1,<a4=int6464#5
- # asm 2: paddd <diag0=%xmm0,<a4=%xmm4
- paddd %xmm0,%xmm4
- # qhasm: a5 = diag0
- # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6
- # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5
- movdqa %xmm0,%xmm5
- # qhasm: b4 = a4
- # asm 1: movdqa <a4=int6464#5,>b4=int6464#7
- # asm 2: movdqa <a4=%xmm4,>b4=%xmm6
- movdqa %xmm4,%xmm6
- # qhasm: uint32323232 a4 <<= 7
- # asm 1: pslld $7,<a4=int6464#5
- # asm 2: pslld $7,<a4=%xmm4
- pslld $7,%xmm4
- # qhasm: uint32323232 b4 >>= 25
- # asm 1: psrld $25,<b4=int6464#7
- # asm 2: psrld $25,<b4=%xmm6
- psrld $25,%xmm6
- # qhasm: diag1 ^= a4
- # asm 1: pxor <a4=int6464#5,<diag1=int6464#2
- # asm 2: pxor <a4=%xmm4,<diag1=%xmm1
- pxor %xmm4,%xmm1
- # qhasm: diag1 ^= b4
- # asm 1: pxor <b4=int6464#7,<diag1=int6464#2
- # asm 2: pxor <b4=%xmm6,<diag1=%xmm1
- pxor %xmm6,%xmm1
- # qhasm: uint32323232 a5 += diag1
- # asm 1: paddd <diag1=int6464#2,<a5=int6464#6
- # asm 2: paddd <diag1=%xmm1,<a5=%xmm5
- paddd %xmm1,%xmm5
- # qhasm: a6 = diag1
- # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5
- # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4
- movdqa %xmm1,%xmm4
- # qhasm: b5 = a5
- # asm 1: movdqa <a5=int6464#6,>b5=int6464#7
- # asm 2: movdqa <a5=%xmm5,>b5=%xmm6
- movdqa %xmm5,%xmm6
- # qhasm: uint32323232 a5 <<= 9
- # asm 1: pslld $9,<a5=int6464#6
- # asm 2: pslld $9,<a5=%xmm5
- pslld $9,%xmm5
- # qhasm: uint32323232 b5 >>= 23
- # asm 1: psrld $23,<b5=int6464#7
- # asm 2: psrld $23,<b5=%xmm6
- psrld $23,%xmm6
- # qhasm: diag2 ^= a5
- # asm 1: pxor <a5=int6464#6,<diag2=int6464#3
- # asm 2: pxor <a5=%xmm5,<diag2=%xmm2
- pxor %xmm5,%xmm2
- # qhasm: diag1 <<<= 32
- # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2
- # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1
- pshufd $0x93,%xmm1,%xmm1
- # qhasm: diag2 ^= b5
- # asm 1: pxor <b5=int6464#7,<diag2=int6464#3
- # asm 2: pxor <b5=%xmm6,<diag2=%xmm2
- pxor %xmm6,%xmm2
- # qhasm: uint32323232 a6 += diag2
- # asm 1: paddd <diag2=int6464#3,<a6=int6464#5
- # asm 2: paddd <diag2=%xmm2,<a6=%xmm4
- paddd %xmm2,%xmm4
- # qhasm: a7 = diag2
- # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6
- # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5
- movdqa %xmm2,%xmm5
- # qhasm: b6 = a6
- # asm 1: movdqa <a6=int6464#5,>b6=int6464#7
- # asm 2: movdqa <a6=%xmm4,>b6=%xmm6
- movdqa %xmm4,%xmm6
- # qhasm: uint32323232 a6 <<= 13
- # asm 1: pslld $13,<a6=int6464#5
- # asm 2: pslld $13,<a6=%xmm4
- pslld $13,%xmm4
- # qhasm: uint32323232 b6 >>= 19
- # asm 1: psrld $19,<b6=int6464#7
- # asm 2: psrld $19,<b6=%xmm6
- psrld $19,%xmm6
- # qhasm: diag3 ^= a6
- # asm 1: pxor <a6=int6464#5,<diag3=int6464#4
- # asm 2: pxor <a6=%xmm4,<diag3=%xmm3
- pxor %xmm4,%xmm3
- # qhasm: diag2 <<<= 64
- # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3
- # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2
- pshufd $0x4e,%xmm2,%xmm2
- # qhasm: diag3 ^= b6
- # asm 1: pxor <b6=int6464#7,<diag3=int6464#4
- # asm 2: pxor <b6=%xmm6,<diag3=%xmm3
- pxor %xmm6,%xmm3
- # qhasm: uint32323232 a7 += diag3
- # asm 1: paddd <diag3=int6464#4,<a7=int6464#6
- # asm 2: paddd <diag3=%xmm3,<a7=%xmm5
- paddd %xmm3,%xmm5
- # qhasm: a0 = diag1
- # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5
- # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4
- movdqa %xmm1,%xmm4
- # qhasm: b7 = a7
- # asm 1: movdqa <a7=int6464#6,>b7=int6464#7
- # asm 2: movdqa <a7=%xmm5,>b7=%xmm6
- movdqa %xmm5,%xmm6
- # qhasm: uint32323232 a7 <<= 18
- # asm 1: pslld $18,<a7=int6464#6
- # asm 2: pslld $18,<a7=%xmm5
- pslld $18,%xmm5
- # qhasm: uint32323232 b7 >>= 14
- # asm 1: psrld $14,<b7=int6464#7
- # asm 2: psrld $14,<b7=%xmm6
- psrld $14,%xmm6
- # qhasm: diag0 ^= a7
- # asm 1: pxor <a7=int6464#6,<diag0=int6464#1
- # asm 2: pxor <a7=%xmm5,<diag0=%xmm0
- pxor %xmm5,%xmm0
- # qhasm: diag3 <<<= 96
- # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
- # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
- pshufd $0x39,%xmm3,%xmm3
- # qhasm: diag0 ^= b7
- # asm 1: pxor <b7=int6464#7,<diag0=int6464#1
- # asm 2: pxor <b7=%xmm6,<diag0=%xmm0
- pxor %xmm6,%xmm0
- # qhasm: uint32323232 a0 += diag0
- # asm 1: paddd <diag0=int6464#1,<a0=int6464#5
- # asm 2: paddd <diag0=%xmm0,<a0=%xmm4
- paddd %xmm0,%xmm4
- # qhasm: a1 = diag0
- # asm 1: movdqa <diag0=int6464#1,>a1=int6464#6
- # asm 2: movdqa <diag0=%xmm0,>a1=%xmm5
- movdqa %xmm0,%xmm5
- # qhasm: b0 = a0
- # asm 1: movdqa <a0=int6464#5,>b0=int6464#7
- # asm 2: movdqa <a0=%xmm4,>b0=%xmm6
- movdqa %xmm4,%xmm6
- # qhasm: uint32323232 a0 <<= 7
- # asm 1: pslld $7,<a0=int6464#5
- # asm 2: pslld $7,<a0=%xmm4
- pslld $7,%xmm4
- # qhasm: uint32323232 b0 >>= 25
- # asm 1: psrld $25,<b0=int6464#7
- # asm 2: psrld $25,<b0=%xmm6
- psrld $25,%xmm6
- # qhasm: diag3 ^= a0
- # asm 1: pxor <a0=int6464#5,<diag3=int6464#4
- # asm 2: pxor <a0=%xmm4,<diag3=%xmm3
- pxor %xmm4,%xmm3
- # qhasm: diag3 ^= b0
- # asm 1: pxor <b0=int6464#7,<diag3=int6464#4
- # asm 2: pxor <b0=%xmm6,<diag3=%xmm3
- pxor %xmm6,%xmm3
- # qhasm: uint32323232 a1 += diag3
- # asm 1: paddd <diag3=int6464#4,<a1=int6464#6
- # asm 2: paddd <diag3=%xmm3,<a1=%xmm5
- paddd %xmm3,%xmm5
- # qhasm: a2 = diag3
- # asm 1: movdqa <diag3=int6464#4,>a2=int6464#5
- # asm 2: movdqa <diag3=%xmm3,>a2=%xmm4
- movdqa %xmm3,%xmm4
- # qhasm: b1 = a1
- # asm 1: movdqa <a1=int6464#6,>b1=int6464#7
- # asm 2: movdqa <a1=%xmm5,>b1=%xmm6
- movdqa %xmm5,%xmm6
- # qhasm: uint32323232 a1 <<= 9
- # asm 1: pslld $9,<a1=int6464#6
- # asm 2: pslld $9,<a1=%xmm5
- pslld $9,%xmm5
- # qhasm: uint32323232 b1 >>= 23
- # asm 1: psrld $23,<b1=int6464#7
- # asm 2: psrld $23,<b1=%xmm6
- psrld $23,%xmm6
- # qhasm: diag2 ^= a1
- # asm 1: pxor <a1=int6464#6,<diag2=int6464#3
- # asm 2: pxor <a1=%xmm5,<diag2=%xmm2
- pxor %xmm5,%xmm2
- # qhasm: diag3 <<<= 32
- # asm 1: pshufd $0x93,<diag3=int6464#4,<diag3=int6464#4
- # asm 2: pshufd $0x93,<diag3=%xmm3,<diag3=%xmm3
- pshufd $0x93,%xmm3,%xmm3
- # qhasm: diag2 ^= b1
- # asm 1: pxor <b1=int6464#7,<diag2=int6464#3
- # asm 2: pxor <b1=%xmm6,<diag2=%xmm2
- pxor %xmm6,%xmm2
- # qhasm: uint32323232 a2 += diag2
- # asm 1: paddd <diag2=int6464#3,<a2=int6464#5
- # asm 2: paddd <diag2=%xmm2,<a2=%xmm4
- paddd %xmm2,%xmm4
- # qhasm: a3 = diag2
- # asm 1: movdqa <diag2=int6464#3,>a3=int6464#6
- # asm 2: movdqa <diag2=%xmm2,>a3=%xmm5
- movdqa %xmm2,%xmm5
- # qhasm: b2 = a2
- # asm 1: movdqa <a2=int6464#5,>b2=int6464#7
- # asm 2: movdqa <a2=%xmm4,>b2=%xmm6
- movdqa %xmm4,%xmm6
- # qhasm: uint32323232 a2 <<= 13
- # asm 1: pslld $13,<a2=int6464#5
- # asm 2: pslld $13,<a2=%xmm4
- pslld $13,%xmm4
- # qhasm: uint32323232 b2 >>= 19
- # asm 1: psrld $19,<b2=int6464#7
- # asm 2: psrld $19,<b2=%xmm6
- psrld $19,%xmm6
- # qhasm: diag1 ^= a2
- # asm 1: pxor <a2=int6464#5,<diag1=int6464#2
- # asm 2: pxor <a2=%xmm4,<diag1=%xmm1
- pxor %xmm4,%xmm1
- # qhasm: diag2 <<<= 64
- # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3
- # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2
- pshufd $0x4e,%xmm2,%xmm2
- # qhasm: diag1 ^= b2
- # asm 1: pxor <b2=int6464#7,<diag1=int6464#2
- # asm 2: pxor <b2=%xmm6,<diag1=%xmm1
- pxor %xmm6,%xmm1
- # qhasm: uint32323232 a3 += diag1
- # asm 1: paddd <diag1=int6464#2,<a3=int6464#6
- # asm 2: paddd <diag1=%xmm1,<a3=%xmm5
- paddd %xmm1,%xmm5
- # qhasm: a4 = diag3
- # asm 1: movdqa <diag3=int6464#4,>a4=int6464#5
- # asm 2: movdqa <diag3=%xmm3,>a4=%xmm4
- movdqa %xmm3,%xmm4
- # qhasm: b3 = a3
- # asm 1: movdqa <a3=int6464#6,>b3=int6464#7
- # asm 2: movdqa <a3=%xmm5,>b3=%xmm6
- movdqa %xmm5,%xmm6
- # qhasm: uint32323232 a3 <<= 18
- # asm 1: pslld $18,<a3=int6464#6
- # asm 2: pslld $18,<a3=%xmm5
- pslld $18,%xmm5
- # qhasm: uint32323232 b3 >>= 14
- # asm 1: psrld $14,<b3=int6464#7
- # asm 2: psrld $14,<b3=%xmm6
- psrld $14,%xmm6
- # qhasm: diag0 ^= a3
- # asm 1: pxor <a3=int6464#6,<diag0=int6464#1
- # asm 2: pxor <a3=%xmm5,<diag0=%xmm0
- pxor %xmm5,%xmm0
- # qhasm: diag1 <<<= 96
- # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
- # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
- pshufd $0x39,%xmm1,%xmm1
- # qhasm: diag0 ^= b3
- # asm 1: pxor <b3=int6464#7,<diag0=int6464#1
- # asm 2: pxor <b3=%xmm6,<diag0=%xmm0
- pxor %xmm6,%xmm0
- # qhasm: uint32323232 a4 += diag0
- # asm 1: paddd <diag0=int6464#1,<a4=int6464#5
- # asm 2: paddd <diag0=%xmm0,<a4=%xmm4
- paddd %xmm0,%xmm4
- # qhasm: a5 = diag0
- # asm 1: movdqa <diag0=int6464#1,>a5=int6464#6
- # asm 2: movdqa <diag0=%xmm0,>a5=%xmm5
- movdqa %xmm0,%xmm5
- # qhasm: b4 = a4
- # asm 1: movdqa <a4=int6464#5,>b4=int6464#7
- # asm 2: movdqa <a4=%xmm4,>b4=%xmm6
- movdqa %xmm4,%xmm6
- # qhasm: uint32323232 a4 <<= 7
- # asm 1: pslld $7,<a4=int6464#5
- # asm 2: pslld $7,<a4=%xmm4
- pslld $7,%xmm4
- # qhasm: uint32323232 b4 >>= 25
- # asm 1: psrld $25,<b4=int6464#7
- # asm 2: psrld $25,<b4=%xmm6
- psrld $25,%xmm6
- # qhasm: diag1 ^= a4
- # asm 1: pxor <a4=int6464#5,<diag1=int6464#2
- # asm 2: pxor <a4=%xmm4,<diag1=%xmm1
- pxor %xmm4,%xmm1
- # qhasm: diag1 ^= b4
- # asm 1: pxor <b4=int6464#7,<diag1=int6464#2
- # asm 2: pxor <b4=%xmm6,<diag1=%xmm1
- pxor %xmm6,%xmm1
- # qhasm: uint32323232 a5 += diag1
- # asm 1: paddd <diag1=int6464#2,<a5=int6464#6
- # asm 2: paddd <diag1=%xmm1,<a5=%xmm5
- paddd %xmm1,%xmm5
- # qhasm: a6 = diag1
- # asm 1: movdqa <diag1=int6464#2,>a6=int6464#5
- # asm 2: movdqa <diag1=%xmm1,>a6=%xmm4
- movdqa %xmm1,%xmm4
- # qhasm: b5 = a5
- # asm 1: movdqa <a5=int6464#6,>b5=int6464#7
- # asm 2: movdqa <a5=%xmm5,>b5=%xmm6
- movdqa %xmm5,%xmm6
- # qhasm: uint32323232 a5 <<= 9
- # asm 1: pslld $9,<a5=int6464#6
- # asm 2: pslld $9,<a5=%xmm5
- pslld $9,%xmm5
- # qhasm: uint32323232 b5 >>= 23
- # asm 1: psrld $23,<b5=int6464#7
- # asm 2: psrld $23,<b5=%xmm6
- psrld $23,%xmm6
- # qhasm: diag2 ^= a5
- # asm 1: pxor <a5=int6464#6,<diag2=int6464#3
- # asm 2: pxor <a5=%xmm5,<diag2=%xmm2
- pxor %xmm5,%xmm2
- # qhasm: diag1 <<<= 32
- # asm 1: pshufd $0x93,<diag1=int6464#2,<diag1=int6464#2
- # asm 2: pshufd $0x93,<diag1=%xmm1,<diag1=%xmm1
- pshufd $0x93,%xmm1,%xmm1
- # qhasm: diag2 ^= b5
- # asm 1: pxor <b5=int6464#7,<diag2=int6464#3
- # asm 2: pxor <b5=%xmm6,<diag2=%xmm2
- pxor %xmm6,%xmm2
- # qhasm: uint32323232 a6 += diag2
- # asm 1: paddd <diag2=int6464#3,<a6=int6464#5
- # asm 2: paddd <diag2=%xmm2,<a6=%xmm4
- paddd %xmm2,%xmm4
- # qhasm: a7 = diag2
- # asm 1: movdqa <diag2=int6464#3,>a7=int6464#6
- # asm 2: movdqa <diag2=%xmm2,>a7=%xmm5
- movdqa %xmm2,%xmm5
- # qhasm: b6 = a6
- # asm 1: movdqa <a6=int6464#5,>b6=int6464#7
- # asm 2: movdqa <a6=%xmm4,>b6=%xmm6
- movdqa %xmm4,%xmm6
- # qhasm: uint32323232 a6 <<= 13
- # asm 1: pslld $13,<a6=int6464#5
- # asm 2: pslld $13,<a6=%xmm4
- pslld $13,%xmm4
- # qhasm: uint32323232 b6 >>= 19
- # asm 1: psrld $19,<b6=int6464#7
- # asm 2: psrld $19,<b6=%xmm6
- psrld $19,%xmm6
- # qhasm: diag3 ^= a6
- # asm 1: pxor <a6=int6464#5,<diag3=int6464#4
- # asm 2: pxor <a6=%xmm4,<diag3=%xmm3
- pxor %xmm4,%xmm3
- # qhasm: diag2 <<<= 64
- # asm 1: pshufd $0x4e,<diag2=int6464#3,<diag2=int6464#3
- # asm 2: pshufd $0x4e,<diag2=%xmm2,<diag2=%xmm2
- pshufd $0x4e,%xmm2,%xmm2
- # qhasm: diag3 ^= b6
- # asm 1: pxor <b6=int6464#7,<diag3=int6464#4
- # asm 2: pxor <b6=%xmm6,<diag3=%xmm3
- pxor %xmm6,%xmm3
- # qhasm: unsigned>? i -= 4
- # asm 1: sub $4,<i=int64#4
- # asm 2: sub $4,<i=%rcx
- sub $4,%rcx
- # qhasm: uint32323232 a7 += diag3
- # asm 1: paddd <diag3=int6464#4,<a7=int6464#6
- # asm 2: paddd <diag3=%xmm3,<a7=%xmm5
- paddd %xmm3,%xmm5
- # qhasm: a0 = diag1
- # asm 1: movdqa <diag1=int6464#2,>a0=int6464#5
- # asm 2: movdqa <diag1=%xmm1,>a0=%xmm4
- movdqa %xmm1,%xmm4
- # qhasm: b7 = a7
- # asm 1: movdqa <a7=int6464#6,>b7=int6464#7
- # asm 2: movdqa <a7=%xmm5,>b7=%xmm6
- movdqa %xmm5,%xmm6
- # qhasm: uint32323232 a7 <<= 18
- # asm 1: pslld $18,<a7=int6464#6
- # asm 2: pslld $18,<a7=%xmm5
- pslld $18,%xmm5
- # qhasm: b0 = 0
- # asm 1: pxor >b0=int6464#8,>b0=int6464#8
- # asm 2: pxor >b0=%xmm7,>b0=%xmm7
- pxor %xmm7,%xmm7
- # qhasm: uint32323232 b7 >>= 14
- # asm 1: psrld $14,<b7=int6464#7
- # asm 2: psrld $14,<b7=%xmm6
- psrld $14,%xmm6
- # qhasm: diag0 ^= a7
- # asm 1: pxor <a7=int6464#6,<diag0=int6464#1
- # asm 2: pxor <a7=%xmm5,<diag0=%xmm0
- pxor %xmm5,%xmm0
- # qhasm: diag3 <<<= 96
- # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
- # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
- pshufd $0x39,%xmm3,%xmm3
- # qhasm: diag0 ^= b7
- # asm 1: pxor <b7=int6464#7,<diag0=int6464#1
- # asm 2: pxor <b7=%xmm6,<diag0=%xmm0
- pxor %xmm6,%xmm0
- # comment:fp stack unchanged by jump
- # qhasm: goto mainloop2 if unsigned>
- ja ._mainloop2
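
._mainloop2 is the Salsa20 round function on a single block in diagonal form: each quarter-round step is a paddd, a pslld/psrld pair that together form a 32-bit rotation, and two pxors, with pshufd lane rotations re-aligning the diagonals between the column half and the row half. The loop body is unrolled to two double rounds, so `i` counts 20 rounds down in steps of 4. A scalar C sketch of that structure, where `u32x4` models one xmm register (an illustration of the dataflow, not the generated code itself):

    #include <stdint.h>

    typedef struct { uint32_t v[4]; } u32x4;   /* models one xmm register */

    static u32x4 add4(u32x4 a, u32x4 b)        /* paddd */
    {
        for (int i = 0; i < 4; i++) a.v[i] += b.v[i];
        return a;
    }
    static u32x4 xor4(u32x4 a, u32x4 b)        /* pxor */
    {
        for (int i = 0; i < 4; i++) a.v[i] ^= b.v[i];
        return a;
    }
    static u32x4 rotl4(u32x4 a, int n)         /* pslld + psrld + pxor */
    {
        for (int i = 0; i < 4; i++)
            a.v[i] = (a.v[i] << n) | (a.v[i] >> (32 - n));
        return a;
    }
    static u32x4 lanes(u32x4 a, int k)         /* pshufd lane rotation */
    {
        u32x4 r;
        for (int i = 0; i < 4; i++) r.v[i] = a.v[(i + k) & 3];
        return r;
    }

    static void doubleround(u32x4 *d0, u32x4 *d1, u32x4 *d2, u32x4 *d3)
    {
        /* column half: rotation distances 7, 9, 13, 18 */
        *d3 = xor4(*d3, rotl4(add4(*d0, *d1), 7));
        *d2 = xor4(*d2, rotl4(add4(*d3, *d0), 9));
        *d1 = xor4(*d1, rotl4(add4(*d2, *d3), 13));
        *d0 = xor4(*d0, rotl4(add4(*d1, *d2), 18));
        *d3 = lanes(*d3, 3);                   /* pshufd $0x93 ("<<<= 32") */
        *d2 = lanes(*d2, 2);                   /* pshufd $0x4e ("<<<= 64") */
        *d1 = lanes(*d1, 1);                   /* pshufd $0x39 ("<<<= 96") */
        /* row half: d1 and d3 swap roles */
        *d1 = xor4(*d1, rotl4(add4(*d0, *d3), 7));
        *d2 = xor4(*d2, rotl4(add4(*d1, *d0), 9));
        *d3 = xor4(*d3, rotl4(add4(*d2, *d1), 13));
        *d0 = xor4(*d0, rotl4(add4(*d3, *d2), 18));
        *d1 = lanes(*d1, 3);
        *d2 = lanes(*d2, 2);
        *d3 = lanes(*d3, 1);
    }

    static void salsa20_rounds(u32x4 *d0, u32x4 *d1, u32x4 *d2, u32x4 *d3)
    {
        for (int i = 20; i > 0; i -= 4) {      /* sub $4 / ja ._mainloop2 */
            doubleround(d0, d1, d2, d3);       /* body is unrolled 2x     */
            doubleround(d0, d1, d2, d3);
        }
    }

In the asm, the lane shuffles are interleaved with the quarter-round steps for scheduling, but each shuffle happens only after its vector has been consumed, so the grouped form above is equivalent.
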
- # qhasm: uint32323232 diag0 += x0
- # asm 1: paddd <x0=stack128#4,<diag0=int6464#1
- # asm 2: paddd <x0=48(%rsp),<diag0=%xmm0
- paddd 48(%rsp),%xmm0
- # qhasm: uint32323232 diag1 += x1
- # asm 1: paddd <x1=stack128#1,<diag1=int6464#2
- # asm 2: paddd <x1=0(%rsp),<diag1=%xmm1
- paddd 0(%rsp),%xmm1
- # qhasm: uint32323232 diag2 += x2
- # asm 1: paddd <x2=stack128#2,<diag2=int6464#3
- # asm 2: paddd <x2=16(%rsp),<diag2=%xmm2
- paddd 16(%rsp),%xmm2
- # qhasm: uint32323232 diag3 += x3
- # asm 1: paddd <x3=stack128#3,<diag3=int6464#4
- # asm 2: paddd <x3=32(%rsp),<diag3=%xmm3
- paddd 32(%rsp),%xmm3
- # qhasm: in0 = diag0
- # asm 1: movd <diag0=int6464#1,>in0=int64#4
- # asm 2: movd <diag0=%xmm0,>in0=%rcx
- movd %xmm0,%rcx
- # qhasm: in12 = diag1
- # asm 1: movd <diag1=int6464#2,>in12=int64#5
- # asm 2: movd <diag1=%xmm1,>in12=%r8
- movd %xmm1,%r8
- # qhasm: in8 = diag2
- # asm 1: movd <diag2=int6464#3,>in8=int64#6
- # asm 2: movd <diag2=%xmm2,>in8=%r9
- movd %xmm2,%r9
- # qhasm: in4 = diag3
- # asm 1: movd <diag3=int6464#4,>in4=int64#7
- # asm 2: movd <diag3=%xmm3,>in4=%rax
- movd %xmm3,%rax
- # qhasm: diag0 <<<= 96
- # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1
- # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0
- pshufd $0x39,%xmm0,%xmm0
- # qhasm: diag1 <<<= 96
- # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
- # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
- pshufd $0x39,%xmm1,%xmm1
- # qhasm: diag2 <<<= 96
- # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3
- # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2
- pshufd $0x39,%xmm2,%xmm2
- # qhasm: diag3 <<<= 96
- # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
- # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
- pshufd $0x39,%xmm3,%xmm3
- # qhasm: (uint32) in0 ^= *(uint32 *) (m + 0)
- # asm 1: xorl 0(<m=int64#2),<in0=int64#4d
- # asm 2: xorl 0(<m=%rsi),<in0=%ecx
- xorl 0(%rsi),%ecx
- # qhasm: (uint32) in12 ^= *(uint32 *) (m + 48)
- # asm 1: xorl 48(<m=int64#2),<in12=int64#5d
- # asm 2: xorl 48(<m=%rsi),<in12=%r8d
- xorl 48(%rsi),%r8d
- # qhasm: (uint32) in8 ^= *(uint32 *) (m + 32)
- # asm 1: xorl 32(<m=int64#2),<in8=int64#6d
- # asm 2: xorl 32(<m=%rsi),<in8=%r9d
- xorl 32(%rsi),%r9d
- # qhasm: (uint32) in4 ^= *(uint32 *) (m + 16)
- # asm 1: xorl 16(<m=int64#2),<in4=int64#7d
- # asm 2: xorl 16(<m=%rsi),<in4=%eax
- xorl 16(%rsi),%eax
- # qhasm: *(uint32 *) (out + 0) = in0
- # asm 1: movl <in0=int64#4d,0(<out=int64#1)
- # asm 2: movl <in0=%ecx,0(<out=%rdi)
- movl %ecx,0(%rdi)
- # qhasm: *(uint32 *) (out + 48) = in12
- # asm 1: movl <in12=int64#5d,48(<out=int64#1)
- # asm 2: movl <in12=%r8d,48(<out=%rdi)
- movl %r8d,48(%rdi)
- # qhasm: *(uint32 *) (out + 32) = in8
- # asm 1: movl <in8=int64#6d,32(<out=int64#1)
- # asm 2: movl <in8=%r9d,32(<out=%rdi)
- movl %r9d,32(%rdi)
- # qhasm: *(uint32 *) (out + 16) = in4
- # asm 1: movl <in4=int64#7d,16(<out=int64#1)
- # asm 2: movl <in4=%eax,16(<out=%rdi)
- movl %eax,16(%rdi)
- # qhasm: in5 = diag0
- # asm 1: movd <diag0=int6464#1,>in5=int64#4
- # asm 2: movd <diag0=%xmm0,>in5=%rcx
- movd %xmm0,%rcx
- # qhasm: in1 = diag1
- # asm 1: movd <diag1=int6464#2,>in1=int64#5
- # asm 2: movd <diag1=%xmm1,>in1=%r8
- movd %xmm1,%r8
- # qhasm: in13 = diag2
- # asm 1: movd <diag2=int6464#3,>in13=int64#6
- # asm 2: movd <diag2=%xmm2,>in13=%r9
- movd %xmm2,%r9
- # qhasm: in9 = diag3
- # asm 1: movd <diag3=int6464#4,>in9=int64#7
- # asm 2: movd <diag3=%xmm3,>in9=%rax
- movd %xmm3,%rax
- # qhasm: diag0 <<<= 96
- # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1
- # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0
- pshufd $0x39,%xmm0,%xmm0
- # qhasm: diag1 <<<= 96
- # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
- # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
- pshufd $0x39,%xmm1,%xmm1
- # qhasm: diag2 <<<= 96
- # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3
- # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2
- pshufd $0x39,%xmm2,%xmm2
- # qhasm: diag3 <<<= 96
- # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
- # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
- pshufd $0x39,%xmm3,%xmm3
- # qhasm: (uint32) in5 ^= *(uint32 *) (m + 20)
- # asm 1: xorl 20(<m=int64#2),<in5=int64#4d
- # asm 2: xorl 20(<m=%rsi),<in5=%ecx
- xorl 20(%rsi),%ecx
- # qhasm: (uint32) in1 ^= *(uint32 *) (m + 4)
- # asm 1: xorl 4(<m=int64#2),<in1=int64#5d
- # asm 2: xorl 4(<m=%rsi),<in1=%r8d
- xorl 4(%rsi),%r8d
- # qhasm: (uint32) in13 ^= *(uint32 *) (m + 52)
- # asm 1: xorl 52(<m=int64#2),<in13=int64#6d
- # asm 2: xorl 52(<m=%rsi),<in13=%r9d
- xorl 52(%rsi),%r9d
- # qhasm: (uint32) in9 ^= *(uint32 *) (m + 36)
- # asm 1: xorl 36(<m=int64#2),<in9=int64#7d
- # asm 2: xorl 36(<m=%rsi),<in9=%eax
- xorl 36(%rsi),%eax
- # qhasm: *(uint32 *) (out + 20) = in5
- # asm 1: movl <in5=int64#4d,20(<out=int64#1)
- # asm 2: movl <in5=%ecx,20(<out=%rdi)
- movl %ecx,20(%rdi)
- # qhasm: *(uint32 *) (out + 4) = in1
- # asm 1: movl <in1=int64#5d,4(<out=int64#1)
- # asm 2: movl <in1=%r8d,4(<out=%rdi)
- movl %r8d,4(%rdi)
- # qhasm: *(uint32 *) (out + 52) = in13
- # asm 1: movl <in13=int64#6d,52(<out=int64#1)
- # asm 2: movl <in13=%r9d,52(<out=%rdi)
- movl %r9d,52(%rdi)
- # qhasm: *(uint32 *) (out + 36) = in9
- # asm 1: movl <in9=int64#7d,36(<out=int64#1)
- # asm 2: movl <in9=%eax,36(<out=%rdi)
- movl %eax,36(%rdi)
- # qhasm: in10 = diag0
- # asm 1: movd <diag0=int6464#1,>in10=int64#4
- # asm 2: movd <diag0=%xmm0,>in10=%rcx
- movd %xmm0,%rcx
- # qhasm: in6 = diag1
- # asm 1: movd <diag1=int6464#2,>in6=int64#5
- # asm 2: movd <diag1=%xmm1,>in6=%r8
- movd %xmm1,%r8
- # qhasm: in2 = diag2
- # asm 1: movd <diag2=int6464#3,>in2=int64#6
- # asm 2: movd <diag2=%xmm2,>in2=%r9
- movd %xmm2,%r9
- # qhasm: in14 = diag3
- # asm 1: movd <diag3=int6464#4,>in14=int64#7
- # asm 2: movd <diag3=%xmm3,>in14=%rax
- movd %xmm3,%rax
- # qhasm: diag0 <<<= 96
- # asm 1: pshufd $0x39,<diag0=int6464#1,<diag0=int6464#1
- # asm 2: pshufd $0x39,<diag0=%xmm0,<diag0=%xmm0
- pshufd $0x39,%xmm0,%xmm0
- # qhasm: diag1 <<<= 96
- # asm 1: pshufd $0x39,<diag1=int6464#2,<diag1=int6464#2
- # asm 2: pshufd $0x39,<diag1=%xmm1,<diag1=%xmm1
- pshufd $0x39,%xmm1,%xmm1
- # qhasm: diag2 <<<= 96
- # asm 1: pshufd $0x39,<diag2=int6464#3,<diag2=int6464#3
- # asm 2: pshufd $0x39,<diag2=%xmm2,<diag2=%xmm2
- pshufd $0x39,%xmm2,%xmm2
- # qhasm: diag3 <<<= 96
- # asm 1: pshufd $0x39,<diag3=int6464#4,<diag3=int6464#4
- # asm 2: pshufd $0x39,<diag3=%xmm3,<diag3=%xmm3
- pshufd $0x39,%xmm3,%xmm3
- # qhasm: (uint32) in10 ^= *(uint32 *) (m + 40)
- # asm 1: xorl 40(<m=int64#2),<in10=int64#4d
- # asm 2: xorl 40(<m=%rsi),<in10=%ecx
- xorl 40(%rsi),%ecx
- # qhasm: (uint32) in6 ^= *(uint32 *) (m + 24)
- # asm 1: xorl 24(<m=int64#2),<in6=int64#5d
- # asm 2: xorl 24(<m=%rsi),<in6=%r8d
- xorl 24(%rsi),%r8d
- # qhasm: (uint32) in2 ^= *(uint32 *) (m + 8)
- # asm 1: xorl 8(<m=int64#2),<in2=int64#6d
- # asm 2: xorl 8(<m=%rsi),<in2=%r9d
- xorl 8(%rsi),%r9d
- # qhasm: (uint32) in14 ^= *(uint32 *) (m + 56)
- # asm 1: xorl 56(<m=int64#2),<in14=int64#7d
- # asm 2: xorl 56(<m=%rsi),<in14=%eax
- xorl 56(%rsi),%eax
- # qhasm: *(uint32 *) (out + 40) = in10
- # asm 1: movl <in10=int64#4d,40(<out=int64#1)
- # asm 2: movl <in10=%ecx,40(<out=%rdi)
- movl %ecx,40(%rdi)
- # qhasm: *(uint32 *) (out + 24) = in6
- # asm 1: movl <in6=int64#5d,24(<out=int64#1)
- # asm 2: movl <in6=%r8d,24(<out=%rdi)
- movl %r8d,24(%rdi)
- # qhasm: *(uint32 *) (out + 8) = in2
- # asm 1: movl <in2=int64#6d,8(<out=int64#1)
- # asm 2: movl <in2=%r9d,8(<out=%rdi)
- movl %r9d,8(%rdi)
- # qhasm: *(uint32 *) (out + 56) = in14
- # asm 1: movl <in14=int64#7d,56(<out=int64#1)
- # asm 2: movl <in14=%eax,56(<out=%rdi)
- movl %eax,56(%rdi)
- # qhasm: in15 = diag0
- # asm 1: movd <diag0=int6464#1,>in15=int64#4
- # asm 2: movd <diag0=%xmm0,>in15=%rcx
- movd %xmm0,%rcx
- # qhasm: in11 = diag1
- # asm 1: movd <diag1=int6464#2,>in11=int64#5
- # asm 2: movd <diag1=%xmm1,>in11=%r8
- movd %xmm1,%r8
- # qhasm: in7 = diag2
- # asm 1: movd <diag2=int6464#3,>in7=int64#6
- # asm 2: movd <diag2=%xmm2,>in7=%r9
- movd %xmm2,%r9
- # qhasm: in3 = diag3
- # asm 1: movd <diag3=int6464#4,>in3=int64#7
- # asm 2: movd <diag3=%xmm3,>in3=%rax
- movd %xmm3,%rax
- # qhasm: (uint32) in15 ^= *(uint32 *) (m + 60)
- # asm 1: xorl 60(<m=int64#2),<in15=int64#4d
- # asm 2: xorl 60(<m=%rsi),<in15=%ecx
- xorl 60(%rsi),%ecx
- # qhasm: (uint32) in11 ^= *(uint32 *) (m + 44)
- # asm 1: xorl 44(<m=int64#2),<in11=int64#5d
- # asm 2: xorl 44(<m=%rsi),<in11=%r8d
- xorl 44(%rsi),%r8d
- # qhasm: (uint32) in7 ^= *(uint32 *) (m + 28)
- # asm 1: xorl 28(<m=int64#2),<in7=int64#6d
- # asm 2: xorl 28(<m=%rsi),<in7=%r9d
- xorl 28(%rsi),%r9d
- # qhasm: (uint32) in3 ^= *(uint32 *) (m + 12)
- # asm 1: xorl 12(<m=int64#2),<in3=int64#7d
- # asm 2: xorl 12(<m=%rsi),<in3=%eax
- xorl 12(%rsi),%eax
- # qhasm: *(uint32 *) (out + 60) = in15
- # asm 1: movl <in15=int64#4d,60(<out=int64#1)
- # asm 2: movl <in15=%ecx,60(<out=%rdi)
- movl %ecx,60(%rdi)
- # qhasm: *(uint32 *) (out + 44) = in11
- # asm 1: movl <in11=int64#5d,44(<out=int64#1)
- # asm 2: movl <in11=%r8d,44(<out=%rdi)
- movl %r8d,44(%rdi)
- # qhasm: *(uint32 *) (out + 28) = in7
- # asm 1: movl <in7=int64#6d,28(<out=int64#1)
- # asm 2: movl <in7=%r9d,28(<out=%rdi)
- movl %r9d,28(%rdi)
- # qhasm: *(uint32 *) (out + 12) = in3
- # asm 1: movl <in3=int64#7d,12(<out=int64#1)
- # asm 2: movl <in3=%eax,12(<out=%rdi)
- movl %eax,12(%rdi)
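
After the rounds, paddd folds the saved input x0..x3 back in (the Salsa20 feedforward), and the four lanes of each diagonal are peeled off with movd plus pshufd $0x39, XORed against the message, and stored. The scattered offsets (0, 48, 32, 16, then 20, 4, 52, 36, and so on) are just the diagonal lanes mapped back to the natural word order. Per word the operation reduces to the sketch below; `load32`/`store32` are explicit little-endian helpers, whereas the asm simply reads the words in place:

    #include <stdint.h>

    static uint32_t load32(const uint8_t *p)   /* little-endian load */
    {
        return (uint32_t)p[0] | ((uint32_t)p[1] << 8) |
               ((uint32_t)p[2] << 16) | ((uint32_t)p[3] << 24);
    }
    static void store32(uint8_t *p, uint32_t w)
    {
        p[0] = (uint8_t)w;         p[1] = (uint8_t)(w >> 8);
        p[2] = (uint8_t)(w >> 16); p[3] = (uint8_t)(w >> 24);
    }

    /* xk: word k of the state after 20 rounds; jk: the saved input word */
    static void xor_word(uint8_t *out, const uint8_t *m, unsigned k,
                         uint32_t xk, uint32_t jk)
    {
        store32(out + 4 * k, (xk + jk) ^ load32(m + 4 * k));
    }
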
- # qhasm: bytes = bytes_backup
- # asm 1: movq <bytes_backup=stack64#8,>bytes=int64#6
- # asm 2: movq <bytes_backup=408(%rsp),>bytes=%r9
- movq 408(%rsp),%r9
- # qhasm: in8 = ((uint32 *)&x2)[0]
- # asm 1: movl <x2=stack128#2,>in8=int64#4d
- # asm 2: movl <x2=16(%rsp),>in8=%ecx
- movl 16(%rsp),%ecx
- # qhasm: in9 = ((uint32 *)&x3)[1]
- # asm 1: movl 4+<x3=stack128#3,>in9=int64#5d
- # asm 2: movl 4+<x3=32(%rsp),>in9=%r8d
- movl 4+32(%rsp),%r8d
- # qhasm: in8 += 1
- # asm 1: add $1,<in8=int64#4
- # asm 2: add $1,<in8=%rcx
- add $1,%rcx
- # qhasm: in9 <<= 32
- # asm 1: shl $32,<in9=int64#5
- # asm 2: shl $32,<in9=%r8
- shl $32,%r8
- # qhasm: in8 += in9
- # asm 1: add <in9=int64#5,<in8=int64#4
- # asm 2: add <in9=%r8,<in8=%rcx
- add %r8,%rcx
- # qhasm: in9 = in8
- # asm 1: mov <in8=int64#4,>in9=int64#5
- # asm 2: mov <in8=%rcx,>in9=%r8
- mov %rcx,%r8
- # qhasm: (uint64) in9 >>= 32
- # asm 1: shr $32,<in9=int64#5
- # asm 2: shr $32,<in9=%r8
- shr $32,%r8
- # qhasm: ((uint32 *)&x2)[0] = in8
- # asm 1: movl <in8=int64#4d,>x2=stack128#2
- # asm 2: movl <in8=%ecx,>x2=16(%rsp)
- movl %ecx,16(%rsp)
- # qhasm: ((uint32 *)&x3)[1] = in9
- # asm 1: movl <in9=int64#5d,4+<x3=stack128#3
- # asm 2: movl <in9=%r8d,4+<x3=32(%rsp)
- movl %r8d,4+32(%rsp)
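
Words 8 and 9 of the Salsa20 input form the 64-bit block counter; because of the diagonal layout they live in lane 0 of x2 (low half) and lane 1 of x3 (high half), so the code reassembles the 64-bit value, bumps it, and splits it back. Note the asm adds 1 to the zero-extended low word before merging in the high half, which makes the carry land in bit 32 exactly as a plain 64-bit increment would. Equivalent C:

    #include <stdint.h>

    /* lo = x2 lane 0 (word 8); hi = x3 lane 1 (word 9) */
    static void bump_block_counter(uint32_t *lo, uint32_t *hi)
    {
        uint64_t c = ((uint64_t)*hi << 32) | *lo;
        c += 1;                    /* shl $32 / add, with add $1 folded in */
        *lo = (uint32_t)c;
        *hi = (uint32_t)(c >> 32);
    }
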
- # qhasm: unsigned>? unsigned<? bytes - 64
- # asm 1: cmp $64,<bytes=int64#6
- # asm 2: cmp $64,<bytes=%r9
- cmp $64,%r9
- # comment:fp stack unchanged by jump
- # qhasm: goto bytesatleast65 if unsigned>
- ja ._bytesatleast65
- # comment:fp stack unchanged by jump
- # qhasm: goto bytesatleast64 if !unsigned<
- jae ._bytesatleast64
- # qhasm: m = out
- # asm 1: mov <out=int64#1,>m=int64#2
- # asm 2: mov <out=%rdi,>m=%rsi
- mov %rdi,%rsi
- # qhasm: out = ctarget
- # asm 1: mov <ctarget=int64#3,>out=int64#1
- # asm 2: mov <ctarget=%rdx,>out=%rdi
- mov %rdx,%rdi
- # qhasm: i = bytes
- # asm 1: mov <bytes=int64#6,>i=int64#4
- # asm 2: mov <bytes=%r9,>i=%rcx
- mov %r9,%rcx
- # qhasm: while (i) { *out++ = *m++; --i }
- rep movsb
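
This is the copy-out half of the tmp-buffer handling sketched above: when the final block was rendered into `tmp` (bytes < 64, so neither branch above was taken), `m` is pointed at the scratch copy, `out` is restored from `ctarget`, and `rep movsb` moves only the `bytes` bytes that were actually requested to the real destination.
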
- # comment:fp stack unchanged by fallthrough
- # qhasm: bytesatleast64:
- ._bytesatleast64:
- # comment:fp stack unchanged by fallthrough
- # qhasm: done:
- ._done:
- # qhasm: r11_caller = r11_stack
- # asm 1: movq <r11_stack=stack64#1,>r11_caller=int64#9
- # asm 2: movq <r11_stack=352(%rsp),>r11_caller=%r11
- movq 352(%rsp),%r11
- # qhasm: r12_caller = r12_stack
- # asm 1: movq <r12_stack=stack64#2,>r12_caller=int64#10
- # asm 2: movq <r12_stack=360(%rsp),>r12_caller=%r12
- movq 360(%rsp),%r12
- # qhasm: r13_caller = r13_stack
- # asm 1: movq <r13_stack=stack64#3,>r13_caller=int64#11
- # asm 2: movq <r13_stack=368(%rsp),>r13_caller=%r13
- movq 368(%rsp),%r13
- # qhasm: r14_caller = r14_stack
- # asm 1: movq <r14_stack=stack64#4,>r14_caller=int64#12
- # asm 2: movq <r14_stack=376(%rsp),>r14_caller=%r14
- movq 376(%rsp),%r14
- # qhasm: r15_caller = r15_stack
- # asm 1: movq <r15_stack=stack64#5,>r15_caller=int64#13
- # asm 2: movq <r15_stack=384(%rsp),>r15_caller=%r15
- movq 384(%rsp),%r15
- # qhasm: rbx_caller = rbx_stack
- # asm 1: movq <rbx_stack=stack64#6,>rbx_caller=int64#14
- # asm 2: movq <rbx_stack=392(%rsp),>rbx_caller=%rbx
- movq 392(%rsp),%rbx
- # qhasm: rbp_caller = rbp_stack
- # asm 1: movq <rbp_stack=stack64#7,>rbp_caller=int64#15
- # asm 2: movq <rbp_stack=400(%rsp),>rbp_caller=%rbp
- movq 400(%rsp),%rbp
- # qhasm: leave
- add %r11,%rsp
- xor %rax,%rax
- xor %rdx,%rdx
- ret
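
The qhasm `leave` expands to the unwind sequence above: the callee-saved registers come back from their stack slots, `add %r11,%rsp` undoes the aligned-frame adjustment made in the prologue (the saved offset was reloaded into %r11 a few instructions earlier), and %rax/%rdx are zeroed so the stream function returns 0.
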
- # qhasm: bytesatleast65:
- ._bytesatleast65:
- # qhasm: bytes -= 64
- # asm 1: sub $64,<bytes=int64#6
- # asm 2: sub $64,<bytes=%r9
- sub $64,%r9
- # qhasm: out += 64
- # asm 1: add $64,<out=int64#1
- # asm 2: add $64,<out=%rdi
- add $64,%rdi
- # qhasm: m += 64
- # asm 1: add $64,<m=int64#2
- # asm 2: add $64,<m=%rsi
- add $64,%rsi
- # comment:fp stack unchanged by jump
- # qhasm: goto bytesbetween1and255
- jmp ._bytesbetween1and255