NUMclapack.cpp 505 KB

430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352
/* NUMclapack.c */
/* -- LAPACK driver routines (version 3.0) -- Univ. of Tennessee, Univ. of
    California Berkeley, NAG Ltd., Courant Institute, Argonne National Lab,
    and Rice University October 31, 1999 -- translated by f2c (version
    19990503).
    Adapted by David Weenink 20021201
    djmw 20030205 Latest modification
*/

/* #include "blaswrap.h" */
#include "NUMf2c.h"
#include "NUMclapack.h"
#include "NUMcblas.h"
#include "NUM2.h"
#include "melder.h"

#define FALSE 0
#define TRUE 1

/* Table of constant values */
static integer c__0 = 0;
static integer c__1 = 1;
static integer c_n1 = -1;
static integer c__2 = 2;
static integer c__3 = 3;
static integer c__4 = 4;
static integer c__6 = 6;
static integer c__10 = 10;
static integer c__11 = 11;
static double c_b15 = -.125;
static double c_b49 = 1.;
static double c_b72 = -1.;
static double c_b74 = 0.;
static double c_b108 = 1.;
static double c_b416 = 0.;
static double c_b438 = 1.;

#define MAX(m,n) ((m) > (n) ? (m) : (n))
#define MIN(m,n) ((m) < (n) ? (m) : (n))

#define a_ref(a_1,a_2) a[(a_2)*a_dim1 + a_1]
#define c___ref(a_1,a_2) c__[(a_2)*c_dim1 + a_1]

/* --------------------------------------------------- */

#define u_ref(a_1,a_2) u[(a_2)*u_dim1 + a_1]
#define vt_ref(a_1,a_2) vt[(a_2)*vt_dim1 + a_1]
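
/* NUMlapack_dbdsqr computes the singular values, and optionally the
   singular vectors, of a real n-by-n (upper or lower) bidiagonal matrix B,
   B = Q * S * P', using the implicit-shift QR algorithm.  On exit the
   singular values are in d, sorted into decreasing order; if requested,
   the rotations are accumulated into VT (rows, giving P' * VT), U
   (columns, giving U * Q) and C (rows, giving Q' * C). */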
int NUMlapack_dbdsqr (const char *uplo, integer *n, integer *ncvt, integer *nru, integer *ncc, double *d__, double *e, double *vt,
    integer *ldvt, double *u, integer *ldu, double *c__, integer *ldc, double *work, integer *info) {
    /* System generated locals */
    integer c_dim1, c_offset, u_dim1, u_offset, vt_dim1, vt_offset, i__1, i__2;
    double d__1, d__2, d__3, d__4;
    /* Local variables */
    static double abse;
    static integer idir;
    static double abss;
    static integer oldm;
    static double cosl;
    static integer isub, iter;
    static double unfl, sinl, cosr, smin, smax, sinr;
    static double f, g, h__;
    static integer i__, j, m;
    static double r__;
    static double oldcs;
    static integer oldll;
    static double shift, sigmn, oldsn;
    static integer maxit;
    static double sminl, sigmx;
    static integer lower;
    static double cs;
    static integer ll;
    static double sn, mu;
    static double sminoa, thresh;
    static integer rotate;
    static double sminlo;
    static integer nm1;
    static double tolmul;
    static integer nm12, nm13, lll;
    static double eps, sll, tol;

    /* Parameter adjustments */
    --d__;
    --e;
    vt_dim1 = *ldvt;
    vt_offset = 1 + vt_dim1 * 1;
    vt -= vt_offset;
    u_dim1 = *ldu;
    u_offset = 1 + u_dim1 * 1;
    u -= u_offset;
    c_dim1 = *ldc;
    c_offset = 1 + c_dim1 * 1;
    c__ -= c_offset;
    --work;

    /* Function Body */
    *info = 0;
    lower = lsame_ (uplo, "L");
    if (!lsame_ (uplo, "U") && !lower) {
        *info = -1;
    } else if (*n < 0) {
        *info = -2;
    } else if (*ncvt < 0) {
        *info = -3;
    } else if (*nru < 0) {
        *info = -4;
    } else if (*ncc < 0) {
        *info = -5;
    } else if ((*ncvt == 0 && *ldvt < 1) || (*ncvt > 0 && *ldvt < MAX (1, *n))) {
        *info = -9;
    } else if (*ldu < MAX (1, *nru)) {
        *info = -11;
    } else if ((*ncc == 0 && *ldc < 1) || (*ncc > 0 && *ldc < MAX (1, *n))) {
        *info = -13;
    }
    if (*info != 0) {
        i__1 = - (*info);
        xerbla_ ("DBDSQR", &i__1);
        return 0;
    }
    if (*n == 0) {
        return 0;
    }
    if (*n == 1) {
        goto L160;
    }
    /* ROTATE is true if any singular vectors desired, false otherwise */
    rotate = *ncvt > 0 || *nru > 0 || *ncc > 0;
    /* If no singular vectors desired, use qd algorithm */
    if (!rotate) {
        NUMlapack_dlasq1 (n, &d__[1], &e[1], &work[1], info);
        return 0;
    }
    nm1 = *n - 1;
    nm12 = nm1 + nm1;
    nm13 = nm12 + nm1;
    idir = 0;
    /* Get machine constants */
    eps = NUMblas_dlamch ("Epsilon");
    unfl = NUMblas_dlamch ("Safe minimum");
    /* If matrix lower bidiagonal, rotate to be upper bidiagonal by applying
       Givens rotations on the left */
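    /* The rotation cosines are saved in work [1 .. n-1] and the sines in
       work [n .. 2*n-2], so that the same rotations can be applied to the
       rows of U and C by the NUMlapack_dlasr calls that follow. */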
    if (lower) {
        i__1 = *n - 1;
        for (i__ = 1; i__ <= i__1; ++i__) {
            NUMlapack_dlartg (&d__[i__], &e[i__], &cs, &sn, &r__);
            d__[i__] = r__;
            e[i__] = sn * d__[i__ + 1];
            d__[i__ + 1] = cs * d__[i__ + 1];
            work[i__] = cs;
            work[nm1 + i__] = sn;
            /* L10: */
        }
        /* Update singular vectors if desired */
        if (*nru > 0) {
            NUMlapack_dlasr ("R", "V", "F", nru, n, &work[1], &work[*n], &u[u_offset], ldu);
        }
        if (*ncc > 0) {
            NUMlapack_dlasr ("L", "V", "F", n, ncc, &work[1], &work[*n], &c__[c_offset], ldc);
        }
    }
    /* Compute singular values to relative accuracy TOL (By setting TOL to be
       negative, algorithm will compute singular values to absolute accuracy
       ABS(TOL)*norm(input matrix))
       Computing MAX Computing MIN */
    d__3 = 100., d__4 = pow (eps, c_b15);
    d__1 = 10., d__2 = MIN (d__3, d__4);
    tolmul = MAX (d__1, d__2);
    tol = tolmul * eps;
    /* Compute approximate maximum, minimum singular values */
    smax = 0.;
    i__1 = *n;
    for (i__ = 1; i__ <= i__1; ++i__) {
        /* Computing MAX */
        d__2 = smax, d__3 = (d__1 = d__[i__], fabs (d__1));
        smax = MAX (d__2, d__3);
        /* L20: */
    }
    i__1 = *n - 1;
    for (i__ = 1; i__ <= i__1; ++i__) {
        /* Computing MAX */
        d__2 = smax, d__3 = (d__1 = e[i__], fabs (d__1));
        smax = MAX (d__2, d__3);
        /* L30: */
    }
    sminl = 0.;
    if (tol >= 0.) {
        /* Relative accuracy desired */
        sminoa = fabs (d__[1]);
        if (sminoa == 0.) {
            goto L50;
        }
        mu = sminoa;
        i__1 = *n;
        for (i__ = 2; i__ <= i__1; ++i__) {
            mu = (d__2 = d__[i__], fabs (d__2)) * (mu / (mu + (d__1 = e[i__ - 1], fabs (d__1))));
            sminoa = MIN (sminoa, mu);
            if (sminoa == 0.) {
                goto L50;
            }
            /* L40: */
        }
L50:
        sminoa /= sqrt ((double) (*n));
        /* Computing MAX */
        d__1 = tol * sminoa, d__2 = *n * 6 * *n * unfl;
        thresh = MAX (d__1, d__2);
    } else {
        /* Absolute accuracy desired
           Computing MAX */
        d__1 = fabs (tol) * smax, d__2 = *n * 6 * *n * unfl;
        thresh = MAX (d__1, d__2);
    }
    /* Prepare for main iteration loop for the singular values (MAXIT is the
       maximum number of passes through the inner loop permitted before
       nonconvergence signalled.) */
    maxit = *n * 6 * *n;
    iter = 0;
    oldll = -1;
    oldm = -1;
    /* M points to last element of unconverged part of matrix */
    m = *n;
    /* Begin main iteration loop */
L60:
    /* Check for convergence or exceeding iteration count */
    if (m <= 1) {
        goto L160;
    }
    if (iter > maxit) {
        goto L200;
    }
    /* Find diagonal block of matrix to work on */
    if (tol < 0. && (d__1 = d__[m], fabs (d__1)) <= thresh) {
        d__[m] = 0.;
    }
    smax = (d__1 = d__[m], fabs (d__1));
    smin = smax;
    i__1 = m - 1;
    for (lll = 1; lll <= i__1; ++lll) {
        ll = m - lll;
        abss = (d__1 = d__[ll], fabs (d__1));
        abse = (d__1 = e[ll], fabs (d__1));
        if (tol < 0. && abss <= thresh) {
            d__[ll] = 0.;
        }
        if (abse <= thresh) {
            goto L80;
        }
        smin = MIN (smin, abss);
        /* Computing MAX */
        d__1 = MAX (smax, abss);
        smax = MAX (d__1, abse);
        /* L70: */
    }
    ll = 0;
    goto L90;
L80:
    e[ll] = 0.;
    /* Matrix splits since E(LL) = 0 */
    if (ll == m - 1) {
        /* Convergence of bottom singular value, return to top of loop */
        --m;
        goto L60;
    }
L90:
    ++ll;
    /* E(LL) through E(M-1) are nonzero, E(LL-1) is zero */
    if (ll == m - 1) {
        /* 2 by 2 block, handle separately */
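        /* NUMlapack_dlasv2 returns the exact singular values of this
           trailing 2-by-2 block, together with the left and right rotation
           angles, so the block needs no further QR iteration. */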
        NUMlapack_dlasv2 (&d__[m - 1], &e[m - 1], &d__[m], &sigmn, &sigmx, &sinr, &cosr, &sinl, &cosl);
        d__[m - 1] = sigmx;
        e[m - 1] = 0.;
        d__[m] = sigmn;
        /* Compute singular vectors, if desired */
        if (*ncvt > 0) {
            NUMblas_drot (ncvt, &vt_ref (m - 1, 1), ldvt, &vt_ref (m, 1), ldvt, &cosr, &sinr);
        }
        if (*nru > 0) {
            NUMblas_drot (nru, &u_ref (1, m - 1), &c__1, &u_ref (1, m), &c__1, &cosl, &sinl);
        }
        if (*ncc > 0) {
            NUMblas_drot (ncc, &c___ref (m - 1, 1), ldc, &c___ref (m, 1), ldc, &cosl, &sinl);
        }
        m += -2;
        goto L60;
    }
    /* If working on new submatrix, choose shift direction (from larger end
       diagonal element towards smaller) */
    if (ll > oldm || m < oldll) {
        if ((d__1 = d__[ll], fabs (d__1)) >= (d__2 = d__[m], fabs (d__2))) {
            /* Chase bulge from top (big end) to bottom (small end) */
            idir = 1;
        } else {
            /* Chase bulge from bottom (big end) to top (small end) */
            idir = 2;
        }
    }
    /* Apply convergence tests */
    if (idir == 1) {
        /* Run convergence test in forward direction First apply standard
           test to bottom of matrix */
        if ((d__2 = e[m - 1], fabs (d__2)) <= fabs (tol) * (d__1 = d__[m], fabs (d__1)) || (tol < 0. &&
                (d__3 = e[m - 1], fabs (d__3)) <= thresh)) {
            e[m - 1] = 0.;
            goto L60;
        }
        if (tol >= 0.) {
            /* If relative accuracy desired, apply convergence criterion
               forward */
            mu = (d__1 = d__[ll], fabs (d__1));
            sminl = mu;
            i__1 = m - 1;
            for (lll = ll; lll <= i__1; ++lll) {
                if ((d__1 = e[lll], fabs (d__1)) <= tol * mu) {
                    e[lll] = 0.;
                    goto L60;
                }
                sminlo = sminl;
                mu = (d__2 = d__[lll + 1], fabs (d__2)) * (mu / (mu + (d__1 = e[lll], fabs (d__1))));
                sminl = MIN (sminl, mu);
                /* L100: */
            }
        }
    } else {
        /* Run convergence test in backward direction First apply standard
           test to top of matrix */
        if ((d__2 = e[ll], fabs (d__2)) <= fabs (tol) * (d__1 = d__[ll], fabs (d__1)) || (tol < 0. &&
                (d__3 = e[ll], fabs (d__3)) <= thresh)) {
            e[ll] = 0.;
            goto L60;
        }
        if (tol >= 0.) {
            /* If relative accuracy desired, apply convergence criterion
               backward */
            mu = (d__1 = d__[m], fabs (d__1));
            sminl = mu;
            i__1 = ll;
            for (lll = m - 1; lll >= i__1; --lll) {
                if ((d__1 = e[lll], fabs (d__1)) <= tol * mu) {
                    e[lll] = 0.;
                    goto L60;
                }
                sminlo = sminl;
                mu = (d__2 = d__[lll], fabs (d__2)) * (mu / (mu + (d__1 = e[lll], fabs (d__1))));
                sminl = MIN (sminl, mu);
                /* L110: */
            }
        }
    }
    oldll = ll;
    oldm = m;
    /* Compute shift. First, test if shifting would ruin relative accuracy,
       and if so set the shift to zero.
       Computing MAX */
    d__1 = eps, d__2 = tol * .01;
    if (tol >= 0. && *n * tol * (sminl / smax) <= MAX (d__1, d__2)) {
        /* Use a zero shift to avoid loss of relative accuracy */
        shift = 0.;
    } else {
        /* Compute the shift from 2-by-2 block at end of matrix */
        if (idir == 1) {
            sll = (d__1 = d__[ll], fabs (d__1));
            NUMlapack_dlas2 (&d__[m - 1], &e[m - 1], &d__[m], &shift, &r__);
        } else {
            sll = (d__1 = d__[m], fabs (d__1));
            NUMlapack_dlas2 (&d__[ll], &e[ll], &d__[ll + 1], &shift, &r__);
        }
        /* Test if shift negligible, and if so set to zero */
        if (sll > 0.) {
            /* Computing 2nd power */
            d__1 = shift / sll;
            if (d__1 * d__1 < eps) {
                shift = 0.;
            }
        }
    }
    /* Increment iteration count */
    iter = iter + m - ll;
    /* If SHIFT = 0, do simplified QR iteration */
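    /* The zero-shift sweep is the Demmel-Kahan variant of bidiagonal QR,
       which computes even the smallest singular values to high relative
       accuracy; the shifted sweep below converges faster but may trade away
       some of that accuracy. */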
    if (shift == 0.) {
        if (idir == 1) {
            /* Chase bulge from top to bottom Save cosines and sines for
               later singular vector updates */
            cs = 1.;
            oldcs = 1.;
            i__1 = m - 1;
            for (i__ = ll; i__ <= i__1; ++i__) {
                d__1 = d__[i__] * cs;
                NUMlapack_dlartg (&d__1, &e[i__], &cs, &sn, &r__);
                if (i__ > ll) {
                    e[i__ - 1] = oldsn * r__;
                }
                d__1 = oldcs * r__;
                d__2 = d__[i__ + 1] * sn;
                NUMlapack_dlartg (&d__1, &d__2, &oldcs, &oldsn, &d__[i__]);
                work[i__ - ll + 1] = cs;
                work[i__ - ll + 1 + nm1] = sn;
                work[i__ - ll + 1 + nm12] = oldcs;
                work[i__ - ll + 1 + nm13] = oldsn;
                /* L120: */
            }
            h__ = d__[m] * cs;
            d__[m] = h__ * oldcs;
            e[m - 1] = h__ * oldsn;
            /* Update singular vectors */
            if (*ncvt > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("L", "V", "F", &i__1, ncvt, &work[1], &work[*n], &vt_ref (ll, 1), ldvt);
            }
            if (*nru > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("R", "V", "F", nru, &i__1, &work[nm12 + 1], &work[nm13 + 1], &u_ref (1, ll),
                    ldu);
            }
            if (*ncc > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("L", "V", "F", &i__1, ncc, &work[nm12 + 1], &work[nm13 + 1], &c___ref (ll, 1),
                    ldc);
            }
            /* Test convergence */
            if ((d__1 = e[m - 1], fabs (d__1)) <= thresh) {
                e[m - 1] = 0.;
            }
        } else {
            /* Chase bulge from bottom to top Save cosines and sines for
               later singular vector updates */
            cs = 1.;
            oldcs = 1.;
            i__1 = ll + 1;
            for (i__ = m; i__ >= i__1; --i__) {
                d__1 = d__[i__] * cs;
                NUMlapack_dlartg (&d__1, &e[i__ - 1], &cs, &sn, &r__);
                if (i__ < m) {
                    e[i__] = oldsn * r__;
                }
                d__1 = oldcs * r__;
                d__2 = d__[i__ - 1] * sn;
                NUMlapack_dlartg (&d__1, &d__2, &oldcs, &oldsn, &d__[i__]);
                work[i__ - ll] = cs;
                work[i__ - ll + nm1] = -sn;
                work[i__ - ll + nm12] = oldcs;
                work[i__ - ll + nm13] = -oldsn;
                /* L130: */
            }
            h__ = d__[ll] * cs;
            d__[ll] = h__ * oldcs;
            e[ll] = h__ * oldsn;
            /* Update singular vectors */
            if (*ncvt > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("L", "V", "B", &i__1, ncvt, &work[nm12 + 1], &work[nm13 + 1], &vt_ref (ll, 1),
                    ldvt);
            }
            if (*nru > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("R", "V", "B", nru, &i__1, &work[1], &work[*n], &u_ref (1, ll), ldu);
            }
            if (*ncc > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("L", "V", "B", &i__1, ncc, &work[1], &work[*n], &c___ref (ll, 1), ldc);
            }
            /* Test convergence */
            if ((d__1 = e[ll], fabs (d__1)) <= thresh) {
                e[ll] = 0.;
            }
        }
    } else {
        /* Use nonzero shift */
        if (idir == 1) {
            /* Chase bulge from top to bottom Save cosines and sines for
               later singular vector updates */
            f = ((d__1 = d__[ll], fabs (d__1)) - shift) * (d_sign (&c_b49, &d__[ll]) + shift / d__[ll]);
            g = e[ll];
            i__1 = m - 1;
            for (i__ = ll; i__ <= i__1; ++i__) {
                NUMlapack_dlartg (&f, &g, &cosr, &sinr, &r__);
                if (i__ > ll) {
                    e[i__ - 1] = r__;
                }
                f = cosr * d__[i__] + sinr * e[i__];
                e[i__] = cosr * e[i__] - sinr * d__[i__];
                g = sinr * d__[i__ + 1];
                d__[i__ + 1] = cosr * d__[i__ + 1];
                NUMlapack_dlartg (&f, &g, &cosl, &sinl, &r__);
                d__[i__] = r__;
                f = cosl * e[i__] + sinl * d__[i__ + 1];
                d__[i__ + 1] = cosl * d__[i__ + 1] - sinl * e[i__];
                if (i__ < m - 1) {
                    g = sinl * e[i__ + 1];
                    e[i__ + 1] = cosl * e[i__ + 1];
                }
                work[i__ - ll + 1] = cosr;
                work[i__ - ll + 1 + nm1] = sinr;
                work[i__ - ll + 1 + nm12] = cosl;
                work[i__ - ll + 1 + nm13] = sinl;
                /* L140: */
            }
            e[m - 1] = f;
            /* Update singular vectors */
            if (*ncvt > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("L", "V", "F", &i__1, ncvt, &work[1], &work[*n], &vt_ref (ll, 1), ldvt);
            }
            if (*nru > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("R", "V", "F", nru, &i__1, &work[nm12 + 1], &work[nm13 + 1], &u_ref (1, ll),
                    ldu);
            }
            if (*ncc > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("L", "V", "F", &i__1, ncc, &work[nm12 + 1], &work[nm13 + 1], &c___ref (ll, 1),
                    ldc);
            }
            /* Test convergence */
            if ((d__1 = e[m - 1], fabs (d__1)) <= thresh) {
                e[m - 1] = 0.;
            }
        } else {
            /* Chase bulge from bottom to top Save cosines and sines for
               later singular vector updates */
            f = ((d__1 = d__[m], fabs (d__1)) - shift) * (d_sign (&c_b49, &d__[m]) + shift / d__[m]);
            g = e[m - 1];
            i__1 = ll + 1;
            for (i__ = m; i__ >= i__1; --i__) {
                NUMlapack_dlartg (&f, &g, &cosr, &sinr, &r__);
                if (i__ < m) {
                    e[i__] = r__;
                }
                f = cosr * d__[i__] + sinr * e[i__ - 1];
                e[i__ - 1] = cosr * e[i__ - 1] - sinr * d__[i__];
                g = sinr * d__[i__ - 1];
                d__[i__ - 1] = cosr * d__[i__ - 1];
                NUMlapack_dlartg (&f, &g, &cosl, &sinl, &r__);
                d__[i__] = r__;
                f = cosl * e[i__ - 1] + sinl * d__[i__ - 1];
                d__[i__ - 1] = cosl * d__[i__ - 1] - sinl * e[i__ - 1];
                if (i__ > ll + 1) {
                    g = sinl * e[i__ - 2];
                    e[i__ - 2] = cosl * e[i__ - 2];
                }
                work[i__ - ll] = cosr;
                work[i__ - ll + nm1] = -sinr;
                work[i__ - ll + nm12] = cosl;
                work[i__ - ll + nm13] = -sinl;
                /* L150: */
            }
            e[ll] = f;
            /* Test convergence */
            if ((d__1 = e[ll], fabs (d__1)) <= thresh) {
                e[ll] = 0.;
            }
            /* Update singular vectors if desired */
            if (*ncvt > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("L", "V", "B", &i__1, ncvt, &work[nm12 + 1], &work[nm13 + 1], &vt_ref (ll, 1),
                    ldvt);
            }
            if (*nru > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("R", "V", "B", nru, &i__1, &work[1], &work[*n], &u_ref (1, ll), ldu);
            }
            if (*ncc > 0) {
                i__1 = m - ll + 1;
                NUMlapack_dlasr ("L", "V", "B", &i__1, ncc, &work[1], &work[*n], &c___ref (ll, 1), ldc);
            }
        }
    }
    /* QR iteration finished, go back and check convergence */
    goto L60;
    /* All singular values converged, so make them positive */
L160:
    i__1 = *n;
    for (i__ = 1; i__ <= i__1; ++i__) {
        if (d__[i__] < 0.) {
            d__[i__] = -d__[i__];
            /* Change sign of singular vectors, if desired */
            if (*ncvt > 0) {
                NUMblas_dscal (ncvt, &c_b72, &vt_ref (i__, 1), ldvt);
            }
        }
        /* L170: */
    }
    /* Sort the singular values into decreasing order (insertion sort on
       singular values, but only one transposition per singular vector) */
    i__1 = *n - 1;
    for (i__ = 1; i__ <= i__1; ++i__) {
        /* Scan for smallest D(I) */
        isub = 1;
        smin = d__[1];
        i__2 = *n + 1 - i__;
        for (j = 2; j <= i__2; ++j) {
            if (d__[j] <= smin) {
                isub = j;
                smin = d__[j];
            }
            /* L180: */
        }
        if (isub != *n + 1 - i__) {
            /* Swap singular values and vectors */
            d__[isub] = d__[*n + 1 - i__];
            d__[*n + 1 - i__] = smin;
            if (*ncvt > 0) {
                NUMblas_dswap (ncvt, &vt_ref (isub, 1), ldvt, &vt_ref (*n + 1 - i__, 1), ldvt);
            }
            if (*nru > 0) {
                NUMblas_dswap (nru, &u_ref (1, isub), &c__1, &u_ref (1, *n + 1 - i__), &c__1);
            }
            if (*ncc > 0) {
                NUMblas_dswap (ncc, &c___ref (isub, 1), ldc, &c___ref (*n + 1 - i__, 1), ldc);
            }
        }
        /* L190: */
    }
    goto L220;
    /* Maximum number of iterations exceeded, failure to converge */
L200:
    *info = 0;
    i__1 = *n - 1;
    for (i__ = 1; i__ <= i__1; ++i__) {
        if (e[i__] != 0.) {
            ++ (*info);
        }
        /* L210: */
    }
L220:
    return 0;
} /* NUMlapack_dbdsqr */

#undef vt_ref
#undef u_ref
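
/* Illustrative call (a sketch for orientation, not code that occurs in this
   file): to get only the singular values of an n-by-n upper bidiagonal
   matrix whose n diagonal entries are in d and whose n-1 superdiagonal
   entries are in e, one might write

       integer n = 4, nvec = 0, ldone = 1, info;
       double dummy [1], work [4 * 4];
       NUMlapack_dbdsqr ("U", &n, &nvec, &nvec, &nvec, d, e, dummy, &ldone,
           dummy, &ldone, dummy, &ldone, work, &info);

   With NCVT = NRU = NCC = 0 the vector arguments are not referenced (the
   routine dispatches to NUMlapack_dlasq1) and dummy leading dimensions of 1
   pass the argument checks; work should provide at least 4*n doubles. */

/* NUMlapack_dgebd2 reduces a real m-by-n matrix A to bidiagonal form
   B = Q' * A * P by an unblocked sequence of elementary Householder
   reflectors; d and e receive the diagonal and off-diagonal elements of B,
   and tauq and taup the scalar factors of the reflectors that define Q
   and P. */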
int NUMlapack_dgebd2 (integer *m, integer *n, double *a, integer *lda, double *d__, double *e, double *tauq,
double *taup, double *work, integer *info) {
/* Table of constant values */
static integer c__1 = 1;
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3, i__4;
/* Local variables */
static integer i__;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--d__;
--e;
--tauq;
--taup;
--work;
/* Function Body */
*info = 0;
if (*m < 0) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*lda < MAX (1, *m)) {
*info = -4;
}
if (*info < 0) {
i__1 = - (*info);
xerbla_ ("DGEBD2", &i__1);
return 0;
}
if (*m >= *n) {
/* Reduce to upper bidiagonal form */
i__1 = *n;
for (i__ = 1; i__ <= i__1; ++i__) {
/* Generate elementary reflector H(i) to annihilate A(i+1:m,i)
Computing MIN */
i__2 = i__ + 1;
i__3 = *m - i__ + 1;
NUMlapack_dlarfg (&i__3, &a_ref (i__, i__), &a_ref (MIN (i__2, *m), i__), &c__1, &tauq[i__]);
d__[i__] = a_ref (i__, i__);
a_ref (i__, i__) = 1.;
/* Apply H(i) to A(i:m,i+1:n) from the left */
i__2 = *m - i__ + 1;
i__3 = *n - i__;
NUMlapack_dlarf ("Left", &i__2, &i__3, &a_ref (i__, i__), &c__1, &tauq[i__], &a_ref (i__, i__ + 1),
lda, &work[1]);
a_ref (i__, i__) = d__[i__];
if (i__ < *n) {
/* Generate elementary reflector G(i) to annihilate
A(i,i+2:n)
Computing MIN */
i__2 = i__ + 2;
i__3 = *n - i__;
NUMlapack_dlarfg (&i__3, &a_ref (i__, i__ + 1), &a_ref (i__, MIN (i__2, *n)), lda, &taup[i__]);
e[i__] = a_ref (i__, i__ + 1);
a_ref (i__, i__ + 1) = 1.;
/* Apply G(i) to A(i+1:m,i+1:n) from the right */
i__2 = *m - i__;
i__3 = *n - i__;
NUMlapack_dlarf ("Right", &i__2, &i__3, &a_ref (i__, i__ + 1), lda, &taup[i__], &a_ref (i__ + 1,
i__ + 1), lda, &work[1]);
a_ref (i__, i__ + 1) = e[i__];
} else {
taup[i__] = 0.;
}
/* L10: */
}
} else {
/* Reduce to lower bidiagonal form */
i__1 = *m;
for (i__ = 1; i__ <= i__1; ++i__) {
/* Generate elementary reflector G(i) to annihilate A(i,i+1:n)
Computing MIN */
i__2 = i__ + 1;
i__3 = *n - i__ + 1;
NUMlapack_dlarfg (&i__3, &a_ref (i__, i__), &a_ref (i__, MIN (i__2, *n)), lda, &taup[i__]);
d__[i__] = a_ref (i__, i__);
a_ref (i__, i__) = 1.;
/* Apply G(i) to A(i+1:m,i:n) from the right
Computing MIN */
i__2 = i__ + 1;
i__3 = *m - i__;
i__4 = *n - i__ + 1;
NUMlapack_dlarf ("Right", &i__3, &i__4, &a_ref (i__, i__), lda, &taup[i__], &a_ref (MIN (i__2, *m),
i__), lda, &work[1]);
a_ref (i__, i__) = d__[i__];
if (i__ < *m) {
/* Generate elementary reflector H(i) to annihilate
A(i+2:m,i)
Computing MIN */
i__2 = i__ + 2;
i__3 = *m - i__;
NUMlapack_dlarfg (&i__3, &a_ref (i__ + 1, i__), &a_ref (MIN (i__2, *m), i__), &c__1,
&tauq[i__]);
e[i__] = a_ref (i__ + 1, i__);
a_ref (i__ + 1, i__) = 1.;
/* Apply H(i) to A(i+1:m,i+1:n) from the left */
i__2 = *m - i__;
i__3 = *n - i__;
NUMlapack_dlarf ("Left", &i__2, &i__3, &a_ref (i__ + 1, i__), &c__1, &tauq[i__],
&a_ref (i__ + 1, i__ + 1), lda, &work[1]);
a_ref (i__ + 1, i__) = e[i__];
} else {
tauq[i__] = 0.;
}
/* L20: */
}
}
return 0;
} /* NUMlapack_dgebd2 */
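/* NUMlapack_dgebak back-transforms eigenvectors after balancing by NUMlapack_dgebal: it undoes
   the diagonal scaling (job "S"), the permutation (job "P"), or both (job "B"), applied to the
   left ("L") or right ("R") eigenvectors stored in the columns of v. */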
int NUMlapack_dgebak (const char *job, const char *side, integer *n, integer *ilo, integer *ihi, double *scale, integer *m,
double *v, integer *ldv, integer *info) {
/* System generated locals */
integer v_dim1, v_offset, i__1;
/* Local variables */
static integer i__, k;
static double s;
static int leftv;
static integer ii;
static int rightv;
#define v_ref(a_1,a_2) v[(a_2)*v_dim1 + a_1]
--scale;
v_dim1 = *ldv;
v_offset = 1 + v_dim1 * 1;
v -= v_offset;
/* Function Body */
rightv = lsame_ (side, "R");
leftv = lsame_ (side, "L");
*info = 0;
if (!lsame_ (job, "N") && !lsame_ (job, "P") && !lsame_ (job, "S") && !lsame_ (job, "B")) {
*info = -1;
} else if (!rightv && !leftv) {
*info = -2;
} else if (*n < 0) {
*info = -3;
} else if (*ilo < 1 || *ilo > MAX (1, *n)) {
*info = -4;
} else if (*ihi < MIN (*ilo, *n) || *ihi > *n) {
*info = -5;
} else if (*m < 0) {
*info = -7;
} else if (*ldv < MAX (1, *n)) {
*info = -9;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("NUMlapack_dgebak ", &i__1);
return 0;
}
/* Quick return if possible */
if (*n == 0) {
return 0;
}
if (*m == 0) {
return 0;
}
if (lsame_ (job, "N")) {
return 0;
}
if (*ilo == *ihi) {
goto L30;
}
/* Backward balance */
if (lsame_ (job, "S") || lsame_ (job, "B")) {
if (rightv) {
i__1 = *ihi;
for (i__ = *ilo; i__ <= i__1; ++i__) {
s = scale[i__];
NUMblas_dscal (m, &s, &v_ref (i__, 1), ldv);
/* L10: */
}
}
if (leftv) {
i__1 = *ihi;
for (i__ = *ilo; i__ <= i__1; ++i__) {
s = 1. / scale[i__];
NUMblas_dscal (m, &s, &v_ref (i__, 1), ldv);
/* L20: */
}
}
}
/* Backward permutation
For I = ILO-1 step -1 until 1, IHI+1 step 1 until N do -- */
L30:
if (lsame_ (job, "P") || lsame_ (job, "B")) {
if (rightv) {
i__1 = *n;
for (ii = 1; ii <= i__1; ++ii) {
i__ = ii;
if (i__ >= *ilo && i__ <= *ihi) {
goto L40;
}
if (i__ < *ilo) {
i__ = *ilo - ii;
}
k = (integer) scale[i__];
if (k == i__) {
goto L40;
}
NUMblas_dswap (m, &v_ref (i__, 1), ldv, &v_ref (k, 1), ldv);
L40:
;
}
}
if (leftv) {
i__1 = *n;
for (ii = 1; ii <= i__1; ++ii) {
i__ = ii;
if (i__ >= *ilo && i__ <= *ihi) {
goto L50;
}
if (i__ < *ilo) {
i__ = *ilo - ii;
}
k = (integer) scale[i__];
if (k == i__) {
goto L50;
}
NUMblas_dswap (m, &v_ref (i__, 1), ldv, &v_ref (k, 1), ldv);
L50:
;
}
}
}
return 0;
} /* NUMlapack_dgebak */
#undef v_ref
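/* NUMlapack_dgebal balances a general real n-by-n matrix: rows and columns are first permuted to
   isolate eigenvalues in positions 1..ilo-1 and ihi+1..n, then a diagonal similarity scaling makes
   corresponding row and column norms of the remaining submatrix as close as possible; the scaling
   factors are powers of 8 (a power of the binary radix), so no rounding error is introduced. */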
int NUMlapack_dgebal (const char *job, integer *n, double *a, integer *lda, integer *ilo, integer *ihi, double *scale,
integer *info) {
/* Table of constant values */
static integer c__1 = 1;
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2;
double d__1, d__2;
/* Local variables */
static integer iexc;
static double c__, f, g;
static integer i__, j, k, l, m;
static double r__, s;
static double sfmin1, sfmin2, sfmax1, sfmax2, ca, ra;
static int noconv;
static integer ica, ira;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--scale;
/* Function Body */
*info = 0;
if (!lsame_ (job, "N") && !lsame_ (job, "P") && !lsame_ (job, "S") && !lsame_ (job, "B")) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*lda < MAX (1, *n)) {
*info = -4;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("NUMlapack_dgebal ", &i__1);
return 0;
}
k = 1;
l = *n;
if (*n == 0) {
goto L210;
}
if (lsame_ (job, "N")) {
i__1 = *n;
for (i__ = 1; i__ <= i__1; ++i__) {
scale[i__] = 1.;
/* L10: */
}
goto L210;
}
if (lsame_ (job, "S")) {
goto L120;
}
/* Permutation to isolate eigenvalues if possible */
goto L50;
/* Row and column exchange. */
L20:
scale[m] = (double) j;
if (j == m) {
goto L30;
}
NUMblas_dswap (&l, &a_ref (1, j), &c__1, &a_ref (1, m), &c__1);
i__1 = *n - k + 1;
NUMblas_dswap (&i__1, &a_ref (j, k), lda, &a_ref (m, k), lda);
L30:
switch (iexc) {
case 1:
goto L40;
case 2:
goto L80;
}
/* Search for rows isolating an eigenvalue and push them down. */
L40:
if (l == 1) {
goto L210;
}
--l;
L50:
for (j = l; j >= 1; --j) {
i__1 = l;
for (i__ = 1; i__ <= i__1; ++i__) {
if (i__ == j) {
goto L60;
}
if (a_ref (j, i__) != 0.) {
goto L70;
}
L60:
;
}
m = l;
iexc = 1;
goto L20;
L70:
;
}
goto L90;
/* Search for columns isolating an eigenvalue and push them left. */
L80:
++k;
L90:
i__1 = l;
for (j = k; j <= i__1; ++j) {
i__2 = l;
for (i__ = k; i__ <= i__2; ++i__) {
if (i__ == j) {
goto L100;
}
if (a_ref (i__, j) != 0.) {
goto L110;
}
L100:
;
}
m = k;
iexc = 2;
goto L20;
L110:
;
}
L120:
i__1 = l;
for (i__ = k; i__ <= i__1; ++i__) {
scale[i__] = 1.;
/* L130: */
}
if (lsame_ (job, "P")) {
goto L210;
}
/* Balance the submatrix in rows K to L.
Iterative loop for norm reduction */
sfmin1 = NUMblas_dlamch ("S") / NUMblas_dlamch ("P");
sfmax1 = 1. / sfmin1;
sfmin2 = sfmin1 * 8.;
sfmax2 = 1. / sfmin2;
L140:
noconv = FALSE;
i__1 = l;
for (i__ = k; i__ <= i__1; ++i__) {
c__ = 0.;
r__ = 0.;
i__2 = l;
for (j = k; j <= i__2; ++j) {
if (j == i__) {
goto L150;
}
c__ += (d__1 = a_ref (j, i__), fabs (d__1));
r__ += (d__1 = a_ref (i__, j), fabs (d__1));
L150:
;
}
ica = NUMblas_idamax (&l, &a_ref (1, i__), &c__1);
ca = (d__1 = a_ref (ica, i__), fabs (d__1));
i__2 = *n - k + 1;
ira = NUMblas_idamax (&i__2, &a_ref (i__, k), lda);
ra = (d__1 = a_ref (i__, ira + k - 1), fabs (d__1));
/* Guard against zero C or R due to underflow. */
if (c__ == 0. || r__ == 0.) {
goto L200;
}
g = r__ / 8.;
f = 1.;
s = c__ + r__;
L160:
/* Computing MAX */
d__1 = MAX (f, c__);
/* Computing MIN */
d__2 = MIN (r__, g);
if (c__ >= g || MAX (d__1, ca) >= sfmax2 || MIN (d__2, ra) <= sfmin2) {
goto L170;
}
f *= 8.;
c__ *= 8.;
ca *= 8.;
r__ /= 8.;
g /= 8.;
ra /= 8.;
goto L160;
L170:
g = c__ / 8.;
L180:
/* Computing MIN */
d__1 = MIN (f, c__), d__1 = MIN (d__1, g);
if (g < r__ || MAX (r__, ra) >= sfmax2 || MIN (d__1, ca) <= sfmin2) {
goto L190;
}
f /= 8.;
c__ /= 8.;
g /= 8.;
ca /= 8.;
r__ *= 8.;
ra *= 8.;
goto L180;
/* Now balance. */
L190:
if (c__ + r__ >= s * .95) {
goto L200;
}
if (f < 1. && scale[i__] < 1.) {
if (f * scale[i__] <= sfmin1) {
goto L200;
}
}
if (f > 1. && scale[i__] > 1.) {
if (scale[i__] >= sfmax1 / f) {
goto L200;
}
}
g = 1. / f;
scale[i__] *= f;
noconv = TRUE;
i__2 = *n - k + 1;
NUMblas_dscal (&i__2, &g, &a_ref (i__, k), lda);
NUMblas_dscal (&l, &f, &a_ref (1, i__), &c__1);
L200:
;
}
if (noconv) {
goto L140;
}
L210:
*ilo = k;
*ihi = l;
return 0;
} /* NUMlapack_dgebal */
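/* NUMlapack_dgebrd is the blocked version of NUMlapack_dgebd2: panels of nb rows and columns are
   reduced with NUMlapack_dlabrd, the trailing submatrix is updated by two rank-nb NUMblas_dgemm
   calls, and the remainder is finished with the unblocked NUMlapack_dgebd2. */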
int NUMlapack_dgebrd (integer *m, integer *n, double *a, integer *lda, double *d__, double *e, double *tauq,
double *taup, double *work, integer *lwork, integer *info) {
/* Table of constant values */
static integer c__1 = 1;
static integer c_n1 = -1;
static integer c__3 = 3;
static integer c__2 = 2;
static double c_b21 = -1.;
static double c_b22 = 1.;
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3, i__4;
/* Local variables */
static integer i__, j;
static integer nbmin, iinfo, minmn;
static integer nb;
static integer nx;
static double ws;
static integer ldwrkx, ldwrky, lwkopt;
static integer lquery;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--d__;
--e;
--tauq;
--taup;
--work;
/* Function Body */
*info = 0;
/* Computing MAX */
i__1 = 1, i__2 = NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, n, &c_n1, &c_n1, 6, 1);
nb = MAX (i__1, i__2);
lwkopt = (*m + *n) * nb;
work[1] = (double) lwkopt;
lquery = *lwork == -1;
if (*m < 0) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*lda < MAX (1, *m)) {
*info = -4;
} else { /* if(complicated condition) */
/* Computing MAX */
i__1 = MAX (1, *m);
if (*lwork < MAX (i__1, *n) && !lquery) {
*info = -10;
}
}
if (*info < 0) {
i__1 = - (*info);
xerbla_ ("DGEBRD", &i__1);
return 0;
} else if (lquery) {
return 0;
}
/* Quick return if possible */
minmn = MIN (*m, *n);
if (minmn == 0) {
work[1] = 1.;
return 0;
}
ws = (double) MAX (*m, *n);
ldwrkx = *m;
ldwrky = *n;
if (nb > 1 && nb < minmn) {
/* Set the crossover point NX.
Computing MAX */
i__1 = nb, i__2 = NUMlapack_ilaenv (&c__3, "DGEBRD", " ", m, n, &c_n1, &c_n1, 6, 1);
nx = MAX (i__1, i__2);
/* Determine when to switch from blocked to unblocked code. */
if (nx < minmn) {
ws = (double) ( (*m + *n) * nb);
if ( (double) (*lwork) < ws) {
/* Not enough work space for the optimal NB, consider using a
smaller block size. */
nbmin = NUMlapack_ilaenv (&c__2, "DGEBRD", " ", m, n, &c_n1, &c_n1, 6, 1);
if (*lwork >= (*m + *n) * nbmin) {
nb = *lwork / (*m + *n);
} else {
nb = 1;
nx = minmn;
}
}
}
} else {
nx = minmn;
}
i__1 = minmn - nx;
i__2 = nb;
for (i__ = 1; i__2 < 0 ? i__ >= i__1 : i__ <= i__1; i__ += i__2) {
/* Reduce rows and columns i:i+nb-1 to bidiagonal form and return the
matrices X and Y which are needed to update the unreduced part of
the matrix */
i__3 = *m - i__ + 1;
i__4 = *n - i__ + 1;
NUMlapack_dlabrd (&i__3, &i__4, &nb, &a_ref (i__, i__), lda, &d__[i__], &e[i__], &tauq[i__], &taup[i__],
&work[1], &ldwrkx, &work[ldwrkx * nb + 1], &ldwrky);
/* Update the trailing submatrix A(i+nb:m,i+nb:n), using an update of
the form A := A - V*Y' - X*U' */
i__3 = *m - i__ - nb + 1;
i__4 = *n - i__ - nb + 1;
NUMblas_dgemm ("No transpose", "Transpose", &i__3, &i__4, &nb, &c_b21, &a_ref (i__ + nb, i__), lda,
&work[ldwrkx * nb + nb + 1], &ldwrky, &c_b22, &a_ref (i__ + nb, i__ + nb), lda);
i__3 = *m - i__ - nb + 1;
i__4 = *n - i__ - nb + 1;
NUMblas_dgemm ("No transpose", "No transpose", &i__3, &i__4, &nb, &c_b21, &work[nb + 1], &ldwrkx,
&a_ref (i__, i__ + nb), lda, &c_b22, &a_ref (i__ + nb, i__ + nb), lda);
/* Copy diagonal and off-diagonal elements of B back into A */
if (*m >= *n) {
i__3 = i__ + nb - 1;
for (j = i__; j <= i__3; ++j) {
a_ref (j, j) = d__[j];
a_ref (j, j + 1) = e[j];
/* L10: */
}
} else {
i__3 = i__ + nb - 1;
for (j = i__; j <= i__3; ++j) {
a_ref (j, j) = d__[j];
a_ref (j + 1, j) = e[j];
/* L20: */
}
}
/* L30: */
}
/* Use unblocked code to reduce the remainder of the matrix */
i__2 = *m - i__ + 1;
i__1 = *n - i__ + 1;
NUMlapack_dgebd2 (&i__2, &i__1, &a_ref (i__, i__), lda, &d__[i__], &e[i__], &tauq[i__], &taup[i__],
&work[1], &iinfo);
work[1] = ws;
return 0;
} /* NUMlapack_dgebrd */
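/* NUMlapack_dgeev computes the eigenvalues (wr, wi) of a real general n-by-n matrix and,
   optionally, its left and/or right eigenvectors, by balancing, Hessenberg reduction, QR
   iteration (NUMlapack_dhseqr), eigenvector computation (NUMlapack_dtrevc) and back-
   transformation (NUMlapack_dgebak). A minimal calling sketch with hypothetical sizes; a first
   call with lwork = -1 is a workspace query that returns the optimal lwork in work[0]:

	integer n = 4, lda = 4, ldvl = 4, ldvr = 4, lwork = -1, info;
	double a [16], wr [4], wi [4], vl [16], vr [16], wsize;
	NUMlapack_dgeev ("V", "V", &n, a, &lda, wr, wi, vl, &ldvl, vr, &ldvr, &wsize, &lwork, &info);
*/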
int NUMlapack_dgeev (const char *jobvl, const char *jobvr, integer *n, double *a, integer *lda, double *wr, double *wi,
double *vl, integer *ldvl, double *vr, integer *ldvr, double *work, integer *lwork, integer *info) {
/* Table of constant values */
static integer c__8 = 8;
static integer c_n1 = -1;
/* System generated locals */
integer a_dim1, a_offset, vl_dim1, vl_offset, vr_dim1, vr_offset, i__1, i__2, i__3, i__4;
double d__1, d__2;
/* Local variables */
static integer ibal;
static char side[1];
static integer maxb;
static double anrm;
static integer ierr, itau;
static integer iwrk, nout;
static integer i__, k;
static double r__;
static double cs;
static int scalea;
static double cscale;
static double sn;
static int select[1];
static double bignum;
static integer minwrk, maxwrk;
static int wantvl;
static double smlnum;
static integer hswork;
static int lquery, wantvr;
static integer ihi;
static double scl;
static integer ilo;
static double dum[1], eps;
#define vl_ref(a_1,a_2) vl[(a_2)*vl_dim1 + a_1]
#define vr_ref(a_1,a_2) vr[(a_2)*vr_dim1 + a_1]
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--wr;
--wi;
vl_dim1 = *ldvl;
vl_offset = 1 + vl_dim1 * 1;
vl -= vl_offset;
vr_dim1 = *ldvr;
vr_offset = 1 + vr_dim1 * 1;
vr -= vr_offset;
--work;
/* Function Body */
*info = 0;
lquery = *lwork == -1;
wantvl = lsame_ (jobvl, "V");
wantvr = lsame_ (jobvr, "V");
if (!wantvl && !lsame_ (jobvl, "N")) {
*info = -1;
} else if (!wantvr && !lsame_ (jobvr, "N")) {
*info = -2;
} else if (*n < 0) {
*info = -3;
} else if (*lda < MAX (1, *n)) {
*info = -5;
} else if (*ldvl < 1 || wantvl && *ldvl < *n) {
*info = -9;
} else if (*ldvr < 1 || wantvr && *ldvr < *n) {
*info = -11;
}
/* Compute workspace (Note: Comments in the code beginning "Workspace:"
describe the minimal amount of workspace needed at that point in the
code, as well as the preferred amount for good performance. NB refers
to the optimal block size for the immediately following subroutine, as
returned by ILAENV. HSWORK refers to the workspace preferred by
NUMlapack_dhseqr , as calculated below. HSWORK is computed assuming
ILO=1 and IHI=N, the worst case.) */
minwrk = 1;
if (*info == 0 && (*lwork >= 1 || lquery)) {
maxwrk =
(*n << 1) + *n * NUMlapack_ilaenv (&c__1, "NUMlapack_dgehrd ", " ", n, &c__1, n, &c__0, 6, 1);
if (!wantvl && !wantvr) {
/* Computing MAX */
i__1 = 1, i__2 = *n * 3;
minwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = NUMlapack_ilaenv (&c__8, "NUMlapack_dhseqr ", "EN", n, &c__1, n, &c_n1, 6, 2);
maxb = MAX (i__1, 2);
/* Computing MIN Computing MAX */
i__3 = 2, i__4 = NUMlapack_ilaenv (&c__4, "NUMlapack_dhseqr ", "EN", n, &c__1, n, &c_n1, 6, 2);
i__1 = MIN (maxb, *n), i__2 = MAX (i__3, i__4);
k = MIN (i__1, i__2);
/* Computing MAX */
i__1 = k * (k + 2), i__2 = *n << 1;
hswork = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 = *n + 1, i__1 = MAX (i__1, i__2), i__2 = *n + hswork;
maxwrk = MAX (i__1, i__2);
} else {
/* Computing MAX */
i__1 = 1, i__2 = *n << 2;
minwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 =
(*n << 1) + (*n - 1) * NUMlapack_ilaenv (&c__1, "DOR" "GHR", " ", n, &c__1, n, &c_n1, 6, 1);
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = NUMlapack_ilaenv (&c__8, "NUMlapack_dhseqr ", "SV", n, &c__1, n, &c_n1, 6, 2);
maxb = MAX (i__1, 2);
/* Computing MIN Computing MAX */
i__3 = 2, i__4 = NUMlapack_ilaenv (&c__4, "NUMlapack_dhseqr ", "SV", n, &c__1, n, &c_n1, 6, 2);
i__1 = MIN (maxb, *n), i__2 = MAX (i__3, i__4);
k = MIN (i__1, i__2);
/* Computing MAX */
i__1 = k * (k + 2), i__2 = *n << 1;
hswork = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 = *n + 1, i__1 = MAX (i__1, i__2), i__2 = *n + hswork;
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 = *n << 2;
maxwrk = MAX (i__1, i__2);
}
work[1] = (double) maxwrk;
}
if (*lwork < minwrk && !lquery) {
*info = -13;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("NUMlapack_dgeev ", &i__1);
return 0;
} else if (lquery) {
return 0;
}
/* Quick return if possible */
if (*n == 0) {
return 0;
}
/* Get machine constants */
eps = NUMblas_dlamch ("P");
smlnum = NUMblas_dlamch ("S");
bignum = 1. / smlnum;
NUMlapack_dlabad (&smlnum, &bignum);
smlnum = sqrt (smlnum) / eps;
bignum = 1. / smlnum;
/* Scale A if max element outside range [SMLNUM,BIGNUM] */
anrm = NUMlapack_dlange ("M", n, n, &a[a_offset], lda, dum);
scalea = FALSE;
if (anrm > 0. && anrm < smlnum) {
scalea = TRUE;
cscale = smlnum;
} else if (anrm > bignum) {
scalea = TRUE;
cscale = bignum;
}
if (scalea) {
NUMlapack_dlascl ("G", &c__0, &c__0, &anrm, &cscale, n, n, &a[a_offset], lda, &ierr);
}
/* Balance the matrix (Workspace: need N) */
ibal = 1;
NUMlapack_dgebal ("B", n, &a[a_offset], lda, &ilo, &ihi, &work[ibal], &ierr);
/* Reduce to upper Hessenberg form (Workspace: need 3*N, prefer 2*N+N*NB)
*/
itau = ibal + *n;
iwrk = itau + *n;
i__1 = *lwork - iwrk + 1;
NUMlapack_dgehrd (n, &ilo, &ihi, &a[a_offset], lda, &work[itau], &work[iwrk], &i__1, &ierr);
if (wantvl) {
/* Want left eigenvectors: copy Householder vectors to VL */
* (unsigned char *) side = 'L';
NUMlapack_dlacpy ("L", n, n, &a[a_offset], lda, &vl[vl_offset], ldvl);
/* Generate orthogonal matrix in VL (Workspace: need 3*N-1, prefer
2*N+(N-1)*NB) */
i__1 = *lwork - iwrk + 1;
NUMlapack_dorghr (n, &ilo, &ihi, &vl[vl_offset], ldvl, &work[itau], &work[iwrk], &i__1, &ierr);
/* Perform QR iteration, accumulating Schur vectors in VL (Workspace:
need N+1, prefer N+HSWORK (see comments) ) */
iwrk = itau;
i__1 = *lwork - iwrk + 1;
NUMlapack_dhseqr ("S", "V", n, &ilo, &ihi, &a[a_offset], lda, &wr[1], &wi[1], &vl[vl_offset], ldvl,
&work[iwrk], &i__1, info);
if (wantvr) {
/* Want left and right eigenvectors: copy Schur vectors to VR */
* (unsigned char *) side = 'B';
NUMlapack_dlacpy ("F", n, n, &vl[vl_offset], ldvl, &vr[vr_offset], ldvr);
}
} else if (wantvr) {
/* Want right eigenvectors: copy Householder vectors to VR */
* (unsigned char *) side = 'R';
NUMlapack_dlacpy ("L", n, n, &a[a_offset], lda, &vr[vr_offset], ldvr);
/* Generate orthogonal matrix in VR (Workspace: need 3*N-1, prefer
2*N+(N-1)*NB) */
i__1 = *lwork - iwrk + 1;
NUMlapack_dorghr (n, &ilo, &ihi, &vr[vr_offset], ldvr, &work[itau], &work[iwrk], &i__1, &ierr);
/* Perform QR iteration, accumulating Schur vectors in VR (Workspace:
need N+1, prefer N+HSWORK (see comments) ) */
iwrk = itau;
i__1 = *lwork - iwrk + 1;
NUMlapack_dhseqr ("S", "V", n, &ilo, &ihi, &a[a_offset], lda, &wr[1], &wi[1], &vr[vr_offset], ldvr,
&work[iwrk], &i__1, info);
} else {
/* Compute eigenvalues only (Workspace: need N+1, prefer N+HSWORK
(see comments) ) */
iwrk = itau;
i__1 = *lwork - iwrk + 1;
NUMlapack_dhseqr ("E", "N", n, &ilo, &ihi, &a[a_offset], lda, &wr[1], &wi[1], &vr[vr_offset], ldvr,
&work[iwrk], &i__1, info);
}
/* If INFO > 0 from NUMlapack_dhseqr , then quit */
if (*info > 0) {
goto L50;
}
if (wantvl || wantvr) {
/* Compute left and/or right eigenvectors (Workspace: need 4*N) */
NUMlapack_dtrevc (side, "B", select, n, &a[a_offset], lda, &vl[vl_offset], ldvl, &vr[vr_offset],
ldvr, n, &nout, &work[iwrk], &ierr);
}
if (wantvl) {
/* Undo balancing of left eigenvectors (Workspace: need N) */
NUMlapack_dgebak ("B", "L", n, &ilo, &ihi, &work[ibal], n, &vl[vl_offset], ldvl, &ierr);
/* Normalize left eigenvectors and make largest component real */
i__1 = *n;
for (i__ = 1; i__ <= i__1; ++i__) {
if (wi[i__] == 0.) {
scl = 1. / NUMblas_dnrm2 (n, &vl_ref (1, i__), &c__1);
NUMblas_dscal (n, &scl, &vl_ref (1, i__), &c__1);
} else if (wi[i__] > 0.) {
d__1 = NUMblas_dnrm2 (n, &vl_ref (1, i__), &c__1);
d__2 = NUMblas_dnrm2 (n, &vl_ref (1, i__ + 1), &c__1);
scl = 1. / NUMlapack_dlapy2 (&d__1, &d__2);
NUMblas_dscal (n, &scl, &vl_ref (1, i__), &c__1);
NUMblas_dscal (n, &scl, &vl_ref (1, i__ + 1), &c__1);
i__2 = *n;
for (k = 1; k <= i__2; ++k) {
/* Computing 2nd power */
d__1 = vl_ref (k, i__);
/* Computing 2nd power */
d__2 = vl_ref (k, i__ + 1);
work[iwrk + k - 1] = d__1 * d__1 + d__2 * d__2;
/* L10: */
}
k = NUMblas_idamax (n, &work[iwrk], &c__1);
NUMlapack_dlartg (&vl_ref (k, i__), &vl_ref (k, i__ + 1), &cs, &sn, &r__);
NUMblas_drot (n, &vl_ref (1, i__), &c__1, &vl_ref (1, i__ + 1), &c__1, &cs, &sn);
vl_ref (k, i__ + 1) = 0.;
}
/* L20: */
}
}
if (wantvr) {
/* Undo balancing of right eigenvectors (Workspace: need N) */
NUMlapack_dgebak ("B", "R", n, &ilo, &ihi, &work[ibal], n, &vr[vr_offset], ldvr, &ierr);
/* Normalize right eigenvectors and make largest component real */
i__1 = *n;
for (i__ = 1; i__ <= i__1; ++i__) {
if (wi[i__] == 0.) {
scl = 1. / NUMblas_dnrm2 (n, &vr_ref (1, i__), &c__1);
NUMblas_dscal (n, &scl, &vr_ref (1, i__), &c__1);
} else if (wi[i__] > 0.) {
d__1 = NUMblas_dnrm2 (n, &vr_ref (1, i__), &c__1);
d__2 = NUMblas_dnrm2 (n, &vr_ref (1, i__ + 1), &c__1);
scl = 1. / NUMlapack_dlapy2 (&d__1, &d__2);
NUMblas_dscal (n, &scl, &vr_ref (1, i__), &c__1);
NUMblas_dscal (n, &scl, &vr_ref (1, i__ + 1), &c__1);
i__2 = *n;
for (k = 1; k <= i__2; ++k) {
/* Computing 2nd power */
d__1 = vr_ref (k, i__);
/* Computing 2nd power */
d__2 = vr_ref (k, i__ + 1);
work[iwrk + k - 1] = d__1 * d__1 + d__2 * d__2;
/* L30: */
}
k = NUMblas_idamax (n, &work[iwrk], &c__1);
NUMlapack_dlartg (&vr_ref (k, i__), &vr_ref (k, i__ + 1), &cs, &sn, &r__);
NUMblas_drot (n, &vr_ref (1, i__), &c__1, &vr_ref (1, i__ + 1), &c__1, &cs, &sn);
vr_ref (k, i__ + 1) = 0.;
}
/* L40: */
}
}
/* Undo scaling if necessary */
L50:
if (scalea) {
i__1 = *n - *info;
/* Computing MAX */
i__3 = *n - *info;
i__2 = MAX (i__3, 1);
NUMlapack_dlascl ("G", &c__0, &c__0, &cscale, &anrm, &i__1, &c__1, &wr[*info + 1], &i__2, &ierr);
i__1 = *n - *info;
/* Computing MAX */
i__3 = *n - *info;
i__2 = MAX (i__3, 1);
NUMlapack_dlascl ("G", &c__0, &c__0, &cscale, &anrm, &i__1, &c__1, &wi[*info + 1], &i__2, &ierr);
if (*info > 0) {
i__1 = ilo - 1;
NUMlapack_dlascl ("G", &c__0, &c__0, &cscale, &anrm, &i__1, &c__1, &wr[1], n, &ierr);
i__1 = ilo - 1;
NUMlapack_dlascl ("G", &c__0, &c__0, &cscale, &anrm, &i__1, &c__1, &wi[1], n, &ierr);
}
}
work[1] = (double) maxwrk;
return 0;
} /* NUMlapack_dgeev */
#undef vr_ref
#undef vl_ref
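/* NUMlapack_dgehd2 reduces rows and columns ilo..ihi of a real n-by-n matrix to upper Hessenberg
   form by an unblocked sequence of Householder reflections; the scalar factors of the reflectors
   are returned in tau. */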
int NUMlapack_dgehd2 (integer *n, integer *ilo, integer *ihi, double *a, integer *lda, double *tau, double *work,
integer *info) {
/* Table of constant values */
static integer c__1 = 1;
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3;
/* Local variables */
static integer i__;
static double aii;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--tau;
--work;
/* Function Body */
*info = 0;
if (*n < 0) {
*info = -1;
} else if (*ilo < 1 || *ilo > MAX (1, *n)) {
*info = -2;
} else if (*ihi < MIN (*ilo, *n) || *ihi > *n) {
*info = -3;
} else if (*lda < MAX (1, *n)) {
*info = -5;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("NUMlapack_dgehd2 ", &i__1);
return 0;
}
i__1 = *ihi - 1;
for (i__ = *ilo; i__ <= i__1; ++i__) {
/* Compute elementary reflector H(i) to annihilate A(i+2:ihi,i)
Computing MIN */
i__2 = i__ + 2;
i__3 = *ihi - i__;
NUMlapack_dlarfg (&i__3, &a_ref (i__ + 1, i__), &a_ref (MIN (i__2, *n), i__), &c__1, &tau[i__]);
aii = a_ref (i__ + 1, i__);
a_ref (i__ + 1, i__) = 1.;
/* Apply H(i) to A(1:ihi,i+1:ihi) from the right */
i__2 = *ihi - i__;
NUMlapack_dlarf ("Right", ihi, &i__2, &a_ref (i__ + 1, i__), &c__1, &tau[i__], &a_ref (1, i__ + 1),
lda, &work[1]);
/* Apply H(i) to A(i+1:ihi,i+1:n) from the left */
i__2 = *ihi - i__;
i__3 = *n - i__;
NUMlapack_dlarf ("Left", &i__2, &i__3, &a_ref (i__ + 1, i__), &c__1, &tau[i__], &a_ref (i__ + 1,
i__ + 1), lda, &work[1]);
a_ref (i__ + 1, i__) = aii;
/* L10: */
}
return 0;
} /* NUMlapack_dgehd2 */
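/* NUMlapack_dgehrd is the blocked version of NUMlapack_dgehd2: blocks of ib reflectors are
   aggregated with NUMlapack_dlahrd and applied with NUMblas_dgemm and NUMlapack_dlarfb; the last
   (or only) block is handled by the unblocked NUMlapack_dgehd2. */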
int NUMlapack_dgehrd (integer *n, integer *ilo, integer *ihi, double *a, integer *lda, double *tau, double *work,
integer *lwork, integer *info) {
/* Table of constant values */
static integer c__1 = 1;
static integer c_n1 = -1;
static integer c__3 = 3;
static integer c__2 = 2;
static integer c__65 = 65;
static double c_b25 = -1.;
static double c_b26 = 1.;
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3, i__4;
/* Local variables */
static integer i__;
static double t[4160] /* was [65][64] */ ;
static integer nbmin, iinfo;
static integer ib;
static double ei;
static integer nb, nh;
static integer nx;
static integer ldwork, lwkopt;
static int lquery;
static integer iws;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--tau;
--work;
/* Function Body */
*info = 0;
/* Computing MIN */
i__1 = 64, i__2 = NUMlapack_ilaenv (&c__1, "NUMlapack_dgehrd ", " ", n, ilo, ihi, &c_n1, 6, 1);
nb = MIN (i__1, i__2);
lwkopt = *n * nb;
work[1] = (double) lwkopt;
lquery = *lwork == -1;
if (*n < 0) {
*info = -1;
} else if (*ilo < 1 || *ilo > MAX (1, *n)) {
*info = -2;
} else if (*ihi < MIN (*ilo, *n) || *ihi > *n) {
*info = -3;
} else if (*lda < MAX (1, *n)) {
*info = -5;
} else if (*lwork < MAX (1, *n) && !lquery) {
*info = -8;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("NUMlapack_dgehrd ", &i__1);
return 0;
} else if (lquery) {
return 0;
}
/* Set elements 1:ILO-1 and IHI:N-1 of TAU to zero */
i__1 = *ilo - 1;
for (i__ = 1; i__ <= i__1; ++i__) {
tau[i__] = 0.;
/* L10: */
}
i__1 = *n - 1;
for (i__ = MAX (1, *ihi); i__ <= i__1; ++i__) {
tau[i__] = 0.;
/* L20: */
}
/* Quick return if possible */
nh = *ihi - *ilo + 1;
if (nh <= 1) {
work[1] = 1.;
return 0;
}
/* Determine the block size.
Computing MIN */
i__1 = 64, i__2 = NUMlapack_ilaenv (&c__1, "NUMlapack_dgehrd ", " ", n, ilo, ihi, &c_n1, 6, 1);
nb = MIN (i__1, i__2);
nbmin = 2;
iws = 1;
if (nb > 1 && nb < nh) {
/* Determine when to cross over from blocked to unblocked code (last
block is always handled by unblocked code).
Computing MAX */
i__1 = nb, i__2 = NUMlapack_ilaenv (&c__3, "NUMlapack_dgehrd ", " ", n, ilo, ihi, &c_n1, 6, 1);
nx = MAX (i__1, i__2);
if (nx < nh) {
/* Determine if workspace is large enough for blocked code. */
iws = *n * nb;
if (*lwork < iws) {
/* Not enough workspace to use optimal NB: determine the
minimum value of NB, and reduce NB or force use of
unblocked code.
Computing MAX */
i__1 = 2, i__2 =
NUMlapack_ilaenv (&c__2, "NUMlapack_dgehrd ", " ", n, ilo, ihi, &c_n1, 6, 1);
nbmin = MAX (i__1, i__2);
if (*lwork >= *n * nbmin) {
nb = *lwork / *n;
} else {
nb = 1;
}
}
}
}
ldwork = *n;
if (nb < nbmin || nb >= nh) {
/* Use unblocked code below */
i__ = *ilo;
} else {
/* Use blocked code */
i__1 = *ihi - 1 - nx;
i__2 = nb;
for (i__ = *ilo; i__2 < 0 ? i__ >= i__1 : i__ <= i__1; i__ += i__2) {
/* Computing MIN */
i__3 = nb, i__4 = *ihi - i__;
ib = MIN (i__3, i__4);
/* Reduce columns i:i+ib-1 to Hessenberg form, returning the
matrices V and T of the block reflector H = I - V*T*V' which
performs the reduction, and also the matrix Y = A*V*T */
NUMlapack_dlahrd (ihi, &i__, &ib, &a_ref (1, i__), lda, &tau[i__], t, &c__65, &work[1], &ldwork);
/* Apply the block reflector H to A(1:ihi,i+ib:ihi) from the
right, computing A := A - Y * V'. V(i+ib,ib-1) should be set to
1. */
ei = a_ref (i__ + ib, i__ + ib - 1);
a_ref (i__ + ib, i__ + ib - 1) = 1.;
i__3 = *ihi - i__ - ib + 1;
NUMblas_dgemm ("No transpose", "Transpose", ihi, &i__3, &ib, &c_b25, &work[1], &ldwork,
&a_ref (i__ + ib, i__), lda, &c_b26, &a_ref (1, i__ + ib), lda);
a_ref (i__ + ib, i__ + ib - 1) = ei;
/* Apply the block reflector H to A(i+1:ihi,i+ib:n) from the left
*/
i__3 = *ihi - i__;
i__4 = *n - i__ - ib + 1;
NUMlapack_dlarfb ("Left", "Transpose", "Forward", "Columnwise", &i__3, &i__4, &ib,
&a_ref (i__ + 1, i__), lda, t, &c__65, &a_ref (i__ + 1, i__ + ib), lda, &work[1], &ldwork);
/* L30: */
}
}
/* Use unblocked code to reduce the rest of the matrix */
NUMlapack_dgehd2 (n, &i__, ihi, &a[a_offset], lda, &tau[1], &work[1], &iinfo);
work[1] = (double) iws;
return 0;
} /* NUMlapack_dgehrd */
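/* NUMlapack_dgelq2 computes an unblocked LQ factorization A = L * Q of a real m-by-n matrix:
   L is lower triangular and Q is represented as a product of min(m,n) elementary reflectors
   whose scalar factors are returned in tau. */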
int NUMlapack_dgelq2 (integer *m, integer *n, double *a, integer *lda, double *tau, double *work, integer *info) {
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3;
/* Local variables */
static integer i__, k;
static double aii;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--tau;
--work;
/* Function Body */
*info = 0;
if (*m < 0) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*lda < MAX (1, *m)) {
*info = -4;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("DGELQ2", &i__1);
return 0;
}
k = MIN (*m, *n);
i__1 = k;
for (i__ = 1; i__ <= i__1; ++i__) {
/* Generate elementary reflector H(i) to annihilate A(i,i+1:n)
Computing MIN */
i__2 = i__ + 1;
i__3 = *n - i__ + 1;
NUMlapack_dlarfg (&i__3, &a_ref (i__, i__), &a_ref (i__, MIN (i__2, *n)), lda, &tau[i__]);
if (i__ < *m) {
/* Apply H(i) to A(i+1:m,i:n) from the right */
aii = a_ref (i__, i__);
a_ref (i__, i__) = 1.;
i__2 = *m - i__;
i__3 = *n - i__ + 1;
NUMlapack_dlarf ("Right", &i__2, &i__3, &a_ref (i__, i__), lda, &tau[i__], &a_ref (i__ + 1, i__),
lda, &work[1]);
a_ref (i__, i__) = aii;
}
/* L10: */
}
return 0;
} /* NUMlapack_dgelq2 */
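/* NUMlapack_dgelqf is the blocked version of NUMlapack_dgelq2: each block of ib reflectors is
   factored with NUMlapack_dgelq2, aggregated with NUMlapack_dlarft, and applied to the rest of
   the matrix with NUMlapack_dlarfb. */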
int NUMlapack_dgelqf (integer *m, integer *n, double *a, integer *lda, double *tau, double *work, integer *lwork,
integer *info) {
/* Table of constant values */
static integer c__1 = 1;
static integer c_n1 = -1;
static integer c__3 = 3;
static integer c__2 = 2;
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3, i__4;
/* Local variables */
static integer i__, k, nbmin, iinfo;
static integer ib, nb;
static integer nx;
static integer ldwork, lwkopt;
static integer lquery;
static integer iws;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--tau;
--work;
/* Function Body */
*info = 0;
nb = NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
lwkopt = *m * nb;
work[1] = (double) lwkopt;
lquery = *lwork == -1;
if (*m < 0) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*lda < MAX (1, *m)) {
*info = -4;
} else if (*lwork < MAX (1, *m) && !lquery) {
*info = -7;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("DGELQF", &i__1);
return 0;
} else if (lquery) {
return 0;
}
/* Quick return if possible */
k = MIN (*m, *n);
if (k == 0) {
work[1] = 1.;
return 0;
}
nbmin = 2;
nx = 0;
iws = *m;
if (nb > 1 && nb < k) {
/* Determine when to cross over from blocked to unblocked code.
Computing MAX */
i__1 = 0, i__2 = NUMlapack_ilaenv (&c__3, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
nx = MAX (i__1, i__2);
if (nx < k) {
/* Determine if workspace is large enough for blocked code. */
ldwork = *m;
iws = ldwork * nb;
if (*lwork < iws) {
/* Not enough workspace to use optimal NB: reduce NB and
determine the minimum value of NB. */
nb = *lwork / ldwork;
/* Computing MAX */
i__1 = 2, i__2 = NUMlapack_ilaenv (&c__2, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
nbmin = MAX (i__1, i__2);
}
}
}
if (nb >= nbmin && nb < k && nx < k) {
/* Use blocked code initially */
i__1 = k - nx;
i__2 = nb;
for (i__ = 1; i__2 < 0 ? i__ >= i__1 : i__ <= i__1; i__ += i__2) {
/* Computing MIN */
i__3 = k - i__ + 1;
ib = MIN (i__3, nb);
/* Compute the LQ factorization of the current block
A(i:i+ib-1,i:n) */
i__3 = *n - i__ + 1;
NUMlapack_dgelq2 (&ib, &i__3, &a_ref (i__, i__), lda, &tau[i__], &work[1], &iinfo);
if (i__ + ib <= *m) {
/* Form the triangular factor of the block reflector H = H(i)
H(i+1) . . . H(i+ib-1) */
i__3 = *n - i__ + 1;
NUMlapack_dlarft ("Forward", "Rowwise", &i__3, &ib, &a_ref (i__, i__), lda, &tau[i__], &work[1],
&ldwork);
/* Apply H to A(i+ib:m,i:n) from the right */
i__3 = *m - i__ - ib + 1;
i__4 = *n - i__ + 1;
NUMlapack_dlarfb ("Right", "No transpose", "Forward", "Rowwise", &i__3, &i__4, &ib, &a_ref (i__,
i__), lda, &work[1], &ldwork, &a_ref (i__ + ib, i__), lda, &work[ib + 1], &ldwork);
}
/* L10: */
}
} else {
i__ = 1;
}
/* Use unblocked code to factor the last or only block. */
if (i__ <= k) {
i__2 = *m - i__ + 1;
i__1 = *n - i__ + 1;
NUMlapack_dgelq2 (&i__2, &i__1, &a_ref (i__, i__), lda, &tau[i__], &work[1], &iinfo);
}
work[1] = (double) iws;
return 0;
} /* NUMlapack_dgelqf */
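/* NUMlapack_dgelss computes the minimum-norm least-squares solution of A * X = B using the
   singular value decomposition of A: singular values below rcond * s[1] (or eps * s[1] when
   rcond < 0) are treated as zero, and the effective rank of A is returned in *rank. */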
#define b_ref(a_1,a_2) b[(a_2)*b_dim1 + a_1]
int NUMlapack_dgelss (integer *m, integer *n, integer *nrhs, double *a, integer *lda, double *b, integer *ldb, double *s,
double *rcond, integer *rank, double *work, integer *lwork, integer *info) {
/* System generated locals */
integer a_dim1, a_offset, b_dim1, b_offset, i__1, i__2, i__3, i__4;
double d__1;
/* Local variables */
static double anrm, bnrm;
static integer itau;
static double vdum[1];
static integer i__;
static integer iascl, ibscl;
static integer chunk;
static double sfmin;
static integer minmn;
static integer maxmn, itaup, itauq, mnthr, iwork;
static integer bl, ie, il;
static integer mm;
static integer bdspac;
static double bignum;
static integer ldwork;
static integer minwrk, maxwrk;
static double smlnum;
static integer lquery;
static double eps, thr;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
b_dim1 = *ldb;
b_offset = 1 + b_dim1 * 1;
b -= b_offset;
--s;
--work;
/* Function Body */
*info = 0;
minmn = MIN (*m, *n);
maxmn = MAX (*m, *n);
mnthr = NUMlapack_ilaenv (&c__6, "DGELSS", " ", m, n, nrhs, &c_n1, 6, 1);
lquery = *lwork == -1;
if (*m < 0) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*nrhs < 0) {
*info = -3;
} else if (*lda < MAX (1, *m)) {
*info = -5;
} else if (*ldb < MAX (1, maxmn)) {
*info = -7;
}
/* Compute workspace (Note: Comments in the code beginning "Workspace:"
describe the minimal amount of workspace needed at that point in the
code, as well as the preferred amount for good performance. NB refers
to the optimal block size for the immediately following subroutine, as
returned by ILAENV.) */
minwrk = 1;
if (*info == 0 && (*lwork >= 1 || lquery)) {
maxwrk = 0;
mm = *m;
if (*m >= *n && *m >= mnthr) {
/* Path 1a - overdetermined, with many more rows than columns */
mm = *n;
/* Computing MAX */
i__1 = maxwrk, i__2 = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 =
*n + *nrhs * NUMlapack_ilaenv (&c__1, "DORMQR", "LT", m, nrhs, n, &c_n1, 6, 2);
maxwrk = MAX (i__1, i__2);
}
if (*m >= *n) {
/* Path 1 - overdetermined or exactly determined
Compute workspace needed for DBDSQR
Computing MAX */
i__1 = 1, i__2 = *n * 5;
bdspac = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 =
*n * 3 + (mm + *n) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", &mm, n, &c_n1, &c_n1, 6, 1);
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 =
*n * 3 + *nrhs * NUMlapack_ilaenv (&c__1, "DORMBR", "QLT", &mm, nrhs, n, &c_n1, 6, 3);
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 =
*n * 3 + (*n - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", n, n, n, &c_n1, 6, 1);
maxwrk = MAX (i__1, i__2);
maxwrk = MAX (maxwrk, bdspac);
/* Computing MAX */
i__1 = maxwrk, i__2 = *n * *nrhs;
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = *n * 3 + mm, i__2 = *n * 3 + *nrhs, i__1 = MAX (i__1, i__2);
minwrk = MAX (i__1, bdspac);
maxwrk = MAX (minwrk, maxwrk);
}
if (*n > *m) {
/* Compute workspace needed for DBDSQR
Computing MAX */
i__1 = 1, i__2 = *m * 5;
bdspac = MAX (i__1, i__2);
/* Computing MAX */
i__1 = *m * 3 + *nrhs, i__2 = *m * 3 + *n, i__1 = MAX (i__1, i__2);
minwrk = MAX (i__1, bdspac);
if (*n >= mnthr) {
/* Path 2a - underdetermined, with many more columns than
rows */
maxwrk = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__1 = maxwrk, i__2 =
*m * *m + (*m << 2) + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1,
&c_n1, 6, 1);
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 =
*m * *m + (*m << 2) + *nrhs * NUMlapack_ilaenv (&c__1, "DORMBR", "QLT", m, nrhs, m, &c_n1,
6, 3);
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 =
*m * *m + (*m << 2) + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, m, m, &c_n1, 6,
1);
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 = *m * *m + *m + bdspac;
maxwrk = MAX (i__1, i__2);
if (*nrhs > 1) {
/* Computing MAX */
i__1 = maxwrk, i__2 = *m * *m + *m + *m * *nrhs;
maxwrk = MAX (i__1, i__2);
} else {
/* Computing MAX */
i__1 = maxwrk, i__2 = *m * *m + (*m << 1);
maxwrk = MAX (i__1, i__2);
}
/* Computing MAX */
i__1 = maxwrk, i__2 =
*m + *nrhs * NUMlapack_ilaenv (&c__1, "DORMLQ", "LT", n, nrhs, m, &c_n1, 6, 2);
maxwrk = MAX (i__1, i__2);
} else {
/* Path 2 - underdetermined */
maxwrk = *m * 3 + (*n + *m) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__1 = maxwrk, i__2 =
*m * 3 + *nrhs * NUMlapack_ilaenv (&c__1, "DORMBR", "QLT", m, nrhs, m, &c_n1, 6, 3);
maxwrk = MAX (i__1, i__2);
/* Computing MAX */
i__1 = maxwrk, i__2 =
*m * 3 + *m * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, n, m, &c_n1, 6, 1);
maxwrk = MAX (i__1, i__2);
maxwrk = MAX (maxwrk, bdspac);
/* Computing MAX */
i__1 = maxwrk, i__2 = *n * *nrhs;
maxwrk = MAX (i__1, i__2);
}
}
maxwrk = MAX (minwrk, maxwrk);
work[1] = (double) maxwrk;
}
minwrk = MAX (minwrk, 1);
if (*lwork < minwrk && !lquery) {
*info = -12;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("DGELSS", &i__1);
return 0;
} else if (lquery) {
return 0;
}
/* Quick return if possible */
if (*m == 0 || *n == 0) {
*rank = 0;
return 0;
}
/* Get machine parameters */
eps = NUMblas_dlamch ("P");
sfmin = NUMblas_dlamch ("S");
smlnum = sfmin / eps;
bignum = 1. / smlnum;
NUMlapack_dlabad (&smlnum, &bignum);
/* Scale A if max element outside range [SMLNUM,BIGNUM] */
anrm = NUMlapack_dlange ("M", m, n, &a[a_offset], lda, &work[1]);
iascl = 0;
if (anrm > 0. && anrm < smlnum) {
/* Scale matrix norm up to SMLNUM */
NUMlapack_dlascl ("G", &c__0, &c__0, &anrm, &smlnum, m, n, &a[a_offset], lda, info);
iascl = 1;
} else if (anrm > bignum) {
/* Scale matrix norm down to BIGNUM */
NUMlapack_dlascl ("G", &c__0, &c__0, &anrm, &bignum, m, n, &a[a_offset], lda, info);
iascl = 2;
} else if (anrm == 0.) {
/* Matrix all zero. Return zero solution. */
i__1 = MAX (*m, *n);
NUMlapack_dlaset ("F", &i__1, nrhs, &c_b74, &c_b74, &b[b_offset], ldb);
NUMlapack_dlaset ("F", &minmn, &c__1, &c_b74, &c_b74, &s[1], &c__1);
*rank = 0;
goto L70;
}
/* Scale B if max element outside range [SMLNUM,BIGNUM] */
bnrm = NUMlapack_dlange ("M", m, nrhs, &b[b_offset], ldb, &work[1]);
ibscl = 0;
if (bnrm > 0. && bnrm < smlnum) {
/* Scale matrix norm up to SMLNUM */
NUMlapack_dlascl ("G", &c__0, &c__0, &bnrm, &smlnum, m, nrhs, &b[b_offset], ldb, info);
ibscl = 1;
} else if (bnrm > bignum) {
/* Scale matrix norm down to BIGNUM */
NUMlapack_dlascl ("G", &c__0, &c__0, &bnrm, &bignum, m, nrhs, &b[b_offset], ldb, info);
ibscl = 2;
}
/* Overdetermined case */
if (*m >= *n) {
/* Path 1 - overdetermined or exactly determined */
mm = *m;
if (*m >= mnthr) {
/* Path 1a - overdetermined, with many more rows than columns */
mm = *n;
itau = 1;
iwork = itau + *n;
/* Compute A=Q*R (Workspace: need 2*N, prefer N+N*NB) */
i__1 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__1, info);
/* Multiply B by transpose(Q) (Workspace: need N+NRHS, prefer
N+NRHS*NB) */
i__1 = *lwork - iwork + 1;
NUMlapack_dormqr ("L", "T", m, nrhs, n, &a[a_offset], lda, &work[itau], &b[b_offset], ldb,
&work[iwork], &i__1, info);
/* Zero out below R */
if (*n > 1) {
i__1 = *n - 1;
i__2 = *n - 1;
NUMlapack_dlaset ("L", &i__1, &i__2, &c_b74, &c_b74, &a_ref (2, 1), lda);
}
}
ie = 1;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Bidiagonalize R in A (Workspace: need 3*N+MM, prefer
3*N+(MM+N)*NB) */
i__1 = *lwork - iwork + 1;
NUMlapack_dgebrd (&mm, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup], &work[iwork],
&i__1, info);
/* Multiply B by transpose of left bidiagonalizing vectors of R
(Workspace: need 3*N+NRHS, prefer 3*N+NRHS*NB) */
i__1 = *lwork - iwork + 1;
NUMlapack_dormbr ("Q", "L", "T", &mm, nrhs, n, &a[a_offset], lda, &work[itauq], &b[b_offset], ldb,
&work[iwork], &i__1, info);
/* Generate right bidiagonalizing vectors of R in A (Workspace: need
4*N-1, prefer 3*N+(N-1)*NB) */
i__1 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &a[a_offset], lda, &work[itaup], &work[iwork], &i__1, info);
iwork = ie + *n;
/* Perform bidiagonal QR iteration: multiply B by transpose of left
singular vectors, compute right singular vectors in A (Workspace:
need BDSPAC) */
NUMlapack_dbdsqr ("U", n, n, &c__0, nrhs, &s[1], &work[ie], &a[a_offset], lda, vdum, &c__1,
&b[b_offset], ldb, &work[iwork], info);
if (*info != 0) {
goto L70;
}
/* Multiply B by reciprocals of singular values
Computing MAX */
d__1 = *rcond * s[1];
thr = MAX (d__1, sfmin);
if (*rcond < 0.) {
/* Computing MAX */
d__1 = eps * s[1];
thr = MAX (d__1, sfmin);
}
*rank = 0;
i__1 = *n;
for (i__ = 1; i__ <= i__1; ++i__) {
if (s[i__] > thr) {
NUMlapack_drscl (nrhs, &s[i__], &b_ref (i__, 1), ldb);
++ (*rank);
} else {
NUMlapack_dlaset ("F", &c__1, nrhs, &c_b74, &c_b74, &b_ref (i__, 1), ldb);
}
/* L10: */
}
/* Multiply B by right singular vectors (Workspace: need N, prefer
N*NRHS) */
if (*lwork >= *ldb * *nrhs && *nrhs > 1) {
NUMblas_dgemm ("T", "N", n, nrhs, n, &c_b108, &a[a_offset], lda, &b[b_offset], ldb, &c_b74, &work[1],
ldb);
NUMlapack_dlacpy ("G", n, nrhs, &work[1], ldb, &b[b_offset], ldb);
} else if (*nrhs > 1) {
chunk = *lwork / *n;
i__1 = *nrhs;
i__2 = chunk;
for (i__ = 1; i__2 < 0 ? i__ >= i__1 : i__ <= i__1; i__ += i__2) {
/* Computing MIN */
i__3 = *nrhs - i__ + 1;
bl = MIN (i__3, chunk);
NUMblas_dgemm ("T", "N", n, &bl, n, &c_b108, &a[a_offset], lda, &b_ref (1, i__), ldb, &c_b74,
&work[1], n);
NUMlapack_dlacpy ("G", n, &bl, &work[1], n, &b_ref (1, i__), ldb);
/* L20: */
}
} else {
NUMblas_dgemv ("T", n, n, &c_b108, &a[a_offset], lda, &b[b_offset], &c__1, &c_b74, &work[1], &c__1);
NUMblas_dcopy (n, &work[1], &c__1, &b[b_offset], &c__1);
}
} else { /* if(complicated condition) */
/* Computing MAX */
i__2 = *m, i__1 = (*m << 1) - 4, i__2 = MAX (i__2, i__1), i__2 = MAX (i__2, *nrhs), i__1 =
*n - *m * 3;
if (*n >= mnthr && *lwork >= (*m << 2) + *m * *m + MAX (i__2, i__1)) {
/* Path 2a - underdetermined, with many more columns than rows
and sufficient workspace for an efficient algorithm */
ldwork = *m;
/* Computing MAX Computing MAX */
i__3 = *m, i__4 = (*m << 1) - 4, i__3 = MAX (i__3, i__4), i__3 = MAX (i__3, *nrhs), i__4 =
*n - *m * 3;
i__2 = (*m << 2) + *m * *lda + MAX (i__3, i__4), i__1 = *m * *lda + *m + *m * *nrhs;
if (*lwork >= MAX (i__2, i__1)) {
ldwork = *lda;
}
itau = 1;
iwork = *m + 1;
/* Compute A=L*Q (Workspace: need 2*M, prefer M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, info);
il = iwork;
/* Copy L to WORK(IL), zeroing out above it */
NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &work[il], &ldwork);
i__2 = *m - 1;
i__1 = *m - 1;
NUMlapack_dlaset ("U", &i__2, &i__1, &c_b74, &c_b74, &work[il + ldwork], &ldwork);
ie = il + ldwork * *m;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Bidiagonalize L in WORK(IL) (Workspace: need M*M+5*M, prefer
M*M+4*M+2*M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, m, &work[il], &ldwork, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, info);
/* Multiply B by transpose of left bidiagonalizing vectors of L
(Workspace: need M*M+4*M+NRHS, prefer M*M+4*M+NRHS*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("Q", "L", "T", m, nrhs, m, &work[il], &ldwork, &work[itauq], &b[b_offset], ldb,
&work[iwork], &i__2, info);
/* Generate right bidiagonalizing vectors of R in WORK(IL)
(Workspace: need M*M+5*M-1, prefer M*M+4*M+(M-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", m, m, m, &work[il], &ldwork, &work[itaup], &work[iwork], &i__2, info);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing right singular
vectors of L in WORK(IL) and multiplying B by transpose of
left singular vectors (Workspace: need M*M+M+BDSPAC) */
NUMlapack_dbdsqr ("U", m, m, &c__0, nrhs, &s[1], &work[ie], &work[il], &ldwork, &a[a_offset], lda,
&b[b_offset], ldb, &work[iwork], info);
if (*info != 0) {
goto L70;
}
/* Multiply B by reciprocals of singular values
Computing MAX */
d__1 = *rcond * s[1];
thr = MAX (d__1, sfmin);
if (*rcond < 0.) {
/* Computing MAX */
d__1 = eps * s[1];
thr = MAX (d__1, sfmin);
}
*rank = 0;
i__2 = *m;
for (i__ = 1; i__ <= i__2; ++i__) {
if (s[i__] > thr) {
NUMlapack_drscl (nrhs, &s[i__], &b_ref (i__, 1), ldb);
++ (*rank);
} else {
NUMlapack_dlaset ("F", &c__1, nrhs, &c_b74, &c_b74, &b_ref (i__, 1), ldb);
}
/* L30: */
}
iwork = ie;
/* Multiply B by right singular vectors of L in WORK(IL)
(Workspace: need M*M+2*M, prefer M*M+M+M*NRHS) */
if (*lwork >= *ldb * *nrhs + iwork - 1 && *nrhs > 1) {
NUMblas_dgemm ("T", "N", m, nrhs, m, &c_b108, &work[il], &ldwork, &b[b_offset], ldb, &c_b74,
&work[iwork], ldb);
NUMlapack_dlacpy ("G", m, nrhs, &work[iwork], ldb, &b[b_offset], ldb);
} else if (*nrhs > 1) {
chunk = (*lwork - iwork + 1) / *m;
i__2 = *nrhs;
i__1 = chunk;
for (i__ = 1; i__1 < 0 ? i__ >= i__2 : i__ <= i__2; i__ += i__1) {
/* Computing MIN */
i__3 = *nrhs - i__ + 1;
bl = MIN (i__3, chunk);
NUMblas_dgemm ("T", "N", m, &bl, m, &c_b108, &work[il], &ldwork, &b_ref (1, i__), ldb, &c_b74,
&work[iwork], m);   /* leading dimension m, not n: the chunk buffer holds an m-by-bl block */
NUMlapack_dlacpy ("G", m, &bl, &work[iwork], m, &b_ref (1, i__), ldb);
/* L40: */
}
} else {
NUMblas_dgemv ("T", m, m, &c_b108, &work[il], &ldwork, &b_ref (1, 1), &c__1, &c_b74, &work[iwork],
&c__1);
NUMblas_dcopy (m, &work[iwork], &c__1, &b_ref (1, 1), &c__1);
}
/* Zero out below first M rows of B */
i__1 = *n - *m;
NUMlapack_dlaset ("F", &i__1, nrhs, &c_b74, &c_b74, &b_ref (*m + 1, 1), ldb);
iwork = itau + *m;
/* Multiply transpose(Q) by B (Workspace: need M+NRHS, prefer
M+NRHS*NB) */
i__1 = *lwork - iwork + 1;
NUMlapack_dormlq ("L", "T", n, nrhs, m, &a[a_offset], lda, &work[itau], &b[b_offset], ldb,
&work[iwork], &i__1, info);
} else {
/* Path 2 - remaining underdetermined cases */
ie = 1;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Bidiagonalize A (Workspace: need 3*M+N, prefer 3*M+(M+N)*NB) */
i__1 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__1, info);
/* Multiply B by transpose of left bidiagonalizing vectors
(Workspace: need 3*M+NRHS, prefer 3*M+NRHS*NB) */
i__1 = *lwork - iwork + 1;
NUMlapack_dormbr ("Q", "L", "T", m, nrhs, n, &a[a_offset], lda, &work[itauq], &b[b_offset], ldb,
&work[iwork], &i__1, info);
/* Generate right bidiagonalizing vectors in A (Workspace: need
4*M, prefer 3*M+M*NB) */
i__1 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", m, n, m, &a[a_offset], lda, &work[itaup], &work[iwork], &i__1, info);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing right singular
vectors of A in A and multiplying B by transpose of left
singular vectors (Workspace: need BDSPAC) */
NUMlapack_dbdsqr ("L", m, n, &c__0, nrhs, &s[1], &work[ie], &a[a_offset], lda, vdum, &c__1,
&b[b_offset], ldb, &work[iwork], info);
if (*info != 0) {
goto L70;
}
/* Multiply B by reciprocals of singular values
Computing MAX */
d__1 = *rcond * s[1];
thr = MAX (d__1, sfmin);
if (*rcond < 0.) {
/* Computing MAX */
d__1 = eps * s[1];
thr = MAX (d__1, sfmin);
}
*rank = 0;
i__1 = *m;
for (i__ = 1; i__ <= i__1; ++i__) {
if (s[i__] > thr) {
NUMlapack_drscl (nrhs, &s[i__], &b_ref (i__, 1), ldb);
++ (*rank);
} else {
NUMlapack_dlaset ("F", &c__1, nrhs, &c_b74, &c_b74, &b_ref (i__, 1), ldb);
}
/* L50: */
}
/* Multiply B by right singular vectors of A (Workspace: need N,
prefer N*NRHS) */
if (*lwork >= *ldb * *nrhs && *nrhs > 1) {
NUMblas_dgemm ("T", "N", n, nrhs, m, &c_b108, &a[a_offset], lda, &b[b_offset], ldb, &c_b74, &work[1],
ldb);
NUMlapack_dlacpy ("F", n, nrhs, &work[1], ldb, &b[b_offset], ldb);
} else if (*nrhs > 1) {
chunk = *lwork / *n;
i__1 = *nrhs;
i__2 = chunk;
for (i__ = 1; i__2 < 0 ? i__ >= i__1 : i__ <= i__1; i__ += i__2) {
/* Computing MIN */
i__3 = *nrhs - i__ + 1;
bl = MIN (i__3, chunk);
NUMblas_dgemm ("T", "N", n, &bl, m, &c_b108, &a[a_offset], lda, &b_ref (1, i__), ldb, &c_b74,
&work[1], n);
NUMlapack_dlacpy ("F", n, &bl, &work[1], n, &b_ref (1, i__), ldb);
/* L60: */
}
} else {
NUMblas_dgemv ("T", m, n, &c_b108, &a[a_offset], lda, &b[b_offset], &c__1, &c_b74, &work[1], &c__1);
NUMblas_dcopy (n, &work[1], &c__1, &b[b_offset], &c__1);
}
}
}
/* Undo scaling */
if (iascl == 1) {
NUMlapack_dlascl ("G", &c__0, &c__0, &anrm, &smlnum, n, nrhs, &b[b_offset], ldb, info);
NUMlapack_dlascl ("G", &c__0, &c__0, &smlnum, &anrm, &minmn, &c__1, &s[1], &minmn, info);
} else if (iascl == 2) {
NUMlapack_dlascl ("G", &c__0, &c__0, &anrm, &bignum, n, nrhs, &b[b_offset], ldb, info);
NUMlapack_dlascl ("G", &c__0, &c__0, &bignum, &anrm, &minmn, &c__1, &s[1], &minmn, info);
}
if (ibscl == 1) {
NUMlapack_dlascl ("G", &c__0, &c__0, &smlnum, &bnrm, n, nrhs, &b[b_offset], ldb, info);
} else if (ibscl == 2) {
NUMlapack_dlascl ("G", &c__0, &c__0, &bignum, &bnrm, n, nrhs, &b[b_offset], ldb, info);
}
L70:
work[1] = (double) maxwrk;
return 0;
} /* NUMlapack_dgelss */
#undef b_ref
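/*
    Worked sketch of the effective-rank rule used in NUMlapack_dgelss above:
    the threshold is MAX (RCOND * S(1), SFMIN), and every singular value at or
    below it is treated as zero. Illustrative only and not called anywhere;
    `example_effective_rank' is a hypothetical helper, not part of LAPACK.
*/
static integer example_effective_rank (const double *s, integer minmn, double rcond, double sfmin) {
	integer i, rank = 0;
	double thr = rcond * s[0] > sfmin ? rcond * s[0] : sfmin;   /* s[0] is the largest singular value */
	for (i = 0; i < minmn; i++) {
		if (s[i] > thr)
			rank++;   /* kept: divided out in the solve */
		/* else: the corresponding row of B is zeroed, as in the loops above */
	}
	return rank;
}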
int NUMlapack_dgeqpf (integer *m, integer *n, double *a, integer *lda, integer *jpvt, double *tau, double *work, integer *info) {
/* Table of constant values */
static integer c__1 = 1;
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3;
double d__1, d__2;
/* Local variables */
static double temp;
static double temp2;
static integer i__, j;
static integer itemp;
static integer ma, mn;
static double aii;
static integer pvt;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--jpvt;
--tau;
--work;
/* Function Body */
*info = 0;
if (*m < 0) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*lda < MAX (1, *m)) {
*info = -4;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("DGEQPF", &i__1);
return 0;
}
mn = MIN (*m, *n);
/* Move initial columns up front */
itemp = 1;
i__1 = *n;
for (i__ = 1; i__ <= i__1; ++i__) {
if (jpvt[i__] != 0) {
if (i__ != itemp) {
NUMblas_dswap (m, &a_ref (1, i__), &c__1, &a_ref (1, itemp), &c__1);
jpvt[i__] = jpvt[itemp];
jpvt[itemp] = i__;
} else {
jpvt[i__] = i__;
}
++itemp;
} else {
jpvt[i__] = i__;
}
/* L10: */
}
--itemp;
/* Compute the QR factorization and update remaining columns */
if (itemp > 0) {
ma = MIN (itemp, *m);
NUMlapack_dgeqr2 (m, &ma, &a[a_offset], lda, &tau[1], &work[1], info);
if (ma < *n) {
i__1 = *n - ma;
NUMlapack_dorm2r ("Left", "Transpose", m, &i__1, &ma, &a[a_offset], lda, &tau[1], &a_ref (1,
ma + 1), lda, &work[1], info);
}
}
if (itemp < mn) {
/* Initialize partial column norms. The first n elements of work
store the exact column norms. */
i__1 = *n;
for (i__ = itemp + 1; i__ <= i__1; ++i__) {
i__2 = *m - itemp;
work[i__] = NUMblas_dnrm2 (&i__2, &a_ref (itemp + 1, i__), &c__1);
work[*n + i__] = work[i__];
/* L20: */
}
/* Compute factorization */
i__1 = mn;
for (i__ = itemp + 1; i__ <= i__1; ++i__) {
/* Determine ith pivot column and swap if necessary */
i__2 = *n - i__ + 1;
pvt = i__ - 1 + NUMblas_idamax (&i__2, &work[i__], &c__1);
if (pvt != i__) {
NUMblas_dswap (m, &a_ref (1, pvt), &c__1, &a_ref (1, i__), &c__1);
itemp = jpvt[pvt];
jpvt[pvt] = jpvt[i__];
jpvt[i__] = itemp;
work[pvt] = work[i__];
work[*n + pvt] = work[*n + i__];
}
/* Generate elementary reflector H(i) */
if (i__ < *m) {
i__2 = *m - i__ + 1;
NUMlapack_dlarfg (&i__2, &a_ref (i__, i__), &a_ref (i__ + 1, i__), &c__1, &tau[i__]);
} else {
NUMlapack_dlarfg (&c__1, &a_ref (*m, *m), &a_ref (*m, *m), &c__1, &tau[*m]);
}
if (i__ < *n) {
/* Apply H(i) to A(i:m,i+1:n) from the left */
aii = a_ref (i__, i__);
a_ref (i__, i__) = 1.;
i__2 = *m - i__ + 1;
i__3 = *n - i__;
NUMlapack_dlarf ("LEFT", &i__2, &i__3, &a_ref (i__, i__), &c__1, &tau[i__], &a_ref (i__,
i__ + 1), lda, &work[ (*n << 1) + 1]);
a_ref (i__, i__) = aii;
}
/* Update partial column norms */
i__2 = *n;
for (j = i__ + 1; j <= i__2; ++j) {
if (work[j] != 0.) {
/* Computing 2nd power */
d__2 = (d__1 = a_ref (i__, j), fabs (d__1)) / work[j];
temp = 1. - d__2 * d__2;
temp = MAX (temp, 0.);
/* Computing 2nd power */
d__1 = work[j] / work[*n + j];
temp2 = temp * .05 * (d__1 * d__1) + 1.;
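/* The two quantities above implement the classical norm-downdating safeguard:
   work[j] carries a running estimate of the norm of A(i+1:m,j), and work[n+j]
   the value from the last explicit recomputation. TEMP removes the
   contribution of the freshly eliminated row; when TEMP2 == 1. in floating
   point, the downdate has lost all significant digits (the 0.05 factor is
   LAPACK's cancellation tolerance), so the norm is recomputed from scratch
   below instead of being downdated. */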
if (temp2 == 1.) {
if (*m - i__ > 0) {
i__3 = *m - i__;
work[j] = NUMblas_dnrm2 (&i__3, &a_ref (i__ + 1, j), &c__1);
work[*n + j] = work[j];
} else {
work[j] = 0.;
work[*n + j] = 0.;
}
} else {
work[j] *= sqrt (temp);
}
}
/* L30: */
}
/* L40: */
}
}
return 0;
} /* NUMlapack_dgeqpf */
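/*
    Minimal usage sketch for NUMlapack_dgeqpf (illustrative only, not called
    anywhere): factor a 3-by-2 column-major matrix with column pivoting.
    jpvt[j] == 0 on entry marks column j+1 as free; on exit jpvt holds the
    permutation. The helper name is hypothetical.
*/
static int example_dgeqpf (void) {
	integer m = 3, n = 2, lda = 3, info;
	double a[6] = { 1.0, 2.0, 3.0,   4.0, 5.0, 6.0 };   /* columns of A */
	integer jpvt[2] = { 0, 0 };   /* all columns free */
	double tau[2], work[6];   /* work needs 3*n doubles */
	NUMlapack_dgeqpf (&m, &n, a, &lda, jpvt, tau, work, &info);
	return (int) info;   /* 0 on success; R is in the upper triangle of a */
}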
int NUMlapack_dgeqr2 (integer *m, integer *n, double *a, integer *lda, double *tau, double *work, integer *info) {
/* Table of constant values */
static integer c__1 = 1;
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3;
/* Local variables */
static integer i__, k;
static double aii;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--tau;
--work;
/* Function Body */
*info = 0;
if (*m < 0) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*lda < MAX (1, *m)) {
*info = -4;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("DGEQR2", &i__1);
return 0;
}
k = MIN (*m, *n);
i__1 = k;
for (i__ = 1; i__ <= i__1; ++i__) {
/* Generate elementary reflector H(i) to annihilate A(i+1:m,i)
Computing MIN */
i__2 = i__ + 1;
i__3 = *m - i__ + 1;
NUMlapack_dlarfg (&i__3, &a_ref (i__, i__), &a_ref (MIN (i__2, *m), i__), &c__1, &tau[i__]);
if (i__ < *n) {
/* Apply H(i) to A(i:m,i+1:n) from the left */
aii = a_ref (i__, i__);
a_ref (i__, i__) = 1.;
i__2 = *m - i__ + 1;
i__3 = *n - i__;
NUMlapack_dlarf ("Left", &i__2, &i__3, &a_ref (i__, i__), &c__1, &tau[i__], &a_ref (i__, i__ + 1),
lda, &work[1]);
a_ref (i__, i__) = aii;
}
/* L10: */
}
return 0;
} /* NUMlapack_dgeqr2 */
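/*
    Sketch of the storage convention produced by NUMlapack_dgeqr2 (and by the
    blocked NUMlapack_dgeqrf below): R ends up in the upper triangle of A,
    while each Householder vector v(i) is stored below the diagonal of column
    i with v(i)(i) == 1 implicit and its scalar factor in tau[i-1].
    Illustrative only; the helper name is hypothetical.
*/
static int example_dgeqr2 (void) {
	integer m = 3, n = 2, lda = 3, info;
	double a[6] = { 12.0, 6.0, -4.0,   -51.0, 167.0, 24.0 };   /* column-major */
	double tau[2], work[2];   /* work needs n doubles */
	NUMlapack_dgeqr2 (&m, &n, a, &lda, tau, work, &info);
	/* a[0] == R(1,1), a[3] == R(1,2), a[4] == R(2,2);
	   a[1], a[2] and a[5] hold the reflector entries below the diagonal. */
	return (int) info;
}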
int NUMlapack_dgeqrf (integer *m, integer *n, double *a, integer *lda, double *tau, double *work, integer *lwork,
integer *info) {
/* Table of constant values */
static integer c__1 = 1;
static integer c_n1 = -1;
static integer c__3 = 3;
static integer c__2 = 2;
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2, i__3, i__4;
/* Local variables */
static integer i__, k, nbmin, iinfo;
static integer ib, nb;
static integer nx;
static integer ldwork, lwkopt;
static integer lquery;
static integer iws;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--tau;
--work;
/* Function Body */
*info = 0;
nb = NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
lwkopt = *n * nb;
work[1] = (double) lwkopt;
lquery = *lwork == -1;
if (*m < 0) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*lda < MAX (1, *m)) {
*info = -4;
} else if (*lwork < MAX (1, *n) && !lquery) {
*info = -7;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("DGEQRF", &i__1);
return 0;
} else if (lquery) {
return 0;
}
/* Quick return if possible */
k = MIN (*m, *n);
if (k == 0) {
work[1] = 1.;
return 0;
}
nbmin = 2;
nx = 0;
iws = *n;
if (nb > 1 && nb < k) {
/* Determine when to cross over from blocked to unblocked code.
Computing MAX */
i__1 = 0, i__2 = NUMlapack_ilaenv (&c__3, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
nx = MAX (i__1, i__2);
if (nx < k) {
/* Determine if workspace is large enough for blocked code. */
ldwork = *n;
iws = ldwork * nb;
if (*lwork < iws) {
/* Not enough workspace to use optimal NB: reduce NB and
determine the minimum value of NB. */
nb = *lwork / ldwork;
/* Computing MAX */
i__1 = 2, i__2 = NUMlapack_ilaenv (&c__2, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
nbmin = MAX (i__1, i__2);
}
}
}
if (nb >= nbmin && nb < k && nx < k) {
/* Use blocked code initially */
i__1 = k - nx;
i__2 = nb;
for (i__ = 1; i__2 < 0 ? i__ >= i__1 : i__ <= i__1; i__ += i__2) {
/* Computing MIN */
i__3 = k - i__ + 1;
ib = MIN (i__3, nb);
/* Compute the QR factorization of the current block
A(i:m,i:i+ib-1) */
i__3 = *m - i__ + 1;
NUMlapack_dgeqr2 (&i__3, &ib, &a_ref (i__, i__), lda, &tau[i__], &work[1], &iinfo);
if (i__ + ib <= *n) {
/* Form the triangular factor of the block reflector H = H(i)
H(i+1) . . . H(i+ib-1) */
i__3 = *m - i__ + 1;
NUMlapack_dlarft ("Forward", "Columnwise", &i__3, &ib, &a_ref (i__, i__), lda, &tau[i__],
&work[1], &ldwork);
/* Apply H' to A(i:m,i+ib:n) from the left */
i__3 = *m - i__ + 1;
i__4 = *n - i__ - ib + 1;
NUMlapack_dlarfb ("Left", "Transpose", "Forward", "Columnwise", &i__3, &i__4, &ib, &a_ref (i__,
i__), lda, &work[1], &ldwork, &a_ref (i__, i__ + ib), lda, &work[ib + 1], &ldwork);
}
/* L10: */
}
} else {
i__ = 1;
}
/* Use unblocked code to factor the last or only block. */
if (i__ <= k) {
i__2 = *m - i__ + 1;
i__1 = *n - i__ + 1;
NUMlapack_dgeqr2 (&i__2, &i__1, &a_ref (i__, i__), lda, &tau[i__], &work[1], &iinfo);
}
work[1] = (double) iws;
return 0;
} /* NUMlapack_dgeqrf */
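/*
    The LWORK = -1 convention handled above: a query call only writes the
    optimal workspace size into work[1] (Fortran indexing) and returns.
    A minimal sketch of the two-call pattern, assuming malloc/<stdlib.h>
    are available in this translation unit; the helper name is hypothetical.
*/
static int example_dgeqrf_query (integer m, integer n, double *a, integer lda, double *tau) {
	integer info, lwork = -1;
	double wkopt;
	NUMlapack_dgeqrf (&m, &n, a, &lda, tau, &wkopt, &lwork, &info);   /* workspace query */
	if (info != 0) return (int) info;
	lwork = (integer) wkopt;
	double *work = (double *) malloc ((size_t) lwork * sizeof (double));
	if (! work) return -1;
	NUMlapack_dgeqrf (&m, &n, a, &lda, tau, work, &lwork, &info);   /* actual factorization */
	free (work);
	return (int) info;
}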
int NUMlapack_dgerq2 (integer *m, integer *n, double *a, integer *lda, double *tau, double *work, integer *info) {
/* System generated locals */
integer a_dim1, a_offset, i__1, i__2;
/* Local variables */
static integer i__, k;
static double aii;
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--tau;
--work;
/* Function Body */
*info = 0;
if (*m < 0) {
*info = -1;
} else if (*n < 0) {
*info = -2;
} else if (*lda < MAX (1, *m)) {
*info = -4;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("DGERQ2", &i__1);
return 0;
}
k = MIN (*m, *n);
for (i__ = k; i__ >= 1; --i__) {
/* Generate elementary reflector H(i) to annihilate
A(m-k+i,1:n-k+i-1) */
i__1 = *n - k + i__;
NUMlapack_dlarfg (&i__1, &a_ref (*m - k + i__, *n - k + i__), &a_ref (*m - k + i__, 1), lda, &tau[i__]);
/* Apply H(i) to A(1:m-k+i-1,1:n-k+i) from the right */
aii = a_ref (*m - k + i__, *n - k + i__);
a_ref (*m - k + i__, *n - k + i__) = 1.;
i__1 = *m - k + i__ - 1;
i__2 = *n - k + i__;
NUMlapack_dlarf ("Right", &i__1, &i__2, &a_ref (*m - k + i__, 1), lda, &tau[i__], &a[a_offset], lda,
&work[1]);
a_ref (*m - k + i__, *n - k + i__) = aii;
/* L10: */
}
return 0;
} /* NUMlapack_dgerq2 */
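/*
    NUMlapack_dgerq2 is the RQ analogue of NUMlapack_dgeqr2: for m <= n it
    leaves R upper triangular in the last m columns of A, with the reflector
    entries stored in the leading part of each row. Illustrative call only;
    the helper name is hypothetical.
*/
static int example_dgerq2 (void) {
	integer m = 2, n = 3, lda = 2, info;
	double a[6] = { 1.0, 4.0,   2.0, 5.0,   3.0, 6.0 };   /* 2 x 3, column-major */
	double tau[2], work[2];   /* work needs m doubles */
	NUMlapack_dgerq2 (&m, &n, a, &lda, tau, work, &info);
	return (int) info;
}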
int NUMlapack_dgesv (integer *n, integer *nrhs, double *a, integer *lda, integer *ipiv, double *b, integer *ldb, integer *info) {
/* System generated locals */
integer a_dim1, a_offset, b_dim1, b_offset, i__1;
/* Local variables */
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--ipiv;
b_dim1 = *ldb;
b_offset = 1 + b_dim1 * 1;
b -= b_offset;
/* Function Body */
*info = 0;
if (*n < 0) {
*info = -1;
} else if (*nrhs < 0) {
*info = -2;
} else if (*lda < MAX (1, *n)) {
*info = -4;
} else if (*ldb < MAX (1, *n)) {
*info = -7;
}
if (*info != 0) {
i__1 = - (*info);
xerbla_ ("DGESV ", &i__1);
return 0;
}
/* Compute the LU factorization of A. */
NUMlapack_dgetrf (n, n, &a[a_offset], lda, &ipiv[1], info);
if (*info == 0) {
/* Solve the system A*X = B, overwriting B with X. */
NUMlapack_dgetrs ("No transpose", n, nrhs, &a[a_offset], lda, &ipiv[1], &b[b_offset], ldb, info);
}
return 0;
} /* NUMlapack_dgesv */
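/*
    Usage sketch for NUMlapack_dgesv: LU-factor A with partial pivoting
    (pivots returned 1-based in ipiv) and overwrite B with the solution of
    A * X = B. Illustrative only; the helper name is hypothetical.
*/
static int example_dgesv (void) {
	integer n = 2, nrhs = 1, lda = 2, ldb = 2, info;
	integer ipiv[2];
	double a[4] = { 2.0, 1.0,   1.0, 3.0 };   /* column-major A */
	double b[2] = { 3.0, 5.0 };               /* right-hand side */
	NUMlapack_dgesv (&n, &nrhs, a, &lda, ipiv, b, &ldb, &info);
	/* on success b now holds x with 2*x[0] + x[1] = 3 and x[0] + 3*x[1] = 5 */
	return (int) info;
}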
#define u_ref(a_1,a_2) u[(a_2)*u_dim1 + a_1]
#define vt_ref(a_1,a_2) vt[(a_2)*vt_dim1 + a_1]
int NUMlapack_dgesvd (const char *jobu, const char *jobvt, integer *m, integer *n, double *a, integer *lda, double *s, double *u,
integer *ldu, double *vt, integer *ldvt, double *work, integer *lwork, integer *info) {
/* System generated locals */
const char *a__1[2];
integer a_dim1, a_offset, u_dim1, u_offset, vt_dim1, vt_offset, i__1[2], i__2, i__3, i__4;
char ch__1[2];
/* Local variables */
static integer iscl;
static double anrm;
static integer ierr, itau, ncvt, nrvt, i__;
static integer chunk, minmn, wrkbl, itaup, itauq, mnthr, iwork;
static integer wntua, wntva, wntun, wntuo, wntvn, wntvo, wntus, wntvs;
static integer ie;
static integer ir, bdspac, iu;
static double bignum;
static integer ldwrkr, minwrk, ldwrku, maxwrk;
static double smlnum;
static integer lquery, wntuas, wntvas;
static integer blk, ncu;
static double dum[1], eps;
static integer nru;
/* Parameter adjustments */
a_dim1 = *lda;
a_offset = 1 + a_dim1 * 1;
a -= a_offset;
--s;
u_dim1 = *ldu;
u_offset = 1 + u_dim1 * 1;
u -= u_offset;
vt_dim1 = *ldvt;
vt_offset = 1 + vt_dim1 * 1;
vt -= vt_offset;
--work;
/* Function Body */
*info = 0;
minmn = MIN (*m, *n);
/* Writing concatenation */
i__1[0] = 1, a__1[0] = jobu;
i__1[1] = 1, a__1[1] = jobvt;
s_cat (ch__1, a__1, i__1, &c__2, 2);
mnthr = NUMlapack_ilaenv (&c__6, "DGESVD", ch__1, m, n, &c__0, &c__0, 6, 2);
wntua = lsame_ (jobu, "A");
wntus = lsame_ (jobu, "S");
wntuas = wntua || wntus;
wntuo = lsame_ (jobu, "O");
wntun = lsame_ (jobu, "N");
wntva = lsame_ (jobvt, "A");
wntvs = lsame_ (jobvt, "S");
wntvas = wntva || wntvs;
wntvo = lsame_ (jobvt, "O");
wntvn = lsame_ (jobvt, "N");
minwrk = 1;
lquery = *lwork == -1;
if (! (wntua || wntus || wntuo || wntun)) {
*info = -1;
} else if (! (wntva || wntvs || wntvo || wntvn) || wntvo && wntuo) {
*info = -2;
} else if (*m < 0) {
*info = -3;
} else if (*n < 0) {
*info = -4;
} else if (*lda < MAX (1, *m)) {
*info = -6;
} else if (*ldu < 1 || wntuas && *ldu < *m) {
*info = -9;
} else if (*ldvt < 1 || wntva && *ldvt < *n || wntvs && *ldvt < minmn) {
*info = -11;
}
/* Compute workspace (Note: Comments in the code beginning "Workspace:"
describe the minimal amount of workspace needed at that point in the
code, as well as the preferred amount for good performance. NB refers
to the optimal block size for the immediately following subroutine, as
returned by ILAENV.) */
if (*info == 0 && (*lwork >= 1 || lquery) && *m > 0 && *n > 0) {
if (*m >= *n) {
/* Compute space needed for DBDSQR */
bdspac = *n * 5;
if (*m >= mnthr) {
if (wntun) {
/* Path 1 (M much larger than N, JOBU='N') */
maxwrk = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = maxwrk, i__3 =
*n * 3 + (*n << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", n, n, &c_n1, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
if (wntvo || wntvas) {
/* Computing MAX */
i__2 = maxwrk, i__3 =
*n * 3 + (*n - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", n, n, n, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
}
maxwrk = MAX (maxwrk, bdspac);
/* Computing MAX */
i__2 = *n << 2;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntuo && wntvn) {
/* Path 2 (M much larger than N, JOBU='O', JOBVT='N') */
wrkbl = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n + *n * NUMlapack_ilaenv (&c__1, "DORGQR", " ", m, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", n, n, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + *n * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
/* Computing MAX */
i__2 = *n * *n + wrkbl, i__3 = *n * *n + *m * *n + *n;
maxwrk = MAX (i__2, i__3);
/* Computing MAX */
i__2 = *n * 3 + *m;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntuo && wntvas) {
/* Path 3 (M much larger than N, JOBU='O', JOBVT='S' or
'A') */
wrkbl = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n + *n * NUMlapack_ilaenv (&c__1, "DORGQR", " ", m, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", n, n, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + *n * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
/* Computing MAX */
i__2 = *n * *n + wrkbl, i__3 = *n * *n + *m * *n + *n;
maxwrk = MAX (i__2, i__3);
/* Computing MAX */
i__2 = *n * 3 + *m;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntus && wntvn) {
/* Path 4 (M much larger than N, JOBU='S', JOBVT='N') */
wrkbl = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n + *n * NUMlapack_ilaenv (&c__1, "DORGQR", " ", m, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", n, n, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + *n * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = *n * *n + wrkbl;
/* Computing MAX */
i__2 = *n * 3 + *m;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntus && wntvo) {
/* Path 5 (M much larger than N, JOBU='S', JOBVT='O') */
wrkbl = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n + *n * NUMlapack_ilaenv (&c__1, "DORGQR", " ", m, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", n, n, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + *n * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = (*n << 1) * *n + wrkbl;
/* Computing MAX */
i__2 = *n * 3 + *m;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntus && wntvas) {
/* Path 6 (M much larger than N, JOBU='S', JOBVT='S' or
'A') */
wrkbl = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n + *n * NUMlapack_ilaenv (&c__1, "DORGQR", " ", m, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", n, n, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + *n * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = *n * *n + wrkbl;
/* Computing MAX */
i__2 = *n * 3 + *m;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntua && wntvn) {
/* Path 7 (M much larger than N, JOBU='A', JOBVT='N') */
wrkbl = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n + *m * NUMlapack_ilaenv (&c__1, "DORGQR", " ", m, m, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", n, n, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + *n * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = *n * *n + wrkbl;
/* Computing MAX */
i__2 = *n * 3 + *m;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntua && wntvo) {
/* Path 8 (M much larger than N, JOBU='A', JOBVT='O') */
wrkbl = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n + *m * NUMlapack_ilaenv (&c__1, "DORGQR", " ", m, m, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", n, n, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + *n * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = (*n << 1) * *n + wrkbl;
/* Computing MAX */
i__2 = *n * 3 + *m;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntua && wntvas) {
/* Path 9 (M much larger than N, JOBU='A', JOBVT='S' or
'A') */
wrkbl = *n + *n * NUMlapack_ilaenv (&c__1, "DGEQRF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n + *m * NUMlapack_ilaenv (&c__1, "DORGQR", " ", m, m, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", n, n, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + *n * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*n * 3 + (*n - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", n, n, n, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = *n * *n + wrkbl;
/* Computing MAX */
i__2 = *n * 3 + *m;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
}
} else {
/* Path 10 (M at least N, but not much larger) */
maxwrk = *n * 3 + (*m + *n) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, n, &c_n1, &c_n1, 6, 1);
if (wntus || wntuo) {
/* Computing MAX */
i__2 = maxwrk, i__3 =
*n * 3 + *n * NUMlapack_ilaenv (&c__1, "DORG" "BR", "Q", m, n, n, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
}
if (wntua) {
/* Computing MAX */
i__2 = maxwrk, i__3 =
*n * 3 + *m * NUMlapack_ilaenv (&c__1, "DORG" "BR", "Q", m, m, n, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
}
if (!wntvn) {
/* Computing MAX */
i__2 = maxwrk, i__3 =
*n * 3 + (*n - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", n, n, n, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
}
maxwrk = MAX (maxwrk, bdspac);
/* Computing MAX */
i__2 = *n * 3 + *m;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
}
} else {
/* Compute space needed for DBDSQR */
bdspac = *m * 5;
if (*n >= mnthr) {
if (wntvn) {
/* Path 1t(N much larger than M, JOBVT='N') */
maxwrk = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = maxwrk, i__3 =
*m * 3 + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
if (wntuo || wntuas) {
/* Computing MAX */
i__2 = maxwrk, i__3 =
*m * 3 + *m * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", m, m, m, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
}
maxwrk = MAX (maxwrk, bdspac);
/* Computing MAX */
i__2 = *m << 2;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntvo && wntun) {
/* Path 2t(N much larger than M, JOBU='N', JOBVT='O') */
wrkbl = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m + *m * NUMlapack_ilaenv (&c__1, "DORGLQ", " ", m, n, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
/* Computing MAX */
i__2 = *m * *m + wrkbl, i__3 = *m * *m + *m * *n + *m;
maxwrk = MAX (i__2, i__3);
/* Computing MAX */
i__2 = *m * 3 + *n;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntvo && wntuas) {
/* Path 3t(N much larger than M, JOBU='S' or 'A',
JOBVT='O') */
wrkbl = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m + *m * NUMlapack_ilaenv (&c__1, "DORGLQ", " ", m, n, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + *m * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
/* Computing MAX */
i__2 = *m * *m + wrkbl, i__3 = *m * *m + *m * *n + *m;
maxwrk = MAX (i__2, i__3);
/* Computing MAX */
i__2 = *m * 3 + *n;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntvs && wntun) {
/* Path 4t(N much larger than M, JOBU='N', JOBVT='S') */
wrkbl = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m + *m * NUMlapack_ilaenv (&c__1, "DORGLQ", " ", m, n, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = *m * *m + wrkbl;
/* Computing MAX */
i__2 = *m * 3 + *n;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntvs && wntuo) {
/* Path 5t(N much larger than M, JOBU='O', JOBVT='S') */
wrkbl = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m + *m * NUMlapack_ilaenv (&c__1, "DORGLQ", " ", m, n, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + *m * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = (*m << 1) * *m + wrkbl;
/* Computing MAX */
i__2 = *m * 3 + *n;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntvs && wntuas) {
/* Path 6t(N much larger than M, JOBU='S' or 'A',
JOBVT='S') */
wrkbl = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m + *m * NUMlapack_ilaenv (&c__1, "DORGLQ", " ", m, n, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + *m * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = *m * *m + wrkbl;
/* Computing MAX */
i__2 = *m * 3 + *n;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntva && wntun) {
/* Path 7t(N much larger than M, JOBU='N', JOBVT='A') */
wrkbl = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m + *n * NUMlapack_ilaenv (&c__1, "DORGLQ", " ", n, n, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = *m * *m + wrkbl;
/* Computing MAX */
i__2 = *m * 3 + *n;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntva && wntuo) {
/* Path 8t(N much larger than M, JOBU='O', JOBVT='A') */
wrkbl = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m + *n * NUMlapack_ilaenv (&c__1, "DORGLQ", " ", n, n, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + *m * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = (*m << 1) * *m + wrkbl;
/* Computing MAX */
i__2 = *m * 3 + *n;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
} else if (wntva && wntuas) {
/* Path 9t(N much larger than M, JOBU='S' or 'A',
JOBVT='A') */
wrkbl = *m + *m * NUMlapack_ilaenv (&c__1, "DGELQF", " ", m, n, &c_n1, &c_n1, 6, 1);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m + *n * NUMlapack_ilaenv (&c__1, "DORGLQ", " ", n, n, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m << 1) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, m, &c_n1, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "P", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
/* Computing MAX */
i__2 = wrkbl, i__3 =
*m * 3 + *m * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", m, m, m, &c_n1, 6, 1);
wrkbl = MAX (i__2, i__3);
wrkbl = MAX (wrkbl, bdspac);
maxwrk = *m * *m + wrkbl;
/* Computing MAX */
i__2 = *m * 3 + *n;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
}
} else {
/* Path 10t(N greater than M, but not much larger) */
maxwrk = *m * 3 + (*m + *n) * NUMlapack_ilaenv (&c__1, "DGEBRD", " ", m, n, &c_n1, &c_n1, 6, 1);
if (wntvs || wntvo) {
/* Computing MAX */
i__2 = maxwrk, i__3 =
*m * 3 + *m * NUMlapack_ilaenv (&c__1, "DORG" "BR", "P", m, n, m, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
}
if (wntva) {
/* Computing MAX */
i__2 = maxwrk, i__3 =
*m * 3 + *n * NUMlapack_ilaenv (&c__1, "DORG" "BR", "P", n, n, m, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
}
if (!wntun) {
/* Computing MAX */
i__2 = maxwrk, i__3 =
*m * 3 + (*m - 1) * NUMlapack_ilaenv (&c__1, "DORGBR", "Q", m, m, m, &c_n1, 6, 1);
maxwrk = MAX (i__2, i__3);
}
maxwrk = MAX (maxwrk, bdspac);
/* Computing MAX */
i__2 = *m * 3 + *n;
minwrk = MAX (i__2, bdspac);
maxwrk = MAX (maxwrk, minwrk);
}
}
work[1] = (double) maxwrk;
}
if (*lwork < minwrk && !lquery) {
*info = -13;
}
if (*info != 0) {
i__2 = - (*info);
xerbla_ ("DGESVD", &i__2);
return 0;
} else if (lquery) {
return 0;
}
/* Quick return if possible */
if (*m == 0 || *n == 0) {
if (*lwork >= 1) {
work[1] = 1.;
}
return 0;
}
/* Get machine constants */
eps = NUMblas_dlamch ("P");
smlnum = sqrt (NUMblas_dlamch ("S")) / eps;
bignum = 1. / smlnum;
/* Scale A if max element outside range [SMLNUM,BIGNUM] */
anrm = NUMlapack_dlange ("M", m, n, &a[a_offset], lda, dum);
iscl = 0;
if (anrm > 0. && anrm < smlnum) {
iscl = 1;
NUMlapack_dlascl ("G", &c__0, &c__0, &anrm, &smlnum, m, n, &a[a_offset], lda, &ierr);
} else if (anrm > bignum) {
iscl = 1;
NUMlapack_dlascl ("G", &c__0, &c__0, &anrm, &bignum, m, n, &a[a_offset], lda, &ierr);
}
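/* The scaling above maps the largest entry of A into [SMLNUM, BIGNUM] so that
   the bidiagonal QR iteration below runs safely away from underflow and
   overflow; ISCL records that the computed singular values must be scaled
   back after the iteration. */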
  3329. if (*m >= *n) {
  3330. /* A has at least as many rows as columns. If A has sufficiently more
  3331. rows than columns, first reduce using the QR decomposition (if
  3332. sufficient workspace available) */
  3333. if (*m >= mnthr) {
  3334. if (wntun) {
  3335. /* Path 1 (M much larger than N, JOBU='N') No left singular
  3336. vectors to be computed */
  3337. itau = 1;
  3338. iwork = itau + *n;
  3339. /* Compute A=Q*R (Workspace: need 2*N, prefer N+N*NB) */
  3340. i__2 = *lwork - iwork + 1;
  3341. NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
  3342. /* Zero out below R */
  3343. i__2 = *n - 1;
  3344. i__3 = *n - 1;
  3345. NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &a_ref (2, 1), lda);
  3346. ie = 1;
  3347. itauq = ie + *n;
  3348. itaup = itauq + *n;
  3349. iwork = itaup + *n;
  3350. /* Bidiagonalize R in A (Workspace: need 4*N, prefer
  3351. 3*N+2*N*NB) */
  3352. i__2 = *lwork - iwork + 1;
  3353. NUMlapack_dgebrd (n, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
  3354. &work[iwork], &i__2, &ierr);
  3355. ncvt = 0;
  3356. if (wntvo || wntvas) {
  3357. /* If right singular vectors desired, generate P'.
  3358. (Workspace: need 4*N-1, prefer 3*N+(N-1)*NB) */
  3359. i__2 = *lwork - iwork + 1;
  3360. NUMlapack_dorgbr ("P", n, n, n, &a[a_offset], lda, &work[itaup], &work[iwork], &i__2,
  3361. &ierr);
  3362. ncvt = *n;
  3363. }
  3364. iwork = ie + *n;
  3365. /* Perform bidiagonal QR iteration, computing right singular
  3366. vectors of A in A if desired (Workspace: need BDSPAC) */
  3367. NUMlapack_dbdsqr ("U", n, &ncvt, &c__0, &c__0, &s[1], &work[ie], &a[a_offset], lda, dum, &c__1,
  3368. dum, &c__1, &work[iwork], info);
  3369. /* If right singular vectors desired in VT, copy them there */
  3370. if (wntvas) {
  3371. NUMlapack_dlacpy ("F", n, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
  3372. }
  3373. } else if (wntuo && wntvn) {
  3374. /* Path 2 (M much larger than N, JOBU='O', JOBVT='N') N left
  3375. singular vectors to be overwritten on A and no right
  3376. singular vectors to be computed
  3377. Computing MAX */
  3378. i__2 = *n << 2;
  3379. if (*lwork >= *n * *n + MAX (i__2, bdspac)) {
  3380. /* Sufficient workspace for a fast algorithm */
  3381. ir = 1;
  3382. /* Computing MAX */
  3383. i__2 = wrkbl, i__3 = *lda * *n + *n;
  3384. if (*lwork >= MAX (i__2, i__3) + *lda * *n) {
  3385. /* WORK(IU) is LDA by N, WORK(IR) is LDA by N */
  3386. ldwrku = *lda;
  3387. ldwrkr = *lda;
  3388. } else { /* if(complicated condition) */
  3389. /* Computing MAX */
  3390. i__2 = wrkbl, i__3 = *lda * *n + *n;
  3391. if (*lwork >= MAX (i__2, i__3) + *n * *n) {
  3392. /* WORK(IU) is LDA by N, WORK(IR) is N by N */
  3393. ldwrku = *lda;
  3394. ldwrkr = *n;
  3395. } else {
  3396. /* WORK(IU) is LDWRKU by N, WORK(IR) is N by N */
  3397. ldwrku = (*lwork - *n * *n - *n) / *n;
  3398. ldwrkr = *n;
  3399. }
  3400. }
  3401. itau = ir + ldwrkr * *n;
  3402. iwork = itau + *n;
  3403. /* Compute A=Q*R (Workspace: need N*N+2*N, prefer
  3404. N*N+N+N*NB) */
  3405. i__2 = *lwork - iwork + 1;
  3406. NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
  3407. /* Copy R to WORK(IR) and zero out below it */
  3408. NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &work[ir], &ldwrkr);
  3409. i__2 = *n - 1;
  3410. i__3 = *n - 1;
  3411. NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &work[ir + 1], &ldwrkr);
  3412. /* Generate Q in A (Workspace: need N*N+2*N, prefer
  3413. N*N+N+N*NB) */
  3414. i__2 = *lwork - iwork + 1;
  3415. NUMlapack_dorgqr (m, n, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
  3416. ie = itau;
  3417. itauq = ie + *n;
  3418. itaup = itauq + *n;
  3419. iwork = itaup + *n;
  3420. /* Bidiagonalize R in WORK(IR) (Workspace: need N*N+4*N,
  3421. prefer N*N+3*N+2*N*NB) */
  3422. i__2 = *lwork - iwork + 1;
  3423. NUMlapack_dgebrd (n, n, &work[ir], &ldwrkr, &s[1], &work[ie], &work[itauq], &work[itaup],
  3424. &work[iwork], &i__2, &ierr);
  3425. /* Generate left vectors bidiagonalizing R (Workspace:
  3426. need N*N+4*N, prefer N*N+3*N+N*NB) */
  3427. i__2 = *lwork - iwork + 1;
  3428. NUMlapack_dorgbr ("Q", n, n, n, &work[ir], &ldwrkr, &work[itauq], &work[iwork], &i__2,
  3429. &ierr);
  3430. iwork = ie + *n;
  3431. /* Perform bidiagonal QR iteration, computing left
  3432. singular vectors of R in WORK(IR) (Workspace: need
  3433. N*N+BDSPAC) */
  3434. NUMlapack_dbdsqr ("U", n, &c__0, n, &c__0, &s[1], &work[ie], dum, &c__1, &work[ir], &ldwrkr,
  3435. dum, &c__1, &work[iwork], info);
  3436. iu = ie + *n;
  3437. /* Multiply Q in A by left singular vectors of R in
  3438. WORK(IR), storing result in WORK(IU) and copying to A
  3439. (Workspace: need N*N+2*N, prefer N*N+M*N+N) */
  3440. i__2 = *m;
  3441. i__3 = ldwrku;
  3442. for (i__ = 1; i__3 < 0 ? i__ >= i__2 : i__ <= i__2; i__ += i__3) {
  3443. /* Computing MIN */
  3444. i__4 = *m - i__ + 1;
  3445. chunk = MIN (i__4, ldwrku);
  3446. NUMblas_dgemm ("N", "N", &chunk, n, n, &c_b438, &a_ref (i__, 1), lda, &work[ir], &ldwrkr,
  3447. &c_b416, &work[iu], &ldwrku);
  3448. NUMlapack_dlacpy ("F", &chunk, n, &work[iu], &ldwrku, &a_ref (i__, 1), lda);
  3449. /* L10: */
  3450. }
  3451. } else {
  3452. /* Insufficient workspace for a fast algorithm */
  3453. ie = 1;
  3454. itauq = ie + *n;
  3455. itaup = itauq + *n;
  3456. iwork = itaup + *n;
  3457. /* Bidiagonalize A (Workspace: need 3*N+M, prefer
  3458. 3*N+(M+N)*NB) */
  3459. i__3 = *lwork - iwork + 1;
  3460. NUMlapack_dgebrd (m, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
  3461. &work[iwork], &i__3, &ierr);
  3462. /* Generate left vectors bidiagonalizing A (Workspace:
  3463. need 4*N, prefer 3*N+N*NB) */
  3464. i__3 = *lwork - iwork + 1;
  3465. NUMlapack_dorgbr ("Q", m, n, n, &a[a_offset], lda, &work[itauq], &work[iwork], &i__3,
  3466. &ierr);
  3467. iwork = ie + *n;
  3468. /* Perform bidiagonal QR iteration, computing left
  3469. singular vectors of A in A (Workspace: need BDSPAC) */
  3470. NUMlapack_dbdsqr ("U", n, &c__0, m, &c__0, &s[1], &work[ie], dum, &c__1, &a[a_offset], lda,
  3471. dum, &c__1, &work[iwork], info);
  3472. }
  3473. } else if (wntuo && wntvas) {
  3474. /* Path 3 (M much larger than N, JOBU='O', JOBVT='S' or 'A')
  3475. N left singular vectors to be overwritten on A and N right
  3476. singular vectors to be computed in VT
  3477. Computing MAX */
  3478. i__3 = *n << 2;
  3479. if (*lwork >= *n * *n + MAX (i__3, bdspac)) {
  3480. /* Sufficient workspace for a fast algorithm */
  3481. ir = 1;
  3482. /* Computing MAX */
  3483. i__3 = wrkbl, i__2 = *lda * *n + *n;
  3484. if (*lwork >= MAX (i__3, i__2) + *lda * *n) {
  3485. /* WORK(IU) is LDA by N and WORK(IR) is LDA by N */
  3486. ldwrku = *lda;
  3487. ldwrkr = *lda;
  3488. } else { /* if(complicated condition) */
  3489. /* Computing MAX */
  3490. i__3 = wrkbl, i__2 = *lda * *n + *n;
  3491. if (*lwork >= MAX (i__3, i__2) + *n * *n) {
  3492. /* WORK(IU) is LDA by N and WORK(IR) is N by N */
  3493. ldwrku = *lda;
  3494. ldwrkr = *n;
  3495. } else {
  3496. /* WORK(IU) is LDWRKU by N and WORK(IR) is N by N
  3497. */
  3498. ldwrku = (*lwork - *n * *n - *n) / *n;
  3499. ldwrkr = *n;
  3500. }
  3501. }
  3502. itau = ir + ldwrkr * *n;
  3503. iwork = itau + *n;
  3504. /* Compute A=Q*R (Workspace: need N*N+2*N, prefer
  3505. N*N+N+N*NB) */
  3506. i__3 = *lwork - iwork + 1;
  3507. NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__3, &ierr);
  3508. /* Copy R to VT, zeroing out below it */
  3509. NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
  3510. i__3 = *n - 1;
  3511. i__2 = *n - 1;
  3512. NUMlapack_dlaset ("L", &i__3, &i__2, &c_b416, &c_b416, &vt_ref (2, 1), ldvt);
  3513. /* Generate Q in A (Workspace: need N*N+2*N, prefer
  3514. N*N+N+N*NB) */
  3515. i__3 = *lwork - iwork + 1;
  3516. NUMlapack_dorgqr (m, n, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__3, &ierr);
  3517. ie = itau;
  3518. itauq = ie + *n;
  3519. itaup = itauq + *n;
  3520. iwork = itaup + *n;
  3521. /* Bidiagonalize R in VT, copying result to WORK(IR)
  3522. (Workspace: need N*N+4*N, prefer N*N+3*N+2*N*NB) */
  3523. i__3 = *lwork - iwork + 1;
  3524. NUMlapack_dgebrd (n, n, &vt[vt_offset], ldvt, &s[1], &work[ie], &work[itauq], &work[itaup],
  3525. &work[iwork], &i__3, &ierr);
  3526. NUMlapack_dlacpy ("L", n, n, &vt[vt_offset], ldvt, &work[ir], &ldwrkr);
  3527. /* Generate left vectors bidiagonalizing R in WORK(IR)
  3528. (Workspace: need N*N+4*N, prefer N*N+3*N+N*NB) */
  3529. i__3 = *lwork - iwork + 1;
  3530. NUMlapack_dorgbr ("Q", n, n, n, &work[ir], &ldwrkr, &work[itauq], &work[iwork], &i__3,
  3531. &ierr);
  3532. /* Generate right vectors bidiagonalizing R in VT
  3533. (Workspace: need N*N+4*N-1, prefer N*N+3*N+(N-1)*NB) */
  3534. i__3 = *lwork - iwork + 1;
  3535. NUMlapack_dorgbr ("P", n, n, n, &vt[vt_offset], ldvt, &work[itaup], &work[iwork], &i__3,
  3536. &ierr);
  3537. iwork = ie + *n;
  3538. /* Perform bidiagonal QR iteration, computing left
  3539. singular vectors of R in WORK(IR) and computing right
  3540. singular vectors of R in VT (Workspace: need
  3541. N*N+BDSPAC) */
  3542. NUMlapack_dbdsqr ("U", n, n, n, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt, &work[ir],
  3543. &ldwrkr, dum, &c__1, &work[iwork], info);
  3544. iu = ie + *n;
  3545. /* Multiply Q in A by left singular vectors of R in
  3546. WORK(IR), storing result in WORK(IU) and copying to A
  3547. (Workspace: need N*N+2*N, prefer N*N+M*N+N) */
  3548. i__3 = *m;
  3549. i__2 = ldwrku;
  3550. for (i__ = 1; i__2 < 0 ? i__ >= i__3 : i__ <= i__3; i__ += i__2) {
  3551. /* Computing MIN */
  3552. i__4 = *m - i__ + 1;
  3553. chunk = MIN (i__4, ldwrku);
  3554. NUMblas_dgemm ("N", "N", &chunk, n, n, &c_b438, &a_ref (i__, 1), lda, &work[ir], &ldwrkr,
  3555. &c_b416, &work[iu], &ldwrku);
  3556. NUMlapack_dlacpy ("F", &chunk, n, &work[iu], &ldwrku, &a_ref (i__, 1), lda);
  3557. /* L20: */
  3558. }
  3559. } else {
  3560. /* Insufficient workspace for a fast algorithm */
  3561. itau = 1;
  3562. iwork = itau + *n;
  3563. /* Compute A=Q*R (Workspace: need 2*N, prefer N+N*NB) */
  3564. i__2 = *lwork - iwork + 1;
  3565. NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
  3566. /* Copy R to VT, zeroing out below it */
  3567. NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
  3568. i__2 = *n - 1;
  3569. i__3 = *n - 1;
  3570. NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &vt_ref (2, 1), ldvt);
  3571. /* Generate Q in A (Workspace: need 2*N, prefer N+N*NB) */
  3572. i__2 = *lwork - iwork + 1;
  3573. NUMlapack_dorgqr (m, n, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
  3574. ie = itau;
  3575. itauq = ie + *n;
  3576. itaup = itauq + *n;
  3577. iwork = itaup + *n;
  3578. /* Bidiagonalize R in VT (Workspace: need 4*N, prefer
  3579. 3*N+2*N*NB) */
  3580. i__2 = *lwork - iwork + 1;
  3581. NUMlapack_dgebrd (n, n, &vt[vt_offset], ldvt, &s[1], &work[ie], &work[itauq], &work[itaup],
  3582. &work[iwork], &i__2, &ierr);
  3583. /* Multiply Q in A by left vectors bidiagonalizing R
  3584. (Workspace: need 3*N+M, prefer 3*N+M*NB) */
  3585. i__2 = *lwork - iwork + 1;
  3586. NUMlapack_dormbr ("Q", "R", "N", m, n, n, &vt[vt_offset], ldvt, &work[itauq], &a[a_offset],
  3587. lda, &work[iwork], &i__2, &ierr);
  3588. /* Generate right vectors bidiagonalizing R in VT
  3589. (Workspace: need 4*N-1, prefer 3*N+(N-1)*NB) */
  3590. i__2 = *lwork - iwork + 1;
  3591. NUMlapack_dorgbr ("P", n, n, n, &vt[vt_offset], ldvt, &work[itaup], &work[iwork], &i__2,
  3592. &ierr);
  3593. iwork = ie + *n;
  3594. /* Perform bidiagonal QR iteration, computing left
  3595. singular vectors of A in A and computing right
  3596. singular vectors of A in VT (Workspace: need BDSPAC) */
  3597. NUMlapack_dbdsqr ("U", n, n, m, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt, &a[a_offset],
  3598. lda, dum, &c__1, &work[iwork], info);
  3599. }
  3600. } else if (wntus) {
  3601. if (wntvn) {
  3602. /* Path 4 (M much larger than N, JOBU='S', JOBVT='N') N
  3603. left singular vectors to be computed in U and no right
  3604. singular vectors to be computed
  3605. Computing MAX */
  3606. i__2 = *n << 2;
  3607. if (*lwork >= *n * *n + MAX (i__2, bdspac)) {
  3608. /* Sufficient workspace for a fast algorithm */
  3609. ir = 1;
  3610. if (*lwork >= wrkbl + *lda * *n) {
  3611. /* WORK(IR) is LDA by N */
  3612. ldwrkr = *lda;
  3613. } else {
  3614. /* WORK(IR) is N by N */
  3615. ldwrkr = *n;
  3616. }
  3617. itau = ir + ldwrkr * *n;
  3618. iwork = itau + *n;
  3619. /* Compute A=Q*R (Workspace: need N*N+2*N, prefer
  3620. N*N+N+N*NB) */
  3621. i__2 = *lwork - iwork + 1;
  3622. NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
  3623. /* Copy R to WORK(IR), zeroing out below it */
  3624. NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &work[ir], &ldwrkr);
  3625. i__2 = *n - 1;
  3626. i__3 = *n - 1;
  3627. NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &work[ir + 1], &ldwrkr);
  3628. /* Generate Q in A (Workspace: need N*N+2*N, prefer
  3629. N*N+N+N*NB) */
  3630. i__2 = *lwork - iwork + 1;
  3631. NUMlapack_dorgqr (m, n, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
  3632. ie = itau;
  3633. itauq = ie + *n;
  3634. itaup = itauq + *n;
  3635. iwork = itaup + *n;
  3636. /* Bidiagonalize R in WORK(IR) (Workspace: need
  3637. N*N+4*N, prefer N*N+3*N+2*N*NB) */
  3638. i__2 = *lwork - iwork + 1;
  3639. NUMlapack_dgebrd (n, n, &work[ir], &ldwrkr, &s[1], &work[ie], &work[itauq],
  3640. &work[itaup], &work[iwork], &i__2, &ierr);
  3641. /* Generate left vectors bidiagonalizing R in
  3642. WORK(IR) (Workspace: need N*N+4*N, prefer
  3643. N*N+3*N+N*NB) */
  3644. i__2 = *lwork - iwork + 1;
  3645. NUMlapack_dorgbr ("Q", n, n, n, &work[ir], &ldwrkr, &work[itauq], &work[iwork], &i__2,
  3646. &ierr);
  3647. iwork = ie + *n;
  3648. /* Perform bidiagonal QR iteration, computing left
  3649. singular vectors of R in WORK(IR) (Workspace: need
  3650. N*N+BDSPAC) */
  3651. NUMlapack_dbdsqr ("U", n, &c__0, n, &c__0, &s[1], &work[ie], dum, &c__1, &work[ir],
  3652. &ldwrkr, dum, &c__1, &work[iwork], info);
  3653. /* Multiply Q in A by left singular vectors of R in
  3654. WORK(IR), storing result in U (Workspace: need
  3655. N*N) */
NUMblas_dgemm ("N", "N", m, n, n, &c_b438, &a[a_offset], lda, &work[ir], &ldwrkr, &c_b416,
&u[u_offset], ldu);
} else {
/* Insufficient workspace for a fast algorithm */
itau = 1;
iwork = itau + *n;
/* Compute A=Q*R, copying result to U (Workspace:
need 2*N, prefer N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Generate Q in U (Workspace: need 2*N, prefer
N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, n, n, &u[u_offset], ldu, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Zero out below R in A */
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &a_ref (2, 1), lda);
/* Bidiagonalize R in A (Workspace: need 4*N, prefer
3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
/* Multiply Q in U by left vectors bidiagonalizing R
(Workspace: need 3*N+M, prefer 3*N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("Q", "R", "N", m, n, n, &a[a_offset], lda, &work[itauq], &u[u_offset],
ldu, &work[iwork], &i__2, &ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of A in U (Workspace: need
BDSPAC) */
NUMlapack_dbdsqr ("U", n, &c__0, m, &c__0, &s[1], &work[ie], dum, &c__1, &u[u_offset],
ldu, dum, &c__1, &work[iwork], info);
}
} else if (wntvo) {
/* Path 5 (M much larger than N, JOBU='S', JOBVT='O') N
left singular vectors to be computed in U and N right
singular vectors to be overwritten on A
Computing MAX */
i__2 = *n << 2;
if (*lwork >= (*n << 1) * *n + MAX (i__2, bdspac)) {
/* Sufficient workspace for a fast algorithm */
iu = 1;
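/* Three workspace layouts, tried in decreasing order of size: both
scratch matrices with leading dimension LDA, then a mixed LDA/N
layout, and finally two compact N-by-N panels. */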
if (*lwork >= wrkbl + (*lda << 1) * *n) {
/* WORK(IU) is LDA by N and WORK(IR) is LDA by N */
ldwrku = *lda;
ir = iu + ldwrku * *n;
ldwrkr = *lda;
} else if (*lwork >= wrkbl + (*lda + *n) * *n) {
/* WORK(IU) is LDA by N and WORK(IR) is N by N */
ldwrku = *lda;
ir = iu + ldwrku * *n;
ldwrkr = *n;
} else {
/* WORK(IU) is N by N and WORK(IR) is N by N */
ldwrku = *n;
ir = iu + ldwrku * *n;
ldwrkr = *n;
}
itau = ir + ldwrkr * *n;
iwork = itau + *n;
/* Compute A=Q*R (Workspace: need 2*N*N+2*N, prefer
2*N*N+N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy R to WORK(IU), zeroing out below it */
NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &work[iu], &ldwrku);
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &work[iu + 1], &ldwrku);
/* Generate Q in A (Workspace: need 2*N*N+2*N, prefer
2*N*N+N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, n, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Bidiagonalize R in WORK(IU), copying result to
WORK(IR) (Workspace: need 2*N*N+4*N, prefer
2*N*N+3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &work[iu], &ldwrku, &s[1], &work[ie], &work[itauq],
&work[itaup], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("U", n, n, &work[iu], &ldwrku, &work[ir], &ldwrkr);
/* Generate left bidiagonalizing vectors in WORK(IU)
(Workspace: need 2*N*N+4*N, prefer 2*N*N+3*N+N*NB)
*/
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", n, n, n, &work[iu], &ldwrku, &work[itauq], &work[iwork], &i__2,
&ierr);
/* Generate right bidiagonalizing vectors in WORK(IR)
(Workspace: need 2*N*N+4*N-1, prefer
2*N*N+3*N+(N-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &work[ir], &ldwrkr, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of R in WORK(IU) and computing
right singular vectors of R in WORK(IR)
(Workspace: need 2*N*N+BDSPAC) */
NUMlapack_dbdsqr ("U", n, n, n, &c__0, &s[1], &work[ie], &work[ir], &ldwrkr, &work[iu],
&ldwrku, dum, &c__1, &work[iwork], info);
/* Multiply Q in A by left singular vectors of R in
WORK(IU), storing result in U (Workspace: need
N*N) */
NUMblas_dgemm ("N", "N", m, n, n, &c_b438, &a[a_offset], lda, &work[iu], &ldwrku, &c_b416,
&u[u_offset], ldu);
/* Copy right singular vectors of R to A (Workspace:
need N*N) */
NUMlapack_dlacpy ("F", n, n, &work[ir], &ldwrkr, &a[a_offset], lda);
} else {
/* Insufficient workspace for a fast algorithm */
itau = 1;
iwork = itau + *n;
/* Compute A=Q*R, copying result to U (Workspace:
need 2*N, prefer N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Generate Q in U (Workspace: need 2*N, prefer
N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, n, n, &u[u_offset], ldu, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Zero out below R in A */
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &a_ref (2, 1), lda);
/* Bidiagonalize R in A (Workspace: need 4*N, prefer
3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
/* Multiply Q in U by left vectors bidiagonalizing R
(Workspace: need 3*N+M, prefer 3*N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("Q", "R", "N", m, n, n, &a[a_offset], lda, &work[itauq], &u[u_offset],
ldu, &work[iwork], &i__2, &ierr);
/* Generate right vectors bidiagonalizing R in A
(Workspace: need 4*N-1, prefer 3*N+(N-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &a[a_offset], lda, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of A in U and computing right
singular vectors of A in A (Workspace: need
BDSPAC) */
NUMlapack_dbdsqr ("U", n, n, m, &c__0, &s[1], &work[ie], &a[a_offset], lda,
&u[u_offset], ldu, dum, &c__1, &work[iwork], info);
}
} else if (wntvas) {
/* Path 6 (M much larger than N, JOBU='S', JOBVT='S' or
'A') N left singular vectors to be computed in U and N
right singular vectors to be computed in VT
Computing MAX */
i__2 = *n << 2;
if (*lwork >= *n * *n + MAX (i__2, bdspac)) {
/* Sufficient workspace for a fast algorithm */
iu = 1;
if (*lwork >= wrkbl + *lda * *n) {
/* WORK(IU) is LDA by N */
ldwrku = *lda;
} else {
/* WORK(IU) is N by N */
ldwrku = *n;
}
itau = iu + ldwrku * *n;
iwork = itau + *n;
/* Compute A=Q*R (Workspace: need N*N+2*N, prefer
N*N+N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy R to WORK(IU), zeroing out below it */
NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &work[iu], &ldwrku);
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &work[iu + 1], &ldwrku);
/* Generate Q in A (Workspace: need N*N+2*N, prefer
N*N+N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, n, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Bidiagonalize R in WORK(IU), copying result to VT
(Workspace: need N*N+4*N, prefer N*N+3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &work[iu], &ldwrku, &s[1], &work[ie], &work[itauq],
&work[itaup], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("U", n, n, &work[iu], &ldwrku, &vt[vt_offset], ldvt);
/* Generate left bidiagonalizing vectors in WORK(IU)
(Workspace: need N*N+4*N, prefer N*N+3*N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", n, n, n, &work[iu], &ldwrku, &work[itauq], &work[iwork], &i__2,
&ierr);
/* Generate right bidiagonalizing vectors in VT
(Workspace: need N*N+4*N-1, prefer
N*N+3*N+(N-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &vt[vt_offset], ldvt, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of R in WORK(IU) and computing
right singular vectors of R in VT (Workspace: need
N*N+BDSPAC) */
NUMlapack_dbdsqr ("U", n, n, n, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
&work[iu], &ldwrku, dum, &c__1, &work[iwork], info);
/* Multiply Q in A by left singular vectors of R in
WORK(IU), storing result in U (Workspace: need
N*N) */
NUMblas_dgemm ("N", "N", m, n, n, &c_b438, &a[a_offset], lda, &work[iu], &ldwrku, &c_b416,
&u[u_offset], ldu);
} else {
/* Insufficient workspace for a fast algorithm */
itau = 1;
iwork = itau + *n;
/* Compute A=Q*R, copying result to U (Workspace:
need 2*N, prefer N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Generate Q in U (Workspace: need 2*N, prefer
N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, n, n, &u[u_offset], ldu, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy R to VT, zeroing out below it */
NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &vt_ref (2, 1), ldvt);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Bidiagonalize R in VT (Workspace: need 4*N, prefer
3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &vt[vt_offset], ldvt, &s[1], &work[ie], &work[itauq],
&work[itaup], &work[iwork], &i__2, &ierr);
/* Multiply Q in U by left bidiagonalizing vectors in
VT (Workspace: need 3*N+M, prefer 3*N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("Q", "R", "N", m, n, n, &vt[vt_offset], ldvt, &work[itauq],
&u[u_offset], ldu, &work[iwork], &i__2, &ierr);
/* Generate right bidiagonalizing vectors in VT
(Workspace: need 4*N-1, prefer 3*N+(N-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &vt[vt_offset], ldvt, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of A in U and computing right
singular vectors of A in VT (Workspace: need
BDSPAC) */
NUMlapack_dbdsqr ("U", n, n, m, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
&u[u_offset], ldu, dum, &c__1, &work[iwork], info);
}
}
} else if (wntua) {
if (wntvn) {
/* Path 7 (M much larger than N, JOBU='A', JOBVT='N') M
left singular vectors to be computed in U and no right
singular vectors to be computed
Computing MAX */
i__2 = *n + *m, i__3 = *n << 2, i__2 = MAX (i__2, i__3);
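/* f2c idiom: the comma expression above evaluates MAX (*n + *m,
4 * *n) into i__2; the extra N+M term appears because this path
generates the full M-by-M matrix Q (see the dorgqr workspace note
below). */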
if (*lwork >= *n * *n + MAX (i__2, bdspac)) {
/* Sufficient workspace for a fast algorithm */
ir = 1;
if (*lwork >= wrkbl + *lda * *n) {
/* WORK(IR) is LDA by N */
ldwrkr = *lda;
} else {
/* WORK(IR) is N by N */
ldwrkr = *n;
}
itau = ir + ldwrkr * *n;
iwork = itau + *n;
/* Compute A=Q*R, copying result to U (Workspace:
need N*N+2*N, prefer N*N+N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Copy R to WORK(IR), zeroing out below it */
NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &work[ir], &ldwrkr);
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &work[ir + 1], &ldwrkr);
/* Generate Q in U (Workspace: need N*N+N+M, prefer
N*N+N+M*NB) */
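/* JOBU='A': NUMlapack_dorgqr is called with m, m, n, expanding the N
elementary reflectors from the QR factorization into the complete
M-by-M orthogonal matrix Q. */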
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, m, n, &u[u_offset], ldu, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Bidiagonalize R in WORK(IR) (Workspace: need
N*N+4*N, prefer N*N+3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &work[ir], &ldwrkr, &s[1], &work[ie], &work[itauq],
&work[itaup], &work[iwork], &i__2, &ierr);
/* Generate left bidiagonalizing vectors in WORK(IR)
(Workspace: need N*N+4*N, prefer N*N+3*N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", n, n, n, &work[ir], &ldwrkr, &work[itauq], &work[iwork], &i__2,
&ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of R in WORK(IR) (Workspace: need
N*N+BDSPAC) */
NUMlapack_dbdsqr ("U", n, &c__0, n, &c__0, &s[1], &work[ie], dum, &c__1, &work[ir],
&ldwrkr, dum, &c__1, &work[iwork], info);
/* Multiply Q in U by left singular vectors of R in
WORK(IR), storing result in A (Workspace: need
N*N) */
NUMblas_dgemm ("N", "N", m, n, n, &c_b438, &u[u_offset], ldu, &work[ir], &ldwrkr, &c_b416,
&a[a_offset], lda);
/* Copy left singular vectors of A from A to U */
NUMlapack_dlacpy ("F", m, n, &a[a_offset], lda, &u[u_offset], ldu);
} else {
/* Insufficient workspace for a fast algorithm */
itau = 1;
iwork = itau + *n;
/* Compute A=Q*R, copying result to U (Workspace:
need 2*N, prefer N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Generate Q in U (Workspace: need N+M, prefer
N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, m, n, &u[u_offset], ldu, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Zero out below R in A */
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &a_ref (2, 1), lda);
/* Bidiagonalize R in A (Workspace: need 4*N, prefer
3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
/* Multiply Q in U by left bidiagonalizing vectors in
A (Workspace: need 3*N+M, prefer 3*N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("Q", "R", "N", m, n, n, &a[a_offset], lda, &work[itauq], &u[u_offset],
ldu, &work[iwork], &i__2, &ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of A in U (Workspace: need
BDSPAC) */
NUMlapack_dbdsqr ("U", n, &c__0, m, &c__0, &s[1], &work[ie], dum, &c__1, &u[u_offset],
ldu, dum, &c__1, &work[iwork], info);
}
} else if (wntvo) {
/* Path 8 (M much larger than N, JOBU='A', JOBVT='O') M
left singular vectors to be computed in U and N right
singular vectors to be overwritten on A
Computing MAX */
i__2 = *n + *m, i__3 = *n << 2, i__2 = MAX (i__2, i__3);
if (*lwork >= (*n << 1) * *n + MAX (i__2, bdspac)) {
/* Sufficient workspace for a fast algorithm */
iu = 1;
if (*lwork >= wrkbl + (*lda << 1) * *n) {
/* WORK(IU) is LDA by N and WORK(IR) is LDA by N */
ldwrku = *lda;
ir = iu + ldwrku * *n;
ldwrkr = *lda;
} else if (*lwork >= wrkbl + (*lda + *n) * *n) {
/* WORK(IU) is LDA by N and WORK(IR) is N by N */
ldwrku = *lda;
ir = iu + ldwrku * *n;
ldwrkr = *n;
} else {
/* WORK(IU) is N by N and WORK(IR) is N by N */
ldwrku = *n;
ir = iu + ldwrku * *n;
ldwrkr = *n;
}
itau = ir + ldwrkr * *n;
iwork = itau + *n;
/* Compute A=Q*R, copying result to U (Workspace:
need 2*N*N+2*N, prefer 2*N*N+N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Generate Q in U (Workspace: need 2*N*N+N+M, prefer
2*N*N+N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, m, n, &u[u_offset], ldu, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy R to WORK(IU), zeroing out below it */
NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &work[iu], &ldwrku);
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &work[iu + 1], &ldwrku);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Bidiagonalize R in WORK(IU), copying result to
WORK(IR) (Workspace: need 2*N*N+4*N, prefer
2*N*N+3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &work[iu], &ldwrku, &s[1], &work[ie], &work[itauq],
&work[itaup], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("U", n, n, &work[iu], &ldwrku, &work[ir], &ldwrkr);
/* Generate left bidiagonalizing vectors in WORK(IU)
(Workspace: need 2*N*N+4*N, prefer 2*N*N+3*N+N*NB)
*/
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", n, n, n, &work[iu], &ldwrku, &work[itauq], &work[iwork], &i__2,
&ierr);
/* Generate right bidiagonalizing vectors in WORK(IR)
(Workspace: need 2*N*N+4*N-1, prefer
2*N*N+3*N+(N-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &work[ir], &ldwrkr, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of R in WORK(IU) and computing
right singular vectors of R in WORK(IR)
(Workspace: need 2*N*N+BDSPAC) */
NUMlapack_dbdsqr ("U", n, n, n, &c__0, &s[1], &work[ie], &work[ir], &ldwrkr, &work[iu],
&ldwrku, dum, &c__1, &work[iwork], info);
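/* The results are now moved into place; the order matters because A
serves first as the destination for Q times the left vectors of R
and only afterwards is overwritten with the right singular
vectors. */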
/* Multiply Q in U by left singular vectors of R in
WORK(IU), storing result in A (Workspace: need
N*N) */
NUMblas_dgemm ("N", "N", m, n, n, &c_b438, &u[u_offset], ldu, &work[iu], &ldwrku, &c_b416,
&a[a_offset], lda);
/* Copy left singular vectors of A from A to U */
NUMlapack_dlacpy ("F", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Copy right singular vectors of R from WORK(IR) to
A */
NUMlapack_dlacpy ("F", n, n, &work[ir], &ldwrkr, &a[a_offset], lda);
} else {
/* Insufficient workspace for a fast algorithm */
itau = 1;
iwork = itau + *n;
/* Compute A=Q*R, copying result to U (Workspace:
need 2*N, prefer N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Generate Q in U (Workspace: need N+M, prefer
N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, m, n, &u[u_offset], ldu, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Zero out below R in A */
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &a_ref (2, 1), lda);
/* Bidiagonalize R in A (Workspace: need 4*N, prefer
3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
/* Multiply Q in U by left bidiagonalizing vectors in
A (Workspace: need 3*N+M, prefer 3*N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("Q", "R", "N", m, n, n, &a[a_offset], lda, &work[itauq], &u[u_offset],
ldu, &work[iwork], &i__2, &ierr);
/* Generate right bidiagonalizing vectors in A
(Workspace: need 4*N-1, prefer 3*N+(N-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &a[a_offset], lda, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of A in U and computing right
singular vectors of A in A (Workspace: need
BDSPAC) */
NUMlapack_dbdsqr ("U", n, n, m, &c__0, &s[1], &work[ie], &a[a_offset], lda,
&u[u_offset], ldu, dum, &c__1, &work[iwork], info);
}
} else if (wntvas) {
/* Path 9 (M much larger than N, JOBU='A', JOBVT='S' or
'A') M left singular vectors to be computed in U and N
right singular vectors to be computed in VT
Computing MAX */
i__2 = *n + *m, i__3 = *n << 2, i__2 = MAX (i__2, i__3);
if (*lwork >= *n * *n + MAX (i__2, bdspac)) {
/* Sufficient workspace for a fast algorithm */
iu = 1;
if (*lwork >= wrkbl + *lda * *n) {
/* WORK(IU) is LDA by N */
ldwrku = *lda;
} else {
/* WORK(IU) is N by N */
ldwrku = *n;
}
itau = iu + ldwrku * *n;
iwork = itau + *n;
/* Compute A=Q*R, copying result to U (Workspace:
need N*N+2*N, prefer N*N+N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Generate Q in U (Workspace: need N*N+N+M, prefer
N*N+N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, m, n, &u[u_offset], ldu, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy R to WORK(IU), zeroing out below it */
NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &work[iu], &ldwrku);
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &work[iu + 1], &ldwrku);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Bidiagonalize R in WORK(IU), copying result to VT
(Workspace: need N*N+4*N, prefer N*N+3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &work[iu], &ldwrku, &s[1], &work[ie], &work[itauq],
&work[itaup], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("U", n, n, &work[iu], &ldwrku, &vt[vt_offset], ldvt);
/* Generate left bidiagonalizing vectors in WORK(IU)
(Workspace: need N*N+4*N, prefer N*N+3*N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", n, n, n, &work[iu], &ldwrku, &work[itauq], &work[iwork], &i__2,
&ierr);
/* Generate right bidiagonalizing vectors in VT
(Workspace: need N*N+4*N-1, prefer
N*N+3*N+(N-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &vt[vt_offset], ldvt, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of R in WORK(IU) and computing
right singular vectors of R in VT (Workspace: need
N*N+BDSPAC) */
NUMlapack_dbdsqr ("U", n, n, n, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
&work[iu], &ldwrku, dum, &c__1, &work[iwork], info);
/* Multiply Q in U by left singular vectors of R in
WORK(IU), storing result in A (Workspace: need
N*N) */
NUMblas_dgemm ("N", "N", m, n, n, &c_b438, &u[u_offset], ldu, &work[iu], &ldwrku, &c_b416,
&a[a_offset], lda);
/* Copy left singular vectors of A from A to U */
NUMlapack_dlacpy ("F", m, n, &a[a_offset], lda, &u[u_offset], ldu);
} else {
/* Insufficient workspace for a fast algorithm */
itau = 1;
iwork = itau + *n;
/* Compute A=Q*R, copying result to U (Workspace:
need 2*N, prefer N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgeqrf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
/* Generate Q in U (Workspace: need N+M, prefer
N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgqr (m, m, n, &u[u_offset], ldu, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy R from A to VT, zeroing out below it */
NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
i__2 = *n - 1;
i__3 = *n - 1;
NUMlapack_dlaset ("L", &i__2, &i__3, &c_b416, &c_b416, &vt_ref (2, 1), ldvt);
ie = itau;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Bidiagonalize R in VT (Workspace: need 4*N, prefer
3*N+2*N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (n, n, &vt[vt_offset], ldvt, &s[1], &work[ie], &work[itauq],
&work[itaup], &work[iwork], &i__2, &ierr);
/* Multiply Q in U by left bidiagonalizing vectors in
VT (Workspace: need 3*N+M, prefer 3*N+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("Q", "R", "N", m, n, n, &vt[vt_offset], ldvt, &work[itauq],
&u[u_offset], ldu, &work[iwork], &i__2, &ierr);
/* Generate right bidiagonalizing vectors in VT
(Workspace: need 4*N-1, prefer 3*N+(N-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &vt[vt_offset], ldvt, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *n;
/* Perform bidiagonal QR iteration, computing left
singular vectors of A in U and computing right
singular vectors of A in VT (Workspace: need
BDSPAC) */
NUMlapack_dbdsqr ("U", n, n, m, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
&u[u_offset], ldu, dum, &c__1, &work[iwork], info);
}
}
}
} else {
/* M .LT. MNTHR
Path 10 (M at least N, but not much larger) Reduce to
bidiagonal form without QR decomposition */
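/* When M is not much larger than N the QR preprocessing no longer
pays off, so A is bidiagonalized directly. (The crossover MNTHR
comes from the ILAENV-style tuning query; in reference LAPACK it
is about 1.6*min(M,N).) */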
ie = 1;
itauq = ie + *n;
itaup = itauq + *n;
iwork = itaup + *n;
/* Bidiagonalize A (Workspace: need 3*N+M, prefer 3*N+(M+N)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
if (wntuas) {
/* If left singular vectors desired in U, copy result to U
and generate left bidiagonalizing vectors in U (Workspace:
need 3*N+NCU, prefer 3*N+NCU*NB) */
NUMlapack_dlacpy ("L", m, n, &a[a_offset], lda, &u[u_offset], ldu);
if (wntus) {
ncu = *n;
}
if (wntua) {
ncu = *m;
}
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", m, &ncu, n, &u[u_offset], ldu, &work[itauq], &work[iwork], &i__2, &ierr);
}
if (wntvas) {
/* If right singular vectors desired in VT, copy result to VT
and generate right bidiagonalizing vectors in VT
(Workspace: need 4*N-1, prefer 3*N+(N-1)*NB) */
NUMlapack_dlacpy ("U", n, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &vt[vt_offset], ldvt, &work[itaup], &work[iwork], &i__2, &ierr);
}
if (wntuo) {
/* If left singular vectors desired in A, generate left
bidiagonalizing vectors in A (Workspace: need 4*N, prefer
3*N+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", m, n, n, &a[a_offset], lda, &work[itauq], &work[iwork], &i__2, &ierr);
}
if (wntvo) {
/* If right singular vectors desired in A, generate right
bidiagonalizing vectors in A (Workspace: need 4*N-1,
prefer 3*N+(N-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", n, n, n, &a[a_offset], lda, &work[itaup], &work[iwork], &i__2, &ierr);
}
iwork = ie + *n;
if (wntuas || wntuo) {
nru = *m;
}
if (wntun) {
nru = 0;
}
if (wntvas || wntvo) {
ncvt = *n;
}
if (wntvn) {
ncvt = 0;
}
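/* NRU and NCVT tell NUMlapack_dbdsqr how many rows of left vectors
and columns of right vectors to update; a zero count skips that
side entirely. The three calls below differ only in whether the
vectors live in U/VT or have been formed in place in A. */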
if (!wntuo && !wntvo) {
/* Perform bidiagonal QR iteration, if desired, computing
left singular vectors in U and computing right singular
vectors in VT (Workspace: need BDSPAC) */
NUMlapack_dbdsqr ("U", n, &ncvt, &nru, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
&u[u_offset], ldu, dum, &c__1, &work[iwork], info);
} else if (!wntuo && wntvo) {
/* Perform bidiagonal QR iteration, if desired, computing
left singular vectors in U and computing right singular
vectors in A (Workspace: need BDSPAC) */
NUMlapack_dbdsqr ("U", n, &ncvt, &nru, &c__0, &s[1], &work[ie], &a[a_offset], lda, &u[u_offset],
ldu, dum, &c__1, &work[iwork], info);
} else {
/* Perform bidiagonal QR iteration, if desired, computing
left singular vectors in A and computing right singular
vectors in VT (Workspace: need BDSPAC) */
NUMlapack_dbdsqr ("U", n, &ncvt, &nru, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
&a[a_offset], lda, dum, &c__1, &work[iwork], info);
}
}
} else {
/* A has more columns than rows. If A has sufficiently more columns
than rows, first reduce using the LQ decomposition (if sufficient
workspace available) */
if (*n >= mnthr) {
if (wntvn) {
/* Path 1t(N much larger than M, JOBVT='N') No right singular
vectors to be computed */
itau = 1;
iwork = itau + *m;
/* Compute A=L*Q (Workspace: need 2*M, prefer M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
/* Zero out above L */
i__2 = *m - 1;
i__3 = *m - 1;
NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &a_ref (1, 2), lda);
ie = 1;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Bidiagonalize L in A (Workspace: need 4*M, prefer
3*M+2*M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, m, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
if (wntuo || wntuas) {
/* If left singular vectors desired, generate Q
(Workspace: need 4*M, prefer 3*M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", m, m, m, &a[a_offset], lda, &work[itauq], &work[iwork], &i__2,
&ierr);
}
iwork = ie + *m;
nru = 0;
if (wntuo || wntuas) {
nru = *m;
}
/* Perform bidiagonal QR iteration, computing left singular
vectors of A in A if desired (Workspace: need BDSPAC) */
NUMlapack_dbdsqr ("U", m, &c__0, &nru, &c__0, &s[1], &work[ie], dum, &c__1, &a[a_offset], lda,
dum, &c__1, &work[iwork], info);
/* If left singular vectors desired in U, copy them there */
if (wntuas) {
NUMlapack_dlacpy ("F", m, m, &a[a_offset], lda, &u[u_offset], ldu);
}
} else if (wntvo && wntun) {
/* Path 2t(N much larger than M, JOBU='N', JOBVT='O') M right
singular vectors to be overwritten on A and no left
singular vectors to be computed
Computing MAX */
i__2 = *m << 2;
if (*lwork >= *m * *m + MAX (i__2, bdspac)) {
/* Sufficient workspace for a fast algorithm */
ir = 1;
/* Computing MAX */
i__2 = wrkbl, i__3 = *lda * *n + *m;
if (*lwork >= MAX (i__2, i__3) + *lda * *m) {
/* WORK(IU) is LDA by N and WORK(IR) is LDA by M */
ldwrku = *lda;
chunk = *n;
ldwrkr = *lda;
} else { /* if(complicated condition) */
/* Computing MAX */
i__2 = wrkbl, i__3 = *lda * *n + *m;
if (*lwork >= MAX (i__2, i__3) + *m * *m) {
/* WORK(IU) is LDA by N and WORK(IR) is M by M */
ldwrku = *lda;
chunk = *n;
ldwrkr = *m;
} else {
/* WORK(IU) is M by CHUNK and WORK(IR) is M by M */
ldwrku = *m;
chunk = (*lwork - *m * *m - *m) / *m;
ldwrkr = *m;
}
}
itau = ir + ldwrkr * *m;
iwork = itau + *m;
/* Compute A=L*Q (Workspace: need M*M+2*M, prefer
M*M+M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy L to WORK(IR) and zero out above it */
NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &work[ir], &ldwrkr);
i__2 = *m - 1;
i__3 = *m - 1;
NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &work[ir + ldwrkr], &ldwrkr);
/* Generate Q in A (Workspace: need M*M+2*M, prefer
M*M+M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorglq (m, n, m, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Bidiagonalize L in WORK(IR) (Workspace: need M*M+4*M,
prefer M*M+3*M+2*M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, m, &work[ir], &ldwrkr, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
/* Generate right vectors bidiagonalizing L (Workspace:
need M*M+4*M-1, prefer M*M+3*M+(M-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", m, m, m, &work[ir], &ldwrkr, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing right
singular vectors of L in WORK(IR) (Workspace: need
M*M+BDSPAC) */
NUMlapack_dbdsqr ("U", m, m, &c__0, &c__0, &s[1], &work[ie], &work[ir], &ldwrkr, dum, &c__1,
dum, &c__1, &work[iwork], info);
iu = ie + *m;
/* Multiply right singular vectors of L in WORK(IR) by Q
in A, storing result in WORK(IU) and copying to A
(Workspace: need M*M+2*M, prefer M*M+M*N+M) */
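/* The N columns of Q are processed in panels of at most CHUNK
columns, since WORK(IU) only holds an M-by-CHUNK block; each panel
is multiplied into WORK(IU) and copied back over the same columns
of A. f2c renders the Fortran DO loop with a sign test on the
step, hence the i__3 < 0 guard. */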
i__2 = *n;
i__3 = chunk;
for (i__ = 1; i__3 < 0 ? i__ >= i__2 : i__ <= i__2; i__ += i__3) {
/* Computing MIN */
i__4 = *n - i__ + 1;
blk = MIN (i__4, chunk);
NUMblas_dgemm ("N", "N", m, &blk, m, &c_b438, &work[ir], &ldwrkr, &a_ref (1, i__), lda,
&c_b416, &work[iu], &ldwrku);
NUMlapack_dlacpy ("F", m, &blk, &work[iu], &ldwrku, &a_ref (1, i__), lda);
/* L30: */
}
} else {
/* Insufficient workspace for a fast algorithm */
ie = 1;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Bidiagonalize A (Workspace: need 3*M+N, prefer
3*M+(M+N)*NB) */
i__3 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__3, &ierr);
/* Generate right vectors bidiagonalizing A (Workspace:
need 4*M, prefer 3*M+M*NB) */
i__3 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", m, n, m, &a[a_offset], lda, &work[itaup], &work[iwork], &i__3,
&ierr);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing right
singular vectors of A in A (Workspace: need BDSPAC) */
NUMlapack_dbdsqr ("L", m, n, &c__0, &c__0, &s[1], &work[ie], &a[a_offset], lda, dum, &c__1,
dum, &c__1, &work[iwork], info);
}
} else if (wntvo && wntuas) {
/* Path 3t(N much larger than M, JOBU='S' or 'A', JOBVT='O')
M right singular vectors to be overwritten on A and M left
singular vectors to be computed in U
Computing MAX */
i__3 = *m << 2;
if (*lwork >= *m * *m + MAX (i__3, bdspac)) {
/* Sufficient workspace for a fast algorithm */
ir = 1;
/* Computing MAX */
i__3 = wrkbl, i__2 = *lda * *n + *m;
if (*lwork >= MAX (i__3, i__2) + *lda * *m) {
/* WORK(IU) is LDA by N and WORK(IR) is LDA by M */
ldwrku = *lda;
chunk = *n;
ldwrkr = *lda;
} else { /* if(complicated condition) */
/* Computing MAX */
i__3 = wrkbl, i__2 = *lda * *n + *m;
if (*lwork >= MAX (i__3, i__2) + *m * *m) {
/* WORK(IU) is LDA by N and WORK(IR) is M by M */
ldwrku = *lda;
chunk = *n;
ldwrkr = *m;
} else {
/* WORK(IU) is M by CHUNK and WORK(IR) is M by M */
ldwrku = *m;
chunk = (*lwork - *m * *m - *m) / *m;
ldwrkr = *m;
}
}
itau = ir + ldwrkr * *m;
iwork = itau + *m;
/* Compute A=L*Q (Workspace: need M*M+2*M, prefer
M*M+M+M*NB) */
i__3 = *lwork - iwork + 1;
NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__3, &ierr);
/* Copy L to U, zeroing out above it */
NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &u[u_offset], ldu);
i__3 = *m - 1;
i__2 = *m - 1;
NUMlapack_dlaset ("U", &i__3, &i__2, &c_b416, &c_b416, &u_ref (1, 2), ldu);
/* Generate Q in A (Workspace: need M*M+2*M, prefer
M*M+M+M*NB) */
i__3 = *lwork - iwork + 1;
NUMlapack_dorglq (m, n, m, &a[a_offset], lda, &work[itau], &work[iwork], &i__3, &ierr);
ie = itau;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Bidiagonalize L in U, copying result to WORK(IR)
(Workspace: need M*M+4*M, prefer M*M+3*M+2*M*NB) */
i__3 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, m, &u[u_offset], ldu, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__3, &ierr);
NUMlapack_dlacpy ("U", m, m, &u[u_offset], ldu, &work[ir], &ldwrkr);
/* Generate right vectors bidiagonalizing L in WORK(IR)
(Workspace: need M*M+4*M-1, prefer M*M+3*M+(M-1)*NB) */
i__3 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", m, m, m, &work[ir], &ldwrkr, &work[itaup], &work[iwork], &i__3,
&ierr);
/* Generate left vectors bidiagonalizing L in U
(Workspace: need M*M+4*M, prefer M*M+3*M+M*NB) */
i__3 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", m, m, m, &u[u_offset], ldu, &work[itauq], &work[iwork], &i__3,
&ierr);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing left
singular vectors of L in U, and computing right
singular vectors of L in WORK(IR) (Workspace: need
M*M+BDSPAC) */
NUMlapack_dbdsqr ("U", m, m, m, &c__0, &s[1], &work[ie], &work[ir], &ldwrkr, &u[u_offset],
ldu, dum, &c__1, &work[iwork], info);
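/* WORK(IU) is re-based at ie + m, the same spot that served as
NUMlapack_dbdsqr's rotation workspace just above: once the QR
iteration is done that space is free, so it is reused as the panel
buffer for the multiply below. */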
iu = ie + *m;
/* Multiply right singular vectors of L in WORK(IR) by Q
in A, storing result in WORK(IU) and copying to A
(Workspace: need M*M+2*M, prefer M*M+M*N+M) */
i__3 = *n;
i__2 = chunk;
for (i__ = 1; i__2 < 0 ? i__ >= i__3 : i__ <= i__3; i__ += i__2) {
/* Computing MIN */
i__4 = *n - i__ + 1;
blk = MIN (i__4, chunk);
NUMblas_dgemm ("N", "N", m, &blk, m, &c_b438, &work[ir], &ldwrkr, &a_ref (1, i__), lda,
&c_b416, &work[iu], &ldwrku);
NUMlapack_dlacpy ("F", m, &blk, &work[iu], &ldwrku, &a_ref (1, i__), lda);
/* L40: */
}
} else {
/* Insufficient workspace for a fast algorithm */
itau = 1;
iwork = itau + *m;
/* Compute A=L*Q (Workspace: need 2*M, prefer M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy L to U, zeroing out above it */
NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &u[u_offset], ldu);
i__2 = *m - 1;
i__3 = *m - 1;
NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &u_ref (1, 2), ldu);
/* Generate Q in A (Workspace: need 2*M, prefer M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorglq (m, n, m, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Bidiagonalize L in U (Workspace: need 4*M, prefer
3*M+2*M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, m, &u[u_offset], ldu, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
/* Multiply right vectors bidiagonalizing L by Q in A
(Workspace: need 3*M+N, prefer 3*M+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("P", "L", "T", m, n, m, &u[u_offset], ldu, &work[itaup], &a[a_offset],
lda, &work[iwork], &i__2, &ierr);
/* Generate left vectors bidiagonalizing L in U
(Workspace: need 4*M, prefer 3*M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", m, m, m, &u[u_offset], ldu, &work[itauq], &work[iwork], &i__2,
&ierr);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing left
singular vectors of A in U and computing right
singular vectors of A in A (Workspace: need BDSPAC) */
NUMlapack_dbdsqr ("U", m, n, m, &c__0, &s[1], &work[ie], &a[a_offset], lda, &u[u_offset],
ldu, dum, &c__1, &work[iwork], info);
}
} else if (wntvs) {
if (wntun) {
/* Path 4t(N much larger than M, JOBU='N', JOBVT='S') M
right singular vectors to be computed in VT and no
left singular vectors to be computed
Computing MAX */
i__2 = *m << 2;
if (*lwork >= *m * *m + MAX (i__2, bdspac)) {
/* Sufficient workspace for a fast algorithm */
ir = 1;
if (*lwork >= wrkbl + *lda * *m) {
/* WORK(IR) is LDA by M */
ldwrkr = *lda;
} else {
/* WORK(IR) is M by M */
ldwrkr = *m;
}
itau = ir + ldwrkr * *m;
iwork = itau + *m;
/* Compute A=L*Q (Workspace: need M*M+2*M, prefer
M*M+M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy L to WORK(IR), zeroing out above it */
NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &work[ir], &ldwrkr);
i__2 = *m - 1;
i__3 = *m - 1;
NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &work[ir + ldwrkr], &ldwrkr);
/* Generate Q in A (Workspace: need M*M+2*M, prefer
M*M+M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorglq (m, n, m, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Bidiagonalize L in WORK(IR) (Workspace: need
M*M+4*M, prefer M*M+3*M+2*M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, m, &work[ir], &ldwrkr, &s[1], &work[ie], &work[itauq],
&work[itaup], &work[iwork], &i__2, &ierr);
/* Generate right vectors bidiagonalizing L in
WORK(IR) (Workspace: need M*M+4*M-1, prefer
M*M+3*M+(M-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", m, m, m, &work[ir], &ldwrkr, &work[itaup], &work[iwork], &i__2,
&ierr);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing right
singular vectors of L in WORK(IR) (Workspace: need
M*M+BDSPAC) */
NUMlapack_dbdsqr ("U", m, m, &c__0, &c__0, &s[1], &work[ie], &work[ir], &ldwrkr, dum,
&c__1, dum, &c__1, &work[iwork], info);
/* Multiply right singular vectors of L in WORK(IR)
by Q in A, storing result in VT (Workspace: need
M*M) */
NUMblas_dgemm ("N", "N", m, n, m, &c_b438, &work[ir], &ldwrkr, &a[a_offset], lda, &c_b416,
&vt[vt_offset], ldvt);
} else {
/* Insufficient workspace for a fast algorithm */
itau = 1;
iwork = itau + *m;
/* Compute A=L*Q (Workspace: need 2*M, prefer M+M*NB)
*/
i__2 = *lwork - iwork + 1;
NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy result to VT */
NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
/* Generate Q in VT (Workspace: need 2*M, prefer
M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorglq (m, n, m, &vt[vt_offset], ldvt, &work[itau], &work[iwork], &i__2,
&ierr);
ie = itau;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Zero out above L in A */
i__2 = *m - 1;
i__3 = *m - 1;
NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &a_ref (1, 2), lda);
/* Bidiagonalize L in A (Workspace: need 4*M, prefer
3*M+2*M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, m, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
/* Multiply right vectors bidiagonalizing L by Q in
VT (Workspace: need 3*M+N, prefer 3*M+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("P", "L", "T", m, n, m, &a[a_offset], lda, &work[itaup],
&vt[vt_offset], ldvt, &work[iwork], &i__2, &ierr);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing right
singular vectors of A in VT (Workspace: need
BDSPAC) */
NUMlapack_dbdsqr ("U", m, n, &c__0, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt, dum,
&c__1, dum, &c__1, &work[iwork], info);
}
} else if (wntuo) {
/* Path 5t(N much larger than M, JOBU='O', JOBVT='S') M
right singular vectors to be computed in VT and M left
singular vectors to be overwritten on A
Computing MAX */
i__2 = *m << 2;
if (*lwork >= (*m << 1) * *m + MAX (i__2, bdspac)) {
/* Sufficient workspace for a fast algorithm */
iu = 1;
if (*lwork >= wrkbl + (*lda << 1) * *m) {
/* WORK(IU) is LDA by M and WORK(IR) is LDA by M */
ldwrku = *lda;
ir = iu + ldwrku * *m;
ldwrkr = *lda;
} else if (*lwork >= wrkbl + (*lda + *m) * *m) {
/* WORK(IU) is LDA by M and WORK(IR) is M by M */
ldwrku = *lda;
ir = iu + ldwrku * *m;
ldwrkr = *m;
} else {
/* WORK(IU) is M by M and WORK(IR) is M by M */
ldwrku = *m;
ir = iu + ldwrku * *m;
ldwrkr = *m;
}
itau = ir + ldwrkr * *m;
iwork = itau + *m;
/* Compute A=L*Q (Workspace: need 2*M*M+2*M, prefer
2*M*M+M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
/* Copy L to WORK(IU), zeroing out above it */
NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &work[iu], &ldwrku);
i__2 = *m - 1;
i__3 = *m - 1;
NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &work[iu + ldwrku], &ldwrku);
/* Generate Q in A (Workspace: need 2*M*M+2*M, prefer
2*M*M+M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorglq (m, n, m, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
ie = itau;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Bidiagonalize L in WORK(IU), copying result to
WORK(IR) (Workspace: need 2*M*M+4*M, prefer
2*M*M+3*M+2*M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, m, &work[iu], &ldwrku, &s[1], &work[ie], &work[itauq],
&work[itaup], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("L", m, m, &work[iu], &ldwrku, &work[ir], &ldwrkr);
/* Generate right bidiagonalizing vectors in WORK(IU)
(Workspace: need 2*M*M+4*M-1, prefer
2*M*M+3*M+(M-1)*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("P", m, m, m, &work[iu], &ldwrku, &work[itaup], &work[iwork], &i__2,
&ierr);
/* Generate left bidiagonalizing vectors in WORK(IR)
(Workspace: need 2*M*M+4*M, prefer 2*M*M+3*M+M*NB)
*/
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", m, m, m, &work[ir], &ldwrkr, &work[itauq], &work[iwork], &i__2,
&ierr);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing left
singular vectors of L in WORK(IR) and computing
right singular vectors of L in WORK(IU)
(Workspace: need 2*M*M+BDSPAC) */
NUMlapack_dbdsqr ("U", m, m, m, &c__0, &s[1], &work[ie], &work[iu], &ldwrku, &work[ir],
&ldwrkr, dum, &c__1, &work[iwork], info);
/* Multiply right singular vectors of L in WORK(IU)
by Q in A, storing result in VT (Workspace: need
M*M) */
NUMblas_dgemm ("N", "N", m, n, m, &c_b438, &work[iu], &ldwrku, &a[a_offset], lda, &c_b416,
&vt[vt_offset], ldvt);
/* Copy left singular vectors of L to A (Workspace:
need M*M) */
NUMlapack_dlacpy ("F", m, m, &work[ir], &ldwrkr, &a[a_offset], lda);
} else {
/* Insufficient workspace for a fast algorithm */
itau = 1;
iwork = itau + *m;
/* Compute A=L*Q, copying result to VT (Workspace:
need 2*M, prefer M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
/* Generate Q in VT (Workspace: need 2*M, prefer
M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorglq (m, n, m, &vt[vt_offset], ldvt, &work[itau], &work[iwork], &i__2,
&ierr);
ie = itau;
itauq = ie + *m;
itaup = itauq + *m;
iwork = itaup + *m;
/* Zero out above L in A */
i__2 = *m - 1;
i__3 = *m - 1;
NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &a_ref (1, 2), lda);
/* Bidiagonalize L in A (Workspace: need 4*M, prefer
3*M+2*M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dgebrd (m, m, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
&work[iwork], &i__2, &ierr);
/* Multiply right vectors bidiagonalizing L by Q in
VT (Workspace: need 3*M+N, prefer 3*M+N*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dormbr ("P", "L", "T", m, n, m, &a[a_offset], lda, &work[itaup],
&vt[vt_offset], ldvt, &work[iwork], &i__2, &ierr);
/* Generate left bidiagonalizing vectors of L in A
(Workspace: need 4*M, prefer 3*M+M*NB) */
i__2 = *lwork - iwork + 1;
NUMlapack_dorgbr ("Q", m, m, m, &a[a_offset], lda, &work[itauq], &work[iwork], &i__2,
&ierr);
iwork = ie + *m;
/* Perform bidiagonal QR iteration, computing left
singular vectors of A in A and computing right
singular vectors of A in VT (Workspace: need
BDSPAC) */
NUMlapack_dbdsqr ("U", m, n, m, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
&a[a_offset], lda, dum, &c__1, &work[iwork], info);
}
} else if (wntuas) {
/* Path 6t(N much larger than M, JOBU='S' or 'A',
JOBVT='S') M right singular vectors to be computed in
VT and M left singular vectors to be computed in U
Computing MAX */
i__2 = *m << 2;
if (*lwork >= *m * *m + MAX (i__2, bdspac)) {
/* Sufficient workspace for a fast algorithm */
iu = 1;
if (*lwork >= wrkbl + *lda * *m) {
/* WORK(IU) is LDA by M */
ldwrku = *lda;
} else {
/* WORK(IU) is M by M */
ldwrku = *m;
}
  4861. itau = iu + ldwrku * *m;
  4862. iwork = itau + *m;
  4863. /* Compute A=L*Q (Workspace: need M*M+2*M, prefer
  4864. M*M+M+M*NB) */
  4865. i__2 = *lwork - iwork + 1;
  4866. NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
  4867. /* Copy L to WORK(IU), zeroing out above it */
  4868. NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &work[iu], &ldwrku);
  4869. i__2 = *m - 1;
  4870. i__3 = *m - 1;
  4871. NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &work[iu + ldwrku], &ldwrku);
  4872. /* Generate Q in A (Workspace: need M*M+2*M, prefer
  4873. M*M+M+M*NB) */
  4874. i__2 = *lwork - iwork + 1;
  4875. NUMlapack_dorglq (m, n, m, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
  4876. ie = itau;
  4877. itauq = ie + *m;
  4878. itaup = itauq + *m;
  4879. iwork = itaup + *m;
  4880. /* Bidiagonalize L in WORK(IU), copying result to U
  4881. (Workspace: need M*M+4*M, prefer M*M+3*M+2*M*NB) */
  4882. i__2 = *lwork - iwork + 1;
  4883. NUMlapack_dgebrd (m, m, &work[iu], &ldwrku, &s[1], &work[ie], &work[itauq],
  4884. &work[itaup], &work[iwork], &i__2, &ierr);
  4885. NUMlapack_dlacpy ("L", m, m, &work[iu], &ldwrku, &u[u_offset], ldu);
  4886. /* Generate right bidiagonalizing vectors in WORK(IU)
  4887. (Workspace: need M*M+4*M-1, prefer
  4888. M*M+3*M+(M-1)*NB) */
  4889. i__2 = *lwork - iwork + 1;
  4890. NUMlapack_dorgbr ("P", m, m, m, &work[iu], &ldwrku, &work[itaup], &work[iwork], &i__2,
  4891. &ierr);
  4892. /* Generate left bidiagonalizing vectors in U
  4893. (Workspace: need M*M+4*M, prefer M*M+3*M+M*NB) */
  4894. i__2 = *lwork - iwork + 1;
  4895. NUMlapack_dorgbr ("Q", m, m, m, &u[u_offset], ldu, &work[itauq], &work[iwork], &i__2,
  4896. &ierr);
  4897. iwork = ie + *m;
  4898. /* Perform bidiagonal QR iteration, computing left
  4899. singular vectors of L in U and computing right
  4900. singular vectors of L in WORK(IU) (Workspace: need
  4901. M*M+BDSPAC) */
  4902. NUMlapack_dbdsqr ("U", m, m, m, &c__0, &s[1], &work[ie], &work[iu], &ldwrku,
  4903. &u[u_offset], ldu, dum, &c__1, &work[iwork], info);
  4904. /* Multiply right singular vectors of L in WORK(IU)
  4905. by Q in A, storing result in VT (Workspace: need
  4906. M*M) */
  4907. NUMblas_dgemm ("N", "N", m, n, m, &c_b438, &work[iu], &ldwrku, &a[a_offset], lda, &c_b416,
  4908. &vt[vt_offset], ldvt);
                    } else {
                        /* Insufficient workspace for a fast algorithm */
                        itau = 1;
                        iwork = itau + *m;
                        /* Compute A=L*Q, copying result to VT (Workspace:
                           need 2*M, prefer M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
                        NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                        /* Generate Q in VT (Workspace: need 2*M, prefer
                           M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorglq (m, n, m, &vt[vt_offset], ldvt, &work[itau], &work[iwork], &i__2,
                            &ierr);
                        /* Copy L to U, zeroing out above it */
                        NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &u[u_offset], ldu);
                        i__2 = *m - 1;
                        i__3 = *m - 1;
                        NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &u_ref (1, 2), ldu);
                        ie = itau;
                        itauq = ie + *m;
                        itaup = itauq + *m;
                        iwork = itaup + *m;
                        /* Bidiagonalize L in U (Workspace: need 4*M, prefer
                           3*M+2*M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgebrd (m, m, &u[u_offset], ldu, &s[1], &work[ie], &work[itauq], &work[itaup],
                            &work[iwork], &i__2, &ierr);
                        /* Multiply right bidiagonalizing vectors in U by Q
                           in VT (Workspace: need 3*M+N, prefer 3*M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dormbr ("P", "L", "T", m, n, m, &u[u_offset], ldu, &work[itaup],
                            &vt[vt_offset], ldvt, &work[iwork], &i__2, &ierr);
                        /* Generate left bidiagonalizing vectors in U
                           (Workspace: need 4*M, prefer 3*M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorgbr ("Q", m, m, m, &u[u_offset], ldu, &work[itauq], &work[iwork], &i__2,
                            &ierr);
                        iwork = ie + *m;
                        /* Perform bidiagonal QR iteration, computing left
                           singular vectors of A in U and computing right
                           singular vectors of A in VT (Workspace: need
                           BDSPAC) */
                        NUMlapack_dbdsqr ("U", m, n, m, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
                            &u[u_offset], ldu, dum, &c__1, &work[iwork], info);
                    }
                }
            } else if (wntva) {
                if (wntun) {
                    /* Path 7t(N much larger than M, JOBU='N', JOBVT='A') N
                       right singular vectors to be computed in VT and no
                       left singular vectors to be computed
                       Computing MAX */
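                    /* f2c residue: the two statements below spell out the test
                       LWORK >= M*M + MAX( N+M, 4*M, BDSPAC ). */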
                    i__2 = *n + *m, i__3 = *m << 2, i__2 = MAX (i__2, i__3);
                    if (*lwork >= *m * *m + MAX (i__2, bdspac)) {
                        /* Sufficient workspace for a fast algorithm */
                        ir = 1;
                        if (*lwork >= wrkbl + *lda * *m) {
                            /* WORK(IR) is LDA by M */
                            ldwrkr = *lda;
                        } else {
                            /* WORK(IR) is M by M */
                            ldwrkr = *m;
                        }
                        itau = ir + ldwrkr * *m;
                        iwork = itau + *m;
                        /* Compute A=L*Q, copying result to VT (Workspace:
                           need M*M+2*M, prefer M*M+M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
                        NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                        /* Copy L to WORK(IR), zeroing out above it */
                        NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &work[ir], &ldwrkr);
                        i__2 = *m - 1;
                        i__3 = *m - 1;
                        NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &work[ir + ldwrkr], &ldwrkr);
                        /* Generate Q in VT (Workspace: need M*M+M+N, prefer
                           M*M+M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorglq (n, n, m, &vt[vt_offset], ldvt, &work[itau], &work[iwork], &i__2,
                            &ierr);
                        ie = itau;
                        itauq = ie + *m;
                        itaup = itauq + *m;
                        iwork = itaup + *m;
                        /* Bidiagonalize L in WORK(IR) (Workspace: need
                           M*M+4*M, prefer M*M+3*M+2*M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgebrd (m, m, &work[ir], &ldwrkr, &s[1], &work[ie], &work[itauq],
                            &work[itaup], &work[iwork], &i__2, &ierr);
                        /* Generate right bidiagonalizing vectors in WORK(IR)
                           (Workspace: need M*M+4*M-1, prefer
                           M*M+3*M+(M-1)*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorgbr ("P", m, m, m, &work[ir], &ldwrkr, &work[itaup], &work[iwork], &i__2,
                            &ierr);
                        iwork = ie + *m;
                        /* Perform bidiagonal QR iteration, computing right
                           singular vectors of L in WORK(IR) (Workspace: need
                           M*M+BDSPAC) */
                        NUMlapack_dbdsqr ("U", m, m, &c__0, &c__0, &s[1], &work[ie], &work[ir], &ldwrkr, dum,
                            &c__1, dum, &c__1, &work[iwork], info);
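                        /* DBDSQR argument pattern: N = M, NCVT = M, NRU = NCC = 0, so
                           only right singular vectors are accumulated; the U and C slots
                           receive the dummy array with leading dimension 1. */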
                        /* Multiply right singular vectors of L in WORK(IR)
                           by Q in VT, storing result in A (Workspace: need
                           M*M) */
                        NUMblas_dgemm ("N", "N", m, n, m, &c_b438, &work[ir], &ldwrkr, &vt[vt_offset], ldvt, &c_b416,
                            &a[a_offset], lda);
                        /* Copy right singular vectors of A from A to VT */
                        NUMlapack_dlacpy ("F", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                    } else {
                        /* Insufficient workspace for a fast algorithm */
                        itau = 1;
                        iwork = itau + *m;
                        /* Compute A=L*Q, copying result to VT (Workspace:
                           need 2*M, prefer M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
                        NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                        /* Generate Q in VT (Workspace: need M+N, prefer
                           M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorglq (n, n, m, &vt[vt_offset], ldvt, &work[itau], &work[iwork], &i__2,
                            &ierr);
                        ie = itau;
                        itauq = ie + *m;
                        itaup = itauq + *m;
                        iwork = itaup + *m;
                        /* Zero out above L in A */
                        i__2 = *m - 1;
                        i__3 = *m - 1;
                        NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &a_ref (1, 2), lda);
                        /* Bidiagonalize L in A (Workspace: need 4*M, prefer
                           3*M+2*M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgebrd (m, m, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
                            &work[iwork], &i__2, &ierr);
                        /* Multiply right bidiagonalizing vectors in A by Q
                           in VT (Workspace: need 3*M+N, prefer 3*M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dormbr ("P", "L", "T", m, n, m, &a[a_offset], lda, &work[itaup],
                            &vt[vt_offset], ldvt, &work[iwork], &i__2, &ierr);
                        iwork = ie + *m;
                        /* Perform bidiagonal QR iteration, computing right
                           singular vectors of A in VT (Workspace: need
                           BDSPAC) */
                        NUMlapack_dbdsqr ("U", m, n, &c__0, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt, dum,
                            &c__1, dum, &c__1, &work[iwork], info);
                    }
                } else if (wntuo) {
                    /* Path 8t(N much larger than M, JOBU='O', JOBVT='A') N
                       right singular vectors to be computed in VT and M left
                       singular vectors to be overwritten on A
                       Computing MAX */
                    i__2 = *n + *m, i__3 = *m << 2, i__2 = MAX (i__2, i__3);
                    if (*lwork >= (*m << 1) * *m + MAX (i__2, bdspac)) {
                        /* Sufficient workspace for a fast algorithm */
                        iu = 1;
                        if (*lwork >= wrkbl + (*lda << 1) * *m) {
                            /* WORK(IU) is LDA by M and WORK(IR) is LDA by M */
                            ldwrku = *lda;
                            ir = iu + ldwrku * *m;
                            ldwrkr = *lda;
                        } else if (*lwork >= wrkbl + (*lda + *m) * *m) {
                            /* WORK(IU) is LDA by M and WORK(IR) is M by M */
                            ldwrku = *lda;
                            ir = iu + ldwrku * *m;
                            ldwrkr = *m;
                        } else {
                            /* WORK(IU) is M by M and WORK(IR) is M by M */
                            ldwrku = *m;
                            ir = iu + ldwrku * *m;
                            ldwrkr = *m;
                        }
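                        /* WORK(IU) will hold the right bidiagonalizing vectors and
                           WORK(IR) the left ones; their leading dimensions fall back
                           from LDA to M as LWORK shrinks (the test above reads
                           LWORK >= 2*M*M + MAX( N+M, 4*M, BDSPAC )). */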
                        itau = ir + ldwrkr * *m;
                        iwork = itau + *m;
                        /* Compute A=L*Q, copying result to VT (Workspace:
                           need 2*M*M+2*M, prefer 2*M*M+M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
                        NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                        /* Generate Q in VT (Workspace: need 2*M*M+M+N,
                           prefer 2*M*M+M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorglq (n, n, m, &vt[vt_offset], ldvt, &work[itau], &work[iwork], &i__2,
                            &ierr);
                        /* Copy L to WORK(IU), zeroing out above it */
                        NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &work[iu], &ldwrku);
                        i__2 = *m - 1;
                        i__3 = *m - 1;
                        NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &work[iu + ldwrku], &ldwrku);
                        ie = itau;
                        itauq = ie + *m;
                        itaup = itauq + *m;
                        iwork = itaup + *m;
                        /* Bidiagonalize L in WORK(IU), copying result to
                           WORK(IR) (Workspace: need 2*M*M+4*M, prefer
                           2*M*M+3*M+2*M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgebrd (m, m, &work[iu], &ldwrku, &s[1], &work[ie], &work[itauq],
                            &work[itaup], &work[iwork], &i__2, &ierr);
                        NUMlapack_dlacpy ("L", m, m, &work[iu], &ldwrku, &work[ir], &ldwrkr);
                        /* Generate right bidiagonalizing vectors in WORK(IU)
                           (Workspace: need 2*M*M+4*M-1, prefer
                           2*M*M+3*M+(M-1)*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorgbr ("P", m, m, m, &work[iu], &ldwrku, &work[itaup], &work[iwork], &i__2,
                            &ierr);
                        /* Generate left bidiagonalizing vectors in WORK(IR)
                           (Workspace: need 2*M*M+4*M, prefer 2*M*M+3*M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorgbr ("Q", m, m, m, &work[ir], &ldwrkr, &work[itauq], &work[iwork], &i__2,
                            &ierr);
                        iwork = ie + *m;
                        /* Perform bidiagonal QR iteration, computing left
                           singular vectors of L in WORK(IR) and computing
                           right singular vectors of L in WORK(IU)
                           (Workspace: need 2*M*M+BDSPAC) */
                        NUMlapack_dbdsqr ("U", m, m, m, &c__0, &s[1], &work[ie], &work[iu], &ldwrku, &work[ir],
                            &ldwrkr, dum, &c__1, &work[iwork], info);
                        /* Multiply right singular vectors of L in WORK(IU)
                           by Q in VT, storing result in A (Workspace: need
                           M*M) */
                        NUMblas_dgemm ("N", "N", m, n, m, &c_b438, &work[iu], &ldwrku, &vt[vt_offset], ldvt, &c_b416,
                            &a[a_offset], lda);
                        /* Copy right singular vectors of A from A to VT */
                        NUMlapack_dlacpy ("F", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                        /* Copy left singular vectors of A from WORK(IR) to A */
                        NUMlapack_dlacpy ("F", m, m, &work[ir], &ldwrkr, &a[a_offset], lda);
                    } else {
                        /* Insufficient workspace for a fast algorithm */
                        itau = 1;
                        iwork = itau + *m;
                        /* Compute A=L*Q, copying result to VT (Workspace:
                           need 2*M, prefer M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
                        NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                        /* Generate Q in VT (Workspace: need M+N, prefer
                           M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorglq (n, n, m, &vt[vt_offset], ldvt, &work[itau], &work[iwork], &i__2,
                            &ierr);
                        ie = itau;
                        itauq = ie + *m;
                        itaup = itauq + *m;
                        iwork = itaup + *m;
                        /* Zero out above L in A */
                        i__2 = *m - 1;
                        i__3 = *m - 1;
                        NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &a_ref (1, 2), lda);
                        /* Bidiagonalize L in A (Workspace: need 4*M, prefer
                           3*M+2*M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgebrd (m, m, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
                            &work[iwork], &i__2, &ierr);
                        /* Multiply right bidiagonalizing vectors in A by Q
                           in VT (Workspace: need 3*M+N, prefer 3*M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dormbr ("P", "L", "T", m, n, m, &a[a_offset], lda, &work[itaup],
                            &vt[vt_offset], ldvt, &work[iwork], &i__2, &ierr);
                        /* Generate left bidiagonalizing vectors in A
                           (Workspace: need 4*M, prefer 3*M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorgbr ("Q", m, m, m, &a[a_offset], lda, &work[itauq], &work[iwork], &i__2,
                            &ierr);
                        iwork = ie + *m;
                        /* Perform bidiagonal QR iteration, computing left
                           singular vectors of A in A and computing right
                           singular vectors of A in VT (Workspace: need
                           BDSPAC) */
                        NUMlapack_dbdsqr ("U", m, n, m, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
                            &a[a_offset], lda, dum, &c__1, &work[iwork], info);
                    }
                } else if (wntuas) {
                    /* Path 9t(N much larger than M, JOBU='S' or 'A',
                       JOBVT='A') N right singular vectors to be computed in
                       VT and M left singular vectors to be computed in U
                       Computing MAX */
                    i__2 = *n + *m, i__3 = *m << 2, i__2 = MAX (i__2, i__3);
                    if (*lwork >= *m * *m + MAX (i__2, bdspac)) {
                        /* Sufficient workspace for a fast algorithm */
                        iu = 1;
                        if (*lwork >= wrkbl + *lda * *m) {
                            /* WORK(IU) is LDA by M */
                            ldwrku = *lda;
                        } else {
                            /* WORK(IU) is M by M */
                            ldwrku = *m;
                        }
                        itau = iu + ldwrku * *m;
                        iwork = itau + *m;
                        /* Compute A=L*Q, copying result to VT (Workspace:
                           need M*M+2*M, prefer M*M+M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
                        NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                        /* Generate Q in VT (Workspace: need M*M+M+N, prefer
                           M*M+M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorglq (n, n, m, &vt[vt_offset], ldvt, &work[itau], &work[iwork], &i__2,
                            &ierr);
                        /* Copy L to WORK(IU), zeroing out above it */
                        NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &work[iu], &ldwrku);
                        i__2 = *m - 1;
                        i__3 = *m - 1;
                        NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &work[iu + ldwrku], &ldwrku);
                        ie = itau;
                        itauq = ie + *m;
                        itaup = itauq + *m;
                        iwork = itaup + *m;
                        /* Bidiagonalize L in WORK(IU), copying result to U
                           (Workspace: need M*M+4*M, prefer M*M+3*M+2*M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgebrd (m, m, &work[iu], &ldwrku, &s[1], &work[ie], &work[itauq],
                            &work[itaup], &work[iwork], &i__2, &ierr);
                        NUMlapack_dlacpy ("L", m, m, &work[iu], &ldwrku, &u[u_offset], ldu);
                        /* Generate right bidiagonalizing vectors in WORK(IU)
                           (Workspace: need M*M+4*M-1, prefer
                           M*M+3*M+(M-1)*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorgbr ("P", m, m, m, &work[iu], &ldwrku, &work[itaup], &work[iwork], &i__2,
                            &ierr);
                        /* Generate left bidiagonalizing vectors in U
                           (Workspace: need M*M+4*M, prefer M*M+3*M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorgbr ("Q", m, m, m, &u[u_offset], ldu, &work[itauq], &work[iwork], &i__2,
                            &ierr);
                        iwork = ie + *m;
                        /* Perform bidiagonal QR iteration, computing left
                           singular vectors of L in U and computing right
                           singular vectors of L in WORK(IU) (Workspace: need
                           M*M+BDSPAC) */
                        NUMlapack_dbdsqr ("U", m, m, m, &c__0, &s[1], &work[ie], &work[iu], &ldwrku,
                            &u[u_offset], ldu, dum, &c__1, &work[iwork], info);
                        /* Multiply right singular vectors of L in WORK(IU)
                           by Q in VT, storing result in A (Workspace: need
                           M*M) */
                        NUMblas_dgemm ("N", "N", m, n, m, &c_b438, &work[iu], &ldwrku, &vt[vt_offset], ldvt, &c_b416,
                            &a[a_offset], lda);
                        /* Copy right singular vectors of A from A to VT */
                        NUMlapack_dlacpy ("F", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
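                        /* A serves as an M-by-N staging area: DGEMM cannot form the
                           product in place in VT, so it is accumulated in A and then
                           copied back over the first M rows of VT. */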
                    } else {
                        /* Insufficient workspace for a fast algorithm */
                        itau = 1;
                        iwork = itau + *m;
                        /* Compute A=L*Q, copying result to VT (Workspace:
                           need 2*M, prefer M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgelqf (m, n, &a[a_offset], lda, &work[itau], &work[iwork], &i__2, &ierr);
                        NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                        /* Generate Q in VT (Workspace: need M+N, prefer
                           M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorglq (n, n, m, &vt[vt_offset], ldvt, &work[itau], &work[iwork], &i__2,
                            &ierr);
                        /* Copy L to U, zeroing out above it */
                        NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &u[u_offset], ldu);
                        i__2 = *m - 1;
                        i__3 = *m - 1;
                        NUMlapack_dlaset ("U", &i__2, &i__3, &c_b416, &c_b416, &u_ref (1, 2), ldu);
                        ie = itau;
                        itauq = ie + *m;
                        itaup = itauq + *m;
                        iwork = itaup + *m;
                        /* Bidiagonalize L in U (Workspace: need 4*M, prefer
                           3*M+2*M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dgebrd (m, m, &u[u_offset], ldu, &s[1], &work[ie], &work[itauq], &work[itaup],
                            &work[iwork], &i__2, &ierr);
                        /* Multiply right bidiagonalizing vectors in U by Q
                           in VT (Workspace: need 3*M+N, prefer 3*M+N*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dormbr ("P", "L", "T", m, n, m, &u[u_offset], ldu, &work[itaup],
                            &vt[vt_offset], ldvt, &work[iwork], &i__2, &ierr);
                        /* Generate left bidiagonalizing vectors in U
                           (Workspace: need 4*M, prefer 3*M+M*NB) */
                        i__2 = *lwork - iwork + 1;
                        NUMlapack_dorgbr ("Q", m, m, m, &u[u_offset], ldu, &work[itauq], &work[iwork], &i__2,
                            &ierr);
                        iwork = ie + *m;
                        /* Perform bidiagonal QR iteration, computing left
                           singular vectors of A in U and computing right
                           singular vectors of A in VT (Workspace: need
                           BDSPAC) */
                        NUMlapack_dbdsqr ("U", m, n, m, &c__0, &s[1], &work[ie], &vt[vt_offset], ldvt,
                            &u[u_offset], ldu, dum, &c__1, &work[iwork], info);
                    }
                }
            }
        } else {
            /* N .LT. MNTHR
               Path 10t(N greater than M, but not much larger) Reduce to
               bidiagonal form without LQ decomposition */
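            /* The whole M-by-N matrix is bidiagonalized directly: ie, itauq and
               itaup partition the front of WORK for the superdiagonal and the two
               sets of reflector scalars; no preliminary LQ factorization is used. */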
            ie = 1;
            itauq = ie + *m;
            itaup = itauq + *m;
            iwork = itaup + *m;
            /* Bidiagonalize A (Workspace: need 3*M+N, prefer 3*M+(M+N)*NB) */
            i__2 = *lwork - iwork + 1;
            NUMlapack_dgebrd (m, n, &a[a_offset], lda, &s[1], &work[ie], &work[itauq], &work[itaup],
                &work[iwork], &i__2, &ierr);
            if (wntuas) {
                /* If left singular vectors desired in U, copy result to U
                   and generate left bidiagonalizing vectors in U (Workspace:
                   need 4*M-1, prefer 3*M+(M-1)*NB) */
                NUMlapack_dlacpy ("L", m, m, &a[a_offset], lda, &u[u_offset], ldu);
                i__2 = *lwork - iwork + 1;
                NUMlapack_dorgbr ("Q", m, m, n, &u[u_offset], ldu, &work[itauq], &work[iwork], &i__2, &ierr);
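                /* Note K = N in this DORGBR call: Q is assembled from the reflectors
                   of the full M-by-N bidiagonalization, not from an M-by-M L factor. */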
            }
            if (wntvas) {
                /* If right singular vectors desired in VT, copy result to VT
                   and generate right bidiagonalizing vectors in VT
                   (Workspace: need 3*M+NRVT, prefer 3*M+NRVT*NB) */
                NUMlapack_dlacpy ("U", m, n, &a[a_offset], lda, &vt[vt_offset], ldvt);
                if (wntva) {
                    nrvt = *n;
                }
                if (wntvs)