pefile.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""pefile, Portable Executable reader module

All the PE file basic structures are available with their default names as
attributes of the instance returned.

Processed elements such as the import table are made available with lowercase
names, to differentiate them from the upper case basic structure names.

pefile has been tested against many edge cases such as corrupted and malformed
PEs as well as malware, which often attempts to abuse the format way beyond its
standard use. To the best of my knowledge most of the abuse is handled
gracefully.

Copyright (c) 2005-2021 Ero Carrera <ero.carrera@gmail.com>
"""
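# Illustrative sketch of typical use, following the docstring above; the file
# name below is a placeholder, not something shipped with this module:
#
#   import pefile
#   pe = pefile.PE('some_executable.exe')
#   print(hex(pe.OPTIONAL_HEADER.AddressOfEntryPoint))
#   for entry in pe.DIRECTORY_ENTRY_IMPORT:
#       print(entry.dll)
#
# Basic structures keep their upper case names (e.g. OPTIONAL_HEADER), while
# processed elements expose lowercase attributes such as entry.dll above.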
from __future__ import division
from __future__ import print_function

from builtins import bytes
from builtins import chr
from builtins import object
from builtins import range
from builtins import str

__author__ = 'Ero Carrera'
__version__ = '2021.5.24'
__contact__ = 'ero.carrera@gmail.com'

import collections
import os
import struct
import sys
import codecs
import time
import math
import string
import mmap
import ordlookup

from collections import Counter
from hashlib import sha1
from hashlib import sha256
from hashlib import sha512
from hashlib import md5
import functools
import copy as copymod

PY3 = sys.version_info > (3,)

if PY3:
    long = int

    # lru_cache with a shallow copy of the objects returned (list, dicts, ..)
    # we don't use deepcopy as it's _really_ slow; a shallow copy.copy is
    # enough for the data retrieved through this cache.
    # taken from https://stackoverflow.com/questions/54909357/how-to-get-functools-lru-cache-to-return-new-instances
    def lru_cache(maxsize=128, typed=False, copy=False):
        if not copy:
            return functools.lru_cache(maxsize, typed)

        def decorator(f):
            cached_func = functools.lru_cache(maxsize, typed)(f)

            @functools.wraps(f)
            def wrapper(*args, **kwargs):
                # return copymod.deepcopy(cached_func(*args, **kwargs))
                return copymod.copy(cached_func(*args, **kwargs))
            return wrapper
        return decorator
else:
    # lru_cache that does nothing on python2
    def lru_cache(maxsize=128, typed=False, copy=False):
        def decorator(f):
            @functools.wraps(f)
            def wrapper(*args, **kwargs):
                return f(*args, **kwargs)
            return wrapper
        return decorator
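# Note on the copy=True variant above: functools.lru_cache hands back the
# cached object itself, so a caller mutating a cached list or dict would
# corrupt the cache. Returning a shallow copy per call avoids that. Minimal
# illustrative sketch (the function name is made up for the example):
#
#   @lru_cache(maxsize=16, copy=True)
#   def make_list():
#       return [1, 2, 3]
#
#   a = make_list()
#   a.append(4)                      # only mutates the caller's copy
#   assert make_list() == [1, 2, 3]  # the cached value is untouched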
@lru_cache(maxsize=2048)
def cache_adjust_FileAlignment(val, file_alignment):
    if file_alignment < FILE_ALIGNMENT_HARDCODED_VALUE:
        return val
    return (int(val / 0x200)) * 0x200


@lru_cache(maxsize=2048)
def cache_adjust_SectionAlignment(val, section_alignment, file_alignment):
    if section_alignment < 0x1000:  # page size
        section_alignment = file_alignment

    # 0x200 is the minimum valid FileAlignment according to the documentation
    # although ntoskrnl.exe has an alignment of 0x80 in some Windows versions
    #
    # elif section_alignment < 0x80:
    #     section_alignment = 0x80

    if section_alignment and val % section_alignment:
        return section_alignment * (int(val / section_alignment))
    return val
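# Illustrative values for the adjustments above (chosen arbitrarily): with a
# SectionAlignment of 0x1000, an address of 0x1234 is rounded down to the
# start of its page, and a FileAlignment below the hardcoded minimum of 0x200
# leaves file offsets untouched.
#
#   cache_adjust_SectionAlignment(0x1234, 0x1000, 0x200)  # -> 0x1000
#   cache_adjust_FileAlignment(0x123, 0x80)               # -> 0x123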
def count_zeroes(data):
    try:
        # newbytes' count() takes a str in Python 2
        count = data.count('\0')
    except TypeError:
        # bytes' count() takes an int in Python 3
        count = data.count(0)
    return count


fast_load = False

# This will set a maximum length of a string to be retrieved from the file.
# It's there to prevent loading massive amounts of data from memory mapped
# files. Strings longer than 1MB should be rather rare.
MAX_STRING_LENGTH = 0x100000  # 2^20

# Maximum number of imports to parse.
MAX_IMPORT_SYMBOLS = 0x2000

# Limit maximum length for specific string types separately
MAX_IMPORT_NAME_LENGTH = 0x200
MAX_DLL_LENGTH = 0x200
MAX_SYMBOL_NAME_LENGTH = 0x200

# Limit maximum number of sections before processing of sections will stop
MAX_SECTIONS = 0x800

# The global maximum number of resource entries to parse per file
MAX_RESOURCE_ENTRIES = 0x8000

# The maximum depth of nested resource tables
MAX_RESOURCE_DEPTH = 32

# Limit number of exported symbols
MAX_SYMBOL_EXPORT_COUNT = 0x2000

IMAGE_DOS_SIGNATURE = 0x5A4D
IMAGE_DOSZM_SIGNATURE = 0x4D5A
IMAGE_NE_SIGNATURE = 0x454E
IMAGE_LE_SIGNATURE = 0x454C
IMAGE_LX_SIGNATURE = 0x584C
IMAGE_TE_SIGNATURE = 0x5A56  # Terse Executables have a 'VZ' signature
IMAGE_NT_SIGNATURE = 0x00004550
IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16
IMAGE_ORDINAL_FLAG = 0x80000000
IMAGE_ORDINAL_FLAG64 = 0x8000000000000000
OPTIONAL_HEADER_MAGIC_PE = 0x10b
OPTIONAL_HEADER_MAGIC_PE_PLUS = 0x20b


def two_way_dict(pairs):
    return dict([(e[1], e[0]) for e in pairs] + pairs)
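# two_way_dict builds a dictionary that can be looked up in both directions,
# name -> value and value -> name. Illustrative sketch:
#
#   d = two_way_dict([('A', 1), ('B', 2)])
#   d['A']  # -> 1
#   d[1]    # -> 'A'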
directory_entry_types = [
    ('IMAGE_DIRECTORY_ENTRY_EXPORT', 0),
    ('IMAGE_DIRECTORY_ENTRY_IMPORT', 1),
    ('IMAGE_DIRECTORY_ENTRY_RESOURCE', 2),
    ('IMAGE_DIRECTORY_ENTRY_EXCEPTION', 3),
    ('IMAGE_DIRECTORY_ENTRY_SECURITY', 4),
    ('IMAGE_DIRECTORY_ENTRY_BASERELOC', 5),
    ('IMAGE_DIRECTORY_ENTRY_DEBUG', 6),
    # Architecture on non-x86 platforms
    ('IMAGE_DIRECTORY_ENTRY_COPYRIGHT', 7),
    ('IMAGE_DIRECTORY_ENTRY_GLOBALPTR', 8),
    ('IMAGE_DIRECTORY_ENTRY_TLS', 9),
    ('IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG', 10),
    ('IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT', 11),
    ('IMAGE_DIRECTORY_ENTRY_IAT', 12),
    ('IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT', 13),
    ('IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR', 14),
    ('IMAGE_DIRECTORY_ENTRY_RESERVED', 15)]

DIRECTORY_ENTRY = two_way_dict(directory_entry_types)

image_characteristics = [
    ('IMAGE_FILE_RELOCS_STRIPPED', 0x0001),
    ('IMAGE_FILE_EXECUTABLE_IMAGE', 0x0002),
    ('IMAGE_FILE_LINE_NUMS_STRIPPED', 0x0004),
    ('IMAGE_FILE_LOCAL_SYMS_STRIPPED', 0x0008),
    ('IMAGE_FILE_AGGRESIVE_WS_TRIM', 0x0010),
    ('IMAGE_FILE_LARGE_ADDRESS_AWARE', 0x0020),
    ('IMAGE_FILE_16BIT_MACHINE', 0x0040),
    ('IMAGE_FILE_BYTES_REVERSED_LO', 0x0080),
    ('IMAGE_FILE_32BIT_MACHINE', 0x0100),
    ('IMAGE_FILE_DEBUG_STRIPPED', 0x0200),
    ('IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP', 0x0400),
    ('IMAGE_FILE_NET_RUN_FROM_SWAP', 0x0800),
    ('IMAGE_FILE_SYSTEM', 0x1000),
    ('IMAGE_FILE_DLL', 0x2000),
    ('IMAGE_FILE_UP_SYSTEM_ONLY', 0x4000),
    ('IMAGE_FILE_BYTES_REVERSED_HI', 0x8000)]

IMAGE_CHARACTERISTICS = two_way_dict(image_characteristics)

section_characteristics = [
    ('IMAGE_SCN_TYPE_REG', 0x00000000),  # reserved
    ('IMAGE_SCN_TYPE_DSECT', 0x00000001),  # reserved
    ('IMAGE_SCN_TYPE_NOLOAD', 0x00000002),  # reserved
    ('IMAGE_SCN_TYPE_GROUP', 0x00000004),  # reserved
    ('IMAGE_SCN_TYPE_NO_PAD', 0x00000008),  # reserved
    ('IMAGE_SCN_TYPE_COPY', 0x00000010),  # reserved
    ('IMAGE_SCN_CNT_CODE', 0x00000020),
    ('IMAGE_SCN_CNT_INITIALIZED_DATA', 0x00000040),
    ('IMAGE_SCN_CNT_UNINITIALIZED_DATA', 0x00000080),
    ('IMAGE_SCN_LNK_OTHER', 0x00000100),
    ('IMAGE_SCN_LNK_INFO', 0x00000200),
    ('IMAGE_SCN_LNK_OVER', 0x00000400),  # reserved
    ('IMAGE_SCN_LNK_REMOVE', 0x00000800),
    ('IMAGE_SCN_LNK_COMDAT', 0x00001000),
    ('IMAGE_SCN_MEM_PROTECTED', 0x00004000),  # obsolete
    ('IMAGE_SCN_NO_DEFER_SPEC_EXC', 0x00004000),
    ('IMAGE_SCN_GPREL', 0x00008000),
    ('IMAGE_SCN_MEM_FARDATA', 0x00008000),
    ('IMAGE_SCN_MEM_SYSHEAP', 0x00010000),  # obsolete
    ('IMAGE_SCN_MEM_PURGEABLE', 0x00020000),
    ('IMAGE_SCN_MEM_16BIT', 0x00020000),
    ('IMAGE_SCN_MEM_LOCKED', 0x00040000),
    ('IMAGE_SCN_MEM_PRELOAD', 0x00080000),
    ('IMAGE_SCN_ALIGN_1BYTES', 0x00100000),
    ('IMAGE_SCN_ALIGN_2BYTES', 0x00200000),
    ('IMAGE_SCN_ALIGN_4BYTES', 0x00300000),
    ('IMAGE_SCN_ALIGN_8BYTES', 0x00400000),
    ('IMAGE_SCN_ALIGN_16BYTES', 0x00500000),  # default alignment
    ('IMAGE_SCN_ALIGN_32BYTES', 0x00600000),
    ('IMAGE_SCN_ALIGN_64BYTES', 0x00700000),
    ('IMAGE_SCN_ALIGN_128BYTES', 0x00800000),
    ('IMAGE_SCN_ALIGN_256BYTES', 0x00900000),
    ('IMAGE_SCN_ALIGN_512BYTES', 0x00A00000),
    ('IMAGE_SCN_ALIGN_1024BYTES', 0x00B00000),
    ('IMAGE_SCN_ALIGN_2048BYTES', 0x00C00000),
    ('IMAGE_SCN_ALIGN_4096BYTES', 0x00D00000),
    ('IMAGE_SCN_ALIGN_8192BYTES', 0x00E00000),
    ('IMAGE_SCN_ALIGN_MASK', 0x00F00000),
    ('IMAGE_SCN_LNK_NRELOC_OVFL', 0x01000000),
    ('IMAGE_SCN_MEM_DISCARDABLE', 0x02000000),
    ('IMAGE_SCN_MEM_NOT_CACHED', 0x04000000),
    ('IMAGE_SCN_MEM_NOT_PAGED', 0x08000000),
    ('IMAGE_SCN_MEM_SHARED', 0x10000000),
    ('IMAGE_SCN_MEM_EXECUTE', 0x20000000),
    ('IMAGE_SCN_MEM_READ', 0x40000000),
    ('IMAGE_SCN_MEM_WRITE', 0x80000000)]

SECTION_CHARACTERISTICS = two_way_dict(section_characteristics)
debug_types = [
    ('IMAGE_DEBUG_TYPE_UNKNOWN', 0),
    ('IMAGE_DEBUG_TYPE_COFF', 1),
    ('IMAGE_DEBUG_TYPE_CODEVIEW', 2),
    ('IMAGE_DEBUG_TYPE_FPO', 3),
    ('IMAGE_DEBUG_TYPE_MISC', 4),
    ('IMAGE_DEBUG_TYPE_EXCEPTION', 5),
    ('IMAGE_DEBUG_TYPE_FIXUP', 6),
    ('IMAGE_DEBUG_TYPE_OMAP_TO_SRC', 7),
    ('IMAGE_DEBUG_TYPE_OMAP_FROM_SRC', 8),
    ('IMAGE_DEBUG_TYPE_BORLAND', 9),
    ('IMAGE_DEBUG_TYPE_RESERVED10', 10),
    ('IMAGE_DEBUG_TYPE_CLSID', 11),
    ('IMAGE_DEBUG_TYPE_VC_FEATURE', 12),
    ('IMAGE_DEBUG_TYPE_POGO', 13),
    ('IMAGE_DEBUG_TYPE_ILTCG', 14),
    ('IMAGE_DEBUG_TYPE_MPX', 15),
    ('IMAGE_DEBUG_TYPE_REPRO', 16),
    ('IMAGE_DEBUG_TYPE_EX_DLLCHARACTERISTICS', 20)]

DEBUG_TYPE = two_way_dict(debug_types)

subsystem_types = [
    ('IMAGE_SUBSYSTEM_UNKNOWN', 0),
    ('IMAGE_SUBSYSTEM_NATIVE', 1),
    ('IMAGE_SUBSYSTEM_WINDOWS_GUI', 2),
    ('IMAGE_SUBSYSTEM_WINDOWS_CUI', 3),
    ('IMAGE_SUBSYSTEM_OS2_CUI', 5),
    ('IMAGE_SUBSYSTEM_POSIX_CUI', 7),
    ('IMAGE_SUBSYSTEM_NATIVE_WINDOWS', 8),
    ('IMAGE_SUBSYSTEM_WINDOWS_CE_GUI', 9),
    ('IMAGE_SUBSYSTEM_EFI_APPLICATION', 10),
    ('IMAGE_SUBSYSTEM_EFI_BOOT_SERVICE_DRIVER', 11),
    ('IMAGE_SUBSYSTEM_EFI_RUNTIME_DRIVER', 12),
    ('IMAGE_SUBSYSTEM_EFI_ROM', 13),
    ('IMAGE_SUBSYSTEM_XBOX', 14),
    ('IMAGE_SUBSYSTEM_WINDOWS_BOOT_APPLICATION', 16)]

SUBSYSTEM_TYPE = two_way_dict(subsystem_types)

machine_types = [
    ('IMAGE_FILE_MACHINE_UNKNOWN', 0),
    ('IMAGE_FILE_MACHINE_I386', 0x014c),
    ('IMAGE_FILE_MACHINE_R3000', 0x0162),
    ('IMAGE_FILE_MACHINE_R4000', 0x0166),
    ('IMAGE_FILE_MACHINE_R10000', 0x0168),
    ('IMAGE_FILE_MACHINE_WCEMIPSV2', 0x0169),
    ('IMAGE_FILE_MACHINE_ALPHA', 0x0184),
    ('IMAGE_FILE_MACHINE_SH3', 0x01a2),
    ('IMAGE_FILE_MACHINE_SH3DSP', 0x01a3),
    ('IMAGE_FILE_MACHINE_SH3E', 0x01a4),
    ('IMAGE_FILE_MACHINE_SH4', 0x01a6),
    ('IMAGE_FILE_MACHINE_SH5', 0x01a8),
    ('IMAGE_FILE_MACHINE_ARM', 0x01c0),
    ('IMAGE_FILE_MACHINE_THUMB', 0x01c2),
    ('IMAGE_FILE_MACHINE_ARMNT', 0x01c4),
    ('IMAGE_FILE_MACHINE_AM33', 0x01d3),
    ('IMAGE_FILE_MACHINE_POWERPC', 0x01f0),
    ('IMAGE_FILE_MACHINE_POWERPCFP', 0x01f1),
    ('IMAGE_FILE_MACHINE_IA64', 0x0200),
    ('IMAGE_FILE_MACHINE_MIPS16', 0x0266),
    ('IMAGE_FILE_MACHINE_ALPHA64', 0x0284),
    ('IMAGE_FILE_MACHINE_AXP64', 0x0284),  # same
    ('IMAGE_FILE_MACHINE_MIPSFPU', 0x0366),
    ('IMAGE_FILE_MACHINE_MIPSFPU16', 0x0466),
    ('IMAGE_FILE_MACHINE_TRICORE', 0x0520),
    ('IMAGE_FILE_MACHINE_CEF', 0x0cef),
    ('IMAGE_FILE_MACHINE_EBC', 0x0ebc),
    ('IMAGE_FILE_MACHINE_AMD64', 0x8664),
    ('IMAGE_FILE_MACHINE_M32R', 0x9041),
    ('IMAGE_FILE_MACHINE_ARM64', 0xaa64),
    ('IMAGE_FILE_MACHINE_CEE', 0xc0ee),
]

MACHINE_TYPE = two_way_dict(machine_types)

relocation_types = [
    ('IMAGE_REL_BASED_ABSOLUTE', 0),
    ('IMAGE_REL_BASED_HIGH', 1),
    ('IMAGE_REL_BASED_LOW', 2),
    ('IMAGE_REL_BASED_HIGHLOW', 3),
    ('IMAGE_REL_BASED_HIGHADJ', 4),
    ('IMAGE_REL_BASED_MIPS_JMPADDR', 5),
    ('IMAGE_REL_BASED_SECTION', 6),
    ('IMAGE_REL_BASED_REL', 7),
    ('IMAGE_REL_BASED_MIPS_JMPADDR16', 9),
    ('IMAGE_REL_BASED_IA64_IMM64', 9),
    ('IMAGE_REL_BASED_DIR64', 10),
    ('IMAGE_REL_BASED_HIGH3ADJ', 11)]

RELOCATION_TYPE = two_way_dict(relocation_types)

dll_characteristics = [
    ('IMAGE_LIBRARY_PROCESS_INIT', 0x0001),  # reserved
    ('IMAGE_LIBRARY_PROCESS_TERM', 0x0002),  # reserved
    ('IMAGE_LIBRARY_THREAD_INIT', 0x0004),  # reserved
    ('IMAGE_LIBRARY_THREAD_TERM', 0x0008),  # reserved
    ('IMAGE_DLLCHARACTERISTICS_HIGH_ENTROPY_VA', 0x0020),
    ('IMAGE_DLLCHARACTERISTICS_DYNAMIC_BASE', 0x0040),
    ('IMAGE_DLLCHARACTERISTICS_FORCE_INTEGRITY', 0x0080),
    ('IMAGE_DLLCHARACTERISTICS_NX_COMPAT', 0x0100),
    ('IMAGE_DLLCHARACTERISTICS_NO_ISOLATION', 0x0200),
    ('IMAGE_DLLCHARACTERISTICS_NO_SEH', 0x0400),
    ('IMAGE_DLLCHARACTERISTICS_NO_BIND', 0x0800),
    ('IMAGE_DLLCHARACTERISTICS_APPCONTAINER', 0x1000),
    ('IMAGE_DLLCHARACTERISTICS_WDM_DRIVER', 0x2000),
    ('IMAGE_DLLCHARACTERISTICS_GUARD_CF', 0x4000),
    ('IMAGE_DLLCHARACTERISTICS_TERMINAL_SERVER_AWARE', 0x8000)]

DLL_CHARACTERISTICS = two_way_dict(dll_characteristics)

FILE_ALIGNMENT_HARDCODED_VALUE = 0x200

# Resource types
resource_type = [
    ('RT_CURSOR', 1),
    ('RT_BITMAP', 2),
    ('RT_ICON', 3),
    ('RT_MENU', 4),
    ('RT_DIALOG', 5),
    ('RT_STRING', 6),
    ('RT_FONTDIR', 7),
    ('RT_FONT', 8),
    ('RT_ACCELERATOR', 9),
    ('RT_RCDATA', 10),
    ('RT_MESSAGETABLE', 11),
    ('RT_GROUP_CURSOR', 12),
    ('RT_GROUP_ICON', 14),
    ('RT_VERSION', 16),
    ('RT_DLGINCLUDE', 17),
    ('RT_PLUGPLAY', 19),
    ('RT_VXD', 20),
    ('RT_ANICURSOR', 21),
    ('RT_ANIICON', 22),
    ('RT_HTML', 23),
    ('RT_MANIFEST', 24)]

RESOURCE_TYPE = two_way_dict(resource_type)
# Language definitions
lang = [
    ('LANG_NEUTRAL', 0x00),
    ('LANG_INVARIANT', 0x7f),
    ('LANG_AFRIKAANS', 0x36),
    ('LANG_ALBANIAN', 0x1c),
    ('LANG_ARABIC', 0x01),
    ('LANG_ARMENIAN', 0x2b),
    ('LANG_ASSAMESE', 0x4d),
    ('LANG_AZERI', 0x2c),
    ('LANG_BASQUE', 0x2d),
    ('LANG_BELARUSIAN', 0x23),
    ('LANG_BENGALI', 0x45),
    ('LANG_BULGARIAN', 0x02),
    ('LANG_CATALAN', 0x03),
    ('LANG_CHINESE', 0x04),
    ('LANG_CROATIAN', 0x1a),
    ('LANG_CZECH', 0x05),
    ('LANG_DANISH', 0x06),
    ('LANG_DIVEHI', 0x65),
    ('LANG_DUTCH', 0x13),
    ('LANG_ENGLISH', 0x09),
    ('LANG_ESTONIAN', 0x25),
    ('LANG_FAEROESE', 0x38),
    ('LANG_FARSI', 0x29),
    ('LANG_FINNISH', 0x0b),
    ('LANG_FRENCH', 0x0c),
    ('LANG_GALICIAN', 0x56),
    ('LANG_GEORGIAN', 0x37),
    ('LANG_GERMAN', 0x07),
    ('LANG_GREEK', 0x08),
    ('LANG_GUJARATI', 0x47),
    ('LANG_HEBREW', 0x0d),
    ('LANG_HINDI', 0x39),
    ('LANG_HUNGARIAN', 0x0e),
    ('LANG_ICELANDIC', 0x0f),
    ('LANG_INDONESIAN', 0x21),
    ('LANG_ITALIAN', 0x10),
    ('LANG_JAPANESE', 0x11),
    ('LANG_KANNADA', 0x4b),
    ('LANG_KASHMIRI', 0x60),
    ('LANG_KAZAK', 0x3f),
    ('LANG_KONKANI', 0x57),
    ('LANG_KOREAN', 0x12),
    ('LANG_KYRGYZ', 0x40),
    ('LANG_LATVIAN', 0x26),
    ('LANG_LITHUANIAN', 0x27),
    ('LANG_MACEDONIAN', 0x2f),
    ('LANG_MALAY', 0x3e),
    ('LANG_MALAYALAM', 0x4c),
    ('LANG_MANIPURI', 0x58),
    ('LANG_MARATHI', 0x4e),
    ('LANG_MONGOLIAN', 0x50),
    ('LANG_NEPALI', 0x61),
    ('LANG_NORWEGIAN', 0x14),
    ('LANG_ORIYA', 0x48),
    ('LANG_POLISH', 0x15),
    ('LANG_PORTUGUESE', 0x16),
    ('LANG_PUNJABI', 0x46),
    ('LANG_ROMANIAN', 0x18),
    ('LANG_RUSSIAN', 0x19),
    ('LANG_SANSKRIT', 0x4f),
    ('LANG_SERBIAN', 0x1a),
    ('LANG_SINDHI', 0x59),
    ('LANG_SLOVAK', 0x1b),
    ('LANG_SLOVENIAN', 0x24),
    ('LANG_SPANISH', 0x0a),
    ('LANG_SWAHILI', 0x41),
    ('LANG_SWEDISH', 0x1d),
    ('LANG_SYRIAC', 0x5a),
    ('LANG_TAMIL', 0x49),
    ('LANG_TATAR', 0x44),
    ('LANG_TELUGU', 0x4a),
    ('LANG_THAI', 0x1e),
    ('LANG_TURKISH', 0x1f),
    ('LANG_UKRAINIAN', 0x22),
    ('LANG_URDU', 0x20),
    ('LANG_UZBEK', 0x43),
    ('LANG_VIETNAMESE', 0x2a),
    ('LANG_GAELIC', 0x3c),
    ('LANG_MALTESE', 0x3a),
    ('LANG_MAORI', 0x28),
    ('LANG_RHAETO_ROMANCE', 0x17),
    ('LANG_SAAMI', 0x3b),
    ('LANG_SORBIAN', 0x2e),
    ('LANG_SUTU', 0x30),
    ('LANG_TSONGA', 0x31),
    ('LANG_TSWANA', 0x32),
    ('LANG_VENDA', 0x33),
    ('LANG_XHOSA', 0x34),
    ('LANG_ZULU', 0x35),
    ('LANG_ESPERANTO', 0x8f),
    ('LANG_WALON', 0x90),
    ('LANG_CORNISH', 0x91),
    ('LANG_WELSH', 0x92),
    ('LANG_BRETON', 0x93)]

LANG = two_way_dict(lang)
# Sublanguage definitions
sublang = [
    ('SUBLANG_NEUTRAL', 0x00),
    ('SUBLANG_DEFAULT', 0x01),
    ('SUBLANG_SYS_DEFAULT', 0x02),
    ('SUBLANG_ARABIC_SAUDI_ARABIA', 0x01),
    ('SUBLANG_ARABIC_IRAQ', 0x02),
    ('SUBLANG_ARABIC_EGYPT', 0x03),
    ('SUBLANG_ARABIC_LIBYA', 0x04),
    ('SUBLANG_ARABIC_ALGERIA', 0x05),
    ('SUBLANG_ARABIC_MOROCCO', 0x06),
    ('SUBLANG_ARABIC_TUNISIA', 0x07),
    ('SUBLANG_ARABIC_OMAN', 0x08),
    ('SUBLANG_ARABIC_YEMEN', 0x09),
    ('SUBLANG_ARABIC_SYRIA', 0x0a),
    ('SUBLANG_ARABIC_JORDAN', 0x0b),
    ('SUBLANG_ARABIC_LEBANON', 0x0c),
    ('SUBLANG_ARABIC_KUWAIT', 0x0d),
    ('SUBLANG_ARABIC_UAE', 0x0e),
    ('SUBLANG_ARABIC_BAHRAIN', 0x0f),
    ('SUBLANG_ARABIC_QATAR', 0x10),
    ('SUBLANG_AZERI_LATIN', 0x01),
    ('SUBLANG_AZERI_CYRILLIC', 0x02),
    ('SUBLANG_CHINESE_TRADITIONAL', 0x01),
    ('SUBLANG_CHINESE_SIMPLIFIED', 0x02),
    ('SUBLANG_CHINESE_HONGKONG', 0x03),
    ('SUBLANG_CHINESE_SINGAPORE', 0x04),
    ('SUBLANG_CHINESE_MACAU', 0x05),
    ('SUBLANG_DUTCH', 0x01),
    ('SUBLANG_DUTCH_BELGIAN', 0x02),
    ('SUBLANG_ENGLISH_US', 0x01),
    ('SUBLANG_ENGLISH_UK', 0x02),
    ('SUBLANG_ENGLISH_AUS', 0x03),
    ('SUBLANG_ENGLISH_CAN', 0x04),
    ('SUBLANG_ENGLISH_NZ', 0x05),
    ('SUBLANG_ENGLISH_EIRE', 0x06),
    ('SUBLANG_ENGLISH_SOUTH_AFRICA', 0x07),
    ('SUBLANG_ENGLISH_JAMAICA', 0x08),
    ('SUBLANG_ENGLISH_CARIBBEAN', 0x09),
    ('SUBLANG_ENGLISH_BELIZE', 0x0a),
    ('SUBLANG_ENGLISH_TRINIDAD', 0x0b),
    ('SUBLANG_ENGLISH_ZIMBABWE', 0x0c),
    ('SUBLANG_ENGLISH_PHILIPPINES', 0x0d),
    ('SUBLANG_FRENCH', 0x01),
    ('SUBLANG_FRENCH_BELGIAN', 0x02),
    ('SUBLANG_FRENCH_CANADIAN', 0x03),
    ('SUBLANG_FRENCH_SWISS', 0x04),
    ('SUBLANG_FRENCH_LUXEMBOURG', 0x05),
    ('SUBLANG_FRENCH_MONACO', 0x06),
    ('SUBLANG_GERMAN', 0x01),
    ('SUBLANG_GERMAN_SWISS', 0x02),
    ('SUBLANG_GERMAN_AUSTRIAN', 0x03),
    ('SUBLANG_GERMAN_LUXEMBOURG', 0x04),
    ('SUBLANG_GERMAN_LIECHTENSTEIN', 0x05),
    ('SUBLANG_ITALIAN', 0x01),
    ('SUBLANG_ITALIAN_SWISS', 0x02),
    ('SUBLANG_KASHMIRI_SASIA', 0x02),
    ('SUBLANG_KASHMIRI_INDIA', 0x02),
    ('SUBLANG_KOREAN', 0x01),
    ('SUBLANG_LITHUANIAN', 0x01),
    ('SUBLANG_MALAY_MALAYSIA', 0x01),
    ('SUBLANG_MALAY_BRUNEI_DARUSSALAM', 0x02),
    ('SUBLANG_NEPALI_INDIA', 0x02),
    ('SUBLANG_NORWEGIAN_BOKMAL', 0x01),
    ('SUBLANG_NORWEGIAN_NYNORSK', 0x02),
    ('SUBLANG_PORTUGUESE', 0x02),
    ('SUBLANG_PORTUGUESE_BRAZILIAN', 0x01),
    ('SUBLANG_SERBIAN_LATIN', 0x02),
    ('SUBLANG_SERBIAN_CYRILLIC', 0x03),
    ('SUBLANG_SPANISH', 0x01),
    ('SUBLANG_SPANISH_MEXICAN', 0x02),
    ('SUBLANG_SPANISH_MODERN', 0x03),
    ('SUBLANG_SPANISH_GUATEMALA', 0x04),
    ('SUBLANG_SPANISH_COSTA_RICA', 0x05),
    ('SUBLANG_SPANISH_PANAMA', 0x06),
    ('SUBLANG_SPANISH_DOMINICAN_REPUBLIC', 0x07),
    ('SUBLANG_SPANISH_VENEZUELA', 0x08),
    ('SUBLANG_SPANISH_COLOMBIA', 0x09),
    ('SUBLANG_SPANISH_PERU', 0x0a),
    ('SUBLANG_SPANISH_ARGENTINA', 0x0b),
    ('SUBLANG_SPANISH_ECUADOR', 0x0c),
    ('SUBLANG_SPANISH_CHILE', 0x0d),
    ('SUBLANG_SPANISH_URUGUAY', 0x0e),
    ('SUBLANG_SPANISH_PARAGUAY', 0x0f),
    ('SUBLANG_SPANISH_BOLIVIA', 0x10),
    ('SUBLANG_SPANISH_EL_SALVADOR', 0x11),
    ('SUBLANG_SPANISH_HONDURAS', 0x12),
    ('SUBLANG_SPANISH_NICARAGUA', 0x13),
    ('SUBLANG_SPANISH_PUERTO_RICO', 0x14),
    ('SUBLANG_SWEDISH', 0x01),
    ('SUBLANG_SWEDISH_FINLAND', 0x02),
    ('SUBLANG_URDU_PAKISTAN', 0x01),
    ('SUBLANG_URDU_INDIA', 0x02),
    ('SUBLANG_UZBEK_LATIN', 0x01),
    ('SUBLANG_UZBEK_CYRILLIC', 0x02),
    ('SUBLANG_DUTCH_SURINAM', 0x03),
    ('SUBLANG_ROMANIAN', 0x01),
    ('SUBLANG_ROMANIAN_MOLDAVIA', 0x02),
    ('SUBLANG_RUSSIAN', 0x01),
    ('SUBLANG_RUSSIAN_MOLDAVIA', 0x02),
    ('SUBLANG_CROATIAN', 0x01),
    ('SUBLANG_LITHUANIAN_CLASSIC', 0x02),
    ('SUBLANG_GAELIC', 0x01),
    ('SUBLANG_GAELIC_SCOTTISH', 0x02),
    ('SUBLANG_GAELIC_MANX', 0x03)]

SUBLANG = two_way_dict(sublang)
# Initialize the dictionary with all the name->value pairs
SUBLANG = dict(sublang)

# Now add all the value->name information, handling duplicates appropriately
for sublang_name, sublang_value in sublang:
    if sublang_value in SUBLANG:
        SUBLANG[sublang_value].append(sublang_name)
    else:
        SUBLANG[sublang_value] = [sublang_name]


# Resolve a sublang name given the main lang name
#
def get_sublang_name_for_lang(lang_value, sublang_value):
    lang_name = LANG.get(lang_value, '*unknown*')
    for sublang_name in SUBLANG.get(sublang_value, list()):
        # if the main language is a substring of sublang's name, then
        # return that
        if lang_name in sublang_name:
            return sublang_name
    # otherwise return the first sublang name
    return SUBLANG.get(sublang_value, ['*unknown*'])[0]
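# Illustrative lookup: sublanguage value 0x02 maps to many names, but for
# LANG_ENGLISH (0x09) the first candidate containing 'LANG_ENGLISH' is
# 'SUBLANG_ENGLISH_UK', so that is the name returned:
#
#   get_sublang_name_for_lang(LANG['LANG_ENGLISH'], 0x02)
#   # -> 'SUBLANG_ENGLISH_UK'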
# Ange Albertini's code to process resources' strings
#
def parse_strings(data, counter, l):
    i = 0
    error_count = 0
    while i < len(data):
        data_slice = data[i:i + 2]
        if len(data_slice) < 2:
            break

        len_ = struct.unpack("<h", data_slice)[0]
        i += 2
        if len_ != 0 and 0 <= len_ * 2 <= len(data):
            try:
                l[counter] = b(data[i: i + len_ * 2]).decode('utf-16le')
            except UnicodeDecodeError:
                error_count += 1
                pass
            if error_count >= 3:
                break
            i += len_ * 2
        counter += 1
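# The data handled above is an RT_STRING resource block: a sequence of
# UTF-16LE strings, each preceded by a 16-bit character count, with empty
# slots encoded as a count of 0. Minimal illustrative call, decoding two
# strings "Hi" and "Ok" into a caller-supplied dict:
#
#   strings = {}
#   blob = b'\x02\x00H\x00i\x00' + b'\x00\x00' + b'\x02\x00O\x00k\x00'
#   parse_strings(blob, 0, strings)
#   # strings == {0: u'Hi', 2: u'Ok'}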
def retrieve_flags(flag_dict, flag_filter):
    """Read the flags from a dictionary and return them in a usable form.

    Will return a list of (flag, value) for all flags in "flag_dict"
    matching the filter "flag_filter".
    """
    return [(flag, flag_dict[flag]) for flag in flag_dict.keys() if
            isinstance(flag, (str, bytes)) and flag.startswith(flag_filter)]


def set_flags(obj, flag_field, flags):
    """Will process the flags and set attributes in the object accordingly.

    The object "obj" will gain attributes named after the flags provided in
    "flags" and valued True/False, matching the results of applying each
    flag value from "flags" to flag_field.
    """
    for flag, value in flags:
        if value & flag_field:
            obj.__dict__[flag] = True
        else:
            obj.__dict__[flag] = False
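# Together these two helpers turn a raw bitmask into named boolean attributes.
# Illustrative sketch using the section characteristics defined above (the
# _Flags class is made up for the example):
#
#   class _Flags(object):
#       pass
#
#   section_flags = retrieve_flags(SECTION_CHARACTERISTICS, 'IMAGE_SCN_')
#   f = _Flags()
#   set_flags(f, 0x60000020, section_flags)
#   # f.IMAGE_SCN_CNT_CODE, f.IMAGE_SCN_MEM_EXECUTE and f.IMAGE_SCN_MEM_READ
#   # are now True; the remaining IMAGE_SCN_* attributes are False.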
def power_of_two(val):
    return val != 0 and (val & (val - 1)) == 0


# These come from the article[1], which contains great insights on
# working with unicode in both Python 2 and 3.
# [1]: http://python3porting.com/problems.html
if not PY3:
    def handler(err):
        start = err.start
        end = err.end
        values = [
            ('\\u{0:04x}' if ord(err.object[i]) > 255 else '\\x{0:02x}',
             ord(err.object[i])) for i in range(start, end)]
        return (
            u"".join([elm[0].format(elm[1]) for elm in values]),
            end)

    import codecs
    codecs.register_error('backslashreplace_', handler)

    def b(x):
        return x
else:
    import codecs
    codecs.register_error('backslashreplace_', codecs.lookup_error('backslashreplace'))

    def b(x):
        if isinstance(x, (bytes, bytearray)):
            return bytes(x)
        return codecs.encode(x, 'cp1252')
class UnicodeStringWrapperPostProcessor(object):
    """This class attempts to help the process of identifying strings
    that might be plain Unicode or Pascal. A list of strings will be
    wrapped in it with the hope that the overlappings will help make the
    decision about their type."""

    def __init__(self, pe, rva_ptr):
        self.pe = pe
        self.rva_ptr = rva_ptr
        self.string = None

    def get_rva(self):
        """Get the RVA of the string."""
        return self.rva_ptr

    def __str__(self):
        """Return the escaped UTF-8 representation of the string."""
        return self.decode('utf-8', 'backslashreplace_')

    def decode(self, *args):
        if not self.string:
            return ''
        return self.string.decode(*args)

    def invalidate(self):
        """Make this instance None, to express that it's not a known string type."""
        self = None

    def render_pascal_16(self):
        try:
            self.string = self.pe.get_string_u_at_rva(
                self.rva_ptr + 2,
                max_length=self.get_pascal_16_length())
        except PEFormatError as excp:
            self.pe.get_warnings().append(
                'Failed rendering pascal string, '
                'attempting to read from RVA 0x{0:x}'.format(self.rva_ptr + 2))

    def get_pascal_16_length(self):
        return self.__get_word_value_at_rva(self.rva_ptr)

    def __get_word_value_at_rva(self, rva):
        try:
            data = self.pe.get_data(rva, 2)
        except PEFormatError as e:
            return False

        if len(data) < 2:
            return False

        return struct.unpack('<H', data)[0]

    def ask_unicode_16(self, next_rva_ptr):
        """The next RVA is taken to be the one immediately following this one.

        Such RVA could indicate the natural end of the string and will be
        checked to see if there's a Unicode NULL character there.
        """
        if self.__get_word_value_at_rva(next_rva_ptr - 2) == 0:
            self.length = next_rva_ptr - self.rva_ptr
            return True

        return False

    def render_unicode_16(self):
        try:
            self.string = self.pe.get_string_u_at_rva(self.rva_ptr)
        except PEFormatError as excp:
            self.pe.get_warnings().append(
                'Failed rendering unicode string, '
                'attempting to read from RVA 0x{0:x}'.format(self.rva_ptr))


class PEFormatError(Exception):
    """Generic PE format error exception."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return repr(self.value)
class Dump(object):
    """Convenience class for dumping the PE information."""

    def __init__(self):
        self.text = list()

    def add_lines(self, txt, indent=0):
        """Adds a list of lines.

        The list can be indented with the optional argument 'indent'.
        """
        for line in txt:
            self.add_line(line, indent)

    def add_line(self, txt, indent=0):
        """Adds a line.

        The line can be indented with the optional argument 'indent'.
        """
        self.add(txt + '\n', indent)

    def add(self, txt, indent=0):
        """Adds some text, no newline will be appended.

        The text can be indented with the optional argument 'indent'.
        """
        self.text.append(u'{0}{1}'.format(' ' * indent, txt))

    def add_header(self, txt):
        """Adds a header element."""
        self.add_line('{0}{1}{0}\n'.format('-' * 10, txt))

    def add_newline(self):
        """Adds a newline."""
        self.text.append('\n')

    def get_text(self):
        """Get the text in its current state."""
        return u''.join(u'{0}'.format(b) for b in self.text)
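# Minimal illustrative use of Dump, the accumulator behind the dump-style
# helpers elsewhere in pefile (values here are made up):
#
#   d = Dump()
#   d.add_header('DOS_HEADER')
#   d.add_line('e_magic: 0x5A4D', indent=2)
#   print(d.get_text())
#   # ----------DOS_HEADER----------
#   #
#   #   e_magic: 0x5A4D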
STRUCT_SIZEOF_TYPES = {
    'x': 1, 'c': 1, 'b': 1, 'B': 1,
    'h': 2, 'H': 2,
    'i': 4, 'I': 4, 'l': 4, 'L': 4, 'f': 4,
    'q': 8, 'Q': 8, 'd': 8,
    's': 1}


@lru_cache(maxsize=2048)
def sizeof_type(t):
    count = 1
    _t = t
    if t[0] in string.digits:
        # extract the count
        count = int(''.join([d for d in t if d in string.digits]))
        _t = ''.join([d for d in t if d not in string.digits])
    return STRUCT_SIZEOF_TYPES[_t] * count
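# sizeof_type mirrors struct.calcsize for the simple format codes used in this
# module, including a leading repeat count. Illustrative values:
#
#   sizeof_type('I')   # -> 4
#   sizeof_type('8s')  # -> 8
#   sizeof_type('2H')  # -> 4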
@lru_cache(maxsize=2048, copy=True)
def set_format(format):
    __format__ = '<'
    __unpacked_data_elms__ = []
    __field_offsets__ = dict()
    __keys__ = []
    __format_length__ = 0

    offset = 0
    for elm in format:
        if ',' in elm:
            elm_type, elm_name = elm.split(',', 1)
            __format__ += elm_type
            __unpacked_data_elms__.append(None)

            elm_names = elm_name.split(',')
            names = []
            for elm_name in elm_names:
                if elm_name in __keys__:
                    search_list = [x[:len(elm_name)] for x in __keys__]
                    occ_count = search_list.count(elm_name)
                    elm_name = '{0}_{1:d}'.format(elm_name, occ_count)
                names.append(elm_name)
                __field_offsets__[elm_name] = offset

            offset += sizeof_type(elm_type)

            # Some PE header structures have unions in them, so a given value
            # might have different names; each key is therefore a list of all
            # the possible member names referring to the same data.
            __keys__.append(names)

    __format_length__ = struct.calcsize(__format__)
    return (__format__, __unpacked_data_elms__, __field_offsets__, __keys__,
            __format_length__)
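# The format tuples handed to set_format (via Structure below) look like the
# shortened, hypothetical example here: each element is 'struct-code,Name',
# and a union is expressed by listing several names after one code.
#
#   fmt = ('I,Characteristics,OriginalFirstThunk',
#          'I,TimeDateStamp',
#          'I,ForwarderChain')
#   set_format(fmt)
#   # -> ('<III', [None, None, None],
#   #     {'Characteristics': 0, 'OriginalFirstThunk': 0,
#   #      'TimeDateStamp': 4, 'ForwarderChain': 8},
#   #     [['Characteristics', 'OriginalFirstThunk'],
#   #      ['TimeDateStamp'], ['ForwarderChain']],
#   #     12)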
class Structure(object):
    """Prepare structure object to extract members from data.

    Format is a list containing definitions for the elements
    of the structure.
    """

    def __init__(self, format, name=None, file_offset=None):
        # Format is forced little endian, for big endian non Intel platforms
        self.__format__ = '<'
        self.__keys__ = []
        self.__format_length__ = 0
        self.__field_offsets__ = dict()
        self.__unpacked_data_elms__ = []

        d = format[1]
        # need a tuple to be hashable in set_format using lru cache
        if not isinstance(format[1], tuple):
            d = tuple(format[1])
        (self.__format__, self.__unpacked_data_elms__, self.__field_offsets__,
         self.__keys__, self.__format_length__) = set_format(d)

        self.__all_zeroes__ = False
        self.__file_offset__ = file_offset
        if name:
            self.name = name
        else:
            self.name = format[0]

    def __get_format__(self):
        return self.__format__

    def get_field_absolute_offset(self, field_name):
        """Return the absolute file offset of the requested field in the structure."""
        return self.__file_offset__ + self.__field_offsets__[field_name]

    def get_field_relative_offset(self, field_name):
        """Return the offset within the structure for the requested field."""
        return self.__field_offsets__[field_name]

    def get_file_offset(self):
        return self.__file_offset__

    def set_file_offset(self, offset):
        self.__file_offset__ = offset

    def all_zeroes(self):
        """Returns true if the unpacked data is all zeros."""
        return self.__all_zeroes__

    def sizeof(self):
        """Return size of the structure."""
        return self.__format_length__

    def __unpack__(self, data):
        data = b(data)

        if len(data) > self.__format_length__:
            data = data[:self.__format_length__]

        # OC Patch:
        # Some malware have incorrect header lengths.
        # Fail gracefully if this occurs
        # Buggy malware: a29b0118af8b7408444df81701ad5a7f
        #
        elif len(data) < self.__format_length__:
            raise PEFormatError('Data length less than expected header length.')

        if count_zeroes(data) == len(data):
            self.__all_zeroes__ = True

        self.__unpacked_data_elms__ = struct.unpack(self.__format__, data)
        for i in range(len(self.__unpacked_data_elms__)):
            for key in self.__keys__[i]:
                setattr(self, key, self.__unpacked_data_elms__[i])
    def __pack__(self):
        new_values = []

        for i in range(len(self.__unpacked_data_elms__)):
            for key in self.__keys__[i]:
                new_val = getattr(self, key)
                old_val = self.__unpacked_data_elms__[i]

                # In the case of unions, the loop exits as soon as the first
                # changed value is found
                if new_val != old_val:
                    break

            new_values.append(new_val)

        return struct.pack(self.__format__, *new_values)

    def __str__(self):
        return '\n'.join(self.dump())

    def __repr__(self):
        return '<Structure: %s>' % (
            ' '.join([' '.join(s.split()) for s in self.dump()]))

    def dump(self, indentation=0):
        """Returns a string representation of the structure."""
        dump = []

        dump.append('[{0}]'.format(self.name))

        printable_bytes = [
            ord(i) for i in string.printable if i not in string.whitespace]

        # Refer to the set_format function for an explanation
        # of the following construct.
        for keys in self.__keys__:
            for key in keys:
                val = getattr(self, key)
                if isinstance(val, (int, long)):
                    if key.startswith('Signature_'):
                        val_str = '{:<8X}'.format(val)
                    else:
                        val_str = '0x{:<8X}'.format(val)
                    if key == 'TimeDateStamp' or key == 'dwTimeStamp':
                        try:
                            val_str += ' [%s UTC]' % time.asctime(time.gmtime(val))
                        except ValueError as e:
                            val_str += ' [INVALID TIME]'
                else:
                    val_str = bytearray(val)
                    if key.startswith('Signature'):
                        val_str = ''.join(
                            ['{:02X}'.format(i) for i in val_str.rstrip(b'\x00')])
                    else:
                        val_str = ''.join(
                            [chr(i) if (i in printable_bytes) else
                             '\\x{0:02x}'.format(i) for i in val_str.rstrip(b'\x00')])

                dump.append('0x%-8X 0x%-3X %-30s %s' % (
                    self.__field_offsets__[key] + self.__file_offset__,
                    self.__field_offsets__[key], key + ':', val_str))

        return dump
  866. def dump_dict(self):
  867. """Returns a dictionary representation of the structure."""
  868. dump_dict = dict()
  869. dump_dict['Structure'] = self.name
  870. # Refer to the __set_format__ method for an explanation
  871. # of the following construct.
  872. for keys in self.__keys__:
  873. for key in keys:
  874. val = getattr(self, key)
  875. if isinstance(val, (int, long)):
  876. if key == 'TimeDateStamp' or key == 'dwTimeStamp':
  877. try:
  878. val = '0x%-8X [%s UTC]' % (val, time.asctime(time.gmtime(val)))
  879. except ValueError as e:
  880. val = '0x%-8X [INVALID TIME]' % val
  881. else:
  882. val = ''.join(chr(d) if chr(d) in string.printable
  883. else "\\x%02x" % d for d in
  884. [ord(c) if not isinstance(c, int) else c for c in val])
  885. dump_dict[key] = {'FileOffset': self.__field_offsets__[key] + self.__file_offset__,
  886. 'Offset': self.__field_offsets__[key],
  887. 'Value': val}
  888. return dump_dict
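# Illustrative sketch of how a Structure format tuple maps raw bytes to named
# fields. The format below is made up for the example; the real formats are
# defined in the PE class further down (e.g. __IMAGE_DATA_DIRECTORY_format__).
#
#     example_format = ('EXAMPLE_HEADER', ('H,Magic', 'I,Size'))
#     hdr = Structure(example_format, file_offset=0)
#     hdr.__unpack__(b'\x4d\x5a\x40\x00\x00\x00')  # 'MZ' followed by a dword
#     hdr.Magic      # -> 0x5a4d
#     hdr.Size       # -> 0x40
#     hdr.sizeof()   # -> 6, and hdr.dump() returns the printable field list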
  889. class SectionStructure(Structure):
  890. """Convenience section handling class."""
  891. def __init__(self, *argl, **argd):
  892. if 'pe' in argd:
  893. self.pe = argd['pe']
  894. del argd['pe']
  895. Structure.__init__(self, *argl, **argd)
  896. self.PointerToRawData_adj = None
  897. self.VirtualAddress_adj = None
  898. def get_PointerToRawData_adj(self):
  899. if self.PointerToRawData_adj is None:
  900. if self.PointerToRawData is not None:
  901. self.PointerToRawData_adj = self.pe.adjust_FileAlignment( self.PointerToRawData, self.pe.OPTIONAL_HEADER.FileAlignment )
  902. return self.PointerToRawData_adj
  903. def get_VirtualAddress_adj(self):
  904. if self.VirtualAddress_adj is None:
  905. if self.VirtualAddress is not None:
  906. self.VirtualAddress_adj = self.pe.adjust_SectionAlignment( self.VirtualAddress, self.pe.OPTIONAL_HEADER.SectionAlignment, self.pe.OPTIONAL_HEADER.FileAlignment )
  907. return self.VirtualAddress_adj
  908. def get_data(self, start=None, length=None):
  909. """Get data chunk from a section.
  910. Allows to query data from the section by passing the
  911. addresses where the PE file would be loaded by default.
  912. It is then possible to retrieve code and data by their real
  913. addresses as they would be if loaded.
  914. Returns bytes() under Python 3.x and set() under Python 2.7
  915. """
  916. if start is None:
  917. offset = self.get_PointerToRawData_adj()
  918. else:
  919. offset = ( start - self.get_VirtualAddress_adj() ) + self.get_PointerToRawData_adj()
  920. if length is not None:
  921. end = offset + length
  922. else:
  923. end = offset + self.SizeOfRawData
  924. # PointerToRawData is not adjusted here as we might want to read any possible extra bytes
  925. # that might get cut off by aligning the start (and hence cutting something off the end)
  926. #
  927. if end > self.PointerToRawData + self.SizeOfRawData:
  928. end = self.PointerToRawData + self.SizeOfRawData
  929. return self.pe.__data__[offset:end]
  930. def __setattr__(self, name, val):
  931. if name == 'Characteristics':
  932. section_flags = retrieve_flags(SECTION_CHARACTERISTICS, 'IMAGE_SCN_')
  933. # Set the section's flags according to the Characteristics member
  934. set_flags(self, val, section_flags)
  935. elif 'IMAGE_SCN_' in name and hasattr(self, name):
  936. if val:
  937. self.__dict__['Characteristics'] |= SECTION_CHARACTERISTICS[name]
  938. else:
# Clear the flag bit; a plain XOR would toggle it back on if it was already clear
self.__dict__['Characteristics'] &= ~SECTION_CHARACTERISTICS[name]
  940. self.__dict__[name] = val
  941. def get_rva_from_offset(self, offset):
  942. return offset - self.get_PointerToRawData_adj() + self.get_VirtualAddress_adj()
  943. def get_offset_from_rva(self, rva):
  944. return rva - self.get_VirtualAddress_adj() + self.get_PointerToRawData_adj()
  945. def contains_offset(self, offset):
  946. """Check whether the section contains the file offset provided."""
  947. if self.PointerToRawData is None:
# .bss and other sections containing only uninitialized data must have a
# PointerToRawData of 0 and take up no space in the file
  950. return False
  951. PointerToRawData_adj = self.get_PointerToRawData_adj()
  952. return ( PointerToRawData_adj <= offset < PointerToRawData_adj + self.SizeOfRawData )
  953. def contains_rva(self, rva):
  954. """Check whether the section contains the address provided."""
  955. VirtualAddress_adj = self.get_VirtualAddress_adj()
  956. # Check if the SizeOfRawData is realistic. If it's bigger than the size of
  957. # the whole PE file minus the start address of the section it could be
  958. # either truncated or the SizeOfRawData contains a misleading value.
  959. # In either of those cases we take the VirtualSize
  960. #
  961. if len(self.pe.__data__) - self.get_PointerToRawData_adj() < self.SizeOfRawData:
  962. # PECOFF documentation v8 says:
  963. # VirtualSize: The total size of the section when loaded into memory.
  964. # If this value is greater than SizeOfRawData, the section is zero-padded.
  965. # This field is valid only for executable images and should be set to zero
  966. # for object files.
  967. #
  968. size = self.Misc_VirtualSize
  969. else:
  970. size = max(self.SizeOfRawData, self.Misc_VirtualSize)
  971. # Check whether there's any section after the current one that starts before the
  972. # calculated end for the current one. If so, cut the current section's size
  973. # to fit in the range up to where the next section starts.
  974. if (self.next_section_virtual_address is not None and
  975. self.next_section_virtual_address > self.VirtualAddress and
  976. VirtualAddress_adj + size > self.next_section_virtual_address):
  977. size = self.next_section_virtual_address - VirtualAddress_adj
  978. return VirtualAddress_adj <= rva < VirtualAddress_adj + size
  979. def contains(self, rva):
  980. #print "DEPRECATION WARNING: you should use contains_rva() instead of contains()"
  981. return self.contains_rva(rva)
  982. def get_entropy(self):
  983. """Calculate and return the entropy for the section."""
  984. return self.entropy_H( self.get_data() )
  985. def get_hash_sha1(self):
  986. """Get the SHA-1 hex-digest of the section's data."""
  987. if sha1 is not None:
  988. return sha1( self.get_data() ).hexdigest()
  989. def get_hash_sha256(self):
  990. """Get the SHA-256 hex-digest of the section's data."""
  991. if sha256 is not None:
  992. return sha256( self.get_data() ).hexdigest()
  993. def get_hash_sha512(self):
  994. """Get the SHA-512 hex-digest of the section's data."""
  995. if sha512 is not None:
  996. return sha512( self.get_data() ).hexdigest()
  997. def get_hash_md5(self):
  998. """Get the MD5 hex-digest of the section's data."""
  999. if md5 is not None:
  1000. return md5( self.get_data() ).hexdigest()
  1001. def entropy_H(self, data):
  1002. """Calculate the entropy of a chunk of data."""
  1003. if not data:
  1004. return 0.0
occurrences = Counter(bytearray(data))
entropy = 0.0
for x in occurrences.values():
  1008. p_x = float(x) / len(data)
  1009. entropy -= p_x*math.log(p_x, 2)
  1010. return entropy
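# entropy_H above implements Shannon entropy over the byte-value frequencies:
#     H(data) = -sum(p_b * log2(p_b)) for every byte value b occurring in data
# Illustrative values, assuming `sec` is a parsed SectionStructure instance:
#     sec.entropy_H(b'\x00' * 1024)          # -> 0.0 (a single symbol)
#     sec.entropy_H(bytearray(range(256)))   # -> 8.0 (uniform distribution)
# Packed or encrypted sections typically score close to 8 bits per byte.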
  1011. class DataContainer(object):
  1012. """Generic data container."""
  1013. def __init__(self, **args):
  1014. bare_setattr = super(DataContainer, self).__setattr__
  1015. for key, value in list(args.items()):
  1016. bare_setattr(key, value)
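# DataContainer simply exposes its keyword arguments as attributes, e.g.:
#
#     entry = DataContainer(dll=b'KERNEL32.dll', imports=[])
#     entry.dll      # -> b'KERNEL32.dll'
#     entry.imports  # -> []
#
# The subclasses below mostly just add docstrings; a few also add __setattr__
# hooks that write attribute changes back into the underlying structures.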
  1017. class ImportDescData(DataContainer):
  1018. """Holds import descriptor information.
  1019. dll: name of the imported DLL
  1020. imports: list of imported symbols (ImportData instances)
  1021. struct: IMAGE_IMPORT_DESCRIPTOR structure
  1022. """
  1023. class ImportData(DataContainer):
  1024. """Holds imported symbol's information.
  1025. ordinal: Ordinal of the symbol
  1026. name: Name of the symbol
  1027. bound: If the symbol is bound, this contains
  1028. the address.
  1029. """
  1030. def __setattr__(self, name, val):
  1031. # If the instance doesn't yet have an ordinal attribute
  1032. # it's not fully initialized so can't do any of the
  1033. # following
  1034. #
  1035. if hasattr(self, 'ordinal') and hasattr(self, 'bound') and hasattr(self, 'name'):
  1036. if name == 'ordinal':
  1037. if self.pe.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE:
  1038. ordinal_flag = IMAGE_ORDINAL_FLAG
  1039. elif self.pe.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE_PLUS:
  1040. ordinal_flag = IMAGE_ORDINAL_FLAG64
  1041. # Set the ordinal and flag the entry as importing by ordinal
  1042. self.struct_table.Ordinal = ordinal_flag | (val & 0xffff)
  1043. self.struct_table.AddressOfData = self.struct_table.Ordinal
  1044. self.struct_table.Function = self.struct_table.Ordinal
  1045. self.struct_table.ForwarderString = self.struct_table.Ordinal
  1046. elif name == 'bound':
  1047. if self.struct_iat is not None:
  1048. self.struct_iat.AddressOfData = val
  1049. self.struct_iat.AddressOfData = self.struct_iat.AddressOfData
  1050. self.struct_iat.Function = self.struct_iat.AddressOfData
  1051. self.struct_iat.ForwarderString = self.struct_iat.AddressOfData
  1052. elif name == 'address':
  1053. self.struct_table.AddressOfData = val
  1054. self.struct_table.Ordinal = self.struct_table.AddressOfData
  1055. self.struct_table.Function = self.struct_table.AddressOfData
  1056. self.struct_table.ForwarderString = self.struct_table.AddressOfData
  1057. elif name == 'name':
  1058. # Make sure we reset the entry in case the import had been set to import by ordinal
  1059. if self.name_offset:
  1060. name_rva = self.pe.get_rva_from_offset( self.name_offset )
  1061. self.pe.set_dword_at_offset( self.ordinal_offset, (0<<31) | name_rva )
  1062. # Complain if the length of the new name is longer than the existing one
  1063. if len(val) > len(self.name):
  1064. #raise Exception('The export name provided is longer than the existing one.')
  1065. pass
  1066. self.pe.set_bytes_at_offset( self.name_offset, val )
  1067. self.__dict__[name] = val
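# Illustrative traversal of parsed imports ('sample.exe' is a placeholder
# path; DIRECTORY_ENTRY_IMPORT is only present after a full load of a file
# that actually has an import table):
#
#     pe = PE('sample.exe')
#     for desc in pe.DIRECTORY_ENTRY_IMPORT:       # ImportDescData instances
#         print(desc.dll)
#         for imp in desc.imports:                 # ImportData instances
#             print(hex(imp.address), imp.name or imp.ordinal)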
  1068. class ExportDirData(DataContainer):
  1069. """Holds export directory information.
  1070. struct: IMAGE_EXPORT_DIRECTORY structure
  1071. symbols: list of exported symbols (ExportData instances)
  1072. """
  1073. class ExportData(DataContainer):
  1074. """Holds exported symbols' information.
  1075. ordinal: ordinal of the symbol
  1076. address: address of the symbol
  1077. name: name of the symbol (None if the symbol is
  1078. exported by ordinal only)
  1079. forwarder: if the symbol is forwarded it will
  1080. contain the name of the target symbol,
  1081. None otherwise.
  1082. """
  1083. def __setattr__(self, name, val):
  1084. # If the instance doesn't yet have an ordinal attribute
  1085. # it's not fully initialized so can't do any of the
  1086. # following
  1087. #
  1088. if hasattr(self, 'ordinal') and hasattr(self, 'address') and hasattr(self, 'forwarder') and hasattr(self, 'name'):
  1089. if name == 'ordinal':
  1090. self.pe.set_word_at_offset( self.ordinal_offset, val )
  1091. elif name == 'address':
  1092. self.pe.set_dword_at_offset( self.address_offset, val )
  1093. elif name == 'name':
  1094. # Complain if the length of the new name is longer than the existing one
  1095. if len(val) > len(self.name):
  1096. #raise Exception('The export name provided is longer than the existing one.')
  1097. pass
  1098. self.pe.set_bytes_at_offset( self.name_offset, val )
  1099. elif name == 'forwarder':
  1100. # Complain if the length of the new name is longer than the existing one
  1101. if len(val) > len(self.forwarder):
  1102. #raise Exception('The forwarder name provided is longer than the existing one.')
  1103. pass
  1104. self.pe.set_bytes_at_offset( self.forwarder_offset, val )
  1105. self.__dict__[name] = val
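# Illustrative traversal of parsed exports ('sample.dll' is a placeholder
# path; DIRECTORY_ENTRY_EXPORT is only present when an export directory
# exists):
#
#     pe = PE('sample.dll')
#     for exp in pe.DIRECTORY_ENTRY_EXPORT.symbols:  # ExportData instances
#         print(exp.ordinal, exp.name, hex(exp.address), exp.forwarder)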
  1106. class ResourceDirData(DataContainer):
  1107. """Holds resource directory information.
  1108. struct: IMAGE_RESOURCE_DIRECTORY structure
  1109. entries: list of entries (ResourceDirEntryData instances)
  1110. """
  1111. class ResourceDirEntryData(DataContainer):
  1112. """Holds resource directory entry data.
  1113. struct: IMAGE_RESOURCE_DIRECTORY_ENTRY structure
  1114. name: If the resource is identified by name this
  1115. attribute will contain the name string. None
  1116. otherwise. If identified by id, the id is
  1117. available at 'struct.Id'
  1118. id: the id, also in struct.Id
  1119. directory: If this entry has a lower level directory
  1120. this attribute will point to the
  1121. ResourceDirData instance representing it.
  1122. data: If this entry has no further lower directories
  1123. and points to the actual resource data, this
  1124. attribute will reference the corresponding
  1125. ResourceDataEntryData instance.
  1126. (Either of the 'directory' or 'data' attribute will exist,
  1127. but not both.)
  1128. """
  1129. class ResourceDataEntryData(DataContainer):
  1130. """Holds resource data entry information.
  1131. struct: IMAGE_RESOURCE_DATA_ENTRY structure
  1132. lang: Primary language ID
  1133. sublang: Sublanguage ID
  1134. """
  1135. class DebugData(DataContainer):
  1136. """Holds debug information.
  1137. struct: IMAGE_DEBUG_DIRECTORY structure
  1138. entries: list of entries (IMAGE_DEBUG_TYPE instances)
  1139. """
  1140. class BaseRelocationData(DataContainer):
  1141. """Holds base relocation information.
  1142. struct: IMAGE_BASE_RELOCATION structure
  1143. entries: list of relocation data (RelocationData instances)
  1144. """
  1145. class RelocationData(DataContainer):
  1146. """Holds relocation information.
  1147. type: Type of relocation
  1148. The type string can be obtained by
  1149. RELOCATION_TYPE[type]
  1150. rva: RVA of the relocation
  1151. """
  1152. def __setattr__(self, name, val):
  1153. # If the instance doesn't yet have a struct attribute
  1154. # it's not fully initialized so can't do any of the
  1155. # following
  1156. #
  1157. if hasattr(self, 'struct'):
  1158. # Get the word containing the type and data
  1159. #
  1160. word = self.struct.Data
  1161. if name == 'type':
  1162. word = (val << 12) | (word & 0xfff)
  1163. elif name == 'rva':
  1164. offset = max(val-self.base_rva, 0)
  1165. word = ( word & 0xf000) | ( offset & 0xfff)
  1166. # Store the modified data
  1167. #
  1168. self.struct.Data = word
  1169. self.__dict__[name] = val
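# As handled above, each relocation entry packs its data into a 16-bit word:
# the top 4 bits hold the relocation type and the low 12 bits hold the offset
# from the owning IMAGE_BASE_RELOCATION block. Illustrative decoding of a raw
# entry word (base_rva would come from the block header):
#
#     word = 0x3A54
#     reloc_type = word >> 12            # -> 0x3, IMAGE_REL_BASED_HIGHLOW
#     rva = base_rva + (word & 0x0fff)   # -> base_rva + 0xA54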
  1170. class TlsData(DataContainer):
  1171. """Holds TLS information.
  1172. struct: IMAGE_TLS_DIRECTORY structure
  1173. """
  1174. class BoundImportDescData(DataContainer):
  1175. """Holds bound import descriptor data.
  1176. This directory entry will provide information on the
  1177. DLLs this PE file has been bound to (if bound at all).
  1178. The structure will contain the name and timestamp of the
  1179. DLL at the time of binding so that the loader can know
  1180. whether it differs from the one currently present in the
  1181. system and must, therefore, re-bind the PE's imports.
  1182. struct: IMAGE_BOUND_IMPORT_DESCRIPTOR structure
  1183. name: DLL name
  1184. entries: list of entries (BoundImportRefData instances)
  1185. the entries will exist if this DLL has forwarded
  1186. symbols. If so, the destination DLL will have an
  1187. entry in this list.
  1188. """
  1189. class LoadConfigData(DataContainer):
  1190. """Holds Load Config data.
  1191. struct: IMAGE_LOAD_CONFIG_DIRECTORY structure
  1192. name: dll name
  1193. """
  1194. class BoundImportRefData(DataContainer):
  1195. """Holds bound import forwarder reference data.
  1196. Contains the same information as the bound descriptor but
  1197. for forwarded DLLs, if any.
  1198. struct: IMAGE_BOUND_FORWARDER_REF structure
  1199. name: dll name
  1200. """
  1201. # Valid FAT32 8.3 short filename characters according to:
  1202. # http://en.wikipedia.org/wiki/8.3_filename
  1203. # This will help decide whether DLL ASCII names are likely
  1204. # to be valid or otherwise corrupt data
  1205. #
# The filename length is not checked because the DLL's filename
# can be longer than the 8.3 limit
  1208. if PY3:
  1209. allowed_filename = b(
  1210. string.ascii_lowercase + string.ascii_uppercase +
  1211. string.digits + "!#$%&'()-@^_`{}~+,.;=[]")
  1212. else: # Python 2.x
  1213. allowed_filename = b(
  1214. string.lowercase + string.uppercase + string.digits +
  1215. b"!#$%&'()-@^_`{}~+,.;=[]")
  1216. def is_valid_dos_filename(s):
  1217. if s is None or not isinstance(s, (str, bytes, bytearray)):
  1218. return False
  1219. # Allow path separators as import names can contain directories.
  1220. allowed = allowed_filename + b'\\/'
  1221. return all(c in allowed for c in set(s))
  1222. # Check if an imported name uses the valid accepted characters expected in mangled
  1223. # function names. If the symbol's characters don't fall within this charset
  1224. # we will assume the name is invalid
  1225. #
  1226. if PY3:
  1227. allowed_function_name = b(
  1228. string.ascii_lowercase + string.ascii_uppercase +
  1229. string.digits + '_?@$()<>')
  1230. else:
  1231. allowed_function_name = b(
  1232. string.lowercase + string.uppercase +
  1233. string.digits + b'_?@$()<>')
  1234. @lru_cache(maxsize=2048)
  1235. def is_valid_function_name(s):
  1236. return (s is not None and
  1237. isinstance(s, (str, bytes, bytearray)) and
  1238. all(c in allowed_function_name for c in set(s)))
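# Illustrative checks for the two validators above:
#
#     is_valid_dos_filename(b'KERNEL32.dll')     # -> True
#     is_valid_dos_filename(b'bad\x01name.dll')  # -> False (control character)
#     is_valid_function_name(b'?Foo@@YAXXZ')     # -> True  (MSVC-mangled name)
#     is_valid_function_name(b'not valid!')      # -> False (space and '!')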
  1239. class PE(object):
  1240. """A Portable Executable representation.
  1241. This class provides access to most of the information in a PE file.
  1242. It expects to be supplied the name of the file to load or PE data
  1243. to process and an optional argument 'fast_load' (False by default)
which controls whether to load all the directory information,
a step that can be quite time consuming.
  1246. pe = pefile.PE('module.dll')
  1247. pe = pefile.PE(name='module.dll')
  1248. would load 'module.dll' and process it. If the data is already
  1249. available in a buffer the same can be achieved with:
  1250. pe = pefile.PE(data=module_dll_data)
  1251. The "fast_load" can be set to a default by setting its value in the
  1252. module itself by means, for instance, of a "pefile.fast_load = True".
  1253. That will make all the subsequent instances not to load the
  1254. whole PE structure. The "full_load" method can be used to parse
  1255. the missing data at a later stage.
  1256. Basic headers information will be available in the attributes:
  1257. DOS_HEADER
  1258. NT_HEADERS
  1259. FILE_HEADER
  1260. OPTIONAL_HEADER
  1261. All of them will contain among their attributes the members of the
  1262. corresponding structures as defined in WINNT.H
  1263. The raw data corresponding to the header (from the beginning of the
  1264. file up to the start of the first section) will be available in the
  1265. instance's attribute 'header' as a string.
  1266. The sections will be available as a list in the 'sections' attribute.
  1267. Each entry will contain as attributes all the structure's members.
  1268. Directory entries will be available as attributes (if they exist):
  1269. (no other entries are processed at this point)
  1270. DIRECTORY_ENTRY_IMPORT (list of ImportDescData instances)
  1271. DIRECTORY_ENTRY_EXPORT (ExportDirData instance)
  1272. DIRECTORY_ENTRY_RESOURCE (ResourceDirData instance)
  1273. DIRECTORY_ENTRY_DEBUG (list of DebugData instances)
  1274. DIRECTORY_ENTRY_BASERELOC (list of BaseRelocationData instances)
  1275. DIRECTORY_ENTRY_TLS
DIRECTORY_ENTRY_BOUND_IMPORT (list of BoundImportDescData instances)
  1277. The following dictionary attributes provide ways of mapping different
  1278. constants. They will accept the numeric value and return the string
  1279. representation and the opposite, feed in the string and get the
  1280. numeric constant:
  1281. DIRECTORY_ENTRY
  1282. IMAGE_CHARACTERISTICS
  1283. SECTION_CHARACTERISTICS
  1284. DEBUG_TYPE
  1285. SUBSYSTEM_TYPE
  1286. MACHINE_TYPE
  1287. RELOCATION_TYPE
  1288. RESOURCE_TYPE
  1289. LANG
  1290. SUBLANG
  1291. """
  1292. #
  1293. # Format specifications for PE structures.
  1294. #
  1295. __IMAGE_DOS_HEADER_format__ = ('IMAGE_DOS_HEADER',
  1296. ('H,e_magic', 'H,e_cblp', 'H,e_cp',
  1297. 'H,e_crlc', 'H,e_cparhdr', 'H,e_minalloc',
  1298. 'H,e_maxalloc', 'H,e_ss', 'H,e_sp', 'H,e_csum',
  1299. 'H,e_ip', 'H,e_cs', 'H,e_lfarlc', 'H,e_ovno', '8s,e_res',
  1300. 'H,e_oemid', 'H,e_oeminfo', '20s,e_res2',
  1301. 'I,e_lfanew'))
  1302. __IMAGE_FILE_HEADER_format__ = ('IMAGE_FILE_HEADER',
  1303. ('H,Machine', 'H,NumberOfSections',
  1304. 'I,TimeDateStamp', 'I,PointerToSymbolTable',
  1305. 'I,NumberOfSymbols', 'H,SizeOfOptionalHeader',
  1306. 'H,Characteristics'))
  1307. __IMAGE_DATA_DIRECTORY_format__ = ('IMAGE_DATA_DIRECTORY',
  1308. ('I,VirtualAddress', 'I,Size'))
  1309. __IMAGE_OPTIONAL_HEADER_format__ = ('IMAGE_OPTIONAL_HEADER',
  1310. ('H,Magic', 'B,MajorLinkerVersion',
  1311. 'B,MinorLinkerVersion', 'I,SizeOfCode',
  1312. 'I,SizeOfInitializedData', 'I,SizeOfUninitializedData',
  1313. 'I,AddressOfEntryPoint', 'I,BaseOfCode', 'I,BaseOfData',
  1314. 'I,ImageBase', 'I,SectionAlignment', 'I,FileAlignment',
  1315. 'H,MajorOperatingSystemVersion', 'H,MinorOperatingSystemVersion',
  1316. 'H,MajorImageVersion', 'H,MinorImageVersion',
  1317. 'H,MajorSubsystemVersion', 'H,MinorSubsystemVersion',
  1318. 'I,Reserved1', 'I,SizeOfImage', 'I,SizeOfHeaders',
  1319. 'I,CheckSum', 'H,Subsystem', 'H,DllCharacteristics',
  1320. 'I,SizeOfStackReserve', 'I,SizeOfStackCommit',
  1321. 'I,SizeOfHeapReserve', 'I,SizeOfHeapCommit',
  1322. 'I,LoaderFlags', 'I,NumberOfRvaAndSizes' ))
  1323. __IMAGE_OPTIONAL_HEADER64_format__ = ('IMAGE_OPTIONAL_HEADER64',
  1324. ('H,Magic', 'B,MajorLinkerVersion',
  1325. 'B,MinorLinkerVersion', 'I,SizeOfCode',
  1326. 'I,SizeOfInitializedData', 'I,SizeOfUninitializedData',
  1327. 'I,AddressOfEntryPoint', 'I,BaseOfCode',
  1328. 'Q,ImageBase', 'I,SectionAlignment', 'I,FileAlignment',
  1329. 'H,MajorOperatingSystemVersion', 'H,MinorOperatingSystemVersion',
  1330. 'H,MajorImageVersion', 'H,MinorImageVersion',
  1331. 'H,MajorSubsystemVersion', 'H,MinorSubsystemVersion',
  1332. 'I,Reserved1', 'I,SizeOfImage', 'I,SizeOfHeaders',
  1333. 'I,CheckSum', 'H,Subsystem', 'H,DllCharacteristics',
  1334. 'Q,SizeOfStackReserve', 'Q,SizeOfStackCommit',
  1335. 'Q,SizeOfHeapReserve', 'Q,SizeOfHeapCommit',
  1336. 'I,LoaderFlags', 'I,NumberOfRvaAndSizes' ))
  1337. __IMAGE_NT_HEADERS_format__ = ('IMAGE_NT_HEADERS', ('I,Signature',))
  1338. __IMAGE_SECTION_HEADER_format__ = ('IMAGE_SECTION_HEADER',
  1339. ('8s,Name', 'I,Misc,Misc_PhysicalAddress,Misc_VirtualSize',
  1340. 'I,VirtualAddress', 'I,SizeOfRawData', 'I,PointerToRawData',
  1341. 'I,PointerToRelocations', 'I,PointerToLinenumbers',
  1342. 'H,NumberOfRelocations', 'H,NumberOfLinenumbers',
  1343. 'I,Characteristics'))
  1344. __IMAGE_DELAY_IMPORT_DESCRIPTOR_format__ = ('IMAGE_DELAY_IMPORT_DESCRIPTOR',
  1345. ('I,grAttrs', 'I,szName', 'I,phmod', 'I,pIAT', 'I,pINT',
  1346. 'I,pBoundIAT', 'I,pUnloadIAT', 'I,dwTimeStamp'))
  1347. __IMAGE_IMPORT_DESCRIPTOR_format__ = ('IMAGE_IMPORT_DESCRIPTOR',
  1348. ('I,OriginalFirstThunk,Characteristics',
  1349. 'I,TimeDateStamp', 'I,ForwarderChain', 'I,Name', 'I,FirstThunk'))
  1350. __IMAGE_EXPORT_DIRECTORY_format__ = ('IMAGE_EXPORT_DIRECTORY',
  1351. ('I,Characteristics',
  1352. 'I,TimeDateStamp', 'H,MajorVersion', 'H,MinorVersion', 'I,Name',
  1353. 'I,Base', 'I,NumberOfFunctions', 'I,NumberOfNames',
  1354. 'I,AddressOfFunctions', 'I,AddressOfNames', 'I,AddressOfNameOrdinals'))
  1355. __IMAGE_RESOURCE_DIRECTORY_format__ = ('IMAGE_RESOURCE_DIRECTORY',
  1356. ('I,Characteristics',
  1357. 'I,TimeDateStamp', 'H,MajorVersion', 'H,MinorVersion',
  1358. 'H,NumberOfNamedEntries', 'H,NumberOfIdEntries'))
  1359. __IMAGE_RESOURCE_DIRECTORY_ENTRY_format__ = ('IMAGE_RESOURCE_DIRECTORY_ENTRY',
  1360. ('I,Name',
  1361. 'I,OffsetToData'))
  1362. __IMAGE_RESOURCE_DATA_ENTRY_format__ = ('IMAGE_RESOURCE_DATA_ENTRY',
  1363. ('I,OffsetToData', 'I,Size', 'I,CodePage', 'I,Reserved'))
  1364. __VS_VERSIONINFO_format__ = ( 'VS_VERSIONINFO',
  1365. ('H,Length', 'H,ValueLength', 'H,Type' ))
  1366. __VS_FIXEDFILEINFO_format__ = ( 'VS_FIXEDFILEINFO',
  1367. ('I,Signature', 'I,StrucVersion', 'I,FileVersionMS', 'I,FileVersionLS',
  1368. 'I,ProductVersionMS', 'I,ProductVersionLS', 'I,FileFlagsMask', 'I,FileFlags',
  1369. 'I,FileOS', 'I,FileType', 'I,FileSubtype', 'I,FileDateMS', 'I,FileDateLS'))
  1370. __StringFileInfo_format__ = ( 'StringFileInfo',
  1371. ('H,Length', 'H,ValueLength', 'H,Type' ))
  1372. __StringTable_format__ = ( 'StringTable',
  1373. ('H,Length', 'H,ValueLength', 'H,Type' ))
  1374. __String_format__ = ( 'String',
  1375. ('H,Length', 'H,ValueLength', 'H,Type' ))
  1376. __Var_format__ = ( 'Var', ('H,Length', 'H,ValueLength', 'H,Type' ))
  1377. __IMAGE_THUNK_DATA_format__ = ('IMAGE_THUNK_DATA',
  1378. ('I,ForwarderString,Function,Ordinal,AddressOfData',))
  1379. __IMAGE_THUNK_DATA64_format__ = ('IMAGE_THUNK_DATA',
  1380. ('Q,ForwarderString,Function,Ordinal,AddressOfData',))
  1381. __IMAGE_DEBUG_DIRECTORY_format__ = ('IMAGE_DEBUG_DIRECTORY',
  1382. ('I,Characteristics', 'I,TimeDateStamp', 'H,MajorVersion',
  1383. 'H,MinorVersion', 'I,Type', 'I,SizeOfData', 'I,AddressOfRawData',
  1384. 'I,PointerToRawData'))
  1385. __IMAGE_BASE_RELOCATION_format__ = ('IMAGE_BASE_RELOCATION',
  1386. ('I,VirtualAddress', 'I,SizeOfBlock') )
  1387. __IMAGE_BASE_RELOCATION_ENTRY_format__ = ('IMAGE_BASE_RELOCATION_ENTRY',
  1388. ('H,Data',) )
  1389. __IMAGE_TLS_DIRECTORY_format__ = ('IMAGE_TLS_DIRECTORY',
  1390. ('I,StartAddressOfRawData', 'I,EndAddressOfRawData',
  1391. 'I,AddressOfIndex', 'I,AddressOfCallBacks',
  1392. 'I,SizeOfZeroFill', 'I,Characteristics' ) )
  1393. __IMAGE_TLS_DIRECTORY64_format__ = ('IMAGE_TLS_DIRECTORY',
  1394. ('Q,StartAddressOfRawData', 'Q,EndAddressOfRawData',
  1395. 'Q,AddressOfIndex', 'Q,AddressOfCallBacks',
  1396. 'I,SizeOfZeroFill', 'I,Characteristics' ) )
  1397. __IMAGE_LOAD_CONFIG_DIRECTORY_format__ = ('IMAGE_LOAD_CONFIG_DIRECTORY',
  1398. ('I,Size',
  1399. 'I,TimeDateStamp',
  1400. 'H,MajorVersion',
  1401. 'H,MinorVersion',
  1402. 'I,GlobalFlagsClear',
  1403. 'I,GlobalFlagsSet',
  1404. 'I,CriticalSectionDefaultTimeout',
  1405. 'I,DeCommitFreeBlockThreshold',
  1406. 'I,DeCommitTotalFreeThreshold',
  1407. 'I,LockPrefixTable',
  1408. 'I,MaximumAllocationSize',
  1409. 'I,VirtualMemoryThreshold',
  1410. 'I,ProcessHeapFlags',
  1411. 'I,ProcessAffinityMask',
  1412. 'H,CSDVersion',
  1413. 'H,Reserved1',
  1414. 'I,EditList',
  1415. 'I,SecurityCookie',
  1416. 'I,SEHandlerTable',
  1417. 'I,SEHandlerCount',
  1418. 'I,GuardCFCheckFunctionPointer',
  1419. 'I,Reserved2',
  1420. 'I,GuardCFFunctionTable',
  1421. 'I,GuardCFFunctionCount',
  1422. 'I,GuardFlags' ) )
  1423. __IMAGE_LOAD_CONFIG_DIRECTORY64_format__ = ('IMAGE_LOAD_CONFIG_DIRECTORY',
  1424. ('I,Size',
  1425. 'I,TimeDateStamp',
  1426. 'H,MajorVersion',
  1427. 'H,MinorVersion',
  1428. 'I,GlobalFlagsClear',
  1429. 'I,GlobalFlagsSet',
  1430. 'I,CriticalSectionDefaultTimeout',
  1431. 'Q,DeCommitFreeBlockThreshold',
  1432. 'Q,DeCommitTotalFreeThreshold',
  1433. 'Q,LockPrefixTable',
  1434. 'Q,MaximumAllocationSize',
  1435. 'Q,VirtualMemoryThreshold',
  1436. 'Q,ProcessAffinityMask',
  1437. 'I,ProcessHeapFlags',
  1438. 'H,CSDVersion',
  1439. 'H,Reserved1',
  1440. 'Q,EditList',
  1441. 'Q,SecurityCookie',
  1442. 'Q,SEHandlerTable',
  1443. 'Q,SEHandlerCount',
  1444. 'Q,GuardCFCheckFunctionPointer',
  1445. 'Q,Reserved2',
  1446. 'Q,GuardCFFunctionTable',
  1447. 'Q,GuardCFFunctionCount',
  1448. 'I,GuardFlags' ) )
  1449. __IMAGE_BOUND_IMPORT_DESCRIPTOR_format__ = ('IMAGE_BOUND_IMPORT_DESCRIPTOR',
  1450. ('I,TimeDateStamp', 'H,OffsetModuleName', 'H,NumberOfModuleForwarderRefs'))
  1451. __IMAGE_BOUND_FORWARDER_REF_format__ = ('IMAGE_BOUND_FORWARDER_REF',
  1452. ('I,TimeDateStamp', 'H,OffsetModuleName', 'H,Reserved') )
  1453. def __init__(self, name=None, data=None, fast_load=None,
  1454. max_symbol_exports=MAX_SYMBOL_EXPORT_COUNT,
  1455. max_repeated_symbol=120):
  1456. self.max_symbol_exports = max_symbol_exports
  1457. self.max_repeated_symbol = max_repeated_symbol
  1458. self.sections = []
  1459. self.__warnings = []
  1460. self.PE_TYPE = None
  1461. if name is None and data is None:
  1462. raise ValueError('Must supply either name or data')
  1463. # This list will keep track of all the structures created.
  1464. # That will allow for an easy iteration through the list
  1465. # in order to save the modifications made
  1466. self.__structures__ = []
  1467. self.__from_file = None
  1468. # We only want to print these warnings once
  1469. self.FileAlignment_Warning = False
  1470. self.SectionAlignment_Warning = False
  1471. # Count of total resource entries across nested tables
  1472. self.__total_resource_entries_count = 0
  1473. # Sum of the size of all resource entries parsed, which should not
  1474. # exceed the file size.
  1475. self.__total_resource_bytes = 0
  1476. # The number of imports parsed in this file
  1477. self.__total_import_symbols = 0
  1478. fast_load = fast_load or globals()['fast_load']
  1479. try:
  1480. self.__parse__(name, data, fast_load)
  1481. except:
  1482. self.close()
  1483. raise
  1484. def close(self):
  1485. if ( self.__from_file is True and hasattr(self, '__data__') and
  1486. ((isinstance(mmap.mmap, type) and isinstance(self.__data__, mmap.mmap)) or
  1487. 'mmap.mmap' in repr(type(self.__data__))) ):
  1488. self.__data__.close()
  1489. del self.__data__
  1490. def __unpack_data__(self, format, data, file_offset):
  1491. """Apply structure format to raw data.
  1492. Returns an unpacked structure object if successful, None otherwise.
  1493. """
  1494. structure = Structure(format, file_offset=file_offset)
  1495. try:
  1496. structure.__unpack__(data)
  1497. except PEFormatError as err:
  1498. self.__warnings.append(
  1499. 'Corrupt header "{0}" at file offset {1}. Exception: {2}'.format(
  1500. format[0], file_offset, err) )
  1501. return None
  1502. self.__structures__.append(structure)
  1503. return structure
  1504. def __parse__(self, fname, data, fast_load):
  1505. """Parse a Portable Executable file.
  1506. Loads a PE file, parsing all its structures and making them available
  1507. through the instance's attributes.
  1508. """
  1509. if fname is not None:
  1510. stat = os.stat(fname)
  1511. if stat.st_size == 0:
  1512. raise PEFormatError('The file is empty')
  1513. fd = None
  1514. try:
  1515. fd = open(fname, 'rb')
  1516. self.fileno = fd.fileno()
  1517. if hasattr(mmap, 'MAP_PRIVATE'):
  1518. # Unix
  1519. self.__data__ = mmap.mmap(self.fileno, 0, mmap.MAP_PRIVATE)
  1520. else:
  1521. # Windows
  1522. self.__data__ = mmap.mmap(self.fileno, 0, access=mmap.ACCESS_READ)
  1523. self.__from_file = True
  1524. except IOError as excp:
  1525. exception_msg = '{0}'.format(excp)
  1526. exception_msg = exception_msg and (': %s' % exception_msg)
  1527. raise Exception('Unable to access file \'{0}\'{1}'.format(fname, exception_msg))
  1528. finally:
  1529. if fd is not None:
  1530. fd.close()
  1531. elif data is not None:
  1532. self.__data__ = data
  1533. self.__from_file = False
  1534. # Resources should not overlap each other, so they should not exceed the
  1535. # file size.
  1536. self.__resource_size_limit_upperbounds = len(self.__data__)
  1537. self.__resource_size_limit_reached = False
  1538. if not fast_load:
  1539. for byte, byte_count in Counter(bytearray(self.__data__)).items():
  1540. # Only report the cases where a byte makes up for more than 50% (if
  1541. # zero) or 15% (if non-zero) of the file's contents. There are
  1542. # legitimate PEs where 0x00 bytes are close to 50% of the whole
  1543. # file's contents.
  1544. if (byte == 0 and 1.0 * byte_count / len(self.__data__) > 0.5) or (
  1545. byte != 0 and 1.0 * byte_count / len(self.__data__) > 0.15):
  1546. self.__warnings.append(
  1547. ("Byte 0x{0:02x} makes up {1:.4f}% of the file's contents."
  1548. " This may indicate truncation / malformation.").format(
  1549. byte, 100.0 * byte_count / len(self.__data__)))
  1550. dos_header_data = self.__data__[:64]
  1551. if len(dos_header_data) != 64:
  1552. raise PEFormatError('Unable to read the DOS Header, possibly a truncated file.')
  1553. self.DOS_HEADER = self.__unpack_data__(
  1554. self.__IMAGE_DOS_HEADER_format__,
  1555. dos_header_data, file_offset=0)
# Guard against a failed header unpack before dereferencing e_magic
if self.DOS_HEADER and self.DOS_HEADER.e_magic == IMAGE_DOSZM_SIGNATURE:
  1557. raise PEFormatError('Probably a ZM Executable (not a PE file).')
  1558. if not self.DOS_HEADER or self.DOS_HEADER.e_magic != IMAGE_DOS_SIGNATURE:
  1559. raise PEFormatError('DOS Header magic not found.')
  1560. # OC Patch:
  1561. # Check for sane value in e_lfanew
  1562. #
  1563. if self.DOS_HEADER.e_lfanew > len(self.__data__):
  1564. raise PEFormatError('Invalid e_lfanew value, probably not a PE file')
  1565. nt_headers_offset = self.DOS_HEADER.e_lfanew
  1566. self.NT_HEADERS = self.__unpack_data__(
  1567. self.__IMAGE_NT_HEADERS_format__,
  1568. self.__data__[nt_headers_offset:nt_headers_offset+8],
  1569. file_offset = nt_headers_offset)
  1570. # We better check the signature right here, before the file screws
  1571. # around with sections:
  1572. # OC Patch:
  1573. # Some malware will cause the Signature value to not exist at all
  1574. if not self.NT_HEADERS or not self.NT_HEADERS.Signature:
  1575. raise PEFormatError('NT Headers not found.')
  1576. if (0xFFFF & self.NT_HEADERS.Signature) == IMAGE_NE_SIGNATURE:
  1577. raise PEFormatError('Invalid NT Headers signature. Probably a NE file')
  1578. if (0xFFFF & self.NT_HEADERS.Signature) == IMAGE_LE_SIGNATURE:
  1579. raise PEFormatError('Invalid NT Headers signature. Probably a LE file')
  1580. if (0xFFFF & self.NT_HEADERS.Signature) == IMAGE_LX_SIGNATURE:
  1581. raise PEFormatError('Invalid NT Headers signature. Probably a LX file')
  1582. if (0xFFFF & self.NT_HEADERS.Signature) == IMAGE_TE_SIGNATURE:
  1583. raise PEFormatError('Invalid NT Headers signature. Probably a TE file')
  1584. if self.NT_HEADERS.Signature != IMAGE_NT_SIGNATURE:
  1585. raise PEFormatError('Invalid NT Headers signature.')
  1586. self.FILE_HEADER = self.__unpack_data__(
  1587. self.__IMAGE_FILE_HEADER_format__,
  1588. self.__data__[nt_headers_offset+4:nt_headers_offset+4+32],
  1589. file_offset = nt_headers_offset+4)
  1590. image_flags = retrieve_flags(IMAGE_CHARACTERISTICS, 'IMAGE_FILE_')
  1591. if not self.FILE_HEADER:
  1592. raise PEFormatError('File Header missing')
# Set the image's flags according to the Characteristics member
  1594. set_flags(self.FILE_HEADER, self.FILE_HEADER.Characteristics, image_flags)
  1595. optional_header_offset = \
  1596. nt_headers_offset+4+self.FILE_HEADER.sizeof()
  1597. # Note: location of sections can be controlled from PE header:
  1598. sections_offset = optional_header_offset + self.FILE_HEADER.SizeOfOptionalHeader
  1599. self.OPTIONAL_HEADER = self.__unpack_data__(
  1600. self.__IMAGE_OPTIONAL_HEADER_format__,
# Read up to 256 bytes; it is fine to grab more data than the header needs
  1602. self.__data__[optional_header_offset:optional_header_offset+256],
  1603. file_offset = optional_header_offset)
  1604. # According to solardesigner's findings for his
  1605. # Tiny PE project, the optional header does not
  1606. # need fields beyond "Subsystem" in order to be
  1607. # loadable by the Windows loader (given that zeros
  1608. # are acceptable values and the header is loaded
  1609. # in a zeroed memory page)
  1610. # If trying to parse a full Optional Header fails
  1611. # we try to parse it again with some 0 padding
  1612. #
  1613. MINIMUM_VALID_OPTIONAL_HEADER_RAW_SIZE = 69
  1614. if ( self.OPTIONAL_HEADER is None and
  1615. len(self.__data__[optional_header_offset:optional_header_offset+0x200])
  1616. >= MINIMUM_VALID_OPTIONAL_HEADER_RAW_SIZE ):
  1617. # Add enough zeros to make up for the unused fields
  1618. #
  1619. padding_length = 128
  1620. # Create padding
  1621. #
  1622. padded_data = self.__data__[optional_header_offset:optional_header_offset+0x200] + (
  1623. b'\0' * padding_length)
  1624. self.OPTIONAL_HEADER = self.__unpack_data__(
  1625. self.__IMAGE_OPTIONAL_HEADER_format__,
  1626. padded_data,
  1627. file_offset = optional_header_offset)
  1628. # Check the Magic in the OPTIONAL_HEADER and set the PE file
  1629. # type accordingly
  1630. #
  1631. if self.OPTIONAL_HEADER is not None:
  1632. if self.OPTIONAL_HEADER.Magic == OPTIONAL_HEADER_MAGIC_PE:
  1633. self.PE_TYPE = OPTIONAL_HEADER_MAGIC_PE
  1634. elif self.OPTIONAL_HEADER.Magic == OPTIONAL_HEADER_MAGIC_PE_PLUS:
  1635. self.PE_TYPE = OPTIONAL_HEADER_MAGIC_PE_PLUS
  1636. self.OPTIONAL_HEADER = self.__unpack_data__(
  1637. self.__IMAGE_OPTIONAL_HEADER64_format__,
  1638. self.__data__[optional_header_offset:optional_header_offset+0x200],
  1639. file_offset = optional_header_offset)
  1640. # Again, as explained above, we try to parse
  1641. # a reduced form of the Optional Header which
  1642. # is still valid despite not including all
  1643. # structure members
  1644. #
  1645. MINIMUM_VALID_OPTIONAL_HEADER_RAW_SIZE = 69+4
  1646. if ( self.OPTIONAL_HEADER is None and
  1647. len(self.__data__[optional_header_offset:optional_header_offset+0x200])
  1648. >= MINIMUM_VALID_OPTIONAL_HEADER_RAW_SIZE ):
  1649. padding_length = 128
  1650. padded_data = self.__data__[optional_header_offset:optional_header_offset+0x200] + (
  1651. b'\0' * padding_length)
  1652. self.OPTIONAL_HEADER = self.__unpack_data__(
  1653. self.__IMAGE_OPTIONAL_HEADER64_format__,
  1654. padded_data,
  1655. file_offset = optional_header_offset)
  1656. if not self.FILE_HEADER:
  1657. raise PEFormatError('File Header missing')
  1658. # OC Patch:
  1659. # Die gracefully if there is no OPTIONAL_HEADER field
  1660. # 975440f5ad5e2e4a92c4d9a5f22f75c1
  1661. if self.OPTIONAL_HEADER is None:
  1662. raise PEFormatError("No Optional Header found, invalid PE32 or PE32+ file.")
  1663. if self.PE_TYPE is None:
  1664. self.__warnings.append(
  1665. "Invalid type 0x{0:04x} in Optional Header.".format(
  1666. self.OPTIONAL_HEADER.Magic))
  1667. dll_characteristics_flags = retrieve_flags(DLL_CHARACTERISTICS, 'IMAGE_DLLCHARACTERISTICS_')
# Set the Dll Characteristics flags according to the DllCharacteristics member
  1669. set_flags(
  1670. self.OPTIONAL_HEADER,
  1671. self.OPTIONAL_HEADER.DllCharacteristics,
  1672. dll_characteristics_flags)
  1673. self.OPTIONAL_HEADER.DATA_DIRECTORY = []
  1674. #offset = (optional_header_offset + self.FILE_HEADER.SizeOfOptionalHeader)
  1675. offset = (optional_header_offset + self.OPTIONAL_HEADER.sizeof())
  1676. self.NT_HEADERS.FILE_HEADER = self.FILE_HEADER
  1677. self.NT_HEADERS.OPTIONAL_HEADER = self.OPTIONAL_HEADER
  1678. # Windows 8 specific check
  1679. #
  1680. if self.OPTIONAL_HEADER.AddressOfEntryPoint < self.OPTIONAL_HEADER.SizeOfHeaders:
  1681. self.__warnings.append(
  1682. 'SizeOfHeaders is smaller than AddressOfEntryPoint: this file cannot run under Windows 8.')
# The NumberOfRvaAndSizes is sanitized to stay within
# reasonable limits so it can be cast to an int
  1685. #
  1686. if self.OPTIONAL_HEADER.NumberOfRvaAndSizes > 0x10:
  1687. self.__warnings.append(
  1688. 'Suspicious NumberOfRvaAndSizes in the Optional Header. '
  1689. 'Normal values are never larger than 0x10, the value is: 0x%x' %
  1690. self.OPTIONAL_HEADER.NumberOfRvaAndSizes )
  1691. MAX_ASSUMED_VALID_NUMBER_OF_RVA_AND_SIZES = 0x100
  1692. for i in range(int(0x7fffffff & self.OPTIONAL_HEADER.NumberOfRvaAndSizes)):
  1693. if len(self.__data__) - offset == 0:
  1694. break
  1695. if len(self.__data__) - offset < 8:
  1696. data = self.__data__[offset:] + b'\0'*8
  1697. else:
  1698. data = self.__data__[offset:offset+MAX_ASSUMED_VALID_NUMBER_OF_RVA_AND_SIZES]
  1699. dir_entry = self.__unpack_data__(
  1700. self.__IMAGE_DATA_DIRECTORY_format__,
  1701. data,
  1702. file_offset = offset)
  1703. if dir_entry is None:
  1704. break
  1705. # Would fail if missing an entry
  1706. # 1d4937b2fa4d84ad1bce0309857e70ca offending sample
  1707. try:
  1708. dir_entry.name = DIRECTORY_ENTRY[i]
  1709. except (KeyError, AttributeError):
  1710. break
  1711. offset += dir_entry.sizeof()
  1712. self.OPTIONAL_HEADER.DATA_DIRECTORY.append(dir_entry)
  1713. # If the offset goes outside the optional header,
  1714. # the loop is broken, regardless of how many directories
  1715. # NumberOfRvaAndSizes says there are
  1716. #
# We assume a normally sized optional header, hence we use
# sizeof() instead of reading SizeOfOptionalHeader.
  1719. # Then we add a default number of directories times their size,
  1720. # if we go beyond that, we assume the number of directories
  1721. # is wrong and stop processing
  1722. if offset >= (optional_header_offset +
  1723. self.OPTIONAL_HEADER.sizeof() + 8*16) :
  1724. break
  1725. offset = self.parse_sections(sections_offset)
  1726. # OC Patch:
  1727. # There could be a problem if there are no raw data sections
  1728. # greater than 0
  1729. # fc91013eb72529da005110a3403541b6 example
# Should this throw an exception if the minimum header offset
  1731. # can't be found?
  1732. #
  1733. rawDataPointers = [
  1734. self.adjust_FileAlignment( s.PointerToRawData,
  1735. self.OPTIONAL_HEADER.FileAlignment )
  1736. for s in self.sections if s.PointerToRawData>0 ]
  1737. if len(rawDataPointers) > 0:
  1738. lowest_section_offset = min(rawDataPointers)
  1739. else:
  1740. lowest_section_offset = None
  1741. if not lowest_section_offset or lowest_section_offset < offset:
  1742. self.header = self.__data__[:offset]
  1743. else:
  1744. self.header = self.__data__[:lowest_section_offset]
  1745. # Check whether the entry point lies within a section
  1746. #
  1747. if self.get_section_by_rva(self.OPTIONAL_HEADER.AddressOfEntryPoint) is not None:
  1748. # Check whether the entry point lies within the file
  1749. #
  1750. ep_offset = self.get_offset_from_rva(self.OPTIONAL_HEADER.AddressOfEntryPoint)
  1751. if ep_offset > len(self.__data__):
  1752. self.__warnings.append(
  1753. 'Possibly corrupt file. AddressOfEntryPoint lies outside the file. '
  1754. 'AddressOfEntryPoint: 0x%x' %
  1755. self.OPTIONAL_HEADER.AddressOfEntryPoint )
  1756. else:
  1757. self.__warnings.append(
  1758. 'AddressOfEntryPoint lies outside the sections\' boundaries. '
  1759. 'AddressOfEntryPoint: 0x%x' %
  1760. self.OPTIONAL_HEADER.AddressOfEntryPoint )
  1761. if not fast_load:
  1762. self.full_load()
  1763. def parse_rich_header(self):
  1764. """Parses the rich header
  1765. see http://www.ntcore.com/files/richsign.htm for more information
  1766. Structure:
  1767. 00 DanS ^ checksum, checksum, checksum, checksum
  1768. 10 Symbol RVA ^ checksum, Symbol size ^ checksum...
  1769. ...
  1770. XX Rich, checksum, 0, 0,...
  1771. """
  1772. # Rich Header constants
  1773. #
  1774. DANS = 0x536E6144 # 'DanS' as dword
  1775. RICH = 0x68636952 # 'Rich' as dword
  1776. rich_index = self.__data__.find(
  1777. b'Rich', 0x80, self.OPTIONAL_HEADER.get_file_offset())
  1778. if rich_index == -1:
  1779. return None
  1780. # Read a block of data
  1781. try:
  1782. # The end of the structure is 8 bytes after the start of the Rich
  1783. # string.
  1784. rich_data = self.__data__[0x80:rich_index + 8]
  1785. # Make the data have length a multiple of 4, otherwise the
# subsequent parsing will fail. It's possible that we retrieve
# truncated data whose length is not a multiple of 4.
  1788. rich_data = rich_data[:4*int(len(rich_data)/4)]
  1789. data = list(struct.unpack(
  1790. '<{0}I'.format(int(len(rich_data)/4)), rich_data))
  1791. if RICH not in data:
  1792. return None
  1793. except PEFormatError:
  1794. return None
  1795. # get key, raw_data and clear_data
  1796. key = struct.pack('<L', data[data.index(RICH)+1])
  1797. result = {"key": key}
  1798. raw_data = rich_data[:rich_data.find(b'Rich')]
  1799. result["raw_data"] = raw_data
  1800. ord_ = lambda c : ord(c) if not isinstance(c, int) else c
  1801. clear_data = bytearray()
  1802. for i in range(len(raw_data)):
  1803. clear_data.append((ord_(raw_data[i]) ^ ord_(key[i % len(key)])))
  1804. result["clear_data"] = bytes(clear_data)
  1805. # the checksum should be present 3 times after the DanS signature
  1806. #
  1807. checksum = data[1]
  1808. if (data[0] ^ checksum != DANS
  1809. or data[2] != checksum
  1810. or data[3] != checksum):
  1811. return None
  1812. result["checksum"] = checksum
  1813. headervalues = []
  1814. result["values"] = headervalues
  1815. data = data[4:]
  1816. for i in range(int(len(data) / 2)):
# Stop once the Rich footer signature is found
  1818. #
  1819. if data[2 * i] == RICH:
  1820. # it should be followed by the checksum
  1821. #
  1822. if data[2 * i + 1] != checksum:
  1823. self.__warnings.append('Rich Header is malformed')
  1824. break
  1825. # header values come by pairs
  1826. #
  1827. headervalues += [data[2 * i] ^ checksum, data[2 * i + 1] ^ checksum]
  1828. return result
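# The returned "values" list holds (comp.id, count) pairs that are already
# XOR-decoded. Splitting comp.id into product id and build number is the
# usual convention, not something this method does for you. Illustrative
# post-processing, assuming `pe` is a parsed PE instance:
#
#     rich = pe.parse_rich_header()
#     if rich:
#         for comp_id, count in zip(rich['values'][::2], rich['values'][1::2]):
#             print(comp_id >> 16, comp_id & 0xffff, count)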
  1829. def get_warnings(self):
  1830. """Return the list of warnings.
  1831. Non-critical problems found when parsing the PE file are
  1832. appended to a list of warnings. This method returns the
  1833. full list.
  1834. """
  1835. return self.__warnings
  1836. def show_warnings(self):
  1837. """Print the list of warnings.
  1838. Non-critical problems found when parsing the PE file are
  1839. appended to a list of warnings. This method prints the
  1840. full list to standard output.
  1841. """
  1842. for warning in self.__warnings:
  1843. print('>', warning)
  1844. def full_load(self):
  1845. """Process the data directories.
  1846. This method will load the data directories which might not have
  1847. been loaded if the "fast_load" option was used.
  1848. """
  1849. self.parse_data_directories()
  1850. class RichHeader(object):
  1851. pass
  1852. rich_header = self.parse_rich_header()
  1853. if rich_header:
  1854. self.RICH_HEADER = RichHeader()
  1855. self.RICH_HEADER.checksum = rich_header.get('checksum', None)
  1856. self.RICH_HEADER.values = rich_header.get('values', None)
  1857. self.RICH_HEADER.key = rich_header.get('key', None)
  1858. self.RICH_HEADER.raw_data = rich_header.get('raw_data', None)
  1859. self.RICH_HEADER.clear_data = rich_header.get('clear_data', None)
  1860. else:
  1861. self.RICH_HEADER = None
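# Illustrative two-stage loading ('sample.exe' is a placeholder path): parse
# only the headers first, then pull in the data directories when needed.
#
#     pe = PE('sample.exe', fast_load=True)
#     print(hex(pe.OPTIONAL_HEADER.AddressOfEntryPoint))
#     pe.full_load()   # the DIRECTORY_ENTRY_* attributes get populated here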
  1862. def write(self, filename=None):
  1863. """Write the PE file.
  1864. This function will process all headers and components
  1865. of the PE file and include all changes made (by just
  1866. assigning to attributes in the PE objects) and write
  1867. the changes back to a file whose name is provided as
an argument. The filename is optional; if not
provided, the data is returned as a bytearray object.
  1870. """
  1871. file_data = bytearray(self.__data__)
  1872. for structure in self.__structures__:
  1873. struct_data = bytearray(structure.__pack__())
  1874. offset = structure.get_file_offset()
  1875. file_data[offset:offset+len(struct_data)] = struct_data
  1876. if hasattr(self, 'VS_VERSIONINFO'):
  1877. if hasattr(self, 'FileInfo'):
  1878. for finfo in self.FileInfo:
  1879. for entry in finfo:
  1880. if hasattr(entry, 'StringTable'):
  1881. for st_entry in entry.StringTable:
  1882. for key, entry in list(st_entry.entries.items()):
  1883. # Offsets and lengths of the keys and values.
  1884. # Each value in the dictionary is a tuple:
  1885. # (key length, value length)
  1886. # The lengths are in characters, not in bytes.
  1887. offsets = st_entry.entries_offsets[key]
  1888. lengths = st_entry.entries_lengths[key]
  1889. if len( entry ) > lengths[1]:
  1890. l = entry.decode('utf-8').encode('utf-16le')
  1891. file_data[offsets[1]:offsets[1]+lengths[1]*2 ] = l[:lengths[1]*2]
  1892. else:
  1893. encoded_data = entry.decode('utf-8').encode('utf-16le')
  1894. file_data[offsets[1]:offsets[1]+len(encoded_data)] = encoded_data
  1895. new_file_data = file_data
  1896. if not filename:
  1897. return new_file_data
  1898. f = open(filename, 'wb+')
  1899. f.write(new_file_data)
  1900. f.close()
  1901. return
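# Illustrative round trip (placeholder paths): change a header field and let
# write() serialize every parsed structure back over the original bytes.
#
#     pe = PE('sample.exe')
#     pe.OPTIONAL_HEADER.CheckSum = 0
#     pe.write('patched.exe')     # or data = pe.write() to get the raw bytes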
  1902. def parse_sections(self, offset):
  1903. """Fetch the PE file sections.
  1904. The sections will be readily available in the "sections" attribute.
  1905. Its attributes will contain all the section information plus "data"
  1906. a buffer containing the section's data.
  1907. The "Characteristics" member will be processed and attributes
  1908. representing the section characteristics (with the 'IMAGE_SCN_'
  1909. string trimmed from the constant's names) will be added to the
  1910. section instance.
  1911. Refer to the SectionStructure class for additional info.
  1912. """
  1913. self.sections = []
  1914. MAX_SIMULTANEOUS_ERRORS = 3
  1915. for i in range(self.FILE_HEADER.NumberOfSections):
  1916. if i >= MAX_SECTIONS:
  1917. self.__warnings.append("Too many sections {0} (>={1})".format(
  1918. self.FILE_HEADER.NumberOfSections, MAX_SECTIONS))
  1919. break
  1920. simultaneous_errors = 0
  1921. section = SectionStructure( self.__IMAGE_SECTION_HEADER_format__, pe=self )
  1922. if not section:
  1923. break
  1924. section_offset = offset + section.sizeof() * i
  1925. section.set_file_offset(section_offset)
  1926. section_data = self.__data__[section_offset : section_offset + section.sizeof()]
  1927. # Check if the section is all nulls and stop if so.
  1928. if count_zeroes(section_data) == section.sizeof():
  1929. self.__warnings.append(
  1930. 'Invalid section {0}. Contents are null-bytes.'.format(i))
  1931. break
  1932. if not section_data:
  1933. self.__warnings.append(
  1934. 'Invalid section {0}. No data in the file (is this corkami\'s virtsectblXP?).'.format(i))
  1935. break
  1936. section.__unpack__(section_data)
  1937. self.__structures__.append(section)
  1938. if section.SizeOfRawData+section.PointerToRawData > len(self.__data__):
  1939. simultaneous_errors += 1
  1940. self.__warnings.append(
  1941. 'Error parsing section {0}. SizeOfRawData is larger than file.'.format(i))
  1942. if self.adjust_FileAlignment( section.PointerToRawData,
  1943. self.OPTIONAL_HEADER.FileAlignment ) > len(self.__data__):
  1944. simultaneous_errors += 1
  1945. self.__warnings.append(
  1946. 'Error parsing section {0}. PointerToRawData points beyond the end of the file.'.format(i))
  1947. if section.Misc_VirtualSize > 0x10000000:
  1948. simultaneous_errors += 1
  1949. self.__warnings.append(
  1950. 'Suspicious value found parsing section {0}. VirtualSize is extremely large > 256MiB.'.format(i))
  1951. if self.adjust_SectionAlignment( section.VirtualAddress,
  1952. self.OPTIONAL_HEADER.SectionAlignment, self.OPTIONAL_HEADER.FileAlignment ) > 0x10000000:
  1953. simultaneous_errors += 1
  1954. self.__warnings.append(
  1955. 'Suspicious value found parsing section {0}. VirtualAddress is beyond 0x10000000.'.format(i))
  1956. if ( self.OPTIONAL_HEADER.FileAlignment != 0 and
  1957. ( section.PointerToRawData % self.OPTIONAL_HEADER.FileAlignment) != 0):
  1958. simultaneous_errors += 1
  1959. self.__warnings.append(
  1960. ('Error parsing section {0}. '
  1961. 'PointerToRawData should normally be '
  1962. 'a multiple of FileAlignment, this might imply the file '
  1963. 'is trying to confuse tools which parse this incorrectly.').format(i))
  1964. if simultaneous_errors >= MAX_SIMULTANEOUS_ERRORS:
  1965. self.__warnings.append('Too many warnings parsing section. Aborting.')
  1966. break
  1967. section_flags = retrieve_flags(SECTION_CHARACTERISTICS, 'IMAGE_SCN_')
# Set the section's flags according to the Characteristics member
  1969. set_flags(section, section.Characteristics, section_flags)
  1970. if ( section.__dict__.get('IMAGE_SCN_MEM_WRITE', False) and
  1971. section.__dict__.get('IMAGE_SCN_MEM_EXECUTE', False) ):
  1972. if section.Name.rstrip(b'\x00') == b'PAGE' and self.is_driver():
  1973. # Drivers can have a PAGE section with those flags set without
  1974. # implying that it is malicious
  1975. pass
  1976. else:
  1977. self.__warnings.append(
  1978. ('Suspicious flags set for section %d. ' % i) +
  1979. 'Both IMAGE_SCN_MEM_WRITE and IMAGE_SCN_MEM_EXECUTE are set. '
  1980. 'This might indicate a packed executable.')
  1981. self.sections.append(section)
  1982. # Sort the sections by their VirtualAddress and add a field to each of them
  1983. # with the VirtualAddress of the next section. This will allow to check
  1984. # for potentially overlapping sections in badly constructed PEs.
  1985. self.sections.sort(key=lambda a: a.VirtualAddress)
  1986. for idx, section in enumerate(self.sections):
  1987. if idx == len(self.sections)-1:
  1988. section.next_section_virtual_address = None
  1989. else:
  1990. section.next_section_virtual_address = self.sections[idx+1].VirtualAddress
  1991. if self.FILE_HEADER.NumberOfSections > 0 and self.sections:
  1992. return offset + self.sections[0].sizeof()*self.FILE_HEADER.NumberOfSections
  1993. else:
  1994. return offset
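# The sections parsed above end up in self.sections; a typical report over
# them ('sample.exe' is a placeholder path):
#
#     pe = PE('sample.exe')
#     for sec in pe.sections:
#         print(sec.Name.rstrip(b'\x00'), hex(sec.VirtualAddress),
#               hex(sec.SizeOfRawData), round(sec.get_entropy(), 2))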
    def parse_data_directories(self, directories=None,
                               forwarded_exports_only=False,
                               import_dllnames_only=False):
        """Parse and process the PE file's data directories.

        If the optional argument 'directories' is given, only
        the directories at the specified indexes will be parsed.
        Such functionality allows parsing of areas of interest
        without the burden of having to parse all others.

        The directories can then be specified as:

        For export / import only:

          directories = [ 0, 1 ]

        or (more verbosely):

          directories = [ DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT'],
            DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_EXPORT'] ]

        If 'directories' is a list, the ones that are processed will be removed,
        leaving only the ones that are not present in the image.

        If `forwarded_exports_only` is True, the IMAGE_DIRECTORY_ENTRY_EXPORT
        attribute will only contain exports that are forwarded to another DLL.

        If `import_dllnames_only` is True, symbols will not be parsed from
        the import table and the entries in the IMAGE_DIRECTORY_ENTRY_IMPORT
        attribute will not have a `symbols` attribute.
        """

        directory_parsing = (
            ('IMAGE_DIRECTORY_ENTRY_IMPORT', self.parse_import_directory),
            ('IMAGE_DIRECTORY_ENTRY_EXPORT', self.parse_export_directory),
            ('IMAGE_DIRECTORY_ENTRY_RESOURCE', self.parse_resources_directory),
            ('IMAGE_DIRECTORY_ENTRY_DEBUG', self.parse_debug_directory),
            ('IMAGE_DIRECTORY_ENTRY_BASERELOC', self.parse_relocations_directory),
            ('IMAGE_DIRECTORY_ENTRY_TLS', self.parse_directory_tls),
            ('IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG', self.parse_directory_load_config),
            ('IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT', self.parse_delay_import_directory),
            ('IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT', self.parse_directory_bound_imports) )

        if directories is not None:
            if not isinstance(directories, (tuple, list)):
                directories = [directories]

        for entry in directory_parsing:
            # OC Patch:
            #
            try:
                directory_index = DIRECTORY_ENTRY[entry[0]]
                dir_entry = self.OPTIONAL_HEADER.DATA_DIRECTORY[directory_index]
            except IndexError:
                break

            # Only process all the directories if no individual ones have
            # been chosen
            #
            if directories is None or directory_index in directories:
                if dir_entry.VirtualAddress:
                    if forwarded_exports_only and entry[0] == 'IMAGE_DIRECTORY_ENTRY_EXPORT':
                        value = entry[1](dir_entry.VirtualAddress, dir_entry.Size, forwarded_only=True)
                    elif import_dllnames_only and entry[0] == 'IMAGE_DIRECTORY_ENTRY_IMPORT':
                        value = entry[1](dir_entry.VirtualAddress, dir_entry.Size, dllnames_only=True)
                    else:
                        value = entry[1](dir_entry.VirtualAddress, dir_entry.Size)
                    if value:
                        setattr(self, entry[0][6:], value)

            if (directories is not None) and isinstance(directories, list) and (entry[0] in directories):
                directories.remove(directory_index)

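    # Illustrative usage sketch (not part of the library; the PE instance and
    # the file name 'sample.exe' are assumptions): only selected data
    # directories could be processed after a fast load, e.g.:
    #
    #   pe = PE('sample.exe', fast_load=True)
    #   pe.parse_data_directories(directories=[
    #       DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT'],
    #       DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_EXPORT']])
    #   for entry in getattr(pe, 'DIRECTORY_ENTRY_IMPORT', []):
    #       print(entry.dll)
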
    def parse_directory_bound_imports(self, rva, size):
        """"""

        bnd_descr = Structure(self.__IMAGE_BOUND_IMPORT_DESCRIPTOR_format__)
        bnd_descr_size = bnd_descr.sizeof()
        start = rva

        bound_imports = []
        while True:

            bnd_descr = self.__unpack_data__(
                self.__IMAGE_BOUND_IMPORT_DESCRIPTOR_format__,
                self.__data__[rva:rva+bnd_descr_size],
                file_offset = rva)
            if bnd_descr is None:
                # If can't parse directory then silently return.
                # This directory does not necessarily have to be valid to
                # still have a valid PE file
                self.__warnings.append(
                    'The Bound Imports directory exists but can\'t be parsed.')
                return

            if bnd_descr.all_zeroes():
                break

            rva += bnd_descr.sizeof()

            section = self.get_section_by_offset(rva)
            file_offset = self.get_offset_from_rva(rva)
            if section is None:
                safety_boundary = len(self.__data__) - file_offset
                sections_after_offset = [
                    s.PointerToRawData for s in self.sections
                    if s.PointerToRawData > file_offset]
                if sections_after_offset:
                    # Find the first section starting at a later offset than that
                    # specified by 'rva'
                    first_section_after_offset = min(sections_after_offset)
                    section = self.get_section_by_offset(first_section_after_offset)
                    if section is not None:
                        safety_boundary = section.PointerToRawData - file_offset
            else:
                safety_boundary = (section.PointerToRawData +
                                   len(section.get_data()) - file_offset)

            if not section:
                self.__warnings.append(
                    ('RVA of IMAGE_BOUND_IMPORT_DESCRIPTOR points '
                     'to an invalid address: {0:x}').format(rva))
                return

            forwarder_refs = []
            # 8 is the size of __IMAGE_BOUND_IMPORT_DESCRIPTOR_format__
            for idx in range(min(bnd_descr.NumberOfModuleForwarderRefs,
                                 int(safety_boundary / 8))):
                # Both structures IMAGE_BOUND_IMPORT_DESCRIPTOR and
                # IMAGE_BOUND_FORWARDER_REF have the same size.
                bnd_frwd_ref = self.__unpack_data__(
                    self.__IMAGE_BOUND_FORWARDER_REF_format__,
                    self.__data__[rva:rva+bnd_descr_size],
                    file_offset = rva)
                # OC Patch:
                if not bnd_frwd_ref:
                    raise PEFormatError(
                        "IMAGE_BOUND_FORWARDER_REF cannot be read")
                rva += bnd_frwd_ref.sizeof()

                offset = start+bnd_frwd_ref.OffsetModuleName
                name_str = self.get_string_from_data(
                    0, self.__data__[offset : offset + MAX_STRING_LENGTH])

                # OffsetModuleName points to a DLL name. These shouldn't be too long.
                # Anything longer than a safety length of 256 characters, or containing
                # non-printable characters, will be taken to indicate a corrupt entry
                # and abort the processing of these entries.
                if name_str:
                    invalid_chars = [
                        c for c in bytearray(name_str) if
                        chr(c) not in string.printable]
                    if len(name_str) > 256 or invalid_chars:
                        break

                forwarder_refs.append(BoundImportRefData(
                    struct = bnd_frwd_ref,
                    name = name_str))

            offset = start+bnd_descr.OffsetModuleName
            name_str = self.get_string_from_data(
                0, self.__data__[offset : offset + MAX_STRING_LENGTH])

            if name_str:
                invalid_chars = [
                    c for c in bytearray(name_str) if
                    chr(c) not in string.printable]
                if len(name_str) > 256 or invalid_chars:
                    break

            if not name_str:
                break
            bound_imports.append(
                BoundImportDescData(
                    struct = bnd_descr,
                    name = name_str,
                    entries = forwarder_refs))

        return bound_imports

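    # Illustrative sketch only ('pe' is an assumed, already-parsed instance):
    # the result of this method is exposed (via the setattr in
    # parse_data_directories) as DIRECTORY_ENTRY_BOUND_IMPORT and can be
    # walked like this:
    #
    #   for bound in getattr(pe, 'DIRECTORY_ENTRY_BOUND_IMPORT', []):
    #       print(bound.name)
    #       for ref in bound.entries:
    #           print('  forwarded module:', ref.name)
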
    def parse_directory_tls(self, rva, size):
        """"""

        # By default let's pretend the format is a 32-bit PE. It may help
        # produce some output for files where the Magic in the Optional Header
        # is incorrect.
        format = self.__IMAGE_TLS_DIRECTORY_format__

        if self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE_PLUS:
            format = self.__IMAGE_TLS_DIRECTORY64_format__

        try:
            tls_struct = self.__unpack_data__(
                format,
                self.get_data( rva, Structure(format).sizeof() ),
                file_offset = self.get_offset_from_rva(rva))
        except PEFormatError:
            self.__warnings.append(
                'Invalid TLS information. Can\'t read '
                'data at RVA: 0x%x' % rva)
            tls_struct = None

        if not tls_struct:
            return None

        return TlsData( struct = tls_struct )

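    # Illustrative sketch only ('pe' is an assumed, already-parsed instance):
    # after parsing, the TLS directory is exposed as DIRECTORY_ENTRY_TLS and
    # the raw IMAGE_TLS_DIRECTORY fields hang off its 'struct' attribute:
    #
    #   if hasattr(pe, 'DIRECTORY_ENTRY_TLS'):
    #       print(hex(pe.DIRECTORY_ENTRY_TLS.struct.AddressOfCallBacks))
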
    def parse_directory_load_config(self, rva, size):
        """"""

        if self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE:
            format = self.__IMAGE_LOAD_CONFIG_DIRECTORY_format__
        elif self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE_PLUS:
            format = self.__IMAGE_LOAD_CONFIG_DIRECTORY64_format__
        else:
            self.__warnings.append(
                'Don\'t know how to parse LOAD_CONFIG information for non-PE32/'
                'PE32+ file')
            return None

        load_config_struct = None
        try:
            load_config_struct = self.__unpack_data__(
                format,
                self.get_data( rva, Structure(format).sizeof() ),
                file_offset = self.get_offset_from_rva(rva))
        except PEFormatError:
            self.__warnings.append(
                'Invalid LOAD_CONFIG information. Can\'t read '
                'data at RVA: 0x%x' % rva)

        if not load_config_struct:
            return None

        return LoadConfigData( struct = load_config_struct )

    def parse_relocations_directory(self, rva, size):
        """"""

        rlc_size = Structure(self.__IMAGE_BASE_RELOCATION_format__).sizeof()
        end = rva+size

        relocations = []
        while rva < end:

            # OC Patch:
            # Malware that has bad RVA entries will cause an error.
            # Just continue on after an exception
            #
            try:
                rlc = self.__unpack_data__(
                    self.__IMAGE_BASE_RELOCATION_format__,
                    self.get_data(rva, rlc_size),
                    file_offset = self.get_offset_from_rva(rva) )
            except PEFormatError:
                self.__warnings.append(
                    'Invalid relocation information. Can\'t read '
                    'data at RVA: 0x%x' % rva)
                rlc = None

            if not rlc:
                break

            # rlc.VirtualAddress must lie within the Image
            if rlc.VirtualAddress > self.OPTIONAL_HEADER.SizeOfImage:
                self.__warnings.append(
                    'Invalid relocation information. VirtualAddress outside'
                    ' of Image: 0x%x' % rlc.VirtualAddress)
                break

            # rlc.SizeOfBlock must be less than or equal to the size of the image
            # (it's a rather loose sanity test)
            if rlc.SizeOfBlock > self.OPTIONAL_HEADER.SizeOfImage:
                self.__warnings.append(
                    'Invalid relocation information. SizeOfBlock too large'
                    ': %d' % rlc.SizeOfBlock)
                break

            reloc_entries = self.parse_relocations(
                rva+rlc_size, rlc.VirtualAddress, rlc.SizeOfBlock-rlc_size )

            relocations.append(
                BaseRelocationData(
                    struct = rlc,
                    entries = reloc_entries))

            if not rlc.SizeOfBlock:
                break
            rva += rlc.SizeOfBlock

        return relocations

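    # Illustrative sketch only ('pe' is an assumed, already-parsed instance):
    # each parsed block ends up in DIRECTORY_ENTRY_BASERELOC together with its
    # decoded entries, which can be walked like this:
    #
    #   for base_reloc in getattr(pe, 'DIRECTORY_ENTRY_BASERELOC', []):
    #       print(hex(base_reloc.struct.VirtualAddress), base_reloc.struct.SizeOfBlock)
    #       for reloc in base_reloc.entries:
    #           print('  type %d at RVA 0x%x' % (reloc.type, reloc.rva))
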
    def parse_relocations(self, data_rva, rva, size):
        """"""

        try:
            data = self.get_data(data_rva, size)
            file_offset = self.get_offset_from_rva(data_rva)
        except PEFormatError as excp:
            self.__warnings.append(
                'Bad RVA in relocation data: 0x%x' % (data_rva))
            return []

        entries = []
        offsets_and_type = []
        for idx in range( int(len(data) / 2) ):

            entry = self.__unpack_data__(
                self.__IMAGE_BASE_RELOCATION_ENTRY_format__,
                data[idx*2:(idx+1)*2],
                file_offset = file_offset )

            if not entry:
                break
            word = entry.Data

            reloc_type = (word>>12)
            reloc_offset = (word & 0x0fff)

            if (reloc_offset, reloc_type) in offsets_and_type:
                self.__warnings.append(
                    'Overlapping offsets in relocation data '
                    'at RVA: 0x%x' % (reloc_offset+rva))
                break
            if len(offsets_and_type) >= 1000:
                offsets_and_type.pop()
            offsets_and_type.insert(0, (reloc_offset, reloc_type))

            entries.append(
                RelocationData(
                    struct = entry,
                    type = reloc_type,
                    base_rva = rva,
                    rva = reloc_offset+rva))
            file_offset += entry.sizeof()

        return entries

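    # Worked example of the encoding handled above (illustrative only): a raw
    # 16-bit entry of 0x3A10 decodes to type 0x3A10 >> 12 == 3
    # (IMAGE_REL_BASED_HIGHLOW) and offset 0x3A10 & 0x0fff == 0xA10, i.e. the
    # fixup applies at base_rva + 0xA10.
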
  2271. def parse_debug_directory(self, rva, size):
  2272. """"""
  2273. dbg_size = Structure(self.__IMAGE_DEBUG_DIRECTORY_format__).sizeof()
  2274. debug = []
  2275. for idx in range(int(size / dbg_size)):
  2276. try:
  2277. data = self.get_data(rva+dbg_size*idx, dbg_size)
  2278. except PEFormatError as e:
  2279. self.__warnings.append(
  2280. 'Invalid debug information. Can\'t read '
  2281. 'data at RVA: 0x%x' % rva)
  2282. return None
  2283. dbg = self.__unpack_data__(
  2284. self.__IMAGE_DEBUG_DIRECTORY_format__,
  2285. data, file_offset = self.get_offset_from_rva(rva+dbg_size*idx))
  2286. if not dbg:
  2287. return None
  2288. # apply structure according to DEBUG_TYPE
  2289. # http://www.debuginfo.com/articles/debuginfomatch.html
  2290. #
  2291. dbg_type = None
  2292. if dbg.Type == 1:
  2293. # IMAGE_DEBUG_TYPE_COFF
  2294. pass
  2295. elif dbg.Type == 2:
  2296. # if IMAGE_DEBUG_TYPE_CODEVIEW
  2297. dbg_type_offset = dbg.PointerToRawData
  2298. dbg_type_size = dbg.SizeOfData
  2299. dbg_type_data = self.__data__[dbg_type_offset:dbg_type_offset+dbg_type_size]
  2300. if dbg_type_data[:4] == b'RSDS':
  2301. # pdb7.0
  2302. __CV_INFO_PDB70_format__ = ['CV_INFO_PDB70',
  2303. ['I,CvSignature',
  2304. 'I,Signature_Data1', # Signature is of GUID type
  2305. 'H,Signature_Data2',
  2306. 'H,Signature_Data3',
  2307. '8s,Signature_Data4',
  2308. # 'H,Signature_Data5',
  2309. # 'I,Signature_Data6',
  2310. 'I,Age']]
  2311. pdbFileName_size = (
  2312. dbg_type_size -
  2313. Structure(__CV_INFO_PDB70_format__).sizeof())
  2314. # pdbFileName_size can be negative here, as seen in the malware sample with hash
  2315. # MD5: 7c297600870d026c014d42596bb9b5fd
  2316. # SHA256: 83f4e63681fcba8a9d7bbb1688c71981b1837446514a1773597e0192bba9fac3
  2317. # Checking for positive size here to ensure proper parsing.
  2318. if pdbFileName_size > 0:
  2319. __CV_INFO_PDB70_format__[1].append(
  2320. '{0}s,PdbFileName'.format(pdbFileName_size))
  2321. dbg_type = self.__unpack_data__(
  2322. __CV_INFO_PDB70_format__,
  2323. dbg_type_data,
  2324. dbg_type_offset)
  2325. elif dbg_type_data[:4] == b'NB10':
  2326. # pdb2.0
  2327. __CV_INFO_PDB20_format__ = ['CV_INFO_PDB20',
  2328. ['I,CvHeaderSignature',
  2329. 'I,CvHeaderOffset',
  2330. 'I,Signature',
  2331. 'I,Age']]
  2332. pdbFileName_size = (
  2333. dbg_type_size -
  2334. Structure(__CV_INFO_PDB20_format__).sizeof())
  2335. # As with the PDB 7.0 case, ensuring a positive size for pdbFileName_size
  2336. # to ensure proper parsing.
  2337. if pdbFileName_size > 0:
  2338. # Add the last variable-length string field.
  2339. __CV_INFO_PDB20_format__[1].append(
  2340. '{0}s,PdbFileName'.format(pdbFileName_size))
  2341. dbg_type = self.__unpack_data__(
  2342. __CV_INFO_PDB20_format__,
  2343. dbg_type_data,
  2344. dbg_type_offset)
  2345. elif dbg.Type == 4:
  2346. # IMAGE_DEBUG_TYPE_MISC
  2347. dbg_type_offset = dbg.PointerToRawData
  2348. dbg_type_size = dbg.SizeOfData
  2349. dbg_type_data = self.__data__[dbg_type_offset:dbg_type_offset+dbg_type_size]
  2350. ___IMAGE_DEBUG_MISC_format__ = ['IMAGE_DEBUG_MISC',
  2351. ['I,DataType',
  2352. 'I,Length',
  2353. 'B,Unicode',
  2354. 'B,Reserved1',
  2355. 'H,Reserved2']]
  2356. dbg_type_partial = self.__unpack_data__(
  2357. ___IMAGE_DEBUG_MISC_format__,
  2358. dbg_type_data,
  2359. dbg_type_offset)
  2360. # Need to check that dbg_type_partial contains a correctly unpacked data
  2361. # structure, as the malware sample with the following hash
  2362. # MD5: 5e7d6707d693108de5a303045c17d95b
  2363. # SHA256: 5dd94a95025f3b6e3dd440d52f7c6d2964fdd1aa119e0ee92e38c7bf83829e5c
  2364. # contains a value of None for dbg_type_partial after unpacking, presumably
  2365. # due to a malformed DEBUG entry.
  2366. if dbg_type_partial:
  2367. # The Unicode bool should be set to 0 or 1.
  2368. if dbg_type_partial.Unicode in (0, 1):
  2369. data_size = (
  2370. dbg_type_size -
  2371. Structure(___IMAGE_DEBUG_MISC_format__).sizeof())
  2372. # As with the PDB case, ensuring a positive size for data_size here
  2373. # to ensure proper parsing.
  2374. if data_size > 0:
  2375. ___IMAGE_DEBUG_MISC_format__[1].append(
  2376. '{0}s,Data'.format(data_size))
  2377. dbg_type = self.__unpack_data__(
  2378. ___IMAGE_DEBUG_MISC_format__,
  2379. dbg_type_data,
  2380. dbg_type_offset)
  2381. debug.append(
  2382. DebugData(
  2383. struct = dbg,
  2384. entry = dbg_type))
  2385. return debug
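    # Illustrative sketch only ('pe' is an assumed, already-parsed instance):
    # each DebugData in DIRECTORY_ENTRY_DEBUG carries the raw
    # IMAGE_DEBUG_DIRECTORY in 'struct' and, when a CodeView record could be
    # decoded above, the unpacked record in 'entry':
    #
    #   for dbg in getattr(pe, 'DIRECTORY_ENTRY_DEBUG', []):
    #       print(dbg.struct.Type)
    #       if dbg.entry is not None and hasattr(dbg.entry, 'PdbFileName'):
    #           print(dbg.entry.PdbFileName)
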
  2386. def parse_resources_directory(self, rva, size=0, base_rva = None, level = 0, dirs=None):
  2387. """Parse the resources directory.
  2388. Given the RVA of the resources directory, it will process all
  2389. its entries.
  2390. The root will have the corresponding member of its structure,
  2391. IMAGE_RESOURCE_DIRECTORY plus 'entries', a list of all the
  2392. entries in the directory.
  2393. Those entries will have, correspondingly, all the structure's
  2394. members (IMAGE_RESOURCE_DIRECTORY_ENTRY) and an additional one,
  2395. "directory", pointing to the IMAGE_RESOURCE_DIRECTORY structure
  2396. representing upper layers of the tree. This one will also have
  2397. an 'entries' attribute, pointing to the 3rd, and last, level.
  2398. Another directory with more entries. Those last entries will
  2399. have a new attribute (both 'leaf' or 'data_entry' can be used to
  2400. access it). This structure finally points to the resource data.
  2401. All the members of this structure, IMAGE_RESOURCE_DATA_ENTRY,
  2402. are available as its attributes.
  2403. """
  2404. # OC Patch:
  2405. if dirs is None:
  2406. dirs = [rva]
  2407. if base_rva is None:
  2408. base_rva = rva
  2409. if level > MAX_RESOURCE_DEPTH:
  2410. self.__warnings.append(
  2411. 'Error parsing the resources directory. '
  2412. 'Excessively nested table depth %d (>%s)' %
  2413. (level, MAX_RESOURCE_DEPTH) )
  2414. return None
  2415. resources_section = self.get_section_by_rva(rva)
  2416. try:
  2417. # If the RVA is invalid all would blow up. Some EXEs seem to be
  2418. # specially nasty and have an invalid RVA.
  2419. data = self.get_data(rva, Structure(self.__IMAGE_RESOURCE_DIRECTORY_format__).sizeof() )
  2420. except PEFormatError as e:
  2421. self.__warnings.append(
  2422. 'Invalid resources directory. Can\'t read '
  2423. 'directory data at RVA: 0x%x' % rva)
  2424. return None
  2425. # Get the resource directory structure, that is, the header
  2426. # of the table preceding the actual entries
  2427. #
  2428. resource_dir = self.__unpack_data__(
  2429. self.__IMAGE_RESOURCE_DIRECTORY_format__, data,
  2430. file_offset = self.get_offset_from_rva(rva) )
  2431. if resource_dir is None:
  2432. # If we can't parse resources directory then silently return.
  2433. # This directory does not necessarily have to be valid to
  2434. # still have a valid PE file
  2435. self.__warnings.append(
  2436. 'Invalid resources directory. Can\'t parse '
  2437. 'directory data at RVA: 0x%x' % rva)
  2438. return None
  2439. dir_entries = []
  2440. # Advance the RVA to the position immediately following the directory
  2441. # table header and pointing to the first entry in the table
  2442. #
  2443. rva += resource_dir.sizeof()
  2444. number_of_entries = (
  2445. resource_dir.NumberOfNamedEntries +
  2446. resource_dir.NumberOfIdEntries )
  2447. # Set a hard limit on the maximum reasonable number of entries
  2448. MAX_ALLOWED_ENTRIES = 4096
  2449. if number_of_entries > MAX_ALLOWED_ENTRIES:
  2450. self.__warnings.append(
  2451. 'Error parsing the resources directory. '
  2452. 'The directory contains %d entries (>%s)' %
  2453. (number_of_entries, MAX_ALLOWED_ENTRIES) )
  2454. return None
  2455. self.__total_resource_entries_count += number_of_entries
  2456. if self.__total_resource_entries_count > MAX_RESOURCE_ENTRIES:
  2457. self.__warnings.append(
  2458. 'Error parsing the resources directory. '
  2459. 'The file contains at least %d entries (>%d)' %
  2460. (self.__total_resource_entries_count, MAX_RESOURCE_ENTRIES) )
  2461. return None
  2462. strings_to_postprocess = list()
  2463. # Keep track of the last name's start and end offsets in order
  2464. # to be able to detect overlapping entries that might suggest
  2465. # and invalid or corrupt directory.
  2466. last_name_begin_end = None
  2467. for idx in range(number_of_entries):
  2468. if (not self.__resource_size_limit_reached and
  2469. self.__total_resource_bytes > self.__resource_size_limit_upperbounds):
  2470. self.__resource_size_limit_reached = True
  2471. self.__warnings.append(
  2472. 'Resource size 0x%x exceeds file size 0x%x, overlapping '
  2473. 'resources found.' %
  2474. (self.__total_resource_bytes,
  2475. self.__resource_size_limit_upperbounds) )
  2476. res = self.parse_resource_entry(rva)
  2477. if res is None:
  2478. self.__warnings.append(
  2479. 'Error parsing the resources directory, '
  2480. 'Entry %d is invalid, RVA = 0x%x. ' %
  2481. (idx, rva) )
  2482. break
  2483. entry_name = None
  2484. entry_id = None
  2485. name_is_string = (res.Name & 0x80000000) >> 31
  2486. if not name_is_string:
  2487. entry_id = res.Name
  2488. else:
  2489. ustr_offset = base_rva+res.NameOffset
  2490. try:
  2491. entry_name = UnicodeStringWrapperPostProcessor(self, ustr_offset)
  2492. self.__total_resource_bytes += entry_name.get_pascal_16_length()
  2493. # If the last entry's offset points before the current's but its end
  2494. # is past the current's beginning, assume the overlap indicates a
  2495. # corrupt name.
  2496. if last_name_begin_end and (last_name_begin_end[0] < ustr_offset and
  2497. last_name_begin_end[1] >= ustr_offset):
  2498. # Remove the previous overlapping entry as it's likely to be already corrupt data.
  2499. strings_to_postprocess.pop()
  2500. self.__warnings.append(
  2501. 'Error parsing the resources directory, '
  2502. 'attempting to read entry name. '
  2503. 'Entry names overlap 0x%x' %
  2504. (ustr_offset) )
  2505. break
  2506. last_name_begin_end = (ustr_offset, ustr_offset+entry_name.get_pascal_16_length())
  2507. strings_to_postprocess.append(entry_name)
  2508. except PEFormatError as excp:
  2509. self.__warnings.append(
  2510. 'Error parsing the resources directory, '
  2511. 'attempting to read entry name. '
  2512. 'Can\'t read unicode string at offset 0x%x' %
  2513. (ustr_offset) )
  2514. if res.DataIsDirectory:
  2515. # OC Patch:
  2516. #
  2517. # One trick malware can do is to recursively reference
  2518. # the next directory. This causes hilarity to ensue when
  2519. # trying to parse everything correctly.
  2520. # If the original RVA given to this function is equal to
  2521. # the next one to parse, we assume that it's a trick.
  2522. # Instead of raising a PEFormatError this would skip some
  2523. # reasonable data so we just break.
  2524. #
  2525. # 9ee4d0a0caf095314fd7041a3e4404dc is the offending sample
  2526. if (base_rva + res.OffsetToDirectory) in dirs:
  2527. break
  2528. else:
  2529. entry_directory = self.parse_resources_directory(
  2530. base_rva+res.OffsetToDirectory,
  2531. size-(rva-base_rva), # size
  2532. base_rva=base_rva, level = level+1,
  2533. dirs=dirs + [base_rva + res.OffsetToDirectory])
  2534. if not entry_directory:
  2535. break
  2536. # Ange Albertini's code to process resources' strings
  2537. #
  2538. strings = None
  2539. if entry_id == RESOURCE_TYPE['RT_STRING']:
  2540. strings = dict()
  2541. for resource_id in entry_directory.entries:
  2542. if hasattr(resource_id, 'directory'):
  2543. resource_strings = dict()
  2544. for resource_lang in resource_id.directory.entries:
  2545. if (resource_lang is None or not hasattr(resource_lang, 'data') or
  2546. resource_lang.data.struct.Size is None or resource_id.id is None):
  2547. continue
  2548. string_entry_rva = resource_lang.data.struct.OffsetToData
  2549. string_entry_size = resource_lang.data.struct.Size
  2550. string_entry_id = resource_id.id
  2551. # XXX: has been raising exceptions preventing parsing
  2552. try:
  2553. string_entry_data = self.get_data(string_entry_rva, string_entry_size)
  2554. except:
  2555. self.__warnings.append(
  2556. 'Error parsing resource of type RT_STRING at RVA 0x%x with size %d' %
  2557. (string_entry_rva, string_entry_size))
  2558. continue
  2559. parse_strings(string_entry_data, (int(string_entry_id) - 1) * 16, resource_strings)
  2560. strings.update(resource_strings)
  2561. resource_id.directory.strings = resource_strings
  2562. dir_entries.append(
  2563. ResourceDirEntryData(
  2564. struct = res,
  2565. name = entry_name,
  2566. id = entry_id,
  2567. directory = entry_directory))
  2568. else:
  2569. struct = self.parse_resource_data_entry(
  2570. base_rva + res.OffsetToDirectory)
  2571. if struct:
  2572. self.__total_resource_bytes += struct.Size
  2573. entry_data = ResourceDataEntryData(
  2574. struct = struct,
  2575. lang = res.Name & 0x3ff,
  2576. sublang = res.Name >> 10 )
  2577. dir_entries.append(
  2578. ResourceDirEntryData(
  2579. struct = res,
  2580. name = entry_name,
  2581. id = entry_id,
  2582. data = entry_data))
  2583. else:
  2584. break
  2585. # Check if this entry contains version information
  2586. #
  2587. if level == 0 and res.Id == RESOURCE_TYPE['RT_VERSION']:
  2588. if dir_entries:
  2589. last_entry = dir_entries[-1]
  2590. try:
  2591. version_entries = last_entry.directory.entries[0].directory.entries
  2592. except:
  2593. # Maybe a malformed directory structure...?
  2594. # Let's ignore it
  2595. pass
  2596. else:
  2597. for version_entry in version_entries:
  2598. rt_version_struct = None
  2599. try:
  2600. rt_version_struct = version_entry.data.struct
  2601. except:
  2602. # Maybe a malformed directory structure...?
  2603. # Let's ignore it
  2604. pass
  2605. if rt_version_struct is not None:
  2606. self.parse_version_information(rt_version_struct)
  2607. rva += res.sizeof()
  2608. string_rvas = [s.get_rva() for s in strings_to_postprocess]
  2609. string_rvas.sort()
  2610. for idx, s in enumerate(strings_to_postprocess):
  2611. s.render_pascal_16()
  2612. resource_directory_data = ResourceDirData(
  2613. struct = resource_dir,
  2614. entries = dir_entries)
  2615. return resource_directory_data
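    # Illustrative sketch only ('pe' is an assumed, already-parsed instance):
    # the three-level tree described in the docstring above can be walked
    # roughly like this, reading each resource's bytes through get_data():
    #
    #   for res_type in pe.DIRECTORY_ENTRY_RESOURCE.entries:
    #       if not hasattr(res_type, 'directory'):
    #           continue
    #       for res_id in res_type.directory.entries:
    #           if not hasattr(res_id, 'directory'):
    #               continue
    #           for res_lang in res_id.directory.entries:
    #               if hasattr(res_lang, 'data'):
    #                   data = pe.get_data(res_lang.data.struct.OffsetToData,
    #                                      res_lang.data.struct.Size)
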
    def parse_resource_data_entry(self, rva):
        """Parse a data entry from the resources directory."""

        try:
            # If the RVA is invalid all would blow up. Some EXEs seem to be
            # specially nasty and have an invalid RVA.
            data = self.get_data(rva, Structure(self.__IMAGE_RESOURCE_DATA_ENTRY_format__).sizeof() )
        except PEFormatError as excp:
            self.__warnings.append(
                'Error parsing a resource directory data entry, '
                'the RVA is invalid: 0x%x' % ( rva ) )
            return None

        data_entry = self.__unpack_data__(
            self.__IMAGE_RESOURCE_DATA_ENTRY_format__, data,
            file_offset = self.get_offset_from_rva(rva) )

        return data_entry

    def parse_resource_entry(self, rva):
        """Parse a directory entry from the resources directory."""

        try:
            data = self.get_data( rva, Structure(self.__IMAGE_RESOURCE_DIRECTORY_ENTRY_format__).sizeof() )
        except PEFormatError as excp:
            # A warning will be added by the caller if this method returns None
            return None

        resource = self.__unpack_data__(
            self.__IMAGE_RESOURCE_DIRECTORY_ENTRY_format__, data,
            file_offset = self.get_offset_from_rva(rva) )

        if resource is None:
            return None

        #resource.NameIsString = (resource.Name & 0x80000000L) >> 31
        resource.NameOffset = resource.Name & 0x7FFFFFFF

        resource.__pad = resource.Name & 0xFFFF0000
        resource.Id = resource.Name & 0x0000FFFF

        resource.DataIsDirectory = (resource.OffsetToData & 0x80000000) >> 31
        resource.OffsetToDirectory = resource.OffsetToData & 0x7FFFFFFF

        return resource

  2650. def parse_version_information(self, version_struct):
  2651. """Parse version information structure.
  2652. The date will be made available in three attributes of the PE object.
  2653. VS_VERSIONINFO will contain the first three fields of the main structure:
  2654. 'Length', 'ValueLength', and 'Type'
  2655. VS_FIXEDFILEINFO will hold the rest of the fields, accessible as sub-attributes:
  2656. 'Signature', 'StrucVersion', 'FileVersionMS', 'FileVersionLS',
  2657. 'ProductVersionMS', 'ProductVersionLS', 'FileFlagsMask', 'FileFlags',
  2658. 'FileOS', 'FileType', 'FileSubtype', 'FileDateMS', 'FileDateLS'
  2659. FileInfo is a list of all StringFileInfo and VarFileInfo structures.
  2660. StringFileInfo structures will have a list as an attribute named 'StringTable'
  2661. containing all the StringTable structures. Each of those structures contains a
  2662. dictionary 'entries' with all the key / value version information string pairs.
  2663. VarFileInfo structures will have a list as an attribute named 'Var' containing
  2664. all Var structures. Each Var structure will have a dictionary as an attribute
  2665. named 'entry' which will contain the name and value of the Var.
  2666. """
  2667. # Retrieve the data for the version info resource
  2668. #
  2669. try:
  2670. start_offset = self.get_offset_from_rva(version_struct.OffsetToData)
  2671. except PEFormatError as excp:
  2672. self.__warnings.append(
  2673. 'Error parsing the version information, '
  2674. 'attempting to read OffsetToData with RVA: 0x{:x}'.format(
  2675. version_struct.OffsetToData))
  2676. return
  2677. raw_data = self.__data__[start_offset:start_offset+version_struct.Size]
  2678. # Map the main structure and the subsequent string
  2679. #
  2680. versioninfo_struct = self.__unpack_data__(
  2681. self.__VS_VERSIONINFO_format__, raw_data,
  2682. file_offset = start_offset )
  2683. if versioninfo_struct is None:
  2684. return
  2685. ustr_offset = version_struct.OffsetToData + versioninfo_struct.sizeof()
  2686. section = self.get_section_by_rva(ustr_offset)
  2687. section_end = None
  2688. if section:
  2689. section_end = section.VirtualAddress + max(
  2690. section.SizeOfRawData, section.Misc_VirtualSize)
  2691. versioninfo_string = None
  2692. # These should return 'ascii' decoded data. For the case when it's
  2693. # garbled data the ascii string will retain the byte values while
  2694. # encoding it to something else may yield values that don't match the
  2695. # file's contents.
  2696. try:
  2697. if section_end is None:
  2698. versioninfo_string = self.get_string_u_at_rva(
  2699. ustr_offset, encoding='ascii')
  2700. else:
  2701. versioninfo_string = self.get_string_u_at_rva(
  2702. ustr_offset, (section_end - ustr_offset) >> 1,
  2703. encoding='ascii')
  2704. except PEFormatError as excp:
  2705. self.__warnings.append(
  2706. 'Error parsing the version information, '
  2707. 'attempting to read VS_VERSION_INFO string. Can\'t '
  2708. 'read unicode string at offset 0x%x' % (
  2709. ustr_offset))
  2710. if versioninfo_string == None:
  2711. self.__warnings.append('Invalid VS_VERSION_INFO block: {0}'.format(
  2712. versioninfo_string))
  2713. return
  2714. # If the structure does not contain the expected name, it's assumed to
  2715. # be invalid
  2716. if (versioninfo_string is not None and
  2717. versioninfo_string != b'VS_VERSION_INFO'):
  2718. if len(versioninfo_string) > 128:
  2719. excerpt = versioninfo_string[:128].decode('ascii')
  2720. # Don't leave any half-escaped characters
  2721. excerpt = excerpt[:excerpt.rfind('\\u')]
  2722. versioninfo_string = \
  2723. b('{0} ... ({1} bytes, too long to display)'.format(
  2724. excerpt,
  2725. len(versioninfo_string)))
  2726. self.__warnings.append('Invalid VS_VERSION_INFO block: {0}'.format(
  2727. versioninfo_string.decode('ascii').replace('\00', '\\00')))
  2728. return
  2729. if not hasattr(self, 'VS_VERSIONINFO'):
  2730. self.VS_VERSIONINFO = list()
  2731. # Set the PE object's VS_VERSIONINFO to this one
  2732. vinfo = versioninfo_struct
  2733. # Set the Key attribute to point to the unicode string identifying the structure
  2734. vinfo.Key = versioninfo_string
  2735. self.VS_VERSIONINFO.append(vinfo)
  2736. if versioninfo_string is None:
  2737. versioninfo_string = ''
  2738. # Process the fixed version information, get the offset and structure
  2739. fixedfileinfo_offset = self.dword_align(
  2740. versioninfo_struct.sizeof() + 2 * (len(versioninfo_string) + 1),
  2741. version_struct.OffsetToData)
  2742. fixedfileinfo_struct = self.__unpack_data__(
  2743. self.__VS_FIXEDFILEINFO_format__,
  2744. raw_data[fixedfileinfo_offset:],
  2745. file_offset = start_offset+fixedfileinfo_offset )
  2746. if not fixedfileinfo_struct:
  2747. return
  2748. if not hasattr(self, 'VS_FIXEDFILEINFO'):
  2749. self.VS_FIXEDFILEINFO = list()
  2750. # Set the PE object's VS_FIXEDFILEINFO to this one
  2751. self.VS_FIXEDFILEINFO.append(fixedfileinfo_struct)
  2752. # Start parsing all the StringFileInfo and VarFileInfo structures
  2753. # Get the first one
  2754. stringfileinfo_offset = self.dword_align(
  2755. fixedfileinfo_offset + fixedfileinfo_struct.sizeof(),
  2756. version_struct.OffsetToData)
  2757. original_stringfileinfo_offset = stringfileinfo_offset
  2758. # Set the PE object's attribute that will contain them all.
  2759. if not hasattr(self, 'FileInfo'):
  2760. self.FileInfo = list()
  2761. finfo = list()
  2762. while True:
  2763. # Process the StringFileInfo/VarFileInfo structure
  2764. stringfileinfo_struct = self.__unpack_data__(
  2765. self.__StringFileInfo_format__,
  2766. raw_data[stringfileinfo_offset:],
  2767. file_offset = start_offset+stringfileinfo_offset )
  2768. if stringfileinfo_struct is None:
  2769. self.__warnings.append(
  2770. 'Error parsing StringFileInfo/VarFileInfo struct' )
  2771. return None
  2772. # Get the subsequent string defining the structure.
  2773. ustr_offset = ( version_struct.OffsetToData +
  2774. stringfileinfo_offset + versioninfo_struct.sizeof() )
  2775. try:
  2776. stringfileinfo_string = self.get_string_u_at_rva( ustr_offset )
  2777. except PEFormatError as excp:
  2778. self.__warnings.append(
  2779. 'Error parsing the version information, '
  2780. 'attempting to read StringFileInfo string. Can\'t '
  2781. 'read unicode string at offset 0x{0:x}'.format(ustr_offset))
  2782. break
  2783. # Set such string as the Key attribute
  2784. stringfileinfo_struct.Key = stringfileinfo_string
  2785. # Append the structure to the PE object's list
  2786. finfo.append(stringfileinfo_struct)
  2787. # Parse a StringFileInfo entry
  2788. if stringfileinfo_string and stringfileinfo_string.startswith(b'StringFileInfo'):
  2789. if stringfileinfo_struct.Type in (0,1) and stringfileinfo_struct.ValueLength == 0:
  2790. stringtable_offset = self.dword_align(
  2791. stringfileinfo_offset + stringfileinfo_struct.sizeof() +
  2792. 2*(len(stringfileinfo_string)+1),
  2793. version_struct.OffsetToData)
  2794. stringfileinfo_struct.StringTable = list()
  2795. # Process the String Table entries
  2796. while True:
  2797. stringtable_struct = self.__unpack_data__(
  2798. self.__StringTable_format__,
  2799. raw_data[stringtable_offset:],
  2800. file_offset = start_offset+stringtable_offset )
  2801. if not stringtable_struct:
  2802. break
  2803. ustr_offset = ( version_struct.OffsetToData + stringtable_offset +
  2804. stringtable_struct.sizeof() )
  2805. try:
  2806. stringtable_string = self.get_string_u_at_rva(ustr_offset)
  2807. except PEFormatError as excp:
  2808. self.__warnings.append(
  2809. 'Error parsing the version information, '
  2810. 'attempting to read StringTable string. Can\'t '
  2811. 'read unicode string at offset 0x{0:x}'.format(ustr_offset) )
  2812. break
  2813. stringtable_struct.LangID = stringtable_string
  2814. stringtable_struct.entries = dict()
  2815. stringtable_struct.entries_offsets = dict()
  2816. stringtable_struct.entries_lengths = dict()
  2817. stringfileinfo_struct.StringTable.append(stringtable_struct)
  2818. entry_offset = self.dword_align(
  2819. stringtable_offset + stringtable_struct.sizeof() +
  2820. 2*(len(stringtable_string)+1),
  2821. version_struct.OffsetToData)
  2822. # Process all entries in the string table
  2823. while entry_offset < stringtable_offset + stringtable_struct.Length:
  2824. string_struct = self.__unpack_data__(
  2825. self.__String_format__, raw_data[entry_offset:],
  2826. file_offset = start_offset+entry_offset )
  2827. if not string_struct:
  2828. break
  2829. ustr_offset = ( version_struct.OffsetToData + entry_offset +
  2830. string_struct.sizeof() )
  2831. try:
  2832. key = self.get_string_u_at_rva( ustr_offset )
  2833. key_offset = self.get_offset_from_rva( ustr_offset )
  2834. except PEFormatError as excp:
  2835. self.__warnings.append(
  2836. 'Error parsing the version information, '
  2837. 'attempting to read StringTable Key string. Can\'t '
  2838. 'read unicode string at offset 0x{0:x}'.format(ustr_offset))
  2839. break
  2840. value_offset = self.dword_align(
  2841. 2*(len(key)+1) + entry_offset + string_struct.sizeof(),
  2842. version_struct.OffsetToData)
  2843. ustr_offset = version_struct.OffsetToData + value_offset
  2844. try:
  2845. value = self.get_string_u_at_rva( ustr_offset,
  2846. max_length = string_struct.ValueLength )
  2847. value_offset = self.get_offset_from_rva( ustr_offset )
  2848. except PEFormatError as excp:
  2849. self.__warnings.append(
  2850. 'Error parsing the version information, '
  2851. 'attempting to read StringTable Value string. '
  2852. 'Can\'t read unicode string at offset 0x{0:x}'.format(
  2853. ustr_offset))
  2854. break
  2855. if string_struct.Length == 0:
  2856. entry_offset = stringtable_offset + stringtable_struct.Length
  2857. else:
  2858. entry_offset = self.dword_align(
  2859. string_struct.Length+entry_offset, version_struct.OffsetToData)
  2860. stringtable_struct.entries[key] = value
  2861. stringtable_struct.entries_offsets[key] = (key_offset, value_offset)
  2862. stringtable_struct.entries_lengths[key] = (len(key), len(value))
  2863. new_stringtable_offset = self.dword_align(
  2864. stringtable_struct.Length + stringtable_offset,
  2865. version_struct.OffsetToData)
  2866. # Check if the entry is crafted in a way that would lead
  2867. # to an infinite loop and break if so.
  2868. if new_stringtable_offset == stringtable_offset:
  2869. break
  2870. stringtable_offset = new_stringtable_offset
  2871. if stringtable_offset >= stringfileinfo_struct.Length:
  2872. break
  2873. # Parse a VarFileInfo entry
  2874. elif stringfileinfo_string and stringfileinfo_string.startswith( b'VarFileInfo' ):
  2875. varfileinfo_struct = stringfileinfo_struct
  2876. varfileinfo_struct.name = 'VarFileInfo'
  2877. if varfileinfo_struct.Type in (0, 1) and varfileinfo_struct.ValueLength == 0:
  2878. var_offset = self.dword_align(
  2879. stringfileinfo_offset + varfileinfo_struct.sizeof() +
  2880. 2*(len(stringfileinfo_string)+1),
  2881. version_struct.OffsetToData)
  2882. varfileinfo_struct.Var = list()
  2883. # Process all entries
  2884. while True:
  2885. var_struct = self.__unpack_data__(
  2886. self.__Var_format__,
  2887. raw_data[var_offset:],
  2888. file_offset = start_offset+var_offset )
  2889. if not var_struct:
  2890. break
  2891. ustr_offset = ( version_struct.OffsetToData + var_offset +
  2892. var_struct.sizeof() )
  2893. try:
  2894. var_string = self.get_string_u_at_rva( ustr_offset )
  2895. except PEFormatError as excp:
  2896. self.__warnings.append(
  2897. 'Error parsing the version information, '
  2898. 'attempting to read VarFileInfo Var string. '
  2899. 'Can\'t read unicode string at offset 0x{0:x}'.format(ustr_offset))
  2900. break
  2901. if var_string is None:
  2902. break
  2903. varfileinfo_struct.Var.append(var_struct)
  2904. varword_offset = self.dword_align(
  2905. 2*(len(var_string)+1) + var_offset + var_struct.sizeof(),
  2906. version_struct.OffsetToData)
  2907. orig_varword_offset = varword_offset
  2908. while varword_offset < orig_varword_offset + var_struct.ValueLength:
  2909. word1 = self.get_word_from_data(
  2910. raw_data[varword_offset:varword_offset+2], 0)
  2911. word2 = self.get_word_from_data(
  2912. raw_data[varword_offset+2:varword_offset+4], 0)
  2913. varword_offset += 4
  2914. if isinstance(word1, int) and isinstance(word2, int):
  2915. var_struct.entry = {var_string: '0x%04x 0x%04x' % (word1, word2)}
  2916. var_offset = self.dword_align(
  2917. var_offset+var_struct.Length, version_struct.OffsetToData)
  2918. if var_offset <= var_offset+var_struct.Length:
  2919. break
  2920. # Increment and align the offset
  2921. stringfileinfo_offset = self.dword_align(
  2922. stringfileinfo_struct.Length+stringfileinfo_offset,
  2923. version_struct.OffsetToData)
  2924. # Check if all the StringFileInfo and VarFileInfo items have been processed
  2925. if stringfileinfo_struct.Length == 0 or stringfileinfo_offset >= versioninfo_struct.Length:
  2926. break
  2927. self.FileInfo.append(finfo)
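    # Illustrative sketch only ('pe' is an assumed, already-parsed instance):
    # since each call appends a list ('finfo') to self.FileInfo, the version
    # strings can be read with a nested walk such as:
    #
    #   for finfo in getattr(pe, 'FileInfo', []):
    #       for entry in finfo:
    #           for st in getattr(entry, 'StringTable', []):
    #               for key, value in st.entries.items():
    #                   print(key, value)
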
  2928. def parse_export_directory(self, rva, size, forwarded_only=False):
  2929. """Parse the export directory.
  2930. Given the RVA of the export directory, it will process all
  2931. its entries.
  2932. The exports will be made available as a list of ExportData
  2933. instances in the 'IMAGE_DIRECTORY_ENTRY_EXPORT' PE attribute.
  2934. """
  2935. try:
  2936. export_dir = self.__unpack_data__(
  2937. self.__IMAGE_EXPORT_DIRECTORY_format__,
  2938. self.get_data( rva, Structure(self.__IMAGE_EXPORT_DIRECTORY_format__).sizeof() ),
  2939. file_offset = self.get_offset_from_rva(rva) )
  2940. except PEFormatError:
  2941. self.__warnings.append(
  2942. 'Error parsing export directory at RVA: 0x%x' % ( rva ) )
  2943. return
  2944. if not export_dir:
  2945. return
  2946. # We keep track of the bytes left in the file and use it to set a upper
  2947. # bound in the number of items that can be read from the different
  2948. # arrays.
  2949. def length_until_eof(rva):
  2950. return len(self.__data__) - self.get_offset_from_rva(rva)
  2951. try:
  2952. address_of_names = self.get_data(
  2953. export_dir.AddressOfNames,
  2954. min(length_until_eof(export_dir.AddressOfNames),
  2955. export_dir.NumberOfNames*4))
  2956. address_of_name_ordinals = self.get_data(
  2957. export_dir.AddressOfNameOrdinals,
  2958. min(length_until_eof(export_dir.AddressOfNameOrdinals),
  2959. export_dir.NumberOfNames*4))
  2960. address_of_functions = self.get_data(
  2961. export_dir.AddressOfFunctions,
  2962. min(length_until_eof(export_dir.AddressOfFunctions),
  2963. export_dir.NumberOfFunctions*4))
  2964. except PEFormatError:
  2965. self.__warnings.append(
  2966. 'Error parsing export directory at RVA: 0x%x' % ( rva ) )
  2967. return
  2968. exports = []
  2969. max_failed_entries_before_giving_up = 10
  2970. section = self.get_section_by_rva(export_dir.AddressOfNames)
  2971. # Overly generous upper bound
  2972. safety_boundary = len(self.__data__)
  2973. if section:
  2974. safety_boundary = (
  2975. section.VirtualAddress + len(section.get_data()) -
  2976. export_dir.AddressOfNames)
  2977. symbol_counts = collections.defaultdict(int)
  2978. export_parsing_loop_completed_normally = True
  2979. for i in range(min(export_dir.NumberOfNames, int(safety_boundary / 4))):
  2980. symbol_ordinal = self.get_word_from_data(
  2981. address_of_name_ordinals, i)
  2982. if (symbol_ordinal is not None and
  2983. symbol_ordinal*4 < len(address_of_functions)):
  2984. symbol_address = self.get_dword_from_data(
  2985. address_of_functions, symbol_ordinal)
  2986. else:
  2987. # Corrupt? a bad pointer... we assume it's all
  2988. # useless, no exports
  2989. return None
  2990. if symbol_address is None or symbol_address == 0:
  2991. continue
  2992. # If the function's RVA points within the export directory
  2993. # it will point to a string with the forwarded symbol's string
  2994. # instead of pointing the the function start address.
  2995. if symbol_address >= rva and symbol_address < rva+size:
  2996. forwarder_str = self.get_string_at_rva(symbol_address)
  2997. try:
  2998. forwarder_offset = self.get_offset_from_rva( symbol_address )
  2999. except PEFormatError:
  3000. continue
  3001. else:
  3002. if forwarded_only:
  3003. continue
  3004. forwarder_str = None
  3005. forwarder_offset = None
  3006. symbol_name_address = self.get_dword_from_data(address_of_names, i)
  3007. if symbol_name_address is None:
  3008. max_failed_entries_before_giving_up -= 1
  3009. if max_failed_entries_before_giving_up <= 0:
  3010. export_parsing_loop_completed_normally = False
  3011. break
  3012. symbol_name = self.get_string_at_rva(symbol_name_address, MAX_SYMBOL_NAME_LENGTH)
  3013. if not is_valid_function_name(symbol_name):
  3014. export_parsing_loop_completed_normally = False
  3015. break
  3016. try:
  3017. symbol_name_offset = self.get_offset_from_rva(symbol_name_address)
  3018. except PEFormatError:
  3019. max_failed_entries_before_giving_up -= 1
  3020. if max_failed_entries_before_giving_up <= 0:
  3021. export_parsing_loop_completed_normally = False
  3022. break
  3023. try:
  3024. symbol_name_offset = self.get_offset_from_rva( symbol_name_address )
  3025. except PEFormatError:
  3026. max_failed_entries_before_giving_up -= 1
  3027. if max_failed_entries_before_giving_up <= 0:
  3028. export_parsing_loop_completed_normally = False
  3029. break
  3030. continue
  3031. # File 0b1d3d3664915577ab9a32188d29bbf3542b86c7b9ce333e245496c3018819f1
  3032. # was being parsed as potentially containing millions of exports.
  3033. # Checking for duplicates addresses the issue.
  3034. symbol_counts[(symbol_name, symbol_address)] += 1
  3035. if symbol_counts[(symbol_name, symbol_address)] > 10:
  3036. self.__warnings.append(
  3037. f'Export directory contains more than 10 repeated entries '
  3038. f'({symbol_name}, {symbol_address:#02x}). Assuming corrupt.')
  3039. break
  3040. elif len(symbol_counts) > self.max_symbol_exports:
  3041. self.__warnings.append(
  3042. 'Export directory contains more than {} symbol entries. '
  3043. 'Assuming corrupt.'.format(self.max_symbol_exports))
  3044. break
  3045. exports.append(
  3046. ExportData(
  3047. pe = self,
  3048. ordinal = export_dir.Base+symbol_ordinal,
  3049. ordinal_offset = self.get_offset_from_rva( export_dir.AddressOfNameOrdinals + 2*i ),
  3050. address = symbol_address,
  3051. address_offset = self.get_offset_from_rva( export_dir.AddressOfFunctions + 4*symbol_ordinal ),
  3052. name = symbol_name,
  3053. name_offset = symbol_name_offset,
  3054. forwarder = forwarder_str,
  3055. forwarder_offset = forwarder_offset ))
  3056. if not export_parsing_loop_completed_normally:
  3057. self.__warnings.append(
  3058. 'RVA AddressOfNames in the export directory points to an invalid address: %x' %
  3059. export_dir.AddressOfNames)
  3060. ordinals = {exp.ordinal for exp in exports}
  3061. max_failed_entries_before_giving_up = 10
  3062. section = self.get_section_by_rva(export_dir.AddressOfFunctions)
  3063. # Overly generous upper bound
  3064. safety_boundary = len(self.__data__)
  3065. if section:
  3066. safety_boundary = (
  3067. section.VirtualAddress + len(section.get_data()) -
  3068. export_dir.AddressOfFunctions)
  3069. symbol_counts = collections.defaultdict(int)
  3070. export_parsing_loop_completed_normally = True
  3071. for idx in range(min(
  3072. export_dir.NumberOfFunctions,
  3073. int(safety_boundary / 4))):
  3074. if not idx+export_dir.Base in ordinals:
  3075. try:
  3076. symbol_address = self.get_dword_from_data(
  3077. address_of_functions, idx)
  3078. except PEFormatError:
  3079. symbol_address = None
  3080. if symbol_address is None:
  3081. max_failed_entries_before_giving_up -= 1
  3082. if max_failed_entries_before_giving_up <= 0:
  3083. export_parsing_loop_completed_normally = False
  3084. break
  3085. if symbol_address == 0:
  3086. continue
  3087. # Checking for forwarder again.
  3088. if symbol_address is not None and symbol_address >= rva and symbol_address < rva+size:
  3089. forwarder_str = self.get_string_at_rva(symbol_address)
  3090. else:
  3091. forwarder_str = None
  3092. # File 0b1d3d3664915577ab9a32188d29bbf3542b86c7b9ce333e245496c3018819f1
  3093. # was being parsed as potentially containing millions of exports.
  3094. # Checking for duplicates addresses the issue.
  3095. symbol_counts[symbol_address] += 1
  3096. if symbol_counts[symbol_address] > self.max_repeated_symbol:
  3097. # if most_common and most_common[0][1] > 10:
  3098. self.__warnings.append(
  3099. 'Export directory contains more than {} repeated '
  3100. 'ordinal entries (0x{:x}). Assuming corrupt.'.format(
  3101. self.max_repeated_symbol, symbol_address))
  3102. break
  3103. elif len(symbol_counts) > self.max_symbol_exports:
  3104. self.__warnings.append(
  3105. 'Export directory contains more than {} ordinal entries. Assuming corrupt.'.format(
  3106. self.max_symbol_exports))
  3107. break
  3108. exports.append(
  3109. ExportData(
  3110. ordinal = export_dir.Base+idx,
  3111. address = symbol_address,
  3112. name = None,
  3113. forwarder = forwarder_str))
  3114. if not export_parsing_loop_completed_normally:
  3115. self.__warnings.append(
  3116. 'RVA AddressOfFunctions in the export directory points to an invalid address: %x' %
  3117. export_dir.AddressOfFunctions)
  3118. return
  3119. if not exports and export_dir.all_zeroes():
  3120. return None
  3121. return ExportDirData(struct=export_dir, symbols=exports,
  3122. name=self.get_string_at_rva(export_dir.Name))
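    # Illustrative sketch only ('pe' is an assumed, already-parsed instance):
    # the parsed exports are exposed as DIRECTORY_ENTRY_EXPORT with the
    # ExportData list in 'symbols', so regular and forwarded exports can be
    # listed like this:
    #
    #   if hasattr(pe, 'DIRECTORY_ENTRY_EXPORT'):
    #       for exp in pe.DIRECTORY_ENTRY_EXPORT.symbols:
    #           print(exp.ordinal, exp.name, exp.forwarder)
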
    def dword_align(self, offset, base):
        return ((offset+base+3) & 0xfffffffc) - (base & 0xfffffffc)

    def normalize_import_va(self, va):

        # Setup image range
        begin_of_image = self.OPTIONAL_HEADER.ImageBase
        end_of_image = self.OPTIONAL_HEADER.ImageBase + self.OPTIONAL_HEADER.SizeOfImage

        # Try to avoid bogus VAs, which are out of the image.
        # This also filters out entries that are zero
        if begin_of_image <= va and va < end_of_image:
            va -= begin_of_image

        return va

  3134. def parse_delay_import_directory(self, rva, size):
  3135. """Walk and parse the delay import directory."""
  3136. import_descs = []
  3137. error_count = 0
  3138. while True:
  3139. try:
  3140. # If the RVA is invalid all would blow up. Some PEs seem to be
  3141. # specially nasty and have an invalid RVA.
  3142. data = self.get_data( rva, Structure(self.__IMAGE_DELAY_IMPORT_DESCRIPTOR_format__).sizeof() )
  3143. except PEFormatError as e:
  3144. self.__warnings.append(
  3145. 'Error parsing the Delay import directory at RVA: 0x%x' % ( rva ) )
  3146. break
  3147. file_offset = self.get_offset_from_rva(rva)
  3148. import_desc = self.__unpack_data__(
  3149. self.__IMAGE_DELAY_IMPORT_DESCRIPTOR_format__,
  3150. data, file_offset = file_offset )
  3151. # If the structure is all zeros, we reached the end of the list
  3152. if not import_desc or import_desc.all_zeroes():
  3153. break
  3154. contains_addresses = False
  3155. # Handle old import descriptor that has Virtual Addresses instead of RVAs
  3156. # This version of import descriptor is created by old Visual Studio versions (pre 6.0)
  3157. # Can only be present in 32-bit binaries (no 64-bit compiler existed at the time)
  3158. # Sample: e8d3bff0c1a9a6955993f7a441121a2692261421e82fdfadaaded45d3bea9980
  3159. if import_desc.grAttrs == 0 and self.FILE_HEADER.Machine == MACHINE_TYPE['IMAGE_FILE_MACHINE_I386']:
  3160. import_desc.pBoundIAT = self.normalize_import_va(import_desc.pBoundIAT)
  3161. import_desc.pIAT = self.normalize_import_va(import_desc.pIAT)
  3162. import_desc.pINT = self.normalize_import_va(import_desc.pINT)
  3163. import_desc.pUnloadIAT = self.normalize_import_va(import_desc.pUnloadIAT)
  3164. import_desc.phmod = self.normalize_import_va(import_desc.pUnloadIAT)
  3165. import_desc.szName = self.normalize_import_va(import_desc.szName)
  3166. contains_addresses = True
  3167. rva += import_desc.sizeof()
  3168. # If the array of thunks is somewhere earlier than the import
  3169. # descriptor we can set a maximum length for the array. Otherwise
  3170. # just set a maximum length of the size of the file
  3171. max_len = len(self.__data__) - file_offset
  3172. if rva > import_desc.pINT or rva > import_desc.pIAT:
  3173. max_len = max(rva-import_desc.pINT, rva-import_desc.pIAT)
  3174. import_data = []
  3175. try:
  3176. import_data = self.parse_imports(
  3177. import_desc.pINT,
  3178. import_desc.pIAT,
  3179. None,
  3180. max_len,
  3181. contains_addresses)
  3182. except PEFormatError as e:
  3183. self.__warnings.append(
  3184. 'Error parsing the Delay import directory. '
  3185. 'Invalid import data at RVA: 0x{0:x} ({1})'.format(
  3186. rva, e.value))
  3187. if error_count > 5:
  3188. self.__warnings.append(
  3189. 'Too many errors parsing the Delay import directory. '
  3190. 'Invalid import data at RVA: 0x{0:x}'.format(rva) )
  3191. break
  3192. if not import_data:
  3193. error_count += 1
  3194. continue
  3195. if self.__total_import_symbols > MAX_IMPORT_SYMBOLS:
  3196. self.__warnings.append(
  3197. 'Error, too many imported symbols %d (>%s)' %
  3198. (self.__total_import_symbols, MAX_IMPORT_SYMBOLS) )
  3199. break
  3200. dll = self.get_string_at_rva(import_desc.szName, MAX_DLL_LENGTH)
  3201. if not is_valid_dos_filename(dll):
  3202. dll = b('*invalid*')
  3203. if dll:
  3204. for symbol in import_data:
  3205. if symbol.name is None:
  3206. funcname = ordlookup.ordLookup(dll.lower(), symbol.ordinal)
  3207. if funcname:
  3208. symbol.name = funcname
  3209. import_descs.append(
  3210. ImportDescData(
  3211. struct = import_desc,
  3212. imports = import_data,
  3213. dll = dll))
  3214. return import_descs
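    # Illustrative sketch only ('pe' is an assumed, already-parsed instance):
    # delay-loaded DLLs and their symbols end up in
    # DIRECTORY_ENTRY_DELAY_IMPORT:
    #
    #   for entry in getattr(pe, 'DIRECTORY_ENTRY_DELAY_IMPORT', []):
    #       print(entry.dll)
    #       for imp in entry.imports:
    #           print('  ', imp.ordinal, imp.name)
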
    def get_imphash(self):

        impstrs = []
        exts = ['ocx', 'sys', 'dll']
        if not hasattr(self, "DIRECTORY_ENTRY_IMPORT"):
            return ""
        for entry in self.DIRECTORY_ENTRY_IMPORT:
            if isinstance(entry.dll, bytes):
                libname = entry.dll.decode().lower()
            else:
                libname = entry.dll.lower()
            parts = libname.rsplit('.', 1)
            if len(parts) > 1 and parts[1] in exts:
                libname = parts[0]

            for imp in entry.imports:
                funcname = None
                if not imp.name:
                    funcname = ordlookup.ordLookup(entry.dll.lower(), imp.ordinal, make_name=True)
                    if not funcname:
                        raise Exception("Unable to look up ordinal %s:%04x" % (entry.dll, imp.ordinal))
                else:
                    funcname = imp.name

                if not funcname:
                    continue

                if isinstance(funcname, bytes):
                    funcname = funcname.decode()
                impstrs.append('%s.%s' % (libname.lower(), funcname.lower()))

        return md5( ','.join( impstrs ).encode() ).hexdigest()

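    # Illustrative sketch only (the file name is an assumption): the import
    # hash is derived from the DLL/function strings collected above, so the
    # import directory must have been parsed first:
    #
    #   pe = PE('sample.exe')
    #   print(pe.get_imphash())  # 32-character hex digest, '' if no imports
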
  3242. def parse_import_directory(self, rva, size, dllnames_only=False):
  3243. """Walk and parse the import directory."""
  3244. import_descs = []
  3245. error_count = 0
  3246. while True:
  3247. try:
  3248. # If the RVA is invalid all would blow up. Some EXEs seem to be
  3249. # specially nasty and have an invalid RVA.
  3250. data = self.get_data(rva, Structure(
  3251. self.__IMAGE_IMPORT_DESCRIPTOR_format__).sizeof() )
  3252. except PEFormatError as e:
  3253. self.__warnings.append(
  3254. 'Error parsing the import directory at RVA: 0x%x' % ( rva ) )
  3255. break
  3256. file_offset = self.get_offset_from_rva(rva)
  3257. import_desc = self.__unpack_data__(
  3258. self.__IMAGE_IMPORT_DESCRIPTOR_format__,
  3259. data, file_offset = file_offset )
  3260. # If the structure is all zeros, we reached the end of the list
  3261. if not import_desc or import_desc.all_zeroes():
  3262. break
  3263. rva += import_desc.sizeof()
  3264. # If the array of thunks is somewhere earlier than the import
  3265. # descriptor we can set a maximum length for the array. Otherwise
  3266. # just set a maximum length of the size of the file
  3267. max_len = len(self.__data__) - file_offset
  3268. if rva > import_desc.OriginalFirstThunk or rva > import_desc.FirstThunk:
  3269. max_len = max(rva-import_desc.OriginalFirstThunk, rva-import_desc.FirstThunk)
  3270. import_data = []
  3271. if not dllnames_only:
  3272. try:
  3273. import_data = self.parse_imports(
  3274. import_desc.OriginalFirstThunk,
  3275. import_desc.FirstThunk,
  3276. import_desc.ForwarderChain,
  3277. max_length = max_len)
  3278. except PEFormatError as e:
  3279. self.__warnings.append(
  3280. 'Error parsing the import directory. '
  3281. 'Invalid Import data at RVA: 0x{0:x} ({1})'.format(
  3282. rva, e.value))
  3283. if error_count > 5:
  3284. self.__warnings.append(
  3285. 'Too many errors parsing the import directory. '
  3286. 'Invalid import data at RVA: 0x{0:x}'.format(rva) )
  3287. break
  3288. if not import_data:
  3289. error_count += 1
  3290. # TODO: do not continue here
  3291. continue
  3292. dll = self.get_string_at_rva(import_desc.Name, MAX_DLL_LENGTH)
  3293. if not is_valid_dos_filename(dll):
  3294. dll = b('*invalid*')
  3295. if dll:
  3296. for symbol in import_data:
  3297. if symbol.name is None:
  3298. funcname = ordlookup.ordLookup(dll.lower(), symbol.ordinal)
  3299. if funcname:
  3300. symbol.name = funcname
  3301. import_descs.append(
  3302. ImportDescData(
  3303. struct = import_desc,
  3304. imports = import_data,
  3305. dll = dll))
  3306. if not dllnames_only:
  3307. suspicious_imports = set([ u'LoadLibrary', u'GetProcAddress' ])
  3308. suspicious_imports_count = 0
  3309. total_symbols = 0
  3310. for imp_dll in import_descs:
  3311. for symbol in imp_dll.imports:
  3312. for suspicious_symbol in suspicious_imports:
  3313. if not symbol or not symbol.name:
  3314. continue
  3315. name = symbol.name
  3316. if type(symbol.name) == bytes:
  3317. name = symbol.name.decode('utf-8')
  3318. if name.startswith(suspicious_symbol):
  3319. suspicious_imports_count += 1
  3320. break
  3321. total_symbols += 1
  3322. if suspicious_imports_count == len(suspicious_imports) and total_symbols < 20:
  3323. self.__warnings.append(
  3324. 'Imported symbols contain entries typical of packed executables.' )
  3325. return import_descs
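    # Illustrative sketch only ('pe' is an assumed, already-parsed instance):
    # the regular import table parsed here becomes DIRECTORY_ENTRY_IMPORT, one
    # ImportDescData per DLL with its ImportData symbols:
    #
    #   for entry in getattr(pe, 'DIRECTORY_ENTRY_IMPORT', []):
    #       print(entry.dll)
    #       for imp in entry.imports:
    #           print('  ', hex(imp.address), imp.name)
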
  3326. def parse_imports(
  3327. self, original_first_thunk, first_thunk,
  3328. forwarder_chain, max_length=None, contains_addresses=False):
  3329. """Parse the imported symbols.
  3330. It will fill a list, which will be available as the dictionary
  3331. attribute "imports". Its keys will be the DLL names and the values
  3332. of all the symbols imported from that object.
  3333. """

        imported_symbols = []

        # Import Lookup Table. Contains ordinals or pointers to strings.
        ilt = self.get_import_table(original_first_thunk, max_length, contains_addresses)

        # Import Address Table. May have identical content to ILT if
        # PE file is not bound. It will contain the address of the
        # imported symbols once the binary is loaded or if it is already
        # bound.
        iat = self.get_import_table(first_thunk, max_length, contains_addresses)

        # OC Patch:
        # Would crash if IAT or ILT had None type
        if (not iat or len(iat)==0) and (not ilt or len(ilt)==0):
            self.__warnings.append(
                'Damaged Import Table information. '
                'ILT and/or IAT appear to be broken. '
                'OriginalFirstThunk: 0x{0:x} FirstThunk: 0x{1:x}'.format(
                    original_first_thunk, first_thunk))
            return []

        table = None
        if ilt:
            table = ilt
        elif iat:
            table = iat
        else:
            return None

        imp_offset = 4
        address_mask = 0x7fffffff
        if self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE:
            ordinal_flag = IMAGE_ORDINAL_FLAG
        elif self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE_PLUS:
            ordinal_flag = IMAGE_ORDINAL_FLAG64
            imp_offset = 8
            address_mask = 0x7fffffffffffffff
        else:
            # Some PEs may have an invalid value in the Magic field of the
            # Optional Header. Just in case the remaining file is parseable
            # let's pretend it's a 32bit PE32 by default.
            ordinal_flag = IMAGE_ORDINAL_FLAG

        num_invalid = 0
        for idx in range(len(table)):

            imp_ord = None
            imp_hint = None
            imp_name = None
            name_offset = None
            hint_name_table_rva = None

            if table[idx].AddressOfData:
                # If imported by ordinal, we will append the ordinal number
                #
                if table[idx].AddressOfData & ordinal_flag:
                    import_by_ordinal = True
                    imp_ord = table[idx].AddressOfData & 0xffff
                    imp_name = None
                    name_offset = None
                else:
                    import_by_ordinal = False
                    try:
                        hint_name_table_rva = table[idx].AddressOfData & address_mask
                        data = self.get_data(hint_name_table_rva, 2)
                        # Get the Hint
                        imp_hint = self.get_word_from_data(data, 0)
                        imp_name = self.get_string_at_rva(table[idx].AddressOfData+2, MAX_IMPORT_NAME_LENGTH)
                        if not is_valid_function_name(imp_name):
                            imp_name = b('*invalid*')
                        name_offset = self.get_offset_from_rva(table[idx].AddressOfData+2)
                    except PEFormatError as e:
                        pass

                # by nriva: we want the ThunkRVA and ThunkOffset
                thunk_offset = table[idx].get_file_offset()
                thunk_rva = self.get_rva_from_offset(thunk_offset)

            imp_address = first_thunk + self.OPTIONAL_HEADER.ImageBase + idx * imp_offset

            struct_iat = None
            try:
                if iat and ilt and ilt[idx].AddressOfData != iat[idx].AddressOfData:
                    imp_bound = iat[idx].AddressOfData
                    struct_iat = iat[idx]
                else:
                    imp_bound = None
            except IndexError:
                imp_bound = None

            # The file with hashes:
            #
            # MD5: bfe97192e8107d52dd7b4010d12b2924
            # SHA256: 3d22f8b001423cb460811ab4f4789f277b35838d45c62ec0454c877e7c82c7f5
            #
            # has an invalid table built in a way that it's parseable but contains invalid
            # entries that lead pefile to take extremely long amounts of time to
            # parse. It also leads to extreme memory consumption.
            # To prevent similar cases, if invalid entries are found in the middle of a
            # table the parsing will be aborted
            #
            if imp_ord == None and imp_name == None:
                raise PEFormatError('Invalid entries, aborting parsing.')

            # Some PEs appear to interleave valid and invalid imports. Instead of
            # aborting the parsing altogether we will simply skip the invalid entries.
            # Although if we see 1000 invalid entries and no legit ones, we abort.
            if imp_name == b('*invalid*'):
                if num_invalid > 1000 and num_invalid == idx:
                    raise PEFormatError('Too many invalid names, aborting parsing.')
                num_invalid += 1
                continue

            if imp_ord or imp_name:
                imported_symbols.append(
                    ImportData(
                        pe = self,
                        struct_table = table[idx],
                        struct_iat = struct_iat, # for bound imports if any
                        import_by_ordinal = import_by_ordinal,
                        ordinal = imp_ord,
                        ordinal_offset = table[idx].get_file_offset(),
                        hint = imp_hint,
                        name = imp_name,
                        name_offset = name_offset,
                        bound = imp_bound,
                        address = imp_address,
                        hint_name_table_rva = hint_name_table_rva,
                        thunk_offset = thunk_offset,
                        thunk_rva = thunk_rva ))

        return imported_symbols

    def get_import_table(self, rva, max_length=None, contains_addresses=False):

        table = []

        # We need the ordinal flag for a simple heuristic
        # we're implementing within the loop
        #
        if self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE:
            ordinal_flag = IMAGE_ORDINAL_FLAG
            format = self.__IMAGE_THUNK_DATA_format__
        elif self.PE_TYPE == OPTIONAL_HEADER_MAGIC_PE_PLUS:
            ordinal_flag = IMAGE_ORDINAL_FLAG64
            format = self.__IMAGE_THUNK_DATA64_format__
        else:
            # Some PEs may have an invalid value in the Magic field of the
            # Optional Header. Just in case the remaining file is parseable
            # let's pretend it's a 32bit PE32 by default.
            ordinal_flag = IMAGE_ORDINAL_FLAG
            format = self.__IMAGE_THUNK_DATA_format__

        MAX_ADDRESS_SPREAD = 128*2**20  # 128 MB
        MAX_REPEATED_ADDRESSES = 15
        repeated_address = 0
        addresses_of_data_set_64 = set()
        addresses_of_data_set_32 = set()
        start_rva = rva
        while True and rva:

            if max_length is not None and rva >= start_rva+max_length:
                self.__warnings.append(
                    'Error parsing the import table. Entries go beyond bounds.')
                break

            # Enforce an upper bound on the number of import symbols.
            if self.__total_import_symbols > MAX_IMPORT_SYMBOLS:
                self.__warnings.append(
                    'Excessive number of imports %d (>%s)' %
                    (self.__total_import_symbols, MAX_IMPORT_SYMBOLS) )
                break

            self.__total_import_symbols += 1

            # If we see the same entry too many times we assume the table could
            # contain bogus data (with malicious intent or otherwise)
            if repeated_address >= MAX_REPEATED_ADDRESSES:
                return []

            # if the addresses point somewhere but the difference between the highest
            # and lowest address is larger than MAX_ADDRESS_SPREAD we assume a bogus
            # table as the addresses should be contained within a module
            if (addresses_of_data_set_32 and
                    max(addresses_of_data_set_32) - min(addresses_of_data_set_32) > MAX_ADDRESS_SPREAD ):
                return []
            if (addresses_of_data_set_64 and
                    max(addresses_of_data_set_64) - min(addresses_of_data_set_64) > MAX_ADDRESS_SPREAD ):
                return []

            failed = False
            try:
                data = self.get_data(rva, Structure(format).sizeof())
            except PEFormatError as e:
                failed = True

            if failed or len(data) != Structure(format).sizeof():
                self.__warnings.append(
                    'Error parsing the import table. '
                    'Invalid data at RVA: 0x%x' % rva)
                return None

            thunk_data = self.__unpack_data__(
                format, data, file_offset=self.get_offset_from_rva(rva) )

            # If the thunk data contains VAs instead of RVAs, we need to normalize them
            if contains_addresses:
                thunk_data.AddressOfData = self.normalize_import_va(thunk_data.AddressOfData)
                thunk_data.ForwarderString = self.normalize_import_va(thunk_data.ForwarderString)
                thunk_data.Function = self.normalize_import_va(thunk_data.Function)
                thunk_data.Ordinal = self.normalize_import_va(thunk_data.Ordinal)

            # Check if the AddressOfData lies within the range of RVAs that it's
            # being scanned, abort if that is the case, as it is very unlikely
            # to be legitimate data.
            # Seen in PE with SHA256:
            # 5945bb6f0ac879ddf61b1c284f3b8d20c06b228e75ae4f571fa87f5b9512902c
            if thunk_data and thunk_data.AddressOfData >= start_rva and thunk_data.AddressOfData <= rva:
                self.__warnings.append(
                    'Error parsing the import table. '
                    'AddressOfData overlaps with THUNK_DATA for '
                    'THUNK at RVA 0x%x' % ( rva ) )
                break

            if thunk_data and thunk_data.AddressOfData:
                # If the entry looks like it could be an ordinal...
                if thunk_data.AddressOfData & ordinal_flag:
                    # ...but its value is beyond 2^16, we will assume it's
                    # corrupted and ignore it altogether
                    if thunk_data.AddressOfData & 0x7fffffff > 0xffff:
                        return []
                # and if it looks like it should be an RVA
                else:
                    # keep track of the RVAs seen and store them to study their
                    # properties. When certain non-standard features are detected
                    # the parsing will be aborted
                    if (thunk_data.AddressOfData in addresses_of_data_set_32 or
                            thunk_data.AddressOfData in addresses_of_data_set_64):
                        repeated_address += 1
                    if thunk_data.AddressOfData >= 2**32:
                        addresses_of_data_set_64.add(thunk_data.AddressOfData)
                    else:
                        addresses_of_data_set_32.add(thunk_data.AddressOfData)

            if not thunk_data or thunk_data.all_zeroes():
                break

            rva += thunk_data.sizeof()

            table.append(thunk_data)

        return table

    def get_memory_mapped_image(self, max_virtual_address=0x10000000, ImageBase=None):
        """Returns the data corresponding to the memory layout of the PE file.

        The data includes the PE header and the sections loaded at offsets
        corresponding to their relative virtual addresses (the VirtualAddress
        section header member).
        Any offset in this data corresponds to the absolute memory address
        ImageBase+offset.

        The optional argument 'max_virtual_address' provides a means of limiting
        which sections are processed.
        Any section with its VirtualAddress beyond this value will be skipped.
        Normally, sections with values beyond this range are just there to confuse
        tools. It's a common trick to see in packed executables.

        If the 'ImageBase' optional argument is supplied, the file's relocations
        will be applied to the image by calling the 'relocate_image()' method. Beware
        that the relocation information is applied permanently.
        """

        # Rebase if requested
        #
        if ImageBase is not None:

            # Keep a copy of the image's data before modifying it by rebasing it
            #
            original_data = self.__data__

            self.relocate_image(ImageBase)

        # Collect all sections in one code block
        mapped_data = self.__data__[:]
        for section in self.sections:

            # Miscellaneous integrity tests.
            # Some packers will set these to bogus values to make tools go nuts.
            if section.Misc_VirtualSize == 0 and section.SizeOfRawData == 0:
                continue

            srd = section.SizeOfRawData
            prd = self.adjust_FileAlignment(
                section.PointerToRawData, self.OPTIONAL_HEADER.FileAlignment)
            VirtualAddress_adj = self.adjust_SectionAlignment(
                section.VirtualAddress,
                self.OPTIONAL_HEADER.SectionAlignment,
                self.OPTIONAL_HEADER.FileAlignment )

            if (srd > len(self.__data__) or
                    prd > len(self.__data__) or
                    srd + prd > len(self.__data__) or
                    VirtualAddress_adj >= max_virtual_address):
                continue

            padding_length = VirtualAddress_adj - len(mapped_data)

            if padding_length > 0:
                mapped_data += b'\0'*padding_length
            elif padding_length < 0:
                mapped_data = mapped_data[:padding_length]

            mapped_data += section.get_data()

        # If the image was rebased, restore it to its original form
        #
        if ImageBase is not None:
            self.__data__ = original_data

        return mapped_data
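
    # Illustrative usage sketch (not part of the library): dumping the
    # memory-mapped view of an already parsed file. `pe` and the output path
    # are placeholders.
    #
    #   image = pe.get_memory_mapped_image()
    #   with open('mapped.bin', 'wb') as f:   # placeholder output file
    #       f.write(image)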

    def get_resources_strings(self):
        """Returns a list of all the strings found within the resources (if any).

        This method will scan all entries in the resources directory of the PE, if
        there is one, and will return a list() with the strings.
        An empty list will be returned otherwise.
        """

        resources_strings = list()

        if hasattr(self, 'DIRECTORY_ENTRY_RESOURCE'):
            for resource_type in self.DIRECTORY_ENTRY_RESOURCE.entries:
                if hasattr(resource_type, 'directory'):
                    for resource_id in resource_type.directory.entries:
                        if hasattr(resource_id, 'directory'):
                            if hasattr(resource_id.directory, 'strings') and resource_id.directory.strings:
                                for res_string in list(resource_id.directory.strings.values()):
                                    resources_strings.append(res_string)

        return resources_strings
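
    # Illustrative usage sketch (not part of the library); `pe` is a placeholder
    # for an already constructed PE instance:
    #
    #   for s in pe.get_resources_strings():
    #       print(s)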

    def get_data(self, rva=0, length=None):
        """Get data regardless of the section where it lies on.

        Given a RVA and the size of the chunk to retrieve, this method
        will find the section where the data lies and return the data.
        """

        s = self.get_section_by_rva(rva)

        if length:
            end = rva + length
        else:
            end = None

        if not s:
            if rva < len(self.header):
                return self.header[rva:end]

            # Before we give up we check whether the file might
            # contain the data anyway. There are cases of PE files
            # without sections that rely on windows loading the first
            # 8291 bytes into memory and assume the data will be
            # there
            # A functional file with these characteristics is:
            # MD5: 0008892cdfbc3bda5ce047c565e52295
            # SHA-1: c7116b9ff950f86af256defb95b5d4859d4752a9
            #
            if rva < len(self.__data__):
                return self.__data__[rva:end]

            raise PEFormatError('data at RVA can\'t be fetched. Corrupt header?')

        return s.get_data(rva, length)
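
    # Illustrative usage sketch (not part of the library): reading a few bytes
    # at the entry point RVA. `pe` is a placeholder for a parsed instance.
    #
    #   ep = pe.OPTIONAL_HEADER.AddressOfEntryPoint
    #   first_bytes = pe.get_data(ep, 16)
    #   print(first_bytes.hex())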

    def get_rva_from_offset(self, offset):
        """Get the RVA corresponding to this file offset. """

        s = self.get_section_by_offset(offset)
        if not s:
            if self.sections:
                lowest_rva = min( [ self.adjust_SectionAlignment( s.VirtualAddress,
                    self.OPTIONAL_HEADER.SectionAlignment, self.OPTIONAL_HEADER.FileAlignment ) for s in self.sections] )
                if offset < lowest_rva:
                    # We will assume that the offset lies within the headers, or
                    # at least points before where the earliest section starts
                    # and we will simply return the offset as the RVA
                    #
                    # The case illustrating this behavior can be found at:
                    # http://corkami.blogspot.com/2010/01/hey-hey-hey-whats-in-your-head.html
                    # where the import table is not contained by any section
                    # hence the RVA needs to be resolved to a raw offset
                    return offset
                return None
            else:
                return offset
            #raise PEFormatError("specified offset (0x%x) doesn't belong to any section." % offset)
        return s.get_rva_from_offset(offset)

    def get_offset_from_rva(self, rva):
        """Get the file offset corresponding to this RVA.

        Given a RVA, this method will find the section where the
        data lies and return the offset within the file.
        """

        s = self.get_section_by_rva(rva)
        if not s:

            # If not found within a section assume it might
            # point to overlay data or otherwise data present
            # but not contained in any section. In those
            # cases the RVA should equal the offset
            if rva < len(self.__data__):
                return rva

            raise PEFormatError('data at RVA can\'t be fetched. Corrupt header?')

        return s.get_offset_from_rva(rva)
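
    # Illustrative round-trip sketch (not part of the library); `pe` is a
    # placeholder for a parsed instance:
    #
    #   ep_rva = pe.OPTIONAL_HEADER.AddressOfEntryPoint
    #   ep_off = pe.get_offset_from_rva(ep_rva)
    #   print(hex(ep_rva), '->', hex(ep_off), '->', hex(pe.get_rva_from_offset(ep_off)))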

    def get_string_at_rva(self, rva, max_length=MAX_STRING_LENGTH):
        """Get an ASCII string located at the given address."""

        if rva is None:
            return None

        s = self.get_section_by_rva(rva)
        if not s:
            return self.get_string_from_data(0, self.__data__[rva:rva+max_length])
        return self.get_string_from_data(0, s.get_data(rva, length=max_length))

    def get_bytes_from_data(self, offset, data):
        """Return the bytes of 'data' starting at the given offset."""

        if offset > len(data):
            return b''
        d = data[offset:]
        if isinstance(d, bytearray):
            return bytes(d)
        return d

    def get_string_from_data(self, offset, data):
        """Get an ASCII string from data."""

        s = self.get_bytes_from_data(offset, data)
        end = s.find(b'\0')
        if end >= 0:
            s = s[:end]
        return s

    def get_string_u_at_rva(self, rva, max_length = 2**16, encoding=None):
        """Get a Unicode string located at the given address."""

        if max_length == 0:
            return b''

        # If the RVA is invalid let the exception reach the callers. All
        # call-sites of get_string_u_at_rva() will handle it.
        data = self.get_data(rva, 2)

        # max_length is the maximum count of 16bit characters; it needs to be
        # doubled to get the size in bytes
        max_length <<= 1

        requested = min(max_length, 256)
        data = self.get_data(rva, requested)
        # try to find null-termination
        null_index = -1
        while True:
            null_index = data.find(b'\x00\x00', null_index + 1)
            if null_index == -1:
                data_length = len(data)
                if data_length < requested or data_length == max_length:
                    null_index = len(data) >> 1
                    break
                else:
                    # Request remaining part of data limited by max_length
                    data += self.get_data(rva + data_length, max_length - data_length)
                    null_index = requested - 1
                    requested = max_length
            elif null_index % 2 == 0:
                null_index >>= 1
                break

        # convert selected part of the string to unicode
        uchrs = struct.unpack('<{:d}H'.format(null_index), data[:null_index * 2])
        s = u''.join(map(chr, uchrs))

        if encoding:
            return b(s.encode(encoding, 'backslashreplace_'))
        return b(s.encode('utf-8', 'backslashreplace_'))
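
    # Illustrative usage sketch (not part of the library): version-info and
    # resource strings are stored as UTF-16, so they are typically read with
    # get_string_u_at_rva(). `pe` and `some_rva` are placeholders.
    #
    #   s = pe.get_string_u_at_rva(some_rva, max_length=64)
    #   print(s.decode('utf-8'))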

    def get_section_by_offset(self, offset):
        """Get the section containing the given file offset."""

        for section in self.sections:
            if section.contains_offset(offset):
                return section

        return None

    def get_section_by_rva(self, rva):
        """Get the section containing the given address."""

        for section in self.sections:
            if section.contains_rva(rva):
                return section

        return None

    def __str__(self):
        return self.dump_info()

    def has_relocs(self):
        """Checks if the PE file has a relocation directory."""
        return hasattr(self, 'DIRECTORY_ENTRY_BASERELOC')

    def print_info(self, encoding='utf-8'):
        """Print all the PE header information in a human readable form."""
        print(self.dump_info(encoding=encoding))

    def dump_info(self, dump=None, encoding='ascii'):
        """Dump all the PE header information into a human readable string."""

        if dump is None:
            dump = Dump()

        warnings = self.get_warnings()
        if warnings:
            dump.add_header('Parsing Warnings')
            for warning in warnings:
                dump.add_line(warning)
                dump.add_newline()

        dump.add_header('DOS_HEADER')
        dump.add_lines(self.DOS_HEADER.dump())
        dump.add_newline()

        dump.add_header('NT_HEADERS')
        dump.add_lines(self.NT_HEADERS.dump())
        dump.add_newline()

        dump.add_header('FILE_HEADER')
        dump.add_lines(self.FILE_HEADER.dump())

        image_flags = retrieve_flags(IMAGE_CHARACTERISTICS, 'IMAGE_FILE_')

        dump.add('Flags: ')
        flags = []
        for flag in sorted(image_flags):
            if getattr(self.FILE_HEADER, flag[0]):
                flags.append(flag[0])
        dump.add_line(', '.join(flags))
        dump.add_newline()

        if hasattr(self, 'OPTIONAL_HEADER') and self.OPTIONAL_HEADER is not None:
            dump.add_header('OPTIONAL_HEADER')
            dump.add_lines(self.OPTIONAL_HEADER.dump())

            dll_characteristics_flags = retrieve_flags(DLL_CHARACTERISTICS, 'IMAGE_DLLCHARACTERISTICS_')

            dump.add('DllCharacteristics: ')
            flags = []
            for flag in sorted(dll_characteristics_flags):
                if getattr(self.OPTIONAL_HEADER, flag[0]):
                    flags.append(flag[0])
            dump.add_line(', '.join(flags))
            dump.add_newline()

        dump.add_header('PE Sections')

        section_flags = retrieve_flags(SECTION_CHARACTERISTICS, 'IMAGE_SCN_')

        for section in self.sections:
            dump.add_lines(section.dump())
            dump.add('Flags: ')
            flags = []
            for flag in sorted(section_flags):
                if getattr(section, flag[0]):
                    flags.append(flag[0])
            dump.add_line(', '.join(flags))
            dump.add_line('Entropy: {0:f} (Min=0.0, Max=8.0)'.format(
                section.get_entropy()))
            if md5 is not None:
                dump.add_line('MD5 hash: {0}'.format(
                    section.get_hash_md5()))
            if sha1 is not None:
                dump.add_line('SHA-1 hash: %s' % section.get_hash_sha1() )
            if sha256 is not None:
                dump.add_line('SHA-256 hash: %s' % section.get_hash_sha256() )
            if sha512 is not None:
                dump.add_line('SHA-512 hash: %s' % section.get_hash_sha512() )
            dump.add_newline()

        if (hasattr(self, 'OPTIONAL_HEADER') and
                hasattr(self.OPTIONAL_HEADER, 'DATA_DIRECTORY') ):

            dump.add_header('Directories')
            for idx in range(len(self.OPTIONAL_HEADER.DATA_DIRECTORY)):
                directory = self.OPTIONAL_HEADER.DATA_DIRECTORY[idx]
                dump.add_lines(directory.dump())
            dump.add_newline()

        if hasattr(self, 'VS_VERSIONINFO'):
            for idx in range(len(self.VS_VERSIONINFO)):
                if len(self.VS_VERSIONINFO) > 1:
                    dump.add_header('Version Information {:d}'.format(idx + 1))
                else:
                    dump.add_header('Version Information')
                dump.add_lines(self.VS_VERSIONINFO[idx].dump())
                dump.add_newline()

                if hasattr(self, 'VS_FIXEDFILEINFO'):
                    dump.add_lines(self.VS_FIXEDFILEINFO[idx].dump())
                    dump.add_newline()

                if hasattr(self, 'FileInfo') and len(self.FileInfo) > idx:
                    for entry in self.FileInfo[idx]:
                        dump.add_lines(entry.dump())
                        dump.add_newline()

                        if hasattr(entry, 'StringTable'):
                            for st_entry in entry.StringTable:
                                [dump.add_line(u' '+line) for line in st_entry.dump()]
                                dump.add_line(u' LangID: {0}'.format(
                                    st_entry.LangID.decode(encoding, 'backslashreplace_')))
                                dump.add_newline()
                                for str_entry in sorted(list(st_entry.entries.items())):
                                    # try:
                                    dump.add_line( u' {0}: {1}'.format(
                                        str_entry[0].decode(encoding, 'backslashreplace_'),
                                        str_entry[1].decode(encoding, 'backslashreplace_')))
                                dump.add_newline()

                        elif hasattr(entry, 'Var'):
                            for var_entry in entry.Var:
                                if hasattr(var_entry, 'entry'):
                                    [dump.add_line(' '+line) for line in var_entry.dump()]
                                    dump.add_line(
                                        u' {0}: {1}'.format(
                                            list(var_entry.entry.keys())[0].decode(
                                                'utf-8', 'backslashreplace_'),
                                            list(var_entry.entry.values())[0]))
                            dump.add_newline()

        if hasattr(self, 'DIRECTORY_ENTRY_EXPORT'):
            dump.add_header('Exported symbols')
            dump.add_lines(self.DIRECTORY_ENTRY_EXPORT.struct.dump())
            dump.add_newline()
            dump.add_line(u'%-10s %-10s %s' % ('Ordinal', 'RVA', 'Name'))
            for export in self.DIRECTORY_ENTRY_EXPORT.symbols:
                if export.address is not None:
                    name = b('None')
                    if export.name:
                        name = export.name
                    dump.add(u'%-10d 0x%08X %s' % (
                        export.ordinal, export.address, name.decode(encoding)))
                    if export.forwarder:
                        dump.add_line(u' forwarder: {0}'.format(
                            export.forwarder.decode(encoding, 'backslashreplace_')))
                    else:
                        dump.add_newline()
            dump.add_newline()

        if hasattr(self, 'DIRECTORY_ENTRY_IMPORT'):
            dump.add_header('Imported symbols')
            for module in self.DIRECTORY_ENTRY_IMPORT:
                dump.add_lines(module.struct.dump())
                # Print the name of the DLL if there are no imports.
                if not module.imports:
                    dump.add(' Name -> {0}'.format(
                        self.get_string_at_rva(module.struct.Name).decode(
                            encoding, 'backslashreplace_')))
                    dump.add_newline()
                dump.add_newline()
                for symbol in module.imports:
                    if symbol.import_by_ordinal is True:
                        if symbol.name is not None:
                            dump.add('{0}.{1} Ordinal[{2}] (Imported by Ordinal)'.format(
                                module.dll.decode('utf-8'),
                                symbol.name.decode('utf-8'),
                                symbol.ordinal))
                        else:
                            dump.add('{0} Ordinal[{1}] (Imported by Ordinal)'.format(
                                module.dll.decode('utf-8'), symbol.ordinal))
                    else:
                        dump.add('{0}.{1} Hint[{2:d}]'.format(
                            module.dll.decode(encoding, 'backslashreplace_'),
                            symbol.name.decode(encoding, 'backslashreplace_'),
                            symbol.hint))

                    if symbol.bound:
                        dump.add_line(' Bound: 0x{0:08X}'.format(symbol.bound))
                    else:
                        dump.add_newline()
                dump.add_newline()

        if hasattr(self, 'DIRECTORY_ENTRY_BOUND_IMPORT'):
            dump.add_header('Bound imports')
            for bound_imp_desc in self.DIRECTORY_ENTRY_BOUND_IMPORT:
                dump.add_lines(bound_imp_desc.struct.dump())
                dump.add_line('DLL: {0}'.format(
                    bound_imp_desc.name.decode(encoding, 'backslashreplace_')))
                dump.add_newline()
                for bound_imp_ref in bound_imp_desc.entries:
                    dump.add_lines(bound_imp_ref.struct.dump(), 4)
                    dump.add_line('DLL: {0}'.format(
                        bound_imp_ref.name.decode(encoding, 'backslashreplace_')), 4)
                    dump.add_newline()

        if hasattr(self, 'DIRECTORY_ENTRY_DELAY_IMPORT'):
            dump.add_header('Delay Imported symbols')
            for module in self.DIRECTORY_ENTRY_DELAY_IMPORT:
                dump.add_lines(module.struct.dump())
                dump.add_newline()
                for symbol in module.imports:
                    if symbol.import_by_ordinal is True:
                        dump.add('{0} Ordinal[{1:d}] (Imported by Ordinal)'.format(
                            module.dll.decode(encoding, 'backslashreplace_'),
                            symbol.ordinal))
                    else:
                        dump.add('{0}.{1} Hint[{2}]'.format(
                            module.dll.decode(encoding, 'backslashreplace_'),
                            symbol.name.decode(encoding, 'backslashreplace_'), symbol.hint))

                    if symbol.bound:
                        dump.add_line(' Bound: 0x{0:08X}'.format(symbol.bound))
                    else:
                        dump.add_newline()
                dump.add_newline()

        if hasattr(self, 'DIRECTORY_ENTRY_RESOURCE'):
            dump.add_header('Resource directory')

            dump.add_lines(self.DIRECTORY_ENTRY_RESOURCE.struct.dump())

            for resource_type in self.DIRECTORY_ENTRY_RESOURCE.entries:

                if resource_type.name is not None:
                    # name = str(resource_type.name) #.string if resource_type.name.string else ''
                    dump.add_line(u'Name: [{0}]'.format(
                        resource_type.name.decode(encoding, 'backslashreplace_')
                        ), 2)
                else:
                    dump.add_line(u'Id: [0x{0:X}] ({1})'.format(
                        resource_type.struct.Id, RESOURCE_TYPE.get(
                            resource_type.struct.Id, '-')),
                        2)

                dump.add_lines(resource_type.struct.dump(), 2)

                if hasattr(resource_type, 'directory'):
                    dump.add_lines(resource_type.directory.struct.dump(), 4)

                    for resource_id in resource_type.directory.entries:

                        if resource_id.name is not None:
                            dump.add_line(u'Name: [{0}]'.format(
                                resource_id.name.decode(
                                    'utf-8', 'backslashreplace_')), 6)
                        else:
                            dump.add_line('Id: [0x{0:X}]'.format(resource_id.struct.Id), 6)

                        dump.add_lines(resource_id.struct.dump(), 6)

                        if hasattr(resource_id, 'directory'):
                            dump.add_lines(resource_id.directory.struct.dump(), 8)

                            for resource_lang in resource_id.directory.entries:
                                if hasattr(resource_lang, 'data'):
                                    dump.add_line(u'\\--- LANG [%d,%d][%s,%s]' % (
                                        resource_lang.data.lang,
                                        resource_lang.data.sublang,
                                        LANG.get(resource_lang.data.lang, '*unknown*'),
                                        get_sublang_name_for_lang( resource_lang.data.lang, resource_lang.data.sublang ) ), 8)
                                    dump.add_lines(resource_lang.struct.dump(), 10)
                                    dump.add_lines(resource_lang.data.struct.dump(), 12)

                            if hasattr(resource_id.directory, 'strings') and resource_id.directory.strings:
                                dump.add_line(u'[STRINGS]', 10)
                                for idx, res_string in list(sorted(resource_id.directory.strings.items())):
                                    dump.add_line( '{0:6d}: {1}'.format(idx,
                                        res_string.encode(
                                            'unicode-escape',
                                            'backslashreplace').decode(
                                                'ascii')),
                                        12)

                dump.add_newline()

            dump.add_newline()

        if ( hasattr(self, 'DIRECTORY_ENTRY_TLS') and
                self.DIRECTORY_ENTRY_TLS and
                self.DIRECTORY_ENTRY_TLS.struct ):
            dump.add_header('TLS')
            dump.add_lines(self.DIRECTORY_ENTRY_TLS.struct.dump())
            dump.add_newline()

        if ( hasattr(self, 'DIRECTORY_ENTRY_LOAD_CONFIG') and
                self.DIRECTORY_ENTRY_LOAD_CONFIG and
                self.DIRECTORY_ENTRY_LOAD_CONFIG.struct ):
            dump.add_header('LOAD_CONFIG')
            dump.add_lines(self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.dump())
            dump.add_newline()

        if hasattr(self, 'DIRECTORY_ENTRY_DEBUG'):
            dump.add_header('Debug information')
            for dbg in self.DIRECTORY_ENTRY_DEBUG:
                dump.add_lines(dbg.struct.dump())
                try:
                    dump.add_line('Type: '+DEBUG_TYPE[dbg.struct.Type])
                except KeyError:
                    dump.add_line(
                        'Type: 0x{0:x}(Unknown)'.format(dbg.struct.Type))
                dump.add_newline()
                if dbg.entry:
                    dump.add_lines(dbg.entry.dump(), 4)
                    dump.add_newline()

        if self.has_relocs():
            dump.add_header('Base relocations')
            for base_reloc in self.DIRECTORY_ENTRY_BASERELOC:
                dump.add_lines(base_reloc.struct.dump())
                for reloc in base_reloc.entries:
                    try:
                        dump.add_line('%08Xh %s' % (
                            reloc.rva, RELOCATION_TYPE[reloc.type][16:]), 4)
                    except KeyError:
                        dump.add_line('0x%08X 0x%x(Unknown)' % (
                            reloc.rva, reloc.type), 4)
                dump.add_newline()

        return dump.get_text()
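
    # Illustrative usage sketch (not part of the library): the text dump can be
    # printed or written to a file. `pe` is a placeholder for a parsed instance.
    #
    #   print(pe.dump_info())
    #   # or, equivalently:
    #   pe.print_info()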

    def dump_dict(self, dump=None):
        """Dump all the PE header information into a dictionary."""

        dump_dict = dict()

        warnings = self.get_warnings()
        if warnings:
            dump_dict['Parsing Warnings'] = warnings

        dump_dict['DOS_HEADER'] = self.DOS_HEADER.dump_dict()
        dump_dict['NT_HEADERS'] = self.NT_HEADERS.dump_dict()
        dump_dict['FILE_HEADER'] = self.FILE_HEADER.dump_dict()

        image_flags = retrieve_flags(IMAGE_CHARACTERISTICS, 'IMAGE_FILE_')

        dump_dict['Flags'] = list()
        for flag in image_flags:
            if getattr(self.FILE_HEADER, flag[0]):
                dump_dict['Flags'].append(flag[0])

        if hasattr(self, 'OPTIONAL_HEADER') and self.OPTIONAL_HEADER is not None:
            dump_dict['OPTIONAL_HEADER'] = self.OPTIONAL_HEADER.dump_dict()

            dll_characteristics_flags = retrieve_flags(DLL_CHARACTERISTICS, 'IMAGE_DLLCHARACTERISTICS_')

            dump_dict['DllCharacteristics'] = list()
            for flag in dll_characteristics_flags:
                if getattr(self.OPTIONAL_HEADER, flag[0]):
                    dump_dict['DllCharacteristics'].append(flag[0])

        dump_dict['PE Sections'] = list()

        section_flags = retrieve_flags(SECTION_CHARACTERISTICS, 'IMAGE_SCN_')
        for section in self.sections:
            section_dict = section.dump_dict()
            dump_dict['PE Sections'].append(section_dict)
            section_dict['Flags'] = list()
            for flag in section_flags:
                if getattr(section, flag[0]):
                    section_dict['Flags'].append(flag[0])

            section_dict['Entropy'] = section.get_entropy()
            if md5 is not None:
                section_dict['MD5'] = section.get_hash_md5()
            if sha1 is not None:
                section_dict['SHA1'] = section.get_hash_sha1()
            if sha256 is not None:
                section_dict['SHA256'] = section.get_hash_sha256()
            if sha512 is not None:
                section_dict['SHA512'] = section.get_hash_sha512()

        if (hasattr(self, 'OPTIONAL_HEADER') and
                hasattr(self.OPTIONAL_HEADER, 'DATA_DIRECTORY') ):

            dump_dict['Directories'] = list()

            for idx in range(len(self.OPTIONAL_HEADER.DATA_DIRECTORY)):
                directory = self.OPTIONAL_HEADER.DATA_DIRECTORY[idx]
                dump_dict['Directories'].append(directory.dump_dict())

        if hasattr(self, 'VS_VERSIONINFO'):
            dump_dict['Version Information'] = list()
            for idx in range(len(self.VS_VERSIONINFO)):
                version_info_list = list()
                version_info_list.append(self.VS_VERSIONINFO[idx].dump_dict())

                if hasattr(self, 'VS_FIXEDFILEINFO'):
                    version_info_list.append(self.VS_FIXEDFILEINFO[idx].dump_dict())

                if hasattr(self, 'FileInfo') and len(self.FileInfo) > idx:
                    fileinfo_list = list()
                    for entry in self.FileInfo[idx]:
                        fileinfo_list.append(entry.dump_dict())

                        if hasattr(entry, 'StringTable'):
                            stringtable_dict = dict()
                            for st_entry in entry.StringTable:
                                [fileinfo_list.append(line) for line in st_entry.dump_dict()]
                                stringtable_dict['LangID'] = st_entry.LangID
                                for str_entry in list(st_entry.entries.items()):
                                    stringtable_dict[str_entry[0]] = str_entry[1]
                            fileinfo_list.append(stringtable_dict)

                        elif hasattr(entry, 'Var'):
                            for var_entry in entry.Var:
                                var_dict = dict()
                                if hasattr(var_entry, 'entry'):
                                    [fileinfo_list.append(line) for line in var_entry.dump_dict()]
                                    var_dict[list(var_entry.entry.keys())[0]] = list(
                                        var_entry.entry.values())[0]
                                    fileinfo_list.append(var_dict)

                dump_dict['Version Information'].append(version_info_list)

        if hasattr(self, 'DIRECTORY_ENTRY_EXPORT'):
            dump_dict['Exported symbols'] = list()
            dump_dict['Exported symbols'].append(self.DIRECTORY_ENTRY_EXPORT.struct.dump_dict())
            for export in self.DIRECTORY_ENTRY_EXPORT.symbols:
                export_dict = dict()
                if export.address is not None:
                    export_dict.update({'Ordinal': export.ordinal, 'RVA': export.address, 'Name': export.name})
                    if export.forwarder:
                        export_dict['forwarder'] = export.forwarder
                dump_dict['Exported symbols'].append(export_dict)

        if hasattr(self, 'DIRECTORY_ENTRY_IMPORT'):
            dump_dict['Imported symbols'] = list()
            for module in self.DIRECTORY_ENTRY_IMPORT:
                import_list = list()
                dump_dict['Imported symbols'].append(import_list)
                import_list.append(module.struct.dump_dict())
                for symbol in module.imports:
                    symbol_dict = dict()
                    if symbol.import_by_ordinal is True:
                        symbol_dict['DLL'] = module.dll
                        symbol_dict['Ordinal'] = symbol.ordinal
                    else:
                        symbol_dict['DLL'] = module.dll
                        symbol_dict['Name'] = symbol.name
                        symbol_dict['Hint'] = symbol.hint

                    if symbol.bound:
                        symbol_dict['Bound'] = symbol.bound
                    import_list.append(symbol_dict)

        if hasattr(self, 'DIRECTORY_ENTRY_BOUND_IMPORT'):
            dump_dict['Bound imports'] = list()
            for bound_imp_desc in self.DIRECTORY_ENTRY_BOUND_IMPORT:
                bound_imp_desc_dict = dict()
                dump_dict['Bound imports'].append(bound_imp_desc_dict)

                bound_imp_desc_dict.update(bound_imp_desc.struct.dump_dict())
                bound_imp_desc_dict['DLL'] = bound_imp_desc.name

                for bound_imp_ref in bound_imp_desc.entries:
                    bound_imp_ref_dict = dict()
                    bound_imp_ref_dict.update(bound_imp_ref.struct.dump_dict())
                    bound_imp_ref_dict['DLL'] = bound_imp_ref.name

        if hasattr(self, 'DIRECTORY_ENTRY_DELAY_IMPORT'):
            dump_dict['Delay Imported symbols'] = list()
            for module in self.DIRECTORY_ENTRY_DELAY_IMPORT:
                module_list = list()
                dump_dict['Delay Imported symbols'].append(module_list)
                module_list.append(module.struct.dump_dict())
                for symbol in module.imports:
                    symbol_dict = dict()
                    if symbol.import_by_ordinal is True:
                        symbol_dict['DLL'] = module.dll
                        symbol_dict['Ordinal'] = symbol.ordinal
                    else:
                        symbol_dict['DLL'] = module.dll
                        symbol_dict['Name'] = symbol.name
                        symbol_dict['Hint'] = symbol.hint

                    if symbol.bound:
                        symbol_dict['Bound'] = symbol.bound
                    module_list.append(symbol_dict)

        if hasattr(self, 'DIRECTORY_ENTRY_RESOURCE'):
            dump_dict['Resource directory'] = list()
            dump_dict['Resource directory'].append(self.DIRECTORY_ENTRY_RESOURCE.struct.dump_dict())

            for resource_type in self.DIRECTORY_ENTRY_RESOURCE.entries:
                resource_type_dict = dict()

                if resource_type.name is not None:
                    resource_type_dict['Name'] = resource_type.name
                else:
                    resource_type_dict['Id'] = (
                        resource_type.struct.Id, RESOURCE_TYPE.get(resource_type.struct.Id, '-'))

                resource_type_dict.update(resource_type.struct.dump_dict())
                dump_dict['Resource directory'].append(resource_type_dict)

                if hasattr(resource_type, 'directory'):
                    directory_list = list()
                    directory_list.append(resource_type.directory.struct.dump_dict())
                    dump_dict['Resource directory'].append(directory_list)

                    for resource_id in resource_type.directory.entries:
                        resource_id_dict = dict()

                        if resource_id.name is not None:
                            resource_id_dict['Name'] = resource_id.name
                        else:
                            resource_id_dict['Id'] = resource_id.struct.Id

                        resource_id_dict.update(resource_id.struct.dump_dict())
                        directory_list.append(resource_id_dict)

                        if hasattr(resource_id, 'directory'):
                            resource_id_list = list()
                            resource_id_list.append(resource_id.directory.struct.dump_dict())
                            directory_list.append(resource_id_list)

                            for resource_lang in resource_id.directory.entries:
                                if hasattr(resource_lang, 'data'):
                                    resource_lang_dict = dict()
                                    resource_lang_dict['LANG'] = resource_lang.data.lang
                                    resource_lang_dict['SUBLANG'] = resource_lang.data.sublang
                                    resource_lang_dict['LANG_NAME'] = LANG.get(resource_lang.data.lang, '*unknown*')
                                    resource_lang_dict['SUBLANG_NAME'] = get_sublang_name_for_lang(resource_lang.data.lang, resource_lang.data.sublang)
                                    resource_lang_dict.update(resource_lang.struct.dump_dict())
                                    resource_lang_dict.update(resource_lang.data.struct.dump_dict())
                                    resource_id_list.append(resource_lang_dict)

                            if hasattr(resource_id.directory, 'strings') and resource_id.directory.strings:
                                for idx, res_string in list(resource_id.directory.strings.items()):
                                    resource_id_list.append(res_string.encode(
                                        'unicode-escape',
                                        'backslashreplace').decode(
                                            'ascii'))

        if ( hasattr(self, 'DIRECTORY_ENTRY_TLS') and
                self.DIRECTORY_ENTRY_TLS and
                self.DIRECTORY_ENTRY_TLS.struct ):
            dump_dict['TLS'] = self.DIRECTORY_ENTRY_TLS.struct.dump_dict()

        if ( hasattr(self, 'DIRECTORY_ENTRY_LOAD_CONFIG') and
                self.DIRECTORY_ENTRY_LOAD_CONFIG and
                self.DIRECTORY_ENTRY_LOAD_CONFIG.struct ):
            dump_dict['LOAD_CONFIG'] = self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.dump_dict()

        if hasattr(self, 'DIRECTORY_ENTRY_DEBUG'):
            dump_dict['Debug information'] = list()
            for dbg in self.DIRECTORY_ENTRY_DEBUG:
                dbg_dict = dict()
                dump_dict['Debug information'].append(dbg_dict)
                dbg_dict.update(dbg.struct.dump_dict())
                dbg_dict['Type'] = DEBUG_TYPE.get(dbg.struct.Type, dbg.struct.Type)

        if self.has_relocs():
            dump_dict['Base relocations'] = list()
            for base_reloc in self.DIRECTORY_ENTRY_BASERELOC:
                base_reloc_list = list()
                dump_dict['Base relocations'].append(base_reloc_list)
                base_reloc_list.append(base_reloc.struct.dump_dict())
                for reloc in base_reloc.entries:
                    reloc_dict = dict()
                    base_reloc_list.append(reloc_dict)
                    reloc_dict['RVA'] = reloc.rva
                    try:
                        reloc_dict['Type'] = RELOCATION_TYPE[reloc.type][16:]
                    except KeyError:
                        reloc_dict['Type'] = reloc.type

        return dump_dict
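
    # Illustrative usage sketch (not part of the library): the dictionary form
    # is convenient for programmatic post-processing. `pe` is a placeholder.
    #
    #   info = pe.dump_dict()
    #   print(sorted(info.keys()))
    #   print(info['FILE_HEADER'])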

    # OC Patch
    def get_physical_by_rva(self, rva):
        """Gets the physical address in the PE file from an RVA value."""
        try:
            return self.get_offset_from_rva(rva)
        except Exception:
            return None

    ##
    # Double-Word get / set
    ##

    def get_data_from_dword(self, dword):
        """Return a four byte string representing the double word value. (little endian)."""
        return struct.pack('<L', dword & 0xffffffff)

    def get_dword_from_data(self, data, offset):
        """Convert four bytes of data to a double word (little endian)

        'offset' is assumed to index into a dword array. So setting it to
        N will return a dword out of the data starting at offset N*4.

        Returns None if the data can't be turned into a double word.
        """

        if (offset+1)*4 > len(data):
            return None

        return struct.unpack('<I', data[offset*4:(offset+1)*4])[0]

    def get_dword_at_rva(self, rva):
        """Return the double word value at the given RVA.

        Returns None if the value can't be read, i.e. the RVA can't be mapped
        to a file offset.
        """

        try:
            return self.get_dword_from_data(self.get_data(rva, 4), 0)
        except PEFormatError:
            return None

    def get_dword_from_offset(self, offset):
        """Return the double word value at the given file offset. (little endian)"""

        if offset+4 > len(self.__data__):
            return None

        return self.get_dword_from_data(self.__data__[offset:offset+4], 0)

    def set_dword_at_rva(self, rva, dword):
        """Set the double word value at the file offset corresponding to the given RVA."""
        return self.set_bytes_at_rva(rva, self.get_data_from_dword(dword))

    def set_dword_at_offset(self, offset, dword):
        """Set the double word value at the given file offset."""
        return self.set_bytes_at_offset(offset, self.get_data_from_dword(dword))
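
    # Illustrative usage sketch (not part of the library): reading and patching
    # a double word at an RVA. `pe` and the RVA value 0x2000 are placeholders.
    #
    #   value = pe.get_dword_at_rva(0x2000)
    #   if value is not None:
    #       pe.set_dword_at_rva(0x2000, value ^ 0xffffffff)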

    ##
    # Word get / set
    ##

    def get_data_from_word(self, word):
        """Return a two byte string representing the word value. (little endian)."""
        return struct.pack('<H', word)

    def get_word_from_data(self, data, offset):
        """Convert two bytes of data to a word (little endian)

        'offset' is assumed to index into a word array. So setting it to
        N will return a word out of the data starting at offset N*2.

        Returns None if the data can't be turned into a word.
        """

        if (offset+1)*2 > len(data):
            return None

        return struct.unpack('<H', data[offset*2:(offset+1)*2])[0]

    def get_word_at_rva(self, rva):
        """Return the word value at the given RVA.

        Returns None if the value can't be read, i.e. the RVA can't be mapped
        to a file offset.
        """

        try:
            return self.get_word_from_data(self.get_data(rva)[:2], 0)
        except PEFormatError:
            return None

    def get_word_from_offset(self, offset):
        """Return the word value at the given file offset. (little endian)"""

        if offset+2 > len(self.__data__):
            return None

        return self.get_word_from_data(self.__data__[offset:offset+2], 0)

    def set_word_at_rva(self, rva, word):
        """Set the word value at the file offset corresponding to the given RVA."""
        return self.set_bytes_at_rva(rva, self.get_data_from_word(word))

    def set_word_at_offset(self, offset, word):
        """Set the word value at the given file offset."""
        return self.set_bytes_at_offset(offset, self.get_data_from_word(word))

    ##
    # Quad-Word get / set
    ##

    def get_data_from_qword(self, word):
        """Return an eight byte string representing the quad-word value. (little endian)."""
        return struct.pack('<Q', word)

    def get_qword_from_data(self, data, offset):
        """Convert eight bytes of data to a quad-word (little endian)

        'offset' is assumed to index into a quad-word array. So setting it to
        N will return a quad-word out of the data starting at offset N*8.

        Returns None if the data can't be turned into a quad word.
        """

        if (offset+1)*8 > len(data):
            return None

        return struct.unpack('<Q', data[offset*8:(offset+1)*8])[0]

    def get_qword_at_rva(self, rva):
        """Return the quad-word value at the given RVA.

        Returns None if the value can't be read, i.e. the RVA can't be mapped
        to a file offset.
        """

        try:
            return self.get_qword_from_data(self.get_data(rva)[:8], 0)
        except PEFormatError:
            return None

    def get_qword_from_offset(self, offset):
        """Return the quad-word value at the given file offset. (little endian)"""

        if offset+8 > len(self.__data__):
            return None

        return self.get_qword_from_data(self.__data__[offset:offset+8], 0)

    def set_qword_at_rva(self, rva, qword):
        """Set the quad-word value at the file offset corresponding to the given RVA."""
        return self.set_bytes_at_rva(rva, self.get_data_from_qword(qword))

    def set_qword_at_offset(self, offset, qword):
        """Set the quad-word value at the given file offset."""
        return self.set_bytes_at_offset(offset, self.get_data_from_qword(qword))

    ##
    # Set bytes
    ##

    def set_bytes_at_rva(self, rva, data):
        """Overwrite, with the given string, the bytes at the file offset corresponding to the given RVA.

        Return True if successful, False otherwise. It can fail if the
        offset is outside the file's boundaries.
        """

        if not isinstance(data, bytes):
            raise TypeError('data should be of type: bytes')

        offset = self.get_physical_by_rva(rva)
        if not offset:
            return False

        return self.set_bytes_at_offset(offset, data)

    def set_bytes_at_offset(self, offset, data):
        """Overwrite the bytes at the given file offset with the given string.

        Return True if successful, False otherwise. It can fail if the
        offset is outside the file's boundaries.
        """

        if not isinstance(data, bytes):
            raise TypeError('data should be of type: bytes')

        if 0 <= offset < len(self.__data__):
            self.__data__ = ( self.__data__[:offset] + data + self.__data__[offset+len(data):] )
        else:
            return False

        return True
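
    # Illustrative usage sketch (not part of the library): patching a few bytes
    # and saving the modified file. `pe` and the output path are placeholders;
    # PE.write() is defined elsewhere in this module.
    #
    #   if pe.set_bytes_at_rva(pe.OPTIONAL_HEADER.AddressOfEntryPoint, b'\x90\x90'):
    #       pe.write(filename='patched.exe')   # placeholder output path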

    def merge_modified_section_data(self):
        """Update the PE image content with any individual section data that has been modified."""

        for section in self.sections:
            section_data_start = self.adjust_FileAlignment( section.PointerToRawData,
                self.OPTIONAL_HEADER.FileAlignment )
            section_data_end = section_data_start+section.SizeOfRawData
            if section_data_start < len(self.__data__) and section_data_end < len(self.__data__):
                self.__data__ = self.__data__[:section_data_start] + section.get_data() + self.__data__[section_data_end:]

    def relocate_image(self, new_ImageBase):
        """Apply the relocation information to the image using the provided new image base.

        This method will apply the relocation information to the image. Given the new
        base, all the relocations will be processed and both the raw data and the
        section's data will be fixed accordingly.
        The resulting image can also be retrieved through the get_memory_mapped_image()
        method, in order to get something that more closely matches what would be found
        in memory once the Windows loader has finished its work.
        """

        relocation_difference = new_ImageBase - self.OPTIONAL_HEADER.ImageBase

        if (len(self.OPTIONAL_HEADER.DATA_DIRECTORY)>=6 and
                self.OPTIONAL_HEADER.DATA_DIRECTORY[5].Size):
            if not hasattr(self, 'DIRECTORY_ENTRY_BASERELOC'):
                self.parse_data_directories(
                    directories=[DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_BASERELOC']])
            for reloc in self.DIRECTORY_ENTRY_BASERELOC:

                virtual_address = reloc.struct.VirtualAddress
                size_of_block = reloc.struct.SizeOfBlock

                # We iterate with an index because if the relocation is of type
                # IMAGE_REL_BASED_HIGHADJ we need to also process the next entry
                # at once and skip it for the next iteration
                #
                entry_idx = 0
                while entry_idx < len(reloc.entries):

                    entry = reloc.entries[entry_idx]
                    entry_idx += 1

                    if entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_ABSOLUTE']:
                        # Nothing to do for this type of relocation
                        pass

                    elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGH']:
                        # Fix the high 16-bits of a relocation
                        #
                        # Add high 16-bits of relocation_difference to the
                        # 16-bit value at RVA=entry.rva
                        self.set_word_at_rva(
                            entry.rva,
                            ( self.get_word_at_rva(entry.rva) + relocation_difference>>16)&0xffff )

                    elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_LOW']:
                        # Fix the low 16-bits of a relocation
                        #
                        # Add low 16 bits of relocation_difference to the 16-bit value
                        # at RVA=entry.rva
                        self.set_word_at_rva(
                            entry.rva,
                            ( self.get_word_at_rva(entry.rva) + relocation_difference)&0xffff)

                    elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGHLOW']:
                        # Handle all high and low parts of a 32-bit relocation
                        #
                        # Add relocation_difference to the value at RVA=entry.rva
                        self.set_dword_at_rva(
                            entry.rva,
                            self.get_dword_at_rva(entry.rva)+relocation_difference)

                    elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGHADJ']:
                        # Fix the high 16-bits of a relocation and adjust
                        #
                        # Add high 16-bits of relocation_difference to the 32-bit value
                        # composed from the (16-bit value at RVA=entry.rva)<<16 plus
                        # the 16-bit value at the next relocation entry.
                        #
                        # If the next entry is beyond the array's limits,
                        # abort... the table is corrupt
                        #
                        if entry_idx == len(reloc.entries):
                            break

                        next_entry = reloc.entries[entry_idx]
                        entry_idx += 1
                        self.set_word_at_rva( entry.rva,
                            ((self.get_word_at_rva(entry.rva)<<16) + next_entry.rva +
                            relocation_difference & 0xffff0000) >> 16 )

                    elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_DIR64']:
                        # Apply the difference to the 64-bit value at the offset
                        # RVA=entry.rva
                        self.set_qword_at_rva(
                            entry.rva,
                            self.get_qword_at_rva(entry.rva) + relocation_difference)

        self.OPTIONAL_HEADER.ImageBase = new_ImageBase

        # correct VAs (virtual addresses) occurrences in directory information
        if hasattr(self, 'DIRECTORY_ENTRY_IMPORT'):
            for dll in self.DIRECTORY_ENTRY_IMPORT:
                for func in dll.imports:
                    func.address += relocation_difference
        if hasattr(self, 'DIRECTORY_ENTRY_TLS'):
            self.DIRECTORY_ENTRY_TLS.struct.StartAddressOfRawData += relocation_difference
            self.DIRECTORY_ENTRY_TLS.struct.EndAddressOfRawData += relocation_difference
            self.DIRECTORY_ENTRY_TLS.struct.AddressOfIndex += relocation_difference
            self.DIRECTORY_ENTRY_TLS.struct.AddressOfCallBacks += relocation_difference
        if hasattr(self, 'DIRECTORY_ENTRY_LOAD_CONFIG'):
            if self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.LockPrefixTable:
                self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.LockPrefixTable += relocation_difference
            if self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.EditList:
                self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.EditList += relocation_difference
            if self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.SecurityCookie:
                self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.SecurityCookie += relocation_difference
            if self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.SEHandlerTable:
                self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.SEHandlerTable += relocation_difference
            if self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.GuardCFCheckFunctionPointer:
                self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.GuardCFCheckFunctionPointer += relocation_difference
            if self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.GuardCFFunctionTable:
                self.DIRECTORY_ENTRY_LOAD_CONFIG.struct.GuardCFFunctionTable += relocation_difference
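
    # Illustrative usage sketch (not part of the library): rebasing an image.
    # `pe` and the base address are placeholders; note the change is permanent.
    #
    #   pe.relocate_image(0x1000000)
    #   rebased = pe.get_memory_mapped_image()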

    def verify_checksum(self):
        return self.OPTIONAL_HEADER.CheckSum == self.generate_checksum()

    def generate_checksum(self):
        # This will make sure that the data representing the PE image
        # is updated with any changes that might have been made by
        # assigning values to header fields as those are not automatically
        # updated upon assignment.
        #
        # data = self.write()
        # print('{0}'.format(len(data)))
        # for idx, b in enumerate(data):
        #     if b != ord(self.__data__[idx]) or (idx > 1244440 and idx < 1244460):
        #         print('Idx: {0} G {1:02x} {3} B {2:02x}'.format(
        #             idx, ord(self.__data__[idx]), b,
        #             self.__data__[idx], chr(b)))
        self.__data__ = self.write()

        # Get the offset to the CheckSum field in the OptionalHeader
        # (The offset is the same in PE32 and PE32+)
        checksum_offset = self.OPTIONAL_HEADER.get_file_offset() + 0x40  # 64

        checksum = 0
        # Verify the data is dword-aligned. Add padding if needed
        #
        remainder = len(self.__data__) % 4
        data_len = len(self.__data__) + ((4-remainder) * ( remainder != 0 ))

        for i in range( int(data_len / 4) ):
            # Skip the checksum field
            if i == int(checksum_offset / 4):
                continue
            if i+1 == (int(data_len / 4)) and remainder:
                dword = struct.unpack('I', self.__data__[i*4:]+ (b'\0' * (4-remainder)) )[0]
            else:
                dword = struct.unpack('I', self.__data__[ i*4 : i*4+4 ])[0]
            # Optimized the calculation (thanks to Emmanuel Bourg for pointing it out!)
            checksum += dword
            if checksum >= 2**32:
                checksum = (checksum & 0xffffffff) + (checksum >> 32)

        checksum = (checksum & 0xffff) + (checksum >> 16)
        checksum = (checksum) + (checksum >> 16)
        checksum = checksum & 0xffff

        # The length is the one of the original data, not the padded one
        #
        return checksum + len(self.__data__)
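
    # Illustrative usage sketch (not part of the library): checking and fixing
    # the PE checksum of a parsed file. `pe` is a placeholder.
    #
    #   if not pe.verify_checksum():
    #       pe.OPTIONAL_HEADER.CheckSum = pe.generate_checksum()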

    def is_exe(self):
        """Check whether the file is a standard executable.

        This will return true only if the file has the IMAGE_FILE_EXECUTABLE_IMAGE flag set
        and the IMAGE_FILE_DLL not set and the file does not appear to be a driver either.
        """

        EXE_flag = IMAGE_CHARACTERISTICS['IMAGE_FILE_EXECUTABLE_IMAGE']

        if (not self.is_dll()) and (not self.is_driver()) and (
                EXE_flag & self.FILE_HEADER.Characteristics) == EXE_flag:
            return True

        return False

    def is_dll(self):
        """Check whether the file is a standard DLL.

        This will return true only if the image has the IMAGE_FILE_DLL flag set.
        """

        DLL_flag = IMAGE_CHARACTERISTICS['IMAGE_FILE_DLL']

        if ( DLL_flag & self.FILE_HEADER.Characteristics) == DLL_flag:
            return True

        return False
    def is_driver(self):
        """Check whether the file is a Windows driver.

        This will return true only if there are reliable indicators of the image
        being a driver.
        """

        # Checking that the ImageBase field of the OptionalHeader is above or
        # equal to 0x80000000 (that is, whether it lies in the upper 2GB of
        # the address space, normally belonging to the kernel) is not a
        # reliable enough indicator. For instance, PEs that play the invalid
        # ImageBase trick to get relocated could be incorrectly assumed to be
        # drivers.

        # This is not reliable either...
        #
        # if any((section.Characteristics &
        #         SECTION_CHARACTERISTICS['IMAGE_SCN_MEM_NOT_PAGED']) for
        #         section in self.sections):
        #     return True

        # If the import directory was not parsed (fast_load=True), do it now.
        if not hasattr(self, 'DIRECTORY_ENTRY_IMPORT'):
            self.parse_data_directories(directories=[
                DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_IMPORT']])

        # If there's still no import directory (the PE doesn't have one or it's
        # malformed), give up.
        if not hasattr(self, 'DIRECTORY_ENTRY_IMPORT'):
            return False

        # self.DIRECTORY_ENTRY_IMPORT will now exist, although it may be empty.
        # If it imports from "ntoskrnl.exe" or other kernel components it should
        # be a driver
        #
        system_DLLs = set((b'ntoskrnl.exe', b'hal.dll', b'ndis.sys',
                           b'bootvid.dll', b'kdcom.dll'))
        if system_DLLs.intersection(
                [imp.dll.lower() for imp in self.DIRECTORY_ENTRY_IMPORT]):
            return True

        driver_like_section_names = set((b'page', b'paged'))
        if driver_like_section_names.intersection(
                [section.Name.lower().rstrip(b'\x00') for section in self.sections]) and (
                self.OPTIONAL_HEADER.Subsystem in (
                    SUBSYSTEM_TYPE['IMAGE_SUBSYSTEM_NATIVE'],
                    SUBSYSTEM_TYPE['IMAGE_SUBSYSTEM_NATIVE_WINDOWS'])):
            return True

        return False
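
    # A minimal classification sketch (illustrative only; 'sample.exe' is a
    # hypothetical path). Note that is_exe() already excludes DLLs and drivers,
    # so the order of the checks below is mostly cosmetic:
    #
    #   import pefile
    #   pe = pefile.PE('sample.exe')
    #   if pe.is_driver():
    #       kind = 'driver'
    #   elif pe.is_dll():
    #       kind = 'DLL'
    #   elif pe.is_exe():
    #       kind = 'executable'
    #   else:
    #       kind = 'unknown'
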
    def get_overlay_data_start_offset(self):
        """Get the offset of data appended to the file and not contained within
        the area described in the headers."""

        largest_offset_and_size = (0, 0)

        def update_if_sum_is_larger_and_within_file(offset_and_size, file_size=len(self.__data__)):
            if sum(offset_and_size) <= file_size and sum(offset_and_size) > sum(largest_offset_and_size):
                return offset_and_size
            return largest_offset_and_size

        if hasattr(self, 'OPTIONAL_HEADER'):
            largest_offset_and_size = update_if_sum_is_larger_and_within_file(
                (self.OPTIONAL_HEADER.get_file_offset(), self.FILE_HEADER.SizeOfOptionalHeader))

        for section in self.sections:
            largest_offset_and_size = update_if_sum_is_larger_and_within_file(
                (section.PointerToRawData, section.SizeOfRawData))

        skip_directories = [DIRECTORY_ENTRY['IMAGE_DIRECTORY_ENTRY_SECURITY']]

        for idx, directory in enumerate(self.OPTIONAL_HEADER.DATA_DIRECTORY):
            if idx in skip_directories:
                continue
            try:
                largest_offset_and_size = update_if_sum_is_larger_and_within_file(
                    (self.get_offset_from_rva(directory.VirtualAddress), directory.Size))
            # Ignore directories with RVA out of file
            except PEFormatError:
                continue

        if len(self.__data__) > sum(largest_offset_and_size):
            return sum(largest_offset_and_size)

        return None
    def get_overlay(self):
        """Get the data appended to the file and not contained within the area described in the headers."""

        overlay_data_offset = self.get_overlay_data_start_offset()

        if overlay_data_offset is not None:
            return self.__data__[overlay_data_offset:]

        return None
    def trim(self):
        """Return just the data defined by the PE headers, removing any overlaid data."""

        overlay_data_offset = self.get_overlay_data_start_offset()

        if overlay_data_offset is not None:
            return self.__data__[:overlay_data_offset]

        return self.__data__[:]
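
    # A minimal overlay-handling sketch (illustrative only; 'sample.exe' and
    # 'overlay.bin' are hypothetical paths). get_overlay() returns the appended
    # bytes (or None if there are none) and trim() returns the file data with
    # that overlay removed:
    #
    #   import pefile
    #   pe = pefile.PE('sample.exe')
    #   overlay = pe.get_overlay()
    #   if overlay is not None:
    #       with open('overlay.bin', 'wb') as f:
    #           f.write(overlay)
    #   trimmed = pe.trim()  # bytes covering only the header-described regions
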
    # According to http://corkami.blogspot.com/2010/01/parce-que-la-planche-aura-brule.html
    # if PointerToRawData is less than 0x200 it's rounded to zero. Loading the test file
    # in a debugger it's easy to verify that the PointerToRawData value of 1 is rounded
    # to zero. Hence we reproduce the behavior
    #
    # According to the document:
    # [ Microsoft Portable Executable and Common Object File Format Specification ]
    # "The alignment factor (in bytes) that is used to align the raw data of sections in
    #  the image file. The value should be a power of 2 between 512 and 64 K, inclusive.
    #  The default is 512. If the SectionAlignment is less than the architecture's page
    #  size, then FileAlignment must match SectionAlignment."
    #
    # The following is a hard-coded constant of the Windows loader
    def adjust_FileAlignment(self, val, file_alignment):
        if file_alignment > FILE_ALIGNMENT_HARDCODED_VALUE:
            # If it's not a power of two, report it:
            if self.FileAlignment_Warning is False and not power_of_two(file_alignment):
                self.__warnings.append(
                    'If FileAlignment > 0x200 it should be a power of 2. Value: %x' % (
                        file_alignment))
                self.FileAlignment_Warning = True

        return cache_adjust_FileAlignment(val, file_alignment)
    # According to the document:
    # [ Microsoft Portable Executable and Common Object File Format Specification ]
    # "The alignment (in bytes) of sections when they are loaded into memory. It must be
    #  greater than or equal to FileAlignment. The default is the page size for the
    #  architecture."
    #
    def adjust_SectionAlignment(self, val, section_alignment, file_alignment):
        if file_alignment < FILE_ALIGNMENT_HARDCODED_VALUE:
            if file_alignment != section_alignment and self.SectionAlignment_Warning is False:
                self.__warnings.append(
                    'If FileAlignment(%x) < 0x200 it should equal SectionAlignment(%x)' % (
                        file_alignment, section_alignment))
                self.SectionAlignment_Warning = True

        return cache_adjust_SectionAlignment(val, section_alignment, file_alignment)
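
    # Illustration of the rounding described above (a hedged sketch; the exact
    # results come from cache_adjust_FileAlignment, defined elsewhere in this
    # module). With the default FileAlignment of 0x200, a PointerToRawData of 1
    # is treated as 0, and larger offsets are expected to round down to a 0x200
    # boundary:
    #
    #   pe.adjust_FileAlignment(1, 0x200)      # expected: 0
    #   pe.adjust_FileAlignment(0x3f0, 0x200)  # expected: 0x200
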
def main():
    import sys

    usage = """\
pefile.py <filename>
pefile.py exports <filename>"""

    if not sys.argv[1:]:
        print(usage)
    elif sys.argv[1] == 'exports':
        if not sys.argv[2:]:
            sys.exit('error: <filename> required')
        pe = PE(sys.argv[2])
        for exp in pe.DIRECTORY_ENTRY_EXPORT.symbols:
            print(hex(pe.OPTIONAL_HEADER.ImageBase + exp.address), exp.name, exp.ordinal)
    else:
        print(PE(sys.argv[1]).dump_info())


if __name__ == '__main__':
    main()
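
# Command-line usage sketch, mirroring the usage string above ('some.dll' is a
# hypothetical path):
#
#   python pefile.py some.dll            # dump all parsed information
#   python pefile.py exports some.dll    # list exported symbols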