phmap.h 197 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
7477847794780478147824783478447854786478747884789479047914792479347944795479647974798479948004801480248034804480548064807480848094810481148124813481448154816481748184819482048214822482348244825482648274828482948304831483248334834483548364837483848394840484148424843484448454846484748484849485048514852485348544855485648574858485948604861486248634864486548664867486848694870487148724873487448754876487748784879488048814882488348844885488648874888488948904891489248934894489548964897489848994900490149024903490449054906490749084909491049114912491349144915491649174918491949204921492249234924492549264927492849294930493149324933493449354936493749384939494049414942494349444945494649474948494949504951495249534954495549564957495849594960496149624963496449654966496749684969497049714972497349744975497649774978497949804981498249834984498549864987498849894990499149924993499449954996499749984999500050015002500350045005500650075008500950105011501250135014501550165017501850195020502150225023502450255026502750285029503050315032503350345035503650375038503950405041504250435044504550465047504850495050505150525053505450555056505750585059506050615062506350645065506650675068506950705071507250735074507550765077507850795080508150825083508450855086508750885089509050915092509350945095509650975098509951005101510251035104510551065107510851095110511151125113511451155116511751185119512051215122512351245125512651275128512951305131513251335134513551365137513851395140514151425143514451455146514751485149515051515152515351545155515651575158515951605161516251635164516551665167516851695170517151725173517451755176517751785179518051815182518351845185518651875188518951905191519251935194519551965197519851995200520152025203520452055206520752085209521052115212521352145215521652175218521952205221522252235224522552265227
  1. #if !defined(phmap_h_guard_)
  2. #define phmap_h_guard_
  3. // ---------------------------------------------------------------------------
  4. // Copyright (c) 2019, Gregory Popovitch - [email protected]
  5. //
  6. // Licensed under the Apache License, Version 2.0 (the "License");
  7. // you may not use this file except in compliance with the License.
  8. // You may obtain a copy of the License at
  9. //
  10. // https://www.apache.org/licenses/LICENSE-2.0
  11. //
  12. // Unless required by applicable law or agreed to in writing, software
  13. // distributed under the License is distributed on an "AS IS" BASIS,
  14. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. // See the License for the specific language governing permissions and
  16. // limitations under the License.
  17. //
  18. // Includes work from abseil-cpp (https://github.com/abseil/abseil-cpp)
  19. // with modifications.
  20. //
  21. // Copyright 2018 The Abseil Authors.
  22. //
  23. // Licensed under the Apache License, Version 2.0 (the "License");
  24. // you may not use this file except in compliance with the License.
  25. // You may obtain a copy of the License at
  26. //
  27. // https://www.apache.org/licenses/LICENSE-2.0
  28. //
  29. // Unless required by applicable law or agreed to in writing, software
  30. // distributed under the License is distributed on an "AS IS" BASIS,
  31. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  32. // See the License for the specific language governing permissions and
  33. // limitations under the License.
  34. // ---------------------------------------------------------------------------
  35. // ---------------------------------------------------------------------------
  36. // IMPLEMENTATION DETAILS
  37. //
  38. // The table stores elements inline in a slot array. In addition to the slot
  39. // array the table maintains some control state per slot. The extra state is one
  40. // byte per slot and stores empty or deleted marks, or alternatively 7 bits from
  41. // the hash of an occupied slot. The table is split into logical groups of
  42. // slots, like so:
  43. //
  44. //      Group 1         Group 2         Group 3
  45. // +---------------+---------------+---------------+
  46. // | | | | | | | | | | | | | | | | | | | | | | | | |
  47. // +---------------+---------------+---------------+
  48. //
  49. // On lookup the hash is split into two parts:
  50. // - H2: 7 bits (those stored in the control bytes)
  51. // - H1: the rest of the bits
  52. // The groups are probed using H1. For each group the slots are matched to H2 in
  53. // parallel. Because H2 is 7 bits (128 states) and the number of slots per group
  54. // is low (8 or 16) in almost all cases a match in H2 is also a lookup hit.
  55. //
  56. // On insert, once the right group is found (as in lookup), its slots are
  57. // filled in order.
  58. //
  59. // On erase a slot is cleared. In case the group did not have any empty slots
  60. // before the erase, the erased slot is marked as deleted.
  61. //
  62. // Groups without empty slots (but maybe with deleted slots) extend the probe
  63. // sequence. The probing algorithm is quadratic. Given N the number of groups,
  64. // the probing function for the i'th probe is:
  65. //
  66. // P(0) = H1 % N
  67. //
  68. // P(i) = (P(i - 1) + i) % N
  69. //
  70. // This probing function guarantees that after N probes, all the groups of the
  71. // table will be probed exactly once.
  72. //
  73. // The control state and slot array are stored contiguously in a shared heap
  74. // allocation. The layout of this allocation is: `capacity()` control bytes,
  75. // one sentinel control byte, `Group::kWidth - 1` cloned control bytes,
  76. // <possible padding>, `capacity()` slots. The sentinel control byte is used in
  77. // iteration so we know when we reach the end of the table. The cloned control
  78. // bytes at the end of the table are cloned from the beginning of the table so
  79. // groups that begin near the end of the table can see a full group. In cases in
  80. // which there are more than `capacity()` cloned control bytes, the extra bytes
  81. // are `kEmpty`, and these ensure that we always see at least one empty slot and
  82. // can stop an unsuccessful search.
  83. // ---------------------------------------------------------------------------
  84. #ifdef _MSC_VER
  85. #pragma warning(push)
  86. #pragma warning(disable : 4127) // conditional expression is constant
  87. #pragma warning(disable : 4324) // structure was padded due to alignment specifier
  88. #pragma warning(disable : 4514) // unreferenced inline function has been removed
  89. #pragma warning(disable : 4623) // default constructor was implicitly defined as deleted
  90. #pragma warning(disable : 4625) // copy constructor was implicitly defined as deleted
  91. #pragma warning(disable : 4626) // assignment operator was implicitly defined as deleted
  92. #pragma warning(disable : 4710) // function not inlined
  93. #pragma warning(disable : 4711) // selected for automatic inline expansion
  94. #pragma warning(disable : 4820) // '6' bytes padding added after data member
  95. #pragma warning(disable : 4868) // compiler may not enforce left-to-right evaluation order in braced initializer list
  96. #pragma warning(disable : 5027) // move assignment operator was implicitly defined as deleted
  97. #pragma warning(disable : 5045) // Compiler will insert Spectre mitigation for memory load if /Qspectre switch specified
  98. #endif
  99. #include <algorithm>
  100. #include <cmath>
  101. #include <cstring>
  102. #include <iterator>
  103. #include <limits>
  104. #include <memory>
  105. #include <tuple>
  106. #include <type_traits>
  107. #include <utility>
  108. #include <array>
  109. #include <cassert>
  110. #include <atomic>
  111. #include "llpreprocessor.h" // For LL_INLINE
  112. // This is a personal speed optimization of phmap containers that removes the
  113. // fiddling with the key hashes: those shall be good enough already (the hash
  114. // functions in use by the viewer code have been optimized for speed and
  115. // efficiency, so no need to complexify the key hash generation and slow it
  116. // down for no practical benefit). HB
  117. #define PHMAP_NO_MIXING
  118. // Define to 1 if you wish to allow boost shared mutexes use. Not enabled here,
  119. // since anyway, we use C++17 to compile the viewer and phmap can therefore use
  120. // the standard C++17 shared mutexes... HB
  121. #define PHMAP_USE_BOOST_MUTEX 0
  122. #include "phmap_fwd_decl.h"
  123. #include "phmap_utils.h"
  124. #include "phmap_base.h"
  125. #if PHMAP_HAVE_STD_STRING_VIEW
  126. #include <string_view>
  127. #endif
  128. namespace phmap {
  129. namespace priv {
  130. // --------------------------------------------------------------------------
  131. template <typename AllocType>
  132. void SwapAlloc(AllocType& lhs, AllocType& rhs,
  133. std::true_type /* propagate_on_container_swap */) {
  134. using std::swap;
  135. swap(lhs, rhs);
  136. }
  137. template <typename AllocType>
  138. void SwapAlloc(AllocType& /*lhs*/, AllocType& /*rhs*/,
  139. std::false_type /* propagate_on_container_swap */) {}
  140. // --------------------------------------------------------------------------
  141. template <size_t Width>
  142. class probe_seq
  143. {
  144. public:
  145. probe_seq(size_t hashval, size_t mask) {
  146. assert(((mask + 1) & mask) == 0 && "not a mask");
  147. mask_ = mask;
  148. offset_ = hashval & mask_;
  149. }
  150. size_t offset() const { return offset_; }
  151. size_t offset(size_t i) const { return (offset_ + i) & mask_; }
  152. void next() {
  153. index_ += Width;
  154. offset_ += index_;
  155. offset_ &= mask_;
  156. }
  157. // 0-based probe index. The i-th probe in the probe sequence.
  158. size_t getindex() const { return index_; }
  159. private:
  160. size_t mask_;
  161. size_t offset_;
  162. size_t index_ = 0;
  163. };
  164. // --------------------------------------------------------------------------
// SFINAE probe used by IsDecomposable below. operator() is only declared
// (never defined): its return type is well-formed only when Hash is callable
// with a const PassedKey& AND Eq can compare a const ContainerKey& against a
// const PassedKey&. If either expression is ill-formed, substitution fails.
template <class ContainerKey, class Hash, class Eq>
struct RequireUsableKey
{
    template <class PassedKey, class... Args>
    std::pair<
        decltype(std::declval<const Hash&>()(std::declval<const PassedKey&>())),
        decltype(std::declval<const Eq&>()(std::declval<const ContainerKey&>(),
                                           std::declval<const PassedKey&>()))>*
    operator()(const PassedKey&, const Args&...) const;
};
  175. // --------------------------------------------------------------------------
// Detection trait: true_type when Policy::apply() accepts the argument pack
// Ts... together with a RequireUsableKey functor — i.e. the arguments can be
// decomposed into a key usable with Hash and Eq. The extra parameter E is the
// void_t detection slot; callers instantiate with E = void.
template <class E, class Policy, class Hash, class Eq, class... Ts>
struct IsDecomposable : std::false_type {};

// Specialization selected when the Policy::apply(...) expression below is
// well-formed (classic void_t detection idiom).
template <class Policy, class Hash, class Eq, class... Ts>
struct IsDecomposable<
    phmap::void_t<decltype(
        Policy::apply(RequireUsableKey<typename Policy::key_type, Hash, Eq>(),
                      std::declval<Ts>()...))>,
    Policy, Hash, Eq, Ts...> : std::true_type {};
  184. // TODO(alkis): Switch to std::is_nothrow_swappable when gcc/clang supports it.
  185. // --------------------------------------------------------------------------
// True when swapping two T lvalues cannot throw. ADL-aware: a user-provided
// swap() found by argument-dependent lookup is considered, not just std::swap.
// The defaulted tag parameter means callers can simply write
// IsNoThrowSwappable<T>() when T is known to be swappable.
template <class T>
constexpr bool IsNoThrowSwappable(std::true_type = {} /* is_swappable */) {
    using std::swap;
    return noexcept(swap(std::declval<T&>(), std::declval<T&>()));
}

// Overload for non-swappable T: trivially false (never noexcept-swappable).
template <class T>
constexpr bool IsNoThrowSwappable(std::false_type /* is_swappable */) {
    return false;
}
  195. // --------------------------------------------------------------------------
  196. template <typename T>
  197. LL_INLINE uint32_t TrailingZeros(T x) {
  198. uint32_t res;
  199. PHMAP_IF_CONSTEXPR(sizeof(T) == 8)
  200. res = base_internal::CountTrailingZerosNonZero64(static_cast<uint64_t>(x));
  201. else
  202. res = base_internal::CountTrailingZerosNonZero32(static_cast<uint32_t>(x));
  203. return res;
  204. }
  205. // --------------------------------------------------------------------------
  206. template <typename T>
  207. LL_INLINE uint32_t LeadingZeros(T x) {
  208. uint32_t res;
  209. PHMAP_IF_CONSTEXPR(sizeof(T) == 8)
  210. res = base_internal::CountLeadingZeros64(static_cast<uint64_t>(x));
  211. else
  212. res = base_internal::CountLeadingZeros32(static_cast<uint32_t>(x));
  213. return res;
  214. }
  215. // --------------------------------------------------------------------------
  216. // An abstraction over a bitmask. It provides an easy way to iterate through the
  217. // indexes of the set bits of a bitmask. When Shift=0 (platforms with SSE),
  218. // this is a true bitmask. On non-SSE platforms, the arithmetic used to
  219. // emulate the SSE behavior works in bytes (Shift=3) and leaves each bytes as
  220. // either 0x00 or 0x80.
  221. //
  222. // For example:
  223. // for (int i : BitMask<uint32_t, 16>(0x5)) -> yields 0, 2
  224. // for (int i : BitMask<uint64_t, 8, 3>(0x0000000080800000)) -> yields 2, 3
  225. // --------------------------------------------------------------------------
template <class T, int SignificantBits, int Shift = 0>
class BitMask
{
    // T must be unsigned. Shift is 0 (SSE: one bit per slot) or 3 (portable
    // fallback: one byte per slot, each byte 0x00 or 0x80), so ">> Shift"
    // converts a bit index into a slot index in both modes.
    static_assert(std::is_unsigned<T>::value, "");
    static_assert(Shift == 0 || Shift == 3, "");
public:
    // These are useful for unit tests (gunit).
    using value_type = int;
    using iterator = BitMask;
    using const_iterator = BitMask;
    explicit BitMask(T mask) : mask_(mask) {}
    BitMask& operator++() { // ++iterator
        mask_ &= (mask_ - 1); // clear the least significant bit set
        return *this;
    }
    // True while at least one bit remains set (loop/iteration guard).
    explicit operator bool() const { return mask_ != 0; }
    // Dereferencing yields the slot index of the lowest set bit.
    uint32_t operator*() const { return LowestBitSet(); }
    uint32_t LowestBitSet() const {
        return priv::TrailingZeros(mask_) >> Shift;
    }
    uint32_t HighestBitSet() const {
        return (sizeof(T) * CHAR_BIT - priv::LeadingZeros(mask_) - 1) >> Shift;
    }
    // Range-for support: begin() is the current mask; end() is the empty
    // mask, reached once operator++ has cleared every set bit.
    BitMask begin() const { return *this; }
    BitMask end() const { return BitMask(0); }
    uint32_t TrailingZeros() const {
        return priv::TrailingZeros(mask_) >> Shift;
    }
    uint32_t LeadingZeros() const {
        // Shift out the bits above the significant region first, so the count
        // is relative to SignificantBits slots rather than the full width of T.
        constexpr uint32_t total_significant_bits = SignificantBits << Shift;
        constexpr uint32_t extra_bits = sizeof(T) * 8 - total_significant_bits;
        return priv::LeadingZeros(mask_ << extra_bits) >> Shift;
    }
private:
    friend bool operator==(const BitMask& a, const BitMask& b) {
        return a.mask_ == b.mask_;
    }
    friend bool operator!=(const BitMask& a, const BitMask& b) {
        return a.mask_ != b.mask_;
    }
    T mask_;
};
  268. // --------------------------------------------------------------------------
// Control byte: one byte of per-slot metadata. Non-negative values hold the
// 7-bit hash fragment (H2) of a full slot; negative values are the special
// markers in the Ctrl enum below.
using ctrl_t = signed char;
// The 7-bit hash fragment type stored in a full control byte.
using h2_t = uint8_t;

// --------------------------------------------------------------------------
// The values here are selected for maximum performance. See the static asserts
// below for details.
// --------------------------------------------------------------------------
enum Ctrl : ctrl_t
{
    kEmpty = -128,   // 0b10000000 or 0x80
    kDeleted = -2,   // 0b11111110 or 0xfe
    kSentinel = -1,  // 0b11111111 or 0xff
};

static_assert(
    kEmpty & kDeleted & kSentinel & 0x80,
    "Special markers need to have the MSB to make checking for them efficient");
static_assert(kEmpty < kSentinel && kDeleted < kSentinel,
              "kEmpty and kDeleted must be smaller than kSentinel to make the "
              "SIMD test of IsEmptyOrDeleted() efficient");
static_assert(kSentinel == -1,
              "kSentinel must be -1 to elide loading it from memory into SIMD "
              "registers (pcmpeqd xmm, xmm)");
static_assert(kEmpty == -128,
              "kEmpty must be -128 to make the SIMD check for its "
              "existence efficient (psignb xmm, xmm)");
static_assert(~kEmpty & ~kDeleted & kSentinel & 0x7F,
              "kEmpty and kDeleted must share an unset bit that is not shared "
              "by kSentinel to make the scalar test for MatchEmptyOrDeleted() "
              "efficient");
static_assert(kDeleted == -2,
              "kDeleted must be -2 to make the implementation of "
              "ConvertSpecialToEmptyAndFullToDeleted efficient");
  300. // --------------------------------------------------------------------------
  301. // A single block of empty control bytes for tables without any slots allocated.
  302. // This enables removing a branch in the hot path of find().
  303. // --------------------------------------------------------------------------
  304. LL_INLINE ctrl_t* EmptyGroup() {
  305. alignas(16) static constexpr ctrl_t empty_group[] = {
  306. kSentinel, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty,
  307. kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty, kEmpty};
  308. return const_cast<ctrl_t*>(empty_group);
  309. }
  310. // --------------------------------------------------------------------------
  311. LL_INLINE size_t HashSeed(const ctrl_t* ctrl) {
  312. // The low bits of the pointer have little or no entropy because of
  313. // alignment. We shift the pointer to try to use higher entropy bits. A
  314. // good number seems to be 12 bits, because that aligns with page size.
  315. return reinterpret_cast<uintptr_t>(ctrl) >> 12;
  316. }
#ifdef PHMAP_NON_DETERMINISTIC
// H1: the upper bits of the hash, used to select the probe sequence.
LL_INLINE size_t H1(size_t hashval, const ctrl_t* ctrl) {
    // use ctrl_ pointer to add entropy to ensure
    // non-deterministic iteration order.
    return (hashval >> 7) ^ HashSeed(ctrl);
}
#else
// H1: the upper bits of the hash, used to select the probe sequence.
// The ctrl pointer is unused here, giving deterministic iteration order.
LL_INLINE size_t H1(size_t hashval, const ctrl_t* ) {
    return (hashval >> 7);
}
#endif
// H2: the low 7 bits of the hash, stored in the control byte of a full slot.
LL_INLINE ctrl_t H2(size_t hashval) { return (ctrl_t)(hashval & 0x7F); }

// Control-byte classification. Full slots are exactly the non-negative
// bytes; everything below kSentinel (-1) is empty or deleted.
LL_INLINE bool IsEmpty(ctrl_t c) { return c == kEmpty; }
LL_INLINE bool IsFull(ctrl_t c) { return c >= static_cast<ctrl_t>(0); }
LL_INLINE bool IsDeleted(ctrl_t c) { return c == kDeleted; }
LL_INLINE bool IsEmptyOrDeleted(ctrl_t c) { return c < kSentinel; }
  333. #if PHMAP_HAVE_SSE2
  334. #ifdef _MSC_VER
  335. #pragma warning(push)
  336. #pragma warning(disable : 4365) // conversion from 'int' to 'T', signed/unsigned mismatch
  337. #endif
  338. // --------------------------------------------------------------------------
  339. // https://github.com/abseil/abseil-cpp/issues/209
  340. // https://gcc.gnu.org/bugzilla/show_bug.cgi?id=87853
  341. // _mm_cmpgt_epi8 is broken under GCC with -funsigned-char
  342. // Work around this by using the portable implementation of Group
  343. // when using -funsigned-char under GCC.
  344. // --------------------------------------------------------------------------
LL_INLINE __m128i _mm_cmpgt_epi8_fixed(__m128i a, __m128i b) {
#if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Woverflow"
    // Under GCC with -funsigned-char, _mm_cmpgt_epi8 is miscompiled (see the
    // links in the comment above), so do the signed comparison manually:
    // a > b iff the saturating difference b - a has its sign bit set.
    if (std::is_unsigned<char>::value) {
        const __m128i mask = _mm_set1_epi8(static_cast<char>(0x80));
        const __m128i diff = _mm_subs_epi8(b, a);
        return _mm_cmpeq_epi8(_mm_and_si128(diff, mask), mask);
    }
#pragma GCC diagnostic pop
#endif
    return _mm_cmpgt_epi8(a, b);
}
  358. // --------------------------------------------------------------------------
  359. // --------------------------------------------------------------------------
// SSE2 implementation of a control-byte group: 16 bytes inspected in
// parallel with a single vector load and byte-wise compares.
struct GroupSse2Impl
{
    enum { kWidth = 16 };  // the number of slots per group

    explicit GroupSse2Impl(const ctrl_t* pos) {
        // Unaligned load: ctrl arrays are not required to be 16-aligned here.
        ctrl = _mm_loadu_si128(reinterpret_cast<const __m128i*>(pos));
    }

    // Returns a bitmask representing the positions of slots that match hash.
    // ----------------------------------------------------------------------
    BitMask<uint32_t, kWidth> Match(h2_t hash) const {
        auto match = _mm_set1_epi8((char)hash);
        return BitMask<uint32_t, kWidth>(
            static_cast<uint32_t>(_mm_movemask_epi8(_mm_cmpeq_epi8(match, ctrl))));
    }

    // Returns a bitmask representing the positions of empty slots.
    // ------------------------------------------------------------
    BitMask<uint32_t, kWidth> MatchEmpty() const {
#if PHMAP_HAVE_SSSE3
        // This only works because kEmpty is -128: psignb(x, x) negates each
        // byte by its own sign, and -128 is the only value whose negation
        // keeps the sign bit set (overflow), so movemask picks exactly kEmpty.
        return BitMask<uint32_t, kWidth>(
            static_cast<uint32_t>(_mm_movemask_epi8(_mm_sign_epi8(ctrl, ctrl))));
#else
        return Match(static_cast<h2_t>(kEmpty));
#endif
    }

    // Returns a bitmask representing the positions of empty or deleted slots
    // (exactly the control bytes strictly below kSentinel == -1).
    // -----------------------------------------------------------------------
    BitMask<uint32_t, kWidth> MatchEmptyOrDeleted() const {
        auto special = _mm_set1_epi8(static_cast<char>(kSentinel));
        return BitMask<uint32_t, kWidth>(
            static_cast<uint32_t>(_mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl))));
    }

    // Returns the number of consecutive empty-or-deleted slots at the start
    // (lowest addresses) of the group. Adding 1 to the match mask turns its
    // run of trailing ones into a carry, so TrailingZeros counts that run.
    // ----------------------------------------------------------------------
    uint32_t CountLeadingEmptyOrDeleted() const {
        auto special = _mm_set1_epi8(static_cast<char>(kSentinel));
        return TrailingZeros(
            static_cast<uint32_t>(_mm_movemask_epi8(_mm_cmpgt_epi8_fixed(special, ctrl)) + 1));
    }

    // Rewrites the group at dst: special bytes (empty/deleted/sentinel)
    // become kEmpty and full bytes become kDeleted.
    // ----------------------------------------------------------------------
    void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
        auto msbs = _mm_set1_epi8(static_cast<char>(-128));
        auto x126 = _mm_set1_epi8(126);
#if PHMAP_HAVE_SSSE3
        // pshufb zeroes a byte when its index has the MSB set (specials);
        // OR-ing the MSB then yields 0x80 (kEmpty) for specials and
        // 126 | 0x80 == 0xfe (kDeleted) for full bytes.
        auto res = _mm_or_si128(_mm_shuffle_epi8(x126, ctrl), msbs);
#else
        auto zero = _mm_setzero_si128();
        auto special_mask = _mm_cmpgt_epi8_fixed(zero, ctrl);
        auto res = _mm_or_si128(msbs, _mm_andnot_si128(special_mask, x126));
#endif
        _mm_storeu_si128(reinterpret_cast<__m128i*>(dst), res);
    }

    __m128i ctrl;
};
  413. #ifdef _MSC_VER
  414. #pragma warning(pop)
  415. #endif
  416. #endif // PHMAP_HAVE_SSE2
  417. // --------------------------------------------------------------------------
  418. // --------------------------------------------------------------------------
// Portable (non-SSE) group implementation: 8 control bytes packed into one
// uint64_t, inspected with byte-parallel bit tricks. Result masks hold 0x00
// or 0x80 per byte, hence the Shift=3 BitMask.
struct GroupPortableImpl
{
    enum { kWidth = 8 };  // slots per group (one byte each within a uint64_t)

    explicit GroupPortableImpl(const ctrl_t* pos)
        : ctrl(little_endian::Load64(pos)) {}

    BitMask<uint64_t, kWidth, 3> Match(h2_t hash) const {
        // For the technique, see:
        // http://graphics.stanford.edu/~seander/bithacks.html#ValueInWord
        // (Determine if a word has a byte equal to n).
        //
        // Caveat: there are false positives but:
        // - they only occur if there is a real match
        // - they never occur on kEmpty, kDeleted, kSentinel
        // - they will be handled gracefully by subsequent checks in code
        //
        // Example:
        //   v = 0x1716151413121110
        //   hash = 0x12
        //   retval = (v - lsbs) & ~v & msbs = 0x0000000080800000
        constexpr uint64_t msbs = 0x8080808080808080ULL;
        constexpr uint64_t lsbs = 0x0101010101010101ULL;
        auto x = ctrl ^ (lsbs * hash);  // bytes equal to hash become 0x00
        return BitMask<uint64_t, kWidth, 3>((x - lsbs) & ~x & msbs);
    }

    // Selects bytes with the MSB set and bit 1 clear: kEmpty (0x80) matches,
    // kDeleted (0xfe) and kSentinel (0xff) do not.
    BitMask<uint64_t, kWidth, 3> MatchEmpty() const {
        constexpr uint64_t msbs = 0x8080808080808080ULL;
        return BitMask<uint64_t, kWidth, 3>((ctrl & (~ctrl << 6)) & msbs);
    }

    // Selects bytes with the MSB set and bit 0 clear: kEmpty (0x80) and
    // kDeleted (0xfe) match, kSentinel (0xff) and full bytes do not.
    BitMask<uint64_t, kWidth, 3> MatchEmptyOrDeleted() const {
        constexpr uint64_t msbs = 0x8080808080808080ULL;
        return BitMask<uint64_t, kWidth, 3>((ctrl & (~ctrl << 7)) & msbs);
    }

    uint32_t CountLeadingEmptyOrDeleted() const {
        // Bit 0 of each byte of (~ctrl & (ctrl >> 7)) marks empty-or-deleted
        // (MSB set, bit 0 clear). OR-ing `gaps` fills bits 1..7 so the +1
        // carry propagates through exactly the low-address run of matching
        // bytes; dividing the trailing-zero count by 8 converts bits to bytes.
        constexpr uint64_t gaps = 0x00FEFEFEFEFEFEFEULL;
        return (uint32_t)((TrailingZeros(((~ctrl & (ctrl >> 7)) | gaps) + 1) + 7) >> 3);
    }

    // Rewrites the group at dst: special bytes become kEmpty (0x80) and full
    // bytes become kDeleted (0xfe).
    void ConvertSpecialToEmptyAndFullToDeleted(ctrl_t* dst) const {
        constexpr uint64_t msbs = 0x8080808080808080ULL;
        constexpr uint64_t lsbs = 0x0101010101010101ULL;
        auto x = ctrl & msbs;            // per-byte MSB: 0x80 for specials, 0 for full
        auto res = (~x + (x >> 7)) & ~lsbs;
        little_endian::Store64(dst, res);
    }

    uint64_t ctrl;
};
  464. #if PHMAP_HAVE_SSE2
  465. using Group = GroupSse2Impl;
  466. #else
  467. using Group = GroupPortableImpl;
  468. #endif
// The number of cloned control bytes that we copy from the beginning to the
// end of the control bytes array, so that a full group load starting at any
// slot index stays within the mirrored region.
// -------------------------------------------------------------------------
constexpr size_t NumClonedBytes() { return Group::kWidth - 1; }
  473. template <class Policy, class Hash, class Eq, class Alloc>
  474. class raw_hash_set;
  475. LL_INLINE bool IsValidCapacity(size_t n) { return ((n + 1) & n) == 0 && n > 0; }
  476. // --------------------------------------------------------------------------
  477. // PRECONDITION:
  478. // IsValidCapacity(capacity)
  479. // ctrl[capacity] == kSentinel
  480. // ctrl[i] != kSentinel for all i < capacity
  481. // Applies mapping for every byte in ctrl:
  482. // DELETED -> EMPTY
  483. // EMPTY -> EMPTY
  484. // FULL -> DELETED
  485. // --------------------------------------------------------------------------
LL_INLINE void ConvertDeletedToEmptyAndFullToDeleted(
    ctrl_t* ctrl, size_t capacity)
{
    assert(ctrl[capacity] == kSentinel);
    assert(IsValidCapacity(capacity));
    // Rewrite one group at a time over the capacity+1 control bytes; the
    // sentinel byte gets clobbered by the conversion and is restored below.
    for (ctrl_t* pos = ctrl; pos != ctrl + capacity + 1; pos += Group::kWidth) {
        Group{pos}.ConvertSpecialToEmptyAndFullToDeleted(pos);
    }
    // Copy the cloned ctrl bytes (the mirror of the first group at the end).
    std::memcpy(ctrl + capacity + 1, ctrl, Group::kWidth);
    ctrl[capacity] = kSentinel;
}
  498. // --------------------------------------------------------------------------
  499. // Rounds up the capacity to the next power of 2 minus 1, with a minimum of 1.
  500. // --------------------------------------------------------------------------
  501. LL_INLINE size_t NormalizeCapacity(size_t n)
  502. {
  503. return n ? ~size_t{} >> LeadingZeros(n) : 1;
  504. }
  505. // --------------------------------------------------------------------------
  506. // We use 7/8th as maximum load factor.
  507. // For 16-wide groups, that gives an average of two empty slots per group.
  508. // --------------------------------------------------------------------------
// Returns the maximum number of elements a table of `capacity` may hold
// before it must grow: 7/8ths of capacity, with a special case for the
// smallest 8-wide table.
LL_INLINE size_t CapacityToGrowth(size_t capacity)
{
    assert(IsValidCapacity(capacity));
    // `capacity*7/8`
    PHMAP_IF_CONSTEXPR (Group::kWidth == 8) {
        if (capacity == 7) {
            // x-x/8 does not work when x==7.
            return 6;
        }
    }
    return capacity - capacity / 8;
}
  521. // --------------------------------------------------------------------------
// From desired "growth" to a lower bound of the necessary capacity.
// The result might not be a valid capacity and may require NormalizeCapacity().
  524. // --------------------------------------------------------------------------
LL_INLINE size_t GrowthToLowerboundCapacity(size_t growth)
{
    // `growth*8/7`
    PHMAP_IF_CONSTEXPR (Group::kWidth == 8) {
        if (growth == 7) {
            // x+(x-1)/7 does not work when x==7.
            return 8;
        }
    }
    // Signed arithmetic so that growth == 0 yields 0: (-1)/7 truncates to 0
    // instead of underflowing as size_t would.
    return growth + static_cast<size_t>((static_cast<int64_t>(growth) - 1) / 7);
}
namespace hashtable_debug_internal {

// If it is a map, call get<0>().
using std::get;
template <typename T, typename = typename T::mapped_type>
auto GetKey(const typename T::value_type& pair, int) -> decltype(get<0>(pair)) {
    return get<0>(pair);
}

// If it is not a map, return the value directly. (The int/char trailing
// parameter ranks the overloads: callers pass 0, so the `int` map overload
// is preferred whenever T::mapped_type exists.)
template <typename T>
const typename T::key_type& GetKey(const typename T::key_type& key, char) {
    return key;
}

// --------------------------------------------------------------------------
// Containers should specialize this to provide debug information for that
// container.
// --------------------------------------------------------------------------
template <class Container, typename Enabler = void>
struct HashtableDebugAccess
{
    // Returns the number of probes required to find `key` in `c`. The "number of
    // probes" is a concept that can vary by container. Implementations should
    // return 0 when `key` was found in the minimum number of operations and
    // should increment the result for each non-trivial operation required to find
    // `key`.
    //
    // The default implementation uses the bucket api from the standard and thus
    // works for `std::unordered_*` containers. An empty container reports {}
    // (zero) without touching the bucket interface.
    // --------------------------------------------------------------------------
    static size_t GetNumProbes(const Container& c,
                               const typename Container::key_type& key) {
        if (!c.bucket_count()) return {};
        size_t num_probes = 0;
        size_t bucket = c.bucket(key);
        // Walk the key's bucket; each hop before a hit (or bucket end) is
        // one probe.
        for (auto it = c.begin(bucket), e = c.end(bucket);; ++it, ++num_probes) {
            if (it == e) return num_probes;
            if (c.key_eq()(key, GetKey<Container>(*it, 0))) return num_probes;
        }
    }
};

}  // namespace hashtable_debug_internal
  576. // ----------------------------------------------------------------------------
  577. // I N F O Z S T U B S
  578. // ----------------------------------------------------------------------------
// Sampling/telemetry stubs: every hook below is a visible no-op, so the
// hashtablez instrumentation compiles away entirely.
struct HashtablezInfo
{
    void PrepareForSampling() {}
};

LL_INLINE void RecordRehashSlow(HashtablezInfo*, size_t ) {}
static LL_INLINE void RecordInsertSlow(HashtablezInfo* , size_t, size_t ) {}
static LL_INLINE void RecordEraseSlow(HashtablezInfo*) {}
static LL_INLINE HashtablezInfo* SampleSlow(int64_t*) { return nullptr; }
static LL_INLINE void UnsampleSlow(HashtablezInfo* ) {}

// Handle whose record methods all do nothing; carried by value through the
// table implementation at zero cost.
class HashtablezInfoHandle
{
public:
    LL_INLINE void RecordStorageChanged(size_t , size_t ) {}
    LL_INLINE void RecordRehash(size_t ) {}
    LL_INLINE void RecordInsert(size_t , size_t ) {}
    LL_INLINE void RecordErase() {}
    friend LL_INLINE void swap(HashtablezInfoHandle& ,
                               HashtablezInfoHandle& ) noexcept {}
};

static LL_INLINE HashtablezInfoHandle Sample() { return HashtablezInfoHandle(); }

// Degenerate sampler: hands out one static info object and never iterates
// or disposes anything.
class HashtablezSampler
{
public:
    // Returns a global Sampler.
    static HashtablezSampler& Global() { static HashtablezSampler hzs; return hzs; }
    HashtablezInfo* Register() { static HashtablezInfo info; return &info; }
    void Unregister(HashtablezInfo* ) {}

    using DisposeCallback = void (*)(const HashtablezInfo&);
    DisposeCallback SetDisposeCallback(DisposeCallback ) { return nullptr; }
    int64_t Iterate(const std::function<void(const HashtablezInfo& stack)>& ) { return 0; }
};

// Global sampling knobs: accepted and ignored.
static LL_INLINE void SetHashtablezEnabled(bool ) {}
static LL_INLINE void SetHashtablezSampleParameter(int32_t ) {}
static LL_INLINE void SetHashtablezMaxSamples(int32_t ) {}
namespace memory_internal {

// Constructs T into uninitialized storage pointed by `ptr` using the args
// specified in the tuple, expanded through the index_sequence.
// ----------------------------------------------------------------------------
template <class Alloc, class T, class Tuple, size_t... I>
void ConstructFromTupleImpl(Alloc* alloc, T* ptr, Tuple&& t,
                            phmap::index_sequence<I...>) {
    phmap::allocator_traits<Alloc>::construct(
        *alloc, ptr, std::get<I>(std::forward<Tuple>(t))...);
}

// Callable wrapper: builds a T from forwarded args and passes it to the
// wrapped callable `f`.
template <class T, class F>
struct WithConstructedImplF {
    template <class... Args>
    decltype(std::declval<F>()(std::declval<T>())) operator()(
        Args&&... args) const {
        return std::forward<F>(f)(T(std::forward<Args>(args)...));
    }
    F&& f;  // reference to the callable; only valid for the call's duration
};

// Invokes `f` with a T constructed from the elements of tuple `t`.
template <class T, class Tuple, size_t... Is, class F>
decltype(std::declval<F>()(std::declval<T>())) WithConstructedImpl(
    Tuple&& t, phmap::index_sequence<Is...>, F&& f) {
    return WithConstructedImplF<T, F>{std::forward<F>(f)}(
        std::get<Is>(std::forward<Tuple>(t))...);
}

template <class T, size_t... Is>
auto TupleRefImpl(T&& t, phmap::index_sequence<Is...>)
    -> decltype(std::forward_as_tuple(std::get<Is>(std::forward<T>(t))...)) {
    return std::forward_as_tuple(std::get<Is>(std::forward<T>(t))...);
}

// Returns a tuple of references to the elements of the input tuple. T must be a
// tuple.
// ----------------------------------------------------------------------------
template <class T>
auto TupleRef(T&& t) -> decltype(
    TupleRefImpl(std::forward<T>(t),
                 phmap::make_index_sequence<
                     std::tuple_size<typename std::decay<T>::type>::value>())) {
    return TupleRefImpl(
        std::forward<T>(t),
        phmap::make_index_sequence<
            std::tuple_size<typename std::decay<T>::type>::value>());
}

// Unpacks a {tuple<K>, V} pair and invokes `f` as
// f(key, piecewise_construct, tuple<K>, V), moving both halves of `p`.
template <class F, class K, class V>
decltype(std::declval<F>()(std::declval<const K&>(), std::piecewise_construct,
                           std::declval<std::tuple<K>>(), std::declval<V>()))
DecomposePairImpl(F&& f, std::pair<std::tuple<K>, V> p) {
    const auto& key = std::get<0>(p.first);
    return std::forward<F>(f)(key, std::piecewise_construct, std::move(p.first),
                              std::move(p.second));
}

} // namespace memory_internal
  665. // ----------------------------------------------------------------------------
  666. // R A W _ H A S H _ S E T
  667. // ----------------------------------------------------------------------------
  668. // An open-addressing
  669. // hashtable with quadratic probing.
  670. //
  671. // This is a low level hashtable on top of which different interfaces can be
  672. // implemented, like flat_hash_set, node_hash_set, string_hash_set, etc.
  673. //
  674. // The table interface is similar to that of std::unordered_set. Notable
  675. // differences are that most member functions support heterogeneous keys when
  676. // BOTH the hash and eq functions are marked as transparent. They do so by
  677. // providing a typedef called `is_transparent`.
  678. //
  679. // When heterogeneous lookup is enabled, functions that take key_type act as if
  680. // they have an overload set like:
  681. //
  682. // iterator find(const key_type& key);
  683. // template <class K>
  684. // iterator find(const K& key);
  685. //
  686. // size_type erase(const key_type& key);
  687. // template <class K>
  688. // size_type erase(const K& key);
  689. //
  690. // std::pair<iterator, iterator> equal_range(const key_type& key);
  691. // template <class K>
  692. // std::pair<iterator, iterator> equal_range(const K& key);
  693. //
  694. // When heterogeneous lookup is disabled, only the explicit `key_type` overloads
  695. // exist.
  696. //
  697. // find() also supports passing the hash explicitly:
  698. //
  699. // iterator find(const key_type& key, size_t hash);
  700. // template <class U>
  701. // iterator find(const U& key, size_t hash);
  702. //
  703. // In addition the pointer to element and iterator stability guarantees are
  704. // weaker: all iterators and pointers are invalidated after a new element is
  705. // inserted.
  706. //
  707. // IMPLEMENTATION DETAILS
  708. //
  709. // The table stores elements inline in a slot array. In addition to the slot
  710. // array the table maintains some control state per slot. The extra state is one
  711. // byte per slot and stores empty or deleted marks, or alternatively 7 bits from
  712. // the hash of an occupied slot. The table is split into logical groups of
  713. // slots, like so:
  714. //
  715. // Group 1 Group 2 Group 3
  716. // +---------------+---------------+---------------+
  717. // | | | | | | | | | | | | | | | | | | | | | | | | |
  718. // +---------------+---------------+---------------+
  719. //
  720. // On lookup the hash is split into two parts:
  721. // - H2: 7 bits (those stored in the control bytes)
  722. // - H1: the rest of the bits
  723. // The groups are probed using H1. For each group the slots are matched to H2 in
  724. // parallel. Because H2 is 7 bits (128 states) and the number of slots per group
  725. // is low (8 or 16) in almost all cases a match in H2 is also a lookup hit.
  726. //
  727. // On insert, once the right group is found (as in lookup), its slots are
  728. // filled in order.
  729. //
  730. // On erase a slot is cleared. In case the group did not have any empty slots
  731. // before the erase, the erased slot is marked as deleted.
  732. //
  733. // Groups without empty slots (but maybe with deleted slots) extend the probe
  734. // sequence. The probing algorithm is quadratic. Given N the number of groups,
  735. // the probing function for the i'th probe is:
  736. //
  737. // P(0) = H1 % N
  738. //
  739. // P(i) = (P(i - 1) + i) % N
  740. //
  741. // This probing function guarantees that after N probes, all the groups of the
  742. // table will be probed exactly once.
  743. // ----------------------------------------------------------------------------
  744. template <class Policy, class Hash, class Eq, class Alloc>
  745. class raw_hash_set
  746. {
  747. using PolicyTraits = hash_policy_traits<Policy>;
  748. using KeyArgImpl =
  749. KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;
  750. public:
  751. using init_type = typename PolicyTraits::init_type;
  752. using key_type = typename PolicyTraits::key_type;
  753. // TODO(sbenza): Hide slot_type as it is an implementation detail. Needs user
  754. // code fixes!
  755. using slot_type = typename PolicyTraits::slot_type;
  756. using allocator_type = Alloc;
  757. using size_type = size_t;
  758. using difference_type = ptrdiff_t;
  759. using hasher = Hash;
  760. using key_equal = Eq;
  761. using policy_type = Policy;
  762. using value_type = typename PolicyTraits::value_type;
  763. using reference = value_type&;
  764. using const_reference = const value_type&;
  765. using pointer = typename phmap::allocator_traits<
  766. allocator_type>::template rebind_traits<value_type>::pointer;
  767. using const_pointer = typename phmap::allocator_traits<
  768. allocator_type>::template rebind_traits<value_type>::const_pointer;
  769. // Alias used for heterogeneous lookup functions.
  770. // `key_arg<K>` evaluates to `K` when the functors are transparent and to
  771. // `key_type` otherwise. It permits template argument deduction on `K` for the
  772. // transparent case.
  773. template <class K>
  774. using key_arg = typename KeyArgImpl::template type<K, key_type>;
  775. private:
  776. // Give an early error when key_type is not hashable/eq.
  777. auto KeyTypeCanBeHashed(const Hash& h, const key_type& k) -> decltype(h(k));
  778. auto KeyTypeCanBeEq(const Eq& eq, const key_type& k) -> decltype(eq(k, k));
  779. using Layout = phmap::priv::Layout<ctrl_t, slot_type>;
    // Memory layout for a table of the given capacity:
    // `capacity + Group::kWidth + 1` control bytes (the +1 sentinel plus the
    // cloned group tail) followed by `capacity` slots.
    static Layout MakeLayout(size_t capacity) {
        assert(IsValidCapacity(capacity));
        return Layout(capacity + Group::kWidth + 1, capacity);
    }
  784. using AllocTraits = phmap::allocator_traits<allocator_type>;
  785. using SlotAlloc = typename phmap::allocator_traits<
  786. allocator_type>::template rebind_alloc<slot_type>;
  787. using SlotAllocTraits = typename phmap::allocator_traits<
  788. allocator_type>::template rebind_traits<slot_type>;
  789. static_assert(std::is_lvalue_reference<reference>::value,
  790. "Policy::element() must return a reference");
  791. template <typename T>
  792. struct SameAsElementReference
  793. : std::is_same<typename std::remove_cv<
  794. typename std::remove_reference<reference>::type>::type,
  795. typename std::remove_cv<
  796. typename std::remove_reference<T>::type>::type> {};
  797. // An enabler for insert(T&&): T must be convertible to init_type or be the
  798. // same as [cv] value_type [ref].
  799. // Note: we separate SameAsElementReference into its own type to avoid using
  800. // reference unless we need to. MSVC doesn't seem to like it in some
  801. // cases.
  802. template <class T>
  803. using RequiresInsertable = typename std::enable_if<
  804. phmap::disjunction<std::is_convertible<T, init_type>,
  805. SameAsElementReference<T>>::value,
  806. int>::type;
  807. // RequiresNotInit is a workaround for gcc prior to 7.1.
  808. // See https://godbolt.org/g/Y4xsUh.
  809. template <class T>
  810. using RequiresNotInit =
  811. typename std::enable_if<!std::is_same<T, init_type>::value, int>::type;
  812. template <class... Ts>
  813. using IsDecomposable = IsDecomposable<void, PolicyTraits, Hash, Eq, Ts...>;
  814. public:
  815. static_assert(std::is_same<pointer, value_type*>::value,
  816. "Allocators with custom pointer types are not supported");
  817. static_assert(std::is_same<const_pointer, const value_type*>::value,
  818. "Allocators with custom pointer types are not supported");
    // Forward iterator over the full slots of the table. Walks the control
    // byte array and the slot array in lock step, skipping empty/deleted
    // slots a group at a time.
    class iterator
    {
        friend class raw_hash_set;

    public:
        using iterator_category = std::forward_iterator_tag;
        using value_type = typename raw_hash_set::value_type;
        using reference =
            phmap::conditional_t<PolicyTraits::constant_iterators::value,
                                 const value_type&, value_type&>;
        using pointer = phmap::remove_reference_t<reference>*;
        using difference_type = typename raw_hash_set::difference_type;

        iterator() {}

        // PRECONDITION: not an end() iterator.
        reference operator*() const { return PolicyTraits::element(slot_); }

        // PRECONDITION: not an end() iterator.
        pointer operator->() const { return &operator*(); }

        // PRECONDITION: not an end() iterator.
        iterator& operator++() {
            ++ctrl_;
            ++slot_;
            skip_empty_or_deleted();
            return *this;
        }

        // PRECONDITION: not an end() iterator.
        iterator operator++(int) {
            auto tmp = *this;
            ++*this;
            return tmp;
        }

#if 0 // PHMAP_BIDIRECTIONAL
        // PRECONDITION: not a begin() iterator.
        iterator& operator--() {
            assert(ctrl_);
            do {
                --ctrl_;
                --slot_;
            } while (IsEmptyOrDeleted(*ctrl_));
            return *this;
        }

        // PRECONDITION: not a begin() iterator.
        iterator operator--(int) {
            auto tmp = *this;
            --*this;
            return tmp;
        }
#endif

        // Iterators compare equal iff they reference the same control byte.
        friend bool operator==(const iterator& a, const iterator& b) {
            return a.ctrl_ == b.ctrl_;
        }
        friend bool operator!=(const iterator& a, const iterator& b) {
            return !(a == b);
        }

    private:
        iterator(ctrl_t* ctrl) : ctrl_(ctrl) {} // for end()
        iterator(ctrl_t* ctrl, slot_type* slot) : ctrl_(ctrl), slot_(slot) {}

        // Advance ctrl_/slot_ past empty and deleted slots, one group's worth
        // of matches per step, stopping at the next full slot or the sentinel.
        void skip_empty_or_deleted() {
            while (IsEmptyOrDeleted(*ctrl_)) {
                // ctrl is not necessarily aligned to Group::kWidth. It is also likely
                // to read past the space for ctrl bytes and into slots. This is ok
                // because ctrl has sizeof() == 1 and slot has sizeof() >= 1 so there
                // is no way to read outside the combined slot array.
                uint32_t shift = Group{ctrl_}.CountLeadingEmptyOrDeleted();
                ctrl_ += shift;
                slot_ += shift;
            }
        }

        ctrl_t* ctrl_ = nullptr;
        // To avoid uninitialized member warnings, put slot_ in an anonymous union.
        // The member is not initialized on singleton and end iterators.
        union {
            slot_type* slot_;
        };
    };
    // Const forward iterator: a thin wrapper over `iterator` whose public
    // reference/pointer typedefs enforce constness.
    class const_iterator
    {
        friend class raw_hash_set;

    public:
        using iterator_category = typename iterator::iterator_category;
        using value_type = typename raw_hash_set::value_type;
        using reference = typename raw_hash_set::const_reference;
        using pointer = typename raw_hash_set::const_pointer;
        using difference_type = typename raw_hash_set::difference_type;

        const_iterator() {}
        // Implicit construction from iterator.
        const_iterator(iterator i) : inner_(std::move(i)) {}

        reference operator*() const { return *inner_; }
        pointer operator->() const { return inner_.operator->(); }

        const_iterator& operator++() {
            ++inner_;
            return *this;
        }
        const_iterator operator++(int) { return inner_++; }

        friend bool operator==(const const_iterator& a, const const_iterator& b) {
            return a.inner_ == b.inner_;
        }
        friend bool operator!=(const const_iterator& a, const const_iterator& b) {
            return !(a == b);
        }

    private:
        // The const_casts are confined here; constness is re-imposed by the
        // public typedefs above, so callers can never mutate through this.
        const_iterator(const ctrl_t* ctrl, const slot_type* slot)
            : inner_(const_cast<ctrl_t*>(ctrl), const_cast<slot_type*>(slot)) {}

        iterator inner_;
    };
  922. using node_type = node_handle<Policy, hash_policy_traits<Policy>, Alloc>;
  923. using insert_return_type = InsertReturnType<iterator, node_type>;
    // Default constructor: noexcept whenever all three functors are.
    raw_hash_set() noexcept(
        std::is_nothrow_default_constructible<hasher>::value&&
        std::is_nothrow_default_constructible<key_equal>::value&&
        std::is_nothrow_default_constructible<allocator_type>::value) {}

    // Constructs a table pre-sized for `bucket_cnt` buckets (rounded up via
    // NormalizeCapacity); a zero count leaves the table in the unallocated
    // EmptyGroup() state.
    explicit raw_hash_set(size_t bucket_cnt, const hasher& hashfn = hasher(),
                          const key_equal& eq = key_equal(),
                          const allocator_type& alloc = allocator_type())
        : ctrl_(EmptyGroup()), settings_(0, hashfn, eq, alloc) {
        if (bucket_cnt) {
            size_t new_capacity = NormalizeCapacity(bucket_cnt);
            reset_growth_left(new_capacity);
            initialize_slots(new_capacity);
            capacity_ = new_capacity;
        }
    }

    // Delegating convenience overloads for partial functor sets.
    raw_hash_set(size_t bucket_cnt, const hasher& hashfn,
                 const allocator_type& alloc)
        : raw_hash_set(bucket_cnt, hashfn, key_equal(), alloc) {}

    raw_hash_set(size_t bucket_cnt, const allocator_type& alloc)
        : raw_hash_set(bucket_cnt, hasher(), key_equal(), alloc) {}

    explicit raw_hash_set(const allocator_type& alloc)
        : raw_hash_set(0, hasher(), key_equal(), alloc) {}

    // Range constructors: size the table first, then insert [first, last).
    template <class InputIter>
    raw_hash_set(InputIter first, InputIter last, size_t bucket_cnt = 0,
                 const hasher& hashfn = hasher(), const key_equal& eq = key_equal(),
                 const allocator_type& alloc = allocator_type())
        : raw_hash_set(bucket_cnt, hashfn, eq, alloc) {
        insert(first, last);
    }

    template <class InputIter>
    raw_hash_set(InputIter first, InputIter last, size_t bucket_cnt,
                 const hasher& hashfn, const allocator_type& alloc)
        : raw_hash_set(first, last, bucket_cnt, hashfn, key_equal(), alloc) {}

    template <class InputIter>
    raw_hash_set(InputIter first, InputIter last, size_t bucket_cnt,
                 const allocator_type& alloc)
        : raw_hash_set(first, last, bucket_cnt, hasher(), key_equal(), alloc) {}

    template <class InputIter>
    raw_hash_set(InputIter first, InputIter last, const allocator_type& alloc)
        : raw_hash_set(first, last, 0, hasher(), key_equal(), alloc) {}
  964. // Instead of accepting std::initializer_list<value_type> as the first
  965. // argument like std::unordered_set<value_type> does, we have two overloads
  966. // that accept std::initializer_list<T> and std::initializer_list<init_type>.
  967. // This is advantageous for performance.
  968. //
  969. // // Turns {"abc", "def"} into std::initializer_list<std::string>, then
  970. // // copies the strings into the set.
  971. // std::unordered_set<std::string> s = {"abc", "def"};
  972. //
  973. // // Turns {"abc", "def"} into std::initializer_list<const char*>, then
  974. // // copies the strings into the set.
  975. // phmap::flat_hash_set<std::string> s = {"abc", "def"};
  976. //
  977. // The same trick is used in insert().
  978. //
  979. // The enabler is necessary to prevent this constructor from triggering where
  980. // the copy constructor is meant to be called.
  981. //
  982. // phmap::flat_hash_set<int> a, b{a};
  983. //
  984. // RequiresNotInit<T> is a workaround for gcc prior to 7.1.
// Initializer-list constructors: one overload family for
// std::initializer_list<T> (heterogeneous element type, see the comment block
// above) and one for std::initializer_list<init_type>. RequiresNotInit<T>
// prevents the templated overloads from hijacking copy construction.
template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
raw_hash_set(std::initializer_list<T> init, size_t bucket_cnt = 0,
             const hasher& hashfn = hasher(), const key_equal& eq = key_equal(),
             const allocator_type& alloc = allocator_type())
    : raw_hash_set(init.begin(), init.end(), bucket_cnt, hashfn, eq, alloc) {}

raw_hash_set(std::initializer_list<init_type> init, size_t bucket_cnt = 0,
             const hasher& hashfn = hasher(), const key_equal& eq = key_equal(),
             const allocator_type& alloc = allocator_type())
    : raw_hash_set(init.begin(), init.end(), bucket_cnt, hashfn, eq, alloc) {}

// Delegating variants supplying defaulted key_equal / hasher.
template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
raw_hash_set(std::initializer_list<T> init, size_t bucket_cnt,
             const hasher& hashfn, const allocator_type& alloc)
    : raw_hash_set(init, bucket_cnt, hashfn, key_equal(), alloc) {}

raw_hash_set(std::initializer_list<init_type> init, size_t bucket_cnt,
             const hasher& hashfn, const allocator_type& alloc)
    : raw_hash_set(init, bucket_cnt, hashfn, key_equal(), alloc) {}

template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
raw_hash_set(std::initializer_list<T> init, size_t bucket_cnt,
             const allocator_type& alloc)
    : raw_hash_set(init, bucket_cnt, hasher(), key_equal(), alloc) {}

raw_hash_set(std::initializer_list<init_type> init, size_t bucket_cnt,
             const allocator_type& alloc)
    : raw_hash_set(init, bucket_cnt, hasher(), key_equal(), alloc) {}

template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
raw_hash_set(std::initializer_list<T> init, const allocator_type& alloc)
    : raw_hash_set(init, 0, hasher(), key_equal(), alloc) {}

raw_hash_set(std::initializer_list<init_type> init,
             const allocator_type& alloc)
    : raw_hash_set(init, 0, hasher(), key_equal(), alloc) {}
// Copy constructor: allocator chosen per
// allocator_traits::select_on_container_copy_construction.
raw_hash_set(const raw_hash_set& that)
    : raw_hash_set(that, AllocTraits::select_on_container_copy_construction(
                       that.alloc_ref())) {}

// Allocator-extended copy constructor. Rehashes up front to `that`'s
// capacity, then places each element directly at its probed slot: the table
// is known empty, so no duplicate check is needed. size_/growth_left are
// fixed up once at the end rather than per element.
raw_hash_set(const raw_hash_set& that, const allocator_type& a)
    : raw_hash_set(0, that.hash_ref(), that.eq_ref(), a) {
    rehash(that.capacity());   // operator=() should preserve load_factor
    // Because the table is guaranteed to be empty, we can do something faster
    // than a full `insert`.
    for (const auto& v : that) {
        const size_t hashval = PolicyTraits::apply(HashElement{hash_ref()}, v);
        auto target = find_first_non_full(hashval);
        set_ctrl(target.offset, H2(hashval));
        emplace_at(target.offset, v);
        infoz_.RecordInsert(hashval, target.probe_length);
    }
    size_ = that.size();
    growth_left() -= that.size();
}
// Move constructor: steals all storage via exchange(), leaving `that` as a
// valid empty table (ctrl_ reset to the EmptyGroup sentinel).
raw_hash_set(raw_hash_set&& that) noexcept(
    std::is_nothrow_copy_constructible<hasher>::value&&
    std::is_nothrow_copy_constructible<key_equal>::value&&
    std::is_nothrow_copy_constructible<allocator_type>::value)
    : ctrl_(phmap::exchange(that.ctrl_, EmptyGroup())),
      slots_(phmap::exchange(that.slots_, nullptr)),
      size_(phmap::exchange(that.size_, 0)),
      capacity_(phmap::exchange(that.capacity_, 0)),
      infoz_(phmap::exchange(that.infoz_, HashtablezInfoHandle())),
      // Hash, equality and allocator are copied instead of moved because
      // `that` must be left valid. If Hash is std::function<Key>, moving it
      // would create a nullptr functor that cannot be called.
      settings_(std::move(that.settings_)) {
    // growth_left was copied above, reset the one from `that`.
    that.growth_left() = 0;
}

// Allocator-extended move constructor. With an equal allocator this is a
// cheap storage swap into a freshly-constructed empty table; with an unequal
// allocator elements must be transferred one by one.
raw_hash_set(raw_hash_set&& that, const allocator_type& a)
    : ctrl_(EmptyGroup()),
      slots_(nullptr),
      size_(0),
      capacity_(0),
      settings_(0, that.hash_ref(), that.eq_ref(), a) {
    if (a == that.alloc_ref()) {
        std::swap(ctrl_, that.ctrl_);
        std::swap(slots_, that.slots_);
        std::swap(size_, that.size_);
        std::swap(capacity_, that.capacity_);
        std::swap(growth_left(), that.growth_left());
        std::swap(infoz_, that.infoz_);
    } else {
        reserve(that.size());
        // Note: this will copy elements of dense_set and unordered_set instead of
        // moving them. This can be fixed if it ever becomes an issue.
        for (auto& elem : that) insert(std::move(elem));
    }
}
// Copy assignment via the copy-and-swap idiom; the temporary's allocator
// honors propagate_on_container_copy_assignment.
raw_hash_set& operator=(const raw_hash_set& that) {
    raw_hash_set tmp(that,
                     AllocTraits::propagate_on_container_copy_assignment::value
                         ? that.alloc_ref()
                         : alloc_ref());
    swap(tmp);
    return *this;
}

// Move assignment; dispatches on propagate_on_container_move_assignment.
raw_hash_set& operator=(raw_hash_set&& that) noexcept(
    phmap::allocator_traits<allocator_type>::is_always_equal::value&&
    std::is_nothrow_move_assignable<hasher>::value&&
    std::is_nothrow_move_assignable<key_equal>::value) {
    // TODO(sbenza): We should only use the operations from the noexcept clause
    // to make sure we actually adhere to that contract.
    return move_assign(
        std::move(that),
        typename AllocTraits::propagate_on_container_move_assignment());
}
// Destroys all elements and releases backing storage.
~raw_hash_set() { destroy_slots(); }

// begin() starts at slot 0 and skips forward to the first occupied slot.
iterator begin() {
    auto it = iterator_at(0);
    it.skip_empty_or_deleted();
    return it;
}
// end(): the one-past-capacity ctrl position acts as the sentinel.
iterator end()
{
#if 0 // PHMAP_BIDIRECTIONAL
    return iterator_at(capacity_);
#else
    return {ctrl_ + capacity_};
#endif
}

// Const iteration delegates to the non-const overloads (const_iterator is
// implicitly constructible from iterator).
const_iterator begin() const {
    return const_cast<raw_hash_set*>(this)->begin();
}
const_iterator end() const { return const_cast<raw_hash_set*>(this)->end(); }
const_iterator cbegin() const { return begin(); }
const_iterator cend() const { return end(); }

// Size / capacity observers.
bool empty() const { return !size(); }
size_t size() const { return size_; }
size_t capacity() const { return capacity_; }
size_t max_size() const { return (std::numeric_limits<size_t>::max)(); }
// Removes all elements, keeping the current capacity. Element destructors
// are invoked only when required (node-based map, or a value type that is
// not trivially destructible); otherwise only the ctrl metadata is reset.
PHMAP_ATTRIBUTE_REINITIALIZES void clear() {
    if (empty())
        return;
    if (capacity_) {
        PHMAP_IF_CONSTEXPR((!std::is_trivially_destructible<typename PolicyTraits::value_type>::value ||
                            std::is_same<typename Policy::is_flat, std::false_type>::value)) {
            // node map or not trivially destructible... we need to iterate and destroy values one by one
            for (size_t i = 0; i != capacity_; ++i) {
                if (IsFull(ctrl_[i])) {
                    PolicyTraits::destroy(&alloc_ref(), slots_ + i);
                }
            }
        }
        size_ = 0;
        reset_ctrl(capacity_);
        reset_growth_left(capacity_);
    }
    assert(empty());
    infoz_.RecordStorageChanged(0, capacity_);
}
// This overload kicks in when the argument is an rvalue of insertable and
// decomposable type other than init_type.
//
//   flat_hash_map<std::string, int> m;
//   m.insert(std::make_pair("abc", 42));
template <class T, RequiresInsertable<T> = 0,
          typename std::enable_if<IsDecomposable<T>::value, int>::type = 0,
          T* = nullptr>
std::pair<iterator, bool> insert(T&& value) {
    return emplace(std::forward<T>(value));
}

// This overload kicks in when the argument is a bitfield or an lvalue of
// insertable and decomposable type.
//
//   union { int n : 1; };
//   flat_hash_set<int> s;
//   s.insert(n);
//
//   flat_hash_set<std::string> s;
//   const char* p = "hello";
//   s.insert(p);
//
// TODO(romanp): Once we stop supporting gcc 5.1 and below, replace
// RequiresInsertable<T> with RequiresInsertable<const T&>.
// We are hitting this bug: https://godbolt.org/g/1Vht4f.
template <class T, RequiresInsertable<T> = 0,
          typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
std::pair<iterator, bool> insert(const T& value) {
    return emplace(value);
}

// This overload kicks in when the argument is an rvalue of init_type. Its
// purpose is to handle brace-init-list arguments.
//
//   flat_hash_map<std::string, int> m;
//   m.insert({"abc", 42});
std::pair<iterator, bool> insert(init_type&& value) {
    return emplace(std::move(value));
}

// Hinted-insert overloads. The hint iterator is ignored; they simply forward
// to the corresponding unhinted insert and drop the `inserted` flag.
template <class T, RequiresInsertable<T> = 0,
          typename std::enable_if<IsDecomposable<T>::value, int>::type = 0,
          T* = nullptr>
iterator insert(const_iterator, T&& value) {
    return insert(std::forward<T>(value)).first;
}

// TODO(romanp): Once we stop supporting gcc 5.1 and below, replace
// RequiresInsertable<T> with RequiresInsertable<const T&>.
// We are hitting this bug: https://godbolt.org/g/1Vht4f.
template <class T, RequiresInsertable<T> = 0,
          typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
iterator insert(const_iterator, const T& value) {
    return insert(value).first;
}

iterator insert(const_iterator, init_type&& value) {
    return insert(std::move(value)).first;
}
// Trait: true when It is a random-access iterator.
// NOTE(review): not referenced in this part of the file — range insert below
// dispatches on has_difference_operator instead; confirm callers elsewhere.
template <typename It>
using IsRandomAccess = std::is_same<typename std::iterator_traits<It>::iterator_category,
                                    std::random_access_iterator_tag>;
  1188. template<typename T>
  1189. struct has_difference_operator
  1190. {
  1191. private:
  1192. using yes = std::true_type;
  1193. using no = std::false_type;
  1194. template<typename U> static auto test(int) -> decltype(std::declval<U>() - std::declval<U>() == 1, yes());
  1195. template<typename> static no test(...);
  1196. public:
  1197. static constexpr bool value = std::is_same<decltype(test<T>(0)), yes>::value;
  1198. };
// Range insert for iterators supporting subtraction: reserve once for the
// whole range, then emplace each element.
template <class InputIt, typename phmap::enable_if_t<has_difference_operator<InputIt>::value, int> = 0>
void insert(InputIt first, InputIt last) {
    this->reserve(this->size() + (last - first));
    for (; first != last; ++first)
        emplace(*first);
}

// Range insert for single-pass iterators: size is unknown, so grow as we go.
template <class InputIt, typename phmap::enable_if_t<!has_difference_operator<InputIt>::value, int> = 0>
void insert(InputIt first, InputIt last) {
    for (; first != last; ++first)
        emplace(*first);
}

// Initializer-list inserts mirror the initializer-list constructors.
template <class T, RequiresNotInit<T> = 0, RequiresInsertable<const T&> = 0>
void insert(std::initializer_list<T> ilist) {
    insert(ilist.begin(), ilist.end());
}

void insert(std::initializer_list<init_type> ilist) {
    insert(ilist.begin(), ilist.end());
}
// Inserts the element held by a node handle. On success the node is emptied
// (Reset) and the returned insert_return_type owns no node; on failure the
// still-full node is handed back to the caller.
insert_return_type insert(node_type&& node) {
    if (!node) return {end(), false, node_type()};
    const auto& elem = PolicyTraits::element(CommonAccess::GetSlot(node));
    auto res = PolicyTraits::apply(
        InsertSlot<false>{*this, std::move(*CommonAccess::GetSlot(node))},
        elem);
    if (res.second) {
        CommonAccess::Reset(&node);
        return {res.first, true, node_type()};
    } else {
        return {res.first, false, std::move(node)};
    }
}

// Same as above but with a caller-supplied hash value (must equal the hash
// of the node's key).
insert_return_type insert(node_type&& node, size_t hashval) {
    if (!node) return {end(), false, node_type()};
    const auto& elem = PolicyTraits::element(CommonAccess::GetSlot(node));
    auto res = PolicyTraits::apply(
        InsertSlotWithHash<false>{*this, std::move(*CommonAccess::GetSlot(node)), hashval},
        elem);
    if (res.second) {
        CommonAccess::Reset(&node);
        return {res.first, true, node_type()};
    } else {
        return {res.first, false, std::move(node)};
    }
}

// Hinted node insert: hint ignored; on failure the node is returned to the
// caller through the rvalue argument.
iterator insert(const_iterator, node_type&& node) {
    auto res = insert(std::move(node));
    node = std::move(res.node);
    return res.position;
}
// This overload kicks in if we can deduce the key from args. This enables us
// to avoid constructing value_type if an entry with the same key already
// exists.
//
// For example:
//
//   flat_hash_map<std::string, std::string> m = {{"abc", "def"}};
//   // Creates no std::string copies and makes no heap allocations.
//   m.emplace("abc", "xyz");
template <class... Args, typename std::enable_if<
                             IsDecomposable<Args...>::value, int>::type = 0>
std::pair<iterator, bool> emplace(Args&&... args) {
    return PolicyTraits::apply(EmplaceDecomposable{*this},
                               std::forward<Args>(args)...);
}

// Decomposable emplace with a caller-supplied hash value.
template <class... Args, typename std::enable_if<IsDecomposable<Args...>::value, int>::type = 0>
std::pair<iterator, bool> emplace_with_hash(size_t hashval, Args&&... args) {
    return PolicyTraits::apply(EmplaceDecomposableHashval{*this, hashval}, std::forward<Args>(args)...);
}

// This overload kicks in if we cannot deduce the key from args. It constructs
// value_type unconditionally (in stack-local aligned storage) and then either
// moves it into the table or destroys it (InsertSlot<true> handles both).
template <class... Args, typename std::enable_if<!IsDecomposable<Args...>::value, int>::type = 0>
std::pair<iterator, bool> emplace(Args&&... args) {
    typename phmap::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type
        raw;
    slot_type* slot = reinterpret_cast<slot_type*>(&raw);
    PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
    const auto& elem = PolicyTraits::element(slot);
    return PolicyTraits::apply(InsertSlot<true>{*this, std::move(*slot)}, elem);
}

// Non-decomposable emplace with a caller-supplied hash value.
template <class... Args, typename std::enable_if<!IsDecomposable<Args...>::value, int>::type = 0>
std::pair<iterator, bool> emplace_with_hash(size_t hashval, Args&&... args) {
    typename phmap::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type raw;
    slot_type* slot = reinterpret_cast<slot_type*>(&raw);
    PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
    const auto& elem = PolicyTraits::element(slot);
    return PolicyTraits::apply(InsertSlotWithHash<true>{*this, std::move(*slot), hashval}, elem);
}

// Hinted emplace overloads: the hint is ignored.
template <class... Args>
iterator emplace_hint(const_iterator, Args&&... args) {
    return emplace(std::forward<Args>(args)...).first;
}

template <class... Args>
iterator emplace_hint_with_hash(size_t hashval, const_iterator, Args&&... args) {
    return emplace_with_hash(hashval, std::forward<Args>(args)...).first;
}
  1295. // Extension API: support for lazy emplace.
  1296. //
  1297. // Looks up key in the table. If found, returns the iterator to the element.
  1298. // Otherwise calls f with one argument of type raw_hash_set::constructor. f
  1299. // MUST call raw_hash_set::constructor with arguments as if a
  1300. // raw_hash_set::value_type is constructed, otherwise the behavior is
  1301. // undefined.
  1302. //
  1303. // For example:
  1304. //
  1305. // std::unordered_set<ArenaString> s;
  1306. // // Makes ArenaStr even if "abc" is in the map.
  1307. // s.insert(ArenaString(&arena, "abc"));
  1308. //
  1309. // flat_hash_set<ArenaStr> s;
  1310. // // Makes ArenaStr only if "abc" is not in the map.
  1311. // s.lazy_emplace("abc", [&](const constructor& ctor) {
  1312. // ctor(&arena, "abc");
  1313. // });
  1314. //
  1315. // WARNING: This API is currently experimental. If there is a way to implement
  1316. // the same thing with the rest of the API, prefer that.
// Callable handed to lazy_emplace()'s functor. Invoking it constructs the
// value in the reserved slot and then nulls *slot_ — the caller asserts on
// that null to verify construction happened exactly once.
class constructor
{
    friend class raw_hash_set;
public:
    // The reserved slot (still null-terminated-by-construction protocol).
    slot_type* slot() const {
        return *slot_;
    }
    // Constructs the element in place; must be called exactly once.
    template <class... Args>
    void operator()(Args&&... args) const {
        assert(*slot_);
        PolicyTraits::construct(alloc_, *slot_, std::forward<Args>(args)...);
        *slot_ = nullptr;   // mark "constructed" for the caller's assert
    }
private:
    constructor(allocator_type* a, slot_type** slot) : alloc_(a), slot_(slot) {}
    allocator_type* alloc_;
    slot_type** slot_;
};
  1335. // Extension API: support for lazy emplace.
  1336. // Looks up key in the table. If found, returns the iterator to the element.
  1337. // Otherwise calls f with one argument of type raw_hash_set::constructor. f
  1338. // MUST call raw_hash_set::constructor with arguments as if a
  1339. // raw_hash_set::value_type is constructed, otherwise the behavior is
  1340. // undefined.
  1341. //
  1342. // For example:
  1343. //
  1344. // std::unordered_set<ArenaString> s;
  1345. // // Makes ArenaStr even if "abc" is in the map.
  1346. // s.insert(ArenaString(&arena, "abc"));
  1347. //
  1348. // flat_hash_set<ArenaStr> s;
  1349. // // Makes ArenaStr only if "abc" is not in the map.
  1350. // s.lazy_emplace("abc", [&](const constructor& ctor) {
  1351. // ctor(&arena, "abc");
  1352. // });
  1353. // -----------------------------------------------------
// lazy_emplace: find `key`; if absent, reserve a slot and let `f` construct
// the value via a `constructor` callback. See the comment block above.
template <class K = key_type, class F>
iterator lazy_emplace(const key_arg<K>& key, F&& f) {
    return lazy_emplace_with_hash(key, this->hash(key), std::forward<F>(f));
}

// Same, with a caller-supplied hash. _find_key returns (size_t)-1 when the
// key is absent; ctrl byte is set only after `f` has constructed the value.
template <class K = key_type, class F>
iterator lazy_emplace_with_hash(const key_arg<K>& key, size_t hashval, F&& f) {
    size_t offset = _find_key(key, hashval);
    if (offset == (size_t)-1) {
        offset = prepare_insert(hashval);
        lazy_emplace_at(offset, std::forward<F>(f));
        this->set_ctrl(offset, H2(hashval));
    }
    return iterator_at(offset);
}

// Runs `f` with a `constructor` for slot `idx`. The constructor nulls the
// local slot pointer on use, which the assert below checks.
template <class K = key_type, class F>
void lazy_emplace_at(size_t& idx, F&& f) {
    slot_type* slot = slots_ + idx;
    std::forward<F>(f)(constructor(&alloc_ref(), &slot));
    assert(!slot);
}

// If `key` is absent, construct it via `f`; if it is already present, ERASE
// the existing element (toggle-like semantics — note the inverted branch).
template <class K = key_type, class F>
void emplace_single_with_hash(const key_arg<K>& key, size_t hashval, F&& f) {
    size_t offset = _find_key(key, hashval);
    if (offset == (size_t)-1) {
        offset = prepare_insert(hashval);
        lazy_emplace_at(offset, std::forward<F>(f));
        this->set_ctrl(offset, H2(hashval));
    } else
        _erase(iterator_at(offset));
}
  1384. // Extension API: support for heterogeneous keys.
  1385. //
  1386. // std::unordered_set<std::string> s;
  1387. // // Turns "abc" into std::string.
  1388. // s.erase("abc");
  1389. //
  1390. // flat_hash_set<std::string> s;
  1391. // // Uses "abc" directly without copying it into std::string.
  1392. // s.erase("abc");
// Erase by (possibly heterogeneous) key; returns the number erased (0 or 1).
template <class K = key_type>
size_type erase(const key_arg<K>& key) {
    auto it = find(key);
    if (it == end()) return 0;
    _erase(it);
    return 1;
}

iterator erase(const_iterator cit) { return erase(cit.inner_); }

// Erases the element pointed to by `it`. Unlike `std::unordered_set::erase`,
// this method returns void to reduce algorithmic complexity to O(1). In
// order to erase while iterating across a map, use the following idiom (which
// also works for standard containers):
//
// for (auto it = m.begin(), end = m.end(); it != end;) {
//   if (<pred>) {
//     m._erase(it++);
//   } else {
//     ++it;
//   }
// }
void _erase(iterator it) {
    assert(it != end());
    PolicyTraits::destroy(&alloc_ref(), it.slot_);
    erase_meta_only(it);
}
LL_INLINE void _erase(const_iterator cit) { _erase(cit.inner_); }

// This overload is necessary because otherwise erase<K>(const K&) would be
// a better match if non-const iterator is passed as an argument.
// The successor is computed BEFORE erasing, since _erase invalidates `it`.
iterator erase(iterator it) {
    auto res = it;
    ++res;
    _erase(it);
    return res;
}

// Range erase; returns the iterator past the erased range.
iterator erase(const_iterator first, const_iterator last) {
    while (first != last) {
        _erase(first++);
    }
    return last.inner_;
}
// Moves elements from `src` into `this`.
// If the element already exists in `this`, it is left unmodified in `src`.
// For moved elements only the metadata is erased in `src` (the slot contents
// were already transferred by InsertSlot).
template <typename H, typename E>
void merge(raw_hash_set<Policy, H, E, Alloc>& src) {  // NOLINT
    assert(this != &src);
    for (auto it = src.begin(), e = src.end(); it != e; ++it) {
        if (PolicyTraits::apply(InsertSlot<false>{*this, std::move(*it.slot_)},
                                PolicyTraits::element(it.slot_))
                .second) {
            src.erase_meta_only(it);
        }
    }
}

// Rvalue overload: a moved-from table is still a valid lvalue source.
template <typename H, typename E>
void merge(raw_hash_set<Policy, H, E, Alloc>&& src) {
    merge(src);
}
// Removes the element at `position` and returns it as a node handle; only
// metadata is erased since the node now owns the slot contents.
node_type extract(const_iterator position) {
    auto node =
        CommonAccess::Make<node_type>(alloc_ref(), position.inner_.slot_);
    erase_meta_only(position);
    return node;
}

// Extract by key; returns an empty node when the key is absent. The
// enable_if keeps this from shadowing extract(const_iterator).
template <
    class K = key_type,
    typename std::enable_if<!std::is_same<K, iterator>::value, int>::type = 0>
node_type extract(const key_arg<K>& key) {
    auto it = find(key);
    return it == end() ? node_type() : extract(const_iterator{it});
}
// Member swap: exchanges all storage, functors and instrumentation. The
// allocator is swapped only if propagate_on_container_swap says so
// (SwapAlloc dispatches on that trait).
void swap(raw_hash_set& that) noexcept(
    IsNoThrowSwappable<hasher>() && IsNoThrowSwappable<key_equal>() &&
    (!AllocTraits::propagate_on_container_swap::value ||
     IsNoThrowSwappable<allocator_type>(typename AllocTraits::propagate_on_container_swap{}))) {
    using std::swap;
    swap(ctrl_, that.ctrl_);
    swap(slots_, that.slots_);
    swap(size_, that.size_);
    swap(capacity_, that.capacity_);
    swap(growth_left(), that.growth_left());
    swap(hash_ref(), that.hash_ref());
    swap(eq_ref(), that.eq_ref());
    swap(infoz_, that.infoz_);
    SwapAlloc(alloc_ref(), that.alloc_ref(), typename AllocTraits::propagate_on_container_swap{});
}

// Serialization hooks (defined elsewhere); only available when the table
// layout is deterministic.
#if !defined(PHMAP_NON_DETERMINISTIC)
template<typename OutputArchive>
bool phmap_dump(OutputArchive&) const;
template<typename InputArchive>
bool phmap_load(InputArchive&);
#endif
// Rehash to hold at least `n` slots. rehash(0) on an empty table frees all
// storage; otherwise the table only grows (shrink happens solely via the
// n == 0 unconditional-resize path, as the standard requires).
void rehash(size_t n) {
    if (n == 0 && capacity_ == 0) return;
    if (n == 0 && size_ == 0) {
        destroy_slots();
        infoz_.RecordStorageChanged(0, 0);
        return;
    }
    // bitor is a faster way of doing `max` here. We will round up to the next
    // power-of-2-minus-1, so bitor is good enough.
    auto m = NormalizeCapacity((std::max)(n, size()));
    // n == 0 unconditionally rehashes as per the standard.
    if (n == 0 || m > capacity_) {
        resize(m);
    }
}

// Ensures capacity for `n` elements without exceeding max load factor.
void reserve(size_t n) { rehash(GrowthToLowerboundCapacity(n)); }
  1500. // Extension API: support for heterogeneous keys.
  1501. //
  1502. // std::unordered_set<std::string> s;
  1503. // // Turns "abc" into std::string.
  1504. // s.count("abc");
  1505. //
  1506. // ch_set<std::string> s;
  1507. // // Uses "abc" directly without copying it into std::string.
  1508. // s.count("abc");
  1509. template <class K = key_type>
  1510. size_t count(const key_arg<K>& key) const {
  1511. return find(key) == end() ? size_t(0) : size_t(1);
  1512. }
// Issues CPU prefetch instructions for the memory needed to find or insert
// a key. Like all lookup functions, this support heterogeneous keys.
//
// NOTE: This is a very low level operation and should not be used without
// specific benchmarks indicating its importance.
// Prefetches both the ctrl bytes and the slot at the first probe position;
// no-op on platforms without a prefetch intrinsic.
void prefetch_hash(size_t hashval) const {
    (void)hashval;
#if defined(_MSC_VER) && (defined(_M_X64) || defined(_M_IX86))
    auto seq = probe(hashval);
    _mm_prefetch((const char *)(ctrl_ + seq.offset()), _MM_HINT_NTA);
    _mm_prefetch((const char *)(slots_ + seq.offset()), _MM_HINT_NTA);
#elif defined(__GNUC__)
    auto seq = probe(hashval);
    __builtin_prefetch(static_cast<const void*>(ctrl_ + seq.offset()));
    __builtin_prefetch(static_cast<const void*>(slots_ + seq.offset()));
#endif  // __GNUC__
}

// Convenience overload hashing the key first.
template <class K = key_type>
void prefetch(const key_arg<K>& key) const {
    prefetch_hash(this->hash(key));
}
// The API of find() has two extensions.
//
// 1. The hash can be passed by the user. It must be equal to the hash of the
// key.
//
// 2. The type of the key argument doesn't have to be key_type. This is so
// called heterogeneous key support.
template <class K = key_type>
iterator find(const key_arg<K>& key, size_t hashval) {
    size_t offset;
    if (find_impl(key, hashval, offset))
        return iterator_at(offset);
    else
        return end();
}

// Like find(), but returns a pointer to the element (nullptr when absent),
// avoiding iterator construction.
template <class K = key_type>
pointer find_ptr(const key_arg<K>& key, size_t hashval) {
    size_t offset;
    if (find_impl(key, hashval, offset))
        return &PolicyTraits::element(slots_ + offset);
    else
        return nullptr;
}

template <class K = key_type>
LL_INLINE iterator find(const key_arg<K>& key) {
    return find(key, this->hash(key));
}

// Const overloads delegate to the mutable versions.
template <class K = key_type>
LL_INLINE const_iterator find(const key_arg<K>& key, size_t hashval) const {
    return const_cast<raw_hash_set*>(this)->find(key, hashval);
}

template <class K = key_type>
LL_INLINE const_iterator find(const key_arg<K>& key) const {
    return find(key, this->hash(key));
}

// Membership tests built on find().
template <class K = key_type>
LL_INLINE bool contains(const key_arg<K>& key) const {
    return find(key) != end();
}

template <class K = key_type>
LL_INLINE bool contains(const key_arg<K>& key, size_t hashval) const {
    return find(key, hashval) != end();
}
// equal_range: keys are unique, so the range is empty or a single element.
template <class K = key_type>
std::pair<iterator, iterator> equal_range(const key_arg<K>& key) {
    auto it = find(key);
    if (it != end()) return {it, std::next(it)};
    return {it, it};
}

template <class K = key_type>
std::pair<const_iterator, const_iterator> equal_range(
    const key_arg<K>& key) const {
    auto it = find(key);
    if (it != end()) return {it, std::next(it)};
    return {it, it};
}
// Standard observer API.
size_t bucket_count() const { return capacity_; }
// size()/capacity(); computed in double to avoid precision loss, then
// narrowed to float per the standard signature.
float load_factor() const {
    return capacity_ ? static_cast<float>(static_cast<double>(size()) / capacity_) : 0.0f;
}
float max_load_factor() const { return 1.0f; }
// The max load factor is an implementation constant; the setter is a no-op
// kept for std::unordered_* API compatibility.
void max_load_factor(float) {
    // Does nothing.
}
hasher hash_function() const { return hash_ref(); }  // warning: doesn't match internal hash - use hash() member function
key_equal key_eq() const { return eq_ref(); }
allocator_type get_allocator() const { return alloc_ref(); }
// Set equality: same size and every element of one is present in the other.
// Iterates the smaller-capacity table and probes the larger one.
friend bool operator==(const raw_hash_set& a, const raw_hash_set& b) {
    if (a.size() != b.size()) return false;
    const raw_hash_set* outer = &a;
    const raw_hash_set* inner = &b;
    if (outer->capacity() > inner->capacity())
        std::swap(outer, inner);
    for (const value_type& elem : *outer)
        if (!inner->has_element(elem)) return false;
    return true;
}

LL_INLINE friend bool operator!=(const raw_hash_set& a, const raw_hash_set& b) {
    return !(a == b);
}

// ADL swap delegating to the member swap.
LL_INLINE friend void swap(raw_hash_set& a,
                           raw_hash_set& b) noexcept(noexcept(a.swap(b))) {
    a.swap(b);
}

// The table's actual hash of `key` (user hash plus internal mixing — see
// HashElement); NOT interchangeable with hash_function()(key).
template <class K>
LL_INLINE size_t hash(const K& key) const {
    return HashElement{hash_ref()}(key);
}
  1622. private:
  1623. template <class Container, typename Enabler>
  1624. friend struct phmap::priv::hashtable_debug_internal::HashtableDebugAccess;
// Core probing loop. On success sets `offset` to the element's slot index
// and returns true; returns false when an empty ctrl byte is seen (the key
// cannot be further along the probe sequence).
template <class K = key_type>
bool find_impl(const key_arg<K>& key, size_t hashval, size_t& offset) {
    auto seq = probe(hashval);
    while (true) {
        Group g{ ctrl_ + seq.offset() };
        // Candidates: slots in this group whose H2 byte matches.
        for (uint32_t i : g.Match((h2_t)H2(hashval))) {
            offset = seq.offset((size_t)i);
            if (PHMAP_PREDICT_TRUE(PolicyTraits::apply(
                    EqualElement<K>{key, eq_ref()},
                    PolicyTraits::element(slots_ + offset))))
                return true;
        }
        if (PHMAP_PREDICT_TRUE(g.MatchEmpty()))
            return false;
        seq.next();
    }
}
// Functor for PolicyTraits::apply: looks up the decomposed key.
struct FindElement
{
    template <class K, class... Args>
    const_iterator operator()(const K& key, Args&&...) const {
        return s.find(key);
    }
    const raw_hash_set& s;
};

// Functor for PolicyTraits::apply: hashes the decomposed key, mixing the
// user hash unless PHMAP_NO_MIXING is defined.
struct HashElement
{
    template <class K, class... Args>
    LL_INLINE size_t operator()(const K& key, Args&&...) const {
#ifdef PHMAP_NO_MIXING
        return (size_t)h(key);
#else
        return phmap_mix<sizeof(size_t)>()(h(key));
#endif
    }
    const hasher& h;
};

// Functor for PolicyTraits::apply: compares the decomposed key (lhs) against
// the captured query key (rhs) using the table's key_equal.
template <class K1>
struct EqualElement
{
    template <class K2, class... Args>
    bool operator()(const K2& lhs, Args&&...) const {
        return eq(lhs, rhs);
    }
    const K1& rhs;
    const key_equal& eq;
};
// Inserts a value decomposed into (key, hash, constructor args). Returns
// the iterator to the (possibly pre-existing) element and whether an
// insertion actually took place.
template <class K, class... Args>
std::pair<iterator, bool> emplace_decomposable(const K& key, size_t hashval,
                                               Args&&... args)
{
    size_t offset = _find_key(key, hashval);
    if (offset == (size_t)-1) {
        // Absent: claim a slot, construct in place, then publish the H2
        // control byte (this order keeps sanitizer poisoning consistent).
        offset = prepare_insert(hashval);
        emplace_at(offset, std::forward<Args>(args)...);
        this->set_ctrl(offset, H2(hashval));
        return {iterator_at(offset), true};
    }
    return {iterator_at(offset), false};
}
// Functor for PolicyTraits::apply: computes the key's hash and forwards
// to emplace_decomposable on set `s`.
struct EmplaceDecomposable
{
    template <class K, class... Args>
    std::pair<iterator, bool> operator()(const K& key, Args&&... args) const {
        return s.emplace_decomposable(key, s.hash(key), std::forward<Args>(args)...);
    }
    raw_hash_set& s;
};
// Like EmplaceDecomposable, but uses a caller-precomputed hash value
// instead of hashing the key again.
struct EmplaceDecomposableHashval {
    template <class K, class... Args>
    std::pair<iterator, bool> operator()(const K& key, Args&&... args) const {
        return s.emplace_decomposable(key, hashval, std::forward<Args>(args)...);
    }
    raw_hash_set& s;
    size_t hashval;
};
// Functor for PolicyTraits::apply: moves an already-constructed slot into
// the table. On a duplicate key the slot is destroyed only when
// `do_destroy` is true (i.e. when the caller handed over ownership).
template <bool do_destroy>
struct InsertSlot
{
    template <class K, class... Args>
    std::pair<iterator, bool> operator()(const K& key, Args&&...) && {
        size_t hashval = s.hash(key);
        auto res = s.find_or_prepare_insert(key, hashval);
        if (res.second) {
            // New entry: transfer the slot contents, then publish the ctrl byte.
            PolicyTraits::transfer(&s.alloc_ref(), s.slots_ + res.first, &slot);
            s.set_ctrl(res.first, H2(hashval));
        } else if (do_destroy) {
            // Key already present and we own the temporary slot: release it.
            PolicyTraits::destroy(&s.alloc_ref(), &slot);
        }
        return {s.iterator_at(res.first), res.second};
    }
    raw_hash_set& s;
    // Constructed slot. Either moved into place or destroyed.
    slot_type&& slot;
};
// Like InsertSlot, but uses a caller-provided hash value (held by
// reference) instead of recomputing it from the key.
template <bool do_destroy>
struct InsertSlotWithHash
{
    template <class K, class... Args>
    std::pair<iterator, bool> operator()(const K& key, Args&&...) && {
        auto res = s.find_or_prepare_insert(key, hashval);
        if (res.second) {
            // New entry: transfer the slot contents, then publish the ctrl byte.
            PolicyTraits::transfer(&s.alloc_ref(), s.slots_ + res.first, &slot);
            s.set_ctrl(res.first, H2(hashval));
        } else if (do_destroy) {
            // Key already present and we own the temporary slot: release it.
            PolicyTraits::destroy(&s.alloc_ref(), &slot);
        }
        return {s.iterator_at(res.first), res.second};
    }
    raw_hash_set& s;
    // Constructed slot. Either moved into place or destroyed.
    slot_type&& slot;
    size_t &hashval;
};
// "erases" the object from the container, except that it doesn't actually
// destroy the object. It only updates all the metadata of the class.
// This can be used in conjunction with Policy::transfer to move the object to
// another place.
void erase_meta_only(const_iterator it) {
    assert(IsFull(*it.inner_.ctrl_) && "erasing a dangling iterator");
    --size_;
    const size_t index = (size_t)(it.inner_.ctrl_ - ctrl_);
    const size_t index_before = (index - Group::kWidth) & capacity_;
    const auto empty_after = Group(it.inner_.ctrl_).MatchEmpty();
    const auto empty_before = Group(ctrl_ + index_before).MatchEmpty();
    // We count how many consecutive non empties we have to the right and to the
    // left of `it`. If the sum is >= kWidth then there is at least one probe
    // window that might have seen a full group.
    bool was_never_full =
        empty_before && empty_after &&
        static_cast<size_t>(empty_after.TrailingZeros() +
                            empty_before.LeadingZeros()) < Group::kWidth;
    // kEmpty lets future probes terminate here; kDeleted (tombstone) is
    // required when a probe sequence may have stepped over this slot.
    set_ctrl(index, was_never_full ? kEmpty : kDeleted);
    // Only a truly-empty slot returns capacity to the growth budget.
    growth_left() += was_never_full;
    infoz_.RecordErase();
}
// Allocates and initializes the ctrl/slot arrays for `new_capacity`.
// Does not move any elements; callers (e.g. resize) handle that.
void initialize_slots(size_t new_capacity) {
    assert(new_capacity);
    // Sample hashtablez stats only for freshly-created tables that use the
    // default allocator (slots_ == nullptr means no prior storage).
    if (std::is_same<SlotAlloc, std::allocator<slot_type>>::value &&
        slots_ == nullptr) {
        infoz_ = Sample();
    }
    // One allocation holds the control bytes followed by the slot array.
    auto layout = MakeLayout(new_capacity);
    char* mem = static_cast<char*>(
        Allocate<Layout::Alignment()>(&alloc_ref(), layout.AllocSize()));
    ctrl_ = reinterpret_cast<ctrl_t*>(layout.template Pointer<0>(mem));
    slots_ = layout.template Pointer<1>(mem);
    reset_ctrl(new_capacity);
    reset_growth_left(new_capacity);
    infoz_.RecordStorageChanged(size_, new_capacity);
}
// Destroys all elements (when their type requires it) and releases the
// backing storage, returning the table to the default-constructed state.
void destroy_slots() {
    if (!capacity_)
        return;
    PHMAP_IF_CONSTEXPR((!std::is_trivially_destructible<typename PolicyTraits::value_type>::value ||
                        std::is_same<typename Policy::is_flat, std::false_type>::value)) {
        // node map, or not trivially destructible... we need to iterate and destroy values one by one
        // std::cout << "either this is a node map or " << type_name<typename PolicyTraits::value_type>() << " is not trivially_destructible\n";
        for (size_t i = 0; i != capacity_; ++i) {
            if (IsFull(ctrl_[i])) {
                PolicyTraits::destroy(&alloc_ref(), slots_ + i);
            }
        }
    }
    auto layout = MakeLayout(capacity_);
    // Unpoison before returning the memory to the allocator.
    SanitizerUnpoisonMemoryRegion(slots_, sizeof(slot_type) * capacity_);
    Deallocate<Layout::Alignment()>(&alloc_ref(), ctrl_, layout.AllocSize());
    // Reset to the canonical empty state (shared EmptyGroup sentinel).
    ctrl_ = EmptyGroup();
    slots_ = nullptr;
    size_ = 0;
    capacity_ = 0;
    growth_left() = 0;
}
// Rehashes every element into a freshly allocated table of `new_capacity`
// slots, then frees the old arrays.
void resize(size_t new_capacity) {
    assert(IsValidCapacity(new_capacity));
    auto* old_ctrl = ctrl_;
    auto* old_slots = slots_;
    const size_t old_capacity = capacity_;
    initialize_slots(new_capacity);
    capacity_ = new_capacity;
    for (size_t i = 0; i != old_capacity; ++i) {
        if (IsFull(old_ctrl[i])) {
            // Recompute the hash from the stored element and move it into
            // the first free slot of its new probe sequence.
            size_t hashval = PolicyTraits::apply(HashElement{hash_ref()},
                                                 PolicyTraits::element(old_slots + i));
            auto target = find_first_non_full(hashval);
            size_t new_i = target.offset;
            set_ctrl(new_i, H2(hashval));
            PolicyTraits::transfer(&alloc_ref(), slots_ + new_i, old_slots + i);
        }
    }
    if (old_capacity) {
        // Unpoison before handing the old block back to the allocator.
        SanitizerUnpoisonMemoryRegion(old_slots,
                                      sizeof(slot_type) * old_capacity);
        auto layout = MakeLayout(old_capacity);
        Deallocate<Layout::Alignment()>(&alloc_ref(), old_ctrl,
                                        layout.AllocSize());
    }
}
// In-place rehash: reclaims tombstones (DELETED slots) without allocating
// a new table.
LL_NO_INLINE void drop_deletes_without_resize() {
    assert(IsValidCapacity(capacity_));
    assert(!is_small());
    // Algorithm:
    // - mark all DELETED slots as EMPTY
    // - mark all FULL slots as DELETED
    // - for each slot marked as DELETED
    //     hash = Hash(element)
    //     target = find_first_non_full(hash)
    //     if target is in the same group
    //       mark slot as FULL
    //     else if target is EMPTY
    //       transfer element to target
    //       mark slot as EMPTY
    //       mark target as FULL
    //     else if target is DELETED
    //       swap current element with target element
    //       mark target as FULL
    //       repeat procedure for current slot with moved from element (target)
    ConvertDeletedToEmptyAndFullToDeleted(ctrl_, capacity_);
    // Scratch slot used for the three-way swap in the DELETED-target case.
    typename phmap::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type
        raw;
    slot_type* slot = reinterpret_cast<slot_type*>(&raw);
    for (size_t i = 0; i != capacity_; ++i) {
        if (!IsDeleted(ctrl_[i])) continue;
        size_t hashval = PolicyTraits::apply(HashElement{hash_ref()},
                                             PolicyTraits::element(slots_ + i));
        auto target = find_first_non_full(hashval);
        size_t new_i = target.offset;
        // Verify if the old and new i fall within the same group wrt the hashval.
        // If they do, we don't need to move the object as it falls already in the
        // best probe we can.
        const auto probe_index = [&](size_t pos) {
            return ((pos - probe(hashval).offset()) & capacity_) / Group::kWidth;
        };
        // Element doesn't move.
        if (PHMAP_PREDICT_TRUE(probe_index(new_i) == probe_index(i))) {
            set_ctrl(i, H2(hashval));
            continue;
        }
        if (IsEmpty(ctrl_[new_i])) {
            // Transfer element to the empty spot.
            // set_ctrl poisons/unpoisons the slots so we have to call it at the
            // right time.
            set_ctrl(new_i, H2(hashval));
            PolicyTraits::transfer(&alloc_ref(), slots_ + new_i, slots_ + i);
            set_ctrl(i, kEmpty);
        } else {
            assert(IsDeleted(ctrl_[new_i]));
            set_ctrl(new_i, H2(hashval));
            // Until we are done rehashing, DELETED marks previously FULL slots.
            // Swap i and new_i elements.
            PolicyTraits::transfer(&alloc_ref(), slot, slots_ + i);
            PolicyTraits::transfer(&alloc_ref(), slots_ + i, slots_ + new_i);
            PolicyTraits::transfer(&alloc_ref(), slots_ + new_i, slot);
            --i; // repeat
        }
    }
    reset_growth_left(capacity_);
}
  1884. void rehash_and_grow_if_necessary() {
  1885. if (capacity_ == 0) {
  1886. resize(1);
  1887. } else if (size() <= CapacityToGrowth(capacity()) / 2) {
  1888. // Squash DELETED without growing if there is enough capacity.
  1889. drop_deletes_without_resize();
  1890. } else {
  1891. // Otherwise grow the container.
  1892. resize(capacity_ * 2 + 1);
  1893. }
  1894. }
// Returns true iff an element comparing equal (via operator== on whole
// elements, not just keys) to `elem` is present; `hashval` is its hash.
bool has_element(const value_type& elem, size_t hashval) const {
    auto seq = probe(hashval);
    while (true) {
        Group g{ctrl_ + seq.offset()};
        // Compare each H2-matching candidate in the group.
        for (uint32_t i : g.Match((h2_t)H2(hashval))) {
            if (PHMAP_PREDICT_TRUE(PolicyTraits::element(slots_ + seq.offset((size_t)i)) ==
                                   elem))
                return true;
        }
        // An empty slot ends the probe sequence.
        if (PHMAP_PREDICT_TRUE(g.MatchEmpty())) return false;
        seq.next();
        assert(seq.getindex() < capacity_ && "full table!");
    }
    return false;  // unreachable: the loop above always returns
}
  1910. bool has_element(const value_type& elem) const {
  1911. size_t hashval = PolicyTraits::apply(HashElement{hash_ref()}, elem);
  1912. return has_element(elem, hashval);
  1913. }
  1914. // Probes the raw_hash_set with the probe sequence for hash and returns the
  1915. // pointer to the first empty or deleted slot.
  1916. // NOTE: this function must work with tables having both kEmpty and kDelete
  1917. // in one group. Such tables appears during drop_deletes_without_resize.
  1918. //
  1919. // This function is very useful when insertions happen and:
  1920. // - the input is already a set
  1921. // - there are enough slots
  1922. // - the element with the hash is not in the table
// Result of find_first_non_full: the chosen slot index plus how far along
// the probe sequence it was found (used for hashtablez statistics).
struct FindInfo
{
    size_t offset;
    size_t probe_length;
};
// See the comment block above: returns the first empty-or-deleted slot on
// the probe sequence for `hashval`. Must work with mixed kEmpty/kDeleted
// groups (which occur during drop_deletes_without_resize).
FindInfo find_first_non_full(size_t hashval) {
    auto seq = probe(hashval);
    while (true) {
        Group g{ctrl_ + seq.offset()};
        auto mask = g.MatchEmptyOrDeleted();
        if (mask) {
            // Take the first available slot in this group.
            return {seq.offset((size_t)mask.LowestBitSet()), seq.getindex()};
        }
        assert(seq.getindex() < capacity_ && "full table!");
        seq.next();
    }
}
// TODO(alkis): Optimize this assuming *this and that don't overlap.
// Move-assignment when storage can simply be stolen (propagating /
// always-equal allocator): move into a temporary and swap with it.
raw_hash_set& move_assign(raw_hash_set&& that, std::true_type) {
    raw_hash_set tmp(std::move(that));
    swap(tmp);
    return *this;
}
// Move-assignment when we must keep our own allocator: build the
// temporary with alloc_ref() so elements land in our allocator, then swap.
raw_hash_set& move_assign(raw_hash_set&& that, std::false_type) {
    raw_hash_set tmp(std::move(that), alloc_ref());
    swap(tmp);
    return *this;
}
  1951. protected:
// Returns the slot index holding `key`, or (size_t)-1 when absent.
template <class K>
size_t _find_key(const K& key, size_t hashval) {
    auto seq = probe(hashval);
    while (true) {
        Group g{ctrl_ + seq.offset()};
        // Compare each H2-matching candidate in the group against the key.
        for (uint32_t i : g.Match((h2_t)H2(hashval))) {
            if (PHMAP_PREDICT_TRUE(PolicyTraits::apply(
                    EqualElement<K>{key, eq_ref()},
                    PolicyTraits::element(slots_ + seq.offset((size_t)i)))))
                return seq.offset((size_t)i);
        }
        // An empty slot ends the probe sequence: key not present.
        if (PHMAP_PREDICT_TRUE(g.MatchEmpty())) break;
        seq.next();
    }
    return (size_t)-1;
}
  1968. template <class K>
  1969. std::pair<size_t, bool> find_or_prepare_insert(const K& key, size_t hashval) {
  1970. size_t offset = _find_key(key, hashval);
  1971. if (offset == (size_t)-1)
  1972. return {prepare_insert(hashval), true};
  1973. return {offset, false};
  1974. }
// Reserves a slot for an element with hash `hashval`, rehashing/growing
// first if the growth budget is exhausted. Updates size_ and the growth
// budget, but leaves writing the ctrl byte to the caller (who must call
// set_ctrl after constructing the element).
LL_NO_INLINE size_t prepare_insert(size_t hashval) {
    auto target = find_first_non_full(hashval);
    if (PHMAP_PREDICT_FALSE(growth_left() == 0 &&
                            !IsDeleted(ctrl_[target.offset]))) {
        // Reusing a tombstone costs no growth budget; landing on an empty
        // slot with zero budget forces a rehash or grow first.
        rehash_and_grow_if_necessary();
        target = find_first_non_full(hashval);
    }
    ++size_;
    // Only consuming an EMPTY slot (not a tombstone) reduces the budget.
    growth_left() -= IsEmpty(ctrl_[target.offset]);
    // set_ctrl(target.offset, H2(hashval));
    infoz_.RecordInsert(hashval, target.probe_length);
    return target.offset;
}
  1988. // Constructs the value in the space pointed by the iterator. This only works
  1989. // after an unsuccessful find_or_prepare_insert() and before any other
  1990. // modifications happen in the raw_hash_set.
  1991. //
  1992. // PRECONDITION: i is an index returned from find_or_prepare_insert(k), where
  1993. // k is the key decomposed from `forward<Args>(args)...`, and the bool
  1994. // returned by find_or_prepare_insert(k) was true.
  1995. // POSTCONDITION: *m.iterator_at(i) == value_type(forward<Args>(args)...).
// Constructs the element in slot `i` from `args` (see the contract in the
// comment block above).
template <class... Args>
void emplace_at(size_t i, Args&&... args) {
    PolicyTraits::construct(&alloc_ref(), slots_ + i,
                            std::forward<Args>(args)...);
#ifdef PHMAP_CHECK_CONSTRUCTED_VALUE
    // this check can be costly, so do it only when requested
    assert(PolicyTraits::apply(FindElement{*this}, *iterator_at(i)) ==
           iterator_at(i) &&
           "constructed value does not match the lookup key");
#endif
}
// Builds a (const_)iterator positioned at slot `i`.
iterator iterator_at(size_t i) { return {ctrl_ + i, slots_ + i}; }
const_iterator iterator_at(size_t i) const { return {ctrl_ + i, slots_ + i}; }
  2009. protected:
// Sets the control byte, and if `i < Group::kWidth`, set the cloned byte at
// the end too.
void set_ctrl(size_t i, ctrl_t h) {
    assert(i < capacity_);
    // Keep ASan poisoning in sync with slot occupancy.
    if (IsFull(h)) {
        SanitizerUnpoisonObject(slots_ + i);
    } else {
        SanitizerPoisonObject(slots_ + i);
    }
    ctrl_[i] = h;
    // Mirror into the cloned control region past the sentinel so group
    // loads that wrap around the end of the table see consistent bytes.
    ctrl_[((i - Group::kWidth) & capacity_) + 1 +
          ((Group::kWidth - 1) & capacity_)] = h;
}
  2023. private:
  2024. friend struct RawHashSetTestOnlyAccess;
// Starts the probe sequence for `hashval`, seeded by H1 and wrapping via
// capacity_ (used as a mask by probe_seq).
probe_seq<Group::kWidth> probe(size_t hashval) const {
    return probe_seq<Group::kWidth>(H1(hashval, ctrl_), capacity_);
}
// Reset all ctrl bytes back to kEmpty, except the sentinel.
void reset_ctrl(size_t new_capacity) {
    // Also clears the cloned bytes (kWidth extra) past the sentinel.
    std::memset(ctrl_, kEmpty, new_capacity + Group::kWidth);
    ctrl_[new_capacity] = kSentinel;
    // No slot is occupied now; poison them all for ASan.
    SanitizerPoisonMemoryRegion(slots_, sizeof(slot_type) * new_capacity);
}
// Recomputes the remaining insert budget from the load-factor bound.
void reset_growth_left(size_t new_capacity) {
    growth_left() = CapacityToGrowth(new_capacity) - size_;
}
// Number of inserts still allowed before a rehash (stored in settings_).
LL_INLINE size_t& growth_left() { return std::get<0>(settings_); }
LL_INLINE const size_t& growth_left() const { return std::get<0>(settings_); }
  2039. template <size_t N,
  2040. template <class, class, class, class> class RefSet,
  2041. class M, class P, class H, class E, class A>
  2042. friend class parallel_hash_set;
  2043. template <size_t N,
  2044. template <class, class, class, class> class RefSet,
  2045. class M, class P, class H, class E, class A>
  2046. friend class parallel_hash_map;
  2047. // The representation of the object has two modes:
  2048. // - small: For capacities < kWidth-1
  2049. // - large: For the rest.
  2050. //
  2051. // Differences:
  2052. // - In small mode we are able to use the whole capacity. The extra control
  2053. // bytes give us at least one "empty" control byte to stop the iteration.
  2054. // This is important to make 1 a valid capacity.
  2055. //
  2056. // - In small mode only the first `capacity()` control bytes after the
  2057. // sentinel are valid. The rest contain dummy kEmpty values that do not
  2058. // represent a real slot. This is important to take into account on
  2059. // find_first_non_full(), where we never try ShouldInsertBackwards() for
  2060. // small tables.
  2061. bool is_small() const { return capacity_ < Group::kWidth - 1; }
// Accessors for the function objects packed into the settings_ tuple.
hasher& hash_ref() { return std::get<1>(settings_); }
const hasher& hash_ref() const { return std::get<1>(settings_); }
key_equal& eq_ref() { return std::get<2>(settings_); }
const key_equal& eq_ref() const { return std::get<2>(settings_); }
allocator_type& alloc_ref() { return std::get<3>(settings_); }
const allocator_type& alloc_ref() const {
    return std::get<3>(settings_);
}
  2070. // TODO(alkis): Investigate removing some of these fields:
  2071. // - ctrl/slots can be derived from each other
  2072. // - size can be moved into the slot array
ctrl_t* ctrl_ = EmptyGroup();    // [(capacity + 1) * ctrl_t]
slot_type* slots_ = nullptr;     // [capacity * slot_type]
size_t size_ = 0;                // number of full slots
size_t capacity_ = 0;            // total number of slots
HashtablezInfoHandle infoz_;     // sampling/telemetry handle
// growth_left is stored alongside the functors in one tuple.
std::tuple<size_t /* growth_left */, hasher, key_equal, allocator_type>
    settings_{0, hasher{}, key_equal{}, allocator_type{}};
  2080. };
  2081. // --------------------------------------------------------------------------
  2082. // --------------------------------------------------------------------------
// raw_hash_map: map-style API (at, operator[], try_emplace,
// insert_or_assign) layered on top of raw_hash_set.
template <class Policy, class Hash, class Eq, class Alloc>
class raw_hash_map : public raw_hash_set<Policy, Hash, Eq, Alloc>
{
    // P is Policy. It's passed as a template argument to support maps that have
    // incomplete types as values, as in unordered_map<K, IncompleteType>.
    // MappedReference<> may be a non-reference type.
    template <class P>
    using MappedReference = decltype(P::value(
        std::addressof(std::declval<typename raw_hash_map::reference>())));

    // MappedConstReference<> may be a non-reference type.
    template <class P>
    using MappedConstReference = decltype(P::value(
        std::addressof(std::declval<typename raw_hash_map::const_reference>())));

    // Heterogeneous lookup is enabled only if both functors are transparent.
    using KeyArgImpl =
        KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;

    using Base = raw_hash_set<Policy, Hash, Eq, Alloc>;

public:
    using key_type = typename Policy::key_type;
    using mapped_type = typename Policy::mapped_type;
    template <class K>
    using key_arg = typename KeyArgImpl::template type<K, key_type>;

    static_assert(!std::is_reference<key_type>::value, "");
    // TODO(b/187807849): Evaluate whether to support reference mapped_type and
    // remove this assertion if/when it is supported.
    static_assert(!std::is_reference<mapped_type>::value, "");

    using iterator = typename raw_hash_map::raw_hash_set::iterator;
    using const_iterator = typename raw_hash_map::raw_hash_set::const_iterator;

    raw_hash_map() {}
    using Base::raw_hash_set; // use raw_hash_set constructor

    // The last two template parameters ensure that both arguments are rvalues
    // (lvalue arguments are handled by the overloads below). This is necessary
    // for supporting bitfield arguments.
    //
    //   union { int n : 1; };
    //   flat_hash_map<int, int> m;
    //   m.insert_or_assign(n, n);
    template <class K = key_type, class V = mapped_type, K* = nullptr,
              V* = nullptr>
    std::pair<iterator, bool> insert_or_assign(key_arg<K>&& k, V&& v) {
        return insert_or_assign_impl(std::forward<K>(k), std::forward<V>(v));
    }

    template <class K = key_type, class V = mapped_type, K* = nullptr>
    std::pair<iterator, bool> insert_or_assign(key_arg<K>&& k, const V& v) {
        return insert_or_assign_impl(std::forward<K>(k), v);
    }

    template <class K = key_type, class V = mapped_type, V* = nullptr>
    std::pair<iterator, bool> insert_or_assign(const key_arg<K>& k, V&& v) {
        return insert_or_assign_impl(k, std::forward<V>(v));
    }

    template <class K = key_type, class V = mapped_type>
    std::pair<iterator, bool> insert_or_assign(const key_arg<K>& k, const V& v) {
        return insert_or_assign_impl(k, v);
    }

    // Hinted overloads: the const_iterator hint is accepted for standard-map
    // API compatibility but not used.
    template <class K = key_type, class V = mapped_type, K* = nullptr,
              V* = nullptr>
    iterator insert_or_assign(const_iterator, key_arg<K>&& k, V&& v) {
        return insert_or_assign(std::forward<K>(k), std::forward<V>(v)).first;
    }

    template <class K = key_type, class V = mapped_type, K* = nullptr>
    iterator insert_or_assign(const_iterator, key_arg<K>&& k, const V& v) {
        return insert_or_assign(std::forward<K>(k), v).first;
    }

    template <class K = key_type, class V = mapped_type, V* = nullptr>
    iterator insert_or_assign(const_iterator, const key_arg<K>& k, V&& v) {
        return insert_or_assign(k, std::forward<V>(v)).first;
    }

    template <class K = key_type, class V = mapped_type>
    iterator insert_or_assign(const_iterator, const key_arg<K>& k, const V& v) {
        return insert_or_assign(k, v).first;
    }

    // try_emplace: constructs the mapped value from `args` only when `k` is
    // absent. The enable_if rejects keys convertible to const_iterator so
    // these overloads do not shadow the hinted ones below.
    template <class K = key_type, class... Args,
              typename std::enable_if<
                  !std::is_convertible<K, const_iterator>::value, int>::type = 0,
              K* = nullptr>
    std::pair<iterator, bool> try_emplace(key_arg<K>&& k, Args&&... args) {
        return try_emplace_impl(std::forward<K>(k), std::forward<Args>(args)...);
    }

    template <class K = key_type, class... Args,
              typename std::enable_if<
                  !std::is_convertible<K, const_iterator>::value, int>::type = 0>
    std::pair<iterator, bool> try_emplace(const key_arg<K>& k, Args&&... args) {
        return try_emplace_impl(k, std::forward<Args>(args)...);
    }

    template <class K = key_type, class... Args, K* = nullptr>
    iterator try_emplace(const_iterator, key_arg<K>&& k, Args&&... args) {
        return try_emplace(std::forward<K>(k), std::forward<Args>(args)...).first;
    }

    template <class K = key_type, class... Args>
    iterator try_emplace(const_iterator, const key_arg<K>& k, Args&&... args) {
        return try_emplace(k, std::forward<Args>(args)...).first;
    }

    // Checked lookup: calls ThrowStdOutOfRange when `key` is absent.
    template <class K = key_type, class P = Policy>
    MappedReference<P> at(const key_arg<K>& key) {
        auto it = this->find(key);
        if (it == this->end())
            phmap::base_internal::ThrowStdOutOfRange("phmap at(): lookup non-existent key");
        return Policy::value(&*it);
    }

    template <class K = key_type, class P = Policy>
    MappedConstReference<P> at(const key_arg<K>& key) const {
        auto it = this->find(key);
        if (it == this->end())
            phmap::base_internal::ThrowStdOutOfRange("phmap at(): lookup non-existent key");
        return Policy::value(&*it);
    }

    // operator[]: default-constructs the mapped value when `key` is absent.
    template <class K = key_type, class P = Policy, K* = nullptr>
    MappedReference<P> operator[](key_arg<K>&& key) {
        return Policy::value(&*try_emplace(std::forward<K>(key)).first);
    }

    template <class K = key_type, class P = Policy>
    MappedReference<P> operator[](const key_arg<K>& key) {
        return Policy::value(&*try_emplace(key).first);
    }

private:
    // Inserts (k, v), or assigns v to the existing mapped value.
    template <class K, class V>
    std::pair<iterator, bool> insert_or_assign_impl(K&& k, V&& v) {
        size_t hashval = this->hash(k);
        size_t offset = this->_find_key(k, hashval);
        if (offset == (size_t)-1) {
            // Absent: reserve, construct, then publish the ctrl byte.
            offset = this->prepare_insert(hashval);
            this->emplace_at(offset, std::forward<K>(k), std::forward<V>(v));
            this->set_ctrl(offset, H2(hashval));
            return {this->iterator_at(offset), true};
        }
        // Key present: overwrite the mapped value only.
        Policy::value(&*this->iterator_at(offset)) = std::forward<V>(v);
        return {this->iterator_at(offset), false};
    }

    // Piecewise-constructs value_type from (k, args...) only when absent.
    template <class K = key_type, class... Args>
    std::pair<iterator, bool> try_emplace_impl(K&& k, Args&&... args) {
        size_t hashval = this->hash(k);
        size_t offset = this->_find_key(k, hashval);
        if (offset == (size_t)-1) {
            offset = this->prepare_insert(hashval);
            this->emplace_at(offset, std::piecewise_construct,
                             std::forward_as_tuple(std::forward<K>(k)),
                             std::forward_as_tuple(std::forward<Args>(args)...));
            this->set_ctrl(offset, H2(hashval));
            return {this->iterator_at(offset), true};
        }
        return {this->iterator_at(offset), false};
    }
};
  2225. // ----------------------------------------------------------------------------
  2226. // ----------------------------------------------------------------------------
  2227. // Returns "random" seed.
  2228. LL_INLINE size_t RandomSeed()
  2229. {
  2230. #if PHMAP_HAVE_THREAD_LOCAL
  2231. static thread_local size_t counter = 0;
  2232. size_t value = ++counter;
  2233. #else // PHMAP_HAVE_THREAD_LOCAL
  2234. static std::atomic<size_t> counter(0);
  2235. size_t value = counter.fetch_add(1, std::memory_order_relaxed);
  2236. #endif // PHMAP_HAVE_THREAD_LOCAL
  2237. return value ^ static_cast<size_t>(reinterpret_cast<uintptr_t>(&counter));
  2238. }
  2239. // ----------------------------------------------------------------------------
  2240. // ----------------------------------------------------------------------------
  2241. template <size_t N,
  2242. template <class, class, class, class> class RefSet,
  2243. class Mtx_,
  2244. class Policy, class Hash, class Eq, class Alloc>
  2245. class parallel_hash_set
  2246. {
  2247. using PolicyTraits = hash_policy_traits<Policy>;
  2248. using KeyArgImpl =
  2249. KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;
  2250. static_assert(N <= 12, "N = 12 means 4096 hash tables!");
  2251. constexpr static size_t num_tables = 1 << N;
  2252. constexpr static size_t mask = num_tables - 1;
  2253. public:
  2254. using EmbeddedSet = RefSet<Policy, Hash, Eq, Alloc>;
  2255. using EmbeddedIterator= typename EmbeddedSet::iterator;
  2256. using EmbeddedConstIterator= typename EmbeddedSet::const_iterator;
  2257. using constructor = typename EmbeddedSet::constructor;
  2258. using init_type = typename PolicyTraits::init_type;
  2259. using key_type = typename PolicyTraits::key_type;
  2260. using slot_type = typename PolicyTraits::slot_type;
  2261. using allocator_type = Alloc;
  2262. using size_type = size_t;
  2263. using difference_type = ptrdiff_t;
  2264. using hasher = Hash;
  2265. using key_equal = Eq;
  2266. using policy_type = Policy;
  2267. using value_type = typename PolicyTraits::value_type;
  2268. using reference = value_type&;
  2269. using const_reference = const value_type&;
  2270. using pointer = typename phmap::allocator_traits<
  2271. allocator_type>::template rebind_traits<value_type>::pointer;
  2272. using const_pointer = typename phmap::allocator_traits<
  2273. allocator_type>::template rebind_traits<value_type>::const_pointer;
  2274. // Alias used for heterogeneous lookup functions.
  2275. // `key_arg<K>` evaluates to `K` when the functors are transparent and to
  2276. // `key_type` otherwise. It permits template argument deduction on `K` for the
  2277. // transparent case.
  2278. // --------------------------------------------------------------------
  2279. template <class K>
  2280. using key_arg = typename KeyArgImpl::template type<K, key_type>;
  2281. protected:
  2282. using Lockable = phmap::LockableImpl<Mtx_>;
  2283. using UniqueLock = typename Lockable::UniqueLock;
  2284. using SharedLock = typename Lockable::SharedLock;
  2285. using ReadWriteLock = typename Lockable::ReadWriteLock;
  2286. // --------------------------------------------------------------------
// One shard of the parallel set: an EmbeddedSet guarded by the mutex
// provided by the Lockable base.
struct Inner : public Lockable
{
    // Construction parameters forwarded to each shard's EmbeddedSet.
    struct Params
    {
        size_t bucket_cnt;
        const hasher& hashfn;
        const key_equal& eq;
        const allocator_type& alloc;
    };
    Inner() {}
    Inner(Params const &p) : set_(p.bucket_cnt, p.hashfn, p.eq, p.alloc)
    {}
    bool operator==(const Inner& o) const
    {
        // Lock both shards for reading; const_cast is needed because
        // acquiring the lock mutates only the mutex, not the set.
        typename Lockable::SharedLocks l(const_cast<Inner &>(*this), const_cast<Inner &>(o));
        return set_ == o.set_;
    }
    EmbeddedSet set_;
};
  2306. private:
  2307. // Give an early error when key_type is not hashable/eq.
  2308. // --------------------------------------------------------------------
  2309. auto KeyTypeCanBeHashed(const Hash& h, const key_type& k) -> decltype(h(k));
  2310. auto KeyTypeCanBeEq(const Eq& eq, const key_type& k) -> decltype(eq(k, k));
  2311. using AllocTraits = phmap::allocator_traits<allocator_type>;
  2312. static_assert(std::is_lvalue_reference<reference>::value,
  2313. "Policy::element() must return a reference");
  2314. template <typename T>
  2315. struct SameAsElementReference : std::is_same<
  2316. typename std::remove_cv<typename std::remove_reference<reference>::type>::type,
  2317. typename std::remove_cv<typename std::remove_reference<T>::type>::type> {};
  2318. // An enabler for insert(T&&): T must be convertible to init_type or be the
  2319. // same as [cv] value_type [ref].
  2320. // Note: we separate SameAsElementReference into its own type to avoid using
  2321. // reference unless we need to. MSVC doesn't seem to like it in some
  2322. // cases.
  2323. // --------------------------------------------------------------------
  2324. template <class T>
  2325. using RequiresInsertable = typename std::enable_if<
  2326. phmap::disjunction<std::is_convertible<T, init_type>, SameAsElementReference<T>>::value, int>::type;
  2327. // RequiresNotInit is a workaround for gcc prior to 7.1.
  2328. // See https://godbolt.org/g/Y4xsUh.
  2329. template <class T>
  2330. using RequiresNotInit =
  2331. typename std::enable_if<!std::is_same<T, init_type>::value, int>::type;
  2332. template <class... Ts>
  2333. using IsDecomposable = IsDecomposable<void, PolicyTraits, Hash, Eq, Ts...>;
  2334. public:
  2335. static_assert(std::is_same<pointer, value_type*>::value,
  2336. "Allocators with custom pointer types are not supported");
  2337. static_assert(std::is_same<const_pointer, const value_type*>::value,
  2338. "Allocators with custom pointer types are not supported");
  2339. // --------------------- i t e r a t o r ------------------------------
// Forward iterator over all shards: walks each Inner's EmbeddedSet in
// turn, skipping empty/exhausted shards. inner_ == nullptr marks end().
class iterator
{
    friend class parallel_hash_set;

public:
    using iterator_category = std::forward_iterator_tag;
    using value_type = typename parallel_hash_set::value_type;
    // Const references when the policy mandates constant iterators.
    using reference =
        phmap::conditional_t<PolicyTraits::constant_iterators::value,
                             const value_type&, value_type&>;
    using pointer = phmap::remove_reference_t<reference>*;
    using difference_type = typename parallel_hash_set::difference_type;
    using Inner = typename parallel_hash_set::Inner;
    using EmbeddedSet = typename parallel_hash_set::EmbeddedSet;
    using EmbeddedIterator = typename EmbeddedSet::iterator;

    iterator() {}

    reference operator*() const { return *it_; }
    pointer operator->() const { return &operator*(); }

    iterator& operator++() {
        assert(inner_); // null inner means we are already at the end
        ++it_;
        skip_empty();   // may roll over into the next non-empty shard
        return *this;
    }
    iterator operator++(int) {
        assert(inner_); // null inner means we are already at the end
        auto tmp = *this;
        ++*this;
        return tmp;
    }

    friend bool operator==(const iterator& a, const iterator& b) {
        // Equal when both are at end (inner_ == nullptr), or when they sit
        // on the same shard at the same embedded position.
        return a.inner_ == b.inner_ && (!a.inner_ || a.it_ == b.it_);
    }
    friend bool operator!=(const iterator& a, const iterator& b) {
        return !(a == b);
    }

private:
    iterator(Inner *inner, Inner *inner_end, const EmbeddedIterator& it) :
        inner_(inner), inner_end_(inner_end), it_(it) { // for begin() and end()
        if (inner)
            it_end_ = inner->set_.end();
    }

    // Advances past shards whose embedded range is exhausted; sets
    // inner_ to nullptr once the last shard is consumed.
    void skip_empty() {
        while (it_ == it_end_) {
            ++inner_;
            if (inner_ == inner_end_) {
                inner_ = nullptr; // marks end()
                break;
            }
            else {
                it_ = inner_->set_.begin();
                it_end_ = inner_->set_.end();
            }
        }
    }

    Inner *inner_ = nullptr;
    Inner *inner_end_ = nullptr;
    EmbeddedIterator it_, it_end_;
};
  2398. // --------------------- c o n s t i t e r a t o r -----------------
  2399. class const_iterator
  2400. {
  2401. friend class parallel_hash_set;
  2402. public:
  2403. using iterator_category = typename iterator::iterator_category;
  2404. using value_type = typename parallel_hash_set::value_type;
  2405. using reference = typename parallel_hash_set::const_reference;
  2406. using pointer = typename parallel_hash_set::const_pointer;
  2407. using difference_type = typename parallel_hash_set::difference_type;
  2408. using Inner = typename parallel_hash_set::Inner;
  2409. const_iterator() {}
  2410. // Implicit construction from iterator.
  2411. const_iterator(iterator i) : iter_(std::move(i)) {}
  2412. reference operator*() const { return *(iter_); }
  2413. pointer operator->() const { return iter_.operator->(); }
  2414. const_iterator& operator++() {
  2415. ++iter_;
  2416. return *this;
  2417. }
  2418. const_iterator operator++(int) { return iter_++; }
  2419. friend bool operator==(const const_iterator& a, const const_iterator& b) {
  2420. return a.iter_ == b.iter_;
  2421. }
  2422. friend bool operator!=(const const_iterator& a, const const_iterator& b) {
  2423. return !(a == b);
  2424. }
  2425. private:
  2426. const_iterator(const Inner *inner, const Inner *inner_end, const EmbeddedIterator& it)
  2427. : iter_(const_cast<Inner**>(inner),
  2428. const_cast<Inner**>(inner_end),
  2429. const_cast<EmbeddedIterator*>(it)) {}
  2430. iterator iter_;
  2431. };
// Node handle type used by extract() / insert(node_type&&).
using node_type = node_handle<Policy, hash_policy_traits<Policy>, Alloc>;
using insert_return_type = InsertReturnType<iterator, node_type>;

// ------------------------- c o n s t r u c t o r s ------------------
// Default constructor: noexcept whenever the hasher, key_equal and
// allocator are all nothrow default constructible.
parallel_hash_set() noexcept(
    std::is_nothrow_default_constructible<hasher>::value&&
    std::is_nothrow_default_constructible<key_equal>::value&&
    std::is_nothrow_default_constructible<allocator_type>::value) {}
#if (__cplusplus >= 201703L || _MSVC_LANG >= 201402) && (defined(_MSC_VER) || defined(__clang__) || (defined(__GNUC__) && __GNUC__ > 6))
// Construct with a bucket-count hint. On modern compilers this
// delegates to the index_sequence overload below, so every submap is
// constructed from the same Params pack.
explicit parallel_hash_set(size_t bucket_cnt,
                           const hasher& hash_param = hasher(),
                           const key_equal& eq = key_equal(),
                           const allocator_type& alloc = allocator_type()) :
    parallel_hash_set(typename Inner::Params{bucket_cnt, hash_param, eq, alloc},
                      phmap::make_index_sequence<num_tables>{})
{}

// Expands to one copy of `p` per submap; `(void)i` merely drives the
// pack expansion.
template <std::size_t... i>
parallel_hash_set(typename Inner::Params const &p,
                  phmap::index_sequence<i...>) : sets_{((void)i, p)...}
{}
#else
// Fallback for older compilers: default-construct the submaps, then
// assign each one.
// NOTE(review): this path splits the hint as bucket_cnt / N per submap
// while the path above forwards bucket_cnt unchanged into
// Inner::Params -- confirm both paths interpret the hint the same way.
explicit parallel_hash_set(size_t bucket_cnt,
                           const hasher& hash_param = hasher(),
                           const key_equal& eq = key_equal(),
                           const allocator_type& alloc = allocator_type()) {
    for (auto& inner : sets_)
        inner.set_ = EmbeddedSet(bucket_cnt / N, hash_param, eq, alloc);
}
#endif
// Convenience constructors: each delegates to the main
// (bucket_cnt, hasher, key_equal, allocator) constructor above,
// supplying defaults for the omitted parameters.
parallel_hash_set(size_t bucket_cnt,
                  const hasher& hash_param,
                  const allocator_type& alloc)
    : parallel_hash_set(bucket_cnt, hash_param, key_equal(), alloc) {}

parallel_hash_set(size_t bucket_cnt, const allocator_type& alloc)
    : parallel_hash_set(bucket_cnt, hasher(), key_equal(), alloc) {}

explicit parallel_hash_set(const allocator_type& alloc)
    : parallel_hash_set(0, hasher(), key_equal(), alloc) {}

// Range constructors: build an empty table, then insert [first, last).
template <class InputIter>
parallel_hash_set(InputIter first, InputIter last, size_t bucket_cnt = 0,
                  const hasher& hash_param = hasher(), const key_equal& eq = key_equal(),
                  const allocator_type& alloc = allocator_type())
    : parallel_hash_set(bucket_cnt, hash_param, eq, alloc) {
    insert(first, last);
}

template <class InputIter>
parallel_hash_set(InputIter first, InputIter last, size_t bucket_cnt,
                  const hasher& hash_param, const allocator_type& alloc)
    : parallel_hash_set(first, last, bucket_cnt, hash_param, key_equal(), alloc) {}

template <class InputIter>
parallel_hash_set(InputIter first, InputIter last, size_t bucket_cnt,
                  const allocator_type& alloc)
    : parallel_hash_set(first, last, bucket_cnt, hasher(), key_equal(), alloc) {}

template <class InputIter>
parallel_hash_set(InputIter first, InputIter last, const allocator_type& alloc)
    : parallel_hash_set(first, last, 0, hasher(), key_equal(), alloc) {}
// Instead of accepting std::initializer_list<value_type> as the first
// argument like std::unordered_set<value_type> does, we have two overloads
// that accept std::initializer_list<T> and std::initializer_list<init_type>.
// This is advantageous for performance.
//
//   // Turns {"abc", "def"} into std::initializer_list<std::string>, then copies
//   // the strings into the set.
//   std::unordered_set<std::string> s = {"abc", "def"};
//
//   // Turns {"abc", "def"} into std::initializer_list<const char*>, then
//   // copies the strings into the set.
//   phmap::flat_hash_set<std::string> s = {"abc", "def"};
//
// The same trick is used in insert().
//
// The enabler is necessary to prevent this constructor from triggering where
// the copy constructor is meant to be called.
//
//   phmap::flat_hash_set<int> a, b{a};
//
// RequiresNotInit<T> is a workaround for gcc prior to 7.1.
// --------------------------------------------------------------------
template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
parallel_hash_set(std::initializer_list<T> init, size_t bucket_cnt = 0,
                  const hasher& hash_param = hasher(), const key_equal& eq = key_equal(),
                  const allocator_type& alloc = allocator_type())
    : parallel_hash_set(init.begin(), init.end(), bucket_cnt, hash_param, eq, alloc) {}

parallel_hash_set(std::initializer_list<init_type> init, size_t bucket_cnt = 0,
                  const hasher& hash_param = hasher(), const key_equal& eq = key_equal(),
                  const allocator_type& alloc = allocator_type())
    : parallel_hash_set(init.begin(), init.end(), bucket_cnt, hash_param, eq, alloc) {}

// The remaining overloads delegate to the two constructors above,
// filling in default hasher/key_equal as needed.
template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
parallel_hash_set(std::initializer_list<T> init, size_t bucket_cnt,
                  const hasher& hash_param, const allocator_type& alloc)
    : parallel_hash_set(init, bucket_cnt, hash_param, key_equal(), alloc) {}

parallel_hash_set(std::initializer_list<init_type> init, size_t bucket_cnt,
                  const hasher& hash_param, const allocator_type& alloc)
    : parallel_hash_set(init, bucket_cnt, hash_param, key_equal(), alloc) {}

template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
parallel_hash_set(std::initializer_list<T> init, size_t bucket_cnt,
                  const allocator_type& alloc)
    : parallel_hash_set(init, bucket_cnt, hasher(), key_equal(), alloc) {}

parallel_hash_set(std::initializer_list<init_type> init, size_t bucket_cnt,
                  const allocator_type& alloc)
    : parallel_hash_set(init, bucket_cnt, hasher(), key_equal(), alloc) {}

template <class T, RequiresNotInit<T> = 0, RequiresInsertable<T> = 0>
parallel_hash_set(std::initializer_list<T> init, const allocator_type& alloc)
    : parallel_hash_set(init, 0, hasher(), key_equal(), alloc) {}

parallel_hash_set(std::initializer_list<init_type> init,
                  const allocator_type& alloc)
    : parallel_hash_set(init, 0, hasher(), key_equal(), alloc) {}
// Copy constructor: allocator chosen via
// select_on_container_copy_construction.
parallel_hash_set(const parallel_hash_set& that)
    : parallel_hash_set(that, AllocTraits::select_on_container_copy_construction(
                            that.alloc_ref())) {}

// Copy construct with an explicit allocator: copy each submap in turn.
parallel_hash_set(const parallel_hash_set& that, const allocator_type& a)
    : parallel_hash_set(0, that.hash_ref(), that.eq_ref(), a) {
    for (size_t i=0; i<num_tables; ++i)
        sets_[i].set_ = { that.sets_[i].set_, a };
}
// Move constructor: noexcept when hasher/key_equal/allocator are
// nothrow copy constructible (the allocator is read from `that`).
parallel_hash_set(parallel_hash_set&& that) noexcept(
    std::is_nothrow_copy_constructible<hasher>::value&&
    std::is_nothrow_copy_constructible<key_equal>::value&&
    std::is_nothrow_copy_constructible<allocator_type>::value)
    : parallel_hash_set(std::move(that), that.alloc_ref()) {
}

// Move construct with an explicit allocator: move each submap.
parallel_hash_set(parallel_hash_set&& that, const allocator_type& a)
{
    for (size_t i=0; i<num_tables; ++i)
        sets_[i].set_ = { std::move(that.sets_[i]).set_, a };
}

// Copy assignment: copies the submaps element-wise.
// NOTE(review): this assigns only the embedded sets; whether hasher/
// key_equal/allocator propagate depends on EmbeddedSet's operator= --
// confirm against the embedded set implementation.
parallel_hash_set& operator=(const parallel_hash_set& that) {
    for (size_t i=0; i<num_tables; ++i)
        sets_[i].set_ = that.sets_[i].set_;
    return *this;
}

// Move assignment: moves each submap; `that` is left valid but
// unspecified.
parallel_hash_set& operator=(parallel_hash_set&& that) noexcept(
    phmap::allocator_traits<allocator_type>::is_always_equal::value &&
    std::is_nothrow_move_assignable<hasher>::value &&
    std::is_nothrow_move_assignable<key_equal>::value) {
    for (size_t i=0; i<num_tables; ++i)
        sets_[i].set_ = std::move(that.sets_[i].set_);
    return *this;
}

~parallel_hash_set() {}
  2570. iterator begin() {
  2571. auto it = iterator(&sets_[0], &sets_[0] + num_tables, sets_[0].set_.begin());
  2572. it.skip_empty();
  2573. return it;
  2574. }
  2575. iterator end() { return iterator(); }
  2576. const_iterator begin() const { return const_cast<parallel_hash_set *>(this)->begin(); }
  2577. const_iterator end() const { return const_cast<parallel_hash_set *>(this)->end(); }
  2578. const_iterator cbegin() const { return begin(); }
  2579. const_iterator cend() const { return end(); }
  2580. bool empty() const { return !size(); }
  2581. size_t size() const {
  2582. size_t sz = 0;
  2583. for (const auto& inner : sets_)
  2584. sz += inner.set_.size();
  2585. return sz;
  2586. }
  2587. size_t capacity() const {
  2588. size_t c = 0;
  2589. for (const auto& inner : sets_)
  2590. c += inner.set_.capacity();
  2591. return c;
  2592. }
  2593. size_t max_size() const { return (std::numeric_limits<size_t>::max)(); }
  2594. PHMAP_ATTRIBUTE_REINITIALIZES void clear() {
  2595. for (auto& inner : sets_)
  2596. {
  2597. UniqueLock m(inner);
  2598. inner.set_.clear();
  2599. }
  2600. }
// extension - clears only the specified submap
// ----------------------------------------
void clear(std::size_t submap_index) {
    Inner& inner = sets_[submap_index];
    UniqueLock m(inner); // write lock on this submap only
    inner.set_.clear();
}
// This overload kicks in when the argument is an rvalue of insertable and
// decomposable type other than init_type.
//
//   flat_hash_map<std::string, int> m;
//   m.insert(std::make_pair("abc", 42));
// --------------------------------------------------------------------
template <class T, RequiresInsertable<T> = 0,
          typename std::enable_if<IsDecomposable<T>::value, int>::type = 0,
          T* = nullptr>
std::pair<iterator, bool> insert(T&& value) {
    return emplace(std::forward<T>(value));
}

// This overload kicks in when the argument is a bitfield or an lvalue of
// insertable and decomposable type.
//
//   union { int n : 1; };
//   flat_hash_set<int> s;
//   s.insert(n);
//
//   flat_hash_set<std::string> s;
//   const char* p = "hello";
//   s.insert(p);
//
// TODO(romanp): Once we stop supporting gcc 5.1 and below, replace
// RequiresInsertable<T> with RequiresInsertable<const T&>.
// We are hitting this bug: https://godbolt.org/g/1Vht4f.
// --------------------------------------------------------------------
template <
    class T, RequiresInsertable<T> = 0,
    typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
std::pair<iterator, bool> insert(const T& value) {
    return emplace(value);
}

// This overload kicks in when the argument is an rvalue of init_type. Its
// purpose is to handle brace-init-list arguments.
//
//   flat_hash_set<std::pair<std::string, int>> s;
//   s.insert({"abc", 42});
// --------------------------------------------------------------------
std::pair<iterator, bool> insert(init_type&& value) {
    return emplace(std::move(value));
}

// Hinted-insert overloads: the hint iterator parameter is unnamed and
// ignored; they simply forward to the corresponding insert().
template <class T, RequiresInsertable<T> = 0,
          typename std::enable_if<IsDecomposable<T>::value, int>::type = 0,
          T* = nullptr>
iterator insert(const_iterator, T&& value) {
    return insert(std::forward<T>(value)).first;
}

// TODO(romanp): Once we stop supporting gcc 5.1 and below, replace
// RequiresInsertable<T> with RequiresInsertable<const T&>.
// We are hitting this bug: https://godbolt.org/g/1Vht4f.
// --------------------------------------------------------------------
template <
    class T, RequiresInsertable<T> = 0,
    typename std::enable_if<IsDecomposable<const T&>::value, int>::type = 0>
iterator insert(const_iterator, const T& value) {
    return insert(value).first;
}

iterator insert(const_iterator, init_type&& value) {
    return insert(std::move(value)).first;
}

// Range insert: inserts every element of [first, last) one at a time.
template <class InputIt>
void insert(InputIt first, InputIt last) {
    for (; first != last; ++first) insert(*first);
}

template <class T, RequiresNotInit<T> = 0, RequiresInsertable<const T&> = 0>
void insert(std::initializer_list<T> ilist) {
    insert(ilist.begin(), ilist.end());
}

void insert(std::initializer_list<init_type> ilist) {
    insert(ilist.begin(), ilist.end());
}
// Inserts a node handle (as obtained from extract()). If the key is
// already present, the node is handed back to the caller in the
// returned insert_return_type; on success an empty node is returned.
insert_return_type insert(node_type&& node) {
    if (!node)
        return {end(), false, node_type()};
    auto& key = node.key();
    size_t hashval = this->hash(key);
    Inner& inner = sets_[subidx(hashval)];
    auto& set = inner.set_;

    UniqueLock m(inner); // write lock on the target submap
    auto res  = set.insert(std::move(node), hashval);
    return { make_iterator(&inner, res.position),
             res.inserted,
             res.inserted ? node_type() : std::move(res.node) };
}

// Hinted variant: the hint is ignored.
iterator insert(const_iterator, node_type&& node) {
    return insert(std::move(node)).first;
}
// Functor that forwards its first argument (the key) and ignores the
// rest.
struct ReturnKey_
{
    template <class Key, class... Args>
    Key operator()(Key&& k, const Args&...) const {
        return std::forward<Key>(k);
    }
};
// --------------------------------------------------------------------
// phmap extension: emplace_with_hash
// ----------------------------------
// same as emplace, but hashval is provided
// --------------------------------------------------------------------
struct EmplaceDecomposableHashval
{
    // Invoked by PolicyTraits::apply with the key deduced from the
    // emplace arguments.
    template <class K, class... Args>
    std::pair<iterator, bool> operator()(const K& key, Args&&... args) const {
        return s.emplace_decomposable_with_hash(key, hashval, std::forward<Args>(args)...);
    }
    parallel_hash_set& s;
    size_t hashval;
};

// This overload kicks in if we can deduce the key from args. This enables us
// to avoid constructing value_type if an entry with the same key already
// exists.
//
// For example:
//
//   flat_hash_map<std::string, std::string> m = {{"abc", "def"}};
//   // Creates no std::string copies and makes no heap allocations.
//   m.emplace("abc", "xyz");
// --------------------------------------------------------------------
template <class... Args, typename std::enable_if<IsDecomposable<Args...>::value, int>::type = 0>
std::pair<iterator, bool> emplace_with_hash(size_t hashval, Args&&... args) {
    return PolicyTraits::apply(EmplaceDecomposableHashval{*this, hashval},
                               std::forward<Args>(args)...);
}
// This overload kicks in if we cannot deduce the key from args. It constructs
// value_type unconditionally and then either moves it into the table or
// destroys.
// --------------------------------------------------------------------
template <class... Args, typename std::enable_if<!IsDecomposable<Args...>::value, int>::type = 0>
std::pair<iterator, bool> emplace_with_hash(size_t hashval, Args&&... args) {
    // Build the value in stack storage first; the key only exists once
    // the value_type has been constructed.
    typename phmap::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type raw;
    slot_type* slot = reinterpret_cast<slot_type*>(&raw);

    PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
    const auto& elem = PolicyTraits::element(slot);
    Inner& inner = sets_[subidx(hashval)];
    auto& set = inner.set_;
    UniqueLock m(inner); // write lock on the target submap
    typename EmbeddedSet::template InsertSlotWithHash<true> f { inner, std::move(*slot), hashval };
    return make_rv(PolicyTraits::apply(f, elem));
}

// Hinted variant: the hint is ignored.
template <class... Args>
iterator emplace_hint_with_hash(size_t hashval, const_iterator, Args&&... args) {
    return emplace_with_hash(hashval, std::forward<Args>(args)...).first;
}

// --------------------------------------------------------------------
// end of phmap extension
// --------------------------------------------------------------------
// Insert-or-find for a deducible key with the hash already computed.
// Acquires a ReadWriteLock and switches to a unique (write) lock only
// if the key is absent and an insert is actually needed.
template <class K, class... Args>
std::pair<iterator, bool> emplace_decomposable_with_hash(const K& key, size_t hashval, Args&&... args)
{
    Inner& inner = sets_[subidx(hashval)];
    auto&  set   = inner.set_;
    ReadWriteLock m(inner);

    size_t offset = set._find_key(key, hashval);
    if (offset == (size_t)-1 && m.switch_to_unique()) {
        // we did an unlock/lock, and another thread could have inserted the same key, so we need to
        // do a find() again.
        offset = set._find_key(key, hashval);
    }
    if (offset == (size_t)-1) {
        // Key still absent: insert under the (now unique) lock.
        offset = set.prepare_insert(hashval);
        set.emplace_at(offset, std::forward<Args>(args)...);
        set.set_ctrl(offset, H2(hashval));
        return make_rv(&inner, {set.iterator_at(offset), true});
    }
    return make_rv(&inner, {set.iterator_at(offset), false});
}

// Convenience wrapper that computes the hash itself.
template <class K, class... Args>
std::pair<iterator, bool> emplace_decomposable(const K& key, Args&&... args)
{
    return emplace_decomposable_with_hash(key, this->hash(key), std::forward<Args>(args)...);
}

// Functor adapter used with PolicyTraits::apply when the key can be
// deduced from the emplace arguments.
struct EmplaceDecomposable
{
    template <class K, class... Args>
    std::pair<iterator, bool> operator()(const K& key, Args&&... args) const {
        return s.emplace_decomposable(key, std::forward<Args>(args)...);
    }
    parallel_hash_set& s;
};
// This overload kicks in if we can deduce the key from args. This enables us
// to avoid constructing value_type if an entry with the same key already
// exists.
//
// For example:
//
//   flat_hash_map<std::string, std::string> m = {{"abc", "def"}};
//   // Creates no std::string copies and makes no heap allocations.
//   m.emplace("abc", "xyz");
// --------------------------------------------------------------------
template <class... Args, typename std::enable_if<IsDecomposable<Args...>::value, int>::type = 0>
std::pair<iterator, bool> emplace(Args&&... args) {
    return PolicyTraits::apply(EmplaceDecomposable{*this}, std::forward<Args>(args)...);
}
  2802. // This overload kicks in if we cannot deduce the key from args. It constructs
  2803. // value_type unconditionally and then either moves it into the table or
  2804. // destroys.
  2805. // --------------------------------------------------------------------
  2806. template <class... Args, typename std::enable_if<!IsDecomposable<Args...>::value, int>::type = 0>
  2807. std::pair<iterator, bool> emplace(Args&&... args) {
  2808. typename phmap::aligned_storage<sizeof(slot_type), alignof(slot_type)>::type raw;
  2809. slot_type* slot = reinterpret_cast<slot_type*>(&raw);
  2810. size_t hashval = this->hash(PolicyTraits::key(slot));
  2811. PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
  2812. const auto& elem = PolicyTraits::element(slot);
  2813. Inner& inner = sets_[subidx(hashval)];
  2814. auto& set = inner.set_;
  2815. UniqueLock m(inner);
  2816. typename EmbeddedSet::template InsertSlotWithHash<true> f { inner, std::move(*slot), hashval };
  2817. return make_rv(PolicyTraits::apply(f, elem));
  2818. }
  2819. template <class... Args>
  2820. iterator emplace_hint(const_iterator, Args&&... args) {
  2821. return emplace(std::forward<Args>(args)...).first;
  2822. }
  2823. iterator make_iterator(Inner* inner, const EmbeddedIterator it)
  2824. {
  2825. if (it == inner->set_.end())
  2826. return iterator();
  2827. return iterator(inner, &sets_[0] + num_tables, it);
  2828. }
  2829. std::pair<iterator, bool> make_rv(Inner* inner,
  2830. const std::pair<EmbeddedIterator, bool>& res)
  2831. {
  2832. return {iterator(inner, &sets_[0] + num_tables, res.first), res.second};
  2833. }
// lazy_emplace
// ------------
// If `key` is absent, calls `f` to construct the element in place;
// otherwise returns an iterator to the existing element. Takes a
// ReadWriteLock and switches to a unique (write) lock only when an
// insert is actually required.
template <class K = key_type, class F>
iterator lazy_emplace_with_hash(const key_arg<K>& key, size_t hashval, F&& f) {
    Inner& inner = sets_[subidx(hashval)];
    auto&  set   = inner.set_;
    ReadWriteLock m(inner);

    size_t offset = set._find_key(key, hashval);
    if (offset == (size_t)-1 && m.switch_to_unique()) {
        // we did an unlock/lock, and another thread could have inserted the same key, so we need to
        // do a find() again.
        offset = set._find_key(key, hashval);
    }
    if (offset == (size_t)-1) {
        offset = set.prepare_insert(hashval);
        set.lazy_emplace_at(offset, std::forward<F>(f));
        set.set_ctrl(offset, H2(hashval));
    }
    return make_iterator(&inner, set.iterator_at(offset));
}

// Convenience wrapper that computes the hash itself.
template <class K = key_type, class F>
iterator lazy_emplace(const key_arg<K>& key, F&& f) {
    return lazy_emplace_with_hash(key, this->hash(key), std::forward<F>(f));
}
// emplace_single
// --------------
// Forwards to the embedded set's emplace_single_with_hash while
// holding a write lock on the owning submap.
template <class K = key_type, class F>
void emplace_single_with_hash(const key_arg<K>& key, size_t hashval, F&& f) {
    Inner& inner = sets_[subidx(hashval)];
    auto&  set   = inner.set_;
    UniqueLock m(inner);
    set.emplace_single_with_hash(key, hashval, std::forward<F>(f));
}

// Convenience wrapper that computes the hash itself.
template <class K = key_type, class F>
void emplace_single(const key_arg<K>& key, F&& f) {
    emplace_single_with_hash<K, F>(key, this->hash(key), std::forward<F>(f));
}
// if set contains key, lambda is called with the value_type (under read lock protection),
// and if_contains returns true. This is a const API and lambda should not modify the value
// -----------------------------------------------------------------------------------------
template <class K = key_type, class F>
bool if_contains(const key_arg<K>& key, F&& f) const {
    // Delegates to modify_if_impl with a SharedLock (read lock).
    return const_cast<parallel_hash_set*>(this)->template
        modify_if_impl<K, F, SharedLock>(key, std::forward<F>(f));
}

// if set contains key, lambda is called with the value_type without read lock protection,
// and if_contains_unsafe returns true. This is a const API and lambda should not modify the value
// This should be used only if we know that no other thread may be mutating the set at the time.
// -----------------------------------------------------------------------------------------
template <class K = key_type, class F>
bool if_contains_unsafe(const key_arg<K>& key, F&& f) const {
    // Same as above but with a no-op lock type (no synchronization).
    return const_cast<parallel_hash_set*>(this)->template
        modify_if_impl<K, F, LockableBaseImpl<phmap::NullMutex>::DoNothing>(key, std::forward<F>(f));
}
// if map contains key, lambda is called with the value_type (under write lock protection),
// and modify_if returns true. This is a non-const API and lambda is allowed to modify the mapped value
// ----------------------------------------------------------------------------------------------------
template <class K = key_type, class F>
bool modify_if(const key_arg<K>& key, F&& f) {
    return modify_if_impl<K, F, UniqueLock>(key, std::forward<F>(f));
}

// Shared implementation for if_contains / if_contains_unsafe /
// modify_if. L is the lock type (shared, unique or no-op) held while
// `f` runs; returns false when the key is not present.
// -----------------------------------------------------------------------------------------
template <class K = key_type, class F, class L>
bool modify_if_impl(const key_arg<K>& key, F&& f) {
#if __cplusplus >= 201703L
    static_assert(std::is_invocable<F, value_type&>::value);
#endif
    L m; // lock is acquired inside find_ptr
    auto ptr = this->template find_ptr<K, L>(key, this->hash(key), m);
    if (ptr == nullptr)
        return false;
    std::forward<F>(f)(*ptr);
    return true;
}
// if map contains key, lambda is called with the mapped value (under write lock protection).
// If the lambda returns true, the key is subsequently erased from the map (the write lock
// is only released after erase).
// returns true if key was erased, false otherwise.
// ----------------------------------------------------------------------------------------------------
template <class K = key_type, class F>
bool erase_if(const key_arg<K>& key, F&& f) {
    return !!erase_if_impl<K, F, ReadWriteLock>(key, std::forward<F>(f));
}

// Implementation: L is the lock type; returns the number of elements
// erased (0 or 1).
template <class K = key_type, class F, class L>
size_type erase_if_impl(const key_arg<K>& key, F&& f) {
#if __cplusplus >= 201703L
    static_assert(std::is_invocable<F, value_type&>::value);
#endif
    auto hashval = this->hash(key);
    Inner& inner = sets_[subidx(hashval)];
    auto&  set   = inner.set_;
    L m(inner);
    auto it = set.find(key, hashval);
    if (it == set.end())
        return 0;
    if (m.switch_to_unique()) {
        // we did an unlock/lock, need to call `find()` again
        it = set.find(key, hashval);
        if (it == set.end())
            return 0;
    }
    // Erase only if the predicate approves, still under the lock.
    if (std::forward<F>(f)(const_cast<value_type &>(*it)))
    {
        set._erase(it);
        return 1;
    }
    return 0;
}
// if map already contains key, the first lambda is called with the mapped value (under
// write lock protection) and can update the mapped value.
// if map does not contains key, the second lambda is called and it should invoke the
// passed constructor to construct the value
// returns true if key was not already present, false otherwise.
// ---------------------------------------------------------------------------------------
template <class K = key_type, class FExists, class FEmplace>
bool lazy_emplace_l(const key_arg<K>& key, FExists&& fExists, FEmplace&& fEmplace) {
    size_t hashval = this->hash(key);
    ReadWriteLock m;
    // res: get<0> = owning submap, get<1> = slot offset,
    //      get<2> = true when the key was not found (insert prepared).
    auto res = this->find_or_prepare_insert_with_hash(hashval, key, m);
    Inner* inner = std::get<0>(res);
    if (std::get<2>(res)) {
        // key not found. call fEmplace lambda which should invoke passed constructor
        inner->set_.lazy_emplace_at(std::get<1>(res), std::forward<FEmplace>(fEmplace));
        inner->set_.set_ctrl(std::get<1>(res), H2(hashval));
    } else {
        // key found. Call fExists lambda. In case of the set, non "key" part of value_type can be changed
        auto it = this->iterator_at(inner, inner->set_.iterator_at(std::get<1>(res)));
        std::forward<FExists>(fExists)(const_cast<value_type &>(*it));
    }
    return std::get<2>(res);
}
// Extension API: support iterating over all values
//
//   flat_hash_set<std::string> s;
//   s.insert(...);
//   s.for_each([](auto const & key) {
//        // Safely iterates over all the keys
//   });
// Each submap is visited under its own read lock.
template <class F>
void for_each(F&& fCallback) const {
    for (auto const& inner : sets_) {
        SharedLock m(const_cast<Inner&>(inner));
        std::for_each(inner.set_.begin(), inner.set_.end(), fCallback);
    }
}

// this version allows to modify the values
// (each submap is visited under its own write lock)
template <class F>
void for_each_m(F&& fCallback) {
    for (auto& inner : sets_) {
        UniqueLock m(inner);
        std::for_each(inner.set_.begin(), inner.set_.end(), fCallback);
    }
}
#if __cplusplus >= 201703L
// Parallel variants: iterate the submaps with a C++17 execution
// policy. Each submap is processed under its own read lock (const
// version) or write lock (mutable version).
template <class ExecutionPolicy, class F>
void for_each(ExecutionPolicy&& policy, F&& fCallback) const {
    std::for_each(
        std::forward<ExecutionPolicy>(policy), sets_.begin(), sets_.end(),
        [&](auto const& inner) {
            SharedLock m(const_cast<Inner&>(inner));
            std::for_each(inner.set_.begin(), inner.set_.end(), fCallback);
        }
    );
}

template <class ExecutionPolicy, class F>
void for_each_m(ExecutionPolicy&& policy, F&& fCallback) {
    std::for_each(
        std::forward<ExecutionPolicy>(policy), sets_.begin(), sets_.end(),
        [&](auto& inner) {
            UniqueLock m(inner);
            std::for_each(inner.set_.begin(), inner.set_.end(), fCallback);
        }
    );
}
#endif
// Extension API: access internal submaps by index
// under lock protection
// ex: m.with_submap(i, [&](const Map::EmbeddedSet& set) {
//        for (auto& p : set) { ...; }});
// -------------------------------------------------
// Read-only access: the submap's shared lock is held while `fCallback` runs.
template <class F>
void with_submap(size_t idx, F&& fCallback) const {
    const Inner& inner = sets_[idx];
    const auto& set = inner.set_;
    // const_cast only to take the lock; the set is passed as const.
    SharedLock m(const_cast<Inner&>(inner));
    fCallback(set);
}

// Mutable access: the submap's unique (writer) lock is held during the call.
template <class F>
void with_submap_m(size_t idx, F&& fCallback) {
    Inner& inner = sets_[idx];
    auto& set = inner.set_;
    UniqueLock m(inner);
    fCallback(set);
}
// unsafe, for internal use only: returns the idx-th submap wrapper WITHOUT
// taking any lock. Callers are responsible for synchronization.
LL_INLINE Inner& get_inner(size_t idx) {
    return sets_[idx];
}
// Extension API: support for heterogeneous keys.
//
//   std::unordered_set<std::string> s;
//   // Turns "abc" into std::string.
//   s.erase("abc");
//
//   flat_hash_set<std::string> s;
//   // Uses "abc" directly without copying it into std::string.
//   s.erase("abc");
//
// --------------------------------------------------------------------
// Erases `key` if present; returns the number of elements removed (0 or 1).
// Implemented as an unconditional erase_if under a read/write lock.
template <class K = key_type>
size_type erase(const key_arg<K>& key) {
    auto always_erase = [](const value_type&){ return true; };
    return erase_if_impl<K, decltype(always_erase), ReadWriteLock>(key, std::move(always_erase));
}
// --------------------------------------------------------------------
iterator erase(const_iterator cit) { return erase(cit.iter_); }

// Erases the element pointed to by `it`. Unlike `std::unordered_set::erase`,
// this method returns void to reduce algorithmic complexity to O(1). In
// order to erase while iterating across a map, use the following idiom (which
// also works for standard containers):
//
// for (auto it = m.begin(), end = m.end(); it != end;) {
//    if (<pred>) {
//        m._erase(it++);
//    } else {
//        ++it;
//    }
// }
//
// Do not use erase APIs taking iterators when accessing the map concurrently
// --------------------------------------------------------------------
void _erase(iterator it) {
    Inner* inner = it.inner_;
    assert(inner != nullptr);
    auto& set = inner->set_;
    // UniqueLock m(*inner); // don't lock here
    set._erase(it.it_);
}
LL_INLINE void _erase(const_iterator cit) { _erase(cit.iter_); }

// This overload is necessary because otherwise erase<K>(const K&) would be
// a better match if non-const iterator is passed as an argument.
// Do not use erase APIs taking iterators when accessing the map concurrently
// --------------------------------------------------------------------
LL_INLINE iterator erase(iterator it) { _erase(it++); return it; }

// Range erase: removes [first, last) one element at a time and returns the
// iterator following the last removed element.
iterator erase(const_iterator first, const_iterator last) {
    while (first != last) {
        _erase(first++);
    }
    return last.iter_;
}
// Moves elements from `src` into `this`.
// If the element already exists in `this`, it is left unmodified in `src`.
// Do not use erase APIs taking iterators when accessing the map concurrently
// --------------------------------------------------------------------
template <typename E = Eq>
void merge(parallel_hash_set<N, RefSet, Mtx_, Policy, Hash, E, Alloc>& src) {  // NOLINT
    assert(this != &src);
    if (this != &src)
    {
        // Merge submap by submap; both sides' i-th submaps are locked
        // together via the two-lock helper.
        for (size_t i=0; i<num_tables; ++i)
        {
            typename Lockable::UniqueLocks l(sets_[i], src.sets_[i]);
            sets_[i].set_.merge(src.sets_[i].set_);
        }
    }
}

// rvalue overload: merging from an rvalue behaves like merging from an
// lvalue (elements that collide stay behind in `src`).
template <typename E = Eq>
void merge(parallel_hash_set<N, RefSet, Mtx_, Policy, Hash, E, Alloc>&& src) {
    merge(src);
}
// Removes the element at `position` and returns it as a node handle.
// No lock is taken here — do not use concurrently with writers.
LL_INLINE node_type extract(const_iterator position) {
    return position.iter_.inner_->set_.extract(EmbeddedConstIterator(position.iter_.it_));
}

// Heterogeneous-key extract; the enable_if keeps this overload from
// hijacking calls that pass an iterator.
template <
    class K = key_type,
    typename std::enable_if<!std::is_same<K, iterator>::value, int>::type = 0>
node_type extract(const key_arg<K>& key) {
    auto it = find(key);
    return it == end() ? node_type() : extract(const_iterator{it});
}
// Swaps contents with `that` (which may use a different mutex type),
// submap by submap, holding both submaps' unique locks for each swap.
// NOTE(review): the two locks are acquired sequentially (this-side first);
// two threads swapping a<->b and b<->a concurrently could deadlock — confirm
// intended usage is single-threaded swap.
template<class Mtx2_>
void swap(parallel_hash_set<N, RefSet, Mtx2_, Policy, Hash, Eq, Alloc>& that)
    noexcept(IsNoThrowSwappable<EmbeddedSet>() &&
             (!AllocTraits::propagate_on_container_swap::value ||
              IsNoThrowSwappable<allocator_type>(typename AllocTraits::propagate_on_container_swap{})))
{
    using std::swap;
    using Lockable2 = phmap::LockableImpl<Mtx2_>;

    for (size_t i=0; i<num_tables; ++i)
    {
        typename Lockable::UniqueLock l(sets_[i]);
        typename Lockable2::UniqueLock l2(that.get_inner(i));
        swap(sets_[i].set_, that.get_inner(i).set_);
    }
}
// Rehashes each submap to hold its share (n / num_tables) of the requested
// total bucket count, under the submap's writer lock.
void rehash(size_t n) {
    size_t nn = n / num_tables;
    for (auto& inner : sets_)
    {
        UniqueLock m(inner);
        inner.set_.rehash(nn);
    }
}

// Reserves capacity for at least `n` elements overall. Takes the larger of
// the growth-based lower bound and the per-submap normalized capacity so
// that each submap ends up with a valid (power-of-two-based) capacity.
void reserve(size_t n)
{
    size_t target = GrowthToLowerboundCapacity(n);
    size_t normalized = num_tables * NormalizeCapacity(n / num_tables);
    rehash(normalized > target ? normalized : target);
}
// Extension API: support for heterogeneous keys.
//
//   std::unordered_set<std::string> s;
//   // Turns "abc" into std::string.
//   s.count("abc");
//
//   ch_set<std::string> s;
//   // Uses "abc" directly without copying it into std::string.
//   s.count("abc");
// --------------------------------------------------------------------
// Returns 0 or 1 (keys are unique in this container).
template <class K = key_type>
LL_INLINE size_t count(const key_arg<K>& key) const {
    return find(key) == end() ? 0 : 1;
}

// Issues CPU prefetch instructions for the memory needed to find or insert
// a key. Like all lookup functions, this support heterogeneous keys.
//
// NOTE: This is a very low level operation and should not be used without
// specific benchmarks indicating its importance.
// --------------------------------------------------------------------
LL_INLINE void prefetch_hash(size_t hashval) const {
    const Inner& inner = sets_[subidx(hashval)];
    const auto& set = inner.set_;
    // const_cast only to take the reader lock on the submap.
    SharedLock m(const_cast<Inner&>(inner));
    set.prefetch_hash(hashval);
}

template <class K = key_type>
LL_INLINE void prefetch(const key_arg<K>& key) const {
    prefetch_hash(this->hash(key));
}
// The API of find() has two extensions.
//
// 1. The hash can be passed by the user. It must be equal to the hash of the
// key.
//
// 2. The type of the key argument doesn't have to be key_type. This is so
// called heterogeneous key support.
// --------------------------------------------------------------------
// The SharedLock is released before this returns; the iterator is only safe
// to use if no concurrent writer invalidates it.
template <class K = key_type>
LL_INLINE iterator find(const key_arg<K>& key, size_t hashval) {
    SharedLock m;
    return find(key, hashval, m);
}

template <class K = key_type>
LL_INLINE iterator find(const key_arg<K>& key) {
    return find(key, this->hash(key));
}

// const overloads forward to the non-const implementation; the const_cast is
// safe because find() does not modify the container.
template <class K = key_type>
LL_INLINE const_iterator find(const key_arg<K>& key, size_t hashval) const {
    return const_cast<parallel_hash_set*>(this)->find(key, hashval);
}

template <class K = key_type>
LL_INLINE const_iterator find(const key_arg<K>& key) const {
    return find(key, this->hash(key));
}

template <class K = key_type>
LL_INLINE bool contains(const key_arg<K>& key) const {
    return find(key) != end();
}

// contains() with a caller-supplied hash (must equal hash(key)).
template <class K = key_type>
LL_INLINE bool contains(const key_arg<K>& key, size_t hashval) const {
    return find(key, hashval) != end();
}
// Keys are unique, so the range is either empty or a single element.
template <class K = key_type>
std::pair<iterator, iterator> equal_range(const key_arg<K>& key) {
    auto it = find(key);
    if (it != end()) return {it, std::next(it)};
    return {it, it};
}

template <class K = key_type>
std::pair<const_iterator, const_iterator> equal_range(
    const key_arg<K>& key) const {
    auto it = find(key);
    if (it != end()) return {it, std::next(it)};
    return {it, it};
}
// Total bucket count, summed over all submaps (each read under its lock).
size_t bucket_count() const {
    size_t sz = 0;
    for (const auto& inner : sets_)
    {
        SharedLock m(const_cast<Inner&>(inner));
        sz += inner.set_.bucket_count();
    }
    return sz;
}

// size() / bucket_count(); 0 when the container has no buckets.
float load_factor() const {
    size_t _capacity = bucket_count();
    return _capacity ? static_cast<float>(static_cast<double>(size()) / _capacity) : 0;
}

// Fixed at 1.0; provided for API compatibility with std::unordered_set.
float max_load_factor() const { return 1.0f; }
void max_load_factor(float) {
    // Does nothing.
}
hasher hash_function() const { return hash_ref(); }  // warning: doesn't match internal hash - use hash() member function
key_equal key_eq() const { return eq_ref(); }
allocator_type get_allocator() const { return alloc_ref(); }

// Element-wise equality over the submap array (submaps compare pairwise).
friend bool operator==(const parallel_hash_set& a, const parallel_hash_set& b) {
    return std::equal(a.sets_.begin(), a.sets_.end(), b.sets_.begin());
}

friend bool operator!=(const parallel_hash_set& a, const parallel_hash_set& b) {
    return !(a == b);
}

// ADL swap; allows the two containers to use different mutex types.
template<class Mtx2_>
LL_INLINE friend void swap(parallel_hash_set& a,
                           parallel_hash_set<N, RefSet, Mtx2_, Policy, Hash, Eq, Alloc>& b)
    noexcept(noexcept(a.swap(b)))
{
    a.swap(b);
}
// The hash actually used internally: the user hasher's result passed through
// the mixer (unless PHMAP_NO_MIXING is defined — see HashElement).
template <class K>
LL_INLINE size_t hash(const K& key) const {
    return HashElement{hash_ref()}(key);
}

#if !defined(PHMAP_NON_DETERMINISTIC)
    // Serialization hooks (defined elsewhere); return false on failure.
    template<typename OutputArchive>
    bool phmap_dump(OutputArchive& ar) const;

    template<typename InputArchive>
    bool phmap_load(InputArchive& ar);
#endif
private:
    template <class Container, typename Enabler>
    friend struct phmap::priv::hashtable_debug_internal::HashtableDebugAccess;

    // Functor adapters used with PolicyTraits::apply: each one ignores the
    // trailing args and acts only on the key.

    // Looks up the key in the enclosing container.
    struct FindElement
    {
        template <class K, class... Args>
        const_iterator operator()(const K& key, Args&&...) const {
            return s.find(key);
        }
        const parallel_hash_set& s;
    };

    // Computes the internal hash: the user hash, optionally passed through
    // phmap_mix so the top bits (used for submap selection) are well mixed.
    struct HashElement
    {
        template <class K, class... Args>
        LL_INLINE size_t operator()(const K& key, Args&&...) const {
#ifdef PHMAP_NO_MIXING
            return (size_t)h(key);
#else
            return phmap_mix<sizeof(size_t)>()(h(key));
#endif
        }
        const hasher& h;
    };

    // Compares a candidate element against the stored key `rhs` using `eq`.
    template <class K1>
    struct EqualElement
    {
        template <class K2, class... Args>
        bool operator()(const K2& lhs, Args&&...) const {
            return eq(lhs, rhs);
        }
        const K1& rhs;
        const key_equal& eq;
    };
// "erases" the object from the container, except that it doesn't actually
// destroy the object. It only updates all the metadata of the class.
// This can be used in conjunction with Policy::transfer to move the object to
// another place.
// NOTE(review): `it.set_` is asserted against nullptr (i.e. it is a pointer)
// but is then accessed with `.` instead of `->`, and the argument wraps an
// inner iterator in the outer const_iterator type — this looks like it would
// not compile if instantiated. Verify whether this function is dead code.
// --------------------------------------------------------------------
void erase_meta_only(const_iterator cit) {
    auto &it = cit.iter_;
    assert(it.set_ != nullptr);
    it.set_.erase_meta_only(const_iterator(it.it_));
}
// Rewrites each submap in place to reclaim tombstones without growing,
// holding the submap's writer lock.
LL_NO_INLINE void drop_deletes_without_resize() {
    for (auto& inner : sets_)
    {
        UniqueLock m(inner);
        inner.set_.drop_deletes_without_resize();
    }
}
  3312. bool has_element(const value_type& elem) const {
  3313. size_t hashval = PolicyTraits::apply(HashElement{hash_ref()}, elem);
  3314. Inner& inner = sets_[subidx(hashval)];
  3315. auto& set = inner.set_;
  3316. SharedLock m(const_cast<Inner&>(inner));
  3317. return set.has_element(elem, hashval);
  3318. }
// TODO(alkis): Optimize this assuming *this and that don't overlap.
// --------------------------------------------------------------------
// Move-assignment helpers: both build a temporary from `that` and swap it in.
// true_type: allocator propagates on move — temporary steals that's allocator.
template<class Mtx2_>
parallel_hash_set& move_assign(parallel_hash_set<N, RefSet, Mtx2_, Policy, Hash, Eq, Alloc>&& that, std::true_type) {
    parallel_hash_set<N, RefSet, Mtx2_, Policy, Hash, Eq, Alloc> tmp(std::move(that));
    swap(tmp);
    return *this;
}

// false_type: allocator does not propagate — temporary uses our allocator.
template<class Mtx2_>
parallel_hash_set& move_assign(parallel_hash_set<N, RefSet, Mtx2_, Policy, Hash, Eq, Alloc>&& that, std::false_type) {
    parallel_hash_set<N, RefSet, Mtx2_, Policy, Hash, Eq, Alloc> tmp(std::move(that), alloc_ref());
    swap(tmp);
    return *this;
}
protected:
    // Lookup that leaves the submap's lock held in `mutexlock` on return, so
    // the caller can safely use the returned pointer/iterator.
    template <class K = key_type, class L = SharedLock>
    pointer find_ptr(const key_arg<K>& key, size_t hashval, L& mutexlock)
    {
        Inner& inner = sets_[subidx(hashval)];
        auto& set = inner.set_;
        // move-assign the freshly acquired lock out to the caller's lock object
        mutexlock = std::move(L(inner));
        return set.find_ptr(key, hashval);
    }

    // Same as above but returns an iterator into the whole container.
    template <class K = key_type, class L = SharedLock>
    iterator find(const key_arg<K>& key, size_t hashval, L& mutexlock) {
        Inner& inner = sets_[subidx(hashval)];
        auto& set = inner.set_;
        mutexlock = std::move(L(inner));
        return make_iterator(&inner, set.find(key, hashval));
    }
// Core insert primitive: returns {submap, slot offset, inserted?}. On return
// `mutexlock` holds the submap's lock (upgraded to unique if an insert slot
// was prepared). Offset (size_t)-1 from _find_key means "not found".
template <class K>
std::tuple<Inner*, size_t, bool>
find_or_prepare_insert_with_hash(size_t hashval, const K& key, ReadWriteLock &mutexlock) {
    Inner& inner = sets_[subidx(hashval)];
    auto& set = inner.set_;
    mutexlock = std::move(ReadWriteLock(inner));
    size_t offset = set._find_key(key, hashval);
    if (offset == (size_t)-1 && mutexlock.switch_to_unique()) {
        // we did an unlock/lock, and another thread could have inserted the same key, so we need to
        // do a find() again.
        offset = set._find_key(key, hashval);
    }
    if (offset == (size_t)-1) {
        offset = set.prepare_insert(hashval);
        return std::make_tuple(&inner, offset, true);
    }
    return std::make_tuple(&inner, offset, false);
}

// Convenience wrapper that computes the hash itself.
template <class K>
std::tuple<Inner*, size_t, bool>
find_or_prepare_insert(const K& key, ReadWriteLock &mutexlock) {
    return find_or_prepare_insert_with_hash<K>(this->hash(key), key, mutexlock);
}
// Builds a container-level iterator from a submap pointer and an iterator
// into that submap; the second argument is the one-past-the-end submap.
iterator iterator_at(Inner *inner,
                     const EmbeddedIterator& it) {
    return {inner, &sets_[0] + num_tables, it};
}

const_iterator iterator_at(Inner *inner,
                           const EmbeddedIterator& it) const {
    return {inner, &sets_[0] + num_tables, it};
}

// Selects the submap for a hash value. Mixes bytes 1-3 of the hash so the
// submap index is decorrelated from the low bits used inside the submap.
static size_t subidx(size_t hashval) {
    return ((hashval >> 8) ^ (hashval >> 16) ^ (hashval >> 24)) & mask;
}

// Number of submaps (shards) in this container.
static size_t subcnt() {
    return num_tables;
}
  3386. private:
  3387. friend struct RawHashSetTestOnlyAccess;
  3388. size_t growth_left() {
  3389. size_t sz = 0;
  3390. for (const auto& set : sets_)
  3391. sz += set.growth_left();
  3392. return sz;
  3393. }
    // All submaps share the same functors/allocator; submap 0 is canonical.
    hasher& hash_ref() { return sets_[0].set_.hash_ref(); }
    const hasher& hash_ref() const { return sets_[0].set_.hash_ref(); }
    key_equal& eq_ref() { return sets_[0].set_.eq_ref(); }
    const key_equal& eq_ref() const { return sets_[0].set_.eq_ref(); }
    allocator_type& alloc_ref() { return sets_[0].set_.alloc_ref(); }
    const allocator_type& alloc_ref() const {
        return sets_[0].set_.alloc_ref();
    }

protected:  // protected in case users want to derive from this
    // The fixed array of lockable submaps (shards).
    std::array<Inner, num_tables> sets_;
  3404. };
  3405. // --------------------------------------------------------------------------
  3406. // --------------------------------------------------------------------------
// A sharded (parallel) hash map built on parallel_hash_set: the value_type is
// a pair and the map-specific API (at, operator[], try_emplace,
// insert_or_assign, and the phmap lambda extensions) is layered on top.
template <size_t N,
          template <class, class, class, class> class RefSet,
          class Mtx_,
          class Policy, class Hash, class Eq, class Alloc>
class parallel_hash_map : public parallel_hash_set<N, RefSet, Mtx_, Policy, Hash, Eq, Alloc>
{
    // P is Policy. It's passed as a template argument to support maps that have
    // incomplete types as values, as in unordered_map<K, IncompleteType>.
    // MappedReference<> may be a non-reference type.
    template <class P>
    using MappedReference = decltype(P::value(
        std::addressof(std::declval<typename parallel_hash_map::reference>())));

    // MappedConstReference<> may be a non-reference type.
    template <class P>
    using MappedConstReference = decltype(P::value(
        std::addressof(std::declval<typename parallel_hash_map::const_reference>())));

    // Heterogeneous lookup is enabled only when both Eq and Hash are transparent.
    using KeyArgImpl =
        KeyArg<IsTransparent<Eq>::value && IsTransparent<Hash>::value>;

    using Base = typename parallel_hash_map::parallel_hash_set;
    using Lockable = phmap::LockableImpl<Mtx_>;
    using UniqueLock = typename Lockable::UniqueLock;
    using SharedLock = typename Lockable::SharedLock;
    using ReadWriteLock = typename Lockable::ReadWriteLock;

public:
    using key_type = typename Policy::key_type;
    using mapped_type = typename Policy::mapped_type;
    using value_type = typename Base::value_type;
    template <class K>
    using key_arg = typename KeyArgImpl::template type<K, key_type>;

    static_assert(!std::is_reference<key_type>::value, "");

    // TODO(alkis): remove this assertion and verify that reference mapped_type is
    // supported.
    static_assert(!std::is_reference<mapped_type>::value, "");

    using iterator = typename parallel_hash_map::parallel_hash_set::iterator;
    using const_iterator = typename parallel_hash_map::parallel_hash_set::const_iterator;

    parallel_hash_map() {}

#ifdef __INTEL_COMPILER
    using Base::parallel_hash_set;
#else
    using parallel_hash_map::parallel_hash_set::parallel_hash_set;
#endif

    // The last two template parameters ensure that both arguments are rvalues
    // (lvalue arguments are handled by the overloads below). This is necessary
    // for supporting bitfield arguments.
    //
    //   union { int n : 1; };
    //   flat_hash_map<int, int> m;
    //   m.insert_or_assign(n, n);
    template <class K = key_type, class V = mapped_type, K* = nullptr,
              V* = nullptr>
    std::pair<iterator, bool> insert_or_assign(key_arg<K>&& k, V&& v) {
        return insert_or_assign_impl(std::forward<K>(k), std::forward<V>(v));
    }

    template <class K = key_type, class V = mapped_type, K* = nullptr>
    std::pair<iterator, bool> insert_or_assign(key_arg<K>&& k, const V& v) {
        return insert_or_assign_impl(std::forward<K>(k), v);
    }

    template <class K = key_type, class V = mapped_type, V* = nullptr>
    std::pair<iterator, bool> insert_or_assign(const key_arg<K>& k, V&& v) {
        return insert_or_assign_impl(k, std::forward<V>(v));
    }

    template <class K = key_type, class V = mapped_type>
    std::pair<iterator, bool> insert_or_assign(const key_arg<K>& k, const V& v) {
        return insert_or_assign_impl(k, v);
    }

    // Hinted overloads: the iterator hint is accepted for API compatibility
    // but ignored.
    template <class K = key_type, class V = mapped_type, K* = nullptr,
              V* = nullptr>
    iterator insert_or_assign(const_iterator, key_arg<K>&& k, V&& v) {
        return insert_or_assign(std::forward<K>(k), std::forward<V>(v)).first;
    }

    template <class K = key_type, class V = mapped_type, K* = nullptr>
    iterator insert_or_assign(const_iterator, key_arg<K>&& k, const V& v) {
        return insert_or_assign(std::forward<K>(k), v).first;
    }

    template <class K = key_type, class V = mapped_type, V* = nullptr>
    iterator insert_or_assign(const_iterator, const key_arg<K>& k, V&& v) {
        return insert_or_assign(k, std::forward<V>(v)).first;
    }

    template <class K = key_type, class V = mapped_type>
    iterator insert_or_assign(const_iterator, const key_arg<K>& k, const V& v) {
        return insert_or_assign(k, v).first;
    }

    // try_emplace: inserts {k, mapped_type(args...)} only if k is absent; the
    // enable_if keeps these from shadowing the hinted overloads below.
    template <class K = key_type, class... Args,
              typename std::enable_if<
                  !std::is_convertible<K, const_iterator>::value, int>::type = 0,
              K* = nullptr>
    std::pair<iterator, bool> try_emplace(key_arg<K>&& k, Args&&... args) {
        return try_emplace_impl(std::forward<K>(k), std::forward<Args>(args)...);
    }

    template <class K = key_type, class... Args,
              typename std::enable_if<
                  !std::is_convertible<K, const_iterator>::value, int>::type = 0>
    std::pair<iterator, bool> try_emplace(const key_arg<K>& k, Args&&... args) {
        return try_emplace_impl(k, std::forward<Args>(args)...);
    }

    // Hinted try_emplace overloads (hint ignored).
    template <class K = key_type, class... Args, K* = nullptr>
    iterator try_emplace(const_iterator, key_arg<K>&& k, Args&&... args) {
        return try_emplace(std::forward<K>(k), std::forward<Args>(args)...).first;
    }

    template <class K = key_type, class... Args>
    iterator try_emplace(const_iterator, const key_arg<K>& k, Args&&... args) {
        return try_emplace(k, std::forward<Args>(args)...).first;
    }

    // Checked element access: throws std::out_of_range if the key is absent.
    template <class K = key_type, class P = Policy>
    MappedReference<P> at(const key_arg<K>& key) {
        auto it = this->find(key);
        if (it == this->end())
            phmap::base_internal::ThrowStdOutOfRange("phmap at(): lookup non-existent key");
        return Policy::value(&*it);
    }

    template <class K = key_type, class P = Policy>
    MappedConstReference<P> at(const key_arg<K>& key) const {
        auto it = this->find(key);
        if (it == this->end())
            phmap::base_internal::ThrowStdOutOfRange("phmap at(): lookup non-existent key");
        return Policy::value(&*it);
    }

    // ----------- phmap extensions --------------------------
    // try_emplace variants taking a precomputed hash (must equal hash(k)).
    template <class K = key_type, class... Args,
              typename std::enable_if<
                  !std::is_convertible<K, const_iterator>::value, int>::type = 0,
              K* = nullptr>
    std::pair<iterator, bool> try_emplace_with_hash(size_t hashval, key_arg<K>&& k, Args&&... args) {
        return try_emplace_impl_with_hash(hashval, std::forward<K>(k), std::forward<Args>(args)...);
    }

    template <class K = key_type, class... Args,
              typename std::enable_if<
                  !std::is_convertible<K, const_iterator>::value, int>::type = 0>
    std::pair<iterator, bool> try_emplace_with_hash(size_t hashval, const key_arg<K>& k, Args&&... args) {
        return try_emplace_impl_with_hash(hashval, k, std::forward<Args>(args)...);
    }

    template <class K = key_type, class... Args, K* = nullptr>
    iterator try_emplace_with_hash(size_t hashval, const_iterator, key_arg<K>&& k, Args&&... args) {
        return try_emplace_with_hash(hashval, std::forward<K>(k), std::forward<Args>(args)...).first;
    }

    template <class K = key_type, class... Args>
    iterator try_emplace_with_hash(size_t hashval, const_iterator, const key_arg<K>& k, Args&&... args) {
        return try_emplace_with_hash(hashval, k, std::forward<Args>(args)...).first;
    }

    // if map does not contains key, it is inserted and the mapped value is value-constructed
    // with the provided arguments (if any), as with try_emplace.
    // if map already contains key, then the lambda is called with the mapped value (under
    // write lock protection) and can update the mapped value.
    // returns true if key was not already present, false otherwise.
    // ---------------------------------------------------------------------------------------
    template <class K = key_type, class F, class... Args>
    bool try_emplace_l(K&& k, F&& f, Args&&... args) {
        size_t hashval = this->hash(k);
        ReadWriteLock m;
        auto res = this->find_or_prepare_insert_with_hash(hashval, k, m);
        typename Base::Inner *inner = std::get<0>(res);
        if (std::get<2>(res)) {
            // key was absent: construct the value in the prepared slot.
            inner->set_.emplace_at(std::get<1>(res), std::piecewise_construct,
                                   std::forward_as_tuple(std::forward<K>(k)),
                                   std::forward_as_tuple(std::forward<Args>(args)...));
            inner->set_.set_ctrl(std::get<1>(res), H2(hashval));
        } else {
            auto it = this->iterator_at(inner, inner->set_.iterator_at(std::get<1>(res)));
            // call lambda. in case of the set, non "key" part of value_type can be changed
            std::forward<F>(f)(const_cast<value_type &>(*it));
        }
        return std::get<2>(res);
    }

    // returns {pointer, bool} instead of {iterator, bool} per try_emplace.
    // useful for node-based containers, since the pointer is not invalidated by concurrent insert etc.
    template <class K = key_type, class... Args>
    std::pair<typename parallel_hash_map::parallel_hash_set::pointer, bool> try_emplace_p(K&& k, Args&&... args) {
        size_t hashval = this->hash(k);
        ReadWriteLock m;
        auto res = this->find_or_prepare_insert_with_hash(hashval, k, m);
        typename Base::Inner *inner = std::get<0>(res);
        if (std::get<2>(res)) {
            inner->set_.emplace_at(std::get<1>(res), std::piecewise_construct,
                                   std::forward_as_tuple(std::forward<K>(k)),
                                   std::forward_as_tuple(std::forward<Args>(args)...));
            inner->set_.set_ctrl(std::get<1>(res), H2(hashval));
        }
        auto it = this->iterator_at(inner, inner->set_.iterator_at(std::get<1>(res)));
        return {&*it, std::get<2>(res)};
    }
    // ----------- end of phmap extensions --------------------------

    // operator[]: default-constructs the mapped value when the key is absent.
    template <class K = key_type, class P = Policy, K* = nullptr>
    MappedReference<P> operator[](key_arg<K>&& key) {
        return Policy::value(&*try_emplace(std::forward<K>(key)).first);
    }

    template <class K = key_type, class P = Policy>
    MappedReference<P> operator[](const key_arg<K>& key) {
        return Policy::value(&*try_emplace(key).first);
    }

private:
    // Inserts {k, v}, or assigns v to the existing mapped value, holding the
    // submap's lock via find_or_prepare_insert_with_hash.
    template <class K, class V>
    std::pair<iterator, bool> insert_or_assign_impl(K&& k, V&& v) {
        size_t hashval = this->hash(k);
        ReadWriteLock m;
        auto res = this->find_or_prepare_insert_with_hash(hashval, k, m);
        typename Base::Inner *inner = std::get<0>(res);
        if (std::get<2>(res)) {
            inner->set_.emplace_at(std::get<1>(res), std::forward<K>(k), std::forward<V>(v));
            inner->set_.set_ctrl(std::get<1>(res), H2(hashval));
        } else
            Policy::value(&*inner->set_.iterator_at(std::get<1>(res))) = std::forward<V>(v);
        return {this->iterator_at(inner, inner->set_.iterator_at(std::get<1>(res))),
                std::get<2>(res)};
    }

    template <class K = key_type, class... Args>
    std::pair<iterator, bool> try_emplace_impl(K&& k, Args&&... args) {
        return try_emplace_impl_with_hash(this->hash(k), std::forward<K>(k),
                                          std::forward<Args>(args)...);
    }

    // Inserts {k, mapped_type(args...)} only when k is absent; existing
    // entries are left untouched (per try_emplace semantics).
    template <class K = key_type, class... Args>
    std::pair<iterator, bool> try_emplace_impl_with_hash(size_t hashval, K&& k, Args&&... args) {
        ReadWriteLock m;
        auto res = this->find_or_prepare_insert_with_hash(hashval, k, m);
        typename Base::Inner *inner = std::get<0>(res);
        if (std::get<2>(res)) {
            inner->set_.emplace_at(std::get<1>(res), std::piecewise_construct,
                                   std::forward_as_tuple(std::forward<K>(k)),
                                   std::forward_as_tuple(std::forward<Args>(args)...));
            inner->set_.set_ctrl(std::get<1>(res), H2(hashval));
        }
        return {this->iterator_at(inner, inner->set_.iterator_at(std::get<1>(res))),
                std::get<2>(res)};
    }
};
// Constructs T into uninitialized storage pointed by `ptr` using the args
// specified in the tuple.
// ----------------------------------------------------------------------------
// Expands the tuple via an index sequence and forwards each element to the
// allocator-aware constructor.
template <class Alloc, class T, class Tuple>
void ConstructFromTuple(Alloc* alloc, T* ptr, Tuple&& t) {
    memory_internal::ConstructFromTupleImpl(
        alloc, ptr, std::forward<Tuple>(t),
        phmap::make_index_sequence<
            std::tuple_size<typename std::decay<Tuple>::type>::value>());
}
// Constructs T using the args specified in the tuple and calls F with the
// constructed value.
// ----------------------------------------------------------------------------
// Returns whatever `f` returns when invoked with the freshly built T.
template <class T, class Tuple, class F>
decltype(std::declval<F>()(std::declval<T>())) WithConstructed(
    Tuple&& t, F&& f) {
    return memory_internal::WithConstructedImpl<T>(
        std::forward<Tuple>(t),
        phmap::make_index_sequence<
            std::tuple_size<typename std::decay<Tuple>::type>::value>(),
        std::forward<F>(f));
}
  3653. // ----------------------------------------------------------------------------
  3654. // Given arguments of an std::pair's consructor, PairArgs() returns a pair of
  3655. // tuples with references to the passed arguments. The tuples contain
  3656. // constructor arguments for the first and the second elements of the pair.
  3657. //
  3658. // The following two snippets are equivalent.
  3659. //
  3660. // 1. std::pair<F, S> p(args...);
  3661. //
  3662. // 2. auto a = PairArgs(args...);
  3663. // std::pair<F, S> p(std::piecewise_construct,
  3664. // std::move(p.first), std::move(p.second));
  3665. // ----------------------------------------------------------------------------
  3666. LL_INLINE std::pair<std::tuple<>, std::tuple<>> PairArgs() { return {}; }
  3667. template <class F, class S>
  3668. std::pair<std::tuple<F&&>, std::tuple<S&&>> PairArgs(F&& f, S&& s) {
  3669. return {std::piecewise_construct, std::forward_as_tuple(std::forward<F>(f)),
  3670. std::forward_as_tuple(std::forward<S>(s))};
  3671. }
// Copying from an existing pair: both elements are captured by const
// reference.
template <class F, class S>
std::pair<std::tuple<const F&>, std::tuple<const S&>> PairArgs(
    const std::pair<F, S>& p) {
    return PairArgs(p.first, p.second);
}

// Moving from an rvalue pair: both elements are forwarded as rvalues.
template <class F, class S>
std::pair<std::tuple<F&&>, std::tuple<S&&>> PairArgs(std::pair<F, S>&& p) {
    return PairArgs(std::forward<F>(p.first), std::forward<S>(p.second));
}

// Piecewise construction: the two argument tuples are taken as-is (TupleRef
// keeps references to their elements).
template <class F, class S>
auto PairArgs(std::piecewise_construct_t, F&& f, S&& s)
    -> decltype(std::make_pair(memory_internal::TupleRef(std::forward<F>(f)),
                               memory_internal::TupleRef(std::forward<S>(s)))) {
    return std::make_pair(memory_internal::TupleRef(std::forward<F>(f)),
                          memory_internal::TupleRef(std::forward<S>(s)));
}
// A helper function for implementing apply() in map policies.
// ----------------------------------------------------------------------------
// Normalizes any pair-constructor argument list via PairArgs, then invokes
// `f` with the extracted key plus the construction tuples.
template <class F, class... Args>
auto DecomposePair(F&& f, Args&&... args)
    -> decltype(memory_internal::DecomposePairImpl(
           std::forward<F>(f), PairArgs(std::forward<Args>(args)...))) {
    return memory_internal::DecomposePairImpl(
        std::forward<F>(f), PairArgs(std::forward<Args>(args)...));
}
  3697. // A helper function for implementing apply() in set policies.
  3698. // ----------------------------------------------------------------------------
  3699. template <class F, class Arg>
  3700. decltype(std::declval<F>()(std::declval<const Arg&>(), std::declval<Arg>()))
  3701. DecomposeValue(F&& f, Arg&& arg) {
  3702. const auto& key = arg;
  3703. return std::forward<F>(f)(key, std::forward<Arg>(arg));
  3704. }
  3705. // --------------------------------------------------------------------------
  3706. // Policy: a policy defines how to perform different operations on
  3707. // the slots of the hashtable (see hash_policy_traits.h for the full interface
  3708. // of policy).
  3709. //
  3710. // Hash: a (possibly polymorphic) functor that hashes keys of the hashtable. The
  3711. // functor should accept a key and return size_t as hash. For best performance
  3712. // it is important that the hash function provides high entropy across all bits
  3713. // of the hash.
  3714. //
  3715. // Eq: a (possibly polymorphic) functor that compares two keys for equality. It
  3716. // should accept two (of possibly different type) keys and return a bool: true
  3717. // if they are equal, false if they are not. If two keys compare equal, then
  3718. // their hash values as defined by Hash MUST be equal.
  3719. //
  3720. // Allocator: an Allocator [https://devdocs.io/cpp/concept/allocator] with which
  3721. // the storage of the hashtable will be allocated and the elements will be
  3722. // constructed and destroyed.
  3723. // --------------------------------------------------------------------------
  3724. template <class T>
  3725. struct FlatHashSetPolicy
  3726. {
  3727. using slot_type = T;
  3728. using key_type = T;
  3729. using init_type = T;
  3730. using constant_iterators = std::true_type;
  3731. using is_flat = std::true_type;
  3732. template <class Allocator, class... Args>
  3733. static void construct(Allocator* alloc, slot_type* slot, Args&&... args) {
  3734. phmap::allocator_traits<Allocator>::construct(*alloc, slot,
  3735. std::forward<Args>(args)...);
  3736. }
  3737. template <class Allocator>
  3738. static void destroy(Allocator* alloc, slot_type* slot) {
  3739. phmap::allocator_traits<Allocator>::destroy(*alloc, slot);
  3740. }
  3741. template <class Allocator>
  3742. static void transfer(Allocator* alloc, slot_type* new_slot,
  3743. slot_type* old_slot) {
  3744. construct(alloc, new_slot, std::move(*old_slot));
  3745. destroy(alloc, old_slot);
  3746. }
  3747. static T& element(slot_type* slot) { return *slot; }
  3748. template <class F, class... Args>
  3749. static decltype(phmap::priv::DecomposeValue(
  3750. std::declval<F>(), std::declval<Args>()...))
  3751. apply(F&& f, Args&&... args) {
  3752. return phmap::priv::DecomposeValue(
  3753. std::forward<F>(f), std::forward<Args>(args)...);
  3754. }
  3755. static size_t space_used(const T*) { return 0; }
  3756. };
  3757. // --------------------------------------------------------------------------
  3758. // --------------------------------------------------------------------------
  3759. template <class K, class V>
  3760. struct FlatHashMapPolicy
  3761. {
  3762. using slot_policy = priv::map_slot_policy<K, V>;
  3763. using slot_type = typename slot_policy::slot_type;
  3764. using key_type = K;
  3765. using mapped_type = V;
  3766. using init_type = std::pair</*non const*/ key_type, mapped_type>;
  3767. using is_flat = std::true_type;
  3768. template <class Allocator, class... Args>
  3769. static void construct(Allocator* alloc, slot_type* slot, Args&&... args) {
  3770. slot_policy::construct(alloc, slot, std::forward<Args>(args)...);
  3771. }
  3772. template <class Allocator>
  3773. static void destroy(Allocator* alloc, slot_type* slot) {
  3774. slot_policy::destroy(alloc, slot);
  3775. }
  3776. template <class Allocator>
  3777. static void transfer(Allocator* alloc, slot_type* new_slot,
  3778. slot_type* old_slot) {
  3779. slot_policy::transfer(alloc, new_slot, old_slot);
  3780. }
  3781. template <class F, class... Args>
  3782. static decltype(phmap::priv::DecomposePair(
  3783. std::declval<F>(), std::declval<Args>()...))
  3784. apply(F&& f, Args&&... args) {
  3785. return phmap::priv::DecomposePair(std::forward<F>(f),
  3786. std::forward<Args>(args)...);
  3787. }
  3788. static size_t space_used(const slot_type*) { return 0; }
  3789. static std::pair<const K, V>& element(slot_type* slot) { return slot->value; }
  3790. static V& value(std::pair<const K, V>* kv) { return kv->second; }
  3791. static const V& value(const std::pair<const K, V>* kv) { return kv->second; }
  3792. };
// Generic policy adapter for node-based tables: the slot holds a pointer to a
// heap-allocated element, and the derived Policy supplies
// new_element/delete_element/element_space_used.
template <class Reference, class Policy>
struct node_hash_policy {
    static_assert(std::is_lvalue_reference<Reference>::value, "");

    // slot_type is a pointer to the (cv-stripped) referenced element type.
    using slot_type = typename std::remove_cv<
        typename std::remove_reference<Reference>::type>::type*;

    // Allocate a fresh element and park its address in the slot.
    template <class Alloc, class... Args>
    static void construct(Alloc* alloc, slot_type* slot, Args&&... args) {
        *slot = Policy::new_element(alloc, std::forward<Args>(args)...);
    }

    // Free the element the slot points at.
    template <class Alloc>
    static void destroy(Alloc* alloc, slot_type* slot) {
        Policy::delete_element(alloc, *slot);
    }

    // Transfer is only a pointer copy — the element itself never moves.
    template <class Alloc>
    static void transfer(Alloc*, slot_type* new_slot, slot_type* old_slot) {
        *new_slot = *old_slot;
    }

    // Per-element heap usage; a null slot queries the policy with nullptr.
    static size_t space_used(const slot_type* slot) {
        if (slot == nullptr) return Policy::element_space_used(nullptr);
        return Policy::element_space_used(*slot);
    }

    // Double indirection: slot -> element pointer -> element.
    static Reference element(slot_type* slot) { return **slot; }

    template <class T, class P = Policy>
    static auto value(T* elem) -> decltype(P::value(elem)) {
        return P::value(elem);
    }

    template <class... Ts, class P = Policy>
    static auto apply(Ts&&... ts) -> decltype(P::apply(std::forward<Ts>(ts)...)) {
        return P::apply(std::forward<Ts>(ts)...);
    }
};
  3824. // --------------------------------------------------------------------------
  3825. // --------------------------------------------------------------------------
  3826. template <class T>
  3827. struct NodeHashSetPolicy
  3828. : phmap::priv::node_hash_policy<T&, NodeHashSetPolicy<T>>
  3829. {
  3830. using key_type = T;
  3831. using init_type = T;
  3832. using constant_iterators = std::true_type;
  3833. using is_flat = std::false_type;
  3834. template <class Allocator, class... Args>
  3835. static T* new_element(Allocator* alloc, Args&&... args) {
  3836. using ValueAlloc =
  3837. typename phmap::allocator_traits<Allocator>::template rebind_alloc<T>;
  3838. ValueAlloc value_alloc(*alloc);
  3839. T* res = phmap::allocator_traits<ValueAlloc>::allocate(value_alloc, 1);
  3840. phmap::allocator_traits<ValueAlloc>::construct(value_alloc, res,
  3841. std::forward<Args>(args)...);
  3842. return res;
  3843. }
  3844. template <class Allocator>
  3845. static void delete_element(Allocator* alloc, T* elem) {
  3846. using ValueAlloc =
  3847. typename phmap::allocator_traits<Allocator>::template rebind_alloc<T>;
  3848. ValueAlloc value_alloc(*alloc);
  3849. phmap::allocator_traits<ValueAlloc>::destroy(value_alloc, elem);
  3850. phmap::allocator_traits<ValueAlloc>::deallocate(value_alloc, elem, 1);
  3851. }
  3852. template <class F, class... Args>
  3853. static decltype(phmap::priv::DecomposeValue(
  3854. std::declval<F>(), std::declval<Args>()...))
  3855. apply(F&& f, Args&&... args) {
  3856. return phmap::priv::DecomposeValue(
  3857. std::forward<F>(f), std::forward<Args>(args)...);
  3858. }
  3859. static size_t element_space_used(const T*) { return sizeof(T); }
  3860. };
  3861. // --------------------------------------------------------------------------
  3862. // --------------------------------------------------------------------------
  3863. template <class Key, class Value>
  3864. class NodeHashMapPolicy
  3865. : public phmap::priv::node_hash_policy<
  3866. std::pair<const Key, Value>&, NodeHashMapPolicy<Key, Value>>
  3867. {
  3868. using value_type = std::pair<const Key, Value>;
  3869. public:
  3870. using key_type = Key;
  3871. using mapped_type = Value;
  3872. using init_type = std::pair</*non const*/ key_type, mapped_type>;
  3873. using is_flat = std::false_type;
  3874. template <class Allocator, class... Args>
  3875. static value_type* new_element(Allocator* alloc, Args&&... args) {
  3876. using PairAlloc = typename phmap::allocator_traits<
  3877. Allocator>::template rebind_alloc<value_type>;
  3878. PairAlloc pair_alloc(*alloc);
  3879. value_type* res =
  3880. phmap::allocator_traits<PairAlloc>::allocate(pair_alloc, 1);
  3881. phmap::allocator_traits<PairAlloc>::construct(pair_alloc, res,
  3882. std::forward<Args>(args)...);
  3883. return res;
  3884. }
  3885. template <class Allocator>
  3886. static void delete_element(Allocator* alloc, value_type* pair) {
  3887. using PairAlloc = typename phmap::allocator_traits<
  3888. Allocator>::template rebind_alloc<value_type>;
  3889. PairAlloc pair_alloc(*alloc);
  3890. phmap::allocator_traits<PairAlloc>::destroy(pair_alloc, pair);
  3891. phmap::allocator_traits<PairAlloc>::deallocate(pair_alloc, pair, 1);
  3892. }
  3893. template <class F, class... Args>
  3894. static decltype(phmap::priv::DecomposePair(
  3895. std::declval<F>(), std::declval<Args>()...))
  3896. apply(F&& f, Args&&... args) {
  3897. return phmap::priv::DecomposePair(std::forward<F>(f),
  3898. std::forward<Args>(args)...);
  3899. }
  3900. static size_t element_space_used(const value_type*) {
  3901. return sizeof(value_type);
  3902. }
  3903. static Value& value(value_type* elem) { return elem->second; }
  3904. static const Value& value(const value_type* elem) { return elem->second; }
  3905. };
  3906. // --------------------------------------------------------------------------
  3907. // hash_default
  3908. // --------------------------------------------------------------------------
  3909. #if PHMAP_HAVE_STD_STRING_VIEW
  3910. // Supports heterogeneous lookup for basic_string<T>-like elements.
  3911. template<class CharT>
  3912. struct StringHashEqT
  3913. {
  3914. struct Hash
  3915. {
  3916. using is_transparent = void;
  3917. size_t operator()(std::basic_string_view<CharT> v) const {
  3918. std::string_view bv{
  3919. reinterpret_cast<const char*>(v.data()), v.size() * sizeof(CharT)};
  3920. return std::hash<std::string_view>()(bv);
  3921. }
  3922. };
  3923. struct Eq {
  3924. using is_transparent = void;
  3925. LL_INLINE bool operator()(std::basic_string_view<CharT> lhs,
  3926. std::basic_string_view<CharT> rhs) const {
  3927. return lhs == rhs;
  3928. }
  3929. };
  3930. };
// Route every std string / string_view key type through the byte-wise
// StringHashEqT so owning and non-owning keys of the same character type
// hash and compare identically (enabling heterogeneous lookup).
template <>
struct HashEq<std::string> : StringHashEqT<char> {};
template <>
struct HashEq<std::string_view> : StringHashEqT<char> {};
// char16_t
template <>
struct HashEq<std::u16string> : StringHashEqT<char16_t> {};
template <>
struct HashEq<std::u16string_view> : StringHashEqT<char16_t> {};
// wchar_t
template <>
struct HashEq<std::wstring> : StringHashEqT<wchar_t> {};
template <>
struct HashEq<std::wstring_view> : StringHashEqT<wchar_t> {};
  3945. #endif
  3946. // Supports heterogeneous lookup for pointers and smart pointers.
  3947. // -------------------------------------------------------------
  3948. template <class T>
  3949. struct HashEq<T*>
  3950. {
  3951. struct Hash {
  3952. using is_transparent = void;
  3953. template <class U>
  3954. size_t operator()(const U& ptr) const {
  3955. // we want phmap::Hash<T*> and not phmap::Hash<const T*>
  3956. // so "struct std::hash<T*> " override works
  3957. return phmap::Hash<T*>{}((T*)(uintptr_t)HashEq::ToPtr(ptr));
  3958. }
  3959. };
  3960. struct Eq {
  3961. using is_transparent = void;
  3962. template <class A, class B>
  3963. bool operator()(const A& a, const B& b) const {
  3964. return HashEq::ToPtr(a) == HashEq::ToPtr(b);
  3965. }
  3966. };
  3967. private:
  3968. static const T* ToPtr(const T* ptr) { return ptr; }
  3969. template <class U, class D>
  3970. static const T* ToPtr(const std::unique_ptr<U, D>& ptr) {
  3971. return ptr.get();
  3972. }
  3973. template <class U>
  3974. static const T* ToPtr(const std::shared_ptr<U>& ptr) {
  3975. return ptr.get();
  3976. }
  3977. };
// Smart pointers hash and compare exactly like the raw pointer they hold,
// which is what makes T*, unique_ptr<T> and shared_ptr<T> interchangeable
// as lookup keys.
template <class T, class D>
struct HashEq<std::unique_ptr<T, D>> : HashEq<T*> {};
template <class T>
struct HashEq<std::shared_ptr<T>> : HashEq<T*> {};
  3982. namespace hashtable_debug_internal {
  3983. // --------------------------------------------------------------------------
  3984. // --------------------------------------------------------------------------
// SFINAE detector: true_type when T exposes a nested `raw_hash_set` type.
// Used below to pick the HashtableDebugAccess specialization for flat tables.
template<typename, typename = void >
struct has_member_type_raw_hash_set : std::false_type
{};
template<typename T>
struct has_member_type_raw_hash_set<T, phmap::void_t<typename T::raw_hash_set>> : std::true_type
{};
  3991. template <typename Set>
  3992. struct HashtableDebugAccess<Set, typename std::enable_if<has_member_type_raw_hash_set<Set>::value>::type>
  3993. {
  3994. using Traits = typename Set::PolicyTraits;
  3995. using Slot = typename Traits::slot_type;
  3996. static size_t GetNumProbes(const Set& set,
  3997. const typename Set::key_type& key) {
  3998. size_t num_probes = 0;
  3999. size_t hashval = set.hash(key);
  4000. auto seq = set.probe(hashval);
  4001. while (true) {
  4002. priv::Group g{set.ctrl_ + seq.offset()};
  4003. for (uint32_t i : g.Match((h2_t)priv::H2(hashval))) {
  4004. if (Traits::apply(
  4005. typename Set::template EqualElement<typename Set::key_type>{
  4006. key, set.eq_ref()},
  4007. Traits::element(set.slots_ + seq.offset((size_t)i))))
  4008. return num_probes;
  4009. ++num_probes;
  4010. }
  4011. if (g.MatchEmpty()) return num_probes;
  4012. seq.next();
  4013. ++num_probes;
  4014. }
  4015. }
  4016. static size_t AllocatedByteSize(const Set& c) {
  4017. size_t capacity = c.capacity_;
  4018. if (capacity == 0) return 0;
  4019. auto layout = Set::MakeLayout(capacity);
  4020. size_t m = layout.AllocSize();
  4021. size_t per_slot = Traits::space_used(static_cast<const Slot*>(nullptr));
  4022. if (per_slot != ~size_t{}) {
  4023. m += per_slot * c.size();
  4024. } else {
  4025. for (size_t i = 0; i != capacity; ++i) {
  4026. if (priv::IsFull(c.ctrl_[i])) {
  4027. m += Traits::space_used(c.slots_ + i);
  4028. }
  4029. }
  4030. }
  4031. return m;
  4032. }
  4033. static size_t LowerBoundAllocatedByteSize(size_t size) {
  4034. size_t capacity = GrowthToLowerboundCapacity(size);
  4035. if (capacity == 0) return 0;
  4036. auto layout = Set::MakeLayout(NormalizeCapacity(capacity));
  4037. size_t m = layout.AllocSize();
  4038. size_t per_slot = Traits::space_used(static_cast<const Slot*>(nullptr));
  4039. if (per_slot != ~size_t{}) {
  4040. m += per_slot * size;
  4041. }
  4042. return m;
  4043. }
  4044. };
// SFINAE detector: true_type when T exposes a nested `EmbeddedSet` type.
// Used below to pick the HashtableDebugAccess specialization for the
// sharded (parallel) containers.
template<typename, typename = void >
struct has_member_type_EmbeddedSet : std::false_type
{};
template<typename T>
struct has_member_type_EmbeddedSet<T, phmap::void_t<typename T::EmbeddedSet>> : std::true_type
{};
  4051. template <typename Set>
  4052. struct HashtableDebugAccess<Set, typename std::enable_if<has_member_type_EmbeddedSet<Set>::value>::type> {
  4053. using Traits = typename Set::PolicyTraits;
  4054. using Slot = typename Traits::slot_type;
  4055. using EmbeddedSet = typename Set::EmbeddedSet;
  4056. static size_t GetNumProbes(const Set& set, const typename Set::key_type& key) {
  4057. size_t hashval = set.hash(key);
  4058. auto& inner = set.sets_[set.subidx(hashval)];
  4059. auto& inner_set = inner.set_;
  4060. return HashtableDebugAccess<EmbeddedSet>::GetNumProbes(inner_set, key);
  4061. }
  4062. };
  4063. } // namespace hashtable_debug_internal
  4064. } // namespace priv
  4065. // -----------------------------------------------------------------------------
  4066. // phmap::flat_hash_set
  4067. // -----------------------------------------------------------------------------
// A `phmap::flat_hash_set<T>` is an unordered associative container which has
  4069. // been optimized for both speed and memory footprint in most common use cases.
  4070. // Its interface is similar to that of `std::unordered_set<T>` with the
  4071. // following notable differences:
  4072. //
// * Supports heterogeneous lookup, through `find()` and `insert()`,
// provided that the set is provided a compatible heterogeneous
  4075. // hashing function and equality operator.
  4076. // * Invalidates any references and pointers to elements within the table after
  4077. // `rehash()`.
  4078. // * Contains a `capacity()` member function indicating the number of element
  4079. // slots (open, deleted, and empty) within the hash set.
  4080. // * Returns `void` from the `_erase(iterator)` overload.
  4081. // -----------------------------------------------------------------------------
// Thin public wrapper: all behavior lives in priv::raw_hash_set; this class
// only re-exports the inherited interface.
template <class T, class Hash, class Eq, class Alloc> // default values in phmap_fwd_decl.h
class flat_hash_set
    : public phmap::priv::raw_hash_set<
          phmap::priv::FlatHashSetPolicy<T>, Hash, Eq, Alloc>
{
    using Base = typename flat_hash_set::raw_hash_set;

public:
    flat_hash_set() {}
#ifdef __INTEL_COMPILER
    using Base::raw_hash_set;  // icc: name the base constructor directly
#else
    using Base::Base;          // inherit all raw_hash_set constructors
#endif
    // Iterators
    using Base::begin;
    using Base::cbegin;
    using Base::cend;
    using Base::end;
    // Capacity
    using Base::capacity;
    using Base::empty;
    using Base::max_size;
    using Base::size;
    // Modifiers
    using Base::clear; // may shrink - To avoid shrinking `erase(begin(), end())`
    using Base::erase;
    using Base::insert;
    using Base::emplace;
    using Base::emplace_hint;
    using Base::extract;
    using Base::merge;
    using Base::swap;
    using Base::rehash;
    using Base::reserve;
    // Lookup
    using Base::contains;
    using Base::count;
    using Base::equal_range;
    using Base::find;
    // Observers / table statistics
    using Base::bucket_count;
    using Base::load_factor;
    using Base::max_load_factor;
    using Base::get_allocator;
    using Base::hash_function;
    using Base::hash;
    using Base::key_eq;
};
  4125. // -----------------------------------------------------------------------------
  4126. // phmap::flat_hash_map
  4127. // -----------------------------------------------------------------------------
  4128. //
// A `phmap::flat_hash_map<K, V>` is an unordered associative container which
  4130. // has been optimized for both speed and memory footprint in most common use
  4131. // cases. Its interface is similar to that of `std::unordered_map<K, V>` with
  4132. // the following notable differences:
  4133. //
  4134. // * Supports heterogeneous lookup, through `find()`, `operator[]()` and
  4135. // `insert()`, provided that the map is provided a compatible heterogeneous
  4136. // hashing function and equality operator.
  4137. // * Invalidates any references and pointers to elements within the table after
  4138. // `rehash()`.
  4139. // * Contains a `capacity()` member function indicating the number of element
  4140. // slots (open, deleted, and empty) within the hash map.
  4141. // * Returns `void` from the `_erase(iterator)` overload.
  4142. // -----------------------------------------------------------------------------
// Thin public wrapper: all behavior lives in priv::raw_hash_map; this class
// only re-exports the inherited interface.
template <class K, class V, class Hash, class Eq, class Alloc> // default values in phmap_fwd_decl.h
class flat_hash_map : public phmap::priv::raw_hash_map<
                          phmap::priv::FlatHashMapPolicy<K, V>,
                          Hash, Eq, Alloc> {
    using Base = typename flat_hash_map::raw_hash_map;

public:
    flat_hash_map() {}
#ifdef __INTEL_COMPILER
    using Base::raw_hash_map;  // icc: name the base constructor directly
#else
    using Base::Base;          // inherit all raw_hash_map constructors
#endif
    // Iterators
    using Base::begin;
    using Base::cbegin;
    using Base::cend;
    using Base::end;
    // Capacity
    using Base::capacity;
    using Base::empty;
    using Base::max_size;
    using Base::size;
    // Modifiers
    using Base::clear;
    using Base::erase;
    using Base::insert;
    using Base::insert_or_assign;
    using Base::emplace;
    using Base::emplace_hint;
    using Base::try_emplace;
    using Base::extract;
    using Base::merge;
    using Base::swap;
    using Base::rehash;
    using Base::reserve;
    // Lookup / element access
    using Base::at;
    using Base::contains;
    using Base::count;
    using Base::equal_range;
    using Base::find;
    using Base::operator[];
    // Observers / table statistics
    using Base::bucket_count;
    using Base::load_factor;
    using Base::max_load_factor;
    using Base::get_allocator;
    using Base::hash_function;
    using Base::hash;
    using Base::key_eq;
};
  4189. // -----------------------------------------------------------------------------
  4190. // phmap::node_hash_set
  4191. // -----------------------------------------------------------------------------
// A `phmap::node_hash_set<T>` is an unordered associative container which
  4193. // has been optimized for both speed and memory footprint in most common use
  4194. // cases. Its interface is similar to that of `std::unordered_set<T>` with the
  4195. // following notable differences:
  4196. //
// * Supports heterogeneous lookup, through `find()` and `insert()`,
// provided that the set is provided a compatible heterogeneous
  4199. // hashing function and equality operator.
  4200. // * Contains a `capacity()` member function indicating the number of element
  4201. // slots (open, deleted, and empty) within the hash set.
  4202. // * Returns `void` from the `_erase(iterator)` overload.
  4203. // -----------------------------------------------------------------------------
// Thin public wrapper over priv::raw_hash_set with the node-based policy;
// this class only re-exports the inherited interface.
template <class T, class Hash, class Eq, class Alloc> // default values in phmap_fwd_decl.h
class node_hash_set
    : public phmap::priv::raw_hash_set<
          phmap::priv::NodeHashSetPolicy<T>, Hash, Eq, Alloc>
{
    using Base = typename node_hash_set::raw_hash_set;

public:
    node_hash_set() {}
#ifdef __INTEL_COMPILER
    using Base::raw_hash_set;  // icc: name the base constructor directly
#else
    using Base::Base;          // inherit all raw_hash_set constructors
#endif
    // Iterators
    using Base::begin;
    using Base::cbegin;
    using Base::cend;
    using Base::end;
    // Capacity
    using Base::capacity;
    using Base::empty;
    using Base::max_size;
    using Base::size;
    // Modifiers
    using Base::clear;
    using Base::erase;
    using Base::insert;
    using Base::emplace;
    using Base::emplace_hint;
    using Base::emplace_with_hash;
    using Base::emplace_hint_with_hash;
    using Base::extract;
    using Base::merge;
    using Base::swap;
    using Base::rehash;
    using Base::reserve;
    // Lookup
    using Base::contains;
    using Base::count;
    using Base::equal_range;
    using Base::find;
    // Observers / table statistics
    using Base::bucket_count;
    using Base::load_factor;
    using Base::max_load_factor;
    using Base::get_allocator;
    using Base::hash_function;
    using Base::hash;
    using Base::key_eq;
    // Legacy compatibility aliases for hash_function() and rehash().
    typename Base::hasher hash_funct() { return this->hash_function(); }
    void resize(typename Base::size_type hint) { this->rehash(hint); }
};
  4251. // -----------------------------------------------------------------------------
  4252. // phmap::node_hash_map
  4253. // -----------------------------------------------------------------------------
  4254. //
// A `phmap::node_hash_map<K, V>` is an unordered associative container which
  4256. // has been optimized for both speed and memory footprint in most common use
  4257. // cases. Its interface is similar to that of `std::unordered_map<K, V>` with
  4258. // the following notable differences:
  4259. //
  4260. // * Supports heterogeneous lookup, through `find()`, `operator[]()` and
  4261. // `insert()`, provided that the map is provided a compatible heterogeneous
  4262. // hashing function and equality operator.
  4263. // * Contains a `capacity()` member function indicating the number of element
  4264. // slots (open, deleted, and empty) within the hash map.
  4265. // * Returns `void` from the `_erase(iterator)` overload.
  4266. // -----------------------------------------------------------------------------
// Thin public wrapper over priv::raw_hash_map with the node-based policy;
// this class only re-exports the inherited interface.
template <class Key, class Value, class Hash, class Eq, class Alloc> // default values in phmap_fwd_decl.h
class node_hash_map
    : public phmap::priv::raw_hash_map<
          phmap::priv::NodeHashMapPolicy<Key, Value>, Hash, Eq,
          Alloc>
{
    using Base = typename node_hash_map::raw_hash_map;

public:
    node_hash_map() {}
#ifdef __INTEL_COMPILER
    using Base::raw_hash_map;  // icc: name the base constructor directly
#else
    using Base::Base;          // inherit all raw_hash_map constructors
#endif
    // Iterators
    using Base::begin;
    using Base::cbegin;
    using Base::cend;
    using Base::end;
    // Capacity
    using Base::capacity;
    using Base::empty;
    using Base::max_size;
    using Base::size;
    // Modifiers
    using Base::clear;
    using Base::erase;
    using Base::insert;
    using Base::insert_or_assign;
    using Base::emplace;
    using Base::emplace_hint;
    using Base::try_emplace;
    using Base::extract;
    using Base::merge;
    using Base::swap;
    using Base::rehash;
    using Base::reserve;
    // Lookup / element access
    using Base::at;
    using Base::contains;
    using Base::count;
    using Base::equal_range;
    using Base::find;
    using Base::operator[];
    // Observers / table statistics
    using Base::bucket_count;
    using Base::load_factor;
    using Base::max_load_factor;
    using Base::get_allocator;
    using Base::hash_function;
    using Base::hash;
    using Base::key_eq;
    // Legacy compatibility aliases for hash_function() and rehash().
    typename Base::hasher hash_funct() { return this->hash_function(); }
    void resize(typename Base::size_type hint) { this->rehash(hint); }
};
  4317. // -----------------------------------------------------------------------------
  4318. // phmap::parallel_flat_hash_set
  4319. // -----------------------------------------------------------------------------
// Thin public wrapper: behavior lives in priv::parallel_hash_set (N submaps,
// each a raw_hash_set guarded by Mtx_); this class re-exports the interface.
template <class T, class Hash, class Eq, class Alloc, size_t N, class Mtx_> // default values in phmap_fwd_decl.h
class parallel_flat_hash_set
    : public phmap::priv::parallel_hash_set<
          N, phmap::priv::raw_hash_set, Mtx_,
          phmap::priv::FlatHashSetPolicy<T>,
          Hash, Eq, Alloc>
{
    using Base = typename parallel_flat_hash_set::parallel_hash_set;

public:
    parallel_flat_hash_set() {}
#ifdef __INTEL_COMPILER
    using Base::parallel_hash_set;  // icc: name the base constructor directly
#else
    using Base::Base;               // inherit all parallel_hash_set constructors
#endif
    // Submap sharding helpers (hash value -> submap index / submap count).
    using Base::hash;
    using Base::subidx;
    using Base::subcnt;
    // Iterators
    using Base::begin;
    using Base::cbegin;
    using Base::cend;
    using Base::end;
    // Capacity
    using Base::capacity;
    using Base::empty;
    using Base::max_size;
    using Base::size;
    // Modifiers
    using Base::clear;
    using Base::erase;
    using Base::insert;
    using Base::emplace;
    using Base::emplace_hint;
    using Base::emplace_with_hash;
    using Base::emplace_hint_with_hash;
    using Base::extract;
    using Base::merge;
    using Base::swap;
    using Base::rehash;
    using Base::reserve;
    // Lookup
    using Base::contains;
    using Base::count;
    using Base::equal_range;
    using Base::find;
    // Observers / table statistics
    using Base::bucket_count;
    using Base::load_factor;
    using Base::max_load_factor;
    using Base::get_allocator;
    using Base::hash_function;
    using Base::key_eq;
};
  4369. // -----------------------------------------------------------------------------
  4370. // phmap::parallel_flat_hash_map - default values in phmap_fwd_decl.h
  4371. // -----------------------------------------------------------------------------
// Thin public wrapper: behavior lives in priv::parallel_hash_map (N submaps,
// each a raw_hash_set guarded by Mtx_); this class re-exports the interface.
template <class K, class V, class Hash, class Eq, class Alloc, size_t N, class Mtx_>
class parallel_flat_hash_map : public phmap::priv::parallel_hash_map<
                                   N, phmap::priv::raw_hash_set, Mtx_,
                                   phmap::priv::FlatHashMapPolicy<K, V>,
                                   Hash, Eq, Alloc>
{
    using Base = typename parallel_flat_hash_map::parallel_hash_map;

public:
    parallel_flat_hash_map() {}
#ifdef __INTEL_COMPILER
    using Base::parallel_hash_map;  // icc: name the base constructor directly
#else
    using Base::Base;               // inherit all parallel_hash_map constructors
#endif
    // Submap sharding helpers (hash value -> submap index / submap count).
    using Base::hash;
    using Base::subidx;
    using Base::subcnt;
    // Iterators
    using Base::begin;
    using Base::cbegin;
    using Base::cend;
    using Base::end;
    // Capacity
    using Base::capacity;
    using Base::empty;
    using Base::max_size;
    using Base::size;
    // Modifiers
    using Base::clear;
    using Base::erase;
    using Base::insert;
    using Base::insert_or_assign;
    using Base::emplace;
    using Base::emplace_hint;
    using Base::try_emplace;
    using Base::emplace_with_hash;
    using Base::emplace_hint_with_hash;
    using Base::try_emplace_with_hash;
    using Base::extract;
    using Base::merge;
    using Base::swap;
    using Base::rehash;
    using Base::reserve;
    // Lookup / element access
    using Base::at;
    using Base::contains;
    using Base::count;
    using Base::equal_range;
    using Base::find;
    using Base::operator[];
    // Observers / table statistics
    using Base::bucket_count;
    using Base::load_factor;
    using Base::max_load_factor;
    using Base::get_allocator;
    using Base::hash_function;
    using Base::key_eq;
};
  4425. // -----------------------------------------------------------------------------
  4426. // phmap::parallel_node_hash_set
  4427. // -----------------------------------------------------------------------------
// Thin public wrapper: behavior lives in priv::parallel_hash_set with the
// node-based policy; this class only re-exports the inherited interface.
template <class T, class Hash, class Eq, class Alloc, size_t N, class Mtx_>
class parallel_node_hash_set
    : public phmap::priv::parallel_hash_set<
          N, phmap::priv::raw_hash_set, Mtx_,
          phmap::priv::NodeHashSetPolicy<T>, Hash, Eq, Alloc>
{
    using Base = typename parallel_node_hash_set::parallel_hash_set;

public:
    parallel_node_hash_set() {}
#ifdef __INTEL_COMPILER
    using Base::parallel_hash_set;  // icc: name the base constructor directly
#else
    using Base::Base;               // inherit all parallel_hash_set constructors
#endif
    // Submap sharding helpers (hash value -> submap index / submap count).
    using Base::hash;
    using Base::subidx;
    using Base::subcnt;
    // Iterators
    using Base::begin;
    using Base::cbegin;
    using Base::cend;
    using Base::end;
    // Capacity
    using Base::capacity;
    using Base::empty;
    using Base::max_size;
    using Base::size;
    // Modifiers
    using Base::clear;
    using Base::erase;
    using Base::insert;
    using Base::emplace;
    using Base::emplace_hint;
    using Base::emplace_with_hash;
    using Base::emplace_hint_with_hash;
    using Base::extract;
    using Base::merge;
    using Base::swap;
    using Base::rehash;
    using Base::reserve;
    // Lookup
    using Base::contains;
    using Base::count;
    using Base::equal_range;
    using Base::find;
    // Observers / table statistics
    using Base::bucket_count;
    using Base::load_factor;
    using Base::max_load_factor;
    using Base::get_allocator;
    using Base::hash_function;
    using Base::key_eq;
    // Legacy compatibility aliases for hash_function() and rehash().
    typename Base::hasher hash_funct() { return this->hash_function(); }
    void resize(typename Base::size_type hint) { this->rehash(hint); }
};
  4478. // -----------------------------------------------------------------------------
  4479. // phmap::parallel_node_hash_map
  4480. // -----------------------------------------------------------------------------
// Thin public wrapper: behavior lives in priv::parallel_hash_map with the
// node-based policy; this class only re-exports the inherited interface.
template <class Key, class Value, class Hash, class Eq, class Alloc, size_t N, class Mtx_>
class parallel_node_hash_map
    : public phmap::priv::parallel_hash_map<
          N, phmap::priv::raw_hash_set, Mtx_,
          phmap::priv::NodeHashMapPolicy<Key, Value>, Hash, Eq,
          Alloc>
{
    using Base = typename parallel_node_hash_map::parallel_hash_map;

public:
    parallel_node_hash_map() {}
#ifdef __INTEL_COMPILER
    using Base::parallel_hash_map;  // icc: name the base constructor directly
#else
    using Base::Base;               // inherit all parallel_hash_map constructors
#endif
    // Submap sharding helpers (hash value -> submap index / submap count).
    using Base::hash;
    using Base::subidx;
    using Base::subcnt;
    // Iterators
    using Base::begin;
    using Base::cbegin;
    using Base::cend;
    using Base::end;
    // Capacity
    using Base::capacity;
    using Base::empty;
    using Base::max_size;
    using Base::size;
    // Modifiers
    using Base::clear;
    using Base::erase;
    using Base::insert;
    using Base::insert_or_assign;
    using Base::emplace;
    using Base::emplace_hint;
    using Base::try_emplace;
    using Base::emplace_with_hash;
    using Base::emplace_hint_with_hash;
    using Base::try_emplace_with_hash;
    using Base::extract;
    using Base::merge;
    using Base::swap;
    using Base::rehash;
    using Base::reserve;
    // Lookup / element access
    using Base::at;
    using Base::contains;
    using Base::count;
    using Base::equal_range;
    using Base::find;
    using Base::operator[];
    // Observers / table statistics
    using Base::bucket_count;
    using Base::load_factor;
    using Base::max_load_factor;
    using Base::get_allocator;
    using Base::hash_function;
    using Base::key_eq;
    // Legacy compatibility aliases for hash_function() and rehash().
    typename Base::hasher hash_funct() { return this->hash_function(); }
    void resize(typename Base::size_type hint) { this->rehash(hint); }
};
  4537. } // namespace phmap
  4538. namespace phmap {
  4539. namespace priv {
  4540. template <class C, class Pred>
  4541. std::size_t erase_if(C &c, Pred pred) {
  4542. auto old_size = c.size();
  4543. for (auto i = c.begin(), last = c.end(); i != last; ) {
  4544. if (pred(*i)) {
  4545. i = c.erase(i);
  4546. } else {
  4547. ++i;
  4548. }
  4549. }
  4550. return old_size - c.size();
  4551. }
  4552. } // priv
  4553. // ======== erase_if for phmap set containers ==================================
  4554. template <class T, class Hash, class Eq, class Alloc, class Pred>
  4555. LL_INLINE std::size_t erase_if(phmap::flat_hash_set<T, Hash, Eq, Alloc>& c, Pred pred) {
  4556. return phmap::priv::erase_if(c, std::move(pred));
  4557. }
  4558. template <class T, class Hash, class Eq, class Alloc, class Pred>
  4559. LL_INLINE std::size_t erase_if(phmap::node_hash_set<T, Hash, Eq, Alloc>& c, Pred pred) {
  4560. return phmap::priv::erase_if(c, std::move(pred));
  4561. }
  4562. template <class T, class Hash, class Eq, class Alloc, size_t N, class Mtx_, class Pred>
  4563. LL_INLINE std::size_t erase_if(phmap::parallel_flat_hash_set<T, Hash, Eq, Alloc, N, Mtx_>& c, Pred pred) {
  4564. return phmap::priv::erase_if(c, std::move(pred));
  4565. }
  4566. template <class T, class Hash, class Eq, class Alloc, size_t N, class Mtx_, class Pred>
  4567. LL_INLINE std::size_t erase_if(phmap::parallel_node_hash_set<T, Hash, Eq, Alloc, N, Mtx_>& c, Pred pred) {
  4568. return phmap::priv::erase_if(c, std::move(pred));
  4569. }
  4570. // ======== erase_if for phmap map containers ==================================
  4571. template <class K, class V, class Hash, class Eq, class Alloc, class Pred>
  4572. LL_INLINE std::size_t erase_if(phmap::flat_hash_map<K, V, Hash, Eq, Alloc>& c, Pred pred) {
  4573. return phmap::priv::erase_if(c, std::move(pred));
  4574. }
  4575. template <class K, class V, class Hash, class Eq, class Alloc, class Pred>
  4576. LL_INLINE std::size_t erase_if(phmap::node_hash_map<K, V, Hash, Eq, Alloc>& c, Pred pred) {
  4577. return phmap::priv::erase_if(c, std::move(pred));
  4578. }
  4579. template <class K, class V, class Hash, class Eq, class Alloc, size_t N, class Mtx_, class Pred>
  4580. LL_INLINE std::size_t erase_if(phmap::parallel_flat_hash_map<K, V, Hash, Eq, Alloc, N, Mtx_>& c, Pred pred) {
  4581. return phmap::priv::erase_if(c, std::move(pred));
  4582. }
  4583. template <class K, class V, class Hash, class Eq, class Alloc, size_t N, class Mtx_, class Pred>
  4584. LL_INLINE std::size_t erase_if(phmap::parallel_node_hash_map<K, V, Hash, Eq, Alloc, N, Mtx_>& c, Pred pred) {
  4585. return phmap::priv::erase_if(c, std::move(pred));
  4586. }
  4587. } // phmap
  4588. #ifdef _MSC_VER
  4589. #pragma warning(pop)
  4590. #endif
  4591. #endif // phmap_h_guard_