// GTLRVisionObjects.h

// NOTE: This file was generated by the ServiceGenerator.

// ----------------------------------------------------------------------------
// API:
//   Cloud Vision API (vision/v1)
// Description:
//   Integrates Google Vision features, including image labeling, face, logo,
//   and landmark detection, optical character recognition (OCR), and detection
//   of explicit content, into applications.
// Documentation:
//   https://cloud.google.com/vision/

#if GTLR_BUILT_AS_FRAMEWORK
  #import "GTLR/GTLRObject.h"
#else
  #import "GTLRObject.h"
#endif

#if GTLR_RUNTIME_VERSION != 3000
#error This file was generated by a different version of ServiceGenerator which is incompatible with this GTLR library source.
#endif

@class GTLRVision_AnnotateImageRequest;
@class GTLRVision_AnnotateImageResponse;
@class GTLRVision_AsyncAnnotateFileRequest;
@class GTLRVision_AsyncAnnotateFileResponse;
@class GTLRVision_Block;
@class GTLRVision_BoundingPoly;
@class GTLRVision_Color;
@class GTLRVision_ColorInfo;
@class GTLRVision_CropHint;
@class GTLRVision_CropHintsAnnotation;
@class GTLRVision_CropHintsParams;
@class GTLRVision_DetectedBreak;
@class GTLRVision_DetectedLanguage;
@class GTLRVision_DominantColorsAnnotation;
@class GTLRVision_EntityAnnotation;
@class GTLRVision_FaceAnnotation;
@class GTLRVision_Feature;
@class GTLRVision_GcsDestination;
@class GTLRVision_GcsSource;
@class GTLRVision_GoogleCloudVisionV1p1beta1AnnotateImageResponse;
@class GTLRVision_GoogleCloudVisionV1p1beta1AsyncAnnotateFileResponse;
@class GTLRVision_GoogleCloudVisionV1p1beta1Block;
@class GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly;
@class GTLRVision_GoogleCloudVisionV1p1beta1ColorInfo;
@class GTLRVision_GoogleCloudVisionV1p1beta1CropHint;
@class GTLRVision_GoogleCloudVisionV1p1beta1CropHintsAnnotation;
@class GTLRVision_GoogleCloudVisionV1p1beta1DominantColorsAnnotation;
@class GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation;
@class GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation;
@class GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark;
@class GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination;
@class GTLRVision_GoogleCloudVisionV1p1beta1GcsSource;
@class GTLRVision_GoogleCloudVisionV1p1beta1ImageAnnotationContext;
@class GTLRVision_GoogleCloudVisionV1p1beta1ImageProperties;
@class GTLRVision_GoogleCloudVisionV1p1beta1InputConfig;
@class GTLRVision_GoogleCloudVisionV1p1beta1LocalizedObjectAnnotation;
@class GTLRVision_GoogleCloudVisionV1p1beta1LocationInfo;
@class GTLRVision_GoogleCloudVisionV1p1beta1NormalizedVertex;
@class GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig;
@class GTLRVision_GoogleCloudVisionV1p1beta1Page;
@class GTLRVision_GoogleCloudVisionV1p1beta1Paragraph;
@class GTLRVision_GoogleCloudVisionV1p1beta1Position;
@class GTLRVision_GoogleCloudVisionV1p1beta1Product;
@class GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue;
@class GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResults;
@class GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsGroupedResult;
@class GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsResult;
@class GTLRVision_GoogleCloudVisionV1p1beta1Property;
@class GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation;
@class GTLRVision_GoogleCloudVisionV1p1beta1Symbol;
@class GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotation;
@class GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak;
@class GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedLanguage;
@class GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty;
@class GTLRVision_GoogleCloudVisionV1p1beta1Vertex;
@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetection;
@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebEntity;
@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage;
@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebLabel;
@class GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebPage;
@class GTLRVision_GoogleCloudVisionV1p1beta1Word;
@class GTLRVision_GoogleCloudVisionV1p2beta1AnnotateImageResponse;
@class GTLRVision_GoogleCloudVisionV1p2beta1AsyncAnnotateFileResponse;
@class GTLRVision_GoogleCloudVisionV1p2beta1Block;
@class GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly;
@class GTLRVision_GoogleCloudVisionV1p2beta1ColorInfo;
@class GTLRVision_GoogleCloudVisionV1p2beta1CropHint;
@class GTLRVision_GoogleCloudVisionV1p2beta1CropHintsAnnotation;
@class GTLRVision_GoogleCloudVisionV1p2beta1DominantColorsAnnotation;
@class GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation;
@class GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation;
@class GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark;
@class GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination;
@class GTLRVision_GoogleCloudVisionV1p2beta1GcsSource;
@class GTLRVision_GoogleCloudVisionV1p2beta1ImageAnnotationContext;
@class GTLRVision_GoogleCloudVisionV1p2beta1ImageProperties;
@class GTLRVision_GoogleCloudVisionV1p2beta1InputConfig;
@class GTLRVision_GoogleCloudVisionV1p2beta1LocalizedObjectAnnotation;
@class GTLRVision_GoogleCloudVisionV1p2beta1LocationInfo;
@class GTLRVision_GoogleCloudVisionV1p2beta1NormalizedVertex;
@class GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig;
@class GTLRVision_GoogleCloudVisionV1p2beta1Page;
@class GTLRVision_GoogleCloudVisionV1p2beta1Paragraph;
@class GTLRVision_GoogleCloudVisionV1p2beta1Position;
@class GTLRVision_GoogleCloudVisionV1p2beta1Product;
@class GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue;
@class GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResults;
@class GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsGroupedResult;
@class GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsResult;
@class GTLRVision_GoogleCloudVisionV1p2beta1Property;
@class GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation;
@class GTLRVision_GoogleCloudVisionV1p2beta1Symbol;
@class GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotation;
@class GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak;
@class GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedLanguage;
@class GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty;
@class GTLRVision_GoogleCloudVisionV1p2beta1Vertex;
@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetection;
@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity;
@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage;
@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel;
@class GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebPage;
@class GTLRVision_GoogleCloudVisionV1p2beta1Word;
@class GTLRVision_GoogleCloudVisionV1p3beta1AnnotateImageResponse;
@class GTLRVision_GoogleCloudVisionV1p3beta1AsyncAnnotateFileResponse;
@class GTLRVision_GoogleCloudVisionV1p3beta1Block;
@class GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly;
@class GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo;
@class GTLRVision_GoogleCloudVisionV1p3beta1CropHint;
@class GTLRVision_GoogleCloudVisionV1p3beta1CropHintsAnnotation;
@class GTLRVision_GoogleCloudVisionV1p3beta1DominantColorsAnnotation;
@class GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation;
@class GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation;
@class GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark;
@class GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination;
@class GTLRVision_GoogleCloudVisionV1p3beta1GcsSource;
@class GTLRVision_GoogleCloudVisionV1p3beta1ImageAnnotationContext;
@class GTLRVision_GoogleCloudVisionV1p3beta1ImageProperties;
@class GTLRVision_GoogleCloudVisionV1p3beta1InputConfig;
@class GTLRVision_GoogleCloudVisionV1p3beta1LocalizedObjectAnnotation;
@class GTLRVision_GoogleCloudVisionV1p3beta1LocationInfo;
@class GTLRVision_GoogleCloudVisionV1p3beta1NormalizedVertex;
@class GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig;
@class GTLRVision_GoogleCloudVisionV1p3beta1Page;
@class GTLRVision_GoogleCloudVisionV1p3beta1Paragraph;
@class GTLRVision_GoogleCloudVisionV1p3beta1Position;
@class GTLRVision_GoogleCloudVisionV1p3beta1Product;
@class GTLRVision_GoogleCloudVisionV1p3beta1ProductKeyValue;
@class GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResults;
@class GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsGroupedResult;
@class GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult;
@class GTLRVision_GoogleCloudVisionV1p3beta1Property;
@class GTLRVision_GoogleCloudVisionV1p3beta1ReferenceImage;
@class GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation;
@class GTLRVision_GoogleCloudVisionV1p3beta1Symbol;
@class GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotation;
@class GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak;
@class GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedLanguage;
@class GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty;
@class GTLRVision_GoogleCloudVisionV1p3beta1Vertex;
@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetection;
@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebEntity;
@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage;
@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebLabel;
@class GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebPage;
@class GTLRVision_GoogleCloudVisionV1p3beta1Word;
@class GTLRVision_GroupedResult;
@class GTLRVision_Image;
@class GTLRVision_ImageAnnotationContext;
@class GTLRVision_ImageContext;
@class GTLRVision_ImageProperties;
@class GTLRVision_ImageSource;
@class GTLRVision_ImportProductSetsGcsSource;
@class GTLRVision_ImportProductSetsInputConfig;
@class GTLRVision_InputConfig;
@class GTLRVision_KeyValue;
@class GTLRVision_Landmark;
@class GTLRVision_LatLng;
@class GTLRVision_LatLongRect;
@class GTLRVision_LocalizedObjectAnnotation;
@class GTLRVision_LocationInfo;
@class GTLRVision_NormalizedVertex;
@class GTLRVision_Operation;
@class GTLRVision_Operation_Metadata;
@class GTLRVision_Operation_Response;
@class GTLRVision_OutputConfig;
@class GTLRVision_Page;
@class GTLRVision_Paragraph;
@class GTLRVision_Position;
@class GTLRVision_Product;
@class GTLRVision_ProductSearchParams;
@class GTLRVision_ProductSearchResults;
@class GTLRVision_ProductSet;
@class GTLRVision_Property;
@class GTLRVision_ReferenceImage;
@class GTLRVision_Result;
@class GTLRVision_SafeSearchAnnotation;
@class GTLRVision_Status;
@class GTLRVision_Status_Details_Item;
@class GTLRVision_Symbol;
@class GTLRVision_TextAnnotation;
@class GTLRVision_TextProperty;
@class GTLRVision_Vertex;
@class GTLRVision_WebDetection;
@class GTLRVision_WebDetectionParams;
@class GTLRVision_WebEntity;
@class GTLRVision_WebImage;
@class GTLRVision_WebLabel;
@class GTLRVision_WebPage;
@class GTLRVision_Word;
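
// Usage sketch (illustrative only, not produced by the ServiceGenerator): how
// the forward-declared request classes above are typically composed for a
// label-detection call. The property names (image, content, features, type,
// maxResults) and the feature-type constant are assumptions based on the
// interfaces declared later in this header; adjust them if they differ.
#if 0  // Example only; excluded from compilation.
static GTLRVision_AnnotateImageRequest *ExampleLabelRequest(NSData *imageData) {
  // Wrap the raw bytes in a GTLRVision_Image; bytes fields are carried as
  // base64-encoded strings.
  GTLRVision_Image *image = [GTLRVision_Image object];
  image.content = [imageData base64EncodedStringWithOptions:0];

  // Ask for label detection, capped at 10 results.
  GTLRVision_Feature *feature = [GTLRVision_Feature object];
  feature.type = kGTLRVision_Feature_Type_LabelDetection;
  feature.maxResults = @10;

  GTLRVision_AnnotateImageRequest *request = [GTLRVision_AnnotateImageRequest object];
  request.image = image;
  request.features = @[ feature ];
  return request;
}
#endif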

// Generated comments include content from the discovery document; avoid them
// causing warnings since clang's checks are somewhat arbitrary.
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdocumentation"

NS_ASSUME_NONNULL_BEGIN
  214. // ----------------------------------------------------------------------------
  215. // Constants - For some of the classes' properties below.
  216. // ----------------------------------------------------------------------------
  217. // GTLRVision_BatchOperationMetadata.state
  218. /**
  219. * The request is done after the longrunning.Operations.CancelOperation has
  220. * been called by the user. Any records that were processed before the
  221. * cancel command are output as specified in the request.
  222. *
  223. * Value: "CANCELLED"
  224. */
  225. GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_Cancelled;
  226. /**
  227. * The request is done and no item has been successfully processed.
  228. *
  229. * Value: "FAILED"
  230. */
  231. GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_Failed;
  232. /**
  233. * Request is actively being processed.
  234. *
  235. * Value: "PROCESSING"
  236. */
  237. GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_Processing;
  238. /**
  239. * Invalid.
  240. *
  241. * Value: "STATE_UNSPECIFIED"
  242. */
  243. GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_StateUnspecified;
  244. /**
  245. * The request is done and at least one item has been successfully
  246. * processed.
  247. *
  248. * Value: "SUCCESSFUL"
  249. */
  250. GTLR_EXTERN NSString * const kGTLRVision_BatchOperationMetadata_State_Successful;
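/*
 * Usage sketch: these state values are plain strings, so compare them with
 * -isEqual:. This assumes GTLRVision_BatchOperationMetadata surfaces the
 * discovery document's `state` field as an NSString property named `state`,
 * as the generated classes in this header do for string enum fields.
 *
 *   GTLRVision_BatchOperationMetadata *metadata;  // decoded from a long-running operation's metadata
 *   if ([metadata.state isEqual:kGTLRVision_BatchOperationMetadata_State_Successful]) {
 *     // At least one item was processed; results were written as requested.
 *   } else if ([metadata.state isEqual:kGTLRVision_BatchOperationMetadata_State_Failed]) {
 *     // Nothing was processed successfully.
 *   }
 */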
  251. // ----------------------------------------------------------------------------
  252. // GTLRVision_Block.blockType
  253. /**
  254. * Barcode block.
  255. *
  256. * Value: "BARCODE"
  257. */
  258. GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Barcode;
  259. /**
  260. * Image block.
  261. *
  262. * Value: "PICTURE"
  263. */
  264. GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Picture;
  265. /**
  266. * Horizontal/vertical line box.
  267. *
  268. * Value: "RULER"
  269. */
  270. GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Ruler;
  271. /**
  272. * Table block.
  273. *
  274. * Value: "TABLE"
  275. */
  276. GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Table;
  277. /**
  278. * Regular text block.
  279. *
  280. * Value: "TEXT"
  281. */
  282. GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Text;
  283. /**
  284. * Unknown block type.
  285. *
  286. * Value: "UNKNOWN"
  287. */
  288. GTLR_EXTERN NSString * const kGTLRVision_Block_BlockType_Unknown;
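/*
 * Usage sketch: filtering OCR blocks by type. This assumes GTLRVision_Block
 * exposes `blockType` (NSString) mirroring the API's Block message, and that
 * `page` stands for a GTLRVision_Page assumed to expose a `blocks` array.
 *
 *   NSMutableArray<GTLRVision_Block *> *textBlocks = [NSMutableArray array];
 *   for (GTLRVision_Block *block in page.blocks) {
 *     if ([block.blockType isEqual:kGTLRVision_Block_BlockType_Text]) {
 *       [textBlocks addObject:block];
 *     }
 *   }
 */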
  289. // ----------------------------------------------------------------------------
  290. // GTLRVision_DetectedBreak.type
  291. /**
  292. * Line-wrapping break.
  293. *
  294. * Value: "EOL_SURE_SPACE"
  295. */
  296. GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_EolSureSpace;
  297. /**
  298. * End-line hyphen that is not present in text; does not co-occur with
  299. * `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
  300. *
  301. * Value: "HYPHEN"
  302. */
  303. GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_Hyphen;
  304. /**
  305. * Line break that ends a paragraph.
  306. *
  307. * Value: "LINE_BREAK"
  308. */
  309. GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_LineBreak;
  310. /**
  311. * Regular space.
  312. *
  313. * Value: "SPACE"
  314. */
  315. GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_Space;
  316. /**
  317. * Sure space (very wide).
  318. *
  319. * Value: "SURE_SPACE"
  320. */
  321. GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_SureSpace;
  322. /**
  323. * Unknown break label type.
  324. *
  325. * Value: "UNKNOWN"
  326. */
  327. GTLR_EXTERN NSString * const kGTLRVision_DetectedBreak_Type_Unknown;
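/*
 * Usage sketch: when reassembling plain text from a full-text annotation, the
 * detected break attached to each symbol says what whitespace (if any)
 * follows it. This assumes GTLRVision_Symbol exposes `text` and `property`,
 * GTLRVision_TextProperty exposes `detectedBreak`, and `word` stands for a
 * GTLRVision_Word assumed to expose a `symbols` array.
 *
 *   NSMutableString *line = [NSMutableString string];
 *   for (GTLRVision_Symbol *symbol in word.symbols) {
 *     [line appendString:symbol.text ?: @""];
 *     NSString *breakType = symbol.property.detectedBreak.type;
 *     if ([breakType isEqual:kGTLRVision_DetectedBreak_Type_Space] ||
 *         [breakType isEqual:kGTLRVision_DetectedBreak_Type_SureSpace]) {
 *       [line appendString:@" "];
 *     } else if ([breakType isEqual:kGTLRVision_DetectedBreak_Type_LineBreak] ||
 *                [breakType isEqual:kGTLRVision_DetectedBreak_Type_EolSureSpace]) {
 *       [line appendString:@"\n"];
 *     }
 *   }
 */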
  328. // ----------------------------------------------------------------------------
  329. // GTLRVision_FaceAnnotation.angerLikelihood
  330. /**
  331. * It is likely that the image belongs to the specified vertical.
  332. *
  333. * Value: "LIKELY"
  334. */
  335. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_Likely;
  336. /**
  337. * It is possible that the image belongs to the specified vertical.
  338. *
  339. * Value: "POSSIBLE"
  340. */
  341. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_Possible;
  342. /**
  343. * Unknown likelihood.
  344. *
  345. * Value: "UNKNOWN"
  346. */
  347. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_Unknown;
  348. /**
  349. * It is unlikely that the image belongs to the specified vertical.
  350. *
  351. * Value: "UNLIKELY"
  352. */
  353. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_Unlikely;
  354. /**
  355. * It is very likely that the image belongs to the specified vertical.
  356. *
  357. * Value: "VERY_LIKELY"
  358. */
  359. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_VeryLikely;
  360. /**
  361. * It is very unlikely that the image belongs to the specified vertical.
  362. *
  363. * Value: "VERY_UNLIKELY"
  364. */
  365. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_AngerLikelihood_VeryUnlikely;
  366. // ----------------------------------------------------------------------------
  367. // GTLRVision_FaceAnnotation.blurredLikelihood
  368. /**
  369. * It is likely that the image belongs to the specified vertical.
  370. *
  371. * Value: "LIKELY"
  372. */
  373. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_Likely;
  374. /**
  375. * It is possible that the image belongs to the specified vertical.
  376. *
  377. * Value: "POSSIBLE"
  378. */
  379. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_Possible;
  380. /**
  381. * Unknown likelihood.
  382. *
  383. * Value: "UNKNOWN"
  384. */
  385. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_Unknown;
  386. /**
  387. * It is unlikely that the image belongs to the specified vertical.
  388. *
  389. * Value: "UNLIKELY"
  390. */
  391. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_Unlikely;
  392. /**
  393. * It is very likely that the image belongs to the specified vertical.
  394. *
  395. * Value: "VERY_LIKELY"
  396. */
  397. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_VeryLikely;
  398. /**
  399. * It is very unlikely that the image belongs to the specified vertical.
  400. *
  401. * Value: "VERY_UNLIKELY"
  402. */
  403. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_BlurredLikelihood_VeryUnlikely;
  404. // ----------------------------------------------------------------------------
  405. // GTLRVision_FaceAnnotation.headwearLikelihood
  406. /**
  407. * It is likely that the image belongs to the specified vertical.
  408. *
  409. * Value: "LIKELY"
  410. */
  411. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_Likely;
  412. /**
  413. * It is possible that the image belongs to the specified vertical.
  414. *
  415. * Value: "POSSIBLE"
  416. */
  417. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_Possible;
  418. /**
  419. * Unknown likelihood.
  420. *
  421. * Value: "UNKNOWN"
  422. */
  423. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_Unknown;
  424. /**
  425. * It is unlikely that the image belongs to the specified vertical.
  426. *
  427. * Value: "UNLIKELY"
  428. */
  429. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_Unlikely;
  430. /**
  431. * It is very likely that the image belongs to the specified vertical.
  432. *
  433. * Value: "VERY_LIKELY"
  434. */
  435. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_VeryLikely;
  436. /**
  437. * It is very unlikely that the image belongs to the specified vertical.
  438. *
  439. * Value: "VERY_UNLIKELY"
  440. */
  441. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_HeadwearLikelihood_VeryUnlikely;
  442. // ----------------------------------------------------------------------------
  443. // GTLRVision_FaceAnnotation.joyLikelihood
  444. /**
  445. * It is likely that the image belongs to the specified vertical.
  446. *
  447. * Value: "LIKELY"
  448. */
  449. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_Likely;
  450. /**
  451. * It is possible that the image belongs to the specified vertical.
  452. *
  453. * Value: "POSSIBLE"
  454. */
  455. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_Possible;
  456. /**
  457. * Unknown likelihood.
  458. *
  459. * Value: "UNKNOWN"
  460. */
  461. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_Unknown;
  462. /**
  463. * It is unlikely that the image belongs to the specified vertical.
  464. *
  465. * Value: "UNLIKELY"
  466. */
  467. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_Unlikely;
  468. /**
  469. * It is very likely that the image belongs to the specified vertical.
  470. *
  471. * Value: "VERY_LIKELY"
  472. */
  473. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_VeryLikely;
  474. /**
  475. * It is very unlikely that the image belongs to the specified vertical.
  476. *
  477. * Value: "VERY_UNLIKELY"
  478. */
  479. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_JoyLikelihood_VeryUnlikely;
  480. // ----------------------------------------------------------------------------
  481. // GTLRVision_FaceAnnotation.sorrowLikelihood
  482. /**
  483. * It is likely that the image belongs to the specified vertical.
  484. *
  485. * Value: "LIKELY"
  486. */
  487. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_Likely;
  488. /**
  489. * It is possible that the image belongs to the specified vertical.
  490. *
  491. * Value: "POSSIBLE"
  492. */
  493. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_Possible;
  494. /**
  495. * Unknown likelihood.
  496. *
  497. * Value: "UNKNOWN"
  498. */
  499. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_Unknown;
  500. /**
  501. * It is unlikely that the image belongs to the specified vertical.
  502. *
  503. * Value: "UNLIKELY"
  504. */
  505. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_Unlikely;
  506. /**
  507. * It is very likely that the image belongs to the specified vertical.
  508. *
  509. * Value: "VERY_LIKELY"
  510. */
  511. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_VeryLikely;
  512. /**
  513. * It is very unlikely that the image belongs to the specified vertical.
  514. *
  515. * Value: "VERY_UNLIKELY"
  516. */
  517. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SorrowLikelihood_VeryUnlikely;
  518. // ----------------------------------------------------------------------------
  519. // GTLRVision_FaceAnnotation.surpriseLikelihood
  520. /**
  521. * It is likely that the image belongs to the specified vertical.
  522. *
  523. * Value: "LIKELY"
  524. */
  525. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_Likely;
  526. /**
  527. * It is possible that the image belongs to the specified vertical.
  528. *
  529. * Value: "POSSIBLE"
  530. */
  531. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_Possible;
  532. /**
  533. * Unknown likelihood.
  534. *
  535. * Value: "UNKNOWN"
  536. */
  537. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_Unknown;
  538. /**
  539. * It is unlikely that the image belongs to the specified vertical.
  540. *
  541. * Value: "UNLIKELY"
  542. */
  543. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_Unlikely;
  544. /**
  545. * It is very likely that the image belongs to the specified vertical.
  546. *
  547. * Value: "VERY_LIKELY"
  548. */
  549. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_VeryLikely;
  550. /**
  551. * It is very unlikely that the image belongs to the specified vertical.
  552. *
  553. * Value: "VERY_UNLIKELY"
  554. */
  555. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_SurpriseLikelihood_VeryUnlikely;
  556. // ----------------------------------------------------------------------------
  557. // GTLRVision_FaceAnnotation.underExposedLikelihood
  558. /**
  559. * It is likely that the image belongs to the specified vertical.
  560. *
  561. * Value: "LIKELY"
  562. */
  563. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Likely;
  564. /**
  565. * It is possible that the image belongs to the specified vertical.
  566. *
  567. * Value: "POSSIBLE"
  568. */
  569. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Possible;
  570. /**
  571. * Unknown likelihood.
  572. *
  573. * Value: "UNKNOWN"
  574. */
  575. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Unknown;
  576. /**
  577. * It is unlikely that the image belongs to the specified vertical.
  578. *
  579. * Value: "UNLIKELY"
  580. */
  581. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Unlikely;
  582. /**
  583. * It is very likely that the image belongs to the specified vertical.
  584. *
  585. * Value: "VERY_LIKELY"
  586. */
  587. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_VeryLikely;
  588. /**
  589. * It is very unlikely that the image belongs to the specified vertical.
  590. *
  591. * Value: "VERY_UNLIKELY"
  592. */
  593. GTLR_EXTERN NSString * const kGTLRVision_FaceAnnotation_UnderExposedLikelihood_VeryUnlikely;
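/*
 * Usage sketch: every likelihood bucket above is a string value, so a small
 * helper that maps the documented values onto an ordered scale makes
 * thresholding (e.g. "joy is at least LIKELY") straightforward. The ordering
 * below is an illustration based on the bucket names; the service does not
 * define numeric values. `face` stands for a GTLRVision_FaceAnnotation,
 * assumed to expose `joyLikelihood` as an NSString property.
 *
 *   static NSInteger VisionLikelihoodRank(NSString *likelihood) {
 *     NSArray<NSString *> *ordered = @[ @"VERY_UNLIKELY", @"UNLIKELY",
 *                                       @"POSSIBLE", @"LIKELY", @"VERY_LIKELY" ];
 *     NSUInteger index = [ordered indexOfObject:likelihood];
 *     return (index == NSNotFound) ? -1 : (NSInteger)index;  // UNKNOWN and nil rank lowest
 *   }
 *
 *   BOOL probablyJoyful =
 *       VisionLikelihoodRank(face.joyLikelihood) >=
 *       VisionLikelihoodRank(kGTLRVision_FaceAnnotation_JoyLikelihood_Likely);
 */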
  594. // ----------------------------------------------------------------------------
  595. // GTLRVision_Feature.type
  596. /**
  597. * Run crop hints.
  598. *
  599. * Value: "CROP_HINTS"
  600. */
  601. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_CropHints;
  602. /**
  603. * Run dense text document OCR. Takes precedence when both
  604. * `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present.
  605. *
  606. * Value: "DOCUMENT_TEXT_DETECTION"
  607. */
  608. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_DocumentTextDetection;
  609. /**
  610. * Run face detection.
  611. *
  612. * Value: "FACE_DETECTION"
  613. */
  614. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_FaceDetection;
  615. /**
  616. * Compute a set of image properties, such as the
  617. * image's dominant colors.
  618. *
  619. * Value: "IMAGE_PROPERTIES"
  620. */
  621. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_ImageProperties;
  622. /**
  623. * Run label detection.
  624. *
  625. * Value: "LABEL_DETECTION"
  626. */
  627. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_LabelDetection;
  628. /**
  629. * Run landmark detection.
  630. *
  631. * Value: "LANDMARK_DETECTION"
  632. */
  633. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_LandmarkDetection;
  634. /**
  635. * Run logo detection.
  636. *
  637. * Value: "LOGO_DETECTION"
  638. */
  639. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_LogoDetection;
  640. /**
  641. * Run localizer for object detection.
  642. *
  643. * Value: "OBJECT_LOCALIZATION"
  644. */
  645. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_ObjectLocalization;
  646. /**
  647. * Run Product Search.
  648. *
  649. * Value: "PRODUCT_SEARCH"
  650. */
  651. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_ProductSearch;
  652. /**
  653. * Run Safe Search to detect potentially unsafe
  654. * or undesirable content.
  655. *
  656. * Value: "SAFE_SEARCH_DETECTION"
  657. */
  658. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_SafeSearchDetection;
  659. /**
  660. * Run text detection / optical character recognition (OCR). Text detection
  661. * is optimized for areas of text within a larger image; if the image is
  662. * a document, use `DOCUMENT_TEXT_DETECTION` instead.
  663. *
  664. * Value: "TEXT_DETECTION"
  665. */
  666. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_TextDetection;
  667. /**
  668. * Unspecified feature type.
  669. *
  670. * Value: "TYPE_UNSPECIFIED"
  671. */
  672. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_TypeUnspecified;
  673. /**
  674. * Run web detection.
  675. *
  676. * Value: "WEB_DETECTION"
  677. */
  678. GTLR_EXTERN NSString * const kGTLRVision_Feature_Type_WebDetection;
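/*
 * Usage sketch: the feature type constants above are the values placed in
 * GTLRVision_Feature.type when building an images.annotate request. A
 * minimal sketch, assuming GTLRVision_Feature exposes `type` and
 * `maxResults`, GTLRVision_ImageSource exposes `imageUri`, GTLRVision_Image
 * exposes `source`, GTLRVision_AnnotateImageRequest exposes `image` and
 * `features`, and the usual GTLRObject +object constructor. The GCS path is
 * hypothetical.
 *
 *   GTLRVision_Feature *labels = [GTLRVision_Feature object];
 *   labels.type = kGTLRVision_Feature_Type_LabelDetection;
 *   labels.maxResults = @10;
 *
 *   GTLRVision_Feature *documentText = [GTLRVision_Feature object];
 *   documentText.type = kGTLRVision_Feature_Type_DocumentTextDetection;  // preferred over TEXT_DETECTION for documents
 *
 *   GTLRVision_ImageSource *source = [GTLRVision_ImageSource object];
 *   source.imageUri = @"gs://my-bucket/receipt.png";
 *
 *   GTLRVision_Image *image = [GTLRVision_Image object];
 *   image.source = source;
 *
 *   GTLRVision_AnnotateImageRequest *request = [GTLRVision_AnnotateImageRequest object];
 *   request.image = image;
 *   request.features = @[ labels, documentText ];
 */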
  679. // ----------------------------------------------------------------------------
  680. // GTLRVision_GoogleCloudVisionV1p1beta1Block.blockType
  681. /**
  682. * Barcode block.
  683. *
  684. * Value: "BARCODE"
  685. */
  686. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Barcode;
  687. /**
  688. * Image block.
  689. *
  690. * Value: "PICTURE"
  691. */
  692. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Picture;
  693. /**
  694. * Horizontal/vertical line box.
  695. *
  696. * Value: "RULER"
  697. */
  698. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Ruler;
  699. /**
  700. * Table block.
  701. *
  702. * Value: "TABLE"
  703. */
  704. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Table;
  705. /**
  706. * Regular text block.
  707. *
  708. * Value: "TEXT"
  709. */
  710. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Text;
  711. /**
  712. * Unknown block type.
  713. *
  714. * Value: "UNKNOWN"
  715. */
  716. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Unknown;
  717. // ----------------------------------------------------------------------------
  718. // GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.angerLikelihood
  719. /**
  720. * It is likely that the image belongs to the specified vertical.
  721. *
  722. * Value: "LIKELY"
  723. */
  724. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Likely;
  725. /**
  726. * It is possible that the image belongs to the specified vertical.
  727. *
  728. * Value: "POSSIBLE"
  729. */
  730. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Possible;
  731. /**
  732. * Unknown likelihood.
  733. *
  734. * Value: "UNKNOWN"
  735. */
  736. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Unknown;
  737. /**
  738. * It is unlikely that the image belongs to the specified vertical.
  739. *
  740. * Value: "UNLIKELY"
  741. */
  742. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Unlikely;
  743. /**
  744. * It is very likely that the image belongs to the specified vertical.
  745. *
  746. * Value: "VERY_LIKELY"
  747. */
  748. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_VeryLikely;
  749. /**
  750. * It is very unlikely that the image belongs to the specified vertical.
  751. *
  752. * Value: "VERY_UNLIKELY"
  753. */
  754. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_VeryUnlikely;
  755. // ----------------------------------------------------------------------------
  756. // GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.blurredLikelihood
  757. /**
  758. * It is likely that the image belongs to the specified vertical.
  759. *
  760. * Value: "LIKELY"
  761. */
  762. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Likely;
  763. /**
  764. * It is possible that the image belongs to the specified vertical.
  765. *
  766. * Value: "POSSIBLE"
  767. */
  768. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Possible;
  769. /**
  770. * Unknown likelihood.
  771. *
  772. * Value: "UNKNOWN"
  773. */
  774. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Unknown;
  775. /**
  776. * It is unlikely that the image belongs to the specified vertical.
  777. *
  778. * Value: "UNLIKELY"
  779. */
  780. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Unlikely;
  781. /**
  782. * It is very likely that the image belongs to the specified vertical.
  783. *
  784. * Value: "VERY_LIKELY"
  785. */
  786. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_VeryLikely;
  787. /**
  788. * It is very unlikely that the image belongs to the specified vertical.
  789. *
  790. * Value: "VERY_UNLIKELY"
  791. */
  792. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely;
  793. // ----------------------------------------------------------------------------
  794. // GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.headwearLikelihood
  795. /**
  796. * It is likely that the image belongs to the specified vertical.
  797. *
  798. * Value: "LIKELY"
  799. */
  800. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Likely;
  801. /**
  802. * It is possible that the image belongs to the specified vertical.
  803. *
  804. * Value: "POSSIBLE"
  805. */
  806. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Possible;
  807. /**
  808. * Unknown likelihood.
  809. *
  810. * Value: "UNKNOWN"
  811. */
  812. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Unknown;
  813. /**
  814. * It is unlikely that the image belongs to the specified vertical.
  815. *
  816. * Value: "UNLIKELY"
  817. */
  818. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Unlikely;
  819. /**
  820. * It is very likely that the image belongs to the specified vertical.
  821. *
  822. * Value: "VERY_LIKELY"
  823. */
  824. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_VeryLikely;
  825. /**
  826. * It is very unlikely that the image belongs to the specified vertical.
  827. *
  828. * Value: "VERY_UNLIKELY"
  829. */
  830. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely;
  831. // ----------------------------------------------------------------------------
  832. // GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.joyLikelihood
  833. /**
  834. * It is likely that the image belongs to the specified vertical.
  835. *
  836. * Value: "LIKELY"
  837. */
  838. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Likely;
  839. /**
  840. * It is possible that the image belongs to the specified vertical.
  841. *
  842. * Value: "POSSIBLE"
  843. */
  844. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Possible;
  845. /**
  846. * Unknown likelihood.
  847. *
  848. * Value: "UNKNOWN"
  849. */
  850. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Unknown;
  851. /**
  852. * It is unlikely that the image belongs to the specified vertical.
  853. *
  854. * Value: "UNLIKELY"
  855. */
  856. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Unlikely;
  857. /**
  858. * It is very likely that the image belongs to the specified vertical.
  859. *
  860. * Value: "VERY_LIKELY"
  861. */
  862. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_VeryLikely;
  863. /**
  864. * It is very unlikely that the image belongs to the specified vertical.
  865. *
  866. * Value: "VERY_UNLIKELY"
  867. */
  868. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_VeryUnlikely;
  869. // ----------------------------------------------------------------------------
  870. // GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.sorrowLikelihood
  871. /**
  872. * It is likely that the image belongs to the specified vertical.
  873. *
  874. * Value: "LIKELY"
  875. */
  876. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Likely;
  877. /**
  878. * It is possible that the image belongs to the specified vertical.
  879. *
  880. * Value: "POSSIBLE"
  881. */
  882. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Possible;
  883. /**
  884. * Unknown likelihood.
  885. *
  886. * Value: "UNKNOWN"
  887. */
  888. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Unknown;
  889. /**
  890. * It is unlikely that the image belongs to the specified vertical.
  891. *
  892. * Value: "UNLIKELY"
  893. */
  894. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Unlikely;
  895. /**
  896. * It is very likely that the image belongs to the specified vertical.
  897. *
  898. * Value: "VERY_LIKELY"
  899. */
  900. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_VeryLikely;
  901. /**
  902. * It is very unlikely that the image belongs to the specified vertical.
  903. *
  904. * Value: "VERY_UNLIKELY"
  905. */
  906. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely;
  907. // ----------------------------------------------------------------------------
  908. // GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.surpriseLikelihood
  909. /**
  910. * It is likely that the image belongs to the specified vertical.
  911. *
  912. * Value: "LIKELY"
  913. */
  914. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Likely;
  915. /**
  916. * It is possible that the image belongs to the specified vertical.
  917. *
  918. * Value: "POSSIBLE"
  919. */
  920. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Possible;
  921. /**
  922. * Unknown likelihood.
  923. *
  924. * Value: "UNKNOWN"
  925. */
  926. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Unknown;
  927. /**
  928. * It is unlikely that the image belongs to the specified vertical.
  929. *
  930. * Value: "UNLIKELY"
  931. */
  932. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Unlikely;
  933. /**
  934. * It is very likely that the image belongs to the specified vertical.
  935. *
  936. * Value: "VERY_LIKELY"
  937. */
  938. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_VeryLikely;
  939. /**
  940. * It is very unlikely that the image belongs to the specified vertical.
  941. *
  942. * Value: "VERY_UNLIKELY"
  943. */
  944. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely;
  945. // ----------------------------------------------------------------------------
  946. // GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation.underExposedLikelihood
  947. /**
  948. * It is likely that the image belongs to the specified vertical.
  949. *
  950. * Value: "LIKELY"
  951. */
  952. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Likely;
  953. /**
  954. * It is possible that the image belongs to the specified vertical.
  955. *
  956. * Value: "POSSIBLE"
  957. */
  958. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Possible;
  959. /**
  960. * Unknown likelihood.
  961. *
  962. * Value: "UNKNOWN"
  963. */
  964. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Unknown;
  965. /**
  966. * It is unlikely that the image belongs to the specified vertical.
  967. *
  968. * Value: "UNLIKELY"
  969. */
  970. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Unlikely;
  971. /**
  972. * It is very likely that the image belongs to the specified vertical.
  973. *
  974. * Value: "VERY_LIKELY"
  975. */
  976. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely;
  977. /**
  978. * It is very unlikely that the image belongs to the specified vertical.
  979. *
  980. * Value: "VERY_UNLIKELY"
  981. */
  982. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely;
  983. // ----------------------------------------------------------------------------
  984. // GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark.type
  985. /**
  986. * Chin gnathion.
  987. *
  988. * Value: "CHIN_GNATHION"
  989. */
  990. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinGnathion;
  991. /**
  992. * Chin left gonion.
  993. *
  994. * Value: "CHIN_LEFT_GONION"
  995. */
  996. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinLeftGonion;
  997. /**
  998. * Chin right gonion.
  999. *
  1000. * Value: "CHIN_RIGHT_GONION"
  1001. */
  1002. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinRightGonion;
  1003. /**
  1004. * Forehead glabella.
  1005. *
  1006. * Value: "FOREHEAD_GLABELLA"
  1007. */
  1008. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ForeheadGlabella;
  1009. /**
  1010. * Left ear tragion.
  1011. *
  1012. * Value: "LEFT_EAR_TRAGION"
  1013. */
  1014. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEarTragion;
  1015. /**
  1016. * Left eye.
  1017. *
  1018. * Value: "LEFT_EYE"
  1019. */
  1020. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEye;
  1021. /**
  1022. * Left eye, bottom boundary.
  1023. *
  1024. * Value: "LEFT_EYE_BOTTOM_BOUNDARY"
  1025. */
  1026. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary;
  1027. /**
  1028. * Left eyebrow, upper midpoint.
  1029. *
  1030. * Value: "LEFT_EYEBROW_UPPER_MIDPOINT"
  1031. */
  1032. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint;
  1033. /**
  1034. * Left eye, left corner.
  1035. *
  1036. * Value: "LEFT_EYE_LEFT_CORNER"
  1037. */
  1038. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner;
  1039. /**
  1040. * Left eye pupil.
  1041. *
  1042. * Value: "LEFT_EYE_PUPIL"
  1043. */
  1044. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyePupil;
  1045. /**
  1046. * Left eye, right corner.
  1047. *
  1048. * Value: "LEFT_EYE_RIGHT_CORNER"
  1049. */
  1050. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner;
  1051. /**
  1052. * Left eye, top boundary.
  1053. *
  1054. * Value: "LEFT_EYE_TOP_BOUNDARY"
  1055. */
  1056. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary;
  1057. /**
  1058. * Left of left eyebrow.
  1059. *
  1060. * Value: "LEFT_OF_LEFT_EYEBROW"
  1061. */
  1062. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow;
  1063. /**
  1064. * Left of right eyebrow.
  1065. *
  1066. * Value: "LEFT_OF_RIGHT_EYEBROW"
  1067. */
  1068. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow;
  1069. /**
  1070. * Lower lip.
  1071. *
  1072. * Value: "LOWER_LIP"
  1073. */
  1074. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LowerLip;
  1075. /**
  1076. * Midpoint between eyes.
  1077. *
  1078. * Value: "MIDPOINT_BETWEEN_EYES"
  1079. */
  1080. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes;
  1081. /**
  1082. * Mouth center.
  1083. *
  1084. * Value: "MOUTH_CENTER"
  1085. */
  1086. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthCenter;
  1087. /**
  1088. * Mouth left.
  1089. *
  1090. * Value: "MOUTH_LEFT"
  1091. */
  1092. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthLeft;
  1093. /**
  1094. * Mouth right.
  1095. *
  1096. * Value: "MOUTH_RIGHT"
  1097. */
  1098. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthRight;
  1099. /**
  1100. * Nose, bottom center.
  1101. *
  1102. * Value: "NOSE_BOTTOM_CENTER"
  1103. */
  1104. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomCenter;
  1105. /**
  1106. * Nose, bottom left.
  1107. *
  1108. * Value: "NOSE_BOTTOM_LEFT"
  1109. */
  1110. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomLeft;
  1111. /**
  1112. * Nose, bottom right.
  1113. *
  1114. * Value: "NOSE_BOTTOM_RIGHT"
  1115. */
  1116. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomRight;
  1117. /**
  1118. * Nose tip.
  1119. *
  1120. * Value: "NOSE_TIP"
  1121. */
  1122. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseTip;
  1123. /**
  1124. * Right ear tragion.
  1125. *
  1126. * Value: "RIGHT_EAR_TRAGION"
  1127. */
  1128. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEarTragion;
  1129. /**
  1130. * Right eye.
  1131. *
  1132. * Value: "RIGHT_EYE"
  1133. */
  1134. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEye;
  1135. /**
  1136. * Right eye, bottom boundary.
  1137. *
  1138. * Value: "RIGHT_EYE_BOTTOM_BOUNDARY"
  1139. */
  1140. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary;
  1141. /**
  1142. * Right eyebrow, upper midpoint.
  1143. *
  1144. * Value: "RIGHT_EYEBROW_UPPER_MIDPOINT"
  1145. */
  1146. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint;
  1147. /**
  1148. * Right eye, left corner.
  1149. *
  1150. * Value: "RIGHT_EYE_LEFT_CORNER"
  1151. */
  1152. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner;
  1153. /**
  1154. * Right eye pupil.
  1155. *
  1156. * Value: "RIGHT_EYE_PUPIL"
  1157. */
  1158. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyePupil;
  1159. /**
  1160. * Right eye, right corner.
  1161. *
  1162. * Value: "RIGHT_EYE_RIGHT_CORNER"
  1163. */
  1164. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeRightCorner;
  1165. /**
  1166. * Right eye, top boundary.
  1167. *
  1168. * Value: "RIGHT_EYE_TOP_BOUNDARY"
  1169. */
  1170. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary;
  1171. /**
  1172. * Right of left eyebrow.
  1173. *
  1174. * Value: "RIGHT_OF_LEFT_EYEBROW"
  1175. */
  1176. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow;
  1177. /**
  1178. * Right of right eyebrow.
  1179. *
  1180. * Value: "RIGHT_OF_RIGHT_EYEBROW"
  1181. */
  1182. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow;
  1183. /**
  1184. * Unknown face landmark detected. Should not be filled.
  1185. *
  1186. * Value: "UNKNOWN_LANDMARK"
  1187. */
  1188. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_UnknownLandmark;
  1189. /**
  1190. * Upper lip.
  1191. *
  1192. * Value: "UPPER_LIP"
  1193. */
  1194. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_UpperLip;
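/*
 * Usage sketch: locating one landmark of interest in a face annotation's
 * landmark list. This assumes the generated
 * GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation exposes `landmarks`
 * and each landmark exposes `type` and `position`, mirroring the
 * FaceAnnotation.Landmark message; `face` stands for such an annotation.
 *
 *   GTLRVision_GoogleCloudVisionV1p1beta1Position *noseTip = nil;
 *   for (GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark *landmark in face.landmarks) {
 *     if ([landmark.type isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseTip]) {
 *       noseTip = landmark.position;
 *       break;
 *     }
 *   }
 */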
  1195. // ----------------------------------------------------------------------------
  1196. // GTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata.state
  1197. /**
  1198. * The batch processing was cancelled.
  1199. *
  1200. * Value: "CANCELLED"
  1201. */
  1202. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Cancelled;
  1203. /**
  1204. * Request is received.
  1205. *
  1206. * Value: "CREATED"
  1207. */
  1208. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Created;
  1209. /**
  1210. * The batch processing is done.
  1211. *
  1212. * Value: "DONE"
  1213. */
  1214. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Done;
  1215. /**
  1216. * Request is actively being processed.
  1217. *
  1218. * Value: "RUNNING"
  1219. */
  1220. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Running;
  1221. /**
  1222. * Invalid.
  1223. *
  1224. * Value: "STATE_UNSPECIFIED"
  1225. */
  1226. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_StateUnspecified;
  1227. // ----------------------------------------------------------------------------
  1228. // GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.adult
  1229. /**
  1230. * It is likely that the image belongs to the specified vertical.
  1231. *
  1232. * Value: "LIKELY"
  1233. */
  1234. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Likely;
  1235. /**
  1236. * It is possible that the image belongs to the specified vertical.
  1237. *
  1238. * Value: "POSSIBLE"
  1239. */
  1240. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Possible;
  1241. /**
  1242. * Unknown likelihood.
  1243. *
  1244. * Value: "UNKNOWN"
  1245. */
  1246. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Unknown;
  1247. /**
  1248. * It is unlikely that the image belongs to the specified vertical.
  1249. *
  1250. * Value: "UNLIKELY"
  1251. */
  1252. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Unlikely;
  1253. /**
  1254. * It is very likely that the image belongs to the specified vertical.
  1255. *
  1256. * Value: "VERY_LIKELY"
  1257. */
  1258. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryLikely;
  1259. /**
  1260. * It is very unlikely that the image belongs to the specified vertical.
  1261. *
  1262. * Value: "VERY_UNLIKELY"
  1263. */
  1264. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryUnlikely;
  1265. // ----------------------------------------------------------------------------
  1266. // GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.medical
  1267. /**
  1268. * It is likely that the image belongs to the specified vertical.
  1269. *
  1270. * Value: "LIKELY"
  1271. */
  1272. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Likely;
  1273. /**
  1274. * It is possible that the image belongs to the specified vertical.
  1275. *
  1276. * Value: "POSSIBLE"
  1277. */
  1278. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Possible;
  1279. /**
  1280. * Unknown likelihood.
  1281. *
  1282. * Value: "UNKNOWN"
  1283. */
  1284. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Unknown;
  1285. /**
  1286. * It is unlikely that the image belongs to the specified vertical.
  1287. *
  1288. * Value: "UNLIKELY"
  1289. */
  1290. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Unlikely;
  1291. /**
  1292. * It is very likely that the image belongs to the specified vertical.
  1293. *
  1294. * Value: "VERY_LIKELY"
  1295. */
  1296. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_VeryLikely;
  1297. /**
  1298. * It is very unlikely that the image belongs to the specified vertical.
  1299. *
  1300. * Value: "VERY_UNLIKELY"
  1301. */
  1302. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_VeryUnlikely;
  1303. // ----------------------------------------------------------------------------
  1304. // GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.racy
  1305. /**
  1306. * It is likely that the image belongs to the specified vertical.
  1307. *
  1308. * Value: "LIKELY"
  1309. */
  1310. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Likely;
  1311. /**
  1312. * It is possible that the image belongs to the specified vertical.
  1313. *
  1314. * Value: "POSSIBLE"
  1315. */
  1316. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Possible;
  1317. /**
  1318. * Unknown likelihood.
  1319. *
  1320. * Value: "UNKNOWN"
  1321. */
  1322. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Unknown;
  1323. /**
  1324. * It is unlikely that the image belongs to the specified vertical.
  1325. *
  1326. * Value: "UNLIKELY"
  1327. */
  1328. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Unlikely;
  1329. /**
  1330. * It is very likely that the image belongs to the specified vertical.
  1331. *
  1332. * Value: "VERY_LIKELY"
  1333. */
  1334. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_VeryLikely;
  1335. /**
  1336. * It is very unlikely that the image belongs to the specified vertical.
  1337. *
  1338. * Value: "VERY_UNLIKELY"
  1339. */
  1340. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_VeryUnlikely;
  1341. // ----------------------------------------------------------------------------
  1342. // GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.spoof
  1343. /**
  1344. * It is likely that the image belongs to the specified vertical.
  1345. *
  1346. * Value: "LIKELY"
  1347. */
  1348. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Likely;
  1349. /**
  1350. * It is possible that the image belongs to the specified vertical.
  1351. *
  1352. * Value: "POSSIBLE"
  1353. */
  1354. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Possible;
  1355. /**
  1356. * Unknown likelihood.
  1357. *
  1358. * Value: "UNKNOWN"
  1359. */
  1360. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Unknown;
  1361. /**
  1362. * It is unlikely that the image belongs to the specified vertical.
  1363. *
  1364. * Value: "UNLIKELY"
  1365. */
  1366. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Unlikely;
  1367. /**
  1368. * It is very likely that the image belongs to the specified vertical.
  1369. *
  1370. * Value: "VERY_LIKELY"
  1371. */
  1372. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_VeryLikely;
  1373. /**
  1374. * It is very unlikely that the image belongs to the specified vertical.
  1375. *
  1376. * Value: "VERY_UNLIKELY"
  1377. */
  1378. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_VeryUnlikely;
  1379. // ----------------------------------------------------------------------------
  1380. // GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation.violence
  1381. /**
  1382. * It is likely that the image belongs to the specified vertical.
  1383. *
  1384. * Value: "LIKELY"
  1385. */
  1386. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Likely;
  1387. /**
  1388. * It is possible that the image belongs to the specified vertical.
  1389. *
  1390. * Value: "POSSIBLE"
  1391. */
  1392. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Possible;
  1393. /**
  1394. * Unknown likelihood.
  1395. *
  1396. * Value: "UNKNOWN"
  1397. */
  1398. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Unknown;
  1399. /**
  1400. * It is unlikely that the image belongs to the specified vertical.
  1401. *
  1402. * Value: "UNLIKELY"
  1403. */
  1404. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Unlikely;
  1405. /**
  1406. * It is very likely that the image belongs to the specified vertical.
  1407. *
  1408. * Value: "VERY_LIKELY"
  1409. */
  1410. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryLikely;
  1411. /**
  1412. * It is very unlikely that the image belongs to the specified vertical.
  1413. *
  1414. * Value: "VERY_UNLIKELY"
  1415. */
  1416. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryUnlikely;
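/*
 * Usage sketch: a simple moderation gate over the safe-search buckets above.
 * This assumes the generated
 * GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation exposes `adult`
 * and `violence` as NSString properties, like the other string enum fields
 * in this header; `safeSearch` stands for an annotation taken from an
 * annotate response.
 *
 *   GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation *safeSearch;  // from an annotate response
 *   BOOL flagged =
 *       [safeSearch.adult isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Likely] ||
 *       [safeSearch.adult isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryLikely] ||
 *       [safeSearch.violence isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryLikely];
 *   if (flagged) {
 *     // Hide or blur the image pending manual review.
 *   }
 */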
  1417. // ----------------------------------------------------------------------------
  1418. // GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak.type
  1419. /**
  1420. * Line-wrapping break.
  1421. *
  1422. * Value: "EOL_SURE_SPACE"
  1423. */
  1424. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_EolSureSpace;
  1425. /**
  1426. * End-line hyphen that is not present in text; does not co-occur with
  1427. * `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
  1428. *
  1429. * Value: "HYPHEN"
  1430. */
  1431. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Hyphen;
  1432. /**
  1433. * Line break that ends a paragraph.
  1434. *
  1435. * Value: "LINE_BREAK"
  1436. */
  1437. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_LineBreak;
  1438. /**
  1439. * Regular space.
  1440. *
  1441. * Value: "SPACE"
  1442. */
  1443. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Space;
  1444. /**
  1445. * Sure space (very wide).
  1446. *
  1447. * Value: "SURE_SPACE"
  1448. */
  1449. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_SureSpace;
  1450. /**
  1451. * Unknown break label type.
  1452. *
  1453. * Value: "UNKNOWN"
  1454. */
  1455. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Unknown;
  1456. // ----------------------------------------------------------------------------
  1457. // GTLRVision_GoogleCloudVisionV1p2beta1Block.blockType
  1458. /**
  1459. * Barcode block.
  1460. *
  1461. * Value: "BARCODE"
  1462. */
  1463. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Barcode;
  1464. /**
  1465. * Image block.
  1466. *
  1467. * Value: "PICTURE"
  1468. */
  1469. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Picture;
  1470. /**
  1471. * Horizontal/vertical line box.
  1472. *
  1473. * Value: "RULER"
  1474. */
  1475. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Ruler;
  1476. /**
  1477. * Table block.
  1478. *
  1479. * Value: "TABLE"
  1480. */
  1481. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Table;
  1482. /**
  1483. * Regular text block.
  1484. *
  1485. * Value: "TEXT"
  1486. */
  1487. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Text;
  1488. /**
  1489. * Unknown block type.
  1490. *
  1491. * Value: "UNKNOWN"
  1492. */
  1493. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Unknown;
  1494. // ----------------------------------------------------------------------------
  1495. // GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.angerLikelihood
  1496. /**
  1497. * It is likely that the image belongs to the specified vertical.
  1498. *
  1499. * Value: "LIKELY"
  1500. */
  1501. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Likely;
  1502. /**
  1503. * It is possible that the image belongs to the specified vertical.
  1504. *
  1505. * Value: "POSSIBLE"
  1506. */
  1507. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Possible;
  1508. /**
  1509. * Unknown likelihood.
  1510. *
  1511. * Value: "UNKNOWN"
  1512. */
  1513. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Unknown;
  1514. /**
  1515. * It is unlikely that the image belongs to the specified vertical.
  1516. *
  1517. * Value: "UNLIKELY"
  1518. */
  1519. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Unlikely;
  1520. /**
  1521. * It is very likely that the image belongs to the specified vertical.
  1522. *
  1523. * Value: "VERY_LIKELY"
  1524. */
  1525. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_VeryLikely;
  1526. /**
  1527. * It is very unlikely that the image belongs to the specified vertical.
  1528. *
  1529. * Value: "VERY_UNLIKELY"
  1530. */
  1531. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_VeryUnlikely;
  1532. // ----------------------------------------------------------------------------
  1533. // GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.blurredLikelihood
  1534. /**
  1535. * It is likely that the image belongs to the specified vertical.
  1536. *
  1537. * Value: "LIKELY"
  1538. */
  1539. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Likely;
  1540. /**
  1541. * It is possible that the image belongs to the specified vertical.
  1542. *
  1543. * Value: "POSSIBLE"
  1544. */
  1545. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Possible;
  1546. /**
  1547. * Unknown likelihood.
  1548. *
  1549. * Value: "UNKNOWN"
  1550. */
  1551. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Unknown;
  1552. /**
  1553. * It is unlikely that the image belongs to the specified vertical.
  1554. *
  1555. * Value: "UNLIKELY"
  1556. */
  1557. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Unlikely;
  1558. /**
  1559. * It is very likely that the image belongs to the specified vertical.
  1560. *
  1561. * Value: "VERY_LIKELY"
  1562. */
  1563. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_VeryLikely;
  1564. /**
  1565. * It is very unlikely that the image belongs to the specified vertical.
  1566. *
  1567. * Value: "VERY_UNLIKELY"
  1568. */
  1569. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely;
  1570. // ----------------------------------------------------------------------------
  1571. // GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.headwearLikelihood
  1572. /**
  1573. * It is likely that the image belongs to the specified vertical.
  1574. *
  1575. * Value: "LIKELY"
  1576. */
  1577. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Likely;
  1578. /**
  1579. * It is possible that the image belongs to the specified vertical.
  1580. *
  1581. * Value: "POSSIBLE"
  1582. */
  1583. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Possible;
  1584. /**
  1585. * Unknown likelihood.
  1586. *
  1587. * Value: "UNKNOWN"
  1588. */
  1589. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Unknown;
  1590. /**
  1591. * It is unlikely that the image belongs to the specified vertical.
  1592. *
  1593. * Value: "UNLIKELY"
  1594. */
  1595. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Unlikely;
  1596. /**
  1597. * It is very likely that the image belongs to the specified vertical.
  1598. *
  1599. * Value: "VERY_LIKELY"
  1600. */
  1601. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_VeryLikely;
  1602. /**
  1603. * It is very unlikely that the image belongs to the specified vertical.
  1604. *
  1605. * Value: "VERY_UNLIKELY"
  1606. */
  1607. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely;
  1608. // ----------------------------------------------------------------------------
  1609. // GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.joyLikelihood
  1610. /**
  1611. * It is likely that the image belongs to the specified vertical.
  1612. *
  1613. * Value: "LIKELY"
  1614. */
  1615. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Likely;
  1616. /**
  1617. * It is possible that the image belongs to the specified vertical.
  1618. *
  1619. * Value: "POSSIBLE"
  1620. */
  1621. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Possible;
  1622. /**
  1623. * Unknown likelihood.
  1624. *
  1625. * Value: "UNKNOWN"
  1626. */
  1627. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Unknown;
  1628. /**
  1629. * It is unlikely that the image belongs to the specified vertical.
  1630. *
  1631. * Value: "UNLIKELY"
  1632. */
  1633. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Unlikely;
  1634. /**
  1635. * It is very likely that the image belongs to the specified vertical.
  1636. *
  1637. * Value: "VERY_LIKELY"
  1638. */
  1639. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryLikely;
  1640. /**
  1641. * It is very unlikely that the image belongs to the specified vertical.
  1642. *
  1643. * Value: "VERY_UNLIKELY"
  1644. */
  1645. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryUnlikely;
  1646. // ----------------------------------------------------------------------------
  1647. // GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.sorrowLikelihood
  1648. /**
  1649. * It is likely that the image belongs to the specified vertical.
  1650. *
  1651. * Value: "LIKELY"
  1652. */
  1653. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Likely;
  1654. /**
  1655. * It is possible that the image belongs to the specified vertical.
  1656. *
  1657. * Value: "POSSIBLE"
  1658. */
  1659. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Possible;
  1660. /**
  1661. * Unknown likelihood.
  1662. *
  1663. * Value: "UNKNOWN"
  1664. */
  1665. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Unknown;
  1666. /**
  1667. * It is unlikely that the image belongs to the specified vertical.
  1668. *
  1669. * Value: "UNLIKELY"
  1670. */
  1671. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Unlikely;
  1672. /**
  1673. * It is very likely that the image belongs to the specified vertical.
  1674. *
  1675. * Value: "VERY_LIKELY"
  1676. */
  1677. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_VeryLikely;
  1678. /**
  1679. * It is very unlikely that the image belongs to the specified vertical.
  1680. *
  1681. * Value: "VERY_UNLIKELY"
  1682. */
  1683. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely;
  1684. // ----------------------------------------------------------------------------
  1685. // GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.surpriseLikelihood
  1686. /**
  1687. * It is likely that the image belongs to the specified vertical.
  1688. *
  1689. * Value: "LIKELY"
  1690. */
  1691. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Likely;
  1692. /**
  1693. * It is possible that the image belongs to the specified vertical.
  1694. *
  1695. * Value: "POSSIBLE"
  1696. */
  1697. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Possible;
  1698. /**
  1699. * Unknown likelihood.
  1700. *
  1701. * Value: "UNKNOWN"
  1702. */
  1703. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Unknown;
  1704. /**
  1705. * It is unlikely that the image belongs to the specified vertical.
  1706. *
  1707. * Value: "UNLIKELY"
  1708. */
  1709. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Unlikely;
  1710. /**
  1711. * It is very likely that the image belongs to the specified vertical.
  1712. *
  1713. * Value: "VERY_LIKELY"
  1714. */
  1715. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_VeryLikely;
  1716. /**
  1717. * It is very unlikely that the image belongs to the specified vertical.
  1718. *
  1719. * Value: "VERY_UNLIKELY"
  1720. */
  1721. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely;
  1722. // ----------------------------------------------------------------------------
  1723. // GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation.underExposedLikelihood
  1724. /**
  1725. * It is likely that the image belongs to the specified vertical.
  1726. *
  1727. * Value: "LIKELY"
  1728. */
  1729. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Likely;
  1730. /**
  1731. * It is possible that the image belongs to the specified vertical.
  1732. *
  1733. * Value: "POSSIBLE"
  1734. */
  1735. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Possible;
  1736. /**
  1737. * Unknown likelihood.
  1738. *
  1739. * Value: "UNKNOWN"
  1740. */
  1741. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Unknown;
  1742. /**
  1743. * It is unlikely that the image belongs to the specified vertical.
  1744. *
  1745. * Value: "UNLIKELY"
  1746. */
  1747. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Unlikely;
  1748. /**
  1749. * It is very likely that the image belongs to the specified vertical.
  1750. *
  1751. * Value: "VERY_LIKELY"
  1752. */
  1753. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely;
  1754. /**
  1755. * It is very unlikely that the image belongs to the specified vertical.
  1756. *
  1757. * Value: "VERY_UNLIKELY"
  1758. */
  1759. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely;
  1760. // ----------------------------------------------------------------------------
  1761. // GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark.type
  1762. /**
  1763. * Chin gnathion.
  1764. *
  1765. * Value: "CHIN_GNATHION"
  1766. */
  1767. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinGnathion;
  1768. /**
  1769. * Chin left gonion.
  1770. *
  1771. * Value: "CHIN_LEFT_GONION"
  1772. */
  1773. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinLeftGonion;
  1774. /**
  1775. * Chin right gonion.
  1776. *
  1777. * Value: "CHIN_RIGHT_GONION"
  1778. */
  1779. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinRightGonion;
  1780. /**
  1781. * Forehead glabella.
  1782. *
  1783. * Value: "FOREHEAD_GLABELLA"
  1784. */
  1785. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ForeheadGlabella;
  1786. /**
  1787. * Left ear tragion.
  1788. *
  1789. * Value: "LEFT_EAR_TRAGION"
  1790. */
  1791. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEarTragion;
  1792. /**
  1793. * Left eye.
  1794. *
  1795. * Value: "LEFT_EYE"
  1796. */
  1797. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEye;
  1798. /**
  1799. * Left eye, bottom boundary.
  1800. *
  1801. * Value: "LEFT_EYE_BOTTOM_BOUNDARY"
  1802. */
  1803. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary;
  1804. /**
  1805. * Left eyebrow, upper midpoint.
  1806. *
  1807. * Value: "LEFT_EYEBROW_UPPER_MIDPOINT"
  1808. */
  1809. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint;
  1810. /**
  1811. * Left eye, left corner.
  1812. *
  1813. * Value: "LEFT_EYE_LEFT_CORNER"
  1814. */
  1815. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner;
  1816. /**
  1817. * Left eye pupil.
  1818. *
  1819. * Value: "LEFT_EYE_PUPIL"
  1820. */
  1821. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyePupil;
  1822. /**
  1823. * Left eye, right corner.
  1824. *
  1825. * Value: "LEFT_EYE_RIGHT_CORNER"
  1826. */
  1827. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner;
  1828. /**
  1829. * Left eye, top boundary.
  1830. *
  1831. * Value: "LEFT_EYE_TOP_BOUNDARY"
  1832. */
  1833. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary;
  1834. /**
  1835. * Left of left eyebrow.
  1836. *
  1837. * Value: "LEFT_OF_LEFT_EYEBROW"
  1838. */
  1839. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow;
  1840. /**
  1841. * Left of right eyebrow.
  1842. *
  1843. * Value: "LEFT_OF_RIGHT_EYEBROW"
  1844. */
  1845. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow;
  1846. /**
  1847. * Lower lip.
  1848. *
  1849. * Value: "LOWER_LIP"
  1850. */
  1851. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LowerLip;
  1852. /**
  1853. * Midpoint between eyes.
  1854. *
  1855. * Value: "MIDPOINT_BETWEEN_EYES"
  1856. */
  1857. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes;
  1858. /**
  1859. * Mouth center.
  1860. *
  1861. * Value: "MOUTH_CENTER"
  1862. */
  1863. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthCenter;
  1864. /**
  1865. * Mouth left.
  1866. *
  1867. * Value: "MOUTH_LEFT"
  1868. */
  1869. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthLeft;
  1870. /**
  1871. * Mouth right.
  1872. *
  1873. * Value: "MOUTH_RIGHT"
  1874. */
  1875. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthRight;
  1876. /**
  1877. * Nose, bottom center.
  1878. *
  1879. * Value: "NOSE_BOTTOM_CENTER"
  1880. */
  1881. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomCenter;
  1882. /**
  1883. * Nose, bottom left.
  1884. *
  1885. * Value: "NOSE_BOTTOM_LEFT"
  1886. */
  1887. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomLeft;
  1888. /**
  1889. * Nose, bottom right.
  1890. *
  1891. * Value: "NOSE_BOTTOM_RIGHT"
  1892. */
  1893. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomRight;
  1894. /**
  1895. * Nose tip.
  1896. *
  1897. * Value: "NOSE_TIP"
  1898. */
  1899. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseTip;
  1900. /**
  1901. * Right ear tragion.
  1902. *
  1903. * Value: "RIGHT_EAR_TRAGION"
  1904. */
  1905. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEarTragion;
  1906. /**
  1907. * Right eye.
  1908. *
  1909. * Value: "RIGHT_EYE"
  1910. */
  1911. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEye;
  1912. /**
  1913. * Right eye, bottom boundary.
  1914. *
  1915. * Value: "RIGHT_EYE_BOTTOM_BOUNDARY"
  1916. */
  1917. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary;
  1918. /**
  1919. * Right eyebrow, upper midpoint.
  1920. *
  1921. * Value: "RIGHT_EYEBROW_UPPER_MIDPOINT"
  1922. */
  1923. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint;
  1924. /**
  1925. * Right eye, left corner.
  1926. *
  1927. * Value: "RIGHT_EYE_LEFT_CORNER"
  1928. */
  1929. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner;
  1930. /**
  1931. * Right eye pupil.
  1932. *
  1933. * Value: "RIGHT_EYE_PUPIL"
  1934. */
  1935. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyePupil;
  1936. /**
  1937. * Right eye, right corner.
  1938. *
  1939. * Value: "RIGHT_EYE_RIGHT_CORNER"
  1940. */
  1941. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeRightCorner;
  1942. /**
  1943. * Right eye, top boundary.
  1944. *
  1945. * Value: "RIGHT_EYE_TOP_BOUNDARY"
  1946. */
  1947. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary;
  1948. /**
  1949. * Right of left eyebrow.
  1950. *
  1951. * Value: "RIGHT_OF_LEFT_EYEBROW"
  1952. */
  1953. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow;
  1954. /**
  1955. * Right of right eyebrow.
  1956. *
  1957. * Value: "RIGHT_OF_RIGHT_EYEBROW"
  1958. */
  1959. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow;
  1960. /**
  1961. * Unknown face landmark detected. Should not be filled.
  1962. *
  1963. * Value: "UNKNOWN_LANDMARK"
  1964. */
  1965. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_UnknownLandmark;
  1966. /**
  1967. * Upper lip.
  1968. *
  1969. * Value: "UPPER_LIP"
  1970. */
  1971. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_UpperLip;
  1972. // ----------------------------------------------------------------------------
  1973. // GTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata.state
  1974. /**
  1975. * The batch processing was cancelled.
  1976. *
  1977. * Value: "CANCELLED"
  1978. */
  1979. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Cancelled;
  1980. /**
 * The request has been received.
  1982. *
  1983. * Value: "CREATED"
  1984. */
  1985. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Created;
  1986. /**
  1987. * The batch processing is done.
  1988. *
  1989. * Value: "DONE"
  1990. */
  1991. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Done;
  1992. /**
  1993. * Request is actively being processed.
  1994. *
  1995. * Value: "RUNNING"
  1996. */
  1997. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Running;
  1998. /**
 * Invalid; the state is unspecified.
  2000. *
  2001. * Value: "STATE_UNSPECIFIED"
  2002. */
  2003. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_StateUnspecified;
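// Editorial sketch, not part of the generated API surface: the state constants
// above are plain NSString values, so client code usually checks an
// operation's progress with string comparison. The helper name below is
// illustrative only; Foundation types come from the imports at the top of this
// header. DONE and CANCELLED are the terminal states listed above, while
// CREATED and RUNNING mean the batch is still in flight.
static inline BOOL GTLRVisionExample_V1p2beta1OperationIsFinished(NSString *state) {
  return ([kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Done isEqual:state] ||
          [kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Cancelled isEqual:state]);
}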
  2004. // ----------------------------------------------------------------------------
  2005. // GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.adult
  2006. /**
  2007. * It is likely that the image belongs to the specified vertical.
  2008. *
  2009. * Value: "LIKELY"
  2010. */
  2011. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Likely;
  2012. /**
  2013. * It is possible that the image belongs to the specified vertical.
  2014. *
  2015. * Value: "POSSIBLE"
  2016. */
  2017. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Possible;
  2018. /**
  2019. * Unknown likelihood.
  2020. *
  2021. * Value: "UNKNOWN"
  2022. */
  2023. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Unknown;
  2024. /**
  2025. * It is unlikely that the image belongs to the specified vertical.
  2026. *
  2027. * Value: "UNLIKELY"
  2028. */
  2029. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Unlikely;
  2030. /**
  2031. * It is very likely that the image belongs to the specified vertical.
  2032. *
  2033. * Value: "VERY_LIKELY"
  2034. */
  2035. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryLikely;
  2036. /**
  2037. * It is very unlikely that the image belongs to the specified vertical.
  2038. *
  2039. * Value: "VERY_UNLIKELY"
  2040. */
  2041. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryUnlikely;
  2042. // ----------------------------------------------------------------------------
  2043. // GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.medical
  2044. /**
  2045. * It is likely that the image belongs to the specified vertical.
  2046. *
  2047. * Value: "LIKELY"
  2048. */
  2049. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Likely;
  2050. /**
  2051. * It is possible that the image belongs to the specified vertical.
  2052. *
  2053. * Value: "POSSIBLE"
  2054. */
  2055. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Possible;
  2056. /**
  2057. * Unknown likelihood.
  2058. *
  2059. * Value: "UNKNOWN"
  2060. */
  2061. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Unknown;
  2062. /**
  2063. * It is unlikely that the image belongs to the specified vertical.
  2064. *
  2065. * Value: "UNLIKELY"
  2066. */
  2067. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Unlikely;
  2068. /**
  2069. * It is very likely that the image belongs to the specified vertical.
  2070. *
  2071. * Value: "VERY_LIKELY"
  2072. */
  2073. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_VeryLikely;
  2074. /**
  2075. * It is very unlikely that the image belongs to the specified vertical.
  2076. *
  2077. * Value: "VERY_UNLIKELY"
  2078. */
  2079. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_VeryUnlikely;
  2080. // ----------------------------------------------------------------------------
  2081. // GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.racy
  2082. /**
  2083. * It is likely that the image belongs to the specified vertical.
  2084. *
  2085. * Value: "LIKELY"
  2086. */
  2087. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Likely;
  2088. /**
  2089. * It is possible that the image belongs to the specified vertical.
  2090. *
  2091. * Value: "POSSIBLE"
  2092. */
  2093. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Possible;
  2094. /**
  2095. * Unknown likelihood.
  2096. *
  2097. * Value: "UNKNOWN"
  2098. */
  2099. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Unknown;
  2100. /**
  2101. * It is unlikely that the image belongs to the specified vertical.
  2102. *
  2103. * Value: "UNLIKELY"
  2104. */
  2105. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Unlikely;
  2106. /**
  2107. * It is very likely that the image belongs to the specified vertical.
  2108. *
  2109. * Value: "VERY_LIKELY"
  2110. */
  2111. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_VeryLikely;
  2112. /**
  2113. * It is very unlikely that the image belongs to the specified vertical.
  2114. *
  2115. * Value: "VERY_UNLIKELY"
  2116. */
  2117. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_VeryUnlikely;
  2118. // ----------------------------------------------------------------------------
  2119. // GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.spoof
  2120. /**
  2121. * It is likely that the image belongs to the specified vertical.
  2122. *
  2123. * Value: "LIKELY"
  2124. */
  2125. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Likely;
  2126. /**
  2127. * It is possible that the image belongs to the specified vertical.
  2128. *
  2129. * Value: "POSSIBLE"
  2130. */
  2131. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Possible;
  2132. /**
  2133. * Unknown likelihood.
  2134. *
  2135. * Value: "UNKNOWN"
  2136. */
  2137. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Unknown;
  2138. /**
  2139. * It is unlikely that the image belongs to the specified vertical.
  2140. *
  2141. * Value: "UNLIKELY"
  2142. */
  2143. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Unlikely;
  2144. /**
  2145. * It is very likely that the image belongs to the specified vertical.
  2146. *
  2147. * Value: "VERY_LIKELY"
  2148. */
  2149. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_VeryLikely;
  2150. /**
  2151. * It is very unlikely that the image belongs to the specified vertical.
  2152. *
  2153. * Value: "VERY_UNLIKELY"
  2154. */
  2155. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_VeryUnlikely;
  2156. // ----------------------------------------------------------------------------
  2157. // GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.violence
  2158. /**
  2159. * It is likely that the image belongs to the specified vertical.
  2160. *
  2161. * Value: "LIKELY"
  2162. */
  2163. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Likely;
  2164. /**
  2165. * It is possible that the image belongs to the specified vertical.
  2166. *
  2167. * Value: "POSSIBLE"
  2168. */
  2169. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Possible;
  2170. /**
  2171. * Unknown likelihood.
  2172. *
  2173. * Value: "UNKNOWN"
  2174. */
  2175. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Unknown;
  2176. /**
  2177. * It is unlikely that the image belongs to the specified vertical.
  2178. *
  2179. * Value: "UNLIKELY"
  2180. */
  2181. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Unlikely;
  2182. /**
  2183. * It is very likely that the image belongs to the specified vertical.
  2184. *
  2185. * Value: "VERY_LIKELY"
  2186. */
  2187. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_VeryLikely;
  2188. /**
  2189. * It is very unlikely that the image belongs to the specified vertical.
  2190. *
  2191. * Value: "VERY_UNLIKELY"
  2192. */
  2193. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_VeryUnlikely;
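// Editorial sketch, not part of the generated API surface: the five SafeSearch
// fields above (adult, medical, racy, spoof, violence) share the same raw
// string values, so one helper can rank any of them. The name below is
// illustrative only. Returns -1 for UNKNOWN or unexpected strings; callers
// might, for example, treat a rank of 3 or higher (LIKELY or VERY_LIKELY) as
// flagged content.
static inline NSInteger GTLRVisionExample_SafeSearchLikelihoodRank(NSString *likelihood) {
  NSArray<NSString *> *ascending = @[
    kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryUnlikely,
    kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Unlikely,
    kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Possible,
    kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Likely,
    kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryLikely,
  ];
  NSUInteger index = likelihood ? [ascending indexOfObject:likelihood] : NSNotFound;
  return (index == NSNotFound) ? -1 : (NSInteger)index;
}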
  2194. // ----------------------------------------------------------------------------
  2195. // GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak.type
  2196. /**
  2197. * Line-wrapping break.
  2198. *
  2199. * Value: "EOL_SURE_SPACE"
  2200. */
  2201. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_EolSureSpace;
  2202. /**
  2203. * End-line hyphen that is not present in text; does not co-occur with
  2204. * `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
  2205. *
  2206. * Value: "HYPHEN"
  2207. */
  2208. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Hyphen;
  2209. /**
  2210. * Line break that ends a paragraph.
  2211. *
  2212. * Value: "LINE_BREAK"
  2213. */
  2214. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_LineBreak;
  2215. /**
  2216. * Regular space.
  2217. *
  2218. * Value: "SPACE"
  2219. */
  2220. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Space;
  2221. /**
  2222. * Sure space (very wide).
  2223. *
  2224. * Value: "SURE_SPACE"
  2225. */
  2226. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_SureSpace;
  2227. /**
  2228. * Unknown break label type.
  2229. *
  2230. * Value: "UNKNOWN"
  2231. */
  2232. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Unknown;
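// Editorial sketch, not part of the generated API surface: when reassembling
// plain text from OCR output, the detected break after each word determines
// the separator to append. The mapping below is one reasonable choice, not a
// documented requirement, and the helper name is illustrative only; HYPHEN in
// particular marks an end-of-line hyphen that the recognized text itself omits.
static inline NSString *GTLRVisionExample_SeparatorForDetectedBreak(NSString *breakType) {
  if ([kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Space isEqual:breakType] ||
      [kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_SureSpace isEqual:breakType]) {
    return @" ";
  }
  if ([kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_EolSureSpace isEqual:breakType] ||
      [kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_LineBreak isEqual:breakType]) {
    return @"\n";
  }
  if ([kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Hyphen isEqual:breakType]) {
    return @"-\n";  // reinsert the omitted hyphen, then wrap the line
  }
  return @"";  // UNKNOWN, or no break reported
}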
  2233. // ----------------------------------------------------------------------------
  2234. // GTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata.state
  2235. /**
 * The request is done because longrunning.Operations.CancelOperation was
 * called by the user. Any records processed before the cancel command was
 * issued are still output as specified in the request.
  2239. *
  2240. * Value: "CANCELLED"
  2241. */
  2242. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Cancelled;
  2243. /**
  2244. * The request is done and no item has been successfully processed.
  2245. *
  2246. * Value: "FAILED"
  2247. */
  2248. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Failed;
  2249. /**
  2250. * Request is actively being processed.
  2251. *
  2252. * Value: "PROCESSING"
  2253. */
  2254. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Processing;
  2255. /**
 * Invalid; the state is unspecified.
  2257. *
  2258. * Value: "STATE_UNSPECIFIED"
  2259. */
  2260. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_StateUnspecified;
  2261. /**
  2262. * The request is done and at least one item has been successfully
  2263. * processed.
  2264. *
  2265. * Value: "SUCCESSFUL"
  2266. */
  2267. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Successful;
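// Editorial sketch, not part of the generated API surface: the batch states
// above describe a long-running operation. SUCCESSFUL, FAILED, and CANCELLED
// are terminal, while PROCESSING means polling should continue. The helper
// name is illustrative only, assuming `state` was read from a
// GTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata response.
static inline BOOL GTLRVisionExample_BatchOperationIsTerminal(NSString *state) {
  return ([kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Successful isEqual:state] ||
          [kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Failed isEqual:state] ||
          [kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Cancelled isEqual:state]);
}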
  2268. // ----------------------------------------------------------------------------
  2269. // GTLRVision_GoogleCloudVisionV1p3beta1Block.blockType
  2270. /**
  2271. * Barcode block.
  2272. *
  2273. * Value: "BARCODE"
  2274. */
  2275. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Barcode;
  2276. /**
  2277. * Image block.
  2278. *
  2279. * Value: "PICTURE"
  2280. */
  2281. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Picture;
  2282. /**
  2283. * Horizontal/vertical line box.
  2284. *
  2285. * Value: "RULER"
  2286. */
  2287. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Ruler;
  2288. /**
  2289. * Table block.
  2290. *
  2291. * Value: "TABLE"
  2292. */
  2293. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Table;
  2294. /**
  2295. * Regular text block.
  2296. *
  2297. * Value: "TEXT"
  2298. */
  2299. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Text;
  2300. /**
  2301. * Unknown block type.
  2302. *
  2303. * Value: "UNKNOWN"
  2304. */
  2305. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Unknown;
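// Editorial sketch, not part of the generated API surface: block types let
// callers filter page structure. For plain text extraction only TEXT blocks
// are usually kept, while PICTURE, RULER, BARCODE, and TABLE blocks are
// skipped or handled separately depending on the application. The helper name
// is illustrative only.
static inline BOOL GTLRVisionExample_IsTextBlockType(NSString *blockType) {
  return [kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Text isEqual:blockType];
}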
  2306. // ----------------------------------------------------------------------------
  2307. // GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.angerLikelihood
  2308. /**
  2309. * It is likely that the image belongs to the specified vertical.
  2310. *
  2311. * Value: "LIKELY"
  2312. */
  2313. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Likely;
  2314. /**
  2315. * It is possible that the image belongs to the specified vertical.
  2316. *
  2317. * Value: "POSSIBLE"
  2318. */
  2319. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Possible;
  2320. /**
  2321. * Unknown likelihood.
  2322. *
  2323. * Value: "UNKNOWN"
  2324. */
  2325. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Unknown;
  2326. /**
  2327. * It is unlikely that the image belongs to the specified vertical.
  2328. *
  2329. * Value: "UNLIKELY"
  2330. */
  2331. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Unlikely;
  2332. /**
  2333. * It is very likely that the image belongs to the specified vertical.
  2334. *
  2335. * Value: "VERY_LIKELY"
  2336. */
  2337. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_VeryLikely;
  2338. /**
  2339. * It is very unlikely that the image belongs to the specified vertical.
  2340. *
  2341. * Value: "VERY_UNLIKELY"
  2342. */
  2343. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_VeryUnlikely;
  2344. // ----------------------------------------------------------------------------
  2345. // GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.blurredLikelihood
  2346. /**
  2347. * It is likely that the image belongs to the specified vertical.
  2348. *
  2349. * Value: "LIKELY"
  2350. */
  2351. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Likely;
  2352. /**
  2353. * It is possible that the image belongs to the specified vertical.
  2354. *
  2355. * Value: "POSSIBLE"
  2356. */
  2357. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Possible;
  2358. /**
  2359. * Unknown likelihood.
  2360. *
  2361. * Value: "UNKNOWN"
  2362. */
  2363. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Unknown;
  2364. /**
  2365. * It is unlikely that the image belongs to the specified vertical.
  2366. *
  2367. * Value: "UNLIKELY"
  2368. */
  2369. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Unlikely;
  2370. /**
  2371. * It is very likely that the image belongs to the specified vertical.
  2372. *
  2373. * Value: "VERY_LIKELY"
  2374. */
  2375. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_VeryLikely;
  2376. /**
  2377. * It is very unlikely that the image belongs to the specified vertical.
  2378. *
  2379. * Value: "VERY_UNLIKELY"
  2380. */
  2381. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely;
  2382. // ----------------------------------------------------------------------------
  2383. // GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.headwearLikelihood
  2384. /**
  2385. * It is likely that the image belongs to the specified vertical.
  2386. *
  2387. * Value: "LIKELY"
  2388. */
  2389. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Likely;
  2390. /**
  2391. * It is possible that the image belongs to the specified vertical.
  2392. *
  2393. * Value: "POSSIBLE"
  2394. */
  2395. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Possible;
  2396. /**
  2397. * Unknown likelihood.
  2398. *
  2399. * Value: "UNKNOWN"
  2400. */
  2401. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Unknown;
  2402. /**
  2403. * It is unlikely that the image belongs to the specified vertical.
  2404. *
  2405. * Value: "UNLIKELY"
  2406. */
  2407. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Unlikely;
  2408. /**
  2409. * It is very likely that the image belongs to the specified vertical.
  2410. *
  2411. * Value: "VERY_LIKELY"
  2412. */
  2413. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_VeryLikely;
  2414. /**
  2415. * It is very unlikely that the image belongs to the specified vertical.
  2416. *
  2417. * Value: "VERY_UNLIKELY"
  2418. */
  2419. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely;
  2420. // ----------------------------------------------------------------------------
  2421. // GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.joyLikelihood
  2422. /**
  2423. * It is likely that the image belongs to the specified vertical.
  2424. *
  2425. * Value: "LIKELY"
  2426. */
  2427. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Likely;
  2428. /**
  2429. * It is possible that the image belongs to the specified vertical.
  2430. *
  2431. * Value: "POSSIBLE"
  2432. */
  2433. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Possible;
  2434. /**
  2435. * Unknown likelihood.
  2436. *
  2437. * Value: "UNKNOWN"
  2438. */
  2439. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Unknown;
  2440. /**
  2441. * It is unlikely that the image belongs to the specified vertical.
  2442. *
  2443. * Value: "UNLIKELY"
  2444. */
  2445. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Unlikely;
  2446. /**
  2447. * It is very likely that the image belongs to the specified vertical.
  2448. *
  2449. * Value: "VERY_LIKELY"
  2450. */
  2451. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryLikely;
  2452. /**
  2453. * It is very unlikely that the image belongs to the specified vertical.
  2454. *
  2455. * Value: "VERY_UNLIKELY"
  2456. */
  2457. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryUnlikely;
  2458. // ----------------------------------------------------------------------------
  2459. // GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.sorrowLikelihood
  2460. /**
  2461. * It is likely that the image belongs to the specified vertical.
  2462. *
  2463. * Value: "LIKELY"
  2464. */
  2465. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Likely;
  2466. /**
  2467. * It is possible that the image belongs to the specified vertical.
  2468. *
  2469. * Value: "POSSIBLE"
  2470. */
  2471. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Possible;
  2472. /**
  2473. * Unknown likelihood.
  2474. *
  2475. * Value: "UNKNOWN"
  2476. */
  2477. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Unknown;
  2478. /**
  2479. * It is unlikely that the image belongs to the specified vertical.
  2480. *
  2481. * Value: "UNLIKELY"
  2482. */
  2483. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Unlikely;
  2484. /**
  2485. * It is very likely that the image belongs to the specified vertical.
  2486. *
  2487. * Value: "VERY_LIKELY"
  2488. */
  2489. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_VeryLikely;
  2490. /**
  2491. * It is very unlikely that the image belongs to the specified vertical.
  2492. *
  2493. * Value: "VERY_UNLIKELY"
  2494. */
  2495. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely;
  2496. // ----------------------------------------------------------------------------
  2497. // GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.surpriseLikelihood
  2498. /**
  2499. * It is likely that the image belongs to the specified vertical.
  2500. *
  2501. * Value: "LIKELY"
  2502. */
  2503. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Likely;
  2504. /**
  2505. * It is possible that the image belongs to the specified vertical.
  2506. *
  2507. * Value: "POSSIBLE"
  2508. */
  2509. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Possible;
  2510. /**
  2511. * Unknown likelihood.
  2512. *
  2513. * Value: "UNKNOWN"
  2514. */
  2515. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Unknown;
  2516. /**
  2517. * It is unlikely that the image belongs to the specified vertical.
  2518. *
  2519. * Value: "UNLIKELY"
  2520. */
  2521. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Unlikely;
  2522. /**
  2523. * It is very likely that the image belongs to the specified vertical.
  2524. *
  2525. * Value: "VERY_LIKELY"
  2526. */
  2527. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_VeryLikely;
  2528. /**
  2529. * It is very unlikely that the image belongs to the specified vertical.
  2530. *
  2531. * Value: "VERY_UNLIKELY"
  2532. */
  2533. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely;
  2534. // ----------------------------------------------------------------------------
  2535. // GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation.underExposedLikelihood
  2536. /**
  2537. * It is likely that the image belongs to the specified vertical.
  2538. *
  2539. * Value: "LIKELY"
  2540. */
  2541. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Likely;
  2542. /**
  2543. * It is possible that the image belongs to the specified vertical.
  2544. *
  2545. * Value: "POSSIBLE"
  2546. */
  2547. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Possible;
  2548. /**
  2549. * Unknown likelihood.
  2550. *
  2551. * Value: "UNKNOWN"
  2552. */
  2553. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Unknown;
  2554. /**
  2555. * It is unlikely that the image belongs to the specified vertical.
  2556. *
  2557. * Value: "UNLIKELY"
  2558. */
  2559. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Unlikely;
  2560. /**
  2561. * It is very likely that the image belongs to the specified vertical.
  2562. *
  2563. * Value: "VERY_LIKELY"
  2564. */
  2565. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely;
  2566. /**
  2567. * It is very unlikely that the image belongs to the specified vertical.
  2568. *
  2569. * Value: "VERY_UNLIKELY"
  2570. */
  2571. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely;
  2572. // ----------------------------------------------------------------------------
  2573. // GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark.type
  2574. /**
  2575. * Chin gnathion.
  2576. *
  2577. * Value: "CHIN_GNATHION"
  2578. */
  2579. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinGnathion;
  2580. /**
  2581. * Chin left gonion.
  2582. *
  2583. * Value: "CHIN_LEFT_GONION"
  2584. */
  2585. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinLeftGonion;
  2586. /**
  2587. * Chin right gonion.
  2588. *
  2589. * Value: "CHIN_RIGHT_GONION"
  2590. */
  2591. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinRightGonion;
  2592. /**
  2593. * Forehead glabella.
  2594. *
  2595. * Value: "FOREHEAD_GLABELLA"
  2596. */
  2597. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ForeheadGlabella;
  2598. /**
  2599. * Left ear tragion.
  2600. *
  2601. * Value: "LEFT_EAR_TRAGION"
  2602. */
  2603. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEarTragion;
  2604. /**
  2605. * Left eye.
  2606. *
  2607. * Value: "LEFT_EYE"
  2608. */
  2609. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEye;
  2610. /**
  2611. * Left eye, bottom boundary.
  2612. *
  2613. * Value: "LEFT_EYE_BOTTOM_BOUNDARY"
  2614. */
  2615. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary;
  2616. /**
  2617. * Left eyebrow, upper midpoint.
  2618. *
  2619. * Value: "LEFT_EYEBROW_UPPER_MIDPOINT"
  2620. */
  2621. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint;
  2622. /**
  2623. * Left eye, left corner.
  2624. *
  2625. * Value: "LEFT_EYE_LEFT_CORNER"
  2626. */
  2627. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner;
  2628. /**
  2629. * Left eye pupil.
  2630. *
  2631. * Value: "LEFT_EYE_PUPIL"
  2632. */
  2633. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyePupil;
  2634. /**
  2635. * Left eye, right corner.
  2636. *
  2637. * Value: "LEFT_EYE_RIGHT_CORNER"
  2638. */
  2639. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner;
  2640. /**
  2641. * Left eye, top boundary.
  2642. *
  2643. * Value: "LEFT_EYE_TOP_BOUNDARY"
  2644. */
  2645. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary;
  2646. /**
  2647. * Left of left eyebrow.
  2648. *
  2649. * Value: "LEFT_OF_LEFT_EYEBROW"
  2650. */
  2651. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow;
  2652. /**
  2653. * Left of right eyebrow.
  2654. *
  2655. * Value: "LEFT_OF_RIGHT_EYEBROW"
  2656. */
  2657. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow;
  2658. /**
  2659. * Lower lip.
  2660. *
  2661. * Value: "LOWER_LIP"
  2662. */
  2663. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LowerLip;
  2664. /**
  2665. * Midpoint between eyes.
  2666. *
  2667. * Value: "MIDPOINT_BETWEEN_EYES"
  2668. */
  2669. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes;
  2670. /**
  2671. * Mouth center.
  2672. *
  2673. * Value: "MOUTH_CENTER"
  2674. */
  2675. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthCenter;
  2676. /**
  2677. * Mouth left.
  2678. *
  2679. * Value: "MOUTH_LEFT"
  2680. */
  2681. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthLeft;
  2682. /**
  2683. * Mouth right.
  2684. *
  2685. * Value: "MOUTH_RIGHT"
  2686. */
  2687. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthRight;
  2688. /**
  2689. * Nose, bottom center.
  2690. *
  2691. * Value: "NOSE_BOTTOM_CENTER"
  2692. */
  2693. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomCenter;
  2694. /**
  2695. * Nose, bottom left.
  2696. *
  2697. * Value: "NOSE_BOTTOM_LEFT"
  2698. */
  2699. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomLeft;
  2700. /**
  2701. * Nose, bottom right.
  2702. *
  2703. * Value: "NOSE_BOTTOM_RIGHT"
  2704. */
  2705. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomRight;
  2706. /**
  2707. * Nose tip.
  2708. *
  2709. * Value: "NOSE_TIP"
  2710. */
  2711. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseTip;
  2712. /**
  2713. * Right ear tragion.
  2714. *
  2715. * Value: "RIGHT_EAR_TRAGION"
  2716. */
  2717. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEarTragion;
  2718. /**
  2719. * Right eye.
  2720. *
  2721. * Value: "RIGHT_EYE"
  2722. */
  2723. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEye;
  2724. /**
  2725. * Right eye, bottom boundary.
  2726. *
  2727. * Value: "RIGHT_EYE_BOTTOM_BOUNDARY"
  2728. */
  2729. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary;
  2730. /**
  2731. * Right eyebrow, upper midpoint.
  2732. *
  2733. * Value: "RIGHT_EYEBROW_UPPER_MIDPOINT"
  2734. */
  2735. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint;
  2736. /**
  2737. * Right eye, left corner.
  2738. *
  2739. * Value: "RIGHT_EYE_LEFT_CORNER"
  2740. */
  2741. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner;
  2742. /**
  2743. * Right eye pupil.
  2744. *
  2745. * Value: "RIGHT_EYE_PUPIL"
  2746. */
  2747. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyePupil;
  2748. /**
  2749. * Right eye, right corner.
  2750. *
  2751. * Value: "RIGHT_EYE_RIGHT_CORNER"
  2752. */
  2753. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeRightCorner;
  2754. /**
  2755. * Right eye, top boundary.
  2756. *
  2757. * Value: "RIGHT_EYE_TOP_BOUNDARY"
  2758. */
  2759. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary;
  2760. /**
  2761. * Right of left eyebrow.
  2762. *
  2763. * Value: "RIGHT_OF_LEFT_EYEBROW"
  2764. */
  2765. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow;
  2766. /**
  2767. * Right of right eyebrow.
  2768. *
  2769. * Value: "RIGHT_OF_RIGHT_EYEBROW"
  2770. */
  2771. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow;
  2772. /**
  2773. * Unknown face landmark detected. Should not be filled.
  2774. *
  2775. * Value: "UNKNOWN_LANDMARK"
  2776. */
  2777. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_UnknownLandmark;
  2778. /**
  2779. * Upper lip.
  2780. *
  2781. * Value: "UPPER_LIP"
  2782. */
  2783. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_UpperLip;
  2784. // ----------------------------------------------------------------------------
  2785. // GTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata.state
  2786. /**
  2787. * The batch processing was cancelled.
  2788. *
  2789. * Value: "CANCELLED"
  2790. */
  2791. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Cancelled;
  2792. /**
 * The request has been received.
  2794. *
  2795. * Value: "CREATED"
  2796. */
  2797. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Created;
  2798. /**
  2799. * The batch processing is done.
  2800. *
  2801. * Value: "DONE"
  2802. */
  2803. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Done;
  2804. /**
  2805. * Request is actively being processed.
  2806. *
  2807. * Value: "RUNNING"
  2808. */
  2809. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Running;
  2810. /**
 * Invalid; the state is unspecified.
  2812. *
  2813. * Value: "STATE_UNSPECIFIED"
  2814. */
  2815. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_StateUnspecified;
  2816. // ----------------------------------------------------------------------------
  2817. // GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.adult
  2818. /**
  2819. * It is likely that the image belongs to the specified vertical.
  2820. *
  2821. * Value: "LIKELY"
  2822. */
  2823. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Likely;
  2824. /**
  2825. * It is possible that the image belongs to the specified vertical.
  2826. *
  2827. * Value: "POSSIBLE"
  2828. */
  2829. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Possible;
  2830. /**
  2831. * Unknown likelihood.
  2832. *
  2833. * Value: "UNKNOWN"
  2834. */
  2835. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Unknown;
  2836. /**
  2837. * It is unlikely that the image belongs to the specified vertical.
  2838. *
  2839. * Value: "UNLIKELY"
  2840. */
  2841. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Unlikely;
  2842. /**
  2843. * It is very likely that the image belongs to the specified vertical.
  2844. *
  2845. * Value: "VERY_LIKELY"
  2846. */
  2847. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_VeryLikely;
  2848. /**
  2849. * It is very unlikely that the image belongs to the specified vertical.
  2850. *
  2851. * Value: "VERY_UNLIKELY"
  2852. */
  2853. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_VeryUnlikely;
  2854. // ----------------------------------------------------------------------------
  2855. // GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.medical
  2856. /**
  2857. * It is likely that the image belongs to the specified vertical.
  2858. *
  2859. * Value: "LIKELY"
  2860. */
  2861. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Likely;
  2862. /**
  2863. * It is possible that the image belongs to the specified vertical.
  2864. *
  2865. * Value: "POSSIBLE"
  2866. */
  2867. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Possible;
  2868. /**
  2869. * Unknown likelihood.
  2870. *
  2871. * Value: "UNKNOWN"
  2872. */
  2873. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Unknown;
  2874. /**
  2875. * It is unlikely that the image belongs to the specified vertical.
  2876. *
  2877. * Value: "UNLIKELY"
  2878. */
  2879. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Unlikely;
  2880. /**
  2881. * It is very likely that the image belongs to the specified vertical.
  2882. *
  2883. * Value: "VERY_LIKELY"
  2884. */
  2885. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_VeryLikely;
  2886. /**
  2887. * It is very unlikely that the image belongs to the specified vertical.
  2888. *
  2889. * Value: "VERY_UNLIKELY"
  2890. */
  2891. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_VeryUnlikely;
  2892. // ----------------------------------------------------------------------------
  2893. // GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.racy
  2894. /**
  2895. * It is likely that the image belongs to the specified vertical.
  2896. *
  2897. * Value: "LIKELY"
  2898. */
  2899. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Likely;
  2900. /**
  2901. * It is possible that the image belongs to the specified vertical.
  2902. *
  2903. * Value: "POSSIBLE"
  2904. */
  2905. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Possible;
  2906. /**
  2907. * Unknown likelihood.
  2908. *
  2909. * Value: "UNKNOWN"
  2910. */
  2911. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Unknown;
  2912. /**
  2913. * It is unlikely that the image belongs to the specified vertical.
  2914. *
  2915. * Value: "UNLIKELY"
  2916. */
  2917. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Unlikely;
  2918. /**
  2919. * It is very likely that the image belongs to the specified vertical.
  2920. *
  2921. * Value: "VERY_LIKELY"
  2922. */
  2923. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_VeryLikely;
  2924. /**
  2925. * It is very unlikely that the image belongs to the specified vertical.
  2926. *
  2927. * Value: "VERY_UNLIKELY"
  2928. */
  2929. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_VeryUnlikely;
  2930. // ----------------------------------------------------------------------------
  2931. // GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.spoof
  2932. /**
  2933. * It is likely that the image belongs to the specified vertical.
  2934. *
  2935. * Value: "LIKELY"
  2936. */
  2937. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Likely;
  2938. /**
  2939. * It is possible that the image belongs to the specified vertical.
  2940. *
  2941. * Value: "POSSIBLE"
  2942. */
  2943. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Possible;
  2944. /**
  2945. * Unknown likelihood.
  2946. *
  2947. * Value: "UNKNOWN"
  2948. */
  2949. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Unknown;
  2950. /**
  2951. * It is unlikely that the image belongs to the specified vertical.
  2952. *
  2953. * Value: "UNLIKELY"
  2954. */
  2955. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Unlikely;
  2956. /**
  2957. * It is very likely that the image belongs to the specified vertical.
  2958. *
  2959. * Value: "VERY_LIKELY"
  2960. */
  2961. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_VeryLikely;
  2962. /**
  2963. * It is very unlikely that the image belongs to the specified vertical.
  2964. *
  2965. * Value: "VERY_UNLIKELY"
  2966. */
  2967. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_VeryUnlikely;
  2968. // ----------------------------------------------------------------------------
  2969. // GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation.violence
  2970. /**
  2971. * It is likely that the image belongs to the specified vertical.
  2972. *
  2973. * Value: "LIKELY"
  2974. */
  2975. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Likely;
  2976. /**
  2977. * It is possible that the image belongs to the specified vertical.
  2978. *
  2979. * Value: "POSSIBLE"
  2980. */
  2981. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Possible;
  2982. /**
  2983. * Unknown likelihood.
  2984. *
  2985. * Value: "UNKNOWN"
  2986. */
  2987. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Unknown;
  2988. /**
  2989. * It is unlikely that the image belongs to the specified vertical.
  2990. *
  2991. * Value: "UNLIKELY"
  2992. */
  2993. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Unlikely;
  2994. /**
  2995. * It is very likely that the image belongs to the specified vertical.
  2996. *
  2997. * Value: "VERY_LIKELY"
  2998. */
  2999. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_VeryLikely;
  3000. /**
  3001. * It is very unlikely that the image belongs to the specified vertical.
  3002. *
  3003. * Value: "VERY_UNLIKELY"
  3004. */
  3005. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_VeryUnlikely;
  3006. // ----------------------------------------------------------------------------
  3007. // GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak.type
  3008. /**
  3009. * Line-wrapping break.
  3010. *
  3011. * Value: "EOL_SURE_SPACE"
  3012. */
  3013. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_EolSureSpace;
  3014. /**
  3015. * End-line hyphen that is not present in text; does not co-occur with
  3016. * `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`.
  3017. *
  3018. * Value: "HYPHEN"
  3019. */
  3020. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Hyphen;
  3021. /**
  3022. * Line break that ends a paragraph.
  3023. *
  3024. * Value: "LINE_BREAK"
  3025. */
  3026. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_LineBreak;
  3027. /**
  3028. * Regular space.
  3029. *
  3030. * Value: "SPACE"
  3031. */
  3032. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Space;
  3033. /**
  3034. * Sure space (very wide).
  3035. *
  3036. * Value: "SURE_SPACE"
  3037. */
  3038. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_SureSpace;
  3039. /**
  3040. * Unknown break label type.
  3041. *
  3042. * Value: "UNKNOWN"
  3043. */
  3044. GTLR_EXTERN NSString * const kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Unknown;
  3045. // ----------------------------------------------------------------------------
  3046. // GTLRVision_Landmark.type
  3047. /**
  3048. * Chin gnathion.
  3049. *
  3050. * Value: "CHIN_GNATHION"
  3051. */
  3052. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_ChinGnathion;
  3053. /**
  3054. * Chin left gonion.
  3055. *
  3056. * Value: "CHIN_LEFT_GONION"
  3057. */
  3058. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_ChinLeftGonion;
  3059. /**
  3060. * Chin right gonion.
  3061. *
  3062. * Value: "CHIN_RIGHT_GONION"
  3063. */
  3064. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_ChinRightGonion;
  3065. /**
  3066. * Forehead glabella.
  3067. *
  3068. * Value: "FOREHEAD_GLABELLA"
  3069. */
  3070. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_ForeheadGlabella;
  3071. /**
  3072. * Left ear tragion.
  3073. *
  3074. * Value: "LEFT_EAR_TRAGION"
  3075. */
  3076. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEarTragion;
  3077. /**
  3078. * Left eye.
  3079. *
  3080. * Value: "LEFT_EYE"
  3081. */
  3082. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEye;
  3083. /**
  3084. * Left eye, bottom boundary.
  3085. *
  3086. * Value: "LEFT_EYE_BOTTOM_BOUNDARY"
  3087. */
  3088. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyeBottomBoundary;
  3089. /**
  3090. * Left eyebrow, upper midpoint.
  3091. *
  3092. * Value: "LEFT_EYEBROW_UPPER_MIDPOINT"
  3093. */
  3094. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyebrowUpperMidpoint;
  3095. /**
  3096. * Left eye, left corner.
  3097. *
  3098. * Value: "LEFT_EYE_LEFT_CORNER"
  3099. */
  3100. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyeLeftCorner;
  3101. /**
  3102. * Left eye pupil.
  3103. *
  3104. * Value: "LEFT_EYE_PUPIL"
  3105. */
  3106. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyePupil;
  3107. /**
  3108. * Left eye, right corner.
  3109. *
  3110. * Value: "LEFT_EYE_RIGHT_CORNER"
  3111. */
  3112. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyeRightCorner;
  3113. /**
  3114. * Left eye, top boundary.
  3115. *
  3116. * Value: "LEFT_EYE_TOP_BOUNDARY"
  3117. */
  3118. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftEyeTopBoundary;
  3119. /**
  3120. * Left of left eyebrow.
  3121. *
  3122. * Value: "LEFT_OF_LEFT_EYEBROW"
  3123. */
  3124. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftOfLeftEyebrow;
  3125. /**
  3126. * Left of right eyebrow.
  3127. *
  3128. * Value: "LEFT_OF_RIGHT_EYEBROW"
  3129. */
  3130. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LeftOfRightEyebrow;
  3131. /**
  3132. * Lower lip.
  3133. *
  3134. * Value: "LOWER_LIP"
  3135. */
  3136. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_LowerLip;
  3137. /**
  3138. * Midpoint between eyes.
  3139. *
  3140. * Value: "MIDPOINT_BETWEEN_EYES"
  3141. */
  3142. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_MidpointBetweenEyes;
  3143. /**
  3144. * Mouth center.
  3145. *
  3146. * Value: "MOUTH_CENTER"
  3147. */
  3148. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_MouthCenter;
  3149. /**
  3150. * Mouth left.
  3151. *
  3152. * Value: "MOUTH_LEFT"
  3153. */
  3154. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_MouthLeft;
  3155. /**
  3156. * Mouth right.
  3157. *
  3158. * Value: "MOUTH_RIGHT"
  3159. */
  3160. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_MouthRight;
  3161. /**
  3162. * Nose, bottom center.
  3163. *
  3164. * Value: "NOSE_BOTTOM_CENTER"
  3165. */
  3166. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_NoseBottomCenter;
  3167. /**
  3168. * Nose, bottom left.
  3169. *
  3170. * Value: "NOSE_BOTTOM_LEFT"
  3171. */
  3172. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_NoseBottomLeft;
  3173. /**
  3174. * Nose, bottom right.
  3175. *
  3176. * Value: "NOSE_BOTTOM_RIGHT"
  3177. */
  3178. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_NoseBottomRight;
  3179. /**
  3180. * Nose tip.
  3181. *
  3182. * Value: "NOSE_TIP"
  3183. */
  3184. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_NoseTip;
  3185. /**
  3186. * Right ear tragion.
  3187. *
  3188. * Value: "RIGHT_EAR_TRAGION"
  3189. */
  3190. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEarTragion;
  3191. /**
  3192. * Right eye.
  3193. *
  3194. * Value: "RIGHT_EYE"
  3195. */
  3196. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEye;
  3197. /**
  3198. * Right eye, bottom boundary.
  3199. *
  3200. * Value: "RIGHT_EYE_BOTTOM_BOUNDARY"
  3201. */
  3202. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyeBottomBoundary;
  3203. /**
  3204. * Right eyebrow, upper midpoint.
  3205. *
  3206. * Value: "RIGHT_EYEBROW_UPPER_MIDPOINT"
  3207. */
  3208. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyebrowUpperMidpoint;
  3209. /**
  3210. * Right eye, left corner.
  3211. *
  3212. * Value: "RIGHT_EYE_LEFT_CORNER"
  3213. */
  3214. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyeLeftCorner;
  3215. /**
  3216. * Right eye pupil.
  3217. *
  3218. * Value: "RIGHT_EYE_PUPIL"
  3219. */
  3220. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyePupil;
  3221. /**
  3222. * Right eye, right corner.
  3223. *
  3224. * Value: "RIGHT_EYE_RIGHT_CORNER"
  3225. */
  3226. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyeRightCorner;
  3227. /**
  3228. * Right eye, top boundary.
  3229. *
  3230. * Value: "RIGHT_EYE_TOP_BOUNDARY"
  3231. */
  3232. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightEyeTopBoundary;
  3233. /**
  3234. * Right of left eyebrow.
  3235. *
  3236. * Value: "RIGHT_OF_LEFT_EYEBROW"
  3237. */
  3238. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightOfLeftEyebrow;
  3239. /**
  3240. * Right of right eyebrow.
  3241. *
  3242. * Value: "RIGHT_OF_RIGHT_EYEBROW"
  3243. */
  3244. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_RightOfRightEyebrow;
  3245. /**
  3246. * Unknown face landmark detected. Should not be filled.
  3247. *
  3248. * Value: "UNKNOWN_LANDMARK"
  3249. */
  3250. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_UnknownLandmark;
  3251. /**
  3252. * Upper lip.
  3253. *
  3254. * Value: "UPPER_LIP"
  3255. */
  3256. GTLR_EXTERN NSString * const kGTLRVision_Landmark_Type_UpperLip;
  3257. // ----------------------------------------------------------------------------
  3258. // GTLRVision_OperationMetadata.state
  3259. /**
  3260. * The batch processing was cancelled.
  3261. *
  3262. * Value: "CANCELLED"
  3263. */
  3264. GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_Cancelled;
  3265. /**
  3266. * Request is received.
  3267. *
  3268. * Value: "CREATED"
  3269. */
  3270. GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_Created;
  3271. /**
  3272. * The batch processing is done.
  3273. *
  3274. * Value: "DONE"
  3275. */
  3276. GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_Done;
  3277. /**
  3278. * Request is actively being processed.
  3279. *
  3280. * Value: "RUNNING"
  3281. */
  3282. GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_Running;
  3283. /**
  3284. * Invalid.
  3285. *
  3286. * Value: "STATE_UNSPECIFIED"
  3287. */
  3288. GTLR_EXTERN NSString * const kGTLRVision_OperationMetadata_State_StateUnspecified;
  3289. // ----------------------------------------------------------------------------
  3290. // GTLRVision_SafeSearchAnnotation.adult
  3291. /**
  3292. * It is likely that the image belongs to the specified vertical.
  3293. *
  3294. * Value: "LIKELY"
  3295. */
  3296. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_Likely;
  3297. /**
  3298. * It is possible that the image belongs to the specified vertical.
  3299. *
  3300. * Value: "POSSIBLE"
  3301. */
  3302. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_Possible;
  3303. /**
  3304. * Unknown likelihood.
  3305. *
  3306. * Value: "UNKNOWN"
  3307. */
  3308. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_Unknown;
  3309. /**
  3310. * It is unlikely that the image belongs to the specified vertical.
  3311. *
  3312. * Value: "UNLIKELY"
  3313. */
  3314. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_Unlikely;
  3315. /**
  3316. * It is very likely that the image belongs to the specified vertical.
  3317. *
  3318. * Value: "VERY_LIKELY"
  3319. */
  3320. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_VeryLikely;
  3321. /**
  3322. * It is very unlikely that the image belongs to the specified vertical.
  3323. *
  3324. * Value: "VERY_UNLIKELY"
  3325. */
  3326. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Adult_VeryUnlikely;
  3327. // ----------------------------------------------------------------------------
  3328. // GTLRVision_SafeSearchAnnotation.medical
  3329. /**
  3330. * It is likely that the image belongs to the specified vertical.
  3331. *
  3332. * Value: "LIKELY"
  3333. */
  3334. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_Likely;
  3335. /**
  3336. * It is possible that the image belongs to the specified vertical.
  3337. *
  3338. * Value: "POSSIBLE"
  3339. */
  3340. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_Possible;
  3341. /**
  3342. * Unknown likelihood.
  3343. *
  3344. * Value: "UNKNOWN"
  3345. */
  3346. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_Unknown;
  3347. /**
  3348. * It is unlikely that the image belongs to the specified vertical.
  3349. *
  3350. * Value: "UNLIKELY"
  3351. */
  3352. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_Unlikely;
  3353. /**
  3354. * It is very likely that the image belongs to the specified vertical.
  3355. *
  3356. * Value: "VERY_LIKELY"
  3357. */
  3358. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_VeryLikely;
  3359. /**
  3360. * It is very unlikely that the image belongs to the specified vertical.
  3361. *
  3362. * Value: "VERY_UNLIKELY"
  3363. */
  3364. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Medical_VeryUnlikely;
  3365. // ----------------------------------------------------------------------------
  3366. // GTLRVision_SafeSearchAnnotation.racy
  3367. /**
  3368. * It is likely that the image belongs to the specified vertical.
  3369. *
  3370. * Value: "LIKELY"
  3371. */
  3372. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_Likely;
  3373. /**
  3374. * It is possible that the image belongs to the specified vertical.
  3375. *
  3376. * Value: "POSSIBLE"
  3377. */
  3378. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_Possible;
  3379. /**
  3380. * Unknown likelihood.
  3381. *
  3382. * Value: "UNKNOWN"
  3383. */
  3384. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_Unknown;
  3385. /**
  3386. * It is unlikely that the image belongs to the specified vertical.
  3387. *
  3388. * Value: "UNLIKELY"
  3389. */
  3390. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_Unlikely;
  3391. /**
  3392. * It is very likely that the image belongs to the specified vertical.
  3393. *
  3394. * Value: "VERY_LIKELY"
  3395. */
  3396. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_VeryLikely;
  3397. /**
  3398. * It is very unlikely that the image belongs to the specified vertical.
  3399. *
  3400. * Value: "VERY_UNLIKELY"
  3401. */
  3402. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Racy_VeryUnlikely;
  3403. // ----------------------------------------------------------------------------
  3404. // GTLRVision_SafeSearchAnnotation.spoof
  3405. /**
  3406. * It is likely that the image belongs to the specified vertical.
  3407. *
  3408. * Value: "LIKELY"
  3409. */
  3410. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_Likely;
  3411. /**
  3412. * It is possible that the image belongs to the specified vertical.
  3413. *
  3414. * Value: "POSSIBLE"
  3415. */
  3416. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_Possible;
  3417. /**
  3418. * Unknown likelihood.
  3419. *
  3420. * Value: "UNKNOWN"
  3421. */
  3422. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_Unknown;
  3423. /**
  3424. * It is unlikely that the image belongs to the specified vertical.
  3425. *
  3426. * Value: "UNLIKELY"
  3427. */
  3428. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_Unlikely;
  3429. /**
  3430. * It is very likely that the image belongs to the specified vertical.
  3431. *
  3432. * Value: "VERY_LIKELY"
  3433. */
  3434. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_VeryLikely;
  3435. /**
  3436. * It is very unlikely that the image belongs to the specified vertical.
  3437. *
  3438. * Value: "VERY_UNLIKELY"
  3439. */
  3440. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Spoof_VeryUnlikely;
  3441. // ----------------------------------------------------------------------------
  3442. // GTLRVision_SafeSearchAnnotation.violence
  3443. /**
  3444. * It is likely that the image belongs to the specified vertical.
  3445. *
  3446. * Value: "LIKELY"
  3447. */
  3448. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_Likely;
  3449. /**
  3450. * It is possible that the image belongs to the specified vertical.
  3451. *
  3452. * Value: "POSSIBLE"
  3453. */
  3454. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_Possible;
  3455. /**
  3456. * Unknown likelihood.
  3457. *
  3458. * Value: "UNKNOWN"
  3459. */
  3460. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_Unknown;
  3461. /**
  3462. * It is unlikely that the image belongs to the specified vertical.
  3463. *
  3464. * Value: "UNLIKELY"
  3465. */
  3466. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_Unlikely;
  3467. /**
  3468. * It is very likely that the image belongs to the specified vertical.
  3469. *
  3470. * Value: "VERY_LIKELY"
  3471. */
  3472. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_VeryLikely;
  3473. /**
  3474. * It is very unlikely that the image belongs to the specified vertical.
  3475. *
  3476. * Value: "VERY_UNLIKELY"
  3477. */
  3478. GTLR_EXTERN NSString * const kGTLRVision_SafeSearchAnnotation_Violence_VeryUnlikely;
  3479. /**
  3480. * Request message for the `AddProductToProductSet` method.
  3481. */
  3482. @interface GTLRVision_AddProductToProductSetRequest : GTLRObject
  3483. /**
  3484. * The resource name for the Product to be added to this ProductSet.
  3485. * Format is:
  3486. * `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
  3487. */
  3488. @property(nonatomic, copy, nullable) NSString *product;
  3489. @end
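/*
 * Example (Objective-C): a minimal sketch of populating this request with the
 * resource-name format documented above. PROJECT_ID, LOC_ID and PRODUCT_ID are
 * placeholders, not real identifiers.
 *
 *    GTLRVision_AddProductToProductSetRequest *addRequest =
 *        [[GTLRVision_AddProductToProductSetRequest alloc] init];
 *    addRequest.product =
 *        @"projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID";
 */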
  3490. /**
  3491. * Response to a single file annotation request. A file may contain one or more
  3492. * images, which individually have their own responses.
  3493. */
  3494. @interface GTLRVision_AnnotateFileResponse : GTLRObject
  3495. /** Information about the file for which this response is generated. */
  3496. @property(nonatomic, strong, nullable) GTLRVision_InputConfig *inputConfig;
  3497. /** Individual responses to images found within the file. */
  3498. @property(nonatomic, strong, nullable) NSArray<GTLRVision_AnnotateImageResponse *> *responses;
  3499. @end
  3500. /**
  3501. * Request for performing Google Cloud Vision API tasks over a user-provided
  3502. * image, with user-requested features.
  3503. */
  3504. @interface GTLRVision_AnnotateImageRequest : GTLRObject
  3505. /** Requested features. */
  3506. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Feature *> *features;
  3507. /** The image to be processed. */
  3508. @property(nonatomic, strong, nullable) GTLRVision_Image *image;
  3509. /** Additional context that may accompany the image. */
  3510. @property(nonatomic, strong, nullable) GTLRVision_ImageContext *imageContext;
  3511. @end
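/*
 * Example (Objective-C): a minimal sketch of assembling a single annotation
 * request from the classes declared in this header. The GTLRVision_Image
 * instance is assumed to have been configured elsewhere (for example, from
 * inline bytes or a Cloud Storage source).
 *
 *    GTLRVision_Feature *labels = [[GTLRVision_Feature alloc] init];
 *    labels.type = kGTLRVision_Feature_Type_LabelDetection;
 *    labels.maxResults = @10;
 *
 *    GTLRVision_AnnotateImageRequest *request =
 *        [[GTLRVision_AnnotateImageRequest alloc] init];
 *    request.image = image;        // GTLRVision_Image configured elsewhere
 *    request.features = @[ labels ];
 */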
  3512. /**
  3513. * Response to an image annotation request.
  3514. */
  3515. @interface GTLRVision_AnnotateImageResponse : GTLRObject
  3516. /**
  3517. * If present, contextual information is needed to understand where this image
  3518. * comes from.
  3519. */
  3520. @property(nonatomic, strong, nullable) GTLRVision_ImageAnnotationContext *context;
  3521. /** If present, crop hints have completed successfully. */
  3522. @property(nonatomic, strong, nullable) GTLRVision_CropHintsAnnotation *cropHintsAnnotation;
  3523. /**
  3524. * If set, represents the error message for the operation.
  3525. * Note that filled-in image annotations are guaranteed to be
  3526. * correct, even when `error` is set.
  3527. */
  3528. @property(nonatomic, strong, nullable) GTLRVision_Status *error;
  3529. /** If present, face detection has completed successfully. */
  3530. @property(nonatomic, strong, nullable) NSArray<GTLRVision_FaceAnnotation *> *faceAnnotations;
  3531. /**
  3532. * If present, text (OCR) detection or document (OCR) text detection has
  3533. * completed successfully.
  3534. * This annotation provides the structural hierarchy for the OCR detected
  3535. * text.
  3536. */
  3537. @property(nonatomic, strong, nullable) GTLRVision_TextAnnotation *fullTextAnnotation;
  3538. /** If present, image properties were extracted successfully. */
  3539. @property(nonatomic, strong, nullable) GTLRVision_ImageProperties *imagePropertiesAnnotation;
  3540. /** If present, label detection has completed successfully. */
  3541. @property(nonatomic, strong, nullable) NSArray<GTLRVision_EntityAnnotation *> *labelAnnotations;
  3542. /** If present, landmark detection has completed successfully. */
  3543. @property(nonatomic, strong, nullable) NSArray<GTLRVision_EntityAnnotation *> *landmarkAnnotations;
  3544. /**
  3545. * If present, localized object detection has completed successfully.
  3546. * This will be sorted descending by confidence score.
  3547. */
  3548. @property(nonatomic, strong, nullable) NSArray<GTLRVision_LocalizedObjectAnnotation *> *localizedObjectAnnotations;
  3549. /** If present, logo detection has completed successfully. */
  3550. @property(nonatomic, strong, nullable) NSArray<GTLRVision_EntityAnnotation *> *logoAnnotations;
  3551. /** If present, product search has completed successfully. */
  3552. @property(nonatomic, strong, nullable) GTLRVision_ProductSearchResults *productSearchResults;
  3553. /** If present, safe-search annotation has completed successfully. */
  3554. @property(nonatomic, strong, nullable) GTLRVision_SafeSearchAnnotation *safeSearchAnnotation;
  3555. /** If present, text (OCR) detection has completed successfully. */
  3556. @property(nonatomic, strong, nullable) NSArray<GTLRVision_EntityAnnotation *> *textAnnotations;
  3557. /** If present, web detection has completed successfully. */
  3558. @property(nonatomic, strong, nullable) GTLRVision_WebDetection *webDetection;
  3559. @end
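/*
 * Example (Objective-C): a minimal sketch of reading one response, assuming
 * `response` came back from a batch annotation call made elsewhere. Only
 * properties declared above are used.
 *
 *    if (response.error != nil) {
 *      // Per-image error; any annotations that were filled in remain valid.
 *    }
 *    for (GTLRVision_EntityAnnotation *label in response.labelAnnotations) {
 *      NSLog(@"%@ (score %.2f)",
 *            label.descriptionProperty, label.score.floatValue);
 *    }
 */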
  3560. /**
  3561. * An offline file annotation request.
  3562. */
  3563. @interface GTLRVision_AsyncAnnotateFileRequest : GTLRObject
  3564. /** Required. Requested features. */
  3565. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Feature *> *features;
  3566. /** Additional context that may accompany the image(s) in the file. */
  3567. @property(nonatomic, strong, nullable) GTLRVision_ImageContext *imageContext;
  3568. /** Required. Information about the input file. */
  3569. @property(nonatomic, strong, nullable) GTLRVision_InputConfig *inputConfig;
  3570. /** Required. The desired output location and metadata (e.g. format). */
  3571. @property(nonatomic, strong, nullable) GTLRVision_OutputConfig *outputConfig;
  3572. @end
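/*
 * Example (Objective-C): a minimal sketch of an offline PDF annotation request.
 * It assumes GTLRVision_InputConfig exposes `gcsSource`/`mimeType` and
 * GTLRVision_OutputConfig exposes `gcsDestination`, mirroring the REST schema;
 * check their declarations elsewhere in this header.
 *
 *    GTLRVision_GcsSource *source = [[GTLRVision_GcsSource alloc] init];
 *    source.uri = @"gs://bucket-name/document.pdf";
 *
 *    GTLRVision_InputConfig *input = [[GTLRVision_InputConfig alloc] init];
 *    input.gcsSource = source;
 *    input.mimeType = @"application/pdf";
 *
 *    GTLRVision_GcsDestination *destination = [[GTLRVision_GcsDestination alloc] init];
 *    destination.uri = @"gs://bucket-name/prefix/here/";  // prefixes end in '/'
 *
 *    GTLRVision_OutputConfig *output = [[GTLRVision_OutputConfig alloc] init];
 *    output.gcsDestination = destination;
 *
 *    GTLRVision_Feature *ocr = [[GTLRVision_Feature alloc] init];
 *    ocr.type = kGTLRVision_Feature_Type_DocumentTextDetection;
 *
 *    GTLRVision_AsyncAnnotateFileRequest *fileRequest =
 *        [[GTLRVision_AsyncAnnotateFileRequest alloc] init];
 *    fileRequest.inputConfig = input;
 *    fileRequest.outputConfig = output;
 *    fileRequest.features = @[ ocr ];
 */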
  3573. /**
  3574. * The response for a single offline file annotation request.
  3575. */
  3576. @interface GTLRVision_AsyncAnnotateFileResponse : GTLRObject
  3577. /** The output location and metadata from AsyncAnnotateFileRequest. */
  3578. @property(nonatomic, strong, nullable) GTLRVision_OutputConfig *outputConfig;
  3579. @end
  3580. /**
  3581. * Multiple async file annotation requests are batched into a single service
  3582. * call.
  3583. */
  3584. @interface GTLRVision_AsyncBatchAnnotateFilesRequest : GTLRObject
  3585. /** Individual async file annotation requests for this batch. */
  3586. @property(nonatomic, strong, nullable) NSArray<GTLRVision_AsyncAnnotateFileRequest *> *requests;
  3587. @end
  3588. /**
  3589. * Response to an async batch file annotation request.
  3590. */
  3591. @interface GTLRVision_AsyncBatchAnnotateFilesResponse : GTLRObject
  3592. /**
  3593. * The list of file annotation responses, one for each request in
  3594. * AsyncBatchAnnotateFilesRequest.
  3595. */
  3596. @property(nonatomic, strong, nullable) NSArray<GTLRVision_AsyncAnnotateFileResponse *> *responses;
  3597. @end
  3598. /**
  3599. * Multiple image annotation requests are batched into a single service call.
  3600. */
  3601. @interface GTLRVision_BatchAnnotateImagesRequest : GTLRObject
  3602. /** Individual image annotation requests for this batch. */
  3603. @property(nonatomic, strong, nullable) NSArray<GTLRVision_AnnotateImageRequest *> *requests;
  3604. @end
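/*
 * Example (Objective-C): a sketch of batching one or more
 * GTLRVision_AnnotateImageRequest objects (such as the one sketched above)
 * into a single service call.
 *
 *    GTLRVision_BatchAnnotateImagesRequest *batch =
 *        [[GTLRVision_BatchAnnotateImagesRequest alloc] init];
 *    batch.requests = @[ request ];
 */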
  3605. /**
  3606. * Response to a batch image annotation request.
  3607. */
  3608. @interface GTLRVision_BatchAnnotateImagesResponse : GTLRObject
  3609. /** Individual responses to image annotation requests within the batch. */
  3610. @property(nonatomic, strong, nullable) NSArray<GTLRVision_AnnotateImageResponse *> *responses;
  3611. @end
  3612. /**
  3613. * Metadata for the batch operations such as the current state.
  3614. * This is included in the `metadata` field of the `Operation` returned by the
  3615. * `GetOperation` call of the `google::longrunning::Operations` service.
  3616. */
  3617. @interface GTLRVision_BatchOperationMetadata : GTLRObject
  3618. /**
  3619. * The time when the batch request is finished and
  3620. * google.longrunning.Operation.done is set to true.
  3621. */
  3622. @property(nonatomic, strong, nullable) GTLRDateTime *endTime;
  3623. /**
  3624. * The current state of the batch operation.
  3625. *
  3626. * Likely values:
  3627. * @arg @c kGTLRVision_BatchOperationMetadata_State_Cancelled The request is
  3628. * done after the longrunning.Operations.CancelOperation has
  3629. * been called by the user. Any records that were processed before the
  3630. * cancel command are output as specified in the request. (Value:
  3631. * "CANCELLED")
  3632. * @arg @c kGTLRVision_BatchOperationMetadata_State_Failed The request is
  3633. * done and no item has been successfully processed. (Value: "FAILED")
  3634. * @arg @c kGTLRVision_BatchOperationMetadata_State_Processing Request is
  3635. * actively being processed. (Value: "PROCESSING")
  3636. * @arg @c kGTLRVision_BatchOperationMetadata_State_StateUnspecified Invalid.
  3637. * (Value: "STATE_UNSPECIFIED")
  3638. * @arg @c kGTLRVision_BatchOperationMetadata_State_Successful The request is
  3639. * done and at least one item has been successfully
  3640. * processed. (Value: "SUCCESSFUL")
  3641. */
  3642. @property(nonatomic, copy, nullable) NSString *state;
  3643. /** The time when the batch request was submitted to the server. */
  3644. @property(nonatomic, strong, nullable) GTLRDateTime *submitTime;
  3645. @end
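/*
 * Example (Objective-C): a sketch of checking the batch state against the
 * string constants declared for this class; `metadata` is assumed to be the
 * GTLRVision_BatchOperationMetadata pulled from a long-running operation.
 *
 *    if ([metadata.state isEqual:kGTLRVision_BatchOperationMetadata_State_Successful]) {
 *      // At least one item was processed; results are in the requested output.
 *    } else if ([metadata.state isEqual:kGTLRVision_BatchOperationMetadata_State_Failed]) {
 *      // No item was processed successfully.
 *    }
 */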
  3646. /**
  3647. * Logical element on the page.
  3648. */
  3649. @interface GTLRVision_Block : GTLRObject
  3650. /**
 * Detected block type (text, image, etc.) for this block.
  3652. *
  3653. * Likely values:
  3654. * @arg @c kGTLRVision_Block_BlockType_Barcode Barcode block. (Value:
  3655. * "BARCODE")
  3656. * @arg @c kGTLRVision_Block_BlockType_Picture Image block. (Value:
  3657. * "PICTURE")
  3658. * @arg @c kGTLRVision_Block_BlockType_Ruler Horizontal/vertical line box.
  3659. * (Value: "RULER")
  3660. * @arg @c kGTLRVision_Block_BlockType_Table Table block. (Value: "TABLE")
  3661. * @arg @c kGTLRVision_Block_BlockType_Text Regular text block. (Value:
  3662. * "TEXT")
  3663. * @arg @c kGTLRVision_Block_BlockType_Unknown Unknown block type. (Value:
  3664. * "UNKNOWN")
  3665. */
  3666. @property(nonatomic, copy, nullable) NSString *blockType;
  3667. /**
  3668. * The bounding box for the block.
  3669. * The vertices are in the order of top-left, top-right, bottom-right,
  3670. * bottom-left. When a rotation of the bounding box is detected the rotation
  3671. * is represented as around the top-left corner as defined when the text is
  3672. * read in the 'natural' orientation.
  3673. * For example:
  3674. * * when the text is horizontal it might look like:
  3675. * 0----1
  3676. * | |
  3677. * 3----2
  3678. * * when it's rotated 180 degrees around the top-left corner it becomes:
  3679. * 2----3
  3680. * | |
  3681. * 1----0
  3682. * and the vertex order will still be (0, 1, 2, 3).
  3683. */
  3684. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingBox;
  3685. /**
  3686. * Confidence of the OCR results on the block. Range [0, 1].
  3687. *
  3688. * Uses NSNumber of floatValue.
  3689. */
  3690. @property(nonatomic, strong, nullable) NSNumber *confidence;
/** List of paragraphs in this block (if this block is of type text). */
  3692. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Paragraph *> *paragraphs;
  3693. /** Additional information detected for the block. */
  3694. @property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
  3695. @end
  3696. /**
  3697. * A bounding polygon for the detected image annotation.
  3698. */
  3699. @interface GTLRVision_BoundingPoly : GTLRObject
  3700. /** The bounding polygon normalized vertices. */
  3701. @property(nonatomic, strong, nullable) NSArray<GTLRVision_NormalizedVertex *> *normalizedVertices;
  3702. /** The bounding polygon vertices. */
  3703. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Vertex *> *vertices;
  3704. @end
  3705. /**
  3706. * The request message for Operations.CancelOperation.
  3707. */
  3708. @interface GTLRVision_CancelOperationRequest : GTLRObject
  3709. @end
  3710. /**
  3711. * Represents a color in the RGBA color space. This representation is designed
  3712. * for simplicity of conversion to/from color representations in various
  3713. * languages over compactness; for example, the fields of this representation
  3714. * can be trivially provided to the constructor of "java.awt.Color" in Java; it
  3715. * can also be trivially provided to UIColor's "+colorWithRed:green:blue:alpha"
  3716. * method in iOS; and, with just a little work, it can be easily formatted into
  3717. * a CSS "rgba()" string in JavaScript, as well. Here are some examples:
  3718. * Example (Java):
  3719. * import com.google.type.Color;
  3720. * // ...
  3721. * public static java.awt.Color fromProto(Color protocolor) {
  3722. * float alpha = protocolor.hasAlpha()
  3723. * ? protocolor.getAlpha().getValue()
 * : 1.0f;
  3725. * return new java.awt.Color(
  3726. * protocolor.getRed(),
  3727. * protocolor.getGreen(),
  3728. * protocolor.getBlue(),
  3729. * alpha);
  3730. * }
  3731. * public static Color toProto(java.awt.Color color) {
  3732. * float red = (float) color.getRed();
  3733. * float green = (float) color.getGreen();
  3734. * float blue = (float) color.getBlue();
 * float denominator = 255.0f;
  3736. * Color.Builder resultBuilder =
  3737. * Color
  3738. * .newBuilder()
  3739. * .setRed(red / denominator)
  3740. * .setGreen(green / denominator)
  3741. * .setBlue(blue / denominator);
  3742. * int alpha = color.getAlpha();
  3743. * if (alpha != 255) {
 * resultBuilder.setAlpha(
  3745. * FloatValue
  3746. * .newBuilder()
  3747. * .setValue(((float) alpha) / denominator)
  3748. * .build());
  3749. * }
  3750. * return resultBuilder.build();
  3751. * }
  3752. * // ...
  3753. * Example (iOS / Obj-C):
  3754. * // ...
  3755. * static UIColor* fromProto(Color* protocolor) {
  3756. * float red = [protocolor red];
  3757. * float green = [protocolor green];
  3758. * float blue = [protocolor blue];
  3759. * FloatValue* alpha_wrapper = [protocolor alpha];
  3760. * float alpha = 1.0;
  3761. * if (alpha_wrapper != nil) {
  3762. * alpha = [alpha_wrapper value];
  3763. * }
  3764. * return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];
  3765. * }
  3766. * static Color* toProto(UIColor* color) {
  3767. * CGFloat red, green, blue, alpha;
  3768. * if (![color getRed:&red green:&green blue:&blue alpha:&alpha]) {
  3769. * return nil;
  3770. * }
  3771. * Color* result = [[Color alloc] init];
  3772. * [result setRed:red];
  3773. * [result setGreen:green];
  3774. * [result setBlue:blue];
  3775. * if (alpha <= 0.9999) {
  3776. * [result setAlpha:floatWrapperWithValue(alpha)];
  3777. * }
  3778. * [result autorelease];
  3779. * return result;
  3780. * }
  3781. * // ...
  3782. * Example (JavaScript):
  3783. * // ...
  3784. * var protoToCssColor = function(rgb_color) {
  3785. * var redFrac = rgb_color.red || 0.0;
  3786. * var greenFrac = rgb_color.green || 0.0;
  3787. * var blueFrac = rgb_color.blue || 0.0;
  3788. * var red = Math.floor(redFrac * 255);
  3789. * var green = Math.floor(greenFrac * 255);
  3790. * var blue = Math.floor(blueFrac * 255);
  3791. * if (!('alpha' in rgb_color)) {
  3792. * return rgbToCssColor_(red, green, blue);
  3793. * }
  3794. * var alphaFrac = rgb_color.alpha.value || 0.0;
  3795. * var rgbParams = [red, green, blue].join(',');
  3796. * return ['rgba(', rgbParams, ',', alphaFrac, ')'].join('');
  3797. * };
  3798. * var rgbToCssColor_ = function(red, green, blue) {
  3799. * var rgbNumber = new Number((red << 16) | (green << 8) | blue);
  3800. * var hexString = rgbNumber.toString(16);
  3801. * var missingZeros = 6 - hexString.length;
  3802. * var resultBuilder = ['#'];
  3803. * for (var i = 0; i < missingZeros; i++) {
  3804. * resultBuilder.push('0');
  3805. * }
  3806. * resultBuilder.push(hexString);
  3807. * return resultBuilder.join('');
  3808. * };
  3809. * // ...
  3810. */
  3811. @interface GTLRVision_Color : GTLRObject
  3812. /**
  3813. * The fraction of this color that should be applied to the pixel. That is,
  3814. * the final pixel color is defined by the equation:
  3815. * pixel color = alpha * (this color) + (1.0 - alpha) * (background color)
  3816. * This means that a value of 1.0 corresponds to a solid color, whereas
  3817. * a value of 0.0 corresponds to a completely transparent color. This
  3818. * uses a wrapper message rather than a simple float scalar so that it is
  3819. * possible to distinguish between a default value and the value being unset.
  3820. * If omitted, this color object is to be rendered as a solid color
  3821. * (as if the alpha value had been explicitly given with a value of 1.0).
  3822. *
  3823. * Uses NSNumber of floatValue.
  3824. */
  3825. @property(nonatomic, strong, nullable) NSNumber *alpha;
  3826. /**
  3827. * The amount of blue in the color as a value in the interval [0, 1].
  3828. *
  3829. * Uses NSNumber of floatValue.
  3830. */
  3831. @property(nonatomic, strong, nullable) NSNumber *blue;
  3832. /**
  3833. * The amount of green in the color as a value in the interval [0, 1].
  3834. *
  3835. * Uses NSNumber of floatValue.
  3836. */
  3837. @property(nonatomic, strong, nullable) NSNumber *green;
  3838. /**
  3839. * The amount of red in the color as a value in the interval [0, 1].
  3840. *
  3841. * Uses NSNumber of floatValue.
  3842. */
  3843. @property(nonatomic, strong, nullable) NSNumber *red;
  3844. @end
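/*
 * Example (Objective-C): a sketch of converting this object's NSNumber channel
 * values into a UIColor on iOS, treating a missing alpha as 1.0 as described
 * above.
 *
 *    static UIColor *ColorFromGTLRVisionColor(GTLRVision_Color *color) {
 *      CGFloat alpha = (color.alpha != nil) ? color.alpha.floatValue : 1.0;
 *      return [UIColor colorWithRed:color.red.floatValue
 *                             green:color.green.floatValue
 *                              blue:color.blue.floatValue
 *                             alpha:alpha];
 *    }
 */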
  3845. /**
  3846. * Color information consists of RGB channels, score, and the fraction of
  3847. * the image that the color occupies in the image.
  3848. */
  3849. @interface GTLRVision_ColorInfo : GTLRObject
  3850. /** RGB components of the color. */
  3851. @property(nonatomic, strong, nullable) GTLRVision_Color *color;
  3852. /**
  3853. * The fraction of pixels the color occupies in the image.
  3854. * Value in range [0, 1].
  3855. *
  3856. * Uses NSNumber of floatValue.
  3857. */
  3858. @property(nonatomic, strong, nullable) NSNumber *pixelFraction;
  3859. /**
  3860. * Image-specific score for this color. Value in range [0, 1].
  3861. *
  3862. * Uses NSNumber of floatValue.
  3863. */
  3864. @property(nonatomic, strong, nullable) NSNumber *score;
  3865. @end
  3866. /**
  3867. * Single crop hint that is used to generate a new crop when serving an image.
  3868. */
  3869. @interface GTLRVision_CropHint : GTLRObject
  3870. /**
  3871. * The bounding polygon for the crop region. The coordinates of the bounding
  3872. * box are in the original image's scale.
  3873. */
  3874. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
  3875. /**
  3876. * Confidence of this being a salient region. Range [0, 1].
  3877. *
  3878. * Uses NSNumber of floatValue.
  3879. */
  3880. @property(nonatomic, strong, nullable) NSNumber *confidence;
  3881. /**
  3882. * Fraction of importance of this salient region with respect to the original
  3883. * image.
  3884. *
  3885. * Uses NSNumber of floatValue.
  3886. */
  3887. @property(nonatomic, strong, nullable) NSNumber *importanceFraction;
  3888. @end
  3889. /**
  3890. * Set of crop hints that are used to generate new crops when serving images.
  3891. */
  3892. @interface GTLRVision_CropHintsAnnotation : GTLRObject
  3893. /** Crop hint results. */
  3894. @property(nonatomic, strong, nullable) NSArray<GTLRVision_CropHint *> *cropHints;
  3895. @end
  3896. /**
  3897. * Parameters for crop hints annotation request.
  3898. */
  3899. @interface GTLRVision_CropHintsParams : GTLRObject
  3900. /**
  3901. * Aspect ratios in floats, representing the ratio of the width to the height
  3902. * of the image. For example, if the desired aspect ratio is 4/3, the
  3903. * corresponding float value should be 1.33333. If not specified, the
  3904. * best possible crop is returned. The number of provided aspect ratios is
  3905. * limited to a maximum of 16; any aspect ratios provided after the 16th are
  3906. * ignored.
  3907. *
  3908. * Uses NSNumber of floatValue.
  3909. */
  3910. @property(nonatomic, strong, nullable) NSArray<NSNumber *> *aspectRatios;
  3911. @end
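/*
 * Example (Objective-C): a sketch of requesting 4:3 and 16:9 crop hints,
 * expressed as width/height floats as described above. The params are then
 * carried on the request via GTLRVision_ImageContext (declared elsewhere in
 * this header).
 *
 *    GTLRVision_CropHintsParams *cropParams =
 *        [[GTLRVision_CropHintsParams alloc] init];
 *    cropParams.aspectRatios = @[ @1.33333f, @1.77778f ];
 */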
  3912. /**
  3913. * Detected start or end of a structural component.
  3914. */
  3915. @interface GTLRVision_DetectedBreak : GTLRObject
  3916. /**
  3917. * True if break prepends the element.
  3918. *
  3919. * Uses NSNumber of boolValue.
  3920. */
  3921. @property(nonatomic, strong, nullable) NSNumber *isPrefix;
  3922. /**
  3923. * Detected break type.
  3924. *
  3925. * Likely values:
  3926. * @arg @c kGTLRVision_DetectedBreak_Type_EolSureSpace Line-wrapping break.
  3927. * (Value: "EOL_SURE_SPACE")
  3928. * @arg @c kGTLRVision_DetectedBreak_Type_Hyphen End-line hyphen that is not
  3929. * present in text; does not co-occur with
  3930. * `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. (Value: "HYPHEN")
  3931. * @arg @c kGTLRVision_DetectedBreak_Type_LineBreak Line break that ends a
  3932. * paragraph. (Value: "LINE_BREAK")
  3933. * @arg @c kGTLRVision_DetectedBreak_Type_Space Regular space. (Value:
  3934. * "SPACE")
  3935. * @arg @c kGTLRVision_DetectedBreak_Type_SureSpace Sure space (very wide).
  3936. * (Value: "SURE_SPACE")
  3937. * @arg @c kGTLRVision_DetectedBreak_Type_Unknown Unknown break label type.
  3938. * (Value: "UNKNOWN")
  3939. */
  3940. @property(nonatomic, copy, nullable) NSString *type;
  3941. @end
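/*
 * Example (Objective-C): one reasonable mapping from a detected break back to a
 * separator character when reassembling OCR text; `breakInfo` is assumed to be
 * a GTLRVision_DetectedBreak taken from a text annotation.
 *
 *    NSString *separator = @"";
 *    if ([breakInfo.type isEqual:kGTLRVision_DetectedBreak_Type_Space] ||
 *        [breakInfo.type isEqual:kGTLRVision_DetectedBreak_Type_SureSpace]) {
 *      separator = @" ";
 *    } else if ([breakInfo.type isEqual:kGTLRVision_DetectedBreak_Type_LineBreak] ||
 *               [breakInfo.type isEqual:kGTLRVision_DetectedBreak_Type_EolSureSpace]) {
 *      separator = @"\n";
 *    }  // HYPHEN adds nothing: the hyphen is not present in the text.
 */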
  3942. /**
  3943. * Detected language for a structural component.
  3944. */
  3945. @interface GTLRVision_DetectedLanguage : GTLRObject
  3946. /**
  3947. * Confidence of detected language. Range [0, 1].
  3948. *
  3949. * Uses NSNumber of floatValue.
  3950. */
  3951. @property(nonatomic, strong, nullable) NSNumber *confidence;
  3952. /**
  3953. * The BCP-47 language code, such as "en-US" or "sr-Latn". For more
  3954. * information, see
  3955. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  3956. */
  3957. @property(nonatomic, copy, nullable) NSString *languageCode;
  3958. @end
  3959. /**
  3960. * Set of dominant colors and their corresponding scores.
  3961. */
  3962. @interface GTLRVision_DominantColorsAnnotation : GTLRObject
  3963. /** RGB color values with their score and pixel fraction. */
  3964. @property(nonatomic, strong, nullable) NSArray<GTLRVision_ColorInfo *> *colors;
  3965. @end
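/*
 * Example (Objective-C): a sketch of walking the dominant colors; `annotation`
 * is assumed to be the GTLRVision_DominantColorsAnnotation from an
 * image-properties result.
 *
 *    for (GTLRVision_ColorInfo *info in annotation.colors) {
 *      NSLog(@"score %.2f, pixel fraction %.2f",
 *            info.score.floatValue, info.pixelFraction.floatValue);
 *    }
 */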
  3966. /**
  3967. * A generic empty message that you can re-use to avoid defining duplicated
  3968. * empty messages in your APIs. A typical example is to use it as the request
  3969. * or the response type of an API method. For instance:
  3970. * service Foo {
  3971. * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
  3972. * }
  3973. * The JSON representation for `Empty` is empty JSON object `{}`.
  3974. */
  3975. @interface GTLRVision_Empty : GTLRObject
  3976. @end
  3977. /**
  3978. * Set of detected entity features.
  3979. */
  3980. @interface GTLRVision_EntityAnnotation : GTLRObject
  3981. /**
  3982. * Image region to which this entity belongs. Not produced
  3983. * for `LABEL_DETECTION` features.
  3984. */
  3985. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
  3986. /**
  3987. * **Deprecated. Use `score` instead.**
  3988. * The accuracy of the entity detection in an image.
  3989. * For example, for an image in which the "Eiffel Tower" entity is detected,
  3990. * this field represents the confidence that there is a tower in the query
  3991. * image. Range [0, 1].
  3992. *
  3993. * Uses NSNumber of floatValue.
  3994. */
  3995. @property(nonatomic, strong, nullable) NSNumber *confidence;
  3996. /**
  3997. * Entity textual description, expressed in its `locale` language.
  3998. *
  3999. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  4000. */
  4001. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  4002. /**
  4003. * The language code for the locale in which the entity textual
  4004. * `description` is expressed.
  4005. */
  4006. @property(nonatomic, copy, nullable) NSString *locale;
  4007. /**
  4008. * The location information for the detected entity. Multiple
  4009. * `LocationInfo` elements can be present because one location may
  4010. * indicate the location of the scene in the image, and another location
  4011. * may indicate the location of the place where the image was taken.
  4012. * Location information is usually present for landmarks.
  4013. */
  4014. @property(nonatomic, strong, nullable) NSArray<GTLRVision_LocationInfo *> *locations;
  4015. /**
  4016. * Opaque entity ID. Some IDs may be available in
  4017. * [Google Knowledge Graph Search
  4018. * API](https://developers.google.com/knowledge-graph/).
  4019. */
  4020. @property(nonatomic, copy, nullable) NSString *mid;
  4021. /**
  4022. * Some entities may have optional user-supplied `Property` (name/value)
  4023. * fields, such a score or string that qualifies the entity.
  4024. */
  4025. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Property *> *properties;
  4026. /**
  4027. * Overall score of the result. Range [0, 1].
  4028. *
  4029. * Uses NSNumber of floatValue.
  4030. */
  4031. @property(nonatomic, strong, nullable) NSNumber *score;
  4032. /**
  4033. * The relevancy of the ICA (Image Content Annotation) label to the
  4034. * image. For example, the relevancy of "tower" is likely higher to an image
  4035. * containing the detected "Eiffel Tower" than to an image containing a
  4036. * detected distant towering building, even though the confidence that
  4037. * there is a tower in each image may be the same. Range [0, 1].
  4038. *
  4039. * Uses NSNumber of floatValue.
  4040. */
  4041. @property(nonatomic, strong, nullable) NSNumber *topicality;
  4042. @end
  4043. /**
  4044. * A face annotation object contains the results of face detection.
  4045. */
  4046. @interface GTLRVision_FaceAnnotation : GTLRObject
  4047. /**
  4048. * Anger likelihood.
  4049. *
  4050. * Likely values:
  4051. * @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_Likely It is likely
  4052. * that the image belongs to the specified vertical. (Value: "LIKELY")
  4053. * @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_Possible It is possible
  4054. * that the image belongs to the specified vertical. (Value: "POSSIBLE")
  4055. * @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_Unknown Unknown
  4056. * likelihood. (Value: "UNKNOWN")
  4057. * @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_Unlikely It is unlikely
  4058. * that the image belongs to the specified vertical. (Value: "UNLIKELY")
  4059. * @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_VeryLikely It is very
  4060. * likely that the image belongs to the specified vertical. (Value:
  4061. * "VERY_LIKELY")
  4062. * @arg @c kGTLRVision_FaceAnnotation_AngerLikelihood_VeryUnlikely It is very
  4063. * unlikely that the image belongs to the specified vertical. (Value:
  4064. * "VERY_UNLIKELY")
  4065. */
  4066. @property(nonatomic, copy, nullable) NSString *angerLikelihood;
  4067. /**
  4068. * Blurred likelihood.
  4069. *
  4070. * Likely values:
  4071. * @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_Likely It is likely
  4072. * that the image belongs to the specified vertical. (Value: "LIKELY")
  4073. * @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_Possible It is
  4074. * possible that the image belongs to the specified vertical. (Value:
  4075. * "POSSIBLE")
  4076. * @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_Unknown Unknown
  4077. * likelihood. (Value: "UNKNOWN")
  4078. * @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_Unlikely It is
  4079. * unlikely that the image belongs to the specified vertical. (Value:
  4080. * "UNLIKELY")
  4081. * @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_VeryLikely It is very
  4082. * likely that the image belongs to the specified vertical. (Value:
  4083. * "VERY_LIKELY")
  4084. * @arg @c kGTLRVision_FaceAnnotation_BlurredLikelihood_VeryUnlikely It is
  4085. * very unlikely that the image belongs to the specified vertical.
  4086. * (Value: "VERY_UNLIKELY")
  4087. */
  4088. @property(nonatomic, copy, nullable) NSString *blurredLikelihood;
  4089. /**
  4090. * The bounding polygon around the face. The coordinates of the bounding box
  4091. * are in the original image's scale.
  4092. * The bounding box is computed to "frame" the face in accordance with human
  4093. * expectations. It is based on the landmarker results.
  4094. * Note that one or more x and/or y coordinates may not be generated in the
  4095. * `BoundingPoly` (the polygon will be unbounded) if only a partial face
  4096. * appears in the image to be annotated.
  4097. */
  4098. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
  4099. /**
  4100. * Detection confidence. Range [0, 1].
  4101. *
  4102. * Uses NSNumber of floatValue.
  4103. */
  4104. @property(nonatomic, strong, nullable) NSNumber *detectionConfidence;
  4105. /**
  4106. * The `fd_bounding_poly` bounding polygon is tighter than the
  4107. * `boundingPoly`, and encloses only the skin part of the face. Typically, it
  4108. * is used to eliminate the face from any image analysis that detects the
  4109. * "amount of skin" visible in an image. It is not based on the
  4110. * landmarker results, only on the initial face detection, hence
  4111. * the <code>fd</code> (face detection) prefix.
  4112. */
  4113. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *fdBoundingPoly;
  4114. /**
  4115. * Headwear likelihood.
  4116. *
  4117. * Likely values:
  4118. * @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_Likely It is likely
  4119. * that the image belongs to the specified vertical. (Value: "LIKELY")
  4120. * @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_Possible It is
  4121. * possible that the image belongs to the specified vertical. (Value:
  4122. * "POSSIBLE")
  4123. * @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_Unknown Unknown
  4124. * likelihood. (Value: "UNKNOWN")
  4125. * @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_Unlikely It is
  4126. * unlikely that the image belongs to the specified vertical. (Value:
  4127. * "UNLIKELY")
  4128. * @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_VeryLikely It is
  4129. * very likely that the image belongs to the specified vertical. (Value:
  4130. * "VERY_LIKELY")
  4131. * @arg @c kGTLRVision_FaceAnnotation_HeadwearLikelihood_VeryUnlikely It is
  4132. * very unlikely that the image belongs to the specified vertical.
  4133. * (Value: "VERY_UNLIKELY")
  4134. */
  4135. @property(nonatomic, copy, nullable) NSString *headwearLikelihood;
  4136. /**
  4137. * Joy likelihood.
  4138. *
  4139. * Likely values:
  4140. * @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_Likely It is likely that
  4141. * the image belongs to the specified vertical. (Value: "LIKELY")
  4142. * @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_Possible It is possible
  4143. * that the image belongs to the specified vertical. (Value: "POSSIBLE")
  4144. * @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_Unknown Unknown
  4145. * likelihood. (Value: "UNKNOWN")
  4146. * @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_Unlikely It is unlikely
  4147. * that the image belongs to the specified vertical. (Value: "UNLIKELY")
  4148. * @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_VeryLikely It is very
  4149. * likely that the image belongs to the specified vertical. (Value:
  4150. * "VERY_LIKELY")
  4151. * @arg @c kGTLRVision_FaceAnnotation_JoyLikelihood_VeryUnlikely It is very
  4152. * unlikely that the image belongs to the specified vertical. (Value:
  4153. * "VERY_UNLIKELY")
  4154. */
  4155. @property(nonatomic, copy, nullable) NSString *joyLikelihood;
  4156. /**
  4157. * Face landmarking confidence. Range [0, 1].
  4158. *
  4159. * Uses NSNumber of floatValue.
  4160. */
  4161. @property(nonatomic, strong, nullable) NSNumber *landmarkingConfidence;
  4162. /** Detected face landmarks. */
  4163. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Landmark *> *landmarks;
  4164. /**
  4165. * Yaw angle, which indicates the leftward/rightward angle that the face is
  4166. * pointing relative to the vertical plane perpendicular to the image. Range
  4167. * [-180,180].
  4168. *
  4169. * Uses NSNumber of floatValue.
  4170. */
  4171. @property(nonatomic, strong, nullable) NSNumber *panAngle;
  4172. /**
  4173. * Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
  4174. * of the face relative to the image vertical about the axis perpendicular to
  4175. * the face. Range [-180,180].
  4176. *
  4177. * Uses NSNumber of floatValue.
  4178. */
  4179. @property(nonatomic, strong, nullable) NSNumber *rollAngle;
  4180. /**
  4181. * Sorrow likelihood.
  4182. *
  4183. * Likely values:
  4184. * @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_Likely It is likely
  4185. * that the image belongs to the specified vertical. (Value: "LIKELY")
  4186. * @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_Possible It is
  4187. * possible that the image belongs to the specified vertical. (Value:
  4188. * "POSSIBLE")
  4189. * @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_Unknown Unknown
  4190. * likelihood. (Value: "UNKNOWN")
  4191. * @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_Unlikely It is
  4192. * unlikely that the image belongs to the specified vertical. (Value:
  4193. * "UNLIKELY")
  4194. * @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_VeryLikely It is very
  4195. * likely that the image belongs to the specified vertical. (Value:
  4196. * "VERY_LIKELY")
  4197. * @arg @c kGTLRVision_FaceAnnotation_SorrowLikelihood_VeryUnlikely It is
  4198. * very unlikely that the image belongs to the specified vertical.
  4199. * (Value: "VERY_UNLIKELY")
  4200. */
  4201. @property(nonatomic, copy, nullable) NSString *sorrowLikelihood;
  4202. /**
  4203. * Surprise likelihood.
  4204. *
  4205. * Likely values:
  4206. * @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_Likely It is likely
  4207. * that the image belongs to the specified vertical. (Value: "LIKELY")
  4208. * @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_Possible It is
  4209. * possible that the image belongs to the specified vertical. (Value:
  4210. * "POSSIBLE")
  4211. * @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_Unknown Unknown
  4212. * likelihood. (Value: "UNKNOWN")
  4213. * @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_Unlikely It is
  4214. * unlikely that the image belongs to the specified vertical. (Value:
  4215. * "UNLIKELY")
  4216. * @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_VeryLikely It is
  4217. * very likely that the image belongs to the specified vertical. (Value:
  4218. * "VERY_LIKELY")
  4219. * @arg @c kGTLRVision_FaceAnnotation_SurpriseLikelihood_VeryUnlikely It is
  4220. * very unlikely that the image belongs to the specified vertical.
  4221. * (Value: "VERY_UNLIKELY")
  4222. */
  4223. @property(nonatomic, copy, nullable) NSString *surpriseLikelihood;
  4224. /**
  4225. * Pitch angle, which indicates the upwards/downwards angle that the face is
  4226. * pointing relative to the image's horizontal plane. Range [-180,180].
  4227. *
  4228. * Uses NSNumber of floatValue.
  4229. */
  4230. @property(nonatomic, strong, nullable) NSNumber *tiltAngle;
  4231. /**
  4232. * Under-exposed likelihood.
  4233. *
  4234. * Likely values:
  4235. * @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Likely It is
  4236. * likely that the image belongs to the specified vertical. (Value:
  4237. * "LIKELY")
  4238. * @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Possible It is
  4239. * possible that the image belongs to the specified vertical. (Value:
  4240. * "POSSIBLE")
  4241. * @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Unknown Unknown
  4242. * likelihood. (Value: "UNKNOWN")
  4243. * @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_Unlikely It is
  4244. * unlikely that the image belongs to the specified vertical. (Value:
  4245. * "UNLIKELY")
  4246. * @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_VeryLikely It is
  4247. * very likely that the image belongs to the specified vertical. (Value:
  4248. * "VERY_LIKELY")
  4249. * @arg @c kGTLRVision_FaceAnnotation_UnderExposedLikelihood_VeryUnlikely It
  4250. * is very unlikely that the image belongs to the specified vertical.
  4251. * (Value: "VERY_UNLIKELY")
  4252. */
  4253. @property(nonatomic, copy, nullable) NSString *underExposedLikelihood;
  4254. @end
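/*
 * Example (Objective-C): a sketch of testing a likelihood string against the
 * constants declared for this class; `face` is assumed to be a
 * GTLRVision_FaceAnnotation from a response.
 *
 *    BOOL probablySmiling =
 *        [face.joyLikelihood isEqual:kGTLRVision_FaceAnnotation_JoyLikelihood_Likely] ||
 *        [face.joyLikelihood isEqual:kGTLRVision_FaceAnnotation_JoyLikelihood_VeryLikely];
 */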
  4255. /**
  4256. * The type of Google Cloud Vision API detection to perform, and the maximum
  4257. * number of results to return for that type. Multiple `Feature` objects can
  4258. * be specified in the `features` list.
  4259. */
  4260. @interface GTLRVision_Feature : GTLRObject
  4261. /**
  4262. * Maximum number of results of this type. Does not apply to
  4263. * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
  4264. *
  4265. * Uses NSNumber of intValue.
  4266. */
  4267. @property(nonatomic, strong, nullable) NSNumber *maxResults;
  4268. /**
  4269. * Model to use for the feature.
  4270. * Supported values: "builtin/stable" (the default if unset) and
  4271. * "builtin/latest".
  4272. */
  4273. @property(nonatomic, copy, nullable) NSString *model;
  4274. /**
  4275. * The feature type.
  4276. *
  4277. * Likely values:
  4278. * @arg @c kGTLRVision_Feature_Type_CropHints Run crop hints. (Value:
  4279. * "CROP_HINTS")
  4280. * @arg @c kGTLRVision_Feature_Type_DocumentTextDetection Run dense text
  4281. * document OCR. Takes precedence when both
  4282. * `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present. (Value:
  4283. * "DOCUMENT_TEXT_DETECTION")
  4284. * @arg @c kGTLRVision_Feature_Type_FaceDetection Run face detection. (Value:
  4285. * "FACE_DETECTION")
  4286. * @arg @c kGTLRVision_Feature_Type_ImageProperties Compute a set of image
  4287. * properties, such as the
  4288. * image's dominant colors. (Value: "IMAGE_PROPERTIES")
  4289. * @arg @c kGTLRVision_Feature_Type_LabelDetection Run label detection.
  4290. * (Value: "LABEL_DETECTION")
  4291. * @arg @c kGTLRVision_Feature_Type_LandmarkDetection Run landmark detection.
  4292. * (Value: "LANDMARK_DETECTION")
  4293. * @arg @c kGTLRVision_Feature_Type_LogoDetection Run logo detection. (Value:
  4294. * "LOGO_DETECTION")
  4295. * @arg @c kGTLRVision_Feature_Type_ObjectLocalization Run localizer for
  4296. * object detection. (Value: "OBJECT_LOCALIZATION")
  4297. * @arg @c kGTLRVision_Feature_Type_ProductSearch Run Product Search. (Value:
  4298. * "PRODUCT_SEARCH")
  4299. * @arg @c kGTLRVision_Feature_Type_SafeSearchDetection Run Safe Search to
  4300. * detect potentially unsafe
  4301. * or undesirable content. (Value: "SAFE_SEARCH_DETECTION")
  4302. * @arg @c kGTLRVision_Feature_Type_TextDetection Run text detection /
  4303. * optical character recognition (OCR). Text detection
  4304. * is optimized for areas of text within a larger image; if the image is
  4305. * a document, use `DOCUMENT_TEXT_DETECTION` instead. (Value:
  4306. * "TEXT_DETECTION")
  4307. * @arg @c kGTLRVision_Feature_Type_TypeUnspecified Unspecified feature type.
  4308. * (Value: "TYPE_UNSPECIFIED")
  4309. * @arg @c kGTLRVision_Feature_Type_WebDetection Run web detection. (Value:
  4310. * "WEB_DETECTION")
  4311. */
  4312. @property(nonatomic, copy, nullable) NSString *type;
  4313. @end
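/*
 * Example (Objective-C): a sketch of pinning a feature to the latest model;
 * the supported model strings are listed in the `model` documentation above.
 *
 *    GTLRVision_Feature *webDetection = [[GTLRVision_Feature alloc] init];
 *    webDetection.type = kGTLRVision_Feature_Type_WebDetection;
 *    webDetection.model = @"builtin/latest";
 */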
  4314. /**
  4315. * The Google Cloud Storage location where the output will be written to.
  4316. */
  4317. @interface GTLRVision_GcsDestination : GTLRObject
  4318. /**
  4319. * Google Cloud Storage URI where the results will be stored. Results will
  4320. * be in JSON format and preceded by its corresponding input URI. This field
  4321. * can either represent a single file, or a prefix for multiple outputs.
  4322. * Prefixes must end in a `/`.
  4323. * Examples:
  4324. * * File: gs://bucket-name/filename.json
  4325. * * Prefix: gs://bucket-name/prefix/here/
  4326. * * File: gs://bucket-name/prefix/here
 * If there are multiple outputs, each response is still an AnnotateFileResponse,
 * each of which contains some subset of the full list of AnnotateImageResponse.
  4329. * Multiple outputs can happen if, for example, the output JSON is too large
  4330. * and overflows into multiple sharded files.
  4331. */
  4332. @property(nonatomic, copy, nullable) NSString *uri;
  4333. @end
  4334. /**
  4335. * The Google Cloud Storage location where the input will be read from.
  4336. */
  4337. @interface GTLRVision_GcsSource : GTLRObject
  4338. /**
  4339. * Google Cloud Storage URI for the input file. This must only be a
  4340. * Google Cloud Storage object. Wildcards are not currently supported.
  4341. */
  4342. @property(nonatomic, copy, nullable) NSString *uri;
  4343. @end
  4344. /**
  4345. * Response to a single file annotation request. A file may contain one or more
  4346. * images, which individually have their own responses.
  4347. */
  4348. @interface GTLRVision_GoogleCloudVisionV1p1beta1AnnotateFileResponse : GTLRObject
  4349. /** Information about the file for which this response is generated. */
  4350. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1InputConfig *inputConfig;
  4351. /** Individual responses to images found within the file. */
  4352. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1AnnotateImageResponse *> *responses;
  4353. @end
  4354. /**
  4355. * Response to an image annotation request.
  4356. */
  4357. @interface GTLRVision_GoogleCloudVisionV1p1beta1AnnotateImageResponse : GTLRObject
  4358. /**
  4359. * If present, contextual information is needed to understand where this image
  4360. * comes from.
  4361. */
  4362. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1ImageAnnotationContext *context;
  4363. /** If present, crop hints have completed successfully. */
  4364. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1CropHintsAnnotation *cropHintsAnnotation;
  4365. /**
  4366. * If set, represents the error message for the operation.
  4367. * Note that filled-in image annotations are guaranteed to be
  4368. * correct, even when `error` is set.
  4369. */
  4370. @property(nonatomic, strong, nullable) GTLRVision_Status *error;
  4371. /** If present, face detection has completed successfully. */
  4372. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation *> *faceAnnotations;
  4373. /**
  4374. * If present, text (OCR) detection or document (OCR) text detection has
  4375. * completed successfully.
  4376. * This annotation provides the structural hierarchy for the OCR detected
  4377. * text.
  4378. */
  4379. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotation *fullTextAnnotation;
  4380. /** If present, image properties were extracted successfully. */
  4381. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1ImageProperties *imagePropertiesAnnotation;
  4382. /** If present, label detection has completed successfully. */
  4383. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation *> *labelAnnotations;
  4384. /** If present, landmark detection has completed successfully. */
  4385. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation *> *landmarkAnnotations;
  4386. /**
  4387. * If present, localized object detection has completed successfully.
  4388. * This will be sorted descending by confidence score.
  4389. */
  4390. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1LocalizedObjectAnnotation *> *localizedObjectAnnotations;
  4391. /** If present, logo detection has completed successfully. */
  4392. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation *> *logoAnnotations;
  4393. /** If present, product search has completed successfully. */
  4394. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResults *productSearchResults;
  4395. /** If present, safe-search annotation has completed successfully. */
  4396. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation *safeSearchAnnotation;
  4397. /** If present, text (OCR) detection has completed successfully. */
  4398. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation *> *textAnnotations;
  4399. /** If present, web detection has completed successfully. */
  4400. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1WebDetection *webDetection;
  4401. @end
  4402. /**
  4403. * The response for a single offline file annotation request.
  4404. */
  4405. @interface GTLRVision_GoogleCloudVisionV1p1beta1AsyncAnnotateFileResponse : GTLRObject
  4406. /** The output location and metadata from AsyncAnnotateFileRequest. */
  4407. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig *outputConfig;
  4408. @end
  4409. /**
  4410. * Response to an async batch file annotation request.
  4411. */
  4412. @interface GTLRVision_GoogleCloudVisionV1p1beta1AsyncBatchAnnotateFilesResponse : GTLRObject
  4413. /**
  4414. * The list of file annotation responses, one for each request in
  4415. * AsyncBatchAnnotateFilesRequest.
  4416. */
  4417. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1AsyncAnnotateFileResponse *> *responses;
  4418. @end
  4419. /**
  4420. * Logical element on the page.
  4421. */
  4422. @interface GTLRVision_GoogleCloudVisionV1p1beta1Block : GTLRObject
  4423. /**
  4424. * Detected block type (text, image etc) for this block.
  4425. *
  4426. * Likely values:
  4427. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Barcode
  4428. * Barcode block. (Value: "BARCODE")
  4429. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Picture
  4430. * Image block. (Value: "PICTURE")
  4431. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Ruler
  4432. * Horizontal/vertical line box. (Value: "RULER")
  4433. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Table Table
  4434. * block. (Value: "TABLE")
  4435. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Text Regular
  4436. * text block. (Value: "TEXT")
  4437. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1Block_BlockType_Unknown
  4438. * Unknown block type. (Value: "UNKNOWN")
  4439. */
  4440. @property(nonatomic, copy, nullable) NSString *blockType;
  4441. /**
  4442. * The bounding box for the block.
  4443. * The vertices are in the order of top-left, top-right, bottom-right,
  4444. * bottom-left. When a rotation of the bounding box is detected the rotation
  4445. * is represented as around the top-left corner as defined when the text is
  4446. * read in the 'natural' orientation.
  4447. * For example:
  4448. * * when the text is horizontal it might look like:
  4449. * 0----1
  4450. * | |
  4451. * 3----2
  4452. * * when it's rotated 180 degrees around the top-left corner it becomes:
  4453. * 2----3
  4454. * | |
  4455. * 1----0
  4456. * and the vertex order will still be (0, 1, 2, 3).
  4457. */
  4458. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingBox;
  4459. /**
  4460. * Confidence of the OCR results on the block. Range [0, 1].
  4461. *
  4462. * Uses NSNumber of floatValue.
  4463. */
  4464. @property(nonatomic, strong, nullable) NSNumber *confidence;
  4465. /** List of paragraphs in this block (if this block is of type text). */
  4466. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Paragraph *> *paragraphs;
  4467. /** Additional information detected for the block. */
  4468. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
  4469. @end
  4470. /**
  4471. * A bounding polygon for the detected image annotation.
  4472. */
  4473. @interface GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly : GTLRObject
  4474. /** The bounding polygon normalized vertices. */
  4475. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1NormalizedVertex *> *normalizedVertices;
  4476. /** The bounding polygon vertices. */
  4477. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Vertex *> *vertices;
  4478. @end
  4479. /**
  4480. * Color information consists of RGB channels, score, and the fraction of
  4481. * the image that the color occupies in the image.
  4482. */
  4483. @interface GTLRVision_GoogleCloudVisionV1p1beta1ColorInfo : GTLRObject
  4484. /** RGB components of the color. */
  4485. @property(nonatomic, strong, nullable) GTLRVision_Color *color;
  4486. /**
  4487. * The fraction of pixels the color occupies in the image.
  4488. * Value in range [0, 1].
  4489. *
  4490. * Uses NSNumber of floatValue.
  4491. */
  4492. @property(nonatomic, strong, nullable) NSNumber *pixelFraction;
  4493. /**
  4494. * Image-specific score for this color. Value in range [0, 1].
  4495. *
  4496. * Uses NSNumber of floatValue.
  4497. */
  4498. @property(nonatomic, strong, nullable) NSNumber *score;
  4499. @end
  4500. /**
  4501. * Single crop hint that is used to generate a new crop when serving an image.
  4502. */
  4503. @interface GTLRVision_GoogleCloudVisionV1p1beta1CropHint : GTLRObject
  4504. /**
  4505. * The bounding polygon for the crop region. The coordinates of the bounding
  4506. * box are in the original image's scale.
  4507. */
  4508. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
  4509. /**
  4510. * Confidence of this being a salient region. Range [0, 1].
  4511. *
  4512. * Uses NSNumber of floatValue.
  4513. */
  4514. @property(nonatomic, strong, nullable) NSNumber *confidence;
  4515. /**
  4516. * Fraction of importance of this salient region with respect to the original
  4517. * image.
  4518. *
  4519. * Uses NSNumber of floatValue.
  4520. */
  4521. @property(nonatomic, strong, nullable) NSNumber *importanceFraction;
  4522. @end
  4523. /**
  4524. * Set of crop hints that are used to generate new crops when serving images.
  4525. */
  4526. @interface GTLRVision_GoogleCloudVisionV1p1beta1CropHintsAnnotation : GTLRObject
  4527. /** Crop hint results. */
  4528. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1CropHint *> *cropHints;
  4529. @end
  4530. /**
  4531. * Set of dominant colors and their corresponding scores.
  4532. */
  4533. @interface GTLRVision_GoogleCloudVisionV1p1beta1DominantColorsAnnotation : GTLRObject
  4534. /** RGB color values with their score and pixel fraction. */
  4535. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ColorInfo *> *colors;
  4536. @end
  4537. /**
  4538. * Set of detected entity features.
  4539. */
  4540. @interface GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation : GTLRObject
  4541. /**
  4542. * Image region to which this entity belongs. Not produced
  4543. * for `LABEL_DETECTION` features.
  4544. */
  4545. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
  4546. /**
  4547. * **Deprecated. Use `score` instead.**
  4548. * The accuracy of the entity detection in an image.
  4549. * For example, for an image in which the "Eiffel Tower" entity is detected,
  4550. * this field represents the confidence that there is a tower in the query
  4551. * image. Range [0, 1].
  4552. *
  4553. * Uses NSNumber of floatValue.
  4554. */
  4555. @property(nonatomic, strong, nullable) NSNumber *confidence;
  4556. /**
  4557. * Entity textual description, expressed in its `locale` language.
  4558. *
  4559. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  4560. */
  4561. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  4562. /**
  4563. * The language code for the locale in which the entity textual
  4564. * `description` is expressed.
  4565. */
  4566. @property(nonatomic, copy, nullable) NSString *locale;
  4567. /**
  4568. * The location information for the detected entity. Multiple
  4569. * `LocationInfo` elements can be present because one location may
  4570. * indicate the location of the scene in the image, and another location
  4571. * may indicate the location of the place where the image was taken.
  4572. * Location information is usually present for landmarks.
  4573. */
  4574. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1LocationInfo *> *locations;
  4575. /**
  4576. * Opaque entity ID. Some IDs may be available in
  4577. * [Google Knowledge Graph Search
  4578. * API](https://developers.google.com/knowledge-graph/).
  4579. */
  4580. @property(nonatomic, copy, nullable) NSString *mid;
  4581. /**
  4582. * Some entities may have optional user-supplied `Property` (name/value)
  4583. * fields, such as a score or string that qualifies the entity.
  4584. */
  4585. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Property *> *properties;
  4586. /**
  4587. * Overall score of the result. Range [0, 1].
  4588. *
  4589. * Uses NSNumber of floatValue.
  4590. */
  4591. @property(nonatomic, strong, nullable) NSNumber *score;
  4592. /**
  4593. * The relevancy of the ICA (Image Content Annotation) label to the
  4594. * image. For example, the relevancy of "tower" is likely higher to an image
  4595. * containing the detected "Eiffel Tower" than to an image containing a
  4596. * detected distant towering building, even though the confidence that
  4597. * there is a tower in each image may be the same. Range [0, 1].
  4598. *
  4599. * Uses NSNumber of floatValue.
  4600. */
  4601. @property(nonatomic, strong, nullable) NSNumber *topicality;
  4602. @end
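// Editor's usage sketch, not part of the generated surface: reading a label
// entity. The textual description is surfaced as `descriptionProperty` (remapped
// to avoid NSObject's `description`), and `score` is preferred over the
// deprecated `confidence`.
static void GTLRVision_ExampleLogLabel(
    GTLRVision_GoogleCloudVisionV1p1beta1EntityAnnotation *label) {
  float score = label.score.floatValue;            // overall relevance, range [0, 1]
  float topicality = label.topicality.floatValue;  // relevancy of the label to the image
  NSLog(@"%@ (score %.2f, topicality %.2f)", label.descriptionProperty, score, topicality);
}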
  4603. /**
  4604. * A face annotation object contains the results of face detection.
  4605. */
  4606. @interface GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation : GTLRObject
  4607. /**
  4608. * Anger likelihood.
  4609. *
  4610. * Likely values:
  4611. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Likely
  4612. * It is likely that the image belongs to the specified vertical. (Value:
  4613. * "LIKELY")
  4614. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Possible
  4615. * It is possible that the image belongs to the specified vertical.
  4616. * (Value: "POSSIBLE")
  4617. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Unknown
  4618. * Unknown likelihood. (Value: "UNKNOWN")
  4619. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_Unlikely
  4620. * It is unlikely that the image belongs to the specified vertical.
  4621. * (Value: "UNLIKELY")
  4622. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_VeryLikely
  4623. * It is very likely that the image belongs to the specified vertical.
  4624. * (Value: "VERY_LIKELY")
  4625. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_AngerLikelihood_VeryUnlikely
  4626. * It is very unlikely that the image belongs to the specified vertical.
  4627. * (Value: "VERY_UNLIKELY")
  4628. */
  4629. @property(nonatomic, copy, nullable) NSString *angerLikelihood;
  4630. /**
  4631. * Blurred likelihood.
  4632. *
  4633. * Likely values:
  4634. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Likely
  4635. * It is likely that the image belongs to the specified vertical. (Value:
  4636. * "LIKELY")
  4637. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Possible
  4638. * It is possible that the image belongs to the specified vertical.
  4639. * (Value: "POSSIBLE")
  4640. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Unknown
  4641. * Unknown likelihood. (Value: "UNKNOWN")
  4642. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_Unlikely
  4643. * It is unlikely that the image belongs to the specified vertical.
  4644. * (Value: "UNLIKELY")
  4645. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_VeryLikely
  4646. * It is very likely that the image belongs to the specified vertical.
  4647. * (Value: "VERY_LIKELY")
  4648. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely
  4649. * It is very unlikely that the image belongs to the specified vertical.
  4650. * (Value: "VERY_UNLIKELY")
  4651. */
  4652. @property(nonatomic, copy, nullable) NSString *blurredLikelihood;
  4653. /**
  4654. * The bounding polygon around the face. The coordinates of the bounding box
  4655. * are in the original image's scale.
  4656. * The bounding box is computed to "frame" the face in accordance with human
  4657. * expectations. It is based on the landmarker results.
  4658. * Note that one or more x and/or y coordinates may not be generated in the
  4659. * `BoundingPoly` (the polygon will be unbounded) if only a partial face
  4660. * appears in the image to be annotated.
  4661. */
  4662. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
  4663. /**
  4664. * Detection confidence. Range [0, 1].
  4665. *
  4666. * Uses NSNumber of floatValue.
  4667. */
  4668. @property(nonatomic, strong, nullable) NSNumber *detectionConfidence;
  4669. /**
  4670. * The `fd_bounding_poly` bounding polygon is tighter than the
  4671. * `boundingPoly`, and encloses only the skin part of the face. Typically, it
  4672. * is used to eliminate the face from any image analysis that detects the
  4673. * "amount of skin" visible in an image. It is not based on the
  4674. * landmarker results, only on the initial face detection, hence
  4675. * the <code>fd</code> (face detection) prefix.
  4676. */
  4677. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *fdBoundingPoly;
  4678. /**
  4679. * Headwear likelihood.
  4680. *
  4681. * Likely values:
  4682. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Likely
  4683. * It is likely that the image belongs to the specified vertical. (Value:
  4684. * "LIKELY")
  4685. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Possible
  4686. * It is possible that the image belongs to the specified vertical.
  4687. * (Value: "POSSIBLE")
  4688. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Unknown
  4689. * Unknown likelihood. (Value: "UNKNOWN")
  4690. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_Unlikely
  4691. * It is unlikely that the image belongs to the specified vertical.
  4692. * (Value: "UNLIKELY")
  4693. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_VeryLikely
  4694. * It is very likely that the image belongs to the specified vertical.
  4695. * (Value: "VERY_LIKELY")
  4696. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely
  4697. * It is very unlikely that the image belongs to the specified vertical.
  4698. * (Value: "VERY_UNLIKELY")
  4699. */
  4700. @property(nonatomic, copy, nullable) NSString *headwearLikelihood;
  4701. /**
  4702. * Joy likelihood.
  4703. *
  4704. * Likely values:
  4705. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Likely
  4706. * It is likely that the image belongs to the specified vertical. (Value:
  4707. * "LIKELY")
  4708. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Possible
  4709. * It is possible that the image belongs to the specified vertical.
  4710. * (Value: "POSSIBLE")
  4711. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Unknown
  4712. * Unknown likelihood. (Value: "UNKNOWN")
  4713. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_Unlikely
  4714. * It is unlikely that the image belongs to the specified vertical.
  4715. * (Value: "UNLIKELY")
  4716. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_VeryLikely
  4717. * It is very likely that the image belongs to the specified vertical.
  4718. * (Value: "VERY_LIKELY")
  4719. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_JoyLikelihood_VeryUnlikely
  4720. * It is very unlikely that the image belongs to the specified vertical.
  4721. * (Value: "VERY_UNLIKELY")
  4722. */
  4723. @property(nonatomic, copy, nullable) NSString *joyLikelihood;
  4724. /**
  4725. * Face landmarking confidence. Range [0, 1].
  4726. *
  4727. * Uses NSNumber of floatValue.
  4728. */
  4729. @property(nonatomic, strong, nullable) NSNumber *landmarkingConfidence;
  4730. /** Detected face landmarks. */
  4731. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark *> *landmarks;
  4732. /**
  4733. * Yaw angle, which indicates the leftward/rightward angle that the face is
  4734. * pointing relative to the vertical plane perpendicular to the image. Range
  4735. * [-180,180].
  4736. *
  4737. * Uses NSNumber of floatValue.
  4738. */
  4739. @property(nonatomic, strong, nullable) NSNumber *panAngle;
  4740. /**
  4741. * Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
  4742. * of the face relative to the image vertical about the axis perpendicular to
  4743. * the face. Range [-180,180].
  4744. *
  4745. * Uses NSNumber of floatValue.
  4746. */
  4747. @property(nonatomic, strong, nullable) NSNumber *rollAngle;
  4748. /**
  4749. * Sorrow likelihood.
  4750. *
  4751. * Likely values:
  4752. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Likely
  4753. * It is likely that the image belongs to the specified vertical. (Value:
  4754. * "LIKELY")
  4755. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Possible
  4756. * It is possible that the image belongs to the specified vertical.
  4757. * (Value: "POSSIBLE")
  4758. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Unknown
  4759. * Unknown likelihood. (Value: "UNKNOWN")
  4760. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_Unlikely
  4761. * It is unlikely that the image belongs to the specified vertical.
  4762. * (Value: "UNLIKELY")
  4763. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_VeryLikely
  4764. * It is very likely that the image belongs to the specified vertical.
  4765. * (Value: "VERY_LIKELY")
  4766. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely
  4767. * It is very unlikely that the image belongs to the specified vertical.
  4768. * (Value: "VERY_UNLIKELY")
  4769. */
  4770. @property(nonatomic, copy, nullable) NSString *sorrowLikelihood;
  4771. /**
  4772. * Surprise likelihood.
  4773. *
  4774. * Likely values:
  4775. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Likely
  4776. * It is likely that the image belongs to the specified vertical. (Value:
  4777. * "LIKELY")
  4778. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Possible
  4779. * It is possible that the image belongs to the specified vertical.
  4780. * (Value: "POSSIBLE")
  4781. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Unknown
  4782. * Unknown likelihood. (Value: "UNKNOWN")
  4783. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_Unlikely
  4784. * It is unlikely that the image belongs to the specified vertical.
  4785. * (Value: "UNLIKELY")
  4786. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_VeryLikely
  4787. * It is very likely that the image belongs to the specified vertical.
  4788. * (Value: "VERY_LIKELY")
  4789. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely
  4790. * It is very unlikely that the image belongs to the specified vertical.
  4791. * (Value: "VERY_UNLIKELY")
  4792. */
  4793. @property(nonatomic, copy, nullable) NSString *surpriseLikelihood;
  4794. /**
  4795. * Pitch angle, which indicates the upwards/downwards angle that the face is
  4796. * pointing relative to the image's horizontal plane. Range [-180,180].
  4797. *
  4798. * Uses NSNumber of floatValue.
  4799. */
  4800. @property(nonatomic, strong, nullable) NSNumber *tiltAngle;
  4801. /**
  4802. * Under-exposed likelihood.
  4803. *
  4804. * Likely values:
  4805. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Likely
  4806. * It is likely that the image belongs to the specified vertical. (Value:
  4807. * "LIKELY")
  4808. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Possible
  4809. * It is possible that the image belongs to the specified vertical.
  4810. * (Value: "POSSIBLE")
  4811. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Unknown
  4812. * Unknown likelihood. (Value: "UNKNOWN")
  4813. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_Unlikely
  4814. * It is unlikely that the image belongs to the specified vertical.
  4815. * (Value: "UNLIKELY")
  4816. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely
  4817. * It is very likely that the image belongs to the specified vertical.
  4818. * (Value: "VERY_LIKELY")
  4819. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely
  4820. * It is very unlikely that the image belongs to the specified vertical.
  4821. * (Value: "VERY_UNLIKELY")
  4822. */
  4823. @property(nonatomic, copy, nullable) NSString *underExposedLikelihood;
  4824. @end
  4825. /**
  4826. * A face-specific landmark (for example, a face feature).
  4827. */
  4828. @interface GTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark : GTLRObject
  4829. /** Face landmark position. */
  4830. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1Position *position;
  4831. /**
  4832. * Face landmark type.
  4833. *
  4834. * Likely values:
  4835. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinGnathion
  4836. * Chin gnathion. (Value: "CHIN_GNATHION")
  4837. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinLeftGonion
  4838. * Chin left gonion. (Value: "CHIN_LEFT_GONION")
  4839. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ChinRightGonion
  4840. * Chin right gonion. (Value: "CHIN_RIGHT_GONION")
  4841. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_ForeheadGlabella
  4842. * Forehead glabella. (Value: "FOREHEAD_GLABELLA")
  4843. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEarTragion
  4844. * Left ear tragion. (Value: "LEFT_EAR_TRAGION")
  4845. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEye
  4846. * Left eye. (Value: "LEFT_EYE")
  4847. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary
  4848. * Left eye, bottom boundary. (Value: "LEFT_EYE_BOTTOM_BOUNDARY")
  4849. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint
  4850. * Left eyebrow, upper midpoint. (Value: "LEFT_EYEBROW_UPPER_MIDPOINT")
  4851. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner
  4852. * Left eye, left corner. (Value: "LEFT_EYE_LEFT_CORNER")
  4853. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyePupil
  4854. * Left eye pupil. (Value: "LEFT_EYE_PUPIL")
  4855. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner
  4856. * Left eye, right corner. (Value: "LEFT_EYE_RIGHT_CORNER")
  4857. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary
  4858. * Left eye, top boundary. (Value: "LEFT_EYE_TOP_BOUNDARY")
  4859. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow
  4860. * Left of left eyebrow. (Value: "LEFT_OF_LEFT_EYEBROW")
  4861. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow
  4862. * Left of right eyebrow. (Value: "LEFT_OF_RIGHT_EYEBROW")
  4863. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_LowerLip
  4864. * Lower lip. (Value: "LOWER_LIP")
  4865. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes
  4866. * Midpoint between eyes. (Value: "MIDPOINT_BETWEEN_EYES")
  4867. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthCenter
  4868. * Mouth center. (Value: "MOUTH_CENTER")
  4869. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthLeft
  4870. * Mouth left. (Value: "MOUTH_LEFT")
  4871. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_MouthRight
  4872. * Mouth right. (Value: "MOUTH_RIGHT")
  4873. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomCenter
  4874. * Nose, bottom center. (Value: "NOSE_BOTTOM_CENTER")
  4875. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomLeft
  4876. * Nose, bottom left. (Value: "NOSE_BOTTOM_LEFT")
  4877. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseBottomRight
  4878. * Nose, bottom right. (Value: "NOSE_BOTTOM_RIGHT")
  4879. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_NoseTip
  4880. * Nose tip. (Value: "NOSE_TIP")
  4881. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEarTragion
  4882. * Right ear tragion. (Value: "RIGHT_EAR_TRAGION")
  4883. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEye
  4884. * Right eye. (Value: "RIGHT_EYE")
  4885. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary
  4886. * Right eye, bottom boundary. (Value: "RIGHT_EYE_BOTTOM_BOUNDARY")
  4887. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint
  4888. * Right eyebrow, upper midpoint. (Value: "RIGHT_EYEBROW_UPPER_MIDPOINT")
  4889. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner
  4890. * Right eye, left corner. (Value: "RIGHT_EYE_LEFT_CORNER")
  4891. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyePupil
  4892. * Right eye pupil. (Value: "RIGHT_EYE_PUPIL")
  4893. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeRightCorner
  4894. * Right eye, right corner. (Value: "RIGHT_EYE_RIGHT_CORNER")
  4895. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary
  4896. * Right eye, top boundary. (Value: "RIGHT_EYE_TOP_BOUNDARY")
  4897. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow
  4898. * Right of left eyebrow. (Value: "RIGHT_OF_LEFT_EYEBROW")
  4899. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow
  4900. * Right of right eyebrow. (Value: "RIGHT_OF_RIGHT_EYEBROW")
  4901. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_UnknownLandmark
  4902. * Unknown face landmark detected. Should not be filled. (Value:
  4903. * "UNKNOWN_LANDMARK")
  4904. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1FaceAnnotationLandmark_Type_UpperLip
  4905. * Upper lip. (Value: "UPPER_LIP")
  4906. */
  4907. @property(nonatomic, copy, nullable) NSString *type;
  4908. @end
  4909. /**
  4910. * The Google Cloud Storage location where the output will be written to.
  4911. */
  4912. @interface GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination : GTLRObject
  4913. /**
  4914. * Google Cloud Storage URI where the results will be stored. Results will
  4915. * be in JSON format and preceded by its corresponding input URI. This field
  4916. * can either represent a single file, or a prefix for multiple outputs.
  4917. * Prefixes must end in a `/`.
  4918. * Examples:
  4919. * * File: gs://bucket-name/filename.json
  4920. * * Prefix: gs://bucket-name/prefix/here/
  4921. * * File: gs://bucket-name/prefix/here
  4922. * If multiple outputs are written, each response is still an AnnotateFileResponse,
  4923. * each of which contains some subset of the full list of AnnotateImageResponse.
  4924. * Multiple outputs can happen if, for example, the output JSON is too large
  4925. * and overflows into multiple sharded files.
  4926. */
  4927. @property(nonatomic, copy, nullable) NSString *uri;
  4928. @end
  4929. /**
  4930. * The Google Cloud Storage location where the input will be read from.
  4931. */
  4932. @interface GTLRVision_GoogleCloudVisionV1p1beta1GcsSource : GTLRObject
  4933. /**
  4934. * Google Cloud Storage URI for the input file. This must only be a
  4935. * Google Cloud Storage object. Wildcards are not currently supported.
  4936. */
  4937. @property(nonatomic, copy, nullable) NSString *uri;
  4938. @end
  4939. /**
  4940. * If an image was produced from a file (e.g. a PDF), this message gives
  4941. * information about the source of that image.
  4942. */
  4943. @interface GTLRVision_GoogleCloudVisionV1p1beta1ImageAnnotationContext : GTLRObject
  4944. /**
  4945. * If the file was a PDF or TIFF, this field gives the page number within
  4946. * the file used to produce the image.
  4947. *
  4948. * Uses NSNumber of intValue.
  4949. */
  4950. @property(nonatomic, strong, nullable) NSNumber *pageNumber;
  4951. /** The URI of the file used to produce the image. */
  4952. @property(nonatomic, copy, nullable) NSString *uri;
  4953. @end
  4954. /**
  4955. * Stores image properties, such as dominant colors.
  4956. */
  4957. @interface GTLRVision_GoogleCloudVisionV1p1beta1ImageProperties : GTLRObject
  4958. /** If present, dominant colors completed successfully. */
  4959. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1DominantColorsAnnotation *dominantColors;
  4960. @end
  4961. /**
  4962. * The desired input location and metadata.
  4963. */
  4964. @interface GTLRVision_GoogleCloudVisionV1p1beta1InputConfig : GTLRObject
  4965. /** The Google Cloud Storage location to read the input from. */
  4966. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1GcsSource *gcsSource;
  4967. /**
  4968. * The type of the file. Currently only "application/pdf" and "image/tiff"
  4969. * are supported. Wildcards are not supported.
  4970. */
  4971. @property(nonatomic, copy, nullable) NSString *mimeType;
  4972. @end
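// Editor's usage sketch, not part of the generated surface: describing a PDF in
// GCS as the input for offline annotation. The object path is a hypothetical
// placeholder; only "application/pdf" and "image/tiff" are accepted MIME types.
static GTLRVision_GoogleCloudVisionV1p1beta1InputConfig *GTLRVision_ExamplePdfInputConfig(void) {
  GTLRVision_GoogleCloudVisionV1p1beta1GcsSource *source =
      [GTLRVision_GoogleCloudVisionV1p1beta1GcsSource object];
  source.uri = @"gs://example-bucket/documents/report.pdf";  // single object; no wildcards
  GTLRVision_GoogleCloudVisionV1p1beta1InputConfig *inputConfig =
      [GTLRVision_GoogleCloudVisionV1p1beta1InputConfig object];
  inputConfig.gcsSource = source;
  inputConfig.mimeType = @"application/pdf";
  return inputConfig;
}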
  4973. /**
  4974. * Set of detected objects with bounding boxes.
  4975. */
  4976. @interface GTLRVision_GoogleCloudVisionV1p1beta1LocalizedObjectAnnotation : GTLRObject
  4977. /** Image region to which this object belongs. This must be populated. */
  4978. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
  4979. /**
  4980. * The BCP-47 language code, such as "en-US" or "sr-Latn". For more
  4981. * information, see
  4982. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  4983. */
  4984. @property(nonatomic, copy, nullable) NSString *languageCode;
  4985. /** Object ID that should align with EntityAnnotation mid. */
  4986. @property(nonatomic, copy, nullable) NSString *mid;
  4987. /** Object name, expressed in its `language_code` language. */
  4988. @property(nonatomic, copy, nullable) NSString *name;
  4989. /**
  4990. * Score of the result. Range [0, 1].
  4991. *
  4992. * Uses NSNumber of floatValue.
  4993. */
  4994. @property(nonatomic, strong, nullable) NSNumber *score;
  4995. @end
  4996. /**
  4997. * Detected entity location information.
  4998. */
  4999. @interface GTLRVision_GoogleCloudVisionV1p1beta1LocationInfo : GTLRObject
  5000. /** lat/long location coordinates. */
  5001. @property(nonatomic, strong, nullable) GTLRVision_LatLng *latLng;
  5002. @end
  5003. /**
  5004. * A vertex represents a 2D point in the image.
  5005. * NOTE: the normalized vertex coordinates are relative to the original image
  5006. * and range from 0 to 1.
  5007. */
  5008. @interface GTLRVision_GoogleCloudVisionV1p1beta1NormalizedVertex : GTLRObject
  5009. /**
  5010. * X coordinate.
  5011. *
  5012. * Uses NSNumber of floatValue.
  5013. */
  5014. @property(nonatomic, strong, nullable) NSNumber *x;
  5015. /**
  5016. * Y coordinate.
  5017. *
  5018. * Uses NSNumber of floatValue.
  5019. */
  5020. @property(nonatomic, strong, nullable) NSNumber *y;
  5021. @end
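// Editor's sketch, not part of the generated surface: normalized vertices are
// fractions of the original image dimensions, so pixel coordinates are recovered
// with a simple multiply. The width/height parameters are supplied by the caller.
static void GTLRVision_ExamplePixelCoordinates(
    GTLRVision_GoogleCloudVisionV1p1beta1NormalizedVertex *vertex,
    double imageWidthPixels, double imageHeightPixels,
    double *outX, double *outY) {
  *outX = vertex.x.doubleValue * imageWidthPixels;
  *outY = vertex.y.doubleValue * imageHeightPixels;
}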
  5022. /**
  5023. * Contains metadata for the BatchAnnotateImages operation.
  5024. */
  5025. @interface GTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata : GTLRObject
  5026. /** The time when the batch request was received. */
  5027. @property(nonatomic, strong, nullable) GTLRDateTime *createTime;
  5028. /**
  5029. * Current state of the batch operation.
  5030. *
  5031. * Likely values:
  5032. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Cancelled
  5033. * The batch processing was cancelled. (Value: "CANCELLED")
  5034. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Created
  5035. * Request is received. (Value: "CREATED")
  5036. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Done
  5037. * The batch processing is done. (Value: "DONE")
  5038. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_Running
  5039. * Request is actively being processed. (Value: "RUNNING")
  5040. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1OperationMetadata_State_StateUnspecified
  5041. * Invalid. (Value: "STATE_UNSPECIFIED")
  5042. */
  5043. @property(nonatomic, copy, nullable) NSString *state;
  5044. /** The time when the operation result was last updated. */
  5045. @property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
  5046. @end
  5047. /**
  5048. * The desired output location and metadata.
  5049. */
  5050. @interface GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig : GTLRObject
  5051. /**
  5052. * The max number of response protos to put into each output JSON file on
  5053. * Google Cloud Storage.
  5054. * The valid range is [1, 100]. If not specified, the default value is 20.
  5055. * For example, for one PDF file with 100 pages, 100 response protos will
  5056. * be generated. If `batch_size` = 20, then 5 JSON files each
  5057. * containing 20 response protos will be written under the prefix
  5058. * `gcs_destination`.`uri`.
  5059. * Currently, batch_size only applies to GcsDestination, with potential future
  5060. * support for other output configurations.
  5061. *
  5062. * Uses NSNumber of intValue.
  5063. */
  5064. @property(nonatomic, strong, nullable) NSNumber *batchSize;
  5065. /** The Google Cloud Storage location to write the output(s) to. */
  5066. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination *gcsDestination;
  5067. @end
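// Editor's usage sketch, not part of the generated surface: sharding results for
// a long PDF. With `batchSize` = 20, a 100-page input yields 5 JSON files under
// the destination prefix. The bucket and prefix names are hypothetical.
static GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig *GTLRVision_ExampleOutputConfig(void) {
  GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination *destination =
      [GTLRVision_GoogleCloudVisionV1p1beta1GcsDestination object];
  destination.uri = @"gs://example-bucket/vision-output/";  // prefix; must end in `/`
  GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig *outputConfig =
      [GTLRVision_GoogleCloudVisionV1p1beta1OutputConfig object];
  outputConfig.gcsDestination = destination;
  outputConfig.batchSize = @20;  // valid range [1, 100]; defaults to 20 when unset
  return outputConfig;
}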
  5068. /**
  5069. * Detected page from OCR.
  5070. */
  5071. @interface GTLRVision_GoogleCloudVisionV1p1beta1Page : GTLRObject
  5072. /** List of blocks of text, images etc on this page. */
  5073. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Block *> *blocks;
  5074. /**
  5075. * Confidence of the OCR results on the page. Range [0, 1].
  5076. *
  5077. * Uses NSNumber of floatValue.
  5078. */
  5079. @property(nonatomic, strong, nullable) NSNumber *confidence;
  5080. /**
  5081. * Page height. For PDFs the unit is points. For images (including
  5082. * TIFFs) the unit is pixels.
  5083. *
  5084. * Uses NSNumber of intValue.
  5085. */
  5086. @property(nonatomic, strong, nullable) NSNumber *height;
  5087. /** Additional information detected on the page. */
  5088. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
  5089. /**
  5090. * Page width. For PDFs the unit is points. For images (including
  5091. * TIFFs) the unit is pixels.
  5092. *
  5093. * Uses NSNumber of intValue.
  5094. */
  5095. @property(nonatomic, strong, nullable) NSNumber *width;
  5096. @end
  5097. /**
  5098. * Structural unit of text representing a number of words in certain order.
  5099. */
  5100. @interface GTLRVision_GoogleCloudVisionV1p1beta1Paragraph : GTLRObject
  5101. /**
  5102. * The bounding box for the paragraph.
  5103. * The vertices are in the order of top-left, top-right, bottom-right,
  5104. * bottom-left. When a rotation of the bounding box is detected the rotation
  5105. * is represented as around the top-left corner as defined when the text is
  5106. * read in the 'natural' orientation.
  5107. * For example:
  5108. * * when the text is horizontal it might look like:
  5109. * 0----1
  5110. * | |
  5111. * 3----2
  5112. * * when it's rotated 180 degrees around the top-left corner it becomes:
  5113. * 2----3
  5114. * | |
  5115. * 1----0
  5116. * and the vertex order will still be (0, 1, 2, 3).
  5117. */
  5118. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingBox;
  5119. /**
  5120. * Confidence of the OCR results for the paragraph. Range [0, 1].
  5121. *
  5122. * Uses NSNumber of floatValue.
  5123. */
  5124. @property(nonatomic, strong, nullable) NSNumber *confidence;
  5125. /** Additional information detected for the paragraph. */
  5126. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
  5127. /** List of words in this paragraph. */
  5128. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Word *> *words;
  5129. @end
  5130. /**
  5131. * A 3D position in the image, used primarily for Face detection landmarks.
  5132. * A valid Position must have both x and y coordinates.
  5133. * The position coordinates are in the same scale as the original image.
  5134. */
  5135. @interface GTLRVision_GoogleCloudVisionV1p1beta1Position : GTLRObject
  5136. /**
  5137. * X coordinate.
  5138. *
  5139. * Uses NSNumber of floatValue.
  5140. */
  5141. @property(nonatomic, strong, nullable) NSNumber *x;
  5142. /**
  5143. * Y coordinate.
  5144. *
  5145. * Uses NSNumber of floatValue.
  5146. */
  5147. @property(nonatomic, strong, nullable) NSNumber *y;
  5148. /**
  5149. * Z coordinate (or depth).
  5150. *
  5151. * Uses NSNumber of floatValue.
  5152. */
  5153. @property(nonatomic, strong, nullable) NSNumber *z;
  5154. @end
  5155. /**
  5156. * A Product contains ReferenceImages.
  5157. */
  5158. @interface GTLRVision_GoogleCloudVisionV1p1beta1Product : GTLRObject
  5159. /**
  5160. * User-provided metadata to be stored with this product. Must be at most 4096
  5161. * characters long.
  5162. *
  5163. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  5164. */
  5165. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  5166. /**
  5167. * The user-provided name for this Product. Must not be empty. Must be at most
  5168. * 4096 characters long.
  5169. */
  5170. @property(nonatomic, copy, nullable) NSString *displayName;
  5171. /**
  5172. * The resource name of the product.
  5173. * Format is:
  5174. * `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
  5175. * This field is ignored when creating a product.
  5176. */
  5177. @property(nonatomic, copy, nullable) NSString *name;
  5178. /**
  5179. * The category for the product identified by the reference image. This should
  5180. * be either "homegoods", "apparel", or "toys".
  5181. * This field is immutable.
  5182. */
  5183. @property(nonatomic, copy, nullable) NSString *productCategory;
  5184. /**
  5185. * Key-value pairs that can be attached to a product. At query time,
  5186. * constraints can be specified based on the product_labels.
  5187. * Note that integer values can be provided as strings, e.g. "1199". Only
  5188. * strings with integer values can match a range-based restriction, which is
  5189. * to be supported soon.
  5190. * Multiple values can be assigned to the same key. One product may have up to
  5191. * 100 product_labels.
  5192. */
  5193. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue *> *productLabels;
  5194. @end
  5195. /**
  5196. * A product label represented as a key-value pair.
  5197. */
  5198. @interface GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue : GTLRObject
  5199. /**
  5200. * The key of the label attached to the product. Cannot be empty and cannot
  5201. * exceed 128 bytes.
  5202. */
  5203. @property(nonatomic, copy, nullable) NSString *key;
  5204. /**
  5205. * The value of the label attached to the product. Cannot be empty and
  5206. * cannot exceed 128 bytes.
  5207. */
  5208. @property(nonatomic, copy, nullable) NSString *value;
  5209. @end
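// Editor's usage sketch, not part of the generated surface: a Product carrying
// one key-value label, as described for `productLabels` above. The display name,
// category, and label values are hypothetical placeholders.
static GTLRVision_GoogleCloudVisionV1p1beta1Product *GTLRVision_ExampleProduct(void) {
  GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue *label =
      [GTLRVision_GoogleCloudVisionV1p1beta1ProductKeyValue object];
  label.key = @"style";
  label.value = @"running";
  GTLRVision_GoogleCloudVisionV1p1beta1Product *product =
      [GTLRVision_GoogleCloudVisionV1p1beta1Product object];
  product.displayName = @"Example sneaker";
  product.productCategory = @"apparel";  // "homegoods", "apparel", or "toys"
  product.productLabels = @[ label ];
  return product;
}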
  5210. /**
  5211. * Results for a product search request.
  5212. */
  5213. @interface GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResults : GTLRObject
  5214. /**
  5215. * Timestamp of the index which provided these results. Changes made after
  5216. * this time are not reflected in the current results.
  5217. */
  5218. @property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
  5219. /**
  5220. * List of results grouped by products detected in the query image. Each entry
  5221. * corresponds to one bounding polygon in the query image, and contains the
  5222. * matching products specific to that region. There may be duplicate product
  5223. * matches in the union of all the per-product results.
  5224. */
  5225. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsGroupedResult *> *productGroupedResults;
  5226. /** List of results, one for each product match. */
  5227. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsResult *> *results;
  5228. @end
  5229. /**
  5230. * Information about the products similar to a single product in a query
  5231. * image.
  5232. */
  5233. @interface GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsGroupedResult : GTLRObject
  5234. /** The bounding polygon around the product detected in the query image. */
  5235. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingPoly;
  5236. /** List of results, one for each product match. */
  5237. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsResult *> *results;
  5238. @end
  5239. /**
  5240. * Information about a product.
  5241. */
  5242. @interface GTLRVision_GoogleCloudVisionV1p1beta1ProductSearchResultsResult : GTLRObject
  5243. /**
  5244. * The resource name of the image from the product that is the closest match
  5245. * to the query.
  5246. */
  5247. @property(nonatomic, copy, nullable) NSString *image;
  5248. /** The Product. */
  5249. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1Product *product;
  5250. /**
  5251. * A confidence level on the match, ranging from 0 (no confidence) to
  5252. * 1 (full confidence).
  5253. *
  5254. * Uses NSNumber of floatValue.
  5255. */
  5256. @property(nonatomic, strong, nullable) NSNumber *score;
  5257. @end
  5258. /**
  5259. * A `Property` consists of a user-supplied name/value pair.
  5260. */
  5261. @interface GTLRVision_GoogleCloudVisionV1p1beta1Property : GTLRObject
  5262. /** Name of the property. */
  5263. @property(nonatomic, copy, nullable) NSString *name;
  5264. /**
  5265. * Value of numeric properties.
  5266. *
  5267. * Uses NSNumber of unsignedLongLongValue.
  5268. */
  5269. @property(nonatomic, strong, nullable) NSNumber *uint64Value;
  5270. /** Value of the property. */
  5271. @property(nonatomic, copy, nullable) NSString *value;
  5272. @end
  5273. /**
  5274. * Set of features pertaining to the image, computed by computer vision
  5275. * methods over safe-search verticals (for example, adult, spoof, medical,
  5276. * violence).
  5277. */
  5278. @interface GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation : GTLRObject
  5279. /**
  5280. * Represents the adult content likelihood for the image. Adult content may
  5281. * contain elements such as nudity, pornographic images or cartoons, or
  5282. * sexual activities.
  5283. *
  5284. * Likely values:
  5285. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Likely
  5286. * It is likely that the image belongs to the specified vertical. (Value:
  5287. * "LIKELY")
  5288. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Possible
  5289. * It is possible that the image belongs to the specified vertical.
  5290. * (Value: "POSSIBLE")
  5291. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Unknown
  5292. * Unknown likelihood. (Value: "UNKNOWN")
  5293. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Unlikely
  5294. * It is unlikely that the image belongs to the specified vertical.
  5295. * (Value: "UNLIKELY")
  5296. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryLikely
  5297. * It is very likely that the image belongs to the specified vertical.
  5298. * (Value: "VERY_LIKELY")
  5299. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryUnlikely
  5300. * It is very unlikely that the image belongs to the specified vertical.
  5301. * (Value: "VERY_UNLIKELY")
  5302. */
  5303. @property(nonatomic, copy, nullable) NSString *adult;
  5304. /**
  5305. * Likelihood that this is a medical image.
  5306. *
  5307. * Likely values:
  5308. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Likely
  5309. * It is likely that the image belongs to the specified vertical. (Value:
  5310. * "LIKELY")
  5311. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Possible
  5312. * It is possible that the image belongs to the specified vertical.
  5313. * (Value: "POSSIBLE")
  5314. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Unknown
  5315. * Unknown likelihood. (Value: "UNKNOWN")
  5316. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_Unlikely
  5317. * It is unlikely that the image belongs to the specified vertical.
  5318. * (Value: "UNLIKELY")
  5319. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_VeryLikely
  5320. * It is very likely that the image belongs to the specified vertical.
  5321. * (Value: "VERY_LIKELY")
  5322. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Medical_VeryUnlikely
  5323. * It is very unlikely that the image belongs to the specified vertical.
  5324. * (Value: "VERY_UNLIKELY")
  5325. */
  5326. @property(nonatomic, copy, nullable) NSString *medical;
  5327. /**
  5328. * Likelihood that the request image contains racy content. Racy content may
  5329. * include (but is not limited to) skimpy or sheer clothing, strategically
  5330. * covered nudity, lewd or provocative poses, or close-ups of sensitive
  5331. * body areas.
  5332. *
  5333. * Likely values:
  5334. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Likely
  5335. * It is likely that the image belongs to the specified vertical. (Value:
  5336. * "LIKELY")
  5337. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Possible
  5338. * It is possible that the image belongs to the specified vertical.
  5339. * (Value: "POSSIBLE")
  5340. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Unknown
  5341. * Unknown likelihood. (Value: "UNKNOWN")
  5342. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_Unlikely
  5343. * It is unlikely that the image belongs to the specified vertical.
  5344. * (Value: "UNLIKELY")
  5345. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_VeryLikely
  5346. * It is very likely that the image belongs to the specified vertical.
  5347. * (Value: "VERY_LIKELY")
  5348. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Racy_VeryUnlikely
  5349. * It is very unlikely that the image belongs to the specified vertical.
  5350. * (Value: "VERY_UNLIKELY")
  5351. */
  5352. @property(nonatomic, copy, nullable) NSString *racy;
  5353. /**
  5354. * Spoof likelihood. The likelihood that a modification
  5355. * was made to the image's canonical version to make it appear
  5356. * funny or offensive.
  5357. *
  5358. * Likely values:
  5359. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Likely
  5360. * It is likely that the image belongs to the specified vertical. (Value:
  5361. * "LIKELY")
  5362. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Possible
  5363. * It is possible that the image belongs to the specified vertical.
  5364. * (Value: "POSSIBLE")
  5365. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Unknown
  5366. * Unknown likelihood. (Value: "UNKNOWN")
  5367. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_Unlikely
  5368. * It is unlikely that the image belongs to the specified vertical.
  5369. * (Value: "UNLIKELY")
  5370. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_VeryLikely
  5371. * It is very likely that the image belongs to the specified vertical.
  5372. * (Value: "VERY_LIKELY")
  5373. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Spoof_VeryUnlikely
  5374. * It is very unlikely that the image belongs to the specified vertical.
  5375. * (Value: "VERY_UNLIKELY")
  5376. */
  5377. @property(nonatomic, copy, nullable) NSString *spoof;
  5378. /**
  5379. * Likelihood that this image contains violent content.
  5380. *
  5381. * Likely values:
  5382. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Likely
  5383. * It is likely that the image belongs to the specified vertical. (Value:
  5384. * "LIKELY")
  5385. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Possible
  5386. * It is possible that the image belongs to the specified vertical.
  5387. * (Value: "POSSIBLE")
  5388. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Unknown
  5389. * Unknown likelihood. (Value: "UNKNOWN")
  5390. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_Unlikely
  5391. * It is unlikely that the image belongs to the specified vertical.
  5392. * (Value: "UNLIKELY")
  5393. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryLikely
  5394. * It is very likely that the image belongs to the specified vertical.
  5395. * (Value: "VERY_LIKELY")
  5396. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Violence_VeryUnlikely
  5397. * It is very unlikely that the image belongs to the specified vertical.
  5398. * (Value: "VERY_UNLIKELY")
  5399. */
  5400. @property(nonatomic, copy, nullable) NSString *violence;
  5401. @end
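// Editor's usage sketch, not part of the generated surface: likelihoods are
// returned as strings, so compare against the generated
// kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_* constants rather
// than numeric thresholds. Which categories and levels to block is an
// application decision; this sketch only checks `adult`.
static BOOL GTLRVision_ExampleIsLikelyAdult(
    GTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation *annotation) {
  if (annotation.adult == nil) return NO;
  NSSet<NSString *> *flagged = [NSSet setWithObjects:
      kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_Likely,
      kGTLRVision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation_Adult_VeryLikely, nil];
  return [flagged containsObject:annotation.adult];
}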
  5402. /**
  5403. * A single symbol representation.
  5404. */
  5405. @interface GTLRVision_GoogleCloudVisionV1p1beta1Symbol : GTLRObject
  5406. /**
  5407. * The bounding box for the symbol.
  5408. * The vertices are in the order of top-left, top-right, bottom-right,
  5409. * bottom-left. When a rotation of the bounding box is detected the rotation
  5410. * is represented as around the top-left corner as defined when the text is
  5411. * read in the 'natural' orientation.
  5412. * For example:
  5413. * * when the text is horizontal it might look like:
  5414. * 0----1
  5415. * | |
  5416. * 3----2
  5417. * * when it's rotated 180 degrees around the top-left corner it becomes:
  5418. * 2----3
  5419. * | |
  5420. * 1----0
  5421. * and the vertex order will still be (0, 1, 2, 3).
  5422. */
  5423. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingBox;
  5424. /**
  5425. * Confidence of the OCR results for the symbol. Range [0, 1].
  5426. *
  5427. * Uses NSNumber of floatValue.
  5428. */
  5429. @property(nonatomic, strong, nullable) NSNumber *confidence;
  5430. /** Additional information detected for the symbol. */
  5431. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
  5432. /** The actual UTF-8 representation of the symbol. */
  5433. @property(nonatomic, copy, nullable) NSString *text;
  5434. @end
  5435. /**
  5436. * TextAnnotation contains a structured representation of OCR extracted text.
  5437. * The hierarchy of an OCR extracted text structure is like this:
  5438. * TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol
  5439. * Each structural component, starting from Page, may further have its own
  5440. * properties. Properties describe detected languages, breaks, etc. Please
  5441. * refer
  5442. * to the TextAnnotation.TextProperty message definition below for more
  5443. * detail.
  5444. */
  5445. @interface GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotation : GTLRObject
  5446. /** List of pages detected by OCR. */
  5447. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Page *> *pages;
  5448. /** UTF-8 text detected on the pages. */
  5449. @property(nonatomic, copy, nullable) NSString *text;
  5450. @end
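// Editor's usage sketch, not part of the generated surface: walking the OCR
// hierarchy described above (TextAnnotation -> Page -> Block -> Paragraph).
// Word and its symbols are declared later in this header, so the sketch stops at
// paragraphs and reports their word counts and confidence.
static void GTLRVision_ExampleLogParagraphs(
    GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotation *annotation) {
  for (GTLRVision_GoogleCloudVisionV1p1beta1Page *page in annotation.pages) {
    for (GTLRVision_GoogleCloudVisionV1p1beta1Block *block in page.blocks) {
      for (GTLRVision_GoogleCloudVisionV1p1beta1Paragraph *paragraph in block.paragraphs) {
        NSLog(@"Paragraph with %lu words (confidence %.2f)",
              (unsigned long)paragraph.words.count,
              paragraph.confidence.floatValue);
      }
    }
  }
}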
  5451. /**
  5452. * Detected start or end of a structural component.
  5453. */
  5454. @interface GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak : GTLRObject
  5455. /**
  5456. * True if break prepends the element.
  5457. *
  5458. * Uses NSNumber of boolValue.
  5459. */
  5460. @property(nonatomic, strong, nullable) NSNumber *isPrefix;
  5461. /**
  5462. * Detected break type.
  5463. *
  5464. * Likely values:
  5465. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_EolSureSpace
  5466. * Line-wrapping break. (Value: "EOL_SURE_SPACE")
  5467. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Hyphen
  5468. * End-line hyphen that is not present in text; does not co-occur with
  5469. * `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. (Value: "HYPHEN")
  5470. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_LineBreak
  5471. * Line break that ends a paragraph. (Value: "LINE_BREAK")
  5472. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Space
  5473. * Regular space. (Value: "SPACE")
  5474. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_SureSpace
  5475. * Sure space (very wide). (Value: "SURE_SPACE")
  5476. * @arg @c kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Unknown
  5477. * Unknown break label type. (Value: "UNKNOWN")
  5478. */
  5479. @property(nonatomic, copy, nullable) NSString *type;
  5480. @end
  5481. /**
  5482. * Detected language for a structural component.
  5483. */
  5484. @interface GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedLanguage : GTLRObject
  5485. /**
  5486. * Confidence of detected language. Range [0, 1].
  5487. *
  5488. * Uses NSNumber of floatValue.
  5489. */
  5490. @property(nonatomic, strong, nullable) NSNumber *confidence;
  5491. /**
  5492. * The BCP-47 language code, such as "en-US" or "sr-Latn". For more
  5493. * information, see
  5494. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  5495. */
  5496. @property(nonatomic, copy, nullable) NSString *languageCode;
  5497. @end
  5498. /**
  5499. * Additional information detected on the structural component.
  5500. */
  5501. @interface GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty : GTLRObject
  5502. /** Detected start or end of a text segment. */
  5503. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak *detectedBreak;
  5504. /** A list of detected languages together with confidence. */
  5505. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedLanguage *> *detectedLanguages;
  5506. @end
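
// -- Example (editor's sketch; helper name is hypothetical) --
// Maps a detected break onto the separator it implies when stitching symbol or
// word `text` back together. Uses only the DetectedBreak type constants
// declared elsewhere in this header; `isPrefix` (the break precedes the
// element) is ignored in this sketch.
#if 0  // Illustrative only.
static NSString *ExampleSeparatorForBreak(
    GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak *detectedBreak) {
  NSString *type = detectedBreak.type;
  if ([type isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_Space] ||
      [type isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_SureSpace]) {
    return @" ";
  }
  if ([type isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_EolSureSpace] ||
      [type isEqual:kGTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationDetectedBreak_Type_LineBreak]) {
    return @"\n";
  }
  return @"";  // HYPHEN and UNKNOWN add no separator in this sketch.
}
#endif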
  5507. /**
  5508. * A vertex represents a 2D point in the image.
  5509. * NOTE: the vertex coordinates are in the same scale as the original image.
  5510. */
  5511. @interface GTLRVision_GoogleCloudVisionV1p1beta1Vertex : GTLRObject
  5512. /**
  5513. * X coordinate.
  5514. *
  5515. * Uses NSNumber of intValue.
  5516. */
  5517. @property(nonatomic, strong, nullable) NSNumber *x;
  5518. /**
  5519. * Y coordinate.
  5520. *
  5521. * Uses NSNumber of intValue.
  5522. */
  5523. @property(nonatomic, strong, nullable) NSNumber *y;
  5524. @end
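
// -- Example (editor's sketch; helper name is hypothetical) --
// Vertex coordinates are pixels in the original image's scale, so an
// axis-aligned rect can be derived directly from a BoundingPoly's vertices.
// Assumes CoreGraphics types (CGRect, CGFLOAT_MAX) are available to the caller.
#if 0  // Illustrative only.
static CGRect ExampleRectForVertices(
    NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Vertex *> *vertices) {
  if (vertices.count == 0) return CGRectNull;
  CGFloat minX = CGFLOAT_MAX, minY = CGFLOAT_MAX;
  CGFloat maxX = -CGFLOAT_MAX, maxY = -CGFLOAT_MAX;
  for (GTLRVision_GoogleCloudVisionV1p1beta1Vertex *vertex in vertices) {
    CGFloat x = vertex.x.doubleValue;  // A missing coordinate is treated as 0 here.
    CGFloat y = vertex.y.doubleValue;
    minX = MIN(minX, x); minY = MIN(minY, y);
    maxX = MAX(maxX, x); maxY = MAX(maxY, y);
  }
  return CGRectMake(minX, minY, maxX - minX, maxY - minY);
}
#endif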
  5525. /**
  5526. * Relevant information for the image from the Internet.
  5527. */
  5528. @interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetection : GTLRObject
  5529. /**
  5530. * The service's best guess as to the topic of the request image.
  5531. * Inferred from similar images on the open web.
  5532. */
  5533. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebLabel *> *bestGuessLabels;
  5534. /**
  5535. * Fully matching images from the Internet.
  5536. * Can include resized copies of the query image.
  5537. */
  5538. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *fullMatchingImages;
  5539. /** Web pages containing the matching images from the Internet. */
  5540. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebPage *> *pagesWithMatchingImages;
  5541. /**
  5542. * Partial matching images from the Internet.
* Those images are similar enough to share some key-point features. For
* example, an original image will likely have partial matching for its crops.
  5545. */
  5546. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *partialMatchingImages;
  5547. /** The visually similar image results. */
  5548. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *visuallySimilarImages;
  5549. /** Deduced entities from similar images on the Internet. */
  5550. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebEntity *> *webEntities;
  5551. @end
  5552. /**
  5553. * Entity deduced from similar images on the Internet.
  5554. */
  5555. @interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebEntity : GTLRObject
  5556. /**
  5557. * Canonical description of the entity, in English.
  5558. *
  5559. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  5560. */
  5561. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  5562. /** Opaque entity ID. */
  5563. @property(nonatomic, copy, nullable) NSString *entityId;
  5564. /**
  5565. * Overall relevancy score for the entity.
  5566. * Not normalized and not comparable across different image queries.
  5567. *
  5568. * Uses NSNumber of floatValue.
  5569. */
  5570. @property(nonatomic, strong, nullable) NSNumber *score;
  5571. @end
  5572. /**
  5573. * Metadata for online images.
  5574. */
  5575. @interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage : GTLRObject
  5576. /**
  5577. * (Deprecated) Overall relevancy score for the image.
  5578. *
  5579. * Uses NSNumber of floatValue.
  5580. */
  5581. @property(nonatomic, strong, nullable) NSNumber *score;
  5582. /** The result image URL. */
  5583. @property(nonatomic, copy, nullable) NSString *url;
  5584. @end
  5585. /**
  5586. * Label to provide extra metadata for the web detection.
  5587. */
  5588. @interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebLabel : GTLRObject
  5589. /** Label for extra metadata. */
  5590. @property(nonatomic, copy, nullable) NSString *label;
  5591. /**
  5592. * The BCP-47 language code for `label`, such as "en-US" or "sr-Latn".
  5593. * For more information, see
  5594. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  5595. */
  5596. @property(nonatomic, copy, nullable) NSString *languageCode;
  5597. @end
  5598. /**
  5599. * Metadata for web pages.
  5600. */
  5601. @interface GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebPage : GTLRObject
  5602. /**
  5603. * Fully matching images on the page.
  5604. * Can include resized copies of the query image.
  5605. */
  5606. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *fullMatchingImages;
/** Title for the web page; may contain HTML markup. */
  5608. @property(nonatomic, copy, nullable) NSString *pageTitle;
  5609. /**
  5610. * Partial matching images on the page.
* Those images are similar enough to share some key-point features. For
* example, an original image will likely have partial matching for its
* crops.
  5614. */
  5615. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1WebDetectionWebImage *> *partialMatchingImages;
  5616. /**
  5617. * (Deprecated) Overall relevancy score for the web page.
  5618. *
  5619. * Uses NSNumber of floatValue.
  5620. */
  5621. @property(nonatomic, strong, nullable) NSNumber *score;
  5622. /** The result web page URL. */
  5623. @property(nonatomic, copy, nullable) NSString *url;
  5624. @end
  5625. /**
  5626. * A word representation.
  5627. */
  5628. @interface GTLRVision_GoogleCloudVisionV1p1beta1Word : GTLRObject
  5629. /**
  5630. * The bounding box for the word.
  5631. * The vertices are in the order of top-left, top-right, bottom-right,
  5632. * bottom-left. When a rotation of the bounding box is detected the rotation
  5633. * is represented as around the top-left corner as defined when the text is
  5634. * read in the 'natural' orientation.
  5635. * For example:
  5636. * * when the text is horizontal it might look like:
  5637. * 0----1
  5638. * | |
  5639. * 3----2
  5640. * * when it's rotated 180 degrees around the top-left corner it becomes:
  5641. * 2----3
  5642. * | |
  5643. * 1----0
  5644. * and the vertex order will still be (0, 1, 2, 3).
  5645. */
  5646. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1BoundingPoly *boundingBox;
  5647. /**
  5648. * Confidence of the OCR results for the word. Range [0, 1].
  5649. *
  5650. * Uses NSNumber of floatValue.
  5651. */
  5652. @property(nonatomic, strong, nullable) NSNumber *confidence;
  5653. /** Additional information detected for the word. */
  5654. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p1beta1TextAnnotationTextProperty *property;
  5655. /**
  5656. * List of symbols in the word.
  5657. * The order of the symbols follows the natural reading order.
  5658. */
  5659. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p1beta1Symbol *> *symbols;
  5660. @end
  5661. /**
  5662. * Response to a single file annotation request. A file may contain one or more
  5663. * images, which individually have their own responses.
  5664. */
  5665. @interface GTLRVision_GoogleCloudVisionV1p2beta1AnnotateFileResponse : GTLRObject
  5666. /** Information about the file for which this response is generated. */
  5667. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1InputConfig *inputConfig;
  5668. /** Individual responses to images found within the file. */
  5669. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1AnnotateImageResponse *> *responses;
  5670. @end
  5671. /**
  5672. * Response to an image annotation request.
  5673. */
  5674. @interface GTLRVision_GoogleCloudVisionV1p2beta1AnnotateImageResponse : GTLRObject
  5675. /**
  5676. * If present, contextual information is needed to understand where this image
  5677. * comes from.
  5678. */
  5679. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1ImageAnnotationContext *context;
  5680. /** If present, crop hints have completed successfully. */
  5681. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1CropHintsAnnotation *cropHintsAnnotation;
  5682. /**
  5683. * If set, represents the error message for the operation.
  5684. * Note that filled-in image annotations are guaranteed to be
  5685. * correct, even when `error` is set.
  5686. */
  5687. @property(nonatomic, strong, nullable) GTLRVision_Status *error;
  5688. /** If present, face detection has completed successfully. */
  5689. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation *> *faceAnnotations;
  5690. /**
  5691. * If present, text (OCR) detection or document (OCR) text detection has
  5692. * completed successfully.
  5693. * This annotation provides the structural hierarchy for the OCR detected
  5694. * text.
  5695. */
  5696. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotation *fullTextAnnotation;
  5697. /** If present, image properties were extracted successfully. */
  5698. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1ImageProperties *imagePropertiesAnnotation;
  5699. /** If present, label detection has completed successfully. */
  5700. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *labelAnnotations;
  5701. /** If present, landmark detection has completed successfully. */
  5702. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *landmarkAnnotations;
  5703. /**
  5704. * If present, localized object detection has completed successfully.
  5705. * This will be sorted descending by confidence score.
  5706. */
  5707. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1LocalizedObjectAnnotation *> *localizedObjectAnnotations;
  5708. /** If present, logo detection has completed successfully. */
  5709. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *logoAnnotations;
  5710. /** If present, product search has completed successfully. */
  5711. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResults *productSearchResults;
  5712. /** If present, safe-search annotation has completed successfully. */
  5713. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation *safeSearchAnnotation;
  5714. /** If present, text (OCR) detection has completed successfully. */
  5715. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *textAnnotations;
  5716. /** If present, web detection has completed successfully. */
  5717. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1WebDetection *webDetection;
  5718. @end
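
// -- Example (editor's sketch; helper name is hypothetical) --
// Per the field comments above, check `error` before assuming a feature ran,
// then read whichever annotations the request asked for. Assumes the
// GTLRVision_Status declaration elsewhere in this header exposes `code` and
// `message`.
#if 0  // Illustrative only.
static void ExampleLogResponse(
    GTLRVision_GoogleCloudVisionV1p2beta1AnnotateImageResponse *response) {
  if (response.error != nil) {
    NSLog(@"Annotation failed: %@ (code %@)", response.error.message, response.error.code);
    return;
  }
  NSLog(@"OCR text: %@", response.fullTextAnnotation.text);
  for (GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *label in response.labelAnnotations) {
    NSLog(@"Label: %@ (score %@)", label.descriptionProperty, label.score);
  }
}
#endif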
  5719. /**
  5720. * The response for a single offline file annotation request.
  5721. */
  5722. @interface GTLRVision_GoogleCloudVisionV1p2beta1AsyncAnnotateFileResponse : GTLRObject
  5723. /** The output location and metadata from AsyncAnnotateFileRequest. */
  5724. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig *outputConfig;
  5725. @end
  5726. /**
  5727. * Response to an async batch file annotation request.
  5728. */
  5729. @interface GTLRVision_GoogleCloudVisionV1p2beta1AsyncBatchAnnotateFilesResponse : GTLRObject
  5730. /**
  5731. * The list of file annotation responses, one for each request in
  5732. * AsyncBatchAnnotateFilesRequest.
  5733. */
  5734. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1AsyncAnnotateFileResponse *> *responses;
  5735. @end
  5736. /**
  5737. * Logical element on the page.
  5738. */
  5739. @interface GTLRVision_GoogleCloudVisionV1p2beta1Block : GTLRObject
  5740. /**
* Detected block type (text, image, etc.) for this block.
  5742. *
  5743. * Likely values:
  5744. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Barcode
  5745. * Barcode block. (Value: "BARCODE")
  5746. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Picture
  5747. * Image block. (Value: "PICTURE")
  5748. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Ruler
  5749. * Horizontal/vertical line box. (Value: "RULER")
  5750. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Table Table
  5751. * block. (Value: "TABLE")
  5752. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Text Regular
  5753. * text block. (Value: "TEXT")
  5754. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1Block_BlockType_Unknown
  5755. * Unknown block type. (Value: "UNKNOWN")
  5756. */
  5757. @property(nonatomic, copy, nullable) NSString *blockType;
  5758. /**
  5759. * The bounding box for the block.
  5760. * The vertices are in the order of top-left, top-right, bottom-right,
  5761. * bottom-left. When a rotation of the bounding box is detected the rotation
  5762. * is represented as around the top-left corner as defined when the text is
  5763. * read in the 'natural' orientation.
  5764. * For example:
  5765. * * when the text is horizontal it might look like:
  5766. * 0----1
  5767. * | |
  5768. * 3----2
  5769. * * when it's rotated 180 degrees around the top-left corner it becomes:
  5770. * 2----3
  5771. * | |
  5772. * 1----0
  5773. * and the vertex order will still be (0, 1, 2, 3).
  5774. */
  5775. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingBox;
  5776. /**
  5777. * Confidence of the OCR results on the block. Range [0, 1].
  5778. *
  5779. * Uses NSNumber of floatValue.
  5780. */
  5781. @property(nonatomic, strong, nullable) NSNumber *confidence;
/** List of paragraphs in this block (if this block is of type text). */
  5783. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Paragraph *> *paragraphs;
  5784. /** Additional information detected for the block. */
  5785. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
  5786. @end
  5787. /**
  5788. * A bounding polygon for the detected image annotation.
  5789. */
  5790. @interface GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly : GTLRObject
  5791. /** The bounding polygon normalized vertices. */
  5792. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1NormalizedVertex *> *normalizedVertices;
  5793. /** The bounding polygon vertices. */
  5794. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Vertex *> *vertices;
  5795. @end
  5796. /**
  5797. * Color information consists of RGB channels, score, and the fraction of
  5798. * the image that the color occupies in the image.
  5799. */
  5800. @interface GTLRVision_GoogleCloudVisionV1p2beta1ColorInfo : GTLRObject
  5801. /** RGB components of the color. */
  5802. @property(nonatomic, strong, nullable) GTLRVision_Color *color;
  5803. /**
  5804. * The fraction of pixels the color occupies in the image.
  5805. * Value in range [0, 1].
  5806. *
  5807. * Uses NSNumber of floatValue.
  5808. */
  5809. @property(nonatomic, strong, nullable) NSNumber *pixelFraction;
  5810. /**
  5811. * Image-specific score for this color. Value in range [0, 1].
  5812. *
  5813. * Uses NSNumber of floatValue.
  5814. */
  5815. @property(nonatomic, strong, nullable) NSNumber *score;
  5816. @end
  5817. /**
  5818. * Single crop hint that is used to generate a new crop when serving an image.
  5819. */
  5820. @interface GTLRVision_GoogleCloudVisionV1p2beta1CropHint : GTLRObject
  5821. /**
  5822. * The bounding polygon for the crop region. The coordinates of the bounding
  5823. * box are in the original image's scale.
  5824. */
  5825. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
  5826. /**
  5827. * Confidence of this being a salient region. Range [0, 1].
  5828. *
  5829. * Uses NSNumber of floatValue.
  5830. */
  5831. @property(nonatomic, strong, nullable) NSNumber *confidence;
  5832. /**
  5833. * Fraction of importance of this salient region with respect to the original
  5834. * image.
  5835. *
  5836. * Uses NSNumber of floatValue.
  5837. */
  5838. @property(nonatomic, strong, nullable) NSNumber *importanceFraction;
  5839. @end
  5840. /**
  5841. * Set of crop hints that are used to generate new crops when serving images.
  5842. */
  5843. @interface GTLRVision_GoogleCloudVisionV1p2beta1CropHintsAnnotation : GTLRObject
  5844. /** Crop hint results. */
  5845. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1CropHint *> *cropHints;
  5846. @end
  5847. /**
  5848. * Set of dominant colors and their corresponding scores.
  5849. */
  5850. @interface GTLRVision_GoogleCloudVisionV1p2beta1DominantColorsAnnotation : GTLRObject
  5851. /** RGB color values with their score and pixel fraction. */
  5852. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ColorInfo *> *colors;
  5853. @end
  5854. /**
  5855. * Set of detected entity features.
  5856. */
  5857. @interface GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation : GTLRObject
  5858. /**
  5859. * Image region to which this entity belongs. Not produced
  5860. * for `LABEL_DETECTION` features.
  5861. */
  5862. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
  5863. /**
  5864. * **Deprecated. Use `score` instead.**
  5865. * The accuracy of the entity detection in an image.
  5866. * For example, for an image in which the "Eiffel Tower" entity is detected,
  5867. * this field represents the confidence that there is a tower in the query
  5868. * image. Range [0, 1].
  5869. *
  5870. * Uses NSNumber of floatValue.
  5871. */
  5872. @property(nonatomic, strong, nullable) NSNumber *confidence;
  5873. /**
  5874. * Entity textual description, expressed in its `locale` language.
  5875. *
  5876. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  5877. */
  5878. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  5879. /**
  5880. * The language code for the locale in which the entity textual
  5881. * `description` is expressed.
  5882. */
  5883. @property(nonatomic, copy, nullable) NSString *locale;
  5884. /**
  5885. * The location information for the detected entity. Multiple
  5886. * `LocationInfo` elements can be present because one location may
  5887. * indicate the location of the scene in the image, and another location
  5888. * may indicate the location of the place where the image was taken.
  5889. * Location information is usually present for landmarks.
  5890. */
  5891. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1LocationInfo *> *locations;
  5892. /**
  5893. * Opaque entity ID. Some IDs may be available in
  5894. * [Google Knowledge Graph Search
  5895. * API](https://developers.google.com/knowledge-graph/).
  5896. */
  5897. @property(nonatomic, copy, nullable) NSString *mid;
  5898. /**
  5899. * Some entities may have optional user-supplied `Property` (name/value)
* fields, such as a score or string that qualifies the entity.
  5901. */
  5902. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Property *> *properties;
  5903. /**
  5904. * Overall score of the result. Range [0, 1].
  5905. *
  5906. * Uses NSNumber of floatValue.
  5907. */
  5908. @property(nonatomic, strong, nullable) NSNumber *score;
  5909. /**
  5910. * The relevancy of the ICA (Image Content Annotation) label to the
  5911. * image. For example, the relevancy of "tower" is likely higher to an image
  5912. * containing the detected "Eiffel Tower" than to an image containing a
  5913. * detected distant towering building, even though the confidence that
  5914. * there is a tower in each image may be the same. Range [0, 1].
  5915. *
  5916. * Uses NSNumber of floatValue.
  5917. */
  5918. @property(nonatomic, strong, nullable) NSNumber *topicality;
  5919. @end
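
// -- Example (editor's sketch; helper name and threshold parameter are hypothetical) --
// Filters entity annotations on `score`, the field recommended above now that
// `confidence` is deprecated.
#if 0  // Illustrative only.
static NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *
ExampleLabelsAboveScore(NSArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *labels,
                        float minimumScore) {
  NSMutableArray<GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *> *filtered =
      [NSMutableArray array];
  for (GTLRVision_GoogleCloudVisionV1p2beta1EntityAnnotation *label in labels) {
    if (label.score.floatValue >= minimumScore) {
      [filtered addObject:label];
    }
  }
  return filtered;
}
#endif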
  5920. /**
  5921. * A face annotation object contains the results of face detection.
  5922. */
  5923. @interface GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation : GTLRObject
  5924. /**
  5925. * Anger likelihood.
  5926. *
  5927. * Likely values:
  5928. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Likely
  5929. * It is likely that the image belongs to the specified vertical. (Value:
  5930. * "LIKELY")
  5931. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Possible
  5932. * It is possible that the image belongs to the specified vertical.
  5933. * (Value: "POSSIBLE")
  5934. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Unknown
  5935. * Unknown likelihood. (Value: "UNKNOWN")
  5936. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_Unlikely
  5937. * It is unlikely that the image belongs to the specified vertical.
  5938. * (Value: "UNLIKELY")
  5939. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_VeryLikely
  5940. * It is very likely that the image belongs to the specified vertical.
  5941. * (Value: "VERY_LIKELY")
  5942. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_AngerLikelihood_VeryUnlikely
  5943. * It is very unlikely that the image belongs to the specified vertical.
  5944. * (Value: "VERY_UNLIKELY")
  5945. */
  5946. @property(nonatomic, copy, nullable) NSString *angerLikelihood;
  5947. /**
  5948. * Blurred likelihood.
  5949. *
  5950. * Likely values:
  5951. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Likely
  5952. * It is likely that the image belongs to the specified vertical. (Value:
  5953. * "LIKELY")
  5954. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Possible
  5955. * It is possible that the image belongs to the specified vertical.
  5956. * (Value: "POSSIBLE")
  5957. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Unknown
  5958. * Unknown likelihood. (Value: "UNKNOWN")
  5959. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_Unlikely
  5960. * It is unlikely that the image belongs to the specified vertical.
  5961. * (Value: "UNLIKELY")
  5962. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_VeryLikely
  5963. * It is very likely that the image belongs to the specified vertical.
  5964. * (Value: "VERY_LIKELY")
  5965. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely
  5966. * It is very unlikely that the image belongs to the specified vertical.
  5967. * (Value: "VERY_UNLIKELY")
  5968. */
  5969. @property(nonatomic, copy, nullable) NSString *blurredLikelihood;
  5970. /**
  5971. * The bounding polygon around the face. The coordinates of the bounding box
  5972. * are in the original image's scale.
  5973. * The bounding box is computed to "frame" the face in accordance with human
  5974. * expectations. It is based on the landmarker results.
  5975. * Note that one or more x and/or y coordinates may not be generated in the
  5976. * `BoundingPoly` (the polygon will be unbounded) if only a partial face
  5977. * appears in the image to be annotated.
  5978. */
  5979. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
  5980. /**
  5981. * Detection confidence. Range [0, 1].
  5982. *
  5983. * Uses NSNumber of floatValue.
  5984. */
  5985. @property(nonatomic, strong, nullable) NSNumber *detectionConfidence;
  5986. /**
  5987. * The `fd_bounding_poly` bounding polygon is tighter than the
  5988. * `boundingPoly`, and encloses only the skin part of the face. Typically, it
  5989. * is used to eliminate the face from any image analysis that detects the
  5990. * "amount of skin" visible in an image. It is not based on the
  5991. * landmarker results, only on the initial face detection, hence
  5992. * the <code>fd</code> (face detection) prefix.
  5993. */
  5994. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *fdBoundingPoly;
  5995. /**
  5996. * Headwear likelihood.
  5997. *
  5998. * Likely values:
  5999. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Likely
  6000. * It is likely that the image belongs to the specified vertical. (Value:
  6001. * "LIKELY")
  6002. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Possible
  6003. * It is possible that the image belongs to the specified vertical.
  6004. * (Value: "POSSIBLE")
  6005. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Unknown
  6006. * Unknown likelihood. (Value: "UNKNOWN")
  6007. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_Unlikely
  6008. * It is unlikely that the image belongs to the specified vertical.
  6009. * (Value: "UNLIKELY")
  6010. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_VeryLikely
  6011. * It is very likely that the image belongs to the specified vertical.
  6012. * (Value: "VERY_LIKELY")
  6013. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely
  6014. * It is very unlikely that the image belongs to the specified vertical.
  6015. * (Value: "VERY_UNLIKELY")
  6016. */
  6017. @property(nonatomic, copy, nullable) NSString *headwearLikelihood;
  6018. /**
  6019. * Joy likelihood.
  6020. *
  6021. * Likely values:
  6022. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Likely
  6023. * It is likely that the image belongs to the specified vertical. (Value:
  6024. * "LIKELY")
  6025. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Possible
  6026. * It is possible that the image belongs to the specified vertical.
  6027. * (Value: "POSSIBLE")
  6028. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Unknown
  6029. * Unknown likelihood. (Value: "UNKNOWN")
  6030. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Unlikely
  6031. * It is unlikely that the image belongs to the specified vertical.
  6032. * (Value: "UNLIKELY")
  6033. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryLikely
  6034. * It is very likely that the image belongs to the specified vertical.
  6035. * (Value: "VERY_LIKELY")
  6036. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryUnlikely
  6037. * It is very unlikely that the image belongs to the specified vertical.
  6038. * (Value: "VERY_UNLIKELY")
  6039. */
  6040. @property(nonatomic, copy, nullable) NSString *joyLikelihood;
  6041. /**
  6042. * Face landmarking confidence. Range [0, 1].
  6043. *
  6044. * Uses NSNumber of floatValue.
  6045. */
  6046. @property(nonatomic, strong, nullable) NSNumber *landmarkingConfidence;
  6047. /** Detected face landmarks. */
  6048. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark *> *landmarks;
  6049. /**
  6050. * Yaw angle, which indicates the leftward/rightward angle that the face is
  6051. * pointing relative to the vertical plane perpendicular to the image. Range
  6052. * [-180,180].
  6053. *
  6054. * Uses NSNumber of floatValue.
  6055. */
  6056. @property(nonatomic, strong, nullable) NSNumber *panAngle;
  6057. /**
  6058. * Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
  6059. * of the face relative to the image vertical about the axis perpendicular to
  6060. * the face. Range [-180,180].
  6061. *
  6062. * Uses NSNumber of floatValue.
  6063. */
  6064. @property(nonatomic, strong, nullable) NSNumber *rollAngle;
  6065. /**
  6066. * Sorrow likelihood.
  6067. *
  6068. * Likely values:
  6069. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Likely
  6070. * It is likely that the image belongs to the specified vertical. (Value:
  6071. * "LIKELY")
  6072. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Possible
  6073. * It is possible that the image belongs to the specified vertical.
  6074. * (Value: "POSSIBLE")
  6075. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Unknown
  6076. * Unknown likelihood. (Value: "UNKNOWN")
  6077. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_Unlikely
  6078. * It is unlikely that the image belongs to the specified vertical.
  6079. * (Value: "UNLIKELY")
  6080. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_VeryLikely
  6081. * It is very likely that the image belongs to the specified vertical.
  6082. * (Value: "VERY_LIKELY")
  6083. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely
  6084. * It is very unlikely that the image belongs to the specified vertical.
  6085. * (Value: "VERY_UNLIKELY")
  6086. */
  6087. @property(nonatomic, copy, nullable) NSString *sorrowLikelihood;
  6088. /**
  6089. * Surprise likelihood.
  6090. *
  6091. * Likely values:
  6092. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Likely
  6093. * It is likely that the image belongs to the specified vertical. (Value:
  6094. * "LIKELY")
  6095. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Possible
  6096. * It is possible that the image belongs to the specified vertical.
  6097. * (Value: "POSSIBLE")
  6098. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Unknown
  6099. * Unknown likelihood. (Value: "UNKNOWN")
  6100. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_Unlikely
  6101. * It is unlikely that the image belongs to the specified vertical.
  6102. * (Value: "UNLIKELY")
  6103. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_VeryLikely
  6104. * It is very likely that the image belongs to the specified vertical.
  6105. * (Value: "VERY_LIKELY")
  6106. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely
  6107. * It is very unlikely that the image belongs to the specified vertical.
  6108. * (Value: "VERY_UNLIKELY")
  6109. */
  6110. @property(nonatomic, copy, nullable) NSString *surpriseLikelihood;
  6111. /**
  6112. * Pitch angle, which indicates the upwards/downwards angle that the face is
  6113. * pointing relative to the image's horizontal plane. Range [-180,180].
  6114. *
  6115. * Uses NSNumber of floatValue.
  6116. */
  6117. @property(nonatomic, strong, nullable) NSNumber *tiltAngle;
  6118. /**
  6119. * Under-exposed likelihood.
  6120. *
  6121. * Likely values:
  6122. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Likely
  6123. * It is likely that the image belongs to the specified vertical. (Value:
  6124. * "LIKELY")
  6125. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Possible
  6126. * It is possible that the image belongs to the specified vertical.
  6127. * (Value: "POSSIBLE")
  6128. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Unknown
  6129. * Unknown likelihood. (Value: "UNKNOWN")
  6130. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_Unlikely
  6131. * It is unlikely that the image belongs to the specified vertical.
  6132. * (Value: "UNLIKELY")
  6133. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely
  6134. * It is very likely that the image belongs to the specified vertical.
  6135. * (Value: "VERY_LIKELY")
  6136. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely
  6137. * It is very unlikely that the image belongs to the specified vertical.
  6138. * (Value: "VERY_UNLIKELY")
  6139. */
  6140. @property(nonatomic, copy, nullable) NSString *underExposedLikelihood;
  6141. @end
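
// -- Example (editor's sketch; helper name is hypothetical) --
// Likelihood fields are enum-like strings; compare them against the generated
// constants (declared elsewhere in this header) rather than raw literals.
#if 0  // Illustrative only.
static BOOL ExampleIsLikelyJoyful(GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation *face) {
  NSSet<NSString *> *positive = [NSSet setWithObjects:
      kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_Likely,
      kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotation_JoyLikelihood_VeryLikely, nil];
  return face.joyLikelihood != nil && [positive containsObject:face.joyLikelihood];
}
#endif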
  6142. /**
  6143. * A face-specific landmark (for example, a face feature).
  6144. */
  6145. @interface GTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark : GTLRObject
  6146. /** Face landmark position. */
  6147. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1Position *position;
  6148. /**
  6149. * Face landmark type.
  6150. *
  6151. * Likely values:
  6152. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinGnathion
  6153. * Chin gnathion. (Value: "CHIN_GNATHION")
  6154. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinLeftGonion
  6155. * Chin left gonion. (Value: "CHIN_LEFT_GONION")
  6156. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ChinRightGonion
  6157. * Chin right gonion. (Value: "CHIN_RIGHT_GONION")
  6158. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_ForeheadGlabella
  6159. * Forehead glabella. (Value: "FOREHEAD_GLABELLA")
  6160. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEarTragion
  6161. * Left ear tragion. (Value: "LEFT_EAR_TRAGION")
  6162. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEye
  6163. * Left eye. (Value: "LEFT_EYE")
  6164. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary
  6165. * Left eye, bottom boundary. (Value: "LEFT_EYE_BOTTOM_BOUNDARY")
  6166. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint
  6167. * Left eyebrow, upper midpoint. (Value: "LEFT_EYEBROW_UPPER_MIDPOINT")
  6168. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner
  6169. * Left eye, left corner. (Value: "LEFT_EYE_LEFT_CORNER")
  6170. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyePupil
  6171. * Left eye pupil. (Value: "LEFT_EYE_PUPIL")
  6172. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner
  6173. * Left eye, right corner. (Value: "LEFT_EYE_RIGHT_CORNER")
  6174. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary
  6175. * Left eye, top boundary. (Value: "LEFT_EYE_TOP_BOUNDARY")
  6176. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow
  6177. * Left of left eyebrow. (Value: "LEFT_OF_LEFT_EYEBROW")
  6178. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow
  6179. * Left of right eyebrow. (Value: "LEFT_OF_RIGHT_EYEBROW")
  6180. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_LowerLip
  6181. * Lower lip. (Value: "LOWER_LIP")
  6182. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes
  6183. * Midpoint between eyes. (Value: "MIDPOINT_BETWEEN_EYES")
  6184. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthCenter
  6185. * Mouth center. (Value: "MOUTH_CENTER")
  6186. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthLeft
  6187. * Mouth left. (Value: "MOUTH_LEFT")
  6188. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_MouthRight
  6189. * Mouth right. (Value: "MOUTH_RIGHT")
  6190. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomCenter
  6191. * Nose, bottom center. (Value: "NOSE_BOTTOM_CENTER")
  6192. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomLeft
  6193. * Nose, bottom left. (Value: "NOSE_BOTTOM_LEFT")
  6194. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseBottomRight
  6195. * Nose, bottom right. (Value: "NOSE_BOTTOM_RIGHT")
  6196. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_NoseTip
  6197. * Nose tip. (Value: "NOSE_TIP")
  6198. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEarTragion
  6199. * Right ear tragion. (Value: "RIGHT_EAR_TRAGION")
  6200. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEye
  6201. * Right eye. (Value: "RIGHT_EYE")
  6202. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary
  6203. * Right eye, bottom boundary. (Value: "RIGHT_EYE_BOTTOM_BOUNDARY")
  6204. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint
  6205. * Right eyebrow, upper midpoint. (Value: "RIGHT_EYEBROW_UPPER_MIDPOINT")
  6206. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner
  6207. * Right eye, left corner. (Value: "RIGHT_EYE_LEFT_CORNER")
  6208. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyePupil
  6209. * Right eye pupil. (Value: "RIGHT_EYE_PUPIL")
  6210. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeRightCorner
  6211. * Right eye, right corner. (Value: "RIGHT_EYE_RIGHT_CORNER")
  6212. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary
  6213. * Right eye, top boundary. (Value: "RIGHT_EYE_TOP_BOUNDARY")
  6214. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow
  6215. * Right of left eyebrow. (Value: "RIGHT_OF_LEFT_EYEBROW")
  6216. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow
  6217. * Right of right eyebrow. (Value: "RIGHT_OF_RIGHT_EYEBROW")
  6218. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_UnknownLandmark
  6219. * Unknown face landmark detected. Should not be filled. (Value:
  6220. * "UNKNOWN_LANDMARK")
  6221. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1FaceAnnotationLandmark_Type_UpperLip
  6222. * Upper lip. (Value: "UPPER_LIP")
  6223. */
  6224. @property(nonatomic, copy, nullable) NSString *type;
  6225. @end
  6226. /**
  6227. * The Google Cloud Storage location where the output will be written to.
  6228. */
  6229. @interface GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination : GTLRObject
  6230. /**
  6231. * Google Cloud Storage URI where the results will be stored. Results will
  6232. * be in JSON format and preceded by its corresponding input URI. This field
  6233. * can either represent a single file, or a prefix for multiple outputs.
  6234. * Prefixes must end in a `/`.
  6235. * Examples:
  6236. * * File: gs://bucket-name/filename.json
  6237. * * Prefix: gs://bucket-name/prefix/here/
  6238. * * File: gs://bucket-name/prefix/here
* If there are multiple outputs, each response is still an AnnotateFileResponse,
* each of which contains some subset of the full list of AnnotateImageResponse.
  6241. * Multiple outputs can happen if, for example, the output JSON is too large
  6242. * and overflows into multiple sharded files.
  6243. */
  6244. @property(nonatomic, copy, nullable) NSString *uri;
  6245. @end
  6246. /**
  6247. * The Google Cloud Storage location where the input will be read from.
  6248. */
  6249. @interface GTLRVision_GoogleCloudVisionV1p2beta1GcsSource : GTLRObject
  6250. /**
  6251. * Google Cloud Storage URI for the input file. This must only be a
  6252. * Google Cloud Storage object. Wildcards are not currently supported.
  6253. */
  6254. @property(nonatomic, copy, nullable) NSString *uri;
  6255. @end
  6256. /**
  6257. * If an image was produced from a file (e.g. a PDF), this message gives
  6258. * information about the source of that image.
  6259. */
  6260. @interface GTLRVision_GoogleCloudVisionV1p2beta1ImageAnnotationContext : GTLRObject
  6261. /**
  6262. * If the file was a PDF or TIFF, this field gives the page number within
  6263. * the file used to produce the image.
  6264. *
  6265. * Uses NSNumber of intValue.
  6266. */
  6267. @property(nonatomic, strong, nullable) NSNumber *pageNumber;
  6268. /** The URI of the file used to produce the image. */
  6269. @property(nonatomic, copy, nullable) NSString *uri;
  6270. @end
  6271. /**
  6272. * Stores image properties, such as dominant colors.
  6273. */
  6274. @interface GTLRVision_GoogleCloudVisionV1p2beta1ImageProperties : GTLRObject
  6275. /** If present, dominant colors completed successfully. */
  6276. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1DominantColorsAnnotation *dominantColors;
  6277. @end
  6278. /**
  6279. * The desired input location and metadata.
  6280. */
  6281. @interface GTLRVision_GoogleCloudVisionV1p2beta1InputConfig : GTLRObject
  6282. /** The Google Cloud Storage location to read the input from. */
  6283. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1GcsSource *gcsSource;
  6284. /**
  6285. * The type of the file. Currently only "application/pdf" and "image/tiff"
  6286. * are supported. Wildcards are not supported.
  6287. */
  6288. @property(nonatomic, copy, nullable) NSString *mimeType;
  6289. @end
  6290. /**
  6291. * Set of detected objects with bounding boxes.
  6292. */
  6293. @interface GTLRVision_GoogleCloudVisionV1p2beta1LocalizedObjectAnnotation : GTLRObject
  6294. /** Image region to which this object belongs. This must be populated. */
  6295. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
  6296. /**
  6297. * The BCP-47 language code, such as "en-US" or "sr-Latn". For more
  6298. * information, see
  6299. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  6300. */
  6301. @property(nonatomic, copy, nullable) NSString *languageCode;
  6302. /** Object ID that should align with EntityAnnotation mid. */
  6303. @property(nonatomic, copy, nullable) NSString *mid;
  6304. /** Object name, expressed in its `language_code` language. */
  6305. @property(nonatomic, copy, nullable) NSString *name;
  6306. /**
  6307. * Score of the result. Range [0, 1].
  6308. *
  6309. * Uses NSNumber of floatValue.
  6310. */
  6311. @property(nonatomic, strong, nullable) NSNumber *score;
  6312. @end
  6313. /**
  6314. * Detected entity location information.
  6315. */
  6316. @interface GTLRVision_GoogleCloudVisionV1p2beta1LocationInfo : GTLRObject
  6317. /** lat/long location coordinates. */
  6318. @property(nonatomic, strong, nullable) GTLRVision_LatLng *latLng;
  6319. @end
  6320. /**
  6321. * A vertex represents a 2D point in the image.
  6322. * NOTE: the normalized vertex coordinates are relative to the original image
  6323. * and range from 0 to 1.
  6324. */
  6325. @interface GTLRVision_GoogleCloudVisionV1p2beta1NormalizedVertex : GTLRObject
  6326. /**
  6327. * X coordinate.
  6328. *
  6329. * Uses NSNumber of floatValue.
  6330. */
  6331. @property(nonatomic, strong, nullable) NSNumber *x;
  6332. /**
  6333. * Y coordinate.
  6334. *
  6335. * Uses NSNumber of floatValue.
  6336. */
  6337. @property(nonatomic, strong, nullable) NSNumber *y;
  6338. @end
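
// -- Example (editor's sketch; helper name is hypothetical) --
// Normalized vertices are relative to the original image and range from 0 to 1,
// so multiplying by the image dimensions recovers pixel coordinates (useful for
// drawing a LocalizedObjectAnnotation's boundingPoly). Assumes CoreGraphics
// types (CGPoint) are available to the caller.
#if 0  // Illustrative only.
static CGPoint ExamplePixelPointForNormalizedVertex(
    GTLRVision_GoogleCloudVisionV1p2beta1NormalizedVertex *vertex,
    CGFloat imageWidth, CGFloat imageHeight) {
  return CGPointMake(vertex.x.doubleValue * imageWidth,
                     vertex.y.doubleValue * imageHeight);
}
#endif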
  6339. /**
  6340. * Contains metadata for the BatchAnnotateImages operation.
  6341. */
  6342. @interface GTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata : GTLRObject
  6343. /** The time when the batch request was received. */
  6344. @property(nonatomic, strong, nullable) GTLRDateTime *createTime;
  6345. /**
  6346. * Current state of the batch operation.
  6347. *
  6348. * Likely values:
  6349. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Cancelled
  6350. * The batch processing was cancelled. (Value: "CANCELLED")
  6351. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Created
  6352. * Request is received. (Value: "CREATED")
  6353. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Done
  6354. * The batch processing is done. (Value: "DONE")
  6355. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_Running
  6356. * Request is actively being processed. (Value: "RUNNING")
  6357. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1OperationMetadata_State_StateUnspecified
  6358. * Invalid. (Value: "STATE_UNSPECIFIED")
  6359. */
  6360. @property(nonatomic, copy, nullable) NSString *state;
  6361. /** The time when the operation result was last updated. */
  6362. @property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
  6363. @end
  6364. /**
  6365. * The desired output location and metadata.
  6366. */
  6367. @interface GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig : GTLRObject
  6368. /**
  6369. * The max number of response protos to put into each output JSON file on
  6370. * Google Cloud Storage.
  6371. * The valid range is [1, 100]. If not specified, the default value is 20.
  6372. * For example, for one pdf file with 100 pages, 100 response protos will
  6373. * be generated. If `batch_size` = 20, then 5 json files each
  6374. * containing 20 response protos will be written under the prefix
  6375. * `gcs_destination`.`uri`.
  6376. * Currently, batch_size only applies to GcsDestination, with potential future
  6377. * support for other output configurations.
  6378. *
  6379. * Uses NSNumber of intValue.
  6380. */
  6381. @property(nonatomic, strong, nullable) NSNumber *batchSize;
  6382. /** The Google Cloud Storage location to write the output(s) to. */
  6383. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination *gcsDestination;
  6384. @end
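
// -- Example (editor's sketch; helper name and bucket URI are hypothetical) --
// Builds an OutputConfig for an async request. As the batchSize comment above
// notes, a 100-page PDF with batchSize = 20 yields 5 output JSON files under
// the GcsDestination prefix (which must end in '/').
#if 0  // Illustrative only.
static GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig *ExampleOutputConfig(void) {
  GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination *destination =
      [[GTLRVision_GoogleCloudVisionV1p2beta1GcsDestination alloc] init];
  destination.uri = @"gs://bucket-name/prefix/here/";

  GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig *outputConfig =
      [[GTLRVision_GoogleCloudVisionV1p2beta1OutputConfig alloc] init];
  outputConfig.gcsDestination = destination;
  outputConfig.batchSize = @20;  // Valid range [1, 100]; defaults to 20 if unset.
  return outputConfig;
}
#endif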
  6385. /**
  6386. * Detected page from OCR.
  6387. */
  6388. @interface GTLRVision_GoogleCloudVisionV1p2beta1Page : GTLRObject
  6389. /** List of blocks of text, images etc on this page. */
  6390. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Block *> *blocks;
  6391. /**
  6392. * Confidence of the OCR results on the page. Range [0, 1].
  6393. *
  6394. * Uses NSNumber of floatValue.
  6395. */
  6396. @property(nonatomic, strong, nullable) NSNumber *confidence;
  6397. /**
  6398. * Page height. For PDFs the unit is points. For images (including
  6399. * TIFFs) the unit is pixels.
  6400. *
  6401. * Uses NSNumber of intValue.
  6402. */
  6403. @property(nonatomic, strong, nullable) NSNumber *height;
  6404. /** Additional information detected on the page. */
  6405. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
  6406. /**
  6407. * Page width. For PDFs the unit is points. For images (including
  6408. * TIFFs) the unit is pixels.
  6409. *
  6410. * Uses NSNumber of intValue.
  6411. */
  6412. @property(nonatomic, strong, nullable) NSNumber *width;
  6413. @end
  6414. /**
* Structural unit of text representing a number of words in a certain order.
  6416. */
  6417. @interface GTLRVision_GoogleCloudVisionV1p2beta1Paragraph : GTLRObject
  6418. /**
  6419. * The bounding box for the paragraph.
  6420. * The vertices are in the order of top-left, top-right, bottom-right,
  6421. * bottom-left. When a rotation of the bounding box is detected the rotation
  6422. * is represented as around the top-left corner as defined when the text is
  6423. * read in the 'natural' orientation.
  6424. * For example:
  6425. * * when the text is horizontal it might look like:
  6426. * 0----1
  6427. * | |
  6428. * 3----2
  6429. * * when it's rotated 180 degrees around the top-left corner it becomes:
  6430. * 2----3
  6431. * | |
  6432. * 1----0
  6433. * and the vertex order will still be (0, 1, 2, 3).
  6434. */
  6435. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingBox;
  6436. /**
  6437. * Confidence of the OCR results for the paragraph. Range [0, 1].
  6438. *
  6439. * Uses NSNumber of floatValue.
  6440. */
  6441. @property(nonatomic, strong, nullable) NSNumber *confidence;
  6442. /** Additional information detected for the paragraph. */
  6443. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
  6444. /** List of words in this paragraph. */
  6445. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Word *> *words;
  6446. @end
  6447. /**
  6448. * A 3D position in the image, used primarily for Face detection landmarks.
  6449. * A valid Position must have both x and y coordinates.
  6450. * The position coordinates are in the same scale as the original image.
  6451. */
  6452. @interface GTLRVision_GoogleCloudVisionV1p2beta1Position : GTLRObject
  6453. /**
  6454. * X coordinate.
  6455. *
  6456. * Uses NSNumber of floatValue.
  6457. */
  6458. @property(nonatomic, strong, nullable) NSNumber *x;
  6459. /**
  6460. * Y coordinate.
  6461. *
  6462. * Uses NSNumber of floatValue.
  6463. */
  6464. @property(nonatomic, strong, nullable) NSNumber *y;
  6465. /**
  6466. * Z coordinate (or depth).
  6467. *
  6468. * Uses NSNumber of floatValue.
  6469. */
  6470. @property(nonatomic, strong, nullable) NSNumber *z;
  6471. @end
  6472. /**
  6473. * A Product contains ReferenceImages.
  6474. */
  6475. @interface GTLRVision_GoogleCloudVisionV1p2beta1Product : GTLRObject
  6476. /**
  6477. * User-provided metadata to be stored with this product. Must be at most 4096
  6478. * characters long.
  6479. *
  6480. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  6481. */
  6482. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  6483. /**
  6484. * The user-provided name for this Product. Must not be empty. Must be at most
  6485. * 4096 characters long.
  6486. */
  6487. @property(nonatomic, copy, nullable) NSString *displayName;
  6488. /**
  6489. * The resource name of the product.
  6490. * Format is:
  6491. * `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
  6492. * This field is ignored when creating a product.
  6493. */
  6494. @property(nonatomic, copy, nullable) NSString *name;
  6495. /**
  6496. * The category for the product identified by the reference image. This should
  6497. * be either "homegoods", "apparel", or "toys".
  6498. * This field is immutable.
  6499. */
  6500. @property(nonatomic, copy, nullable) NSString *productCategory;
  6501. /**
  6502. * Key-value pairs that can be attached to a product. At query time,
  6503. * constraints can be specified based on the product_labels.
  6504. * Note that integer values can be provided as strings, e.g. "1199". Only
  6505. * strings with integer values can match a range-based restriction which is
  6506. * to be supported soon.
  6507. * Multiple values can be assigned to the same key. One product may have up to
  6508. * 100 product_labels.
  6509. */
  6510. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue *> *productLabels;
  6511. @end
  6512. /**
  6513. * A product label represented as a key-value pair.
  6514. */
  6515. @interface GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue : GTLRObject
  6516. /**
  6517. * The key of the label attached to the product. Cannot be empty and cannot
  6518. * exceed 128 bytes.
  6519. */
  6520. @property(nonatomic, copy, nullable) NSString *key;
  6521. /**
  6522. * The value of the label attached to the product. Cannot be empty and
  6523. * cannot exceed 128 bytes.
  6524. */
  6525. @property(nonatomic, copy, nullable) NSString *value;
  6526. @end
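
// -- Example (editor's sketch; helper name and label values are hypothetical) --
// Builds a Product with one key-value label, staying within the documented
// limits (128-byte keys and values; category one of "homegoods", "apparel", or
// "toys").
#if 0  // Illustrative only.
static GTLRVision_GoogleCloudVisionV1p2beta1Product *ExampleProduct(void) {
  GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue *label =
      [[GTLRVision_GoogleCloudVisionV1p2beta1ProductKeyValue alloc] init];
  label.key = @"style";
  label.value = @"casual";

  GTLRVision_GoogleCloudVisionV1p2beta1Product *product =
      [[GTLRVision_GoogleCloudVisionV1p2beta1Product alloc] init];
  product.displayName = @"Example product";
  product.productCategory = @"apparel";
  product.productLabels = @[ label ];
  return product;  // `name` is ignored when creating a product, so it is left unset.
}
#endif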
  6527. /**
  6528. * Results for a product search request.
  6529. */
  6530. @interface GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResults : GTLRObject
  6531. /**
  6532. * Timestamp of the index which provided these results. Changes made after
  6533. * this time are not reflected in the current results.
  6534. */
  6535. @property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
  6536. /**
  6537. * List of results grouped by products detected in the query image. Each entry
  6538. * corresponds to one bounding polygon in the query image, and contains the
  6539. * matching products specific to that region. There may be duplicate product
  6540. * matches in the union of all the per-product results.
  6541. */
  6542. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsGroupedResult *> *productGroupedResults;
  6543. /** List of results, one for each product match. */
  6544. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsResult *> *results;
  6545. @end
  6546. /**
  6547. * Information about the products similar to a single product in a query
  6548. * image.
  6549. */
  6550. @interface GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsGroupedResult : GTLRObject
  6551. /** The bounding polygon around the product detected in the query image. */
  6552. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingPoly;
  6553. /** List of results, one for each product match. */
  6554. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsResult *> *results;
  6555. @end
  6556. /**
  6557. * Information about a product.
  6558. */
  6559. @interface GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsResult : GTLRObject
  6560. /**
  6561. * The resource name of the image from the product that is the closest match
  6562. * to the query.
  6563. */
  6564. @property(nonatomic, copy, nullable) NSString *image;
  6565. /** The Product. */
  6566. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1Product *product;
  6567. /**
  6568. * A confidence level on the match, ranging from 0 (no confidence) to
  6569. * 1 (full confidence).
  6570. *
  6571. * Uses NSNumber of floatValue.
  6572. */
  6573. @property(nonatomic, strong, nullable) NSNumber *score;
  6574. @end
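
// -- Example (editor's sketch; helper name is hypothetical) --
// Iterates product search results grouped by detected region; each group holds
// the matches for one bounding polygon in the query image, and scores range
// from 0 (no confidence) to 1 (full confidence).
#if 0  // Illustrative only.
static void ExampleLogProductMatches(
    GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResults *searchResults) {
  for (GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsGroupedResult *group
           in searchResults.productGroupedResults) {
    for (GTLRVision_GoogleCloudVisionV1p2beta1ProductSearchResultsResult *result in group.results) {
      NSLog(@"Matched %@ (score %@)", result.product.displayName, result.score);
    }
  }
}
#endif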
  6575. /**
  6576. * A `Property` consists of a user-supplied name/value pair.
  6577. */
  6578. @interface GTLRVision_GoogleCloudVisionV1p2beta1Property : GTLRObject
  6579. /** Name of the property. */
  6580. @property(nonatomic, copy, nullable) NSString *name;
  6581. /**
  6582. * Value of numeric properties.
  6583. *
  6584. * Uses NSNumber of unsignedLongLongValue.
  6585. */
  6586. @property(nonatomic, strong, nullable) NSNumber *uint64Value;
  6587. /** Value of the property. */
  6588. @property(nonatomic, copy, nullable) NSString *value;
  6589. @end
  6590. /**
  6591. * Set of features pertaining to the image, computed by computer vision
  6592. * methods over safe-search verticals (for example, adult, spoof, medical,
  6593. * violence).
  6594. */
  6595. @interface GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation : GTLRObject
  6596. /**
  6597. * Represents the adult content likelihood for the image. Adult content may
  6598. * contain elements such as nudity, pornographic images or cartoons, or
  6599. * sexual activities.
  6600. *
  6601. * Likely values:
  6602. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Likely
  6603. * It is likely that the image belongs to the specified vertical. (Value:
  6604. * "LIKELY")
  6605. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Possible
  6606. * It is possible that the image belongs to the specified vertical.
  6607. * (Value: "POSSIBLE")
  6608. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Unknown
  6609. * Unknown likelihood. (Value: "UNKNOWN")
  6610. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_Unlikely
  6611. * It is unlikely that the image belongs to the specified vertical.
  6612. * (Value: "UNLIKELY")
  6613. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryLikely
  6614. * It is very likely that the image belongs to the specified vertical.
  6615. * (Value: "VERY_LIKELY")
  6616. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryUnlikely
  6617. * It is very unlikely that the image belongs to the specified vertical.
  6618. * (Value: "VERY_UNLIKELY")
  6619. */
  6620. @property(nonatomic, copy, nullable) NSString *adult;
  6621. /**
  6622. * Likelihood that this is a medical image.
  6623. *
  6624. * Likely values:
  6625. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Likely
  6626. * It is likely that the image belongs to the specified vertical. (Value:
  6627. * "LIKELY")
  6628. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Possible
  6629. * It is possible that the image belongs to the specified vertical.
  6630. * (Value: "POSSIBLE")
  6631. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Unknown
  6632. * Unknown likelihood. (Value: "UNKNOWN")
  6633. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_Unlikely
  6634. * It is unlikely that the image belongs to the specified vertical.
  6635. * (Value: "UNLIKELY")
  6636. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_VeryLikely
  6637. * It is very likely that the image belongs to the specified vertical.
  6638. * (Value: "VERY_LIKELY")
  6639. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Medical_VeryUnlikely
  6640. * It is very unlikely that the image belongs to the specified vertical.
  6641. * (Value: "VERY_UNLIKELY")
  6642. */
  6643. @property(nonatomic, copy, nullable) NSString *medical;
  6644. /**
  6645. * Likelihood that the request image contains racy content. Racy content may
  6646. * include (but is not limited to) skimpy or sheer clothing, strategically
  6647. * covered nudity, lewd or provocative poses, or close-ups of sensitive
  6648. * body areas.
  6649. *
  6650. * Likely values:
  6651. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Likely
  6652. * It is likely that the image belongs to the specified vertical. (Value:
  6653. * "LIKELY")
  6654. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Possible
  6655. * It is possible that the image belongs to the specified vertical.
  6656. * (Value: "POSSIBLE")
  6657. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Unknown
  6658. * Unknown likelihood. (Value: "UNKNOWN")
  6659. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_Unlikely
  6660. * It is unlikely that the image belongs to the specified vertical.
  6661. * (Value: "UNLIKELY")
  6662. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_VeryLikely
  6663. * It is very likely that the image belongs to the specified vertical.
  6664. * (Value: "VERY_LIKELY")
  6665. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_VeryUnlikely
  6666. * It is very unlikely that the image belongs to the specified vertical.
  6667. * (Value: "VERY_UNLIKELY")
  6668. */
  6669. @property(nonatomic, copy, nullable) NSString *racy;
  6670. /**
6671. * Spoof likelihood. The likelihood that a modification
  6672. * was made to the image's canonical version to make it appear
  6673. * funny or offensive.
  6674. *
  6675. * Likely values:
  6676. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Likely
  6677. * It is likely that the image belongs to the specified vertical. (Value:
  6678. * "LIKELY")
  6679. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Possible
  6680. * It is possible that the image belongs to the specified vertical.
  6681. * (Value: "POSSIBLE")
  6682. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Unknown
  6683. * Unknown likelihood. (Value: "UNKNOWN")
  6684. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_Unlikely
  6685. * It is unlikely that the image belongs to the specified vertical.
  6686. * (Value: "UNLIKELY")
  6687. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_VeryLikely
  6688. * It is very likely that the image belongs to the specified vertical.
  6689. * (Value: "VERY_LIKELY")
  6690. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Spoof_VeryUnlikely
  6691. * It is very unlikely that the image belongs to the specified vertical.
  6692. * (Value: "VERY_UNLIKELY")
  6693. */
  6694. @property(nonatomic, copy, nullable) NSString *spoof;
  6695. /**
  6696. * Likelihood that this image contains violent content.
  6697. *
  6698. * Likely values:
  6699. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Likely
  6700. * It is likely that the image belongs to the specified vertical. (Value:
  6701. * "LIKELY")
  6702. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Possible
  6703. * It is possible that the image belongs to the specified vertical.
  6704. * (Value: "POSSIBLE")
  6705. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Unknown
  6706. * Unknown likelihood. (Value: "UNKNOWN")
  6707. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_Unlikely
  6708. * It is unlikely that the image belongs to the specified vertical.
  6709. * (Value: "UNLIKELY")
  6710. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_VeryLikely
  6711. * It is very likely that the image belongs to the specified vertical.
  6712. * (Value: "VERY_LIKELY")
  6713. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Violence_VeryUnlikely
  6714. * It is very unlikely that the image belongs to the specified vertical.
  6715. * (Value: "VERY_UNLIKELY")
  6716. */
  6717. @property(nonatomic, copy, nullable) NSString *violence;
  6718. @end
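// Example (illustrative sketch, not part of the generated interface): checking
// safe-search likelihoods against the generated string constants. `safeSearch`
// is assumed to be a GTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation.
//
//   BOOL sensitive =
//       [safeSearch.adult isEqual:kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Adult_VeryLikely] ||
//       [safeSearch.racy isEqual:kGTLRVision_GoogleCloudVisionV1p2beta1SafeSearchAnnotation_Racy_VeryLikely];
//   if (sensitive) {
//     // Treat the image as sensitive content.
//   }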
  6719. /**
  6720. * A single symbol representation.
  6721. */
  6722. @interface GTLRVision_GoogleCloudVisionV1p2beta1Symbol : GTLRObject
  6723. /**
  6724. * The bounding box for the symbol.
  6725. * The vertices are in the order of top-left, top-right, bottom-right,
  6726. * bottom-left. When a rotation of the bounding box is detected the rotation
  6727. * is represented as around the top-left corner as defined when the text is
  6728. * read in the 'natural' orientation.
  6729. * For example:
  6730. * * when the text is horizontal it might look like:
  6731. * 0----1
  6732. * | |
  6733. * 3----2
  6734. * * when it's rotated 180 degrees around the top-left corner it becomes:
  6735. * 2----3
  6736. * | |
  6737. * 1----0
6738. * and the vertex order will still be (0, 1, 2, 3).
  6739. */
  6740. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingBox;
  6741. /**
  6742. * Confidence of the OCR results for the symbol. Range [0, 1].
  6743. *
  6744. * Uses NSNumber of floatValue.
  6745. */
  6746. @property(nonatomic, strong, nullable) NSNumber *confidence;
  6747. /** Additional information detected for the symbol. */
  6748. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
  6749. /** The actual UTF-8 representation of the symbol. */
  6750. @property(nonatomic, copy, nullable) NSString *text;
  6751. @end
  6752. /**
  6753. * TextAnnotation contains a structured representation of OCR extracted text.
  6754. * The hierarchy of an OCR extracted text structure is like this:
  6755. * TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol
6756. * Each structural component, starting from Page, may further have its own
6757. * properties. Properties describe detected languages, breaks, etc. Please
6758. * refer to the TextAnnotation.TextProperty message definition below for
6759. * more detail.
  6761. */
  6762. @interface GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotation : GTLRObject
  6763. /** List of pages detected by OCR. */
  6764. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Page *> *pages;
  6765. /** UTF-8 text detected on the pages. */
  6766. @property(nonatomic, copy, nullable) NSString *text;
  6767. @end
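// Example (illustrative sketch, not part of the generated interface): the
// simplest way to consume a TextAnnotation is to read the flattened `text`
// property; the `pages` array exposes the full OCR hierarchy when structure is
// needed. `textAnnotation` is an assumed local variable.
//
//   NSLog(@"OCR text:\n%@", textAnnotation.text);
//   NSLog(@"Detected %lu page(s)", (unsigned long)textAnnotation.pages.count);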
  6768. /**
  6769. * Detected start or end of a structural component.
  6770. */
  6771. @interface GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak : GTLRObject
  6772. /**
  6773. * True if break prepends the element.
  6774. *
  6775. * Uses NSNumber of boolValue.
  6776. */
  6777. @property(nonatomic, strong, nullable) NSNumber *isPrefix;
  6778. /**
  6779. * Detected break type.
  6780. *
  6781. * Likely values:
  6782. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_EolSureSpace
  6783. * Line-wrapping break. (Value: "EOL_SURE_SPACE")
  6784. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Hyphen
  6785. * End-line hyphen that is not present in text; does not co-occur with
  6786. * `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. (Value: "HYPHEN")
  6787. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_LineBreak
  6788. * Line break that ends a paragraph. (Value: "LINE_BREAK")
  6789. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Space
  6790. * Regular space. (Value: "SPACE")
  6791. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_SureSpace
  6792. * Sure space (very wide). (Value: "SURE_SPACE")
  6793. * @arg @c kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Unknown
  6794. * Unknown break label type. (Value: "UNKNOWN")
  6795. */
  6796. @property(nonatomic, copy, nullable) NSString *type;
  6797. @end
  6798. /**
  6799. * Detected language for a structural component.
  6800. */
  6801. @interface GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedLanguage : GTLRObject
  6802. /**
  6803. * Confidence of detected language. Range [0, 1].
  6804. *
  6805. * Uses NSNumber of floatValue.
  6806. */
  6807. @property(nonatomic, strong, nullable) NSNumber *confidence;
  6808. /**
  6809. * The BCP-47 language code, such as "en-US" or "sr-Latn". For more
  6810. * information, see
  6811. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  6812. */
  6813. @property(nonatomic, copy, nullable) NSString *languageCode;
  6814. @end
  6815. /**
  6816. * Additional information detected on the structural component.
  6817. */
  6818. @interface GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty : GTLRObject
  6819. /** Detected start or end of a text segment. */
  6820. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak *detectedBreak;
  6821. /** A list of detected languages together with confidence. */
  6822. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedLanguage *> *detectedLanguages;
  6823. @end
  6824. /**
  6825. * A vertex represents a 2D point in the image.
  6826. * NOTE: the vertex coordinates are in the same scale as the original image.
  6827. */
  6828. @interface GTLRVision_GoogleCloudVisionV1p2beta1Vertex : GTLRObject
  6829. /**
  6830. * X coordinate.
  6831. *
  6832. * Uses NSNumber of intValue.
  6833. */
  6834. @property(nonatomic, strong, nullable) NSNumber *x;
  6835. /**
  6836. * Y coordinate.
  6837. *
  6838. * Uses NSNumber of intValue.
  6839. */
  6840. @property(nonatomic, strong, nullable) NSNumber *y;
  6841. @end
  6842. /**
  6843. * Relevant information for the image from the Internet.
  6844. */
  6845. @interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetection : GTLRObject
  6846. /**
  6847. * The service's best guess as to the topic of the request image.
  6848. * Inferred from similar images on the open web.
  6849. */
  6850. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel *> *bestGuessLabels;
  6851. /**
  6852. * Fully matching images from the Internet.
  6853. * Can include resized copies of the query image.
  6854. */
  6855. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *fullMatchingImages;
  6856. /** Web pages containing the matching images from the Internet. */
  6857. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebPage *> *pagesWithMatchingImages;
  6858. /**
  6859. * Partial matching images from the Internet.
  6860. * Those images are similar enough to share some key-point features. For
6861. * example, an original image will likely have partial matching for its crops.
  6862. */
  6863. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *partialMatchingImages;
  6864. /** The visually similar image results. */
  6865. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *visuallySimilarImages;
  6866. /** Deduced entities from similar images on the Internet. */
  6867. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity *> *webEntities;
  6868. @end
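// Example (illustrative sketch, not part of the generated interface): reading a
// WebDetection result. `webDetection` is an assumed local variable.
//
//   GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel *bestGuess =
//       webDetection.bestGuessLabels.firstObject;
//   NSLog(@"Best guess label: %@", bestGuess.label);
//   for (GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity *entity in webDetection.webEntities) {
//     // Entity scores are not normalized and are only comparable within this query.
//     NSLog(@"%@ (%.2f)", entity.descriptionProperty, entity.score.floatValue);
//   }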
  6869. /**
  6870. * Entity deduced from similar images on the Internet.
  6871. */
  6872. @interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebEntity : GTLRObject
  6873. /**
  6874. * Canonical description of the entity, in English.
  6875. *
  6876. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  6877. */
  6878. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  6879. /** Opaque entity ID. */
  6880. @property(nonatomic, copy, nullable) NSString *entityId;
  6881. /**
  6882. * Overall relevancy score for the entity.
  6883. * Not normalized and not comparable across different image queries.
  6884. *
  6885. * Uses NSNumber of floatValue.
  6886. */
  6887. @property(nonatomic, strong, nullable) NSNumber *score;
  6888. @end
  6889. /**
  6890. * Metadata for online images.
  6891. */
  6892. @interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage : GTLRObject
  6893. /**
  6894. * (Deprecated) Overall relevancy score for the image.
  6895. *
  6896. * Uses NSNumber of floatValue.
  6897. */
  6898. @property(nonatomic, strong, nullable) NSNumber *score;
  6899. /** The result image URL. */
  6900. @property(nonatomic, copy, nullable) NSString *url;
  6901. @end
  6902. /**
  6903. * Label to provide extra metadata for the web detection.
  6904. */
  6905. @interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebLabel : GTLRObject
  6906. /** Label for extra metadata. */
  6907. @property(nonatomic, copy, nullable) NSString *label;
  6908. /**
  6909. * The BCP-47 language code for `label`, such as "en-US" or "sr-Latn".
  6910. * For more information, see
  6911. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  6912. */
  6913. @property(nonatomic, copy, nullable) NSString *languageCode;
  6914. @end
  6915. /**
  6916. * Metadata for web pages.
  6917. */
  6918. @interface GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebPage : GTLRObject
  6919. /**
  6920. * Fully matching images on the page.
  6921. * Can include resized copies of the query image.
  6922. */
  6923. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *fullMatchingImages;
6924. /** Title for the web page; may contain HTML markup. */
  6925. @property(nonatomic, copy, nullable) NSString *pageTitle;
  6926. /**
  6927. * Partial matching images on the page.
  6928. * Those images are similar enough to share some key-point features. For
6929. * example, an original image will likely have partial matching for its
  6930. * crops.
  6931. */
  6932. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1WebDetectionWebImage *> *partialMatchingImages;
  6933. /**
  6934. * (Deprecated) Overall relevancy score for the web page.
  6935. *
  6936. * Uses NSNumber of floatValue.
  6937. */
  6938. @property(nonatomic, strong, nullable) NSNumber *score;
  6939. /** The result web page URL. */
  6940. @property(nonatomic, copy, nullable) NSString *url;
  6941. @end
  6942. /**
  6943. * A word representation.
  6944. */
  6945. @interface GTLRVision_GoogleCloudVisionV1p2beta1Word : GTLRObject
  6946. /**
  6947. * The bounding box for the word.
  6948. * The vertices are in the order of top-left, top-right, bottom-right,
  6949. * bottom-left. When a rotation of the bounding box is detected the rotation
  6950. * is represented as around the top-left corner as defined when the text is
  6951. * read in the 'natural' orientation.
  6952. * For example:
  6953. * * when the text is horizontal it might look like:
  6954. * 0----1
  6955. * | |
  6956. * 3----2
  6957. * * when it's rotated 180 degrees around the top-left corner it becomes:
  6958. * 2----3
  6959. * | |
  6960. * 1----0
  6961. * and the vertex order will still be (0, 1, 2, 3).
  6962. */
  6963. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1BoundingPoly *boundingBox;
  6964. /**
  6965. * Confidence of the OCR results for the word. Range [0, 1].
  6966. *
  6967. * Uses NSNumber of floatValue.
  6968. */
  6969. @property(nonatomic, strong, nullable) NSNumber *confidence;
  6970. /** Additional information detected for the word. */
  6971. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationTextProperty *property;
  6972. /**
  6973. * List of symbols in the word.
  6974. * The order of the symbols follows the natural reading order.
  6975. */
  6976. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p2beta1Symbol *> *symbols;
  6977. @end
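// Example (illustrative sketch, not part of the generated interface):
// reassembling a word's text from its symbols, inserting a space when a symbol
// carries a SPACE detected break. `word` is an assumed
// GTLRVision_GoogleCloudVisionV1p2beta1Word local variable.
//
//   NSMutableString *wordText = [NSMutableString string];
//   for (GTLRVision_GoogleCloudVisionV1p2beta1Symbol *symbol in word.symbols) {
//     if (symbol.text) [wordText appendString:symbol.text];
//     NSString *breakType = symbol.property.detectedBreak.type;
//     if ([breakType isEqual:kGTLRVision_GoogleCloudVisionV1p2beta1TextAnnotationDetectedBreak_Type_Space]) {
//       [wordText appendString:@" "];
//     }
//   }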
  6978. /**
  6979. * Response to a single file annotation request. A file may contain one or more
  6980. * images, which individually have their own responses.
  6981. */
  6982. @interface GTLRVision_GoogleCloudVisionV1p3beta1AnnotateFileResponse : GTLRObject
  6983. /** Information about the file for which this response is generated. */
  6984. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1InputConfig *inputConfig;
  6985. /** Individual responses to images found within the file. */
  6986. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1AnnotateImageResponse *> *responses;
  6987. @end
  6988. /**
  6989. * Response to an image annotation request.
  6990. */
  6991. @interface GTLRVision_GoogleCloudVisionV1p3beta1AnnotateImageResponse : GTLRObject
  6992. /**
  6993. * If present, contextual information is needed to understand where this image
  6994. * comes from.
  6995. */
  6996. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1ImageAnnotationContext *context;
  6997. /** If present, crop hints have completed successfully. */
  6998. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1CropHintsAnnotation *cropHintsAnnotation;
  6999. /**
  7000. * If set, represents the error message for the operation.
  7001. * Note that filled-in image annotations are guaranteed to be
  7002. * correct, even when `error` is set.
  7003. */
  7004. @property(nonatomic, strong, nullable) GTLRVision_Status *error;
  7005. /** If present, face detection has completed successfully. */
  7006. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation *> *faceAnnotations;
  7007. /**
  7008. * If present, text (OCR) detection or document (OCR) text detection has
  7009. * completed successfully.
  7010. * This annotation provides the structural hierarchy for the OCR detected
  7011. * text.
  7012. */
  7013. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotation *fullTextAnnotation;
  7014. /** If present, image properties were extracted successfully. */
  7015. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1ImageProperties *imagePropertiesAnnotation;
  7016. /** If present, label detection has completed successfully. */
  7017. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *> *labelAnnotations;
  7018. /** If present, landmark detection has completed successfully. */
  7019. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *> *landmarkAnnotations;
  7020. /**
  7021. * If present, localized object detection has completed successfully.
  7022. * This will be sorted descending by confidence score.
  7023. */
  7024. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1LocalizedObjectAnnotation *> *localizedObjectAnnotations;
  7025. /** If present, logo detection has completed successfully. */
  7026. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *> *logoAnnotations;
  7027. /** If present, product search has completed successfully. */
  7028. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResults *productSearchResults;
  7029. /** If present, safe-search annotation has completed successfully. */
  7030. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation *safeSearchAnnotation;
  7031. /** If present, text (OCR) detection has completed successfully. */
  7032. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *> *textAnnotations;
  7033. /** If present, web detection has completed successfully. */
  7034. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1WebDetection *webDetection;
  7035. @end
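// Example (illustrative sketch, not part of the generated interface): handling
// a single v1p3beta1 image response. `response` is an assumed
// GTLRVision_GoogleCloudVisionV1p3beta1AnnotateImageResponse local variable.
//
//   if (response.error != nil) {
//     // A per-image error; any annotations that were filled in remain valid.
//   } else {
//     for (GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation *label in response.labelAnnotations) {
//       NSLog(@"Label %@ (%.2f)", label.descriptionProperty, label.score.floatValue);
//     }
//   }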
  7036. /**
  7037. * The response for a single offline file annotation request.
  7038. */
  7039. @interface GTLRVision_GoogleCloudVisionV1p3beta1AsyncAnnotateFileResponse : GTLRObject
  7040. /** The output location and metadata from AsyncAnnotateFileRequest. */
  7041. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig *outputConfig;
  7042. @end
  7043. /**
  7044. * Response to an async batch file annotation request.
  7045. */
  7046. @interface GTLRVision_GoogleCloudVisionV1p3beta1AsyncBatchAnnotateFilesResponse : GTLRObject
  7047. /**
  7048. * The list of file annotation responses, one for each request in
  7049. * AsyncBatchAnnotateFilesRequest.
  7050. */
  7051. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1AsyncAnnotateFileResponse *> *responses;
  7052. @end
  7053. /**
  7054. * Metadata for the batch operations such as the current state.
  7055. * This is included in the `metadata` field of the `Operation` returned by the
  7056. * `GetOperation` call of the `google::longrunning::Operations` service.
  7057. */
  7058. @interface GTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata : GTLRObject
  7059. /**
  7060. * The time when the batch request is finished and
  7061. * google.longrunning.Operation.done is set to true.
  7062. */
  7063. @property(nonatomic, strong, nullable) GTLRDateTime *endTime;
  7064. /**
  7065. * The current state of the batch operation.
  7066. *
  7067. * Likely values:
  7068. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Cancelled
7069. * The request is done after the longrunning.Operations.CancelOperation has
7070. * been called by the user. Any records that were processed before the
7071. * cancel command are output as specified in the request. (Value:
7072. * "CANCELLED")
  7074. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Failed
  7075. * The request is done and no item has been successfully processed.
  7076. * (Value: "FAILED")
  7077. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Processing
  7078. * Request is actively being processed. (Value: "PROCESSING")
  7079. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_StateUnspecified
  7080. * Invalid. (Value: "STATE_UNSPECIFIED")
  7081. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Successful
  7082. * The request is done and at least one item has been successfully
  7083. * processed. (Value: "SUCCESSFUL")
  7084. */
  7085. @property(nonatomic, copy, nullable) NSString *state;
  7086. /** The time when the batch request was submitted to the server. */
  7087. @property(nonatomic, strong, nullable) GTLRDateTime *submitTime;
  7088. @end
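// Example (illustrative sketch, not part of the generated interface): reading
// the state of a batch operation's metadata. `metadata` is an assumed
// GTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata local variable.
//
//   if ([metadata.state isEqual:kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Successful]) {
//     // At least one item was processed; output is available at the destination.
//   } else if ([metadata.state isEqual:kGTLRVision_GoogleCloudVisionV1p3beta1BatchOperationMetadata_State_Failed]) {
//     // No items were processed successfully.
//   }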
  7089. /**
  7090. * Logical element on the page.
  7091. */
  7092. @interface GTLRVision_GoogleCloudVisionV1p3beta1Block : GTLRObject
  7093. /**
7094. * Detected block type (text, image, etc.) for this block.
  7095. *
  7096. * Likely values:
  7097. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Barcode
  7098. * Barcode block. (Value: "BARCODE")
  7099. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Picture
  7100. * Image block. (Value: "PICTURE")
  7101. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Ruler
  7102. * Horizontal/vertical line box. (Value: "RULER")
  7103. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Table Table
  7104. * block. (Value: "TABLE")
  7105. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Text Regular
  7106. * text block. (Value: "TEXT")
  7107. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1Block_BlockType_Unknown
  7108. * Unknown block type. (Value: "UNKNOWN")
  7109. */
  7110. @property(nonatomic, copy, nullable) NSString *blockType;
  7111. /**
  7112. * The bounding box for the block.
  7113. * The vertices are in the order of top-left, top-right, bottom-right,
  7114. * bottom-left. When a rotation of the bounding box is detected the rotation
  7115. * is represented as around the top-left corner as defined when the text is
  7116. * read in the 'natural' orientation.
  7117. * For example:
  7118. * * when the text is horizontal it might look like:
  7119. * 0----1
  7120. * | |
  7121. * 3----2
  7122. * * when it's rotated 180 degrees around the top-left corner it becomes:
  7123. * 2----3
  7124. * | |
  7125. * 1----0
  7126. * and the vertex order will still be (0, 1, 2, 3).
  7127. */
  7128. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingBox;
  7129. /**
  7130. * Confidence of the OCR results on the block. Range [0, 1].
  7131. *
  7132. * Uses NSNumber of floatValue.
  7133. */
  7134. @property(nonatomic, strong, nullable) NSNumber *confidence;
7135. /** List of paragraphs in this block (if this block is of type text). */
  7136. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Paragraph *> *paragraphs;
  7137. /** Additional information detected for the block. */
  7138. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
  7139. @end
  7140. /**
  7141. * A bounding polygon for the detected image annotation.
  7142. */
  7143. @interface GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly : GTLRObject
  7144. /** The bounding polygon normalized vertices. */
  7145. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1NormalizedVertex *> *normalizedVertices;
  7146. /** The bounding polygon vertices. */
  7147. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Vertex *> *vertices;
  7148. @end
  7149. /**
  7150. * Color information consists of RGB channels, score, and the fraction of
  7151. * the image that the color occupies in the image.
  7152. */
  7153. @interface GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo : GTLRObject
  7154. /** RGB components of the color. */
  7155. @property(nonatomic, strong, nullable) GTLRVision_Color *color;
  7156. /**
  7157. * The fraction of pixels the color occupies in the image.
  7158. * Value in range [0, 1].
  7159. *
  7160. * Uses NSNumber of floatValue.
  7161. */
  7162. @property(nonatomic, strong, nullable) NSNumber *pixelFraction;
  7163. /**
  7164. * Image-specific score for this color. Value in range [0, 1].
  7165. *
  7166. * Uses NSNumber of floatValue.
  7167. */
  7168. @property(nonatomic, strong, nullable) NSNumber *score;
  7169. @end
  7170. /**
  7171. * Single crop hint that is used to generate a new crop when serving an image.
  7172. */
  7173. @interface GTLRVision_GoogleCloudVisionV1p3beta1CropHint : GTLRObject
  7174. /**
  7175. * The bounding polygon for the crop region. The coordinates of the bounding
  7176. * box are in the original image's scale.
  7177. */
  7178. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
  7179. /**
  7180. * Confidence of this being a salient region. Range [0, 1].
  7181. *
  7182. * Uses NSNumber of floatValue.
  7183. */
  7184. @property(nonatomic, strong, nullable) NSNumber *confidence;
  7185. /**
  7186. * Fraction of importance of this salient region with respect to the original
  7187. * image.
  7188. *
  7189. * Uses NSNumber of floatValue.
  7190. */
  7191. @property(nonatomic, strong, nullable) NSNumber *importanceFraction;
  7192. @end
  7193. /**
  7194. * Set of crop hints that are used to generate new crops when serving images.
  7195. */
  7196. @interface GTLRVision_GoogleCloudVisionV1p3beta1CropHintsAnnotation : GTLRObject
  7197. /** Crop hint results. */
  7198. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1CropHint *> *cropHints;
  7199. @end
  7200. /**
  7201. * Set of dominant colors and their corresponding scores.
  7202. */
  7203. @interface GTLRVision_GoogleCloudVisionV1p3beta1DominantColorsAnnotation : GTLRObject
  7204. /** RGB color values with their score and pixel fraction. */
  7205. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo *> *colors;
  7206. @end
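// Example (illustrative sketch, not part of the generated interface): picking
// the highest-scoring dominant color. `annotation` is an assumed
// GTLRVision_GoogleCloudVisionV1p3beta1DominantColorsAnnotation local variable.
//
//   GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo *best = nil;
//   for (GTLRVision_GoogleCloudVisionV1p3beta1ColorInfo *info in annotation.colors) {
//     if (best == nil || info.score.floatValue > best.score.floatValue) {
//       best = info;
//     }
//   }
//   // best.color (if non-nil) holds the RGB components of the dominant color.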
  7207. /**
  7208. * Set of detected entity features.
  7209. */
  7210. @interface GTLRVision_GoogleCloudVisionV1p3beta1EntityAnnotation : GTLRObject
  7211. /**
  7212. * Image region to which this entity belongs. Not produced
  7213. * for `LABEL_DETECTION` features.
  7214. */
  7215. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
  7216. /**
  7217. * **Deprecated. Use `score` instead.**
  7218. * The accuracy of the entity detection in an image.
  7219. * For example, for an image in which the "Eiffel Tower" entity is detected,
  7220. * this field represents the confidence that there is a tower in the query
  7221. * image. Range [0, 1].
  7222. *
  7223. * Uses NSNumber of floatValue.
  7224. */
  7225. @property(nonatomic, strong, nullable) NSNumber *confidence;
  7226. /**
  7227. * Entity textual description, expressed in its `locale` language.
  7228. *
  7229. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  7230. */
  7231. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  7232. /**
  7233. * The language code for the locale in which the entity textual
  7234. * `description` is expressed.
  7235. */
  7236. @property(nonatomic, copy, nullable) NSString *locale;
  7237. /**
  7238. * The location information for the detected entity. Multiple
  7239. * `LocationInfo` elements can be present because one location may
  7240. * indicate the location of the scene in the image, and another location
  7241. * may indicate the location of the place where the image was taken.
  7242. * Location information is usually present for landmarks.
  7243. */
  7244. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1LocationInfo *> *locations;
  7245. /**
  7246. * Opaque entity ID. Some IDs may be available in
  7247. * [Google Knowledge Graph Search
  7248. * API](https://developers.google.com/knowledge-graph/).
  7249. */
  7250. @property(nonatomic, copy, nullable) NSString *mid;
  7251. /**
  7252. * Some entities may have optional user-supplied `Property` (name/value)
7253. * fields, such as a score or string that qualifies the entity.
  7254. */
  7255. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Property *> *properties;
  7256. /**
  7257. * Overall score of the result. Range [0, 1].
  7258. *
  7259. * Uses NSNumber of floatValue.
  7260. */
  7261. @property(nonatomic, strong, nullable) NSNumber *score;
  7262. /**
  7263. * The relevancy of the ICA (Image Content Annotation) label to the
  7264. * image. For example, the relevancy of "tower" is likely higher to an image
  7265. * containing the detected "Eiffel Tower" than to an image containing a
  7266. * detected distant towering building, even though the confidence that
  7267. * there is a tower in each image may be the same. Range [0, 1].
  7268. *
  7269. * Uses NSNumber of floatValue.
  7270. */
  7271. @property(nonatomic, strong, nullable) NSNumber *topicality;
  7272. @end
  7273. /**
  7274. * A face annotation object contains the results of face detection.
  7275. */
  7276. @interface GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation : GTLRObject
  7277. /**
  7278. * Anger likelihood.
  7279. *
  7280. * Likely values:
  7281. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Likely
  7282. * It is likely that the image belongs to the specified vertical. (Value:
  7283. * "LIKELY")
  7284. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Possible
  7285. * It is possible that the image belongs to the specified vertical.
  7286. * (Value: "POSSIBLE")
  7287. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Unknown
  7288. * Unknown likelihood. (Value: "UNKNOWN")
  7289. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_Unlikely
  7290. * It is unlikely that the image belongs to the specified vertical.
  7291. * (Value: "UNLIKELY")
  7292. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_VeryLikely
  7293. * It is very likely that the image belongs to the specified vertical.
  7294. * (Value: "VERY_LIKELY")
  7295. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_AngerLikelihood_VeryUnlikely
  7296. * It is very unlikely that the image belongs to the specified vertical.
  7297. * (Value: "VERY_UNLIKELY")
  7298. */
  7299. @property(nonatomic, copy, nullable) NSString *angerLikelihood;
  7300. /**
  7301. * Blurred likelihood.
  7302. *
  7303. * Likely values:
  7304. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Likely
  7305. * It is likely that the image belongs to the specified vertical. (Value:
  7306. * "LIKELY")
  7307. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Possible
  7308. * It is possible that the image belongs to the specified vertical.
  7309. * (Value: "POSSIBLE")
  7310. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Unknown
  7311. * Unknown likelihood. (Value: "UNKNOWN")
  7312. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_Unlikely
  7313. * It is unlikely that the image belongs to the specified vertical.
  7314. * (Value: "UNLIKELY")
  7315. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_VeryLikely
  7316. * It is very likely that the image belongs to the specified vertical.
  7317. * (Value: "VERY_LIKELY")
  7318. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_BlurredLikelihood_VeryUnlikely
  7319. * It is very unlikely that the image belongs to the specified vertical.
  7320. * (Value: "VERY_UNLIKELY")
  7321. */
  7322. @property(nonatomic, copy, nullable) NSString *blurredLikelihood;
  7323. /**
  7324. * The bounding polygon around the face. The coordinates of the bounding box
  7325. * are in the original image's scale.
  7326. * The bounding box is computed to "frame" the face in accordance with human
  7327. * expectations. It is based on the landmarker results.
  7328. * Note that one or more x and/or y coordinates may not be generated in the
  7329. * `BoundingPoly` (the polygon will be unbounded) if only a partial face
  7330. * appears in the image to be annotated.
  7331. */
  7332. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
  7333. /**
  7334. * Detection confidence. Range [0, 1].
  7335. *
  7336. * Uses NSNumber of floatValue.
  7337. */
  7338. @property(nonatomic, strong, nullable) NSNumber *detectionConfidence;
  7339. /**
  7340. * The `fd_bounding_poly` bounding polygon is tighter than the
  7341. * `boundingPoly`, and encloses only the skin part of the face. Typically, it
  7342. * is used to eliminate the face from any image analysis that detects the
  7343. * "amount of skin" visible in an image. It is not based on the
  7344. * landmarker results, only on the initial face detection, hence
  7345. * the <code>fd</code> (face detection) prefix.
  7346. */
  7347. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *fdBoundingPoly;
  7348. /**
  7349. * Headwear likelihood.
  7350. *
  7351. * Likely values:
  7352. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Likely
  7353. * It is likely that the image belongs to the specified vertical. (Value:
  7354. * "LIKELY")
  7355. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Possible
  7356. * It is possible that the image belongs to the specified vertical.
  7357. * (Value: "POSSIBLE")
  7358. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Unknown
  7359. * Unknown likelihood. (Value: "UNKNOWN")
  7360. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_Unlikely
  7361. * It is unlikely that the image belongs to the specified vertical.
  7362. * (Value: "UNLIKELY")
  7363. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_VeryLikely
  7364. * It is very likely that the image belongs to the specified vertical.
  7365. * (Value: "VERY_LIKELY")
  7366. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_HeadwearLikelihood_VeryUnlikely
  7367. * It is very unlikely that the image belongs to the specified vertical.
  7368. * (Value: "VERY_UNLIKELY")
  7369. */
  7370. @property(nonatomic, copy, nullable) NSString *headwearLikelihood;
  7371. /**
  7372. * Joy likelihood.
  7373. *
  7374. * Likely values:
  7375. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Likely
  7376. * It is likely that the image belongs to the specified vertical. (Value:
  7377. * "LIKELY")
  7378. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Possible
  7379. * It is possible that the image belongs to the specified vertical.
  7380. * (Value: "POSSIBLE")
  7381. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Unknown
  7382. * Unknown likelihood. (Value: "UNKNOWN")
  7383. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Unlikely
  7384. * It is unlikely that the image belongs to the specified vertical.
  7385. * (Value: "UNLIKELY")
  7386. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryLikely
  7387. * It is very likely that the image belongs to the specified vertical.
  7388. * (Value: "VERY_LIKELY")
  7389. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryUnlikely
  7390. * It is very unlikely that the image belongs to the specified vertical.
  7391. * (Value: "VERY_UNLIKELY")
  7392. */
  7393. @property(nonatomic, copy, nullable) NSString *joyLikelihood;
  7394. /**
  7395. * Face landmarking confidence. Range [0, 1].
  7396. *
  7397. * Uses NSNumber of floatValue.
  7398. */
  7399. @property(nonatomic, strong, nullable) NSNumber *landmarkingConfidence;
  7400. /** Detected face landmarks. */
  7401. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark *> *landmarks;
  7402. /**
  7403. * Yaw angle, which indicates the leftward/rightward angle that the face is
  7404. * pointing relative to the vertical plane perpendicular to the image. Range
  7405. * [-180,180].
  7406. *
  7407. * Uses NSNumber of floatValue.
  7408. */
  7409. @property(nonatomic, strong, nullable) NSNumber *panAngle;
  7410. /**
  7411. * Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
  7412. * of the face relative to the image vertical about the axis perpendicular to
  7413. * the face. Range [-180,180].
  7414. *
  7415. * Uses NSNumber of floatValue.
  7416. */
  7417. @property(nonatomic, strong, nullable) NSNumber *rollAngle;
  7418. /**
  7419. * Sorrow likelihood.
  7420. *
  7421. * Likely values:
  7422. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Likely
  7423. * It is likely that the image belongs to the specified vertical. (Value:
  7424. * "LIKELY")
  7425. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Possible
  7426. * It is possible that the image belongs to the specified vertical.
  7427. * (Value: "POSSIBLE")
  7428. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Unknown
  7429. * Unknown likelihood. (Value: "UNKNOWN")
  7430. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_Unlikely
  7431. * It is unlikely that the image belongs to the specified vertical.
  7432. * (Value: "UNLIKELY")
  7433. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_VeryLikely
  7434. * It is very likely that the image belongs to the specified vertical.
  7435. * (Value: "VERY_LIKELY")
  7436. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SorrowLikelihood_VeryUnlikely
  7437. * It is very unlikely that the image belongs to the specified vertical.
  7438. * (Value: "VERY_UNLIKELY")
  7439. */
  7440. @property(nonatomic, copy, nullable) NSString *sorrowLikelihood;
  7441. /**
  7442. * Surprise likelihood.
  7443. *
  7444. * Likely values:
  7445. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Likely
  7446. * It is likely that the image belongs to the specified vertical. (Value:
  7447. * "LIKELY")
  7448. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Possible
  7449. * It is possible that the image belongs to the specified vertical.
  7450. * (Value: "POSSIBLE")
  7451. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Unknown
  7452. * Unknown likelihood. (Value: "UNKNOWN")
  7453. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_Unlikely
  7454. * It is unlikely that the image belongs to the specified vertical.
  7455. * (Value: "UNLIKELY")
  7456. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_VeryLikely
  7457. * It is very likely that the image belongs to the specified vertical.
  7458. * (Value: "VERY_LIKELY")
  7459. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_SurpriseLikelihood_VeryUnlikely
  7460. * It is very unlikely that the image belongs to the specified vertical.
  7461. * (Value: "VERY_UNLIKELY")
  7462. */
  7463. @property(nonatomic, copy, nullable) NSString *surpriseLikelihood;
  7464. /**
  7465. * Pitch angle, which indicates the upwards/downwards angle that the face is
  7466. * pointing relative to the image's horizontal plane. Range [-180,180].
  7467. *
  7468. * Uses NSNumber of floatValue.
  7469. */
  7470. @property(nonatomic, strong, nullable) NSNumber *tiltAngle;
  7471. /**
  7472. * Under-exposed likelihood.
  7473. *
  7474. * Likely values:
  7475. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Likely
  7476. * It is likely that the image belongs to the specified vertical. (Value:
  7477. * "LIKELY")
  7478. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Possible
  7479. * It is possible that the image belongs to the specified vertical.
  7480. * (Value: "POSSIBLE")
  7481. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Unknown
  7482. * Unknown likelihood. (Value: "UNKNOWN")
  7483. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_Unlikely
  7484. * It is unlikely that the image belongs to the specified vertical.
  7485. * (Value: "UNLIKELY")
  7486. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_VeryLikely
  7487. * It is very likely that the image belongs to the specified vertical.
  7488. * (Value: "VERY_LIKELY")
  7489. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_UnderExposedLikelihood_VeryUnlikely
  7490. * It is very unlikely that the image belongs to the specified vertical.
  7491. * (Value: "VERY_UNLIKELY")
  7492. */
  7493. @property(nonatomic, copy, nullable) NSString *underExposedLikelihood;
  7494. @end
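// Example (illustrative sketch, not part of the generated interface): reading a
// face annotation's joy likelihood. `face` is an assumed
// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation local variable.
//
//   BOOL likelySmiling =
//       [face.joyLikelihood isEqual:kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_Likely] ||
//       [face.joyLikelihood isEqual:kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation_JoyLikelihood_VeryLikely];
//   NSLog(@"Smiling: %d (detection confidence %.2f)",
//         likelySmiling, face.detectionConfidence.floatValue);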
  7495. /**
  7496. * A face-specific landmark (for example, a face feature).
  7497. */
  7498. @interface GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark : GTLRObject
  7499. /** Face landmark position. */
  7500. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1Position *position;
  7501. /**
  7502. * Face landmark type.
  7503. *
  7504. * Likely values:
  7505. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinGnathion
  7506. * Chin gnathion. (Value: "CHIN_GNATHION")
  7507. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinLeftGonion
  7508. * Chin left gonion. (Value: "CHIN_LEFT_GONION")
  7509. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ChinRightGonion
  7510. * Chin right gonion. (Value: "CHIN_RIGHT_GONION")
  7511. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_ForeheadGlabella
  7512. * Forehead glabella. (Value: "FOREHEAD_GLABELLA")
  7513. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEarTragion
  7514. * Left ear tragion. (Value: "LEFT_EAR_TRAGION")
  7515. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEye
  7516. * Left eye. (Value: "LEFT_EYE")
  7517. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeBottomBoundary
  7518. * Left eye, bottom boundary. (Value: "LEFT_EYE_BOTTOM_BOUNDARY")
  7519. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyebrowUpperMidpoint
  7520. * Left eyebrow, upper midpoint. (Value: "LEFT_EYEBROW_UPPER_MIDPOINT")
  7521. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeLeftCorner
  7522. * Left eye, left corner. (Value: "LEFT_EYE_LEFT_CORNER")
  7523. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyePupil
  7524. * Left eye pupil. (Value: "LEFT_EYE_PUPIL")
  7525. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeRightCorner
  7526. * Left eye, right corner. (Value: "LEFT_EYE_RIGHT_CORNER")
  7527. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftEyeTopBoundary
  7528. * Left eye, top boundary. (Value: "LEFT_EYE_TOP_BOUNDARY")
  7529. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftOfLeftEyebrow
  7530. * Left of left eyebrow. (Value: "LEFT_OF_LEFT_EYEBROW")
  7531. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LeftOfRightEyebrow
  7532. * Left of right eyebrow. (Value: "LEFT_OF_RIGHT_EYEBROW")
  7533. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_LowerLip
  7534. * Lower lip. (Value: "LOWER_LIP")
  7535. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MidpointBetweenEyes
  7536. * Midpoint between eyes. (Value: "MIDPOINT_BETWEEN_EYES")
  7537. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthCenter
  7538. * Mouth center. (Value: "MOUTH_CENTER")
  7539. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthLeft
  7540. * Mouth left. (Value: "MOUTH_LEFT")
  7541. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_MouthRight
  7542. * Mouth right. (Value: "MOUTH_RIGHT")
  7543. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomCenter
  7544. * Nose, bottom center. (Value: "NOSE_BOTTOM_CENTER")
  7545. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomLeft
  7546. * Nose, bottom left. (Value: "NOSE_BOTTOM_LEFT")
  7547. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseBottomRight
  7548. * Nose, bottom right. (Value: "NOSE_BOTTOM_RIGHT")
  7549. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseTip
  7550. * Nose tip. (Value: "NOSE_TIP")
  7551. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEarTragion
  7552. * Right ear tragion. (Value: "RIGHT_EAR_TRAGION")
  7553. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEye
  7554. * Right eye. (Value: "RIGHT_EYE")
  7555. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeBottomBoundary
  7556. * Right eye, bottom boundary. (Value: "RIGHT_EYE_BOTTOM_BOUNDARY")
  7557. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyebrowUpperMidpoint
  7558. * Right eyebrow, upper midpoint. (Value: "RIGHT_EYEBROW_UPPER_MIDPOINT")
  7559. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeLeftCorner
  7560. * Right eye, left corner. (Value: "RIGHT_EYE_LEFT_CORNER")
  7561. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyePupil
  7562. * Right eye pupil. (Value: "RIGHT_EYE_PUPIL")
  7563. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeRightCorner
  7564. * Right eye, right corner. (Value: "RIGHT_EYE_RIGHT_CORNER")
  7565. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightEyeTopBoundary
  7566. * Right eye, top boundary. (Value: "RIGHT_EYE_TOP_BOUNDARY")
  7567. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightOfLeftEyebrow
  7568. * Right of left eyebrow. (Value: "RIGHT_OF_LEFT_EYEBROW")
  7569. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_RightOfRightEyebrow
  7570. * Right of right eyebrow. (Value: "RIGHT_OF_RIGHT_EYEBROW")
  7571. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_UnknownLandmark
  7572. * Unknown face landmark detected. Should not be filled. (Value:
  7573. * "UNKNOWN_LANDMARK")
  7574. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_UpperLip
  7575. * Upper lip. (Value: "UPPER_LIP")
  7576. */
  7577. @property(nonatomic, copy, nullable) NSString *type;
  7578. @end
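// Example (illustrative sketch, not part of the generated interface): locating
// a specific face landmark by type. `face` is an assumed
// GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotation local variable.
//
//   GTLRVision_GoogleCloudVisionV1p3beta1Position *noseTip = nil;
//   for (GTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark *landmark in face.landmarks) {
//     if ([landmark.type isEqual:kGTLRVision_GoogleCloudVisionV1p3beta1FaceAnnotationLandmark_Type_NoseTip]) {
//       noseTip = landmark.position;
//       break;
//     }
//   }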
  7579. /**
  7580. * The Google Cloud Storage location where the output will be written to.
  7581. */
  7582. @interface GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination : GTLRObject
  7583. /**
  7584. * Google Cloud Storage URI where the results will be stored. Results will
  7585. * be in JSON format and preceded by its corresponding input URI. This field
  7586. * can either represent a single file, or a prefix for multiple outputs.
  7587. * Prefixes must end in a `/`.
  7588. * Examples:
  7589. * * File: gs://bucket-name/filename.json
  7590. * * Prefix: gs://bucket-name/prefix/here/
  7591. * * File: gs://bucket-name/prefix/here
  7592. * If multiple outputs, each response is still AnnotateFileResponse, each of
  7593. * which contains some subset of the full list of AnnotateImageResponse.
  7594. * Multiple outputs can happen if, for example, the output JSON is too large
  7595. * and overflows into multiple sharded files.
  7596. */
  7597. @property(nonatomic, copy, nullable) NSString *uri;
  7598. @end
  7599. /**
  7600. * The Google Cloud Storage location where the input will be read from.
  7601. */
  7602. @interface GTLRVision_GoogleCloudVisionV1p3beta1GcsSource : GTLRObject
  7603. /**
  7604. * Google Cloud Storage URI for the input file. This must only be a
  7605. * Google Cloud Storage object. Wildcards are not currently supported.
  7606. */
  7607. @property(nonatomic, copy, nullable) NSString *uri;
  7608. @end
  7609. /**
  7610. * If an image was produced from a file (e.g. a PDF), this message gives
  7611. * information about the source of that image.
  7612. */
  7613. @interface GTLRVision_GoogleCloudVisionV1p3beta1ImageAnnotationContext : GTLRObject
  7614. /**
  7615. * If the file was a PDF or TIFF, this field gives the page number within
  7616. * the file used to produce the image.
  7617. *
  7618. * Uses NSNumber of intValue.
  7619. */
  7620. @property(nonatomic, strong, nullable) NSNumber *pageNumber;
  7621. /** The URI of the file used to produce the image. */
  7622. @property(nonatomic, copy, nullable) NSString *uri;
  7623. @end
  7624. /**
  7625. * Stores image properties, such as dominant colors.
  7626. */
  7627. @interface GTLRVision_GoogleCloudVisionV1p3beta1ImageProperties : GTLRObject
  7628. /** If present, dominant colors completed successfully. */
  7629. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1DominantColorsAnnotation *dominantColors;
  7630. @end
  7631. /**
  7632. * Response message for the `ImportProductSets` method.
  7633. * This message is returned by the
  7634. * google.longrunning.Operations.GetOperation method in the returned
  7635. * google.longrunning.Operation.response field.
  7636. */
  7637. @interface GTLRVision_GoogleCloudVisionV1p3beta1ImportProductSetsResponse : GTLRObject
  7638. /** The list of reference_images that are imported successfully. */
  7639. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ReferenceImage *> *referenceImages;
  7640. /**
  7641. * The rpc status for each ImportProductSet request, including both successes
  7642. * and errors.
7643. * The number of statuses here matches the number of lines in the CSV file,
7644. * and statuses[i] stores the success or failure status of processing the i-th
7645. * line of the CSV, starting from line 0.
  7646. */
  7647. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Status *> *statuses;
  7648. @end
  7649. /**
  7650. * The desired input location and metadata.
  7651. */
  7652. @interface GTLRVision_GoogleCloudVisionV1p3beta1InputConfig : GTLRObject
  7653. /** The Google Cloud Storage location to read the input from. */
  7654. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1GcsSource *gcsSource;
  7655. /**
  7656. * The type of the file. Currently only "application/pdf" and "image/tiff"
  7657. * are supported. Wildcards are not supported.
  7658. */
  7659. @property(nonatomic, copy, nullable) NSString *mimeType;
  7660. @end
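// Example (illustrative sketch, not part of the generated API surface):
// configuring a GcsSource/InputConfig pair for an asynchronous PDF
// annotation request. The bucket and object names are placeholders.
//
//   GTLRVision_GoogleCloudVisionV1p3beta1GcsSource *gcsSource =
//       [GTLRVision_GoogleCloudVisionV1p3beta1GcsSource object];
//   gcsSource.uri = @"gs://example-bucket/documents/invoice.pdf";
//   GTLRVision_GoogleCloudVisionV1p3beta1InputConfig *inputConfig =
//       [GTLRVision_GoogleCloudVisionV1p3beta1InputConfig object];
//   inputConfig.gcsSource = gcsSource;
//   inputConfig.mimeType = @"application/pdf";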
  7661. /**
  7662. * Set of detected objects with bounding boxes.
  7663. */
  7664. @interface GTLRVision_GoogleCloudVisionV1p3beta1LocalizedObjectAnnotation : GTLRObject
  7665. /** Image region to which this object belongs. This must be populated. */
  7666. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
  7667. /**
  7668. * The BCP-47 language code, such as "en-US" or "sr-Latn". For more
  7669. * information, see
  7670. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  7671. */
  7672. @property(nonatomic, copy, nullable) NSString *languageCode;
  7673. /** Object ID that should align with EntityAnnotation mid. */
  7674. @property(nonatomic, copy, nullable) NSString *mid;
  7675. /** Object name, expressed in its `language_code` language. */
  7676. @property(nonatomic, copy, nullable) NSString *name;
  7677. /**
  7678. * Score of the result. Range [0, 1].
  7679. *
  7680. * Uses NSNumber of floatValue.
  7681. */
  7682. @property(nonatomic, strong, nullable) NSNumber *score;
  7683. @end
  7684. /**
  7685. * Detected entity location information.
  7686. */
  7687. @interface GTLRVision_GoogleCloudVisionV1p3beta1LocationInfo : GTLRObject
  7688. /** lat/long location coordinates. */
  7689. @property(nonatomic, strong, nullable) GTLRVision_LatLng *latLng;
  7690. @end
  7691. /**
  7692. * A vertex represents a 2D point in the image.
  7693. * NOTE: the normalized vertex coordinates are relative to the original image
  7694. * and range from 0 to 1.
  7695. */
  7696. @interface GTLRVision_GoogleCloudVisionV1p3beta1NormalizedVertex : GTLRObject
  7697. /**
  7698. * X coordinate.
  7699. *
  7700. * Uses NSNumber of floatValue.
  7701. */
  7702. @property(nonatomic, strong, nullable) NSNumber *x;
  7703. /**
  7704. * Y coordinate.
  7705. *
  7706. * Uses NSNumber of floatValue.
  7707. */
  7708. @property(nonatomic, strong, nullable) NSNumber *y;
  7709. @end
  7710. /**
  7711. * Contains metadata for the BatchAnnotateImages operation.
  7712. */
  7713. @interface GTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata : GTLRObject
  7714. /** The time when the batch request was received. */
  7715. @property(nonatomic, strong, nullable) GTLRDateTime *createTime;
  7716. /**
  7717. * Current state of the batch operation.
  7718. *
  7719. * Likely values:
  7720. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Cancelled
  7721. * The batch processing was cancelled. (Value: "CANCELLED")
  7722. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Created
  7723. * Request is received. (Value: "CREATED")
  7724. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Done
  7725. * The batch processing is done. (Value: "DONE")
  7726. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_Running
  7727. * Request is actively being processed. (Value: "RUNNING")
  7728. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1OperationMetadata_State_StateUnspecified
  7729. * Invalid. (Value: "STATE_UNSPECIFIED")
  7730. */
  7731. @property(nonatomic, copy, nullable) NSString *state;
  7732. /** The time when the operation result was last updated. */
  7733. @property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
  7734. @end
  7735. /**
  7736. * The desired output location and metadata.
  7737. */
  7738. @interface GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig : GTLRObject
  7739. /**
  7740. * The max number of response protos to put into each output JSON file on
  7741. * Google Cloud Storage.
  7742. * The valid range is [1, 100]. If not specified, the default value is 20.
  7743. * For example, for one pdf file with 100 pages, 100 response protos will
  7744. * be generated. If `batch_size` = 20, then 5 json files each
  7745. * containing 20 response protos will be written under the prefix
  7746. * `gcs_destination`.`uri`.
  7747. * Currently, batch_size only applies to GcsDestination, with potential future
  7748. * support for other output configurations.
  7749. *
  7750. * Uses NSNumber of intValue.
  7751. */
  7752. @property(nonatomic, strong, nullable) NSNumber *batchSize;
  7753. /** The Google Cloud Storage location to write the output(s) to. */
  7754. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination *gcsDestination;
  7755. @end
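// Example (illustrative sketch, not part of the generated API surface):
// writing results to a GCS prefix with `batchSize` = 20. For a 100-page PDF
// this would yield 5 sharded JSON files under the prefix, each holding 20
// response protos. The bucket name is a placeholder.
//
//   GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination *destination =
//       [GTLRVision_GoogleCloudVisionV1p3beta1GcsDestination object];
//   destination.uri = @"gs://example-bucket/vision-output/";  // prefix must end in '/'
//   GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig *outputConfig =
//       [GTLRVision_GoogleCloudVisionV1p3beta1OutputConfig object];
//   outputConfig.gcsDestination = destination;
//   outputConfig.batchSize = @20;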
  7756. /**
  7757. * Detected page from OCR.
  7758. */
  7759. @interface GTLRVision_GoogleCloudVisionV1p3beta1Page : GTLRObject
  7760. /** List of blocks of text, images etc on this page. */
  7761. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Block *> *blocks;
  7762. /**
  7763. * Confidence of the OCR results on the page. Range [0, 1].
  7764. *
  7765. * Uses NSNumber of floatValue.
  7766. */
  7767. @property(nonatomic, strong, nullable) NSNumber *confidence;
  7768. /**
  7769. * Page height. For PDFs the unit is points. For images (including
  7770. * TIFFs) the unit is pixels.
  7771. *
  7772. * Uses NSNumber of intValue.
  7773. */
  7774. @property(nonatomic, strong, nullable) NSNumber *height;
  7775. /** Additional information detected on the page. */
  7776. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
  7777. /**
  7778. * Page width. For PDFs the unit is points. For images (including
  7779. * TIFFs) the unit is pixels.
  7780. *
  7781. * Uses NSNumber of intValue.
  7782. */
  7783. @property(nonatomic, strong, nullable) NSNumber *width;
  7784. @end
  7785. /**
  7786. * Structural unit of text representing a number of words in certain order.
  7787. */
  7788. @interface GTLRVision_GoogleCloudVisionV1p3beta1Paragraph : GTLRObject
  7789. /**
  7790. * The bounding box for the paragraph.
  7791. * The vertices are in the order of top-left, top-right, bottom-right,
  7792. * bottom-left. When a rotation of the bounding box is detected the rotation
  7793. * is represented as around the top-left corner as defined when the text is
  7794. * read in the 'natural' orientation.
  7795. * For example:
  7796. * * when the text is horizontal it might look like:
  7797. * 0----1
  7798. * | |
  7799. * 3----2
  7800. * * when it's rotated 180 degrees around the top-left corner it becomes:
  7801. * 2----3
  7802. * | |
  7803. * 1----0
  7804. * and the vertex order will still be (0, 1, 2, 3).
  7805. */
  7806. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingBox;
  7807. /**
  7808. * Confidence of the OCR results for the paragraph. Range [0, 1].
  7809. *
  7810. * Uses NSNumber of floatValue.
  7811. */
  7812. @property(nonatomic, strong, nullable) NSNumber *confidence;
  7813. /** Additional information detected for the paragraph. */
  7814. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
  7815. /** List of words in this paragraph. */
  7816. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Word *> *words;
  7817. @end
  7818. /**
  7819. * A 3D position in the image, used primarily for Face detection landmarks.
  7820. * A valid Position must have both x and y coordinates.
  7821. * The position coordinates are in the same scale as the original image.
  7822. */
  7823. @interface GTLRVision_GoogleCloudVisionV1p3beta1Position : GTLRObject
  7824. /**
  7825. * X coordinate.
  7826. *
  7827. * Uses NSNumber of floatValue.
  7828. */
  7829. @property(nonatomic, strong, nullable) NSNumber *x;
  7830. /**
  7831. * Y coordinate.
  7832. *
  7833. * Uses NSNumber of floatValue.
  7834. */
  7835. @property(nonatomic, strong, nullable) NSNumber *y;
  7836. /**
  7837. * Z coordinate (or depth).
  7838. *
  7839. * Uses NSNumber of floatValue.
  7840. */
  7841. @property(nonatomic, strong, nullable) NSNumber *z;
  7842. @end
  7843. /**
  7844. * A Product contains ReferenceImages.
  7845. */
  7846. @interface GTLRVision_GoogleCloudVisionV1p3beta1Product : GTLRObject
  7847. /**
  7848. * User-provided metadata to be stored with this product. Must be at most 4096
  7849. * characters long.
  7850. *
  7851. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  7852. */
  7853. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  7854. /**
  7855. * The user-provided name for this Product. Must not be empty. Must be at most
  7856. * 4096 characters long.
  7857. */
  7858. @property(nonatomic, copy, nullable) NSString *displayName;
  7859. /**
  7860. * The resource name of the product.
  7861. * Format is:
  7862. * `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
  7863. * This field is ignored when creating a product.
  7864. */
  7865. @property(nonatomic, copy, nullable) NSString *name;
  7866. /**
  7867. * The category for the product identified by the reference image. This should
  7868. * be either "homegoods", "apparel", or "toys".
  7869. * This field is immutable.
  7870. */
  7871. @property(nonatomic, copy, nullable) NSString *productCategory;
  7872. /**
  7873. * Key-value pairs that can be attached to a product. At query time,
  7874. * constraints can be specified based on the product_labels.
  7875. * Note that integer values can be provided as strings, e.g. "1199". Only
  7876. * strings with integer values can match a range-based restriction which is
  7877. * to be supported soon.
  7878. * Multiple values can be assigned to the same key. One product may have up to
  7879. * 100 product_labels.
  7880. */
  7881. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ProductKeyValue *> *productLabels;
  7882. @end
  7883. /**
  7884. * A product label represented as a key-value pair.
  7885. */
  7886. @interface GTLRVision_GoogleCloudVisionV1p3beta1ProductKeyValue : GTLRObject
  7887. /**
  7888. * The key of the label attached to the product. Cannot be empty and cannot
  7889. * exceed 128 bytes.
  7890. */
  7891. @property(nonatomic, copy, nullable) NSString *key;
  7892. /**
  7893. * The value of the label attached to the product. Cannot be empty and
  7894. * cannot exceed 128 bytes.
  7895. */
  7896. @property(nonatomic, copy, nullable) NSString *value;
  7897. @end
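// Example (illustrative sketch, not part of the generated API surface):
// building a Product with one product label. The display name, category,
// and label values are placeholders.
//
//   GTLRVision_GoogleCloudVisionV1p3beta1ProductKeyValue *label =
//       [GTLRVision_GoogleCloudVisionV1p3beta1ProductKeyValue object];
//   label.key = @"color";
//   label.value = @"blue";
//   GTLRVision_GoogleCloudVisionV1p3beta1Product *product =
//       [GTLRVision_GoogleCloudVisionV1p3beta1Product object];
//   product.displayName = @"Blue running shoe";
//   product.productCategory = @"apparel";
//   product.productLabels = @[ label ];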
  7898. /**
  7899. * Results for a product search request.
  7900. */
  7901. @interface GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResults : GTLRObject
  7902. /**
  7903. * Timestamp of the index which provided these results. Changes made after
  7904. * this time are not reflected in the current results.
  7905. */
  7906. @property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
  7907. /**
  7908. * List of results grouped by products detected in the query image. Each entry
  7909. * corresponds to one bounding polygon in the query image, and contains the
  7910. * matching products specific to that region. There may be duplicate product
  7911. * matches in the union of all the per-product results.
  7912. */
  7913. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsGroupedResult *> *productGroupedResults;
  7914. /** List of results, one for each product match. */
  7915. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult *> *results;
  7916. @end
  7917. /**
  7918. * Information about the products similar to a single product in a query
  7919. * image.
  7920. */
  7921. @interface GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsGroupedResult : GTLRObject
  7922. /** The bounding polygon around the product detected in the query image. */
  7923. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingPoly;
  7924. /** List of results, one for each product match. */
  7925. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult *> *results;
  7926. @end
  7927. /**
  7928. * Information about a product.
  7929. */
  7930. @interface GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult : GTLRObject
  7931. /**
  7932. * The resource name of the image from the product that is the closest match
  7933. * to the query.
  7934. */
  7935. @property(nonatomic, copy, nullable) NSString *image;
  7936. /** The Product. */
  7937. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1Product *product;
  7938. /**
  7939. * A confidence level on the match, ranging from 0 (no confidence) to
  7940. * 1 (full confidence).
  7941. *
  7942. * Uses NSNumber of floatValue.
  7943. */
  7944. @property(nonatomic, strong, nullable) NSNumber *score;
  7945. @end
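// Example (illustrative sketch, not part of the generated API surface):
// reading grouped product search results. `searchResults` is assumed to be a
// GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResults obtained from an
// annotate response.
//
//   for (GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsGroupedResult
//            *group in searchResults.productGroupedResults) {
//     for (GTLRVision_GoogleCloudVisionV1p3beta1ProductSearchResultsResult
//              *result in group.results) {
//       NSLog(@"%@ matched with score %f",
//             result.product.displayName, result.score.floatValue);
//     }
//   }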
  7946. /**
  7947. * A `Property` consists of a user-supplied name/value pair.
  7948. */
  7949. @interface GTLRVision_GoogleCloudVisionV1p3beta1Property : GTLRObject
  7950. /** Name of the property. */
  7951. @property(nonatomic, copy, nullable) NSString *name;
  7952. /**
  7953. * Value of numeric properties.
  7954. *
  7955. * Uses NSNumber of unsignedLongLongValue.
  7956. */
  7957. @property(nonatomic, strong, nullable) NSNumber *uint64Value;
  7958. /** Value of the property. */
  7959. @property(nonatomic, copy, nullable) NSString *value;
  7960. @end
  7961. /**
  7962. * A `ReferenceImage` represents a product image and its associated metadata,
  7963. * such as bounding boxes.
  7964. */
  7965. @interface GTLRVision_GoogleCloudVisionV1p3beta1ReferenceImage : GTLRObject
  7966. /**
  7967. * Bounding polygons around the areas of interest in the reference image.
  7968. * Optional. If this field is empty, the system will try to detect regions of
  7969. * interest. At most 10 bounding polygons will be used.
  7970. * The provided shape is converted into a non-rotated rectangle. Once
  7971. * converted, the small edge of the rectangle must be greater than or equal
  7972. * to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
  7973. * is not).
  7974. */
  7975. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *> *boundingPolys;
  7976. /**
  7977. * The resource name of the reference image.
  7978. * Format is:
  7979. * `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
  7980. * This field is ignored when creating a reference image.
  7981. */
  7982. @property(nonatomic, copy, nullable) NSString *name;
  7983. /**
  7984. * The Google Cloud Storage URI of the reference image.
  7985. * The URI must start with `gs://`.
  7986. * Required.
  7987. */
  7988. @property(nonatomic, copy, nullable) NSString *uri;
  7989. @end
  7990. /**
  7991. * Set of features pertaining to the image, computed by computer vision
  7992. * methods over safe-search verticals (for example, adult, spoof, medical,
  7993. * violence).
  7994. */
  7995. @interface GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation : GTLRObject
  7996. /**
  7997. * Represents the adult content likelihood for the image. Adult content may
  7998. * contain elements such as nudity, pornographic images or cartoons, or
  7999. * sexual activities.
  8000. *
  8001. * Likely values:
  8002. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Likely
  8003. * It is likely that the image belongs to the specified vertical. (Value:
  8004. * "LIKELY")
  8005. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Possible
  8006. * It is possible that the image belongs to the specified vertical.
  8007. * (Value: "POSSIBLE")
  8008. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Unknown
  8009. * Unknown likelihood. (Value: "UNKNOWN")
  8010. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Unlikely
  8011. * It is unlikely that the image belongs to the specified vertical.
  8012. * (Value: "UNLIKELY")
  8013. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_VeryLikely
  8014. * It is very likely that the image belongs to the specified vertical.
  8015. * (Value: "VERY_LIKELY")
  8016. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_VeryUnlikely
  8017. * It is very unlikely that the image belongs to the specified vertical.
  8018. * (Value: "VERY_UNLIKELY")
  8019. */
  8020. @property(nonatomic, copy, nullable) NSString *adult;
  8021. /**
  8022. * Likelihood that this is a medical image.
  8023. *
  8024. * Likely values:
  8025. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Likely
  8026. * It is likely that the image belongs to the specified vertical. (Value:
  8027. * "LIKELY")
  8028. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Possible
  8029. * It is possible that the image belongs to the specified vertical.
  8030. * (Value: "POSSIBLE")
  8031. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Unknown
  8032. * Unknown likelihood. (Value: "UNKNOWN")
  8033. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_Unlikely
  8034. * It is unlikely that the image belongs to the specified vertical.
  8035. * (Value: "UNLIKELY")
  8036. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_VeryLikely
  8037. * It is very likely that the image belongs to the specified vertical.
  8038. * (Value: "VERY_LIKELY")
  8039. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Medical_VeryUnlikely
  8040. * It is very unlikely that the image belongs to the specified vertical.
  8041. * (Value: "VERY_UNLIKELY")
  8042. */
  8043. @property(nonatomic, copy, nullable) NSString *medical;
  8044. /**
  8045. * Likelihood that the request image contains racy content. Racy content may
  8046. * include (but is not limited to) skimpy or sheer clothing, strategically
  8047. * covered nudity, lewd or provocative poses, or close-ups of sensitive
  8048. * body areas.
  8049. *
  8050. * Likely values:
  8051. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Likely
  8052. * It is likely that the image belongs to the specified vertical. (Value:
  8053. * "LIKELY")
  8054. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Possible
  8055. * It is possible that the image belongs to the specified vertical.
  8056. * (Value: "POSSIBLE")
  8057. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Unknown
  8058. * Unknown likelihood. (Value: "UNKNOWN")
  8059. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_Unlikely
  8060. * It is unlikely that the image belongs to the specified vertical.
  8061. * (Value: "UNLIKELY")
  8062. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_VeryLikely
  8063. * It is very likely that the image belongs to the specified vertical.
  8064. * (Value: "VERY_LIKELY")
  8065. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Racy_VeryUnlikely
  8066. * It is very unlikely that the image belongs to the specified vertical.
  8067. * (Value: "VERY_UNLIKELY")
  8068. */
  8069. @property(nonatomic, copy, nullable) NSString *racy;
  8070. /**
 * Spoof likelihood. The likelihood that a modification
  8072. * was made to the image's canonical version to make it appear
  8073. * funny or offensive.
  8074. *
  8075. * Likely values:
  8076. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Likely
  8077. * It is likely that the image belongs to the specified vertical. (Value:
  8078. * "LIKELY")
  8079. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Possible
  8080. * It is possible that the image belongs to the specified vertical.
  8081. * (Value: "POSSIBLE")
  8082. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Unknown
  8083. * Unknown likelihood. (Value: "UNKNOWN")
  8084. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_Unlikely
  8085. * It is unlikely that the image belongs to the specified vertical.
  8086. * (Value: "UNLIKELY")
  8087. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_VeryLikely
  8088. * It is very likely that the image belongs to the specified vertical.
  8089. * (Value: "VERY_LIKELY")
  8090. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Spoof_VeryUnlikely
  8091. * It is very unlikely that the image belongs to the specified vertical.
  8092. * (Value: "VERY_UNLIKELY")
  8093. */
  8094. @property(nonatomic, copy, nullable) NSString *spoof;
  8095. /**
  8096. * Likelihood that this image contains violent content.
  8097. *
  8098. * Likely values:
  8099. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Likely
  8100. * It is likely that the image belongs to the specified vertical. (Value:
  8101. * "LIKELY")
  8102. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Possible
  8103. * It is possible that the image belongs to the specified vertical.
  8104. * (Value: "POSSIBLE")
  8105. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Unknown
  8106. * Unknown likelihood. (Value: "UNKNOWN")
  8107. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_Unlikely
  8108. * It is unlikely that the image belongs to the specified vertical.
  8109. * (Value: "UNLIKELY")
  8110. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_VeryLikely
  8111. * It is very likely that the image belongs to the specified vertical.
  8112. * (Value: "VERY_LIKELY")
  8113. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Violence_VeryUnlikely
  8114. * It is very unlikely that the image belongs to the specified vertical.
  8115. * (Value: "VERY_UNLIKELY")
  8116. */
  8117. @property(nonatomic, copy, nullable) NSString *violence;
  8118. @end
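// Example (illustrative sketch, not part of the generated API surface):
// gating on the safe-search verdict for adult content using the likelihood
// constants documented above. `annotation` is assumed to be a
// GTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation from a response.
//
//   BOOL flagged =
//       [annotation.adult isEqual:
//            kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_Likely] ||
//       [annotation.adult isEqual:
//            kGTLRVision_GoogleCloudVisionV1p3beta1SafeSearchAnnotation_Adult_VeryLikely];
//   if (flagged) {
//     // Reject or blur the image.
//   }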
  8119. /**
  8120. * A single symbol representation.
  8121. */
  8122. @interface GTLRVision_GoogleCloudVisionV1p3beta1Symbol : GTLRObject
  8123. /**
  8124. * The bounding box for the symbol.
  8125. * The vertices are in the order of top-left, top-right, bottom-right,
  8126. * bottom-left. When a rotation of the bounding box is detected the rotation
  8127. * is represented as around the top-left corner as defined when the text is
  8128. * read in the 'natural' orientation.
  8129. * For example:
  8130. * * when the text is horizontal it might look like:
  8131. * 0----1
  8132. * | |
  8133. * 3----2
  8134. * * when it's rotated 180 degrees around the top-left corner it becomes:
  8135. * 2----3
  8136. * | |
  8137. * 1----0
 * and the vertex order will still be (0, 1, 2, 3).
  8139. */
  8140. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingBox;
  8141. /**
  8142. * Confidence of the OCR results for the symbol. Range [0, 1].
  8143. *
  8144. * Uses NSNumber of floatValue.
  8145. */
  8146. @property(nonatomic, strong, nullable) NSNumber *confidence;
  8147. /** Additional information detected for the symbol. */
  8148. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
  8149. /** The actual UTF-8 representation of the symbol. */
  8150. @property(nonatomic, copy, nullable) NSString *text;
  8151. @end
  8152. /**
  8153. * TextAnnotation contains a structured representation of OCR extracted text.
  8154. * The hierarchy of an OCR extracted text structure is like this:
  8155. * TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol
 * Each structural component, starting from Page, may further have its own
 * properties. Properties describe detected languages, breaks, etc. Please
 * refer to the TextAnnotation.TextProperty message definition below for more
 * detail.
  8161. */
  8162. @interface GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotation : GTLRObject
  8163. /** List of pages detected by OCR. */
  8164. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Page *> *pages;
  8165. /** UTF-8 text detected on the pages. */
  8166. @property(nonatomic, copy, nullable) NSString *text;
  8167. @end
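// Example (illustrative sketch, not part of the generated API surface):
// walking the OCR hierarchy (Page -> Block -> Paragraph -> Word -> Symbol) to
// reassemble the text of the first page. Assumes the Block type declared
// earlier in this header exposes its paragraphs as `paragraphs`.
//
//   NSMutableString *pageText = [NSMutableString string];
//   GTLRVision_GoogleCloudVisionV1p3beta1Page *page = textAnnotation.pages.firstObject;
//   for (GTLRVision_GoogleCloudVisionV1p3beta1Block *block in page.blocks) {
//     for (GTLRVision_GoogleCloudVisionV1p3beta1Paragraph *paragraph in block.paragraphs) {
//       for (GTLRVision_GoogleCloudVisionV1p3beta1Word *word in paragraph.words) {
//         for (GTLRVision_GoogleCloudVisionV1p3beta1Symbol *symbol in word.symbols) {
//           [pageText appendString:symbol.text ?: @""];
//         }
//       }
//     }
//   }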
  8168. /**
  8169. * Detected start or end of a structural component.
  8170. */
  8171. @interface GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak : GTLRObject
  8172. /**
  8173. * True if break prepends the element.
  8174. *
  8175. * Uses NSNumber of boolValue.
  8176. */
  8177. @property(nonatomic, strong, nullable) NSNumber *isPrefix;
  8178. /**
  8179. * Detected break type.
  8180. *
  8181. * Likely values:
  8182. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_EolSureSpace
  8183. * Line-wrapping break. (Value: "EOL_SURE_SPACE")
  8184. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Hyphen
  8185. * End-line hyphen that is not present in text; does not co-occur with
  8186. * `SPACE`, `LEADER_SPACE`, or `LINE_BREAK`. (Value: "HYPHEN")
  8187. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_LineBreak
  8188. * Line break that ends a paragraph. (Value: "LINE_BREAK")
  8189. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Space
  8190. * Regular space. (Value: "SPACE")
  8191. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_SureSpace
  8192. * Sure space (very wide). (Value: "SURE_SPACE")
  8193. * @arg @c kGTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak_Type_Unknown
  8194. * Unknown break label type. (Value: "UNKNOWN")
  8195. */
  8196. @property(nonatomic, copy, nullable) NSString *type;
  8197. @end
  8198. /**
  8199. * Detected language for a structural component.
  8200. */
  8201. @interface GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedLanguage : GTLRObject
  8202. /**
  8203. * Confidence of detected language. Range [0, 1].
  8204. *
  8205. * Uses NSNumber of floatValue.
  8206. */
  8207. @property(nonatomic, strong, nullable) NSNumber *confidence;
  8208. /**
  8209. * The BCP-47 language code, such as "en-US" or "sr-Latn". For more
  8210. * information, see
  8211. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  8212. */
  8213. @property(nonatomic, copy, nullable) NSString *languageCode;
  8214. @end
  8215. /**
  8216. * Additional information detected on the structural component.
  8217. */
  8218. @interface GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty : GTLRObject
  8219. /** Detected start or end of a text segment. */
  8220. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedBreak *detectedBreak;
  8221. /** A list of detected languages together with confidence. */
  8222. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationDetectedLanguage *> *detectedLanguages;
  8223. @end
  8224. /**
  8225. * A vertex represents a 2D point in the image.
  8226. * NOTE: the vertex coordinates are in the same scale as the original image.
  8227. */
  8228. @interface GTLRVision_GoogleCloudVisionV1p3beta1Vertex : GTLRObject
  8229. /**
  8230. * X coordinate.
  8231. *
  8232. * Uses NSNumber of intValue.
  8233. */
  8234. @property(nonatomic, strong, nullable) NSNumber *x;
  8235. /**
  8236. * Y coordinate.
  8237. *
  8238. * Uses NSNumber of intValue.
  8239. */
  8240. @property(nonatomic, strong, nullable) NSNumber *y;
  8241. @end
  8242. /**
  8243. * Relevant information for the image from the Internet.
  8244. */
  8245. @interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetection : GTLRObject
  8246. /**
  8247. * The service's best guess as to the topic of the request image.
  8248. * Inferred from similar images on the open web.
  8249. */
  8250. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebLabel *> *bestGuessLabels;
  8251. /**
  8252. * Fully matching images from the Internet.
  8253. * Can include resized copies of the query image.
  8254. */
  8255. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *fullMatchingImages;
  8256. /** Web pages containing the matching images from the Internet. */
  8257. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebPage *> *pagesWithMatchingImages;
  8258. /**
  8259. * Partial matching images from the Internet.
 * Those images are similar enough to share some key-point features. For
 * example, an original image will likely have partial matching for its crops.
  8262. */
  8263. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *partialMatchingImages;
  8264. /** The visually similar image results. */
  8265. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *visuallySimilarImages;
  8266. /** Deduced entities from similar images on the Internet. */
  8267. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebEntity *> *webEntities;
  8268. @end
  8269. /**
  8270. * Entity deduced from similar images on the Internet.
  8271. */
  8272. @interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebEntity : GTLRObject
  8273. /**
  8274. * Canonical description of the entity, in English.
  8275. *
  8276. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  8277. */
  8278. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  8279. /** Opaque entity ID. */
  8280. @property(nonatomic, copy, nullable) NSString *entityId;
  8281. /**
  8282. * Overall relevancy score for the entity.
  8283. * Not normalized and not comparable across different image queries.
  8284. *
  8285. * Uses NSNumber of floatValue.
  8286. */
  8287. @property(nonatomic, strong, nullable) NSNumber *score;
  8288. @end
  8289. /**
  8290. * Metadata for online images.
  8291. */
  8292. @interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage : GTLRObject
  8293. /**
  8294. * (Deprecated) Overall relevancy score for the image.
  8295. *
  8296. * Uses NSNumber of floatValue.
  8297. */
  8298. @property(nonatomic, strong, nullable) NSNumber *score;
  8299. /** The result image URL. */
  8300. @property(nonatomic, copy, nullable) NSString *url;
  8301. @end
  8302. /**
  8303. * Label to provide extra metadata for the web detection.
  8304. */
  8305. @interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebLabel : GTLRObject
  8306. /** Label for extra metadata. */
  8307. @property(nonatomic, copy, nullable) NSString *label;
  8308. /**
  8309. * The BCP-47 language code for `label`, such as "en-US" or "sr-Latn".
  8310. * For more information, see
  8311. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  8312. */
  8313. @property(nonatomic, copy, nullable) NSString *languageCode;
  8314. @end
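// Example (illustrative sketch, not part of the generated API surface):
// reading the best-guess labels and web entities from a web detection.
// `webDetection` is assumed to come from an annotate response.
//
//   for (GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebLabel *label
//            in webDetection.bestGuessLabels) {
//     NSLog(@"Best guess: %@ (%@)", label.label, label.languageCode);
//   }
//   for (GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebEntity *entity
//            in webDetection.webEntities) {
//     NSLog(@"Entity %@: %@", entity.entityId, entity.descriptionProperty);
//   }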
  8315. /**
  8316. * Metadata for web pages.
  8317. */
  8318. @interface GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebPage : GTLRObject
  8319. /**
  8320. * Fully matching images on the page.
  8321. * Can include resized copies of the query image.
  8322. */
  8323. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *fullMatchingImages;
/** Title for the web page; may contain HTML markup. */
  8325. @property(nonatomic, copy, nullable) NSString *pageTitle;
  8326. /**
  8327. * Partial matching images on the page.
 * Those images are similar enough to share some key-point features. For
 * example, an original image will likely have partial matching for its
 * crops.
  8331. */
  8332. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1WebDetectionWebImage *> *partialMatchingImages;
  8333. /**
  8334. * (Deprecated) Overall relevancy score for the web page.
  8335. *
  8336. * Uses NSNumber of floatValue.
  8337. */
  8338. @property(nonatomic, strong, nullable) NSNumber *score;
  8339. /** The result web page URL. */
  8340. @property(nonatomic, copy, nullable) NSString *url;
  8341. @end
  8342. /**
  8343. * A word representation.
  8344. */
  8345. @interface GTLRVision_GoogleCloudVisionV1p3beta1Word : GTLRObject
  8346. /**
  8347. * The bounding box for the word.
  8348. * The vertices are in the order of top-left, top-right, bottom-right,
  8349. * bottom-left. When a rotation of the bounding box is detected the rotation
  8350. * is represented as around the top-left corner as defined when the text is
  8351. * read in the 'natural' orientation.
  8352. * For example:
  8353. * * when the text is horizontal it might look like:
  8354. * 0----1
  8355. * | |
  8356. * 3----2
  8357. * * when it's rotated 180 degrees around the top-left corner it becomes:
  8358. * 2----3
  8359. * | |
  8360. * 1----0
  8361. * and the vertex order will still be (0, 1, 2, 3).
  8362. */
  8363. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1BoundingPoly *boundingBox;
  8364. /**
  8365. * Confidence of the OCR results for the word. Range [0, 1].
  8366. *
  8367. * Uses NSNumber of floatValue.
  8368. */
  8369. @property(nonatomic, strong, nullable) NSNumber *confidence;
  8370. /** Additional information detected for the word. */
  8371. @property(nonatomic, strong, nullable) GTLRVision_GoogleCloudVisionV1p3beta1TextAnnotationTextProperty *property;
  8372. /**
  8373. * List of symbols in the word.
  8374. * The order of the symbols follows the natural reading order.
  8375. */
  8376. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GoogleCloudVisionV1p3beta1Symbol *> *symbols;
  8377. @end
  8378. /**
  8379. * Information about the products similar to a single product in a query
  8380. * image.
  8381. */
  8382. @interface GTLRVision_GroupedResult : GTLRObject
  8383. /** The bounding polygon around the product detected in the query image. */
  8384. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
  8385. /** List of results, one for each product match. */
  8386. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Result *> *results;
  8387. @end
  8388. /**
  8389. * Client image to perform Google Cloud Vision API tasks over.
  8390. */
  8391. @interface GTLRVision_Image : GTLRObject
  8392. /**
  8393. * Image content, represented as a stream of bytes.
  8394. * Note: As with all `bytes` fields, protobuffers use a pure binary
  8395. * representation, whereas JSON representations use base64.
  8396. *
  8397. * Contains encoded binary data; GTLRBase64 can encode/decode (probably
  8398. * web-safe format).
  8399. */
  8400. @property(nonatomic, copy, nullable) NSString *content;
  8401. /**
  8402. * Google Cloud Storage image location, or publicly-accessible image
  8403. * URL. If both `content` and `source` are provided for an image, `content`
  8404. * takes precedence and is used to perform the image annotation request.
  8405. */
  8406. @property(nonatomic, strong, nullable) GTLRVision_ImageSource *source;
  8407. @end
  8408. /**
  8409. * If an image was produced from a file (e.g. a PDF), this message gives
  8410. * information about the source of that image.
  8411. */
  8412. @interface GTLRVision_ImageAnnotationContext : GTLRObject
  8413. /**
  8414. * If the file was a PDF or TIFF, this field gives the page number within
  8415. * the file used to produce the image.
  8416. *
  8417. * Uses NSNumber of intValue.
  8418. */
  8419. @property(nonatomic, strong, nullable) NSNumber *pageNumber;
  8420. /** The URI of the file used to produce the image. */
  8421. @property(nonatomic, copy, nullable) NSString *uri;
  8422. @end
  8423. /**
  8424. * Image context and/or feature-specific parameters.
  8425. */
  8426. @interface GTLRVision_ImageContext : GTLRObject
  8427. /** Parameters for crop hints annotation request. */
  8428. @property(nonatomic, strong, nullable) GTLRVision_CropHintsParams *cropHintsParams;
  8429. /**
  8430. * List of languages to use for TEXT_DETECTION. In most cases, an empty value
  8431. * yields the best results since it enables automatic language detection. For
  8432. * languages based on the Latin alphabet, setting `language_hints` is not
  8433. * needed. In rare cases, when the language of the text in the image is known,
  8434. * setting a hint will help get better results (although it will be a
  8435. * significant hindrance if the hint is wrong). Text detection returns an
  8436. * error if one or more of the specified languages is not one of the
  8437. * [supported languages](/vision/docs/languages).
  8438. */
  8439. @property(nonatomic, strong, nullable) NSArray<NSString *> *languageHints;
  8440. /** Not used. */
  8441. @property(nonatomic, strong, nullable) GTLRVision_LatLongRect *latLongRect;
  8442. /** Parameters for product search. */
  8443. @property(nonatomic, strong, nullable) GTLRVision_ProductSearchParams *productSearchParams;
  8444. /** Parameters for web detection. */
  8445. @property(nonatomic, strong, nullable) GTLRVision_WebDetectionParams *webDetectionParams;
  8446. @end
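// Example (illustrative sketch, not part of the generated API surface):
// supplying a language hint for TEXT_DETECTION when the language of the text
// is known in advance (an empty hint list is usually the better choice).
//
//   GTLRVision_ImageContext *imageContext = [GTLRVision_ImageContext object];
//   imageContext.languageHints = @[ @"ja" ];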
  8447. /**
  8448. * Stores image properties, such as dominant colors.
  8449. */
  8450. @interface GTLRVision_ImageProperties : GTLRObject
  8451. /** If present, dominant colors completed successfully. */
  8452. @property(nonatomic, strong, nullable) GTLRVision_DominantColorsAnnotation *dominantColors;
  8453. @end
  8454. /**
  8455. * External image source (Google Cloud Storage or web URL image location).
  8456. */
  8457. @interface GTLRVision_ImageSource : GTLRObject
  8458. /**
  8459. * **Use `image_uri` instead.**
  8460. * The Google Cloud Storage URI of the form
  8461. * `gs://bucket_name/object_name`. Object versioning is not supported. See
  8462. * [Google Cloud Storage Request
  8463. * URIs](https://cloud.google.com/storage/docs/reference-uris) for more info.
  8464. */
  8465. @property(nonatomic, copy, nullable) NSString *gcsImageUri;
  8466. /**
  8467. * The URI of the source image. Can be either:
  8468. * 1. A Google Cloud Storage URI of the form
  8469. * `gs://bucket_name/object_name`. Object versioning is not supported. See
  8470. * [Google Cloud Storage Request
  8471. * URIs](https://cloud.google.com/storage/docs/reference-uris) for more
  8472. * info.
  8473. * 2. A publicly-accessible image HTTP/HTTPS URL. When fetching images from
  8474. * HTTP/HTTPS URLs, Google cannot guarantee that the request will be
  8475. * completed. Your request may fail if the specified host denies the
  8476. * request (e.g. due to request throttling or DOS prevention), or if Google
  8477. * throttles requests to the site for abuse prevention. You should not
  8478. * depend on externally-hosted images for production applications.
  8479. * When both `gcs_image_uri` and `image_uri` are specified, `image_uri` takes
  8480. * precedence.
  8481. */
  8482. @property(nonatomic, copy, nullable) NSString *imageUri;
  8483. @end
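// Example (illustrative sketch, not part of the generated API surface):
// pointing a GTLRVision_Image at a Cloud Storage object. The bucket and
// object names are placeholders; `content` could be set instead with
// base64-encoded bytes, in which case it takes precedence over `source`.
//
//   GTLRVision_ImageSource *source = [GTLRVision_ImageSource object];
//   source.imageUri = @"gs://example-bucket/photos/cat.jpg";
//   GTLRVision_Image *image = [GTLRVision_Image object];
//   image.source = source;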
  8484. /**
  8485. * The Google Cloud Storage location for a csv file which preserves a list of
  8486. * ImportProductSetRequests in each line.
  8487. */
  8488. @interface GTLRVision_ImportProductSetsGcsSource : GTLRObject
  8489. /**
  8490. * The Google Cloud Storage URI of the input csv file.
  8491. * The URI must start with `gs://`.
  8492. * The format of the input csv file should be one image per line.
  8493. * In each line, there are 8 columns.
  8494. * 1. image-uri
  8495. * 2. image-id
  8496. * 3. product-set-id
  8497. * 4. product-id
  8498. * 5. product-category
  8499. * 6. product-display-name
  8500. * 7. labels
  8501. * 8. bounding-poly
  8502. * The `image-uri`, `product-set-id`, `product-id`, and `product-category`
  8503. * columns are required. All other columns are optional.
  8504. * If the `ProductSet` or `Product` specified by the `product-set-id` and
  8505. * `product-id` values does not exist, then the system will create a new
  8506. * `ProductSet` or `Product` for the image. In this case, the
  8507. * `product-display-name` column refers to
  8508. * display_name, the
  8509. * `product-category` column refers to
  8510. * product_category, and the
  8511. * `labels` column refers to product_labels.
  8512. * The `image-id` column is optional but must be unique if provided. If it is
  8513. * empty, the system will automatically assign a unique id to the image.
  8514. * The `product-display-name` column is optional. If it is empty, the system
  8515. * sets the display_name field for the product to a
  8516. * space (" "). You can update the `display_name` later by using the API.
  8517. * If a `Product` with the specified `product-id` already exists, then the
  8518. * system ignores the `product-display-name`, `product-category`, and `labels`
  8519. * columns.
  8520. * The `labels` column (optional) is a line containing a list of
  8521. * comma-separated key-value pairs, in the following format:
  8522. * "key_1=value_1,key_2=value_2,...,key_n=value_n"
  8523. * The `bounding-poly` column (optional) identifies one region of
  8524. * interest from the image in the same manner as `CreateReferenceImage`. If
  8525. * you do not specify the `bounding-poly` column, then the system will try to
  8526. * detect regions of interest automatically.
  8527. * At most one `bounding-poly` column is allowed per line. If the image
  8528. * contains multiple regions of interest, add a line to the CSV file that
  8529. * includes the same product information, and the `bounding-poly` values for
  8530. * each region of interest.
  8531. * The `bounding-poly` column must contain an even number of comma-separated
  8532. * numbers, in the format "p1_x,p1_y,p2_x,p2_y,...,pn_x,pn_y". Use
  8533. * non-negative integers for absolute bounding polygons, and float values
  8534. * in [0, 1] for normalized bounding polygons.
  8535. * The system will resize the image if the image resolution is too
  8536. * large to process (larger than 20MP).
  8537. */
  8538. @property(nonatomic, copy, nullable) NSString *csvFileUri;
  8539. @end
  8540. /**
  8541. * The input content for the `ImportProductSets` method.
  8542. */
  8543. @interface GTLRVision_ImportProductSetsInputConfig : GTLRObject
  8544. /**
  8545. * The Google Cloud Storage location for a csv file which preserves a list
  8546. * of ImportProductSetRequests in each line.
  8547. */
  8548. @property(nonatomic, strong, nullable) GTLRVision_ImportProductSetsGcsSource *gcsSource;
  8549. @end
  8550. /**
  8551. * Request message for the `ImportProductSets` method.
  8552. */
  8553. @interface GTLRVision_ImportProductSetsRequest : GTLRObject
  8554. /** The input content for the list of requests. */
  8555. @property(nonatomic, strong, nullable) GTLRVision_ImportProductSetsInputConfig *inputConfig;
  8556. @end
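// Example (illustrative sketch, not part of the generated API surface):
// assembling an ImportProductSets request from a CSV file in Cloud Storage.
// The bucket and file names are placeholders.
//
//   GTLRVision_ImportProductSetsGcsSource *gcsSource =
//       [GTLRVision_ImportProductSetsGcsSource object];
//   gcsSource.csvFileUri = @"gs://example-bucket/product-sets.csv";
//   GTLRVision_ImportProductSetsInputConfig *inputConfig =
//       [GTLRVision_ImportProductSetsInputConfig object];
//   inputConfig.gcsSource = gcsSource;
//   GTLRVision_ImportProductSetsRequest *request =
//       [GTLRVision_ImportProductSetsRequest object];
//   request.inputConfig = inputConfig;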
  8557. /**
  8558. * Response message for the `ImportProductSets` method.
  8559. * This message is returned by the
  8560. * google.longrunning.Operations.GetOperation method in the returned
  8561. * google.longrunning.Operation.response field.
  8562. */
  8563. @interface GTLRVision_ImportProductSetsResponse : GTLRObject
  8564. /** The list of reference_images that are imported successfully. */
  8565. @property(nonatomic, strong, nullable) NSArray<GTLRVision_ReferenceImage *> *referenceImages;
  8566. /**
  8567. * The rpc status for each ImportProductSet request, including both successes
  8568. * and errors.
  8569. * The number of statuses here matches the number of lines in the csv file,
  8570. * and statuses[i] stores the success or failure status of processing the i-th
  8571. * line of the csv, starting from line 0.
  8572. */
  8573. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Status *> *statuses;
  8574. @end
  8575. /**
  8576. * The desired input location and metadata.
  8577. */
  8578. @interface GTLRVision_InputConfig : GTLRObject
  8579. /** The Google Cloud Storage location to read the input from. */
  8580. @property(nonatomic, strong, nullable) GTLRVision_GcsSource *gcsSource;
  8581. /**
  8582. * The type of the file. Currently only "application/pdf" and "image/tiff"
  8583. * are supported. Wildcards are not supported.
  8584. */
  8585. @property(nonatomic, copy, nullable) NSString *mimeType;
  8586. @end
  8587. /**
  8588. * A product label represented as a key-value pair.
  8589. */
  8590. @interface GTLRVision_KeyValue : GTLRObject
  8591. /**
  8592. * The key of the label attached to the product. Cannot be empty and cannot
  8593. * exceed 128 bytes.
  8594. */
  8595. @property(nonatomic, copy, nullable) NSString *key;
  8596. /**
  8597. * The value of the label attached to the product. Cannot be empty and
  8598. * cannot exceed 128 bytes.
  8599. */
  8600. @property(nonatomic, copy, nullable) NSString *value;
  8601. @end
  8602. /**
  8603. * A face-specific landmark (for example, a face feature).
  8604. */
  8605. @interface GTLRVision_Landmark : GTLRObject
  8606. /** Face landmark position. */
  8607. @property(nonatomic, strong, nullable) GTLRVision_Position *position;
  8608. /**
  8609. * Face landmark type.
  8610. *
  8611. * Likely values:
  8612. * @arg @c kGTLRVision_Landmark_Type_ChinGnathion Chin gnathion. (Value:
  8613. * "CHIN_GNATHION")
  8614. * @arg @c kGTLRVision_Landmark_Type_ChinLeftGonion Chin left gonion. (Value:
  8615. * "CHIN_LEFT_GONION")
  8616. * @arg @c kGTLRVision_Landmark_Type_ChinRightGonion Chin right gonion.
  8617. * (Value: "CHIN_RIGHT_GONION")
  8618. * @arg @c kGTLRVision_Landmark_Type_ForeheadGlabella Forehead glabella.
  8619. * (Value: "FOREHEAD_GLABELLA")
  8620. * @arg @c kGTLRVision_Landmark_Type_LeftEarTragion Left ear tragion. (Value:
  8621. * "LEFT_EAR_TRAGION")
  8622. * @arg @c kGTLRVision_Landmark_Type_LeftEye Left eye. (Value: "LEFT_EYE")
  8623. * @arg @c kGTLRVision_Landmark_Type_LeftEyeBottomBoundary Left eye, bottom
  8624. * boundary. (Value: "LEFT_EYE_BOTTOM_BOUNDARY")
  8625. * @arg @c kGTLRVision_Landmark_Type_LeftEyebrowUpperMidpoint Left eyebrow,
  8626. * upper midpoint. (Value: "LEFT_EYEBROW_UPPER_MIDPOINT")
  8627. * @arg @c kGTLRVision_Landmark_Type_LeftEyeLeftCorner Left eye, left corner.
  8628. * (Value: "LEFT_EYE_LEFT_CORNER")
  8629. * @arg @c kGTLRVision_Landmark_Type_LeftEyePupil Left eye pupil. (Value:
  8630. * "LEFT_EYE_PUPIL")
  8631. * @arg @c kGTLRVision_Landmark_Type_LeftEyeRightCorner Left eye, right
  8632. * corner. (Value: "LEFT_EYE_RIGHT_CORNER")
  8633. * @arg @c kGTLRVision_Landmark_Type_LeftEyeTopBoundary Left eye, top
  8634. * boundary. (Value: "LEFT_EYE_TOP_BOUNDARY")
  8635. * @arg @c kGTLRVision_Landmark_Type_LeftOfLeftEyebrow Left of left eyebrow.
  8636. * (Value: "LEFT_OF_LEFT_EYEBROW")
  8637. * @arg @c kGTLRVision_Landmark_Type_LeftOfRightEyebrow Left of right
  8638. * eyebrow. (Value: "LEFT_OF_RIGHT_EYEBROW")
  8639. * @arg @c kGTLRVision_Landmark_Type_LowerLip Lower lip. (Value: "LOWER_LIP")
  8640. * @arg @c kGTLRVision_Landmark_Type_MidpointBetweenEyes Midpoint between
  8641. * eyes. (Value: "MIDPOINT_BETWEEN_EYES")
  8642. * @arg @c kGTLRVision_Landmark_Type_MouthCenter Mouth center. (Value:
  8643. * "MOUTH_CENTER")
  8644. * @arg @c kGTLRVision_Landmark_Type_MouthLeft Mouth left. (Value:
  8645. * "MOUTH_LEFT")
  8646. * @arg @c kGTLRVision_Landmark_Type_MouthRight Mouth right. (Value:
  8647. * "MOUTH_RIGHT")
  8648. * @arg @c kGTLRVision_Landmark_Type_NoseBottomCenter Nose, bottom center.
  8649. * (Value: "NOSE_BOTTOM_CENTER")
  8650. * @arg @c kGTLRVision_Landmark_Type_NoseBottomLeft Nose, bottom left.
  8651. * (Value: "NOSE_BOTTOM_LEFT")
  8652. * @arg @c kGTLRVision_Landmark_Type_NoseBottomRight Nose, bottom right.
  8653. * (Value: "NOSE_BOTTOM_RIGHT")
  8654. * @arg @c kGTLRVision_Landmark_Type_NoseTip Nose tip. (Value: "NOSE_TIP")
  8655. * @arg @c kGTLRVision_Landmark_Type_RightEarTragion Right ear tragion.
  8656. * (Value: "RIGHT_EAR_TRAGION")
  8657. * @arg @c kGTLRVision_Landmark_Type_RightEye Right eye. (Value: "RIGHT_EYE")
  8658. * @arg @c kGTLRVision_Landmark_Type_RightEyeBottomBoundary Right eye, bottom
  8659. * boundary. (Value: "RIGHT_EYE_BOTTOM_BOUNDARY")
  8660. * @arg @c kGTLRVision_Landmark_Type_RightEyebrowUpperMidpoint Right eyebrow,
  8661. * upper midpoint. (Value: "RIGHT_EYEBROW_UPPER_MIDPOINT")
  8662. * @arg @c kGTLRVision_Landmark_Type_RightEyeLeftCorner Right eye, left
  8663. * corner. (Value: "RIGHT_EYE_LEFT_CORNER")
  8664. * @arg @c kGTLRVision_Landmark_Type_RightEyePupil Right eye pupil. (Value:
  8665. * "RIGHT_EYE_PUPIL")
  8666. * @arg @c kGTLRVision_Landmark_Type_RightEyeRightCorner Right eye, right
  8667. * corner. (Value: "RIGHT_EYE_RIGHT_CORNER")
  8668. * @arg @c kGTLRVision_Landmark_Type_RightEyeTopBoundary Right eye, top
  8669. * boundary. (Value: "RIGHT_EYE_TOP_BOUNDARY")
  8670. * @arg @c kGTLRVision_Landmark_Type_RightOfLeftEyebrow Right of left
  8671. * eyebrow. (Value: "RIGHT_OF_LEFT_EYEBROW")
  8672. * @arg @c kGTLRVision_Landmark_Type_RightOfRightEyebrow Right of right
  8673. * eyebrow. (Value: "RIGHT_OF_RIGHT_EYEBROW")
  8674. * @arg @c kGTLRVision_Landmark_Type_UnknownLandmark Unknown face landmark
  8675. * detected. Should not be filled. (Value: "UNKNOWN_LANDMARK")
  8676. * @arg @c kGTLRVision_Landmark_Type_UpperLip Upper lip. (Value: "UPPER_LIP")
  8677. */
  8678. @property(nonatomic, copy, nullable) NSString *type;
  8679. @end
  8680. /**
 * An object representing a latitude/longitude pair. This is expressed as a
 * pair of doubles representing degrees latitude and degrees longitude. Unless
  8684. * specified otherwise, this must conform to the
  8685. * <a href="http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf">WGS84
  8686. * standard</a>. Values must be within normalized ranges.
  8687. */
  8688. @interface GTLRVision_LatLng : GTLRObject
  8689. /**
  8690. * The latitude in degrees. It must be in the range [-90.0, +90.0].
  8691. *
  8692. * Uses NSNumber of doubleValue.
  8693. */
  8694. @property(nonatomic, strong, nullable) NSNumber *latitude;
  8695. /**
  8696. * The longitude in degrees. It must be in the range [-180.0, +180.0].
  8697. *
  8698. * Uses NSNumber of doubleValue.
  8699. */
  8700. @property(nonatomic, strong, nullable) NSNumber *longitude;
  8701. @end
  8702. /**
  8703. * Rectangle determined by min and max `LatLng` pairs.
  8704. */
  8705. @interface GTLRVision_LatLongRect : GTLRObject
  8706. /** Max lat/long pair. */
  8707. @property(nonatomic, strong, nullable) GTLRVision_LatLng *maxLatLng;
  8708. /** Min lat/long pair. */
  8709. @property(nonatomic, strong, nullable) GTLRVision_LatLng *minLatLng;
  8710. @end
  8711. /**
  8712. * The response message for Operations.ListOperations.
  8713. *
  8714. * @note This class supports NSFastEnumeration and indexed subscripting over
  8715. * its "operations" property. If returned as the result of a query, it
  8716. * should support automatic pagination (when @c shouldFetchNextPages is
  8717. * enabled).
  8718. */
  8719. @interface GTLRVision_ListOperationsResponse : GTLRCollectionObject
  8720. /** The standard List next-page token. */
  8721. @property(nonatomic, copy, nullable) NSString *nextPageToken;
  8722. /**
  8723. * A list of operations that matches the specified filter in the request.
  8724. *
  8725. * @note This property is used to support NSFastEnumeration and indexed
  8726. * subscripting on this class.
  8727. */
  8728. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Operation *> *operations;
  8729. @end
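// Example (illustrative sketch, not part of the generated API surface):
// iterating a list response directly, as allowed by its NSFastEnumeration
// support over the "operations" property. `listResponse` is assumed to be the
// result of an operations list query (automatic pagination applies when the
// query's shouldFetchNextPages is enabled).
//
//   for (GTLRVision_Operation *operation in listResponse) {
//     NSLog(@"%@ done: %@", operation.name, operation.done);
//   }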
  8730. /**
  8731. * Response message for the `ListProductSets` method.
  8732. *
  8733. * @note This class supports NSFastEnumeration and indexed subscripting over
  8734. * its "productSets" property. If returned as the result of a query, it
  8735. * should support automatic pagination (when @c shouldFetchNextPages is
  8736. * enabled).
  8737. */
  8738. @interface GTLRVision_ListProductSetsResponse : GTLRCollectionObject
  8739. /**
  8740. * Token to retrieve the next page of results, or empty if there are no more
  8741. * results in the list.
  8742. */
  8743. @property(nonatomic, copy, nullable) NSString *nextPageToken;
  8744. /**
  8745. * List of ProductSets.
  8746. *
  8747. * @note This property is used to support NSFastEnumeration and indexed
  8748. * subscripting on this class.
  8749. */
  8750. @property(nonatomic, strong, nullable) NSArray<GTLRVision_ProductSet *> *productSets;
  8751. @end
  8752. /**
  8753. * Response message for the `ListProductsInProductSet` method.
  8754. *
  8755. * @note This class supports NSFastEnumeration and indexed subscripting over
  8756. * its "products" property. If returned as the result of a query, it
  8757. * should support automatic pagination (when @c shouldFetchNextPages is
  8758. * enabled).
  8759. */
  8760. @interface GTLRVision_ListProductsInProductSetResponse : GTLRCollectionObject
  8761. /**
  8762. * Token to retrieve the next page of results, or empty if there are no more
  8763. * results in the list.
  8764. */
  8765. @property(nonatomic, copy, nullable) NSString *nextPageToken;
  8766. /**
  8767. * The list of Products.
  8768. *
  8769. * @note This property is used to support NSFastEnumeration and indexed
  8770. * subscripting on this class.
  8771. */
  8772. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Product *> *products;
  8773. @end
  8774. /**
  8775. * Response message for the `ListProducts` method.
  8776. *
  8777. * @note This class supports NSFastEnumeration and indexed subscripting over
  8778. * its "products" property. If returned as the result of a query, it
  8779. * should support automatic pagination (when @c shouldFetchNextPages is
  8780. * enabled).
  8781. */
  8782. @interface GTLRVision_ListProductsResponse : GTLRCollectionObject
  8783. /**
  8784. * Token to retrieve the next page of results, or empty if there are no more
  8785. * results in the list.
  8786. */
  8787. @property(nonatomic, copy, nullable) NSString *nextPageToken;
  8788. /**
  8789. * List of products.
  8790. *
  8791. * @note This property is used to support NSFastEnumeration and indexed
  8792. * subscripting on this class.
  8793. */
  8794. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Product *> *products;
  8795. @end
  8796. /**
  8797. * Response message for the `ListReferenceImages` method.
  8798. *
  8799. * @note This class supports NSFastEnumeration and indexed subscripting over
  8800. * its "referenceImages" property. If returned as the result of a query,
  8801. * it should support automatic pagination (when @c shouldFetchNextPages
  8802. * is enabled).
  8803. */
  8804. @interface GTLRVision_ListReferenceImagesResponse : GTLRCollectionObject
  8805. /** The next_page_token returned from a previous List request, if any. */
  8806. @property(nonatomic, copy, nullable) NSString *nextPageToken;
  8807. /**
  8808. * The maximum number of items to return. Default 10, maximum 100.
  8809. *
  8810. * Uses NSNumber of intValue.
  8811. */
  8812. @property(nonatomic, strong, nullable) NSNumber *pageSize;
  8813. /**
  8814. * The list of reference images.
  8815. *
  8816. * @note This property is used to support NSFastEnumeration and indexed
  8817. * subscripting on this class.
  8818. */
  8819. @property(nonatomic, strong, nullable) NSArray<GTLRVision_ReferenceImage *> *referenceImages;
  8820. @end
  8821. /**
  8822. * Set of detected objects with bounding boxes.
  8823. */
  8824. @interface GTLRVision_LocalizedObjectAnnotation : GTLRObject
  8825. /** Image region to which this object belongs. This must be populated. */
  8826. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
  8827. /**
  8828. * The BCP-47 language code, such as "en-US" or "sr-Latn". For more
  8829. * information, see
  8830. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  8831. */
  8832. @property(nonatomic, copy, nullable) NSString *languageCode;
  8833. /** Object ID that should align with EntityAnnotation mid. */
  8834. @property(nonatomic, copy, nullable) NSString *mid;
  8835. /** Object name, expressed in its `language_code` language. */
  8836. @property(nonatomic, copy, nullable) NSString *name;
  8837. /**
  8838. * Score of the result. Range [0, 1].
  8839. *
  8840. * Uses NSNumber of floatValue.
  8841. */
  8842. @property(nonatomic, strong, nullable) NSNumber *score;
  8843. @end
  8844. /**
  8845. * Detected entity location information.
  8846. */
  8847. @interface GTLRVision_LocationInfo : GTLRObject
  8848. /** lat/long location coordinates. */
  8849. @property(nonatomic, strong, nullable) GTLRVision_LatLng *latLng;
  8850. @end
  8851. /**
  8852. * A vertex represents a 2D point in the image.
  8853. * NOTE: the normalized vertex coordinates are relative to the original image
  8854. * and range from 0 to 1.
  8855. */
  8856. @interface GTLRVision_NormalizedVertex : GTLRObject
  8857. /**
  8858. * X coordinate.
  8859. *
  8860. * Uses NSNumber of floatValue.
  8861. */
  8862. @property(nonatomic, strong, nullable) NSNumber *x;
  8863. /**
  8864. * Y coordinate.
  8865. *
  8866. * Uses NSNumber of floatValue.
  8867. */
  8868. @property(nonatomic, strong, nullable) NSNumber *y;
  8869. @end
  8870. /**
  8871. * This resource represents a long-running operation that is the result of a
  8872. * network API call.
  8873. */
  8874. @interface GTLRVision_Operation : GTLRObject
  8875. /**
  8876. * If the value is `false`, it means the operation is still in progress.
  8877. * If `true`, the operation is completed, and either `error` or `response` is
  8878. * available.
  8879. *
  8880. * Uses NSNumber of boolValue.
  8881. */
  8882. @property(nonatomic, strong, nullable) NSNumber *done;
  8883. /** The error result of the operation in case of failure or cancellation. */
  8884. @property(nonatomic, strong, nullable) GTLRVision_Status *error;
  8885. /**
  8886. * Service-specific metadata associated with the operation. It typically
  8887. * contains progress information and common metadata such as create time.
  8888. * Some services might not provide such metadata. Any method that returns a
  8889. * long-running operation should document the metadata type, if any.
  8890. */
  8891. @property(nonatomic, strong, nullable) GTLRVision_Operation_Metadata *metadata;
  8892. /**
  8893. * The server-assigned name, which is only unique within the same service that
  8894. * originally returns it. If you use the default HTTP mapping, the
  8895. * `name` should have the format of `operations/some/unique/name`.
  8896. */
  8897. @property(nonatomic, copy, nullable) NSString *name;
  8898. /**
  8899. * The normal response of the operation in case of success. If the original
  8900. * method returns no data on success, such as `Delete`, the response is
  8901. * `google.protobuf.Empty`. If the original method is standard
  8902. * `Get`/`Create`/`Update`, the response should be the resource. For other
  8903. * methods, the response should have the type `XxxResponse`, where `Xxx`
  8904. * is the original method name. For example, if the original method name
  8905. * is `TakeSnapshot()`, the inferred response type is
  8906. * `TakeSnapshotResponse`.
  8907. */
  8908. @property(nonatomic, strong, nullable) GTLRVision_Operation_Response *response;
  8909. @end
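// Example (sketch): interpreting a long-running GTLRVision_Operation using the
// `done`, `error`, and `response` semantics documented above. The operation is
// assumed to have been returned by an asynchronous request elsewhere.
//
//   GTLRVision_Operation *operation = ...;  // obtained from an async request
//   if (operation.done.boolValue) {
//     if (operation.error != nil) {
//       NSLog(@"Operation %@ failed: %@ (code %@)",
//             operation.name, operation.error.message, operation.error.code);
//     } else {
//       NSLog(@"Operation %@ finished: %@",
//             operation.name, operation.response.additionalProperties);
//     }
//   } else {
//     NSLog(@"Operation %@ is still in progress.", operation.name);
//   }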
  8910. /**
  8911. * Service-specific metadata associated with the operation. It typically
  8912. * contains progress information and common metadata such as create time.
  8913. * Some services might not provide such metadata. Any method that returns a
  8914. * long-running operation should document the metadata type, if any.
  8915. *
  8916. * @note This class is documented as having more properties of any valid JSON
  8917. * type. Use @c -additionalJSONKeys and @c -additionalPropertyForName: to
  8918. * get the list of properties and then fetch them; or @c
  8919. * -additionalProperties to fetch them all at once.
  8920. */
  8921. @interface GTLRVision_Operation_Metadata : GTLRObject
  8922. @end
  8923. /**
  8924. * The normal response of the operation in case of success. If the original
  8925. * method returns no data on success, such as `Delete`, the response is
  8926. * `google.protobuf.Empty`. If the original method is standard
  8927. * `Get`/`Create`/`Update`, the response should be the resource. For other
  8928. * methods, the response should have the type `XxxResponse`, where `Xxx`
  8929. * is the original method name. For example, if the original method name
  8930. * is `TakeSnapshot()`, the inferred response type is
  8931. * `TakeSnapshotResponse`.
  8932. *
  8933. * @note This class is documented as having more properties of any valid JSON
  8934. * type. Use @c -additionalJSONKeys and @c -additionalPropertyForName: to
  8935. * get the list of properties and then fetch them; or @c
  8936. * -additionalProperties to fetch them all at once.
  8937. */
  8938. @interface GTLRVision_Operation_Response : GTLRObject
  8939. @end
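// Example (sketch): both GTLRVision_Operation_Metadata and
// GTLRVision_Operation_Response hold arbitrary JSON, so their contents are
// read through the additional-property accessors named in the notes above.
// The keys logged here depend entirely on the service response.
//
//   GTLRVision_Operation_Metadata *metadata = operation.metadata;  // see above
//   for (NSString *key in [metadata additionalJSONKeys]) {
//     id value = [metadata additionalPropertyForName:key];
//     NSLog(@"metadata[%@] = %@", key, value);
//   }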
  8940. /**
  8941. * Contains metadata for the BatchAnnotateImages operation.
  8942. */
  8943. @interface GTLRVision_OperationMetadata : GTLRObject
  8944. /** The time when the batch request was received. */
  8945. @property(nonatomic, strong, nullable) GTLRDateTime *createTime;
  8946. /**
  8947. * Current state of the batch operation.
  8948. *
  8949. * Likely values:
  8950. * @arg @c kGTLRVision_OperationMetadata_State_Cancelled The batch processing
  8951. * was cancelled. (Value: "CANCELLED")
  8952. * @arg @c kGTLRVision_OperationMetadata_State_Created Request is received.
  8953. * (Value: "CREATED")
  8954. * @arg @c kGTLRVision_OperationMetadata_State_Done The batch processing is
  8955. * done. (Value: "DONE")
  8956. * @arg @c kGTLRVision_OperationMetadata_State_Running Request is actively
  8957. * being processed. (Value: "RUNNING")
  8958. * @arg @c kGTLRVision_OperationMetadata_State_StateUnspecified Invalid.
  8959. * (Value: "STATE_UNSPECIFIED")
  8960. */
  8961. @property(nonatomic, copy, nullable) NSString *state;
  8962. /** The time when the operation result was last updated. */
  8963. @property(nonatomic, strong, nullable) GTLRDateTime *updateTime;
  8964. @end
  8965. /**
  8966. * The desired output location and metadata.
  8967. */
  8968. @interface GTLRVision_OutputConfig : GTLRObject
  8969. /**
  8970. * The max number of response protos to put into each output JSON file on
  8971. * Google Cloud Storage.
  8972. * The valid range is [1, 100]. If not specified, the default value is 20.
  8973. * For example, for one pdf file with 100 pages, 100 response protos will
  8974. * be generated. If `batch_size` = 20, then 5 json files each
  8975. * containing 20 response protos will be written under the prefix
  8976. * `gcs_destination`.`uri`.
  8977. * Currently, batch_size only applies to GcsDestination, with potential future
  8978. * support for other output configurations.
  8979. *
  8980. * Uses NSNumber of intValue.
  8981. */
  8982. @property(nonatomic, strong, nullable) NSNumber *batchSize;
  8983. /** The Google Cloud Storage location to write the output(s) to. */
  8984. @property(nonatomic, strong, nullable) GTLRVision_GcsDestination *gcsDestination;
  8985. @end
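// Example (sketch): configuring output for an asynchronous file request. Per
// the batchSize documentation above, a 100-page PDF with batchSize = 20 yields
// 5 output JSON files of 20 response protos each under the destination prefix.
// The `uri` property of GTLRVision_GcsDestination is assumed from its
// declaration earlier in this header; the bucket path is illustrative.
//
//   GTLRVision_GcsDestination *destination = [GTLRVision_GcsDestination object];
//   destination.uri = @"gs://my-bucket/ocr-output/";
//   GTLRVision_OutputConfig *outputConfig = [GTLRVision_OutputConfig object];
//   outputConfig.gcsDestination = destination;
//   outputConfig.batchSize = @20;   // 100 pages / 20 per file -> 5 files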
  8986. /**
  8987. * Detected page from OCR.
  8988. */
  8989. @interface GTLRVision_Page : GTLRObject
  8990. /** List of blocks of text, images etc on this page. */
  8991. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Block *> *blocks;
  8992. /**
  8993. * Confidence of the OCR results on the page. Range [0, 1].
  8994. *
  8995. * Uses NSNumber of floatValue.
  8996. */
  8997. @property(nonatomic, strong, nullable) NSNumber *confidence;
  8998. /**
  8999. * Page height. For PDFs the unit is points. For images (including
  9000. * TIFFs) the unit is pixels.
  9001. *
  9002. * Uses NSNumber of intValue.
  9003. */
  9004. @property(nonatomic, strong, nullable) NSNumber *height;
  9005. /** Additional information detected on the page. */
  9006. @property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
  9007. /**
  9008. * Page width. For PDFs the unit is points. For images (including
  9009. * TIFFs) the unit is pixels.
  9010. *
  9011. * Uses NSNumber of intValue.
  9012. */
  9013. @property(nonatomic, strong, nullable) NSNumber *width;
  9014. @end
  9015. /**
  9016. * Structural unit of text representing a number of words in certain order.
  9017. */
  9018. @interface GTLRVision_Paragraph : GTLRObject
  9019. /**
  9020. * The bounding box for the paragraph.
  9021. * The vertices are in the order of top-left, top-right, bottom-right,
  9022. * bottom-left. When a rotation of the bounding box is detected the rotation
  9023. * is represented as around the top-left corner as defined when the text is
  9024. * read in the 'natural' orientation.
  9025. * For example:
  9026. * * when the text is horizontal it might look like:
  9027. * 0----1
  9028. * | |
  9029. * 3----2
  9030. * * when it's rotated 180 degrees around the top-left corner it becomes:
  9031. * 2----3
  9032. * | |
  9033. * 1----0
  9034. * and the vertex order will still be (0, 1, 2, 3).
  9035. */
  9036. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingBox;
  9037. /**
  9038. * Confidence of the OCR results for the paragraph. Range [0, 1].
  9039. *
  9040. * Uses NSNumber of floatValue.
  9041. */
  9042. @property(nonatomic, strong, nullable) NSNumber *confidence;
  9043. /** Additional information detected for the paragraph. */
  9044. @property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
  9045. /** List of words in this paragraph. */
  9046. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Word *> *words;
  9047. @end
  9048. /**
  9049. * A 3D position in the image, used primarily for Face detection landmarks.
  9050. * A valid Position must have both x and y coordinates.
  9051. * The position coordinates are in the same scale as the original image.
  9052. */
  9053. @interface GTLRVision_Position : GTLRObject
  9054. /**
  9055. * X coordinate.
  9056. *
  9057. * Uses NSNumber of floatValue.
  9058. */
  9059. @property(nonatomic, strong, nullable) NSNumber *x;
  9060. /**
  9061. * Y coordinate.
  9062. *
  9063. * Uses NSNumber of floatValue.
  9064. */
  9065. @property(nonatomic, strong, nullable) NSNumber *y;
  9066. /**
  9067. * Z coordinate (or depth).
  9068. *
  9069. * Uses NSNumber of floatValue.
  9070. */
  9071. @property(nonatomic, strong, nullable) NSNumber *z;
  9072. @end
  9073. /**
  9074. * A Product contains ReferenceImages.
  9075. */
  9076. @interface GTLRVision_Product : GTLRObject
  9077. /**
  9078. * User-provided metadata to be stored with this product. Must be at most 4096
  9079. * characters long.
  9080. *
  9081. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  9082. */
  9083. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  9084. /**
  9085. * The user-provided name for this Product. Must not be empty. Must be at most
  9086. * 4096 characters long.
  9087. */
  9088. @property(nonatomic, copy, nullable) NSString *displayName;
  9089. /**
  9090. * The resource name of the product.
  9091. * Format is:
  9092. * `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
  9093. * This field is ignored when creating a product.
  9094. */
  9095. @property(nonatomic, copy, nullable) NSString *name;
  9096. /**
  9097. * The category for the product identified by the reference image. This should
  9098. * be either "homegoods", "apparel", or "toys".
  9099. * This field is immutable.
  9100. */
  9101. @property(nonatomic, copy, nullable) NSString *productCategory;
  9102. /**
  9103. * Key-value pairs that can be attached to a product. At query time,
  9104. * constraints can be specified based on the product_labels.
  9105. * Note that integer values can be provided as strings, e.g. "1199". Only
  9106. * strings with integer values can match a range-based restriction which is
  9107. * to be supported soon.
  9108. * Multiple values can be assigned to the same key. One product may have up to
  9109. * 100 product_labels.
  9110. */
  9111. @property(nonatomic, strong, nullable) NSArray<GTLRVision_KeyValue *> *productLabels;
  9112. @end
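// Example (sketch): building a Product within the constraints described above.
// The `key`/`value` properties of GTLRVision_KeyValue are assumed from its
// declaration earlier in this header; all values are illustrative.
//
//   GTLRVision_KeyValue *label = [GTLRVision_KeyValue object];
//   label.key = @"style";
//   label.value = @"women";
//   GTLRVision_Product *product = [GTLRVision_Product object];
//   product.displayName = @"red-handbag";              // must not be empty
//   product.descriptionProperty = @"Sample handbag";   // remapped 'description'
//   product.productCategory = @"apparel";              // immutable after creation
//   product.productLabels = @[ label ];                // at most 100 labels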
  9113. /**
  9114. * Parameters for a product search request.
  9115. */
  9116. @interface GTLRVision_ProductSearchParams : GTLRObject
  9117. /**
  9118. * The bounding polygon around the area of interest in the image.
  9119. * Optional. If it is not specified, system discretion will be applied.
  9120. */
  9121. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingPoly;
  9122. /**
  9123. * The filtering expression. This can be used to restrict search results based
  9124. * on Product labels. We currently support an AND of ORs of key-value
  9125. * expressions, where each expression within an OR must have the same key.
  9126. * For example, "(color = red OR color = blue) AND brand = Google" is
  9127. * acceptable, but not "(color = red OR brand = Google)" or "color: red".
  9128. */
  9129. @property(nonatomic, copy, nullable) NSString *filter;
  9130. /**
  9131. * The list of product categories to search in. Currently, we only consider
  9132. * the first category, and either "homegoods", "apparel", or "toys" should be
  9133. * specified.
  9134. */
  9135. @property(nonatomic, strong, nullable) NSArray<NSString *> *productCategories;
  9136. /**
  9137. * The resource name of a ProductSet to be searched for similar images.
  9138. * Format is:
  9139. * `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
  9140. */
  9141. @property(nonatomic, copy, nullable) NSString *productSet;
  9142. @end
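// Example (sketch): product search parameters using the filter grammar and
// category rules documented above. The project, location, and ProductSet IDs
// are placeholders.
//
//   GTLRVision_ProductSearchParams *searchParams =
//       [GTLRVision_ProductSearchParams object];
//   searchParams.productSet =
//       @"projects/my-project/locations/us-west1/productSets/my-product-set";
//   searchParams.productCategories = @[ @"apparel" ];  // only the first is used
//   searchParams.filter = @"(color = red OR color = blue) AND brand = Google";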
  9143. /**
  9144. * Results for a product search request.
  9145. */
  9146. @interface GTLRVision_ProductSearchResults : GTLRObject
  9147. /**
  9148. * Timestamp of the index which provided these results. Changes made after
  9149. * this time are not reflected in the current results.
  9150. */
  9151. @property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
  9152. /**
  9153. * List of results grouped by products detected in the query image. Each entry
  9154. * corresponds to one bounding polygon in the query image, and contains the
  9155. * matching products specific to that region. There may be duplicate product
  9156. * matches in the union of all the per-product results.
  9157. */
  9158. @property(nonatomic, strong, nullable) NSArray<GTLRVision_GroupedResult *> *productGroupedResults;
  9159. /** List of results, one for each product match. */
  9160. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Result *> *results;
  9161. @end
  9162. /**
  9163. * A ProductSet contains Products. A ProductSet can contain a maximum of 1
  9164. * million reference images. If the limit is exceeded, periodic indexing will
  9165. * fail.
  9166. */
  9167. @interface GTLRVision_ProductSet : GTLRObject
  9168. /**
  9169. * The user-provided name for this ProductSet. Must not be empty. Must be at
  9170. * most 4096 characters long.
  9171. */
  9172. @property(nonatomic, copy, nullable) NSString *displayName;
  9173. /**
  9174. * Output only. If there was an error with indexing the product set, the field
  9175. * is populated.
  9176. * This field is ignored when creating a ProductSet.
  9177. */
  9178. @property(nonatomic, strong, nullable) GTLRVision_Status *indexError;
  9179. /**
  9180. * Output only. The time at which this ProductSet was last indexed. Query
  9181. * results will reflect all updates before this time. If this ProductSet has
  9182. * never been indexed, this timestamp is the default value
  9183. * "1970-01-01T00:00:00Z".
  9184. * This field is ignored when creating a ProductSet.
  9185. */
  9186. @property(nonatomic, strong, nullable) GTLRDateTime *indexTime;
  9187. /**
  9188. * The resource name of the ProductSet.
  9189. * Format is:
  9190. * `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
  9191. * This field is ignored when creating a ProductSet.
  9192. */
  9193. @property(nonatomic, copy, nullable) NSString *name;
  9194. @end
  9195. /**
  9196. * A `Property` consists of a user-supplied name/value pair.
  9197. */
  9198. @interface GTLRVision_Property : GTLRObject
  9199. /** Name of the property. */
  9200. @property(nonatomic, copy, nullable) NSString *name;
  9201. /**
  9202. * Value of numeric properties.
  9203. *
  9204. * Uses NSNumber of unsignedLongLongValue.
  9205. */
  9206. @property(nonatomic, strong, nullable) NSNumber *uint64Value;
  9207. /** Value of the property. */
  9208. @property(nonatomic, copy, nullable) NSString *value;
  9209. @end
  9210. /**
  9211. * A `ReferenceImage` represents a product image and its associated metadata,
  9212. * such as bounding boxes.
  9213. */
  9214. @interface GTLRVision_ReferenceImage : GTLRObject
  9215. /**
  9216. * Bounding polygons around the areas of interest in the reference image.
  9217. * Optional. If this field is empty, the system will try to detect regions of
  9218. * interest. At most 10 bounding polygons will be used.
  9219. * The provided shape is converted into a non-rotated rectangle. Once
  9220. * converted, the small edge of the rectangle must be greater than or equal
  9221. * to 300 pixels. The aspect ratio must be 1:4 or less (i.e. 1:3 is ok; 1:5
  9222. * is not).
  9223. */
  9224. @property(nonatomic, strong, nullable) NSArray<GTLRVision_BoundingPoly *> *boundingPolys;
  9225. /**
  9226. * The resource name of the reference image.
  9227. * Format is:
  9228. * `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID/referenceImages/IMAGE_ID`.
  9229. * This field is ignored when creating a reference image.
  9230. */
  9231. @property(nonatomic, copy, nullable) NSString *name;
  9232. /**
  9233. * The Google Cloud Storage URI of the reference image.
  9234. * The URI must start with `gs://`.
  9235. * Required.
  9236. */
  9237. @property(nonatomic, copy, nullable) NSString *uri;
  9238. @end
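// Example (sketch): a minimal ReferenceImage. Only `uri` is required; when
// `boundingPolys` is left empty the system attempts to detect regions of
// interest itself, as documented above. The bucket path is illustrative.
//
//   GTLRVision_ReferenceImage *referenceImage = [GTLRVision_ReferenceImage object];
//   referenceImage.uri = @"gs://my-bucket/products/red-handbag-front.jpg";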
  9239. /**
  9240. * Request message for the `RemoveProductFromProductSet` method.
  9241. */
  9242. @interface GTLRVision_RemoveProductFromProductSetRequest : GTLRObject
  9243. /**
  9244. * The resource name for the Product to be removed from this ProductSet.
  9245. * Format is:
  9246. * `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`
  9247. */
  9248. @property(nonatomic, copy, nullable) NSString *product;
  9249. @end
  9250. /**
  9251. * Information about a product.
  9252. */
  9253. @interface GTLRVision_Result : GTLRObject
  9254. /**
  9255. * The resource name of the image from the product that is the closest match
  9256. * to the query.
  9257. */
  9258. @property(nonatomic, copy, nullable) NSString *image;
  9259. /** The Product. */
  9260. @property(nonatomic, strong, nullable) GTLRVision_Product *product;
  9261. /**
  9262. * A confidence level on the match, ranging from 0 (no confidence) to
  9263. * 1 (full confidence).
  9264. *
  9265. * Uses NSNumber of floatValue.
  9266. */
  9267. @property(nonatomic, strong, nullable) NSNumber *score;
  9268. @end
  9269. /**
  9270. * Set of features pertaining to the image, computed by computer vision
  9271. * methods over safe-search verticals (for example, adult, spoof, medical,
  9272. * violence).
  9273. */
  9274. @interface GTLRVision_SafeSearchAnnotation : GTLRObject
  9275. /**
  9276. * Represents the adult content likelihood for the image. Adult content may
  9277. * contain elements such as nudity, pornographic images or cartoons, or
  9278. * sexual activities.
  9279. *
  9280. * Likely values:
  9281. * @arg @c kGTLRVision_SafeSearchAnnotation_Adult_Likely It is likely that
  9282. * the image belongs to the specified vertical. (Value: "LIKELY")
  9283. * @arg @c kGTLRVision_SafeSearchAnnotation_Adult_Possible It is possible
  9284. * that the image belongs to the specified vertical. (Value: "POSSIBLE")
  9285. * @arg @c kGTLRVision_SafeSearchAnnotation_Adult_Unknown Unknown likelihood.
  9286. * (Value: "UNKNOWN")
  9287. * @arg @c kGTLRVision_SafeSearchAnnotation_Adult_Unlikely It is unlikely
  9288. * that the image belongs to the specified vertical. (Value: "UNLIKELY")
  9289. * @arg @c kGTLRVision_SafeSearchAnnotation_Adult_VeryLikely It is very
  9290. * likely that the image belongs to the specified vertical. (Value:
  9291. * "VERY_LIKELY")
  9292. * @arg @c kGTLRVision_SafeSearchAnnotation_Adult_VeryUnlikely It is very
  9293. * unlikely that the image belongs to the specified vertical. (Value:
  9294. * "VERY_UNLIKELY")
  9295. */
  9296. @property(nonatomic, copy, nullable) NSString *adult;
  9297. /**
  9298. * Likelihood that this is a medical image.
  9299. *
  9300. * Likely values:
  9301. * @arg @c kGTLRVision_SafeSearchAnnotation_Medical_Likely It is likely that
  9302. * the image belongs to the specified vertical. (Value: "LIKELY")
  9303. * @arg @c kGTLRVision_SafeSearchAnnotation_Medical_Possible It is possible
  9304. * that the image belongs to the specified vertical. (Value: "POSSIBLE")
  9305. * @arg @c kGTLRVision_SafeSearchAnnotation_Medical_Unknown Unknown
  9306. * likelihood. (Value: "UNKNOWN")
  9307. * @arg @c kGTLRVision_SafeSearchAnnotation_Medical_Unlikely It is unlikely
  9308. * that the image belongs to the specified vertical. (Value: "UNLIKELY")
  9309. * @arg @c kGTLRVision_SafeSearchAnnotation_Medical_VeryLikely It is very
  9310. * likely that the image belongs to the specified vertical. (Value:
  9311. * "VERY_LIKELY")
  9312. * @arg @c kGTLRVision_SafeSearchAnnotation_Medical_VeryUnlikely It is very
  9313. * unlikely that the image belongs to the specified vertical. (Value:
  9314. * "VERY_UNLIKELY")
  9315. */
  9316. @property(nonatomic, copy, nullable) NSString *medical;
  9317. /**
  9318. * Likelihood that the request image contains racy content. Racy content may
  9319. * include (but is not limited to) skimpy or sheer clothing, strategically
  9320. * covered nudity, lewd or provocative poses, or close-ups of sensitive
  9321. * body areas.
  9322. *
  9323. * Likely values:
  9324. * @arg @c kGTLRVision_SafeSearchAnnotation_Racy_Likely It is likely that the
  9325. * image belongs to the specified vertical. (Value: "LIKELY")
  9326. * @arg @c kGTLRVision_SafeSearchAnnotation_Racy_Possible It is possible that
  9327. * the image belongs to the specified vertical. (Value: "POSSIBLE")
  9328. * @arg @c kGTLRVision_SafeSearchAnnotation_Racy_Unknown Unknown likelihood.
  9329. * (Value: "UNKNOWN")
  9330. * @arg @c kGTLRVision_SafeSearchAnnotation_Racy_Unlikely It is unlikely that
  9331. * the image belongs to the specified vertical. (Value: "UNLIKELY")
  9332. * @arg @c kGTLRVision_SafeSearchAnnotation_Racy_VeryLikely It is very likely
  9333. * that the image belongs to the specified vertical. (Value:
  9334. * "VERY_LIKELY")
  9335. * @arg @c kGTLRVision_SafeSearchAnnotation_Racy_VeryUnlikely It is very
  9336. * unlikely that the image belongs to the specified vertical. (Value:
  9337. * "VERY_UNLIKELY")
  9338. */
  9339. @property(nonatomic, copy, nullable) NSString *racy;
  9340. /**
  9341. * Spoof likelihood. The likelihood that a modification
  9342. * was made to the image's canonical version to make it appear
  9343. * funny or offensive.
  9344. *
  9345. * Likely values:
  9346. * @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_Likely It is likely that
  9347. * the image belongs to the specified vertical. (Value: "LIKELY")
  9348. * @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_Possible It is possible
  9349. * that the image belongs to the specified vertical. (Value: "POSSIBLE")
  9350. * @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_Unknown Unknown likelihood.
  9351. * (Value: "UNKNOWN")
  9352. * @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_Unlikely It is unlikely
  9353. * that the image belongs to the specified vertical. (Value: "UNLIKELY")
  9354. * @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_VeryLikely It is very
  9355. * likely that the image belongs to the specified vertical. (Value:
  9356. * "VERY_LIKELY")
  9357. * @arg @c kGTLRVision_SafeSearchAnnotation_Spoof_VeryUnlikely It is very
  9358. * unlikely that the image belongs to the specified vertical. (Value:
  9359. * "VERY_UNLIKELY")
  9360. */
  9361. @property(nonatomic, copy, nullable) NSString *spoof;
  9362. /**
  9363. * Likelihood that this image contains violent content.
  9364. *
  9365. * Likely values:
  9366. * @arg @c kGTLRVision_SafeSearchAnnotation_Violence_Likely It is likely that
  9367. * the image belongs to the specified vertical. (Value: "LIKELY")
  9368. * @arg @c kGTLRVision_SafeSearchAnnotation_Violence_Possible It is possible
  9369. * that the image belongs to the specified vertical. (Value: "POSSIBLE")
  9370. * @arg @c kGTLRVision_SafeSearchAnnotation_Violence_Unknown Unknown
  9371. * likelihood. (Value: "UNKNOWN")
  9372. * @arg @c kGTLRVision_SafeSearchAnnotation_Violence_Unlikely It is unlikely
  9373. * that the image belongs to the specified vertical. (Value: "UNLIKELY")
  9374. * @arg @c kGTLRVision_SafeSearchAnnotation_Violence_VeryLikely It is very
  9375. * likely that the image belongs to the specified vertical. (Value:
  9376. * "VERY_LIKELY")
  9377. * @arg @c kGTLRVision_SafeSearchAnnotation_Violence_VeryUnlikely It is very
  9378. * unlikely that the image belongs to the specified vertical. (Value:
  9379. * "VERY_UNLIKELY")
  9380. */
  9381. @property(nonatomic, copy, nullable) NSString *violence;
  9382. @end
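// Example (sketch): acting on a SafeSearchAnnotation using the likelihood
// constants listed above. The annotation is assumed to come from an annotate
// response; the threshold (LIKELY or stronger) is only one possible policy.
//
//   GTLRVision_SafeSearchAnnotation *safeSearch = ...;  // from a response
//   BOOL flaggedAdult =
//       [safeSearch.adult isEqual:kGTLRVision_SafeSearchAnnotation_Adult_Likely] ||
//       [safeSearch.adult isEqual:kGTLRVision_SafeSearchAnnotation_Adult_VeryLikely];
//   BOOL flaggedViolence =
//       [safeSearch.violence isEqual:kGTLRVision_SafeSearchAnnotation_Violence_Likely] ||
//       [safeSearch.violence isEqual:kGTLRVision_SafeSearchAnnotation_Violence_VeryLikely];
//   if (flaggedAdult || flaggedViolence) {
//     NSLog(@"Image flagged by SafeSearch (adult=%@, violence=%@)",
//           safeSearch.adult, safeSearch.violence);
//   }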
  9383. /**
  9384. * The `Status` type defines a logical error model that is suitable for
  9385. * different programming environments, including REST APIs and RPC APIs.
  9386. * It is used by
  9387. * [gRPC](https://github.com/grpc). The error model is designed to be:
  9388. * - Simple to use and understand for most users
  9389. * - Flexible enough to meet unexpected needs
  9390. * # Overview
  9391. * The `Status` message contains three pieces of data: error code,
  9392. * error message, and error details. The error code should be an enum
  9393. * value of
  9394. * google.rpc.Code, but it may accept additional error codes if needed. The
  9395. * error message should be a developer-facing English message that helps
  9396. * developers *understand* and *resolve* the error. If a localized user-facing
  9397. * error message is needed, put the localized message in the error details or
  9398. * localize it in the client. The optional error details may contain arbitrary
  9399. * information about the error. There is a predefined set of error detail types
  9400. * in the package `google.rpc` that can be used for common error conditions.
  9401. * # Language mapping
  9402. * The `Status` message is the logical representation of the error
  9403. * model, but it is not necessarily the actual wire format. When the
  9404. * `Status` message is exposed in different client libraries and
  9405. * different wire protocols, it can be mapped differently. For example,
  9406. * it will likely be mapped to some exceptions in Java, but more likely
  9407. * mapped to some error codes in C.
  9409. * # Other uses
  9410. * The error model and the `Status` message can be used in a variety of
  9411. * environments, either with or without APIs, to provide a
  9412. * consistent developer experience across different environments.
  9413. * Example uses of this error model include:
  9414. * - Partial errors. If a service needs to return partial errors to the client,
  9415. * it may embed the `Status` in the normal response to indicate the partial
  9416. * errors.
  9417. * - Workflow errors. A typical workflow has multiple steps. Each step may
  9418. * have a `Status` message for error reporting.
  9419. * - Batch operations. If a client uses batch request and batch response, the
  9420. * `Status` message should be used directly inside batch response, one for
  9421. * each error sub-response.
  9422. * - Asynchronous operations. If an API call embeds asynchronous operation
  9423. * results in its response, the status of those operations should be
  9424. * represented directly using the `Status` message.
  9425. * - Logging. If some API errors are stored in logs, the message `Status` could
  9426. * be used directly after any stripping needed for security/privacy reasons.
  9427. */
  9428. @interface GTLRVision_Status : GTLRObject
  9429. /**
  9430. * The status code, which should be an enum value of google.rpc.Code.
  9431. *
  9432. * Uses NSNumber of intValue.
  9433. */
  9434. @property(nonatomic, strong, nullable) NSNumber *code;
  9435. /**
  9436. * A list of messages that carry the error details. There is a common set of
  9437. * message types for APIs to use.
  9438. */
  9439. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Status_Details_Item *> *details;
  9440. /**
  9441. * A developer-facing error message, which should be in English. Any
  9442. * user-facing error message should be localized and sent in the
  9443. * google.rpc.Status.details field, or localized by the client.
  9444. */
  9445. @property(nonatomic, copy, nullable) NSString *message;
  9446. @end
  9447. /**
  9448. * GTLRVision_Status_Details_Item
  9449. *
  9450. * @note This class is documented as having more properties of any valid JSON
  9451. * type. Use @c -additionalJSONKeys and @c -additionalPropertyForName: to
  9452. * get the list of properties and then fetch them; or @c
  9453. * -additionalProperties to fetch them all at once.
  9454. */
  9455. @interface GTLRVision_Status_Details_Item : GTLRObject
  9456. @end
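// Example (sketch): unpacking a GTLRVision_Status such as the `error` of a
// GTLRVision_Operation or the `indexError` of a ProductSet. Each detail item
// carries arbitrary JSON, so it is read via the accessors noted above.
//
//   GTLRVision_Status *status = operation.error;  // or productSet.indexError
//   NSLog(@"Error %@: %@", status.code, status.message);
//   for (GTLRVision_Status_Details_Item *detail in status.details) {
//     NSLog(@"detail: %@", detail.additionalProperties);
//   }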
  9457. /**
  9458. * A single symbol representation.
  9459. */
  9460. @interface GTLRVision_Symbol : GTLRObject
  9461. /**
  9462. * The bounding box for the symbol.
  9463. * The vertices are in the order of top-left, top-right, bottom-right,
  9464. * bottom-left. When a rotation of the bounding box is detected the rotation
  9465. * is represented as around the top-left corner as defined when the text is
  9466. * read in the 'natural' orientation.
  9467. * For example:
  9468. * * when the text is horizontal it might look like:
  9469. * 0----1
  9470. * | |
  9471. * 3----2
  9472. * * when it's rotated 180 degrees around the top-left corner it becomes:
  9473. * 2----3
  9474. * | |
  9475. * 1----0
  9476. * and the vertex order will still be (0, 1, 2, 3).
  9477. */
  9478. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingBox;
  9479. /**
  9480. * Confidence of the OCR results for the symbol. Range [0, 1].
  9481. *
  9482. * Uses NSNumber of floatValue.
  9483. */
  9484. @property(nonatomic, strong, nullable) NSNumber *confidence;
  9485. /** Additional information detected for the symbol. */
  9486. @property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
  9487. /** The actual UTF-8 representation of the symbol. */
  9488. @property(nonatomic, copy, nullable) NSString *text;
  9489. @end
  9490. /**
  9491. * TextAnnotation contains a structured representation of OCR extracted text.
  9492. * The hierarchy of an OCR extracted text structure is like this:
  9493. * TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol
  9494. * Each structural component, starting from Page, may further have their own
  9495. * properties. Properties describe detected languages, breaks, etc.
  9496. * Please refer to the TextAnnotation.TextProperty message definition
  9497. * below for more detail.
  9499. */
  9500. @interface GTLRVision_TextAnnotation : GTLRObject
  9501. /** List of pages detected by OCR. */
  9502. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Page *> *pages;
  9503. /** UTF-8 text detected on the pages. */
  9504. @property(nonatomic, copy, nullable) NSString *text;
  9505. @end
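// Example (sketch): walking the OCR hierarchy described above
// (TextAnnotation -> Page -> Block -> Paragraph -> Word -> Symbol) to rebuild
// each word from its symbols. The `paragraphs` property of GTLRVision_Block is
// assumed from its declaration earlier in this header; the annotation itself
// would come from a text-detection response.
//
//   GTLRVision_TextAnnotation *annotation = ...;  // from a response
//   for (GTLRVision_Page *page in annotation.pages) {
//     for (GTLRVision_Block *block in page.blocks) {
//       for (GTLRVision_Paragraph *paragraph in block.paragraphs) {
//         for (GTLRVision_Word *word in paragraph.words) {
//           NSMutableString *wordText = [NSMutableString string];
//           for (GTLRVision_Symbol *symbol in word.symbols) {
//             [wordText appendString:(symbol.text ?: @"")];
//           }
//           NSLog(@"word: %@ (confidence %@)", wordText, word.confidence);
//         }
//       }
//     }
//   }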
  9506. /**
  9507. * Additional information detected on the structural component.
  9508. */
  9509. @interface GTLRVision_TextProperty : GTLRObject
  9510. /** Detected start or end of a text segment. */
  9511. @property(nonatomic, strong, nullable) GTLRVision_DetectedBreak *detectedBreak;
  9512. /** A list of detected languages together with confidence. */
  9513. @property(nonatomic, strong, nullable) NSArray<GTLRVision_DetectedLanguage *> *detectedLanguages;
  9514. @end
  9515. /**
  9516. * A vertex represents a 2D point in the image.
  9517. * NOTE: the vertex coordinates are in the same scale as the original image.
  9518. */
  9519. @interface GTLRVision_Vertex : GTLRObject
  9520. /**
  9521. * X coordinate.
  9522. *
  9523. * Uses NSNumber of intValue.
  9524. */
  9525. @property(nonatomic, strong, nullable) NSNumber *x;
  9526. /**
  9527. * Y coordinate.
  9528. *
  9529. * Uses NSNumber of intValue.
  9530. */
  9531. @property(nonatomic, strong, nullable) NSNumber *y;
  9532. @end
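// Example (sketch): converting a NormalizedVertex (range [0, 1], relative to
// the original image) into the pixel scale used by GTLRVision_Vertex, given
// the page dimensions. The 800x600 image size is illustrative.
//
//   GTLRVision_NormalizedVertex *normalized = [GTLRVision_NormalizedVertex object];
//   normalized.x = @0.25f;
//   normalized.y = @0.5f;
//   GTLRVision_Vertex *pixel = [GTLRVision_Vertex object];
//   pixel.x = @((int)lroundf(normalized.x.floatValue * 800));  // -> 200
//   pixel.y = @((int)lroundf(normalized.y.floatValue * 600));  // -> 300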
  9533. /**
  9534. * Relevant information for the image from the Internet.
  9535. */
  9536. @interface GTLRVision_WebDetection : GTLRObject
  9537. /**
  9538. * The service's best guess as to the topic of the request image.
  9539. * Inferred from similar images on the open web.
  9540. */
  9541. @property(nonatomic, strong, nullable) NSArray<GTLRVision_WebLabel *> *bestGuessLabels;
  9542. /**
  9543. * Fully matching images from the Internet.
  9544. * Can include resized copies of the query image.
  9545. */
  9546. @property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *fullMatchingImages;
  9547. /** Web pages containing the matching images from the Internet. */
  9548. @property(nonatomic, strong, nullable) NSArray<GTLRVision_WebPage *> *pagesWithMatchingImages;
  9549. /**
  9550. * Partial matching images from the Internet.
  9551. * Those images are similar enough to share some key-point features. For
  9552. * example an original image will likely have partial matching for its crops.
  9553. */
  9554. @property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *partialMatchingImages;
  9555. /** The visually similar image results. */
  9556. @property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *visuallySimilarImages;
  9557. /** Deduced entities from similar images on the Internet. */
  9558. @property(nonatomic, strong, nullable) NSArray<GTLRVision_WebEntity *> *webEntities;
  9559. @end
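// Example (sketch): summarizing a WebDetection result using the properties
// above; the detection is assumed to come from a web-detection response.
//
//   GTLRVision_WebDetection *webDetection = ...;  // from a response
//   for (GTLRVision_WebLabel *label in webDetection.bestGuessLabels) {
//     NSLog(@"best guess: %@ (%@)", label.label, label.languageCode);
//   }
//   for (GTLRVision_WebEntity *entity in webDetection.webEntities) {
//     NSLog(@"entity %@: %@ (score %@)",
//           entity.entityId, entity.descriptionProperty, entity.score);
//   }
//   NSUInteger fullMatches = webDetection.fullMatchingImages.count;
//   NSLog(@"%lu fully matching images", (unsigned long)fullMatches);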
  9560. /**
  9561. * Parameters for web detection request.
  9562. */
  9563. @interface GTLRVision_WebDetectionParams : GTLRObject
  9564. /**
  9565. * Whether to include results derived from the geo information in the image.
  9566. *
  9567. * Uses NSNumber of boolValue.
  9568. */
  9569. @property(nonatomic, strong, nullable) NSNumber *includeGeoResults;
  9570. @end
  9571. /**
  9572. * Entity deduced from similar images on the Internet.
  9573. */
  9574. @interface GTLRVision_WebEntity : GTLRObject
  9575. /**
  9576. * Canonical description of the entity, in English.
  9577. *
  9578. * Remapped to 'descriptionProperty' to avoid NSObject's 'description'.
  9579. */
  9580. @property(nonatomic, copy, nullable) NSString *descriptionProperty;
  9581. /** Opaque entity ID. */
  9582. @property(nonatomic, copy, nullable) NSString *entityId;
  9583. /**
  9584. * Overall relevancy score for the entity.
  9585. * Not normalized and not comparable across different image queries.
  9586. *
  9587. * Uses NSNumber of floatValue.
  9588. */
  9589. @property(nonatomic, strong, nullable) NSNumber *score;
  9590. @end
  9591. /**
  9592. * Metadata for online images.
  9593. */
  9594. @interface GTLRVision_WebImage : GTLRObject
  9595. /**
  9596. * (Deprecated) Overall relevancy score for the image.
  9597. *
  9598. * Uses NSNumber of floatValue.
  9599. */
  9600. @property(nonatomic, strong, nullable) NSNumber *score;
  9601. /** The result image URL. */
  9602. @property(nonatomic, copy, nullable) NSString *url;
  9603. @end
  9604. /**
  9605. * Label to provide extra metadata for the web detection.
  9606. */
  9607. @interface GTLRVision_WebLabel : GTLRObject
  9608. /** Label for extra metadata. */
  9609. @property(nonatomic, copy, nullable) NSString *label;
  9610. /**
  9611. * The BCP-47 language code for `label`, such as "en-US" or "sr-Latn".
  9612. * For more information, see
  9613. * http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
  9614. */
  9615. @property(nonatomic, copy, nullable) NSString *languageCode;
  9616. @end
  9617. /**
  9618. * Metadata for web pages.
  9619. */
  9620. @interface GTLRVision_WebPage : GTLRObject
  9621. /**
  9622. * Fully matching images on the page.
  9623. * Can include resized copies of the query image.
  9624. */
  9625. @property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *fullMatchingImages;
  9626. /** Title for the web page; may contain HTML markup. */
  9627. @property(nonatomic, copy, nullable) NSString *pageTitle;
  9628. /**
  9629. * Partial matching images on the page.
  9630. * Those images are similar enough to share some key-point features. For
  9631. * example an original image will likely have partial matching for its
  9632. * crops.
  9633. */
  9634. @property(nonatomic, strong, nullable) NSArray<GTLRVision_WebImage *> *partialMatchingImages;
  9635. /**
  9636. * (Deprecated) Overall relevancy score for the web page.
  9637. *
  9638. * Uses NSNumber of floatValue.
  9639. */
  9640. @property(nonatomic, strong, nullable) NSNumber *score;
  9641. /** The result web page URL. */
  9642. @property(nonatomic, copy, nullable) NSString *url;
  9643. @end
  9644. /**
  9645. * A word representation.
  9646. */
  9647. @interface GTLRVision_Word : GTLRObject
  9648. /**
  9649. * The bounding box for the word.
  9650. * The vertices are in the order of top-left, top-right, bottom-right,
  9651. * bottom-left. When a rotation of the bounding box is detected the rotation
  9652. * is represented as around the top-left corner as defined when the text is
  9653. * read in the 'natural' orientation.
  9654. * For example:
  9655. * * when the text is horizontal it might look like:
  9656. * 0----1
  9657. * | |
  9658. * 3----2
  9659. * * when it's rotated 180 degrees around the top-left corner it becomes:
  9660. * 2----3
  9661. * | |
  9662. * 1----0
  9663. * and the vertex order will still be (0, 1, 2, 3).
  9664. */
  9665. @property(nonatomic, strong, nullable) GTLRVision_BoundingPoly *boundingBox;
  9666. /**
  9667. * Confidence of the OCR results for the word. Range [0, 1].
  9668. *
  9669. * Uses NSNumber of floatValue.
  9670. */
  9671. @property(nonatomic, strong, nullable) NSNumber *confidence;
  9672. /** Additional information detected for the word. */
  9673. @property(nonatomic, strong, nullable) GTLRVision_TextProperty *property;
  9674. /**
  9675. * List of symbols in the word.
  9676. * The order of the symbols follows the natural reading order.
  9677. */
  9678. @property(nonatomic, strong, nullable) NSArray<GTLRVision_Symbol *> *symbols;
  9679. @end
  9680. NS_ASSUME_NONNULL_END
  9681. #pragma clang diagnostic pop